diff --git a/.gitignore b/.gitignore
index 08ac8dbda..eb2fc68b4 100644
--- a/.gitignore
+++ b/.gitignore
@@ -150,6 +150,9 @@ webhook-server.private-key.pem
 log-colors.json
 webhook_server/tests/manifests/logs
 .coverage_report.txt
+coverage.json
+webhook-examples
+find_unused_code.py
 
 # AI
 .cursor/
@@ -159,3 +162,4 @@ CLAUDE.md
 .claude/
 .claude-flow/
 .swarm/
+CRUSH.md
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 8ec9fa4ca..da0e383ee 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -61,3 +61,13 @@ repos:
       - id: mypy
         exclude: (tests/)
         additional_dependencies: [types-requests, types-PyYAML, types-colorama]
+
+  - repo: https://github.com/pre-commit/mirrors-eslint
+    rev: v9.38.0
+    hooks:
+      - id: eslint
+        files: \.js$
+        exclude: eslint\.config\.js
+        args: [--fix]
+        additional_dependencies:
+          - eslint@9.38.0
diff --git a/Dockerfile b/Dockerfile
index 63b4b2fad..e0ab5bd98 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -9,11 +9,12 @@ ENV PATH="$PATH:$BIN_DIR"
 ENV DATA_DIR="$HOME_DIR/data"
 ENV APP_DIR="$HOME_DIR/github-webhook-server"
 
+RUN systemd-machine-id-setup
+
 RUN dnf -y install dnf-plugins-core \
     && dnf -y update \
     && dnf -y install \
     git \
-    hub \
     unzip \
     gcc \
     python3-devel \
@@ -51,6 +52,7 @@ ENV UV_PYTHON=python3.13
 ENV UV_COMPILE_BYTECODE=1
 ENV UV_NO_SYNC=1
 ENV UV_CACHE_DIR=${APP_DIR}/.cache
+ENV PYTHONUNBUFFERED=1
 
 COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx ${BIN_DIR}/
 RUN uv tool install pre-commit && uv tool install poetry && uv tool install prek
@@ -62,7 +64,13 @@ RUN set -x \
     && chmod +x $BIN_DIR/rosa \
     && rm -rf $BIN_DIR/rosa-linux.tar.gz \
     && curl -L https://github.com/regclient/regclient/releases/latest/download/regctl-linux-amd64 >$BIN_DIR/regctl \
-    && chmod +x $BIN_DIR/regctl
+    && chmod +x $BIN_DIR/regctl \
+    && curl -L https://github.com/mislav/hub/releases/download/v2.14.2/hub-linux-amd64-2.14.2.tgz --output ${BIN_DIR}/hub-linux-amd64.tgz \
+    && tmp_dir="$(mktemp -d)" \
+    && tar xvf ${BIN_DIR}/hub-linux-amd64.tgz -C "${tmp_dir}" \
+    && mv "${tmp_dir}"/hub-linux-amd64-2.14.2/bin/hub ${BIN_DIR}/hub \
+    && chmod +x ${BIN_DIR}/hub \
+    && rm -rf "${tmp_dir}" ${BIN_DIR}/hub-linux-amd64.tgz
 
 WORKDIR $APP_DIR
 
diff --git a/README.md b/README.md
index 7d9fc3c75..e12f23a49 100644
--- a/README.md
+++ b/README.md
@@ -57,6 +57,11 @@ GitHub Events → Webhook Server → Repository Management
 └─────────────────────────────────────┘
 ```
 
+**Key Architecture Components:**
+
+- **Performance Optimized**: Repository data fetched efficiently to minimize API calls
+- **Type-Safe**: Full mypy strict mode coverage ensuring code reliability
+
 ## Features
 
 ### 🔧 Repository Management
diff --git a/entrypoint.py b/entrypoint.py
index 00051ba81..a44bdf8c2 100644
--- a/entrypoint.py
+++ b/entrypoint.py
@@ -46,6 +46,13 @@ def run_podman_cleanup() -> None:
     run_podman_cleanup()
 
     result = asyncio.run(repository_and_webhook_settings(webhook_secret=_webhook_secret))
+
+    # Logging Configuration:
+    # - Uvicorn uses default logging which automatically respects FORCE_COLOR environment variable
+    #   for colored terminal output (useful for Docker logs with color support)
+    # - Application logs use simple-logger with console=True for colored output in Docker logs
+    # - Both logging systems work together: uvicorn handles HTTP request logs,
+    #   while simple-logger handles application-level logs with structured formatting
     uvicorn.run(
         "webhook_server.app:FASTAPI_APP",
         host=_ip_bind,
diff --git a/eslint.config.js b/eslint.config.js
new file mode 100644
index 000000000..9f845d8c0
--- /dev/null
+++ b/eslint.config.js
@@ -0,0 +1,92 @@
+module.exports = [
+  // Frontend configuration - for browser-based JavaScript
+  {
+    files: ["webhook_server/web/static/**/*.js"],
+    languageOptions: {
+      ecmaVersion: 2022,
+      sourceType: "script",
+      globals: {
+        // Browser environment globals
+        window: "readonly",
+        document: "readonly",
+        console: "readonly",
+        fetch: "readonly",
+        WebSocket: "readonly",
+        localStorage: "readonly",
+        sessionStorage: "readonly",
+        alert: "readonly",
+        confirm: "readonly",
+        prompt: "readonly",
+        setTimeout: "readonly",
+        clearTimeout: "readonly",
+        setInterval: "readonly",
+        clearInterval: "readonly",
+        URLSearchParams: "readonly",
+        AbortController: "readonly",
+      },
+    },
+    rules: {
+      // ESLint recommended rules (manually specified for broader coverage)
+      "constructor-super": "error",
+      "for-direction": "error",
+      "getter-return": "error",
+      "no-async-promise-executor": "error",
+      "no-case-declarations": "error",
+      "no-class-assign": "error",
+      "no-compare-neg-zero": "error",
+      "no-cond-assign": "error",
+      "no-const-assign": "error",
+      "no-constant-condition": "error",
+      "no-control-regex": "error",
+      "no-debugger": "error",
+      "no-delete-var": "error",
+      "no-dupe-args": "error",
+      "no-dupe-class-members": "error",
+      "no-dupe-else-if": "error",
+      "no-dupe-keys": "error",
+      "no-duplicate-case": "error",
+      "no-empty": "error",
+      "no-empty-character-class": "error",
+      "no-empty-pattern": "error",
+      "no-ex-assign": "error",
+      "no-extra-boolean-cast": "error",
+      "no-fallthrough": "error",
+      "no-func-assign": "error",
+      "no-global-assign": "error",
+      "no-import-assign": "error",
+      "no-inner-declarations": "error",
+      "no-invalid-regexp": "error",
+      "no-irregular-whitespace": "error",
+      "no-loss-of-precision": "error",
+      "no-misleading-character-class": "error",
+      "no-new-symbol": "error",
+      "no-obj-calls": "error",
+      "no-octal": "error",
+      "no-prototype-builtins": "error",
+      "no-redeclare": "error",
+      "no-regex-spaces": "error",
+      "no-self-assign": "error",
+      "no-setter-return": "error",
+      "no-shadow-restricted-names": "error",
+      "no-sparse-arrays": "error",
+      "no-this-before-super": "error",
+      "no-unexpected-multiline": "error",
+      "no-unreachable": "error",
+      "no-unsafe-finally": "error",
+      "no-unsafe-negation": "error",
+      "no-unsafe-optional-chaining": "error",
+      "no-unused-labels": "error",
+      "no-useless-backreference": "error",
+      "no-useless-catch": "error",
+      "no-useless-escape": "error",
+      "no-with": "error",
+      "require-yield": "error",
+      "use-isnan": "error",
+      "valid-typeof": "error",
+      // Project-specific overrides
+      "no-unused-vars": "warn",
+      "no-undef": "error",
+      "no-console": "off",
+    },
+  },
+];
diff --git a/pyproject.toml b/pyproject.toml
index 7b88fd4a1..0fc6a5468 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -14,6 +14,12 @@ line-length = 120
 fix = true
 output-format = "grouped"
 
+[tool.ruff.lint]
+select = ["E", "F", "W", "I", "B", "UP", "PLC0415", "ARG"]
+
+[tool.ruff.lint.per-file-ignores]
+"webhook_server/tests/*" = ["ARG"]
+
 [tool.ruff.format]
 exclude = [".git", ".venv", ".mypy_cache", ".tox", "__pycache__"]
 
@@ -33,20 +39,11 @@ warn_redundant_casts = true
 [tool.hatch.build.targets.wheel]
 packages = ["webhook_server"]
 
-[tool.uv]
-dev-dependencies = [
-    "ipdb>=0.13.13",
-    "ipython>=8.12.3",
-    "types-colorama>=0.4.15.20240311",
-    "types-pyyaml>=6.0.12.20250516",
-    "types-requests>=2.32.4.20250611",
-]
-
 [project]
 name = "github-webhook-server"
 version = "3.0.7"
 requires-python = ">=3.12"
-description = "A webhook server to manage Github reposotories and pull requests."
+description = "A webhook server to manage Github repositories and pull requests."
 readme = "README.md"
 license = "Apache-2.0"
 classifiers = [
@@ -101,6 +98,11 @@ requires = ["hatchling"]
 build-backend = "hatchling.build"
 
 [dependency-groups]
-tests = [
-    "psutil>=7.0.0",
+dev = [
+    "ipdb>=0.13.13",
+    "ipython>=8.12.3",
+    "types-colorama>=0.4.15.20240311",
+    "types-pyyaml>=6.0.12.20250516",
+    "types-requests>=2.32.4.20250611",
 ]
+tests = ["psutil>=7.0.0", "pytest-asyncio>=0.26.0", "pytest-xdist>=3.7.0"]
diff --git a/scripts/generate_changelog.py b/scripts/generate_changelog.py
index 31902492f..b3a0151ac 100644
--- a/scripts/generate_changelog.py
+++ b/scripts/generate_changelog.py
@@ -10,17 +10,22 @@ def json_line(line: str) -> dict:
     Format str line to str that can be parsed with json.
 
     In case line is not formatted for json for example:
-    '{"title": "Revert "feat: Use git cliff to generate the change log. (#2322)" (#2324)", "commit": "137331fd", "author": "Meni Yakove", "date": "2025-02-16"}'
-    title have `"` inside the external `"` `"Revert "feat: Use git cliff to generate the change log. (#2322)" (#2324)"`
+    '{"title": "Revert "feat: Use git cliff to generate the change log. (#2322)" '
+    '(#2324)", "commit": "137331fd", "author": "Meni Yakove", "date": "2025-02-16"}'
+    title have `"` inside the external `"` `"Revert "feat: Use git cliff to '
+    'generate the change log. (#2322)" (#2324)"`
     """
     try:
         return json.loads(line)
     except json.JSONDecodeError:
         # split line like by `,`
-        # '{"title": "Revert "feat: Use git cliff to generate the change log. (#2322)" (#2324)", "commit": "137331fd", "author": "Meni Yakove", "date": "2025-02-16"}'
+        # '{"title": "Revert "feat: Use git cliff to generate the change log. '
+        # '(#2322)" (#2324)", "commit": "137331fd", "author": "Meni Yakove", '
+        # '"date": "2025-02-16"}'
         line_split = line.split(",")
 
-        # Pop and save `title key` and `title body` from '{"title": "Revert "feat: Use git cliff to generate the change log. (#2322)" (#2324)"'
+        # Pop and save `title key` and `title body` from '{"title": "Revert '
+        # '"feat: Use git cliff to generate the change log. (#2322)" (#2324)"'
         title_key, title_body = line_split.pop(0).split(":", 1)
 
         if title_body.count('"') > 2:
diff --git a/uv.lock b/uv.lock
index ad78ff7d0..1618bb22e 100644
--- a/uv.lock
+++ b/uv.lock
@@ -317,76 +317,76 @@ wheels = [
 
 [[package]]
 name = "coverage"
-version = "7.11.0"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/1c/38/ee22495420457259d2f3390309505ea98f98a5eed40901cf62196abad006/coverage-7.11.0.tar.gz", hash = "sha256:167bd504ac1ca2af7ff3b81d245dfea0292c5032ebef9d66cc08a7d28c1b8050", size = 811905, upload-time = "2025-10-15T15:15:08.542Z" }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/c4/db/86f6906a7c7edc1a52b2c6682d6dd9be775d73c0dfe2b84f8923dfea5784/coverage-7.11.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9c49e77811cf9d024b95faf86c3f059b11c0c9be0b0d61bc598f453703bd6fd1", size = 216098, upload-time = "2025-10-15T15:13:02.916Z" },
-    { url = "https://files.pythonhosted.org/packages/21/54/e7b26157048c7ba555596aad8569ff903d6cd67867d41b75287323678ede/coverage-7.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a61e37a403a778e2cda2a6a39abcc895f1d984071942a41074b5c7ee31642007", size = 216331, upload-time = "2025-10-15T15:13:04.403Z" },
-    { url = "https://files.pythonhosted.org/packages/b9/19/1ce6bf444f858b83a733171306134a0544eaddf1ca8851ede6540a55b2ad/coverage-7.11.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:c79cae102bb3b1801e2ef1511fb50e91ec83a1ce466b2c7c25010d884336de46", size = 247825, upload-time = "2025-10-15T15:13:05.92Z" },
-    { url = "https://files.pythonhosted.org/packages/71/0b/d3bcbbc259fcced5fb67c5d78f6e7ee965f49760c14afd931e9e663a83b2/coverage-7.11.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:16ce17ceb5d211f320b62df002fa7016b7442ea0fd260c11cec8ce7730954893", size = 250573, upload-time = "2025-10-15T15:13:07.471Z" },
-    { url = "https://files.pythonhosted.org/packages/58/8d/b0ff3641a320abb047258d36ed1c21d16be33beed4152628331a1baf3365/coverage-7.11.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:80027673e9d0bd6aef86134b0771845e2da85755cf686e7c7c59566cf5a89115", size = 251706, upload-time = "2025-10-15T15:13:09.4Z" },
-    { url = "https://files.pythonhosted.org/packages/59/c8/5a586fe8c7b0458053d9c687f5cff515a74b66c85931f7fe17a1c958b4ac/coverage-7.11.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:4d3ffa07a08657306cd2215b0da53761c4d73cb54d9143b9303a6481ec0cd415", size = 248221, upload-time = "2025-10-15T15:13:10.964Z" },
-    { url = "https://files.pythonhosted.org/packages/d0/ff/3a25e3132804ba44cfa9a778cdf2b73dbbe63ef4b0945e39602fc896ba52/coverage-7.11.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a3b6a5f8b2524fd6c1066bc85bfd97e78709bb5e37b5b94911a6506b65f47186", size = 249624, upload-time = "2025-10-15T15:13:12.5Z" },
-    { url = "https://files.pythonhosted.org/packages/c5/12/ff10c8ce3895e1b17a73485ea79ebc1896a9e466a9d0f4aef63e0d17b718/coverage-7.11.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:fcc0a4aa589de34bc56e1a80a740ee0f8c47611bdfb28cd1849de60660f3799d", size = 247744, upload-time = "2025-10-15T15:13:14.554Z" },
-    { url = "https://files.pythonhosted.org/packages/16/02/d500b91f5471b2975947e0629b8980e5e90786fe316b6d7299852c1d793d/coverage-7.11.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:dba82204769d78c3fd31b35c3d5f46e06511936c5019c39f98320e05b08f794d", size = 247325, upload-time = "2025-10-15T15:13:16.438Z" },
-    { url = "https://files.pythonhosted.org/packages/77/11/dee0284fbbd9cd64cfce806b827452c6df3f100d9e66188e82dfe771d4af/coverage-7.11.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:81b335f03ba67309a95210caf3eb43bd6fe75a4e22ba653ef97b4696c56c7ec2", size = 249180, upload-time = "2025-10-15T15:13:17.959Z" },
-    { url = "https://files.pythonhosted.org/packages/59/1b/cdf1def928f0a150a057cab03286774e73e29c2395f0d30ce3d9e9f8e697/coverage-7.11.0-cp312-cp312-win32.whl", hash = "sha256:037b2d064c2f8cc8716fe4d39cb705779af3fbf1ba318dc96a1af858888c7bb5", size = 218479, upload-time = "2025-10-15T15:13:19.608Z" },
-    { url = "https://files.pythonhosted.org/packages/ff/55/e5884d55e031da9c15b94b90a23beccc9d6beee65e9835cd6da0a79e4f3a/coverage-7.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:d66c0104aec3b75e5fd897e7940188ea1892ca1d0235316bf89286d6a22568c0", size = 219290, upload-time = "2025-10-15T15:13:21.593Z" },
-    { url = "https://files.pythonhosted.org/packages/23/a8/faa930cfc71c1d16bc78f9a19bb73700464f9c331d9e547bfbc1dbd3a108/coverage-7.11.0-cp312-cp312-win_arm64.whl", hash = "sha256:d91ebeac603812a09cf6a886ba6e464f3bbb367411904ae3790dfe28311b15ad", size = 217924, upload-time = "2025-10-15T15:13:23.39Z" },
-    { url = "https://files.pythonhosted.org/packages/60/7f/85e4dfe65e400645464b25c036a26ac226cf3a69d4a50c3934c532491cdd/coverage-7.11.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:cc3f49e65ea6e0d5d9bd60368684fe52a704d46f9e7fc413918f18d046ec40e1", size = 216129, upload-time = "2025-10-15T15:13:25.371Z" },
-    { url = "https://files.pythonhosted.org/packages/96/5d/dc5fa98fea3c175caf9d360649cb1aa3715e391ab00dc78c4c66fabd7356/coverage-7.11.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f39ae2f63f37472c17b4990f794035c9890418b1b8cca75c01193f3c8d3e01be", size = 216380, upload-time = "2025-10-15T15:13:26.976Z" },
-    { url = "https://files.pythonhosted.org/packages/b2/f5/3da9cc9596708273385189289c0e4d8197d37a386bdf17619013554b3447/coverage-7.11.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:7db53b5cdd2917b6eaadd0b1251cf4e7d96f4a8d24e174bdbdf2f65b5ea7994d", size = 247375, upload-time = "2025-10-15T15:13:28.923Z" },
-    { url = "https://files.pythonhosted.org/packages/65/6c/f7f59c342359a235559d2bc76b0c73cfc4bac7d61bb0df210965cb1ecffd/coverage-7.11.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:10ad04ac3a122048688387828b4537bc9cf60c0bf4869c1e9989c46e45690b82", size = 249978, upload-time = "2025-10-15T15:13:30.525Z" },
-    { url = "https://files.pythonhosted.org/packages/e7/8c/042dede2e23525e863bf1ccd2b92689692a148d8b5fd37c37899ba882645/coverage-7.11.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4036cc9c7983a2b1f2556d574d2eb2154ac6ed55114761685657e38782b23f52", size = 251253, upload-time = "2025-10-15T15:13:32.174Z" },
-    { url = "https://files.pythonhosted.org/packages/7b/a9/3c58df67bfa809a7bddd786356d9c5283e45d693edb5f3f55d0986dd905a/coverage-7.11.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:7ab934dd13b1c5e94b692b1e01bd87e4488cb746e3a50f798cb9464fd128374b", size = 247591, upload-time = "2025-10-15T15:13:34.147Z" },
-    { url = "https://files.pythonhosted.org/packages/26/5b/c7f32efd862ee0477a18c41e4761305de6ddd2d49cdeda0c1116227570fd/coverage-7.11.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:59a6e5a265f7cfc05f76e3bb53eca2e0dfe90f05e07e849930fecd6abb8f40b4", size = 249411, upload-time = "2025-10-15T15:13:38.425Z" },
-    { url = "https://files.pythonhosted.org/packages/76/b5/78cb4f1e86c1611431c990423ec0768122905b03837e1b4c6a6f388a858b/coverage-7.11.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:df01d6c4c81e15a7c88337b795bb7595a8596e92310266b5072c7e301168efbd", size = 247303, upload-time = "2025-10-15T15:13:40.464Z" },
-    { url = "https://files.pythonhosted.org/packages/87/c9/23c753a8641a330f45f221286e707c427e46d0ffd1719b080cedc984ec40/coverage-7.11.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:8c934bd088eed6174210942761e38ee81d28c46de0132ebb1801dbe36a390dcc", size = 247157, upload-time = "2025-10-15T15:13:42.087Z" },
-    { url = "https://files.pythonhosted.org/packages/c5/42/6e0cc71dc8a464486e944a4fa0d85bdec031cc2969e98ed41532a98336b9/coverage-7.11.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5a03eaf7ec24078ad64a07f02e30060aaf22b91dedf31a6b24d0d98d2bba7f48", size = 248921, upload-time = "2025-10-15T15:13:43.715Z" },
-    { url = "https://files.pythonhosted.org/packages/e8/1c/743c2ef665e6858cccb0f84377dfe3a4c25add51e8c7ef19249be92465b6/coverage-7.11.0-cp313-cp313-win32.whl", hash = "sha256:695340f698a5f56f795b2836abe6fb576e7c53d48cd155ad2f80fd24bc63a040", size = 218526, upload-time = "2025-10-15T15:13:45.336Z" },
-    { url = "https://files.pythonhosted.org/packages/ff/d5/226daadfd1bf8ddbccefbd3aa3547d7b960fb48e1bdac124e2dd13a2b71a/coverage-7.11.0-cp313-cp313-win_amd64.whl", hash = "sha256:2727d47fce3ee2bac648528e41455d1b0c46395a087a229deac75e9f88ba5a05", size = 219317, upload-time = "2025-10-15T15:13:47.401Z" },
-    { url = "https://files.pythonhosted.org/packages/97/54/47db81dcbe571a48a298f206183ba8a7ba79200a37cd0d9f4788fcd2af4a/coverage-7.11.0-cp313-cp313-win_arm64.whl", hash = "sha256:0efa742f431529699712b92ecdf22de8ff198df41e43aeaaadf69973eb93f17a", size = 217948, upload-time = "2025-10-15T15:13:49.096Z" },
-    { url = "https://files.pythonhosted.org/packages/e5/8b/cb68425420154e7e2a82fd779a8cc01549b6fa83c2ad3679cd6c088ebd07/coverage-7.11.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:587c38849b853b157706407e9ebdca8fd12f45869edb56defbef2daa5fb0812b", size = 216837, upload-time = "2025-10-15T15:13:51.09Z" },
-    { url = "https://files.pythonhosted.org/packages/33/55/9d61b5765a025685e14659c8d07037247de6383c0385757544ffe4606475/coverage-7.11.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b971bdefdd75096163dd4261c74be813c4508477e39ff7b92191dea19f24cd37", size = 217061, upload-time = "2025-10-15T15:13:52.747Z" },
-    { url = "https://files.pythonhosted.org/packages/52/85/292459c9186d70dcec6538f06ea251bc968046922497377bf4a1dc9a71de/coverage-7.11.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:269bfe913b7d5be12ab13a95f3a76da23cf147be7fa043933320ba5625f0a8de", size = 258398, upload-time = "2025-10-15T15:13:54.45Z" },
-    { url = "https://files.pythonhosted.org/packages/1f/e2/46edd73fb8bf51446c41148d81944c54ed224854812b6ca549be25113ee0/coverage-7.11.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:dadbcce51a10c07b7c72b0ce4a25e4b6dcb0c0372846afb8e5b6307a121eb99f", size = 260574, upload-time = "2025-10-15T15:13:56.145Z" },
-    { url = "https://files.pythonhosted.org/packages/07/5e/1df469a19007ff82e2ca8fe509822820a31e251f80ee7344c34f6cd2ec43/coverage-7.11.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9ed43fa22c6436f7957df036331f8fe4efa7af132054e1844918866cd228af6c", size = 262797, upload-time = "2025-10-15T15:13:58.635Z" },
-    { url = "https://files.pythonhosted.org/packages/f9/50/de216b31a1434b94d9b34a964c09943c6be45069ec704bfc379d8d89a649/coverage-7.11.0-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:9516add7256b6713ec08359b7b05aeff8850c98d357784c7205b2e60aa2513fa", size = 257361, upload-time = "2025-10-15T15:14:00.409Z" },
-    { url = "https://files.pythonhosted.org/packages/82/1e/3f9f8344a48111e152e0fd495b6fff13cc743e771a6050abf1627a7ba918/coverage-7.11.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:eb92e47c92fcbcdc692f428da67db33337fa213756f7adb6a011f7b5a7a20740", size = 260349, upload-time = "2025-10-15T15:14:02.188Z" },
-    { url = "https://files.pythonhosted.org/packages/65/9b/3f52741f9e7d82124272f3070bbe316006a7de1bad1093f88d59bfc6c548/coverage-7.11.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:d06f4fc7acf3cabd6d74941d53329e06bab00a8fe10e4df2714f0b134bfc64ef", size = 258114, upload-time = "2025-10-15T15:14:03.907Z" },
-    { url = "https://files.pythonhosted.org/packages/0b/8b/918f0e15f0365d50d3986bbd3338ca01178717ac5678301f3f547b6619e6/coverage-7.11.0-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:6fbcee1a8f056af07ecd344482f711f563a9eb1c2cad192e87df00338ec3cdb0", size = 256723, upload-time = "2025-10-15T15:14:06.324Z" },
-    { url = "https://files.pythonhosted.org/packages/44/9e/7776829f82d3cf630878a7965a7d70cc6ca94f22c7d20ec4944f7148cb46/coverage-7.11.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dbbf012be5f32533a490709ad597ad8a8ff80c582a95adc8d62af664e532f9ca", size = 259238, upload-time = "2025-10-15T15:14:08.002Z" },
-    { url = "https://files.pythonhosted.org/packages/9a/b8/49cf253e1e7a3bedb85199b201862dd7ca4859f75b6cf25ffa7298aa0760/coverage-7.11.0-cp313-cp313t-win32.whl", hash = "sha256:cee6291bb4fed184f1c2b663606a115c743df98a537c969c3c64b49989da96c2", size = 219180, upload-time = "2025-10-15T15:14:09.786Z" },
-    { url = "https://files.pythonhosted.org/packages/ac/e1/1a541703826be7ae2125a0fb7f821af5729d56bb71e946e7b933cc7a89a4/coverage-7.11.0-cp313-cp313t-win_amd64.whl", hash = "sha256:a386c1061bf98e7ea4758e4313c0ab5ecf57af341ef0f43a0bf26c2477b5c268", size = 220241, upload-time = "2025-10-15T15:14:11.471Z" },
-    { url = "https://files.pythonhosted.org/packages/d5/d1/5ee0e0a08621140fd418ec4020f595b4d52d7eb429ae6a0c6542b4ba6f14/coverage-7.11.0-cp313-cp313t-win_arm64.whl", hash = "sha256:f9ea02ef40bb83823b2b04964459d281688fe173e20643870bb5d2edf68bc836", size = 218510, upload-time = "2025-10-15T15:14:13.46Z" },
-    { url = "https://files.pythonhosted.org/packages/f4/06/e923830c1985ce808e40a3fa3eb46c13350b3224b7da59757d37b6ce12b8/coverage-7.11.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:c770885b28fb399aaf2a65bbd1c12bf6f307ffd112d6a76c5231a94276f0c497", size = 216110, upload-time = "2025-10-15T15:14:15.157Z" },
-    { url = "https://files.pythonhosted.org/packages/42/82/cdeed03bfead45203fb651ed756dfb5266028f5f939e7f06efac4041dad5/coverage-7.11.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:a3d0e2087dba64c86a6b254f43e12d264b636a39e88c5cc0a01a7c71bcfdab7e", size = 216395, upload-time = "2025-10-15T15:14:16.863Z" },
-    { url = "https://files.pythonhosted.org/packages/fc/ba/e1c80caffc3199aa699813f73ff097bc2df7b31642bdbc7493600a8f1de5/coverage-7.11.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:73feb83bb41c32811973b8565f3705caf01d928d972b72042b44e97c71fd70d1", size = 247433, upload-time = "2025-10-15T15:14:18.589Z" },
-    { url = "https://files.pythonhosted.org/packages/80/c0/5b259b029694ce0a5bbc1548834c7ba3db41d3efd3474489d7efce4ceb18/coverage-7.11.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c6f31f281012235ad08f9a560976cc2fc9c95c17604ff3ab20120fe480169bca", size = 249970, upload-time = "2025-10-15T15:14:20.307Z" },
-    { url = "https://files.pythonhosted.org/packages/8c/86/171b2b5e1aac7e2fd9b43f7158b987dbeb95f06d1fbecad54ad8163ae3e8/coverage-7.11.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e9570ad567f880ef675673992222746a124b9595506826b210fbe0ce3f0499cd", size = 251324, upload-time = "2025-10-15T15:14:22.419Z" },
-    { url = "https://files.pythonhosted.org/packages/1a/7e/7e10414d343385b92024af3932a27a1caf75c6e27ee88ba211221ff1a145/coverage-7.11.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8badf70446042553a773547a61fecaa734b55dc738cacf20c56ab04b77425e43", size = 247445, upload-time = "2025-10-15T15:14:24.205Z" },
-    { url = "https://files.pythonhosted.org/packages/c4/3b/e4f966b21f5be8c4bf86ad75ae94efa0de4c99c7bbb8114476323102e345/coverage-7.11.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:a09c1211959903a479e389685b7feb8a17f59ec5a4ef9afde7650bd5eabc2777", size = 249324, upload-time = "2025-10-15T15:14:26.234Z" },
-    { url = "https://files.pythonhosted.org/packages/00/a2/8479325576dfcd909244d0df215f077f47437ab852ab778cfa2f8bf4d954/coverage-7.11.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:5ef83b107f50db3f9ae40f69e34b3bd9337456c5a7fe3461c7abf8b75dd666a2", size = 247261, upload-time = "2025-10-15T15:14:28.42Z" },
-    { url = "https://files.pythonhosted.org/packages/7b/d8/3a9e2db19d94d65771d0f2e21a9ea587d11b831332a73622f901157cc24b/coverage-7.11.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:f91f927a3215b8907e214af77200250bb6aae36eca3f760f89780d13e495388d", size = 247092, upload-time = "2025-10-15T15:14:30.784Z" },
-    { url = "https://files.pythonhosted.org/packages/b3/b1/bbca3c472544f9e2ad2d5116b2379732957048be4b93a9c543fcd0207e5f/coverage-7.11.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:cdbcd376716d6b7fbfeedd687a6c4be019c5a5671b35f804ba76a4c0a778cba4", size = 248755, upload-time = "2025-10-15T15:14:32.585Z" },
-    { url = "https://files.pythonhosted.org/packages/89/49/638d5a45a6a0f00af53d6b637c87007eb2297042186334e9923a61aa8854/coverage-7.11.0-cp314-cp314-win32.whl", hash = "sha256:bab7ec4bb501743edc63609320aaec8cd9188b396354f482f4de4d40a9d10721", size = 218793, upload-time = "2025-10-15T15:14:34.972Z" },
-    { url = "https://files.pythonhosted.org/packages/30/cc/b675a51f2d068adb3cdf3799212c662239b0ca27f4691d1fff81b92ea850/coverage-7.11.0-cp314-cp314-win_amd64.whl", hash = "sha256:3d4ba9a449e9364a936a27322b20d32d8b166553bfe63059bd21527e681e2fad", size = 219587, upload-time = "2025-10-15T15:14:37.047Z" },
-    { url = "https://files.pythonhosted.org/packages/93/98/5ac886876026de04f00820e5094fe22166b98dcb8b426bf6827aaf67048c/coverage-7.11.0-cp314-cp314-win_arm64.whl", hash = "sha256:ce37f215223af94ef0f75ac68ea096f9f8e8c8ec7d6e8c346ee45c0d363f0479", size = 218168, upload-time = "2025-10-15T15:14:38.861Z" },
-    { url = "https://files.pythonhosted.org/packages/14/d1/b4145d35b3e3ecf4d917e97fc8895bcf027d854879ba401d9ff0f533f997/coverage-7.11.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:f413ce6e07e0d0dc9c433228727b619871532674b45165abafe201f200cc215f", size = 216850, upload-time = "2025-10-15T15:14:40.651Z" },
-    { url = "https://files.pythonhosted.org/packages/ca/d1/7f645fc2eccd318369a8a9948acc447bb7c1ade2911e31d3c5620544c22b/coverage-7.11.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:05791e528a18f7072bf5998ba772fe29db4da1234c45c2087866b5ba4dea710e", size = 217071, upload-time = "2025-10-15T15:14:42.755Z" },
-    { url = "https://files.pythonhosted.org/packages/54/7d/64d124649db2737ceced1dfcbdcb79898d5868d311730f622f8ecae84250/coverage-7.11.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cacb29f420cfeb9283b803263c3b9a068924474ff19ca126ba9103e1278dfa44", size = 258570, upload-time = "2025-10-15T15:14:44.542Z" },
-    { url = "https://files.pythonhosted.org/packages/6c/3f/6f5922f80dc6f2d8b2c6f974835c43f53eb4257a7797727e6ca5b7b2ec1f/coverage-7.11.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:314c24e700d7027ae3ab0d95fbf8d53544fca1f20345fd30cd219b737c6e58d3", size = 260738, upload-time = "2025-10-15T15:14:46.436Z" },
-    { url = "https://files.pythonhosted.org/packages/0e/5f/9e883523c4647c860b3812b417a2017e361eca5b635ee658387dc11b13c1/coverage-7.11.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:630d0bd7a293ad2fc8b4b94e5758c8b2536fdf36c05f1681270203e463cbfa9b", size = 262994, upload-time = "2025-10-15T15:14:48.3Z" },
-    { url = "https://files.pythonhosted.org/packages/07/bb/43b5a8e94c09c8bf51743ffc65c4c841a4ca5d3ed191d0a6919c379a1b83/coverage-7.11.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e89641f5175d65e2dbb44db15fe4ea48fade5d5bbb9868fdc2b4fce22f4a469d", size = 257282, upload-time = "2025-10-15T15:14:50.236Z" },
-    { url = "https://files.pythonhosted.org/packages/aa/e5/0ead8af411411330b928733e1d201384b39251a5f043c1612970310e8283/coverage-7.11.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:c9f08ea03114a637dab06cedb2e914da9dc67fa52c6015c018ff43fdde25b9c2", size = 260430, upload-time = "2025-10-15T15:14:52.413Z" },
-    { url = "https://files.pythonhosted.org/packages/ae/66/03dd8bb0ba5b971620dcaac145461950f6d8204953e535d2b20c6b65d729/coverage-7.11.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:ce9f3bde4e9b031eaf1eb61df95c1401427029ea1bfddb8621c1161dcb0fa02e", size = 258190, upload-time = "2025-10-15T15:14:54.268Z" },
-    { url = "https://files.pythonhosted.org/packages/45/ae/28a9cce40bf3174426cb2f7e71ee172d98e7f6446dff936a7ccecee34b14/coverage-7.11.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:e4dc07e95495923d6fd4d6c27bf70769425b71c89053083843fd78f378558996", size = 256658, upload-time = "2025-10-15T15:14:56.436Z" },
-    { url = "https://files.pythonhosted.org/packages/5c/7c/3a44234a8599513684bfc8684878fd7b126c2760f79712bb78c56f19efc4/coverage-7.11.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:424538266794db2861db4922b05d729ade0940ee69dcf0591ce8f69784db0e11", size = 259342, upload-time = "2025-10-15T15:14:58.538Z" },
-    { url = "https://files.pythonhosted.org/packages/e1/e6/0108519cba871af0351725ebdb8660fd7a0fe2ba3850d56d32490c7d9b4b/coverage-7.11.0-cp314-cp314t-win32.whl", hash = "sha256:4c1eeb3fb8eb9e0190bebafd0462936f75717687117339f708f395fe455acc73", size = 219568, upload-time = "2025-10-15T15:15:00.382Z" },
-    { url = "https://files.pythonhosted.org/packages/c9/76/44ba876e0942b4e62fdde23ccb029ddb16d19ba1bef081edd00857ba0b16/coverage-7.11.0-cp314-cp314t-win_amd64.whl", hash = "sha256:b56efee146c98dbf2cf5cffc61b9829d1e94442df4d7398b26892a53992d3547", size = 220687, upload-time = "2025-10-15T15:15:02.322Z" },
-    { url = "https://files.pythonhosted.org/packages/b9/0c/0df55ecb20d0d0ed5c322e10a441775e1a3a5d78c60f0c4e1abfe6fcf949/coverage-7.11.0-cp314-cp314t-win_arm64.whl", hash = "sha256:b5c2705afa83f49bd91962a4094b6b082f94aef7626365ab3f8f4bd159c5acf3", size = 218711, upload-time = "2025-10-15T15:15:04.575Z" },
-    { url = "https://files.pythonhosted.org/packages/5f/04/642c1d8a448ae5ea1369eac8495740a79eb4e581a9fb0cbdce56bbf56da1/coverage-7.11.0-py3-none-any.whl", hash = "sha256:4b7589765348d78fb4e5fb6ea35d07564e387da2fc5efff62e0222971f155f68", size = 207761, upload-time = "2025-10-15T15:15:06.439Z" },
+version = "7.11.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/89/12/3e2d2ec71796e0913178478e693a06af6a3bc9f7f9cb899bf85a426d8370/coverage-7.11.1.tar.gz", hash = "sha256:b4b3a072559578129a9e863082a2972a2abd8975bc0e2ec57da96afcd6580a8a", size = 814037, upload-time = "2025-11-07T10:52:41.067Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/0f/31/04af7e42fdb3681e4d73d37bf3f375f0488aa38d1001ee746c7dbfe09643/coverage-7.11.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:421e2d237dcecdefa9b77cae1aa0dfff5c495f29e053e776172457e289976311", size = 216896, upload-time = "2025-11-07T10:50:31.429Z" },
+    { url = "https://files.pythonhosted.org/packages/f5/e9/1c3628a1225bdea66295a117cd2bb1d324d9c433c40078b24d50f55448a7/coverage-7.11.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:08ef89c812072ecd52a862b46e131f75596475d23cc7f5a75410394341d4332f", size = 217261, upload-time = "2025-11-07T10:50:33.008Z" },
+    { url = "https://files.pythonhosted.org/packages/2b/80/4d4f943da23c432b2bba8664f4eada9b19911081852e8cc89776c61d0b94/coverage-7.11.1-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:bc6e0b2d6ed317810b4e435ffabc31b2d517d6ceb4183dfd6af4748c52d170eb", size = 248742, upload-time = "2025-11-07T10:50:34.634Z" },
+    { url = "https://files.pythonhosted.org/packages/e3/e1/c4b42f02fbb6ce08e05d7a2b26bcf5df11d3e67a3806e40415f7ab9511e7/coverage-7.11.1-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b59736704df8b1f8b1dafb36b16f2ef8a952e4410465634442459426bd2319ae", size = 251503, upload-time = "2025-11-07T10:50:36.501Z" },
+    { url = "https://files.pythonhosted.org/packages/31/a8/3df60e88f1dabccae4994c6df4a2f23d4cd0eee27fc3ae8f0bb2e78cb538/coverage-7.11.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:843816452d8bfc4c2be72546b3b382850cb91150feaa963ec7d2b665ec9d4768", size = 252590, upload-time = "2025-11-07T10:50:38.059Z" },
+    { url = "https://files.pythonhosted.org/packages/06/1c/2b9fae11361b0348c2d3612a8179d2cc8b6b245e8b14d5479c75b9f18613/coverage-7.11.1-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:19363046125d4a423c25d3d7c90bab3a0230932c16014198f87a6b3960c1b187", size = 249133, upload-time = "2025-11-07T10:50:39.648Z" },
+    { url = "https://files.pythonhosted.org/packages/b8/2b/e33712a8eede02762a536bdc2f89e736e0ad87bd13b35d724306585aeb54/coverage-7.11.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4e37486aed7045c280ebdc207026bdef9267730177d929a5e25250e1f33cc125", size = 250524, upload-time = "2025-11-07T10:50:41.59Z" },
+    { url = "https://files.pythonhosted.org/packages/84/c9/6181877977a0f6e46b9c93a8382b8c671769fb12df8a15be8d6091541b77/coverage-7.11.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:7c68180e67b4843674bfb1d3ec928ffcfc94081b5da959e616405eca51c23356", size = 248673, upload-time = "2025-11-07T10:50:43.153Z" },
+    { url = "https://files.pythonhosted.org/packages/9b/d6/ff26c2eb57d4dcd46c6ed136d6b04aceb7f58f48dcc500c77f7194711a6f/coverage-7.11.1-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:cf825b60f94d1706c22d4887310db26cc3117d545ac6ad4229b4a0d718afcf9a", size = 248251, upload-time = "2025-11-07T10:50:45.069Z" },
+    { url = "https://files.pythonhosted.org/packages/f9/ff/411803f1fcb9efe00afbc96442564cc691f537541a8bde377cf1ac04e695/coverage-7.11.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:437149272ff0440df66044bd6ee87cbc252463754ca43cafa496cfb2f57f56dd", size = 250111, upload-time = "2025-11-07T10:50:46.701Z" },
+    { url = "https://files.pythonhosted.org/packages/c1/9f/781c045e1e5f8930f8266f224318040413b60837749d2ed11883b7478c81/coverage-7.11.1-cp312-cp312-win32.whl", hash = "sha256:98ea0b8d1addfc333494c2248af367e8ecb27724a99804a18376b801f876da58", size = 219407, upload-time = "2025-11-07T10:50:48.862Z" },
+    { url = "https://files.pythonhosted.org/packages/26/59/813d8eedc96a781e8a6f9c37f6ecb4326ebbffdafe2e1154ed2def468b76/coverage-7.11.1-cp312-cp312-win_amd64.whl", hash = "sha256:7d49a473799e55a465bcadd19525977ab80031b8b86baaa622241808df4585cd", size = 220220, upload-time = "2025-11-07T10:50:51.576Z" },
+    { url = "https://files.pythonhosted.org/packages/63/5f/c0905d9159d38194943a21d7d013f1c2f0c43e7d63f680ed56269728418a/coverage-7.11.1-cp312-cp312-win_arm64.whl", hash = "sha256:0c77e5951ab176a6ccb70c6f688fca2a7ac834753ba82ee4eb741be655f30b43", size = 218856, upload-time = "2025-11-07T10:50:53.591Z" },
+    { url = "https://files.pythonhosted.org/packages/f4/01/0c50c318f5e8f1a482da05d788d0ff06137803ed8fface4a1ba51e04b3ad/coverage-7.11.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:da9930594ca99d66eb6f613d7beba850db2f8dfa86810ee35ae24e4d5f2bb97d", size = 216920, upload-time = "2025-11-07T10:50:55.992Z" },
+    { url = "https://files.pythonhosted.org/packages/20/11/9f038e6c2baea968c377ab355b0d1d0a46b5f38985691bf51164e1b78c1f/coverage-7.11.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cc47a280dc014220b0fc6e5f55082a3f51854faf08fd9635b8a4f341c46c77d3", size = 217301, upload-time = "2025-11-07T10:50:57.609Z" },
+    { url = "https://files.pythonhosted.org/packages/68/cd/9dcf93d81d0cddaa0bba90c3b4580e6f1ddf833918b816930d250cc553a4/coverage-7.11.1-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:74003324321bbf130939146886eddf92e48e616b5910215e79dea6edeb8ee7c8", size = 248277, upload-time = "2025-11-07T10:50:59.442Z" },
+    { url = "https://files.pythonhosted.org/packages/11/f5/b2c7c494046c9c783d3cac4c812fc24d6104dd36a7a598e7dd6fea3e7927/coverage-7.11.1-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:211f7996265daab60a8249af4ca6641b3080769cbedcffc42cc4841118f3a305", size = 250871, upload-time = "2025-11-07T10:51:01.094Z" },
+    { url = "https://files.pythonhosted.org/packages/a5/5a/b359649566954498aa17d7c98093182576d9e435ceb4ea917b3b48d56f86/coverage-7.11.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:70619d194d8fea0cb028cb6bb9c85b519c7509c1d1feef1eea635183bc8ecd27", size = 252115, upload-time = "2025-11-07T10:51:03.087Z" },
+    { url = "https://files.pythonhosted.org/packages/f3/17/3cef1ede3739622950f0737605353b797ec564e70c9d254521b10f4b03ba/coverage-7.11.1-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e0208bb59d441cfa3321569040f8e455f9261256e0df776c5462a1e5a9b31e13", size = 248442, upload-time = "2025-11-07T10:51:04.888Z" },
+    { url = "https://files.pythonhosted.org/packages/5f/63/d5854c47ae42d9d18855329db6bc528f5b7f4f874257edb00cf8b483f9f8/coverage-7.11.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:545714d8765bda1c51f8b1c96e0b497886a054471c68211e76ef49dd1468587d", size = 250253, upload-time = "2025-11-07T10:51:06.515Z" },
+    { url = "https://files.pythonhosted.org/packages/48/e8/c7706f8a5358a59c18b489e7e19e83d6161b7c8bc60771f95920570c94a8/coverage-7.11.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d0a2b02c1e20158dd405054bcca87f91fd5b7605626aee87150819ea616edd67", size = 248217, upload-time = "2025-11-07T10:51:08.405Z" },
+    { url = "https://files.pythonhosted.org/packages/5b/c9/a2136dfb168eb09e2f6d9d6b6c986243fdc0b3866a9376adb263d3c3378b/coverage-7.11.1-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:e0f4aa986a4308a458e0fb572faa3eb3db2ea7ce294604064b25ab32b435a468", size = 248040, upload-time = "2025-11-07T10:51:10.626Z" },
+    { url = "https://files.pythonhosted.org/packages/18/9a/a63991c0608ddc6adf65e6f43124951aaf36bd79f41937b028120b8268ea/coverage-7.11.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d51cc6687e8bbfd1e041f52baed0f979cd592242cf50bf18399a7e03afc82d88", size = 249801, upload-time = "2025-11-07T10:51:12.63Z" },
+    { url = "https://files.pythonhosted.org/packages/84/19/947acf7c0c6e90e4ec3abf474133ed36d94407d07e36eafdfd3acb59fee9/coverage-7.11.1-cp313-cp313-win32.whl", hash = "sha256:1b3067db3afe6deeca2b2c9f0ec23820d5f1bd152827acfadf24de145dfc5f66", size = 219430, upload-time = "2025-11-07T10:51:14.329Z" },
+    { url = "https://files.pythonhosted.org/packages/35/54/36fef7afb3884450c7b6d494fcabe2fab7c669d547c800ca30f41c1dc212/coverage-7.11.1-cp313-cp313-win_amd64.whl", hash = "sha256:39a4c44b0cd40e3c9d89b2b7303ebd6ab9ae8a63f9e9a8c4d65a181a0b33aebe", size = 220239, upload-time = "2025-11-07T10:51:16.418Z" },
+    { url = "https://files.pythonhosted.org/packages/d3/dc/7d38bb99e8e69200b7dd5de15507226bd90eac102dfc7cc891b9934cdc76/coverage-7.11.1-cp313-cp313-win_arm64.whl", hash = "sha256:a2e3560bf82fa8169a577e054cbbc29888699526063fee26ea59ea2627fd6e73", size = 218868, upload-time = "2025-11-07T10:51:18.186Z" },
+    { url = "https://files.pythonhosted.org/packages/36/c6/d1ff54fbd6bcad42dbcfd13b417e636ef84aae194353b1ef3361700f2525/coverage-7.11.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:47a4f362a10285897ab3aa7a4b37d28213a4f2626823923613d6d7a3584dd79a", size = 217615, upload-time = "2025-11-07T10:51:21.065Z" },
+    { url = "https://files.pythonhosted.org/packages/73/f9/6ed59e7cf1488d6f975e5b14ef836f5e537913523e92175135f8518a83ce/coverage-7.11.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:0df35fa7419ef571db9dacd50b0517bc54dbfe37eb94043b5fc3540bff276acd", size = 217960, upload-time = "2025-11-07T10:51:22.797Z" },
+    { url = "https://files.pythonhosted.org/packages/c4/74/2dab1dc2ebe16f074f80ae483b0f45faf278d102be703ac01b32cd85b6c3/coverage-7.11.1-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:e1a2c621d341c9d56f7917e56fbb56be4f73fe0d0e8dae28352fb095060fd467", size = 259262, upload-time = "2025-11-07T10:51:24.467Z" },
+    { url = "https://files.pythonhosted.org/packages/15/49/eccfe039663e29a50a54b0c2c8d076acd174d7ac50d018ef8a5b1c37c8dc/coverage-7.11.1-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:6c354b111be9b2234d9573d75dd30ca4e414b7659c730e477e89be4f620b3fb5", size = 261326, upload-time = "2025-11-07T10:51:26.232Z" },
+    { url = "https://files.pythonhosted.org/packages/f0/bb/2b829aa23fd5ee8318e33cc02a606eb09900921291497963adc3f06af8bb/coverage-7.11.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4589bd44698728f600233fb2881014c9b8ec86637ef454c00939e779661dbe7e", size = 263758, upload-time = "2025-11-07T10:51:27.912Z" },
+    { url = "https://files.pythonhosted.org/packages/ac/03/d44c3d70e5da275caf2cad2071da6b425412fbcb1d1d5a81f1f89b45e3f1/coverage-7.11.1-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c6956fc8754f2309131230272a7213a483a32ecbe29e2b9316d808a28f2f8ea1", size = 258444, upload-time = "2025-11-07T10:51:30.107Z" },
+    { url = "https://files.pythonhosted.org/packages/a9/c1/cf61d9f46ae088774c65dd3387a15dfbc72de90c1f6e105025e9eda19b42/coverage-7.11.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:63926a97ed89dc6a087369b92dcb8b9a94cead46c08b33a7f1f4818cd8b6a3c3", size = 261335, upload-time = "2025-11-07T10:51:31.814Z" },
+    { url = "https://files.pythonhosted.org/packages/95/9a/b3299bb14f11f2364d78a2b9704491b15395e757af6116694731ce4e5834/coverage-7.11.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:f5311ba00c53a7fb2b293fdc1f478b7286fe2a845a7ba9cda053f6e98178f0b4", size = 258951, upload-time = "2025-11-07T10:51:33.925Z" },
+    { url = "https://files.pythonhosted.org/packages/3f/a3/73cb2763e59f14ba6d8d6444b1f640a9be2242bfb59b7e50581c695db7ff/coverage-7.11.1-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:31bf5ffad84c974f9e72ac53493350f36b6fa396109159ec704210698f12860b", size = 257840, upload-time = "2025-11-07T10:51:36.092Z" },
+    { url = "https://files.pythonhosted.org/packages/85/db/482e72589a952027e238ffa3a15f192c552e0685fd0c5220ad05b5f17d56/coverage-7.11.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:227ee59fbc4a8c57a7383a1d7af6ca94a78ae3beee4045f38684548a8479a65b", size = 260040, upload-time = "2025-11-07T10:51:38.277Z" },
+    { url = "https://files.pythonhosted.org/packages/18/a1/b931d3ee099c2dca8e9ea56c07ae84c0f91562f7bbbcccab8c91b3474ef1/coverage-7.11.1-cp313-cp313t-win32.whl", hash = "sha256:a447d97b3ce680bb1da2e6bd822ebb71be6a1fb77ce2c2ad2fe4bd8aacec3058", size = 220102, upload-time = "2025-11-07T10:51:40.017Z" },
+    { url = "https://files.pythonhosted.org/packages/9a/53/b553b7bfa6207def4918f0cb72884c844fa4c3f1566e58fbb4f34e54cdc5/coverage-7.11.1-cp313-cp313t-win_amd64.whl", hash = "sha256:d6d11180437c67bde2248563a42b8e5bbf85c8df78fae13bf818ad17bfb15f02", size = 221166, upload-time = "2025-11-07T10:51:41.921Z" },
+    { url = "https://files.pythonhosted.org/packages/6b/45/1c1d58b3ed585598764bd2fe41fcf60ccafe15973ad621c322ba52e22d32/coverage-7.11.1-cp313-cp313t-win_arm64.whl", hash = "sha256:1e19a4c43d612760c6f7190411fb157e2d8a6dde00c91b941d43203bd3b17f6f", size = 219439, upload-time = "2025-11-07T10:51:43.753Z" },
+    { url = "https://files.pythonhosted.org/packages/d9/c2/ac2c3417eaa4de1361036ebbc7da664242b274b2e00c4b4a1cfc7b29920b/coverage-7.11.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:0305463c45c5f21f0396cd5028de92b1f1387e2e0756a85dd3147daa49f7a674", size = 216967, upload-time = "2025-11-07T10:51:45.55Z" },
+    { url = "https://files.pythonhosted.org/packages/5e/a3/afef455d03c468ee303f9df9a6f407e8bea64cd576fca914ff888faf52ca/coverage-7.11.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:fa4d468d5efa1eb6e3062be8bd5f45cbf28257a37b71b969a8c1da2652dfec77", size = 217298, upload-time = "2025-11-07T10:51:47.31Z" },
+    { url = "https://files.pythonhosted.org/packages/9d/59/6e2fb3fb58637001132dc32228b4fb5b332d75d12f1353cb00fe084ee0ba/coverage-7.11.1-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d2b2f5fc8fe383cbf2d5c77d6c4b2632ede553bc0afd0cdc910fa5390046c290", size = 248337, upload-time = "2025-11-07T10:51:49.48Z" },
+    { url = "https://files.pythonhosted.org/packages/1d/5e/ce442bab963e3388658da8bde6ddbd0a15beda230afafaa25e3c487dc391/coverage-7.11.1-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bde6488c1ad509f4fb1a4f9960fd003d5a94adef61e226246f9699befbab3276", size = 250853, upload-time = "2025-11-07T10:51:51.215Z" },
+    { url = "https://files.pythonhosted.org/packages/d1/2f/43f94557924ca9b64e09f1c3876da4eec44a05a41e27b8a639d899716c0e/coverage-7.11.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a69e0d6fa0b920fe6706a898c52955ec5bcfa7e45868215159f45fd87ea6da7c", size = 252190, upload-time = "2025-11-07T10:51:53.262Z" },
+    { url = "https://files.pythonhosted.org/packages/8c/fa/a04e769b92bc5628d4bd909dcc3c8219efe5e49f462e29adc43e198ecfde/coverage-7.11.1-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:976e51e4a549b80e4639eda3a53e95013a14ff6ad69bb58ed604d34deb0e774c", size = 248335, upload-time = "2025-11-07T10:51:55.388Z" },
+    { url = "https://files.pythonhosted.org/packages/99/d0/b98ab5d2abe425c71117a7c690ead697a0b32b83256bf0f566c726b7f77b/coverage-7.11.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:d61fcc4d384c82971a3d9cf00d0872881f9ded19404c714d6079b7a4547e2955", size = 250209, upload-time = "2025-11-07T10:51:57.263Z" },
+    { url = "https://files.pythonhosted.org/packages/9c/3f/b9c4fbd2e6d1b64098f99fb68df7f7c1b3e0a0968d24025adb24f359cdec/coverage-7.11.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:284c5df762b533fae3ebd764e3b81c20c1c9648d93ef34469759cb4e3dfe13d0", size = 248163, upload-time = "2025-11-07T10:51:59.014Z" },
+    { url = "https://files.pythonhosted.org/packages/08/fc/3e4d54fb6368b0628019eefd897fc271badbd025410fd5421a65fb58758f/coverage-7.11.1-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:bab32cb1d4ad2ac6dcc4e17eee5fa136c2a1d14ae914e4bce6c8b78273aece3c", size = 247983, upload-time = "2025-11-07T10:52:01.027Z" },
+    { url = "https://files.pythonhosted.org/packages/b9/4a/a5700764a12e932b35afdddb2f59adbca289c1689455d06437f609f3ef35/coverage-7.11.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:36f2fed9ce392ca450fb4e283900d0b41f05c8c5db674d200f471498be3ce747", size = 249646, upload-time = "2025-11-07T10:52:02.856Z" },
+    { url = "https://files.pythonhosted.org/packages/0e/2c/45ed33d9e80a1cc9b44b4bd535d44c154d3204671c65abd90ec1e99522a2/coverage-7.11.1-cp314-cp314-win32.whl", hash = "sha256:853136cecb92a5ba1cc8f61ec6ffa62ca3c88b4b386a6c835f8b833924f9a8c5", size = 219700, upload-time = "2025-11-07T10:52:05.05Z" },
+    { url = "https://files.pythonhosted.org/packages/90/d7/5845597360f6434af1290118ebe114642865f45ce47e7e822d9c07b371be/coverage-7.11.1-cp314-cp314-win_amd64.whl", hash = "sha256:77443d39143e20927259a61da0c95d55ffc31cf43086b8f0f11a92da5260d592", size = 220516, upload-time = "2025-11-07T10:52:07.259Z" },
+    { url = "https://files.pythonhosted.org/packages/ae/d0/d311a06f9cf7a48a98ffcfd0c57db0dcab6da46e75c439286a50dc648161/coverage-7.11.1-cp314-cp314-win_arm64.whl", hash = "sha256:829acb88fa47591a64bf5197e96a931ce9d4b3634c7f81a224ba3319623cdf6c", size = 219091, upload-time = "2025-11-07T10:52:09.216Z" },
+    { url = "https://files.pythonhosted.org/packages/a7/3d/c6a84da4fa9b840933045b19dd19d17b892f3f2dd1612903260291416dba/coverage-7.11.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:2ad1fe321d9522ea14399de83e75a11fb6a8887930c3679feb383301c28070d9", size = 217700, upload-time = "2025-11-07T10:52:11.348Z" },
+    { url = "https://files.pythonhosted.org/packages/94/10/a4fc5022017dd7ac682dc423849c241dfbdad31734b8f96060d84e70b587/coverage-7.11.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f69c332f0c3d1357c74decc9b1843fcd428cf9221bf196a20ad22aa1db3e1b6c", size = 217968, upload-time = "2025-11-07T10:52:13.203Z" },
+    { url = "https://files.pythonhosted.org/packages/59/2d/a554cd98924d296de5816413280ac3b09e42a05fb248d66f8d474d321938/coverage-7.11.1-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:576baeea4eebde684bf6c91c01e97171c8015765c8b2cfd4022a42b899897811", size = 259334, upload-time = "2025-11-07T10:52:15.079Z" },
+    { url = "https://files.pythonhosted.org/packages/05/98/d484cb659ec33958ca96b6f03438f56edc23b239d1ad0417b7a97fc1848a/coverage-7.11.1-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:28ad84c694fa86084cfd3c1eab4149844b8cb95bd8e5cbfc4a647f3ee2cce2b3", size = 261445, upload-time = "2025-11-07T10:52:17.134Z" },
+    { url = "https://files.pythonhosted.org/packages/f3/fa/920cba122cc28f4557c0507f8bd7c6e527ebcc537d0309186f66464a8fd9/coverage-7.11.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b1043ff958f09fc3f552c014d599f3c6b7088ba97d7bc1bd1cce8603cd75b520", size = 263858, upload-time = "2025-11-07T10:52:19.836Z" },
+    { url = "https://files.pythonhosted.org/packages/2a/a0/036397bdbee0f3bd46c2e26fdfbb1a61b2140bf9059240c37b61149047fa/coverage-7.11.1-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c6681add5060c2742dafcf29826dff1ff8eef889a3b03390daeed84361c428bd", size = 258381, upload-time = "2025-11-07T10:52:21.687Z" },
+    { url = "https://files.pythonhosted.org/packages/b6/61/2533926eb8990f182eb287f4873216c8ca530cc47241144aabf46fe80abe/coverage-7.11.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:773419b225ec9a75caa1e941dd0c83a91b92c2b525269e44e6ee3e4c630607db", size = 261321, upload-time = "2025-11-07T10:52:23.612Z" },
+    { url = "https://files.pythonhosted.org/packages/32/6e/618f7e203a998e4f6b8a0fa395744a416ad2adbcdc3735bc19466456718a/coverage-7.11.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:a9cb272a0e0157dbb9b2fd0b201b759bd378a1a6138a16536c025c2ce4f7643b", size = 258933, upload-time = "2025-11-07T10:52:25.514Z" },
+    { url = "https://files.pythonhosted.org/packages/22/40/6b1c27f772cb08a14a338647ead1254a57ee9dabbb4cacbc15df7f278741/coverage-7.11.1-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:e09adb2a7811dc75998eef68f47599cf699e2b62eed09c9fefaeb290b3920f34", size = 257756, upload-time = "2025-11-07T10:52:27.845Z" },
+    { url = "https://files.pythonhosted.org/packages/73/07/f9cd12f71307a785ea15b009c8d8cc2543e4a867bd04b8673843970b6b43/coverage-7.11.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:1335fa8c2a2fea49924d97e1e3500cfe8d7c849f5369f26bb7559ad4259ccfab", size = 260086, upload-time = "2025-11-07T10:52:29.776Z" },
+    { url = "https://files.pythonhosted.org/packages/34/02/31c5394f6f5d72a466966bcfdb61ce5a19862d452816d6ffcbb44add16ee/coverage-7.11.1-cp314-cp314t-win32.whl", hash = "sha256:4782d71d2a4fa7cef95e853b7097c8bbead4dbd0e6f9c7152a6b11a194b794db", size = 220483, upload-time = "2025-11-07T10:52:31.752Z" },
+    { url = "https://files.pythonhosted.org/packages/7f/96/81e1ef5fbfd5090113a96e823dbe055e4c58d96ca73b1fb0ad9d26f9ec36/coverage-7.11.1-cp314-cp314t-win_amd64.whl", hash = "sha256:939f45e66eceb63c75e8eb8fc58bb7077c00f1a41b0e15c6ef02334a933cfe93", size = 221592, upload-time = "2025-11-07T10:52:33.724Z" },
+    { url = "https://files.pythonhosted.org/packages/38/7a/a5d050de44951ac453a2046a0f3fb5471a4a557f0c914d00db27d543d94c/coverage-7.11.1-cp314-cp314t-win_arm64.whl", hash = "sha256:01c575bdbef35e3f023b50a146e9a75c53816e4f2569109458155cd2315f87d9", size = 219627, upload-time = "2025-11-07T10:52:36.285Z" },
+    { url = "https://files.pythonhosted.org/packages/76/32/bd9f48c28e23b2f08946f8e83983617b00619f5538dbd7e1045fa7e88c00/coverage-7.11.1-py3-none-any.whl", hash = "sha256:0fa848acb5f1da24765cee840e1afe9232ac98a8f9431c6112c15b34e880b9e8", size = 208689, upload-time = "2025-11-07T10:52:38.646Z" },
 ]
 
 [[package]]
@@ -474,7 +474,7 @@ wheels = [
 
 [[package]]
 name = "fastapi"
-version = "0.120.1"
+version = "0.121.0"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "annotated-doc" },
@@ -482,9 +482,9 @@ dependencies = [
     { name = "starlette" },
     { name = "typing-extensions" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/40/cc/28aff6e246ee85bd571b26e4a793b84d42700e3bdc3008c3d747eda7b06d/fastapi-0.120.1.tar.gz", hash = "sha256:b5c6217e9ddca6dfcf54c97986180d4a1955e10c693d74943fc5327700178bff", size = 337616, upload-time = "2025-10-27T17:53:42.954Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/8c/e3/77a2df0946703973b9905fd0cde6172c15e0781984320123b4f5079e7113/fastapi-0.121.0.tar.gz", hash = "sha256:06663356a0b1ee93e875bbf05a31fb22314f5bed455afaaad2b2dad7f26e98fa", size = 342412, upload-time = "2025-11-03T10:25:54.818Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/7e/bb/1a74dbe87e9a595bf63052c886dfef965dc5b91d149456a8301eb3d41ce2/fastapi-0.120.1-py3-none-any.whl", hash = "sha256:0e8a2c328e96c117272d8c794d3a97d205f753cc2e69dd7ee387b7488a75601f", size = 108254, upload-time = "2025-10-27T17:53:40.076Z" },
+    { url = "https://files.pythonhosted.org/packages/dd/2c/42277afc1ba1a18f8358561eee40785d27becab8f80a1f945c0a3051c6eb/fastapi-0.121.0-py3-none-any.whl", hash = "sha256:8bdf1b15a55f4e4b0d6201033da9109ea15632cb76cf156e7b8b4019f2172106", size = 109183, upload-time = "2025-11-03T10:25:53.27Z" },
 ]
 
 [[package]]
@@ -555,6 +555,8 @@ dev = [
 ]
 tests = [
     { name = "psutil" },
+    { name = "pytest-asyncio" },
+    { name = "pytest-xdist" },
 ]
 
 [package.metadata]
@@ -596,7 +598,11 @@ dev = [
     { name = "types-pyyaml", specifier = ">=6.0.12.20250516" },
     { name = "types-requests", specifier = ">=2.32.4.20250611" },
 ]
-tests = [{ name = "psutil", specifier = ">=7.0.0" }]
+tests = [
+    { name = "psutil", specifier = ">=7.0.0" },
+    { name = "pytest-asyncio", specifier = ">=0.26.0" },
+    { name = "pytest-xdist", specifier = ">=3.7.0" },
+]
 
 [[package]]
 name = "h11"
@@ -715,7 +721,7 @@ wheels = [
 
 [[package]]
 name = "ipython"
-version = "9.6.0"
+version = "9.7.0"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "colorama", marker = "sys_platform == 'win32'" },
@@ -729,9 +735,9 @@ dependencies = [
     { name = "stack-data" },
     { name = "traitlets" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/2a/34/29b18c62e39ee2f7a6a3bba7efd952729d8aadd45ca17efc34453b717665/ipython-9.6.0.tar.gz", hash = "sha256:5603d6d5d356378be5043e69441a072b50a5b33b4503428c77b04cb8ce7bc731", size = 4396932, upload-time = "2025-09-29T10:55:53.948Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/29/e6/48c74d54039241a456add616464ea28c6ebf782e4110d419411b83dae06f/ipython-9.7.0.tar.gz", hash = "sha256:5f6de88c905a566c6a9d6c400a8fed54a638e1f7543d17aae2551133216b1e4e", size = 4422115, upload-time = "2025-11-05T12:18:54.646Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/48/c5/d5e07995077e48220269c28a221e168c91123ad5ceee44d548f54a057fc0/ipython-9.6.0-py3-none-any.whl", hash = "sha256:5f77efafc886d2f023442479b8149e7d86547ad0a979e9da9f045d252f648196", size = 616170, upload-time = "2025-09-29T10:55:47.676Z" },
+    { url = "https://files.pythonhosted.org/packages/05/aa/62893d6a591d337aa59dcc4c6f6c842f1fe20cd72c8c5c1f980255243252/ipython-9.7.0-py3-none-any.whl", hash = "sha256:bce8ac85eb9521adc94e1845b4c03d88365fd6ac2f4908ec4ed1eb1b0a065f9f", size = 618911, upload-time = "2025-11-05T12:18:52.484Z" },
 ]
 
 [[package]]
@@ -811,7 +817,7 @@ wheels = [
 
 [[package]]
 name = "mcp"
-version = "1.19.0"
+version = "1.21.0"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "anyio" },
@@ -820,15 +826,16 @@ dependencies = [
     { name = "jsonschema" },
     { name = "pydantic" },
     { name = "pydantic-settings" },
+    { name = "pyjwt", extra = ["crypto"] },
    { name = "python-multipart" },
     { name = "pywin32", marker = "sys_platform == 'win32'" },
     { name = "sse-starlette" },
     { name = "starlette" },
     { name = "uvicorn", marker = "sys_platform != 'emscripten'" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/69/2b/916852a5668f45d8787378461eaa1244876d77575ffef024483c94c0649c/mcp-1.19.0.tar.gz", hash = "sha256:213de0d3cd63f71bc08ffe9cc8d4409cc87acffd383f6195d2ce0457c021b5c1", size = 444163, upload-time = "2025-10-24T01:11:15.839Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/33/54/dd2330ef4611c27ae59124820863c34e1d3edb1133c58e6375e2d938c9c5/mcp-1.21.0.tar.gz", hash = "sha256:bab0a38e8f8c48080d787233343f8d301b0e1e95846ae7dead251b2421d99855", size = 452697, upload-time = "2025-11-06T23:19:58.432Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/ce/a3/3e71a875a08b6a830b88c40bc413bff01f1650f1efe8a054b5e90a9d4f56/mcp-1.19.0-py3-none-any.whl", hash = "sha256:f5907fe1c0167255f916718f376d05f09a830a215327a3ccdd5ec8a519f2e572", size = 170105, upload-time = "2025-10-24T01:11:14.151Z" },
+    { url = "https://files.pythonhosted.org/packages/39/47/850b6edc96c03bd44b00de9a0ca3c1cc71e0ba1cd5822955bc9e4eb3fad3/mcp-1.21.0-py3-none-any.whl", hash = "sha256:598619e53eb0b7a6513db38c426b28a4bdf57496fed04332100d2c56acade98b", size = 173672, upload-time = "2025-11-06T23:19:56.508Z" },
 ]
 
 [[package]]
@@ -917,28 +924,28 @@ wheels = [
 
 [[package]]
 name = "psutil"
-version = "7.1.2"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/cd/ec/7b8e6b9b1d22708138630ef34c53ab2b61032c04f16adfdbb96791c8c70c/psutil-7.1.2.tar.gz", hash = "sha256:aa225cdde1335ff9684708ee8c72650f6598d5ed2114b9a7c5802030b1785018", size = 487424, upload-time = "2025-10-25T10:46:34.931Z" }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/b8/d9/b56cc9f883140ac10021a8c9b0f4e16eed1ba675c22513cdcbce3ba64014/psutil-7.1.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0cc5c6889b9871f231ed5455a9a02149e388fffcb30b607fb7a8896a6d95f22e", size = 238575, upload-time = "2025-10-25T10:46:38.728Z" },
-    { url = "https://files.pythonhosted.org/packages/36/eb/28d22de383888deb252c818622196e709da98816e296ef95afda33f1c0a2/psutil-7.1.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:8e9e77a977208d84aa363a4a12e0f72189d58bbf4e46b49aae29a2c6e93ef206", size = 239297, upload-time = "2025-10-25T10:46:41.347Z" },
-    { url = "https://files.pythonhosted.org/packages/89/5d/220039e2f28cc129626e54d63892ab05c0d56a29818bfe7268dcb5008932/psutil-7.1.2-cp313-cp313t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7d9623a5e4164d2220ecceb071f4b333b3c78866141e8887c072129185f41278", size = 280420, upload-time = "2025-10-25T10:46:44.122Z" },
-    { url = "https://files.pythonhosted.org/packages/ba/7a/286f0e1c167445b2ef4a6cbdfc8c59fdb45a5a493788950cf8467201dc73/psutil-7.1.2-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:364b1c10fe4ed59c89ec49e5f1a70da353b27986fa8233b4b999df4742a5ee2f", size = 283049, upload-time = "2025-10-25T10:46:47.095Z" },
-    { url = "https://files.pythonhosted.org/packages/aa/cc/7eb93260794a42e39b976f3a4dde89725800b9f573b014fac142002a5c98/psutil-7.1.2-cp313-cp313t-win_amd64.whl", hash = "sha256:f101ef84de7e05d41310e3ccbdd65a6dd1d9eed85e8aaf0758405d022308e204", size = 248713, upload-time = "2025-10-25T10:46:49.573Z" },
-    { url = "https://files.pythonhosted.org/packages/ab/1a/0681a92b53366e01f0a099f5237d0c8a2f79d322ac589cccde5e30c8a4e2/psutil-7.1.2-cp313-cp313t-win_arm64.whl", hash = "sha256:20c00824048a95de67f00afedc7b08b282aa08638585b0206a9fb51f28f1a165", size = 244644, upload-time = "2025-10-25T10:46:51.924Z" },
-    { url = "https://files.pythonhosted.org/packages/56/9e/f1c5c746b4ed5320952acd3002d3962fe36f30524c00ea79fdf954cc6779/psutil-7.1.2-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:e09cfe92aa8e22b1ec5e2d394820cf86c5dff6367ac3242366485dfa874d43bc", size = 238640, upload-time = "2025-10-25T10:46:54.089Z" },
-    { url = "https://files.pythonhosted.org/packages/32/ee/fd26216a735395cc25c3899634e34aeb41fb1f3dbb44acc67d9e594be562/psutil-7.1.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:fa6342cf859c48b19df3e4aa170e4cfb64aadc50b11e06bb569c6c777b089c9e", size = 239303, upload-time = "2025-10-25T10:46:56.932Z" },
-    { url = "https://files.pythonhosted.org/packages/3c/cd/7d96eaec4ef7742b845a9ce2759a2769ecce4ab7a99133da24abacbc9e41/psutil-7.1.2-cp314-cp314t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:625977443498ee7d6c1e63e93bacca893fd759a66c5f635d05e05811d23fb5ee", size = 281717, upload-time = "2025-10-25T10:46:59.116Z" },
-    { url = "https://files.pythonhosted.org/packages/bc/1a/7f0b84bdb067d35fe7fade5fff888408688caf989806ce2d6dae08c72dd5/psutil-7.1.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4a24bcd7b7f2918d934af0fb91859f621b873d6aa81267575e3655cd387572a7", size = 284575, upload-time = "2025-10-25T10:47:00.944Z" },
-    { url = "https://files.pythonhosted.org/packages/de/05/7820ef8f7b275268917e0c750eada5834581206d9024ca88edce93c4b762/psutil-7.1.2-cp314-cp314t-win_amd64.whl", hash = "sha256:329f05610da6380982e6078b9d0881d9ab1e9a7eb7c02d833bfb7340aa634e31", size = 249491, upload-time = "2025-10-25T10:47:03.174Z" },
-    { url = "https://files.pythonhosted.org/packages/db/9a/58de399c7cb58489f08498459ff096cd76b3f1ddc4f224ec2c5ef729c7d0/psutil-7.1.2-cp314-cp314t-win_arm64.whl", hash = "sha256:7b04c29e3c0c888e83ed4762b70f31e65c42673ea956cefa8ced0e31e185f582", size = 244880, upload-time = "2025-10-25T10:47:05.228Z" },
-    { url = "https://files.pythonhosted.org/packages/ae/89/b9f8d47ddbc52d7301fc868e8224e5f44ed3c7f55e6d0f54ecaf5dd9ff5e/psutil-7.1.2-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:c9ba5c19f2d46203ee8c152c7b01df6eec87d883cfd8ee1af2ef2727f6b0f814", size = 237244, upload-time = "2025-10-25T10:47:07.086Z" },
-    { url = "https://files.pythonhosted.org/packages/c8/7a/8628c2f6b240680a67d73d8742bb9ff39b1820a693740e43096d5dcb01e5/psutil-7.1.2-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:2a486030d2fe81bec023f703d3d155f4823a10a47c36784c84f1cc7f8d39bedb", size = 238101, upload-time = "2025-10-25T10:47:09.523Z" },
-    { url = "https://files.pythonhosted.org/packages/30/28/5e27f4d5a0e347f8e3cc16cd7d35533dbce086c95807f1f0e9cd77e26c10/psutil-7.1.2-cp36-abi3-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3efd8fc791492e7808a51cb2b94889db7578bfaea22df931424f874468e389e3", size = 258675, upload-time = "2025-10-25T10:47:11.082Z" },
-    { url = "https://files.pythonhosted.org/packages/e5/5c/79cf60c9acf36d087f0db0f82066fca4a780e97e5b3a2e4c38209c03d170/psutil-7.1.2-cp36-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e2aeb9b64f481b8eabfc633bd39e0016d4d8bbcd590d984af764d80bf0851b8a", size = 260203, upload-time = "2025-10-25T10:47:13.226Z" },
-    { url = "https://files.pythonhosted.org/packages/f7/03/0a464404c51685dcb9329fdd660b1721e076ccd7b3d97dee066bcc9ffb15/psutil-7.1.2-cp37-abi3-win_amd64.whl", hash = "sha256:8e17852114c4e7996fe9da4745c2bdef001ebbf2f260dec406290e66628bdb91", size = 246714, upload-time = "2025-10-25T10:47:15.093Z" },
-    { url = "https://files.pythonhosted.org/packages/6a/32/97ca2090f2f1b45b01b6aa7ae161cfe50671de097311975ca6eea3e7aabc/psutil-7.1.2-cp37-abi3-win_arm64.whl", hash = "sha256:3e988455e61c240cc879cb62a008c2699231bf3e3d061d7fce4234463fd2abb4", size = 243742, upload-time = "2025-10-25T10:47:17.302Z" },
+version = "7.1.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/e1/88/bdd0a41e5857d5d703287598cbf08dad90aed56774ea52ae071bae9071b6/psutil-7.1.3.tar.gz", hash = "sha256:6c86281738d77335af7aec228328e944b30930899ea760ecf33a4dba66be5e74", size = 489059, upload-time = "2025-11-02T12:25:54.619Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/bd/93/0c49e776b8734fef56ec9c5c57f923922f2cf0497d62e0f419465f28f3d0/psutil-7.1.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0005da714eee687b4b8decd3d6cc7c6db36215c9e74e5ad2264b90c3df7d92dc", size = 239751, upload-time = "2025-11-02T12:25:58.161Z" },
+    { url = "https://files.pythonhosted.org/packages/6f/8d/b31e39c769e70780f007969815195a55c81a63efebdd4dbe9e7a113adb2f/psutil-7.1.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:19644c85dcb987e35eeeaefdc3915d059dac7bd1167cdcdbf27e0ce2df0c08c0", size = 240368, upload-time = "2025-11-02T12:26:00.491Z" },
+    { url = "https://files.pythonhosted.org/packages/62/61/23fd4acc3c9eebbf6b6c78bcd89e5d020cfde4acf0a9233e9d4e3fa698b4/psutil-7.1.3-cp313-cp313t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:95ef04cf2e5ba0ab9eaafc4a11eaae91b44f4ef5541acd2ee91d9108d00d59a7", size = 287134, upload-time = "2025-11-02T12:26:02.613Z" },
+ { url = "https://files.pythonhosted.org/packages/30/1c/f921a009ea9ceb51aa355cb0cc118f68d354db36eae18174bab63affb3e6/psutil-7.1.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1068c303be3a72f8e18e412c5b2a8f6d31750fb152f9cb106b54090296c9d251", size = 289904, upload-time = "2025-11-02T12:26:05.207Z" }, + { url = "https://files.pythonhosted.org/packages/a6/82/62d68066e13e46a5116df187d319d1724b3f437ddd0f958756fc052677f4/psutil-7.1.3-cp313-cp313t-win_amd64.whl", hash = "sha256:18349c5c24b06ac5612c0428ec2a0331c26443d259e2a0144a9b24b4395b58fa", size = 249642, upload-time = "2025-11-02T12:26:07.447Z" }, + { url = "https://files.pythonhosted.org/packages/df/ad/c1cd5fe965c14a0392112f68362cfceb5230819dbb5b1888950d18a11d9f/psutil-7.1.3-cp313-cp313t-win_arm64.whl", hash = "sha256:c525ffa774fe4496282fb0b1187725793de3e7c6b29e41562733cae9ada151ee", size = 245518, upload-time = "2025-11-02T12:26:09.719Z" }, + { url = "https://files.pythonhosted.org/packages/2e/bb/6670bded3e3236eb4287c7bcdc167e9fae6e1e9286e437f7111caed2f909/psutil-7.1.3-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:b403da1df4d6d43973dc004d19cee3b848e998ae3154cc8097d139b77156c353", size = 239843, upload-time = "2025-11-02T12:26:11.968Z" }, + { url = "https://files.pythonhosted.org/packages/b8/66/853d50e75a38c9a7370ddbeefabdd3d3116b9c31ef94dc92c6729bc36bec/psutil-7.1.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:ad81425efc5e75da3f39b3e636293360ad8d0b49bed7df824c79764fb4ba9b8b", size = 240369, upload-time = "2025-11-02T12:26:14.358Z" }, + { url = "https://files.pythonhosted.org/packages/41/bd/313aba97cb5bfb26916dc29cf0646cbe4dd6a89ca69e8c6edce654876d39/psutil-7.1.3-cp314-cp314t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8f33a3702e167783a9213db10ad29650ebf383946e91bc77f28a5eb083496bc9", size = 288210, upload-time = "2025-11-02T12:26:16.699Z" }, + { url = "https://files.pythonhosted.org/packages/c2/fa/76e3c06e760927a0cfb5705eb38164254de34e9bd86db656d4dbaa228b04/psutil-7.1.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fac9cd332c67f4422504297889da5ab7e05fd11e3c4392140f7370f4208ded1f", size = 291182, upload-time = "2025-11-02T12:26:18.848Z" }, + { url = "https://files.pythonhosted.org/packages/0f/1d/5774a91607035ee5078b8fd747686ebec28a962f178712de100d00b78a32/psutil-7.1.3-cp314-cp314t-win_amd64.whl", hash = "sha256:3792983e23b69843aea49c8f5b8f115572c5ab64c153bada5270086a2123c7e7", size = 250466, upload-time = "2025-11-02T12:26:21.183Z" }, + { url = "https://files.pythonhosted.org/packages/00/ca/e426584bacb43a5cb1ac91fae1937f478cd8fbe5e4ff96574e698a2c77cd/psutil-7.1.3-cp314-cp314t-win_arm64.whl", hash = "sha256:31d77fcedb7529f27bb3a0472bea9334349f9a04160e8e6e5020f22c59893264", size = 245756, upload-time = "2025-11-02T12:26:23.148Z" }, + { url = "https://files.pythonhosted.org/packages/ef/94/46b9154a800253e7ecff5aaacdf8ebf43db99de4a2dfa18575b02548654e/psutil-7.1.3-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:2bdbcd0e58ca14996a42adf3621a6244f1bb2e2e528886959c72cf1e326677ab", size = 238359, upload-time = "2025-11-02T12:26:25.284Z" }, + { url = "https://files.pythonhosted.org/packages/68/3a/9f93cff5c025029a36d9a92fef47220ab4692ee7f2be0fba9f92813d0cb8/psutil-7.1.3-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:bc31fa00f1fbc3c3802141eede66f3a2d51d89716a194bf2cd6fc68310a19880", size = 239171, upload-time = "2025-11-02T12:26:27.23Z" }, + { url = 
"https://files.pythonhosted.org/packages/ce/b1/5f49af514f76431ba4eea935b8ad3725cdeb397e9245ab919dbc1d1dc20f/psutil-7.1.3-cp36-abi3-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3bb428f9f05c1225a558f53e30ccbad9930b11c3fc206836242de1091d3e7dd3", size = 263261, upload-time = "2025-11-02T12:26:29.48Z" }, + { url = "https://files.pythonhosted.org/packages/e0/95/992c8816a74016eb095e73585d747e0a8ea21a061ed3689474fabb29a395/psutil-7.1.3-cp36-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:56d974e02ca2c8eb4812c3f76c30e28836fffc311d55d979f1465c1feeb2b68b", size = 264635, upload-time = "2025-11-02T12:26:31.74Z" }, + { url = "https://files.pythonhosted.org/packages/55/4c/c3ed1a622b6ae2fd3c945a366e64eb35247a31e4db16cf5095e269e8eb3c/psutil-7.1.3-cp37-abi3-win_amd64.whl", hash = "sha256:f39c2c19fe824b47484b96f9692932248a54c43799a84282cfe58d05a6449efd", size = 247633, upload-time = "2025-11-02T12:26:33.887Z" }, + { url = "https://files.pythonhosted.org/packages/c9/ad/33b2ccec09bf96c2b2ef3f9a6f66baac8253d7565d8839e024a6b905d45d/psutil-7.1.3-cp37-abi3-win_arm64.whl", hash = "sha256:bd0d69cee829226a761e92f28140bec9a5ee9d5b4fb4b0cc589068dbfff559b1", size = 244608, upload-time = "2025-11-02T12:26:36.136Z" }, ] [[package]] @@ -970,7 +977,7 @@ wheels = [ [[package]] name = "pydantic" -version = "2.12.3" +version = "2.12.4" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-types" }, @@ -978,76 +985,80 @@ dependencies = [ { name = "typing-extensions" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f3/1e/4f0a3233767010308f2fd6bd0814597e3f63f1dc98304a9112b8759df4ff/pydantic-2.12.3.tar.gz", hash = "sha256:1da1c82b0fc140bb0103bc1441ffe062154c8d38491189751ee00fd8ca65ce74", size = 819383, upload-time = "2025-10-17T15:04:21.222Z" } +sdist = { url = "https://files.pythonhosted.org/packages/96/ad/a17bc283d7d81837c061c49e3eaa27a45991759a1b7eae1031921c6bd924/pydantic-2.12.4.tar.gz", hash = "sha256:0f8cb9555000a4b5b617f66bfd2566264c4984b27589d3b845685983e8ea85ac", size = 821038, upload-time = "2025-11-05T10:50:08.59Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a1/6b/83661fa77dcefa195ad5f8cd9af3d1a7450fd57cc883ad04d65446ac2029/pydantic-2.12.3-py3-none-any.whl", hash = "sha256:6986454a854bc3bc6e5443e1369e06a3a456af9d339eda45510f517d9ea5c6bf", size = 462431, upload-time = "2025-10-17T15:04:19.346Z" }, + { url = "https://files.pythonhosted.org/packages/82/2f/e68750da9b04856e2a7ec56fc6f034a5a79775e9b9a81882252789873798/pydantic-2.12.4-py3-none-any.whl", hash = "sha256:92d3d202a745d46f9be6df459ac5a064fdaa3c1c4cd8adcfa332ccf3c05f871e", size = 463400, upload-time = "2025-11-05T10:50:06.732Z" }, ] [[package]] name = "pydantic-core" -version = "2.41.4" +version = "2.41.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/df/18/d0944e8eaaa3efd0a91b0f1fc537d3be55ad35091b6a87638211ba691964/pydantic_core-2.41.4.tar.gz", hash = "sha256:70e47929a9d4a1905a67e4b687d5946026390568a8e952b92824118063cee4d5", size = 457557, upload-time = "2025-10-14T10:23:47.909Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e9/81/d3b3e95929c4369d30b2a66a91db63c8ed0a98381ae55a45da2cd1cc1288/pydantic_core-2.41.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:ab06d77e053d660a6faaf04894446df7b0a7e7aba70c2797465a0a1af00fc887", size = 2099043, 
upload-time = "2025-10-14T10:20:28.561Z" }, - { url = "https://files.pythonhosted.org/packages/58/da/46fdac49e6717e3a94fc9201403e08d9d61aa7a770fab6190b8740749047/pydantic_core-2.41.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c53ff33e603a9c1179a9364b0a24694f183717b2e0da2b5ad43c316c956901b2", size = 1910699, upload-time = "2025-10-14T10:20:30.217Z" }, - { url = "https://files.pythonhosted.org/packages/1e/63/4d948f1b9dd8e991a5a98b77dd66c74641f5f2e5225fee37994b2e07d391/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:304c54176af2c143bd181d82e77c15c41cbacea8872a2225dd37e6544dce9999", size = 1952121, upload-time = "2025-10-14T10:20:32.246Z" }, - { url = "https://files.pythonhosted.org/packages/b2/a7/e5fc60a6f781fc634ecaa9ecc3c20171d238794cef69ae0af79ac11b89d7/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:025ba34a4cf4fb32f917d5d188ab5e702223d3ba603be4d8aca2f82bede432a4", size = 2041590, upload-time = "2025-10-14T10:20:34.332Z" }, - { url = "https://files.pythonhosted.org/packages/70/69/dce747b1d21d59e85af433428978a1893c6f8a7068fa2bb4a927fba7a5ff/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b9f5f30c402ed58f90c70e12eff65547d3ab74685ffe8283c719e6bead8ef53f", size = 2219869, upload-time = "2025-10-14T10:20:35.965Z" }, - { url = "https://files.pythonhosted.org/packages/83/6a/c070e30e295403bf29c4df1cb781317b6a9bac7cd07b8d3acc94d501a63c/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd96e5d15385d301733113bcaa324c8bcf111275b7675a9c6e88bfb19fc05e3b", size = 2345169, upload-time = "2025-10-14T10:20:37.627Z" }, - { url = "https://files.pythonhosted.org/packages/f0/83/06d001f8043c336baea7fd202a9ac7ad71f87e1c55d8112c50b745c40324/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98f348cbb44fae6e9653c1055db7e29de67ea6a9ca03a5fa2c2e11a47cff0e47", size = 2070165, upload-time = "2025-10-14T10:20:39.246Z" }, - { url = "https://files.pythonhosted.org/packages/14/0a/e567c2883588dd12bcbc110232d892cf385356f7c8a9910311ac997ab715/pydantic_core-2.41.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ec22626a2d14620a83ca583c6f5a4080fa3155282718b6055c2ea48d3ef35970", size = 2189067, upload-time = "2025-10-14T10:20:41.015Z" }, - { url = "https://files.pythonhosted.org/packages/f4/1d/3d9fca34273ba03c9b1c5289f7618bc4bd09c3ad2289b5420481aa051a99/pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3a95d4590b1f1a43bf33ca6d647b990a88f4a3824a8c4572c708f0b45a5290ed", size = 2132997, upload-time = "2025-10-14T10:20:43.106Z" }, - { url = "https://files.pythonhosted.org/packages/52/70/d702ef7a6cd41a8afc61f3554922b3ed8d19dd54c3bd4bdbfe332e610827/pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:f9672ab4d398e1b602feadcffcdd3af44d5f5e6ddc15bc7d15d376d47e8e19f8", size = 2307187, upload-time = "2025-10-14T10:20:44.849Z" }, - { url = "https://files.pythonhosted.org/packages/68/4c/c06be6e27545d08b802127914156f38d10ca287a9e8489342793de8aae3c/pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:84d8854db5f55fead3b579f04bda9a36461dab0730c5d570e1526483e7bb8431", size = 2305204, upload-time = "2025-10-14T10:20:46.781Z" }, - { url = "https://files.pythonhosted.org/packages/b0/e5/35ae4919bcd9f18603419e23c5eaf32750224a89d41a8df1a3704b69f77e/pydantic_core-2.41.4-cp312-cp312-win32.whl", hash = 
"sha256:9be1c01adb2ecc4e464392c36d17f97e9110fbbc906bcbe1c943b5b87a74aabd", size = 1972536, upload-time = "2025-10-14T10:20:48.39Z" }, - { url = "https://files.pythonhosted.org/packages/1e/c2/49c5bb6d2a49eb2ee3647a93e3dae7080c6409a8a7558b075027644e879c/pydantic_core-2.41.4-cp312-cp312-win_amd64.whl", hash = "sha256:d682cf1d22bab22a5be08539dca3d1593488a99998f9f412137bc323179067ff", size = 2031132, upload-time = "2025-10-14T10:20:50.421Z" }, - { url = "https://files.pythonhosted.org/packages/06/23/936343dbcba6eec93f73e95eb346810fc732f71ba27967b287b66f7b7097/pydantic_core-2.41.4-cp312-cp312-win_arm64.whl", hash = "sha256:833eebfd75a26d17470b58768c1834dfc90141b7afc6eb0429c21fc5a21dcfb8", size = 1969483, upload-time = "2025-10-14T10:20:52.35Z" }, - { url = "https://files.pythonhosted.org/packages/13/d0/c20adabd181a029a970738dfe23710b52a31f1258f591874fcdec7359845/pydantic_core-2.41.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:85e050ad9e5f6fe1004eec65c914332e52f429bc0ae12d6fa2092407a462c746", size = 2105688, upload-time = "2025-10-14T10:20:54.448Z" }, - { url = "https://files.pythonhosted.org/packages/00/b6/0ce5c03cec5ae94cca220dfecddc453c077d71363b98a4bbdb3c0b22c783/pydantic_core-2.41.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e7393f1d64792763a48924ba31d1e44c2cfbc05e3b1c2c9abb4ceeadd912cced", size = 1910807, upload-time = "2025-10-14T10:20:56.115Z" }, - { url = "https://files.pythonhosted.org/packages/68/3e/800d3d02c8beb0b5c069c870cbb83799d085debf43499c897bb4b4aaff0d/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94dab0940b0d1fb28bcab847adf887c66a27a40291eedf0b473be58761c9799a", size = 1956669, upload-time = "2025-10-14T10:20:57.874Z" }, - { url = "https://files.pythonhosted.org/packages/60/a4/24271cc71a17f64589be49ab8bd0751f6a0a03046c690df60989f2f95c2c/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:de7c42f897e689ee6f9e93c4bec72b99ae3b32a2ade1c7e4798e690ff5246e02", size = 2051629, upload-time = "2025-10-14T10:21:00.006Z" }, - { url = "https://files.pythonhosted.org/packages/68/de/45af3ca2f175d91b96bfb62e1f2d2f1f9f3b14a734afe0bfeff079f78181/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:664b3199193262277b8b3cd1e754fb07f2c6023289c815a1e1e8fb415cb247b1", size = 2224049, upload-time = "2025-10-14T10:21:01.801Z" }, - { url = "https://files.pythonhosted.org/packages/af/8f/ae4e1ff84672bf869d0a77af24fd78387850e9497753c432875066b5d622/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d95b253b88f7d308b1c0b417c4624f44553ba4762816f94e6986819b9c273fb2", size = 2342409, upload-time = "2025-10-14T10:21:03.556Z" }, - { url = "https://files.pythonhosted.org/packages/18/62/273dd70b0026a085c7b74b000394e1ef95719ea579c76ea2f0cc8893736d/pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1351f5bbdbbabc689727cb91649a00cb9ee7203e0a6e54e9f5ba9e22e384b84", size = 2069635, upload-time = "2025-10-14T10:21:05.385Z" }, - { url = "https://files.pythonhosted.org/packages/30/03/cf485fff699b4cdaea469bc481719d3e49f023241b4abb656f8d422189fc/pydantic_core-2.41.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1affa4798520b148d7182da0615d648e752de4ab1a9566b7471bc803d88a062d", size = 2194284, upload-time = "2025-10-14T10:21:07.122Z" }, - { url = 
"https://files.pythonhosted.org/packages/f9/7e/c8e713db32405dfd97211f2fc0a15d6bf8adb7640f3d18544c1f39526619/pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7b74e18052fea4aa8dea2fb7dbc23d15439695da6cbe6cfc1b694af1115df09d", size = 2137566, upload-time = "2025-10-14T10:21:08.981Z" }, - { url = "https://files.pythonhosted.org/packages/04/f7/db71fd4cdccc8b75990f79ccafbbd66757e19f6d5ee724a6252414483fb4/pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:285b643d75c0e30abda9dc1077395624f314a37e3c09ca402d4015ef5979f1a2", size = 2316809, upload-time = "2025-10-14T10:21:10.805Z" }, - { url = "https://files.pythonhosted.org/packages/76/63/a54973ddb945f1bca56742b48b144d85c9fc22f819ddeb9f861c249d5464/pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:f52679ff4218d713b3b33f88c89ccbf3a5c2c12ba665fb80ccc4192b4608dbab", size = 2311119, upload-time = "2025-10-14T10:21:12.583Z" }, - { url = "https://files.pythonhosted.org/packages/f8/03/5d12891e93c19218af74843a27e32b94922195ded2386f7b55382f904d2f/pydantic_core-2.41.4-cp313-cp313-win32.whl", hash = "sha256:ecde6dedd6fff127c273c76821bb754d793be1024bc33314a120f83a3c69460c", size = 1981398, upload-time = "2025-10-14T10:21:14.584Z" }, - { url = "https://files.pythonhosted.org/packages/be/d8/fd0de71f39db91135b7a26996160de71c073d8635edfce8b3c3681be0d6d/pydantic_core-2.41.4-cp313-cp313-win_amd64.whl", hash = "sha256:d081a1f3800f05409ed868ebb2d74ac39dd0c1ff6c035b5162356d76030736d4", size = 2030735, upload-time = "2025-10-14T10:21:16.432Z" }, - { url = "https://files.pythonhosted.org/packages/72/86/c99921c1cf6650023c08bfab6fe2d7057a5142628ef7ccfa9921f2dda1d5/pydantic_core-2.41.4-cp313-cp313-win_arm64.whl", hash = "sha256:f8e49c9c364a7edcbe2a310f12733aad95b022495ef2a8d653f645e5d20c1564", size = 1973209, upload-time = "2025-10-14T10:21:18.213Z" }, - { url = "https://files.pythonhosted.org/packages/36/0d/b5706cacb70a8414396efdda3d72ae0542e050b591119e458e2490baf035/pydantic_core-2.41.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:ed97fd56a561f5eb5706cebe94f1ad7c13b84d98312a05546f2ad036bafe87f4", size = 1877324, upload-time = "2025-10-14T10:21:20.363Z" }, - { url = "https://files.pythonhosted.org/packages/de/2d/cba1fa02cfdea72dfb3a9babb067c83b9dff0bbcb198368e000a6b756ea7/pydantic_core-2.41.4-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a870c307bf1ee91fc58a9a61338ff780d01bfae45922624816878dce784095d2", size = 1884515, upload-time = "2025-10-14T10:21:22.339Z" }, - { url = "https://files.pythonhosted.org/packages/07/ea/3df927c4384ed9b503c9cc2d076cf983b4f2adb0c754578dfb1245c51e46/pydantic_core-2.41.4-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d25e97bc1f5f8f7985bdc2335ef9e73843bb561eb1fa6831fdfc295c1c2061cf", size = 2042819, upload-time = "2025-10-14T10:21:26.683Z" }, - { url = "https://files.pythonhosted.org/packages/6a/ee/df8e871f07074250270a3b1b82aad4cd0026b588acd5d7d3eb2fcb1471a3/pydantic_core-2.41.4-cp313-cp313t-win_amd64.whl", hash = "sha256:d405d14bea042f166512add3091c1af40437c2e7f86988f3915fabd27b1e9cd2", size = 1995866, upload-time = "2025-10-14T10:21:28.951Z" }, - { url = "https://files.pythonhosted.org/packages/fc/de/b20f4ab954d6d399499c33ec4fafc46d9551e11dc1858fb7f5dca0748ceb/pydantic_core-2.41.4-cp313-cp313t-win_arm64.whl", hash = "sha256:19f3684868309db5263a11bace3c45d93f6f24afa2ffe75a647583df22a2ff89", size = 1970034, upload-time = "2025-10-14T10:21:30.869Z" }, - { url = 
"https://files.pythonhosted.org/packages/54/28/d3325da57d413b9819365546eb9a6e8b7cbd9373d9380efd5f74326143e6/pydantic_core-2.41.4-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:e9205d97ed08a82ebb9a307e92914bb30e18cdf6f6b12ca4bedadb1588a0bfe1", size = 2102022, upload-time = "2025-10-14T10:21:32.809Z" }, - { url = "https://files.pythonhosted.org/packages/9e/24/b58a1bc0d834bf1acc4361e61233ee217169a42efbdc15a60296e13ce438/pydantic_core-2.41.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:82df1f432b37d832709fbcc0e24394bba04a01b6ecf1ee87578145c19cde12ac", size = 1905495, upload-time = "2025-10-14T10:21:34.812Z" }, - { url = "https://files.pythonhosted.org/packages/fb/a4/71f759cc41b7043e8ecdaab81b985a9b6cad7cec077e0b92cff8b71ecf6b/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3b4cc4539e055cfa39a3763c939f9d409eb40e85813257dcd761985a108554", size = 1956131, upload-time = "2025-10-14T10:21:36.924Z" }, - { url = "https://files.pythonhosted.org/packages/b0/64/1e79ac7aa51f1eec7c4cda8cbe456d5d09f05fdd68b32776d72168d54275/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b1eb1754fce47c63d2ff57fdb88c351a6c0150995890088b33767a10218eaa4e", size = 2052236, upload-time = "2025-10-14T10:21:38.927Z" }, - { url = "https://files.pythonhosted.org/packages/e9/e3/a3ffc363bd4287b80f1d43dc1c28ba64831f8dfc237d6fec8f2661138d48/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e6ab5ab30ef325b443f379ddb575a34969c333004fca5a1daa0133a6ffaad616", size = 2223573, upload-time = "2025-10-14T10:21:41.574Z" }, - { url = "https://files.pythonhosted.org/packages/28/27/78814089b4d2e684a9088ede3790763c64693c3d1408ddc0a248bc789126/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:31a41030b1d9ca497634092b46481b937ff9397a86f9f51bd41c4767b6fc04af", size = 2342467, upload-time = "2025-10-14T10:21:44.018Z" }, - { url = "https://files.pythonhosted.org/packages/92/97/4de0e2a1159cb85ad737e03306717637842c88c7fd6d97973172fb183149/pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a44ac1738591472c3d020f61c6df1e4015180d6262ebd39bf2aeb52571b60f12", size = 2063754, upload-time = "2025-10-14T10:21:46.466Z" }, - { url = "https://files.pythonhosted.org/packages/0f/50/8cb90ce4b9efcf7ae78130afeb99fd1c86125ccdf9906ef64b9d42f37c25/pydantic_core-2.41.4-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d72f2b5e6e82ab8f94ea7d0d42f83c487dc159c5240d8f83beae684472864e2d", size = 2196754, upload-time = "2025-10-14T10:21:48.486Z" }, - { url = "https://files.pythonhosted.org/packages/34/3b/ccdc77af9cd5082723574a1cc1bcae7a6acacc829d7c0a06201f7886a109/pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:c4d1e854aaf044487d31143f541f7aafe7b482ae72a022c664b2de2e466ed0ad", size = 2137115, upload-time = "2025-10-14T10:21:50.63Z" }, - { url = "https://files.pythonhosted.org/packages/ca/ba/e7c7a02651a8f7c52dc2cff2b64a30c313e3b57c7d93703cecea76c09b71/pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:b568af94267729d76e6ee5ececda4e283d07bbb28e8148bb17adad93d025d25a", size = 2317400, upload-time = "2025-10-14T10:21:52.959Z" }, - { url = "https://files.pythonhosted.org/packages/2c/ba/6c533a4ee8aec6b812c643c49bb3bd88d3f01e3cebe451bb85512d37f00f/pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_x86_64.whl", hash = 
"sha256:6d55fb8b1e8929b341cc313a81a26e0d48aa3b519c1dbaadec3a6a2b4fcad025", size = 2312070, upload-time = "2025-10-14T10:21:55.419Z" }, - { url = "https://files.pythonhosted.org/packages/22/ae/f10524fcc0ab8d7f96cf9a74c880243576fd3e72bd8ce4f81e43d22bcab7/pydantic_core-2.41.4-cp314-cp314-win32.whl", hash = "sha256:5b66584e549e2e32a1398df11da2e0a7eff45d5c2d9db9d5667c5e6ac764d77e", size = 1982277, upload-time = "2025-10-14T10:21:57.474Z" }, - { url = "https://files.pythonhosted.org/packages/b4/dc/e5aa27aea1ad4638f0c3fb41132f7eb583bd7420ee63204e2d4333a3bbf9/pydantic_core-2.41.4-cp314-cp314-win_amd64.whl", hash = "sha256:557a0aab88664cc552285316809cab897716a372afaf8efdbef756f8b890e894", size = 2024608, upload-time = "2025-10-14T10:21:59.557Z" }, - { url = "https://files.pythonhosted.org/packages/3e/61/51d89cc2612bd147198e120a13f150afbf0bcb4615cddb049ab10b81b79e/pydantic_core-2.41.4-cp314-cp314-win_arm64.whl", hash = "sha256:3f1ea6f48a045745d0d9f325989d8abd3f1eaf47dd00485912d1a3a63c623a8d", size = 1967614, upload-time = "2025-10-14T10:22:01.847Z" }, - { url = "https://files.pythonhosted.org/packages/0d/c2/472f2e31b95eff099961fa050c376ab7156a81da194f9edb9f710f68787b/pydantic_core-2.41.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6c1fe4c5404c448b13188dd8bd2ebc2bdd7e6727fa61ff481bcc2cca894018da", size = 1876904, upload-time = "2025-10-14T10:22:04.062Z" }, - { url = "https://files.pythonhosted.org/packages/4a/07/ea8eeb91173807ecdae4f4a5f4b150a520085b35454350fc219ba79e66a3/pydantic_core-2.41.4-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:523e7da4d43b113bf8e7b49fa4ec0c35bf4fe66b2230bfc5c13cc498f12c6c3e", size = 1882538, upload-time = "2025-10-14T10:22:06.39Z" }, - { url = "https://files.pythonhosted.org/packages/1e/29/b53a9ca6cd366bfc928823679c6a76c7a4c69f8201c0ba7903ad18ebae2f/pydantic_core-2.41.4-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5729225de81fb65b70fdb1907fcf08c75d498f4a6f15af005aabb1fdadc19dfa", size = 2041183, upload-time = "2025-10-14T10:22:08.812Z" }, - { url = "https://files.pythonhosted.org/packages/c7/3d/f8c1a371ceebcaf94d6dd2d77c6cf4b1c078e13a5837aee83f760b4f7cfd/pydantic_core-2.41.4-cp314-cp314t-win_amd64.whl", hash = "sha256:de2cfbb09e88f0f795fd90cf955858fc2c691df65b1f21f0aa00b99f3fbc661d", size = 1993542, upload-time = "2025-10-14T10:22:11.332Z" }, - { url = "https://files.pythonhosted.org/packages/8a/ac/9fc61b4f9d079482a290afe8d206b8f490e9fd32d4fc03ed4fc698214e01/pydantic_core-2.41.4-cp314-cp314t-win_arm64.whl", hash = "sha256:d34f950ae05a83e0ede899c595f312ca976023ea1db100cd5aa188f7005e3ab0", size = 1973897, upload-time = "2025-10-14T10:22:13.444Z" }, - { url = "https://files.pythonhosted.org/packages/c4/48/ae937e5a831b7c0dc646b2ef788c27cd003894882415300ed21927c21efa/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:4f5d640aeebb438517150fdeec097739614421900e4a08db4a3ef38898798537", size = 2112087, upload-time = "2025-10-14T10:22:56.818Z" }, - { url = "https://files.pythonhosted.org/packages/5e/db/6db8073e3d32dae017da7e0d16a9ecb897d0a4d92e00634916e486097961/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:4a9ab037b71927babc6d9e7fc01aea9e66dc2a4a34dff06ef0724a4049629f94", size = 1920387, upload-time = "2025-10-14T10:22:59.342Z" }, - { url = 
"https://files.pythonhosted.org/packages/0d/c1/dd3542d072fcc336030d66834872f0328727e3b8de289c662faa04aa270e/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4dab9484ec605c3016df9ad4fd4f9a390bc5d816a3b10c6550f8424bb80b18c", size = 1951495, upload-time = "2025-10-14T10:23:02.089Z" }, - { url = "https://files.pythonhosted.org/packages/2b/c6/db8d13a1f8ab3f1eb08c88bd00fd62d44311e3456d1e85c0e59e0a0376e7/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8a5028425820731d8c6c098ab642d7b8b999758e24acae03ed38a66eca8335", size = 2139008, upload-time = "2025-10-14T10:23:04.539Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5f/5d/5f6c63eebb5afee93bcaae4ce9a898f3373ca23df3ccaef086d0233a35a7/pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7", size = 2110990, upload-time = "2025-11-04T13:39:58.079Z" }, + { url = "https://files.pythonhosted.org/packages/aa/32/9c2e8ccb57c01111e0fd091f236c7b371c1bccea0fa85247ac55b1e2b6b6/pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0", size = 1896003, upload-time = "2025-11-04T13:39:59.956Z" }, + { url = "https://files.pythonhosted.org/packages/68/b8/a01b53cb0e59139fbc9e4fda3e9724ede8de279097179be4ff31f1abb65a/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69", size = 1919200, upload-time = "2025-11-04T13:40:02.241Z" }, + { url = "https://files.pythonhosted.org/packages/38/de/8c36b5198a29bdaade07b5985e80a233a5ac27137846f3bc2d3b40a47360/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75", size = 2052578, upload-time = "2025-11-04T13:40:04.401Z" }, + { url = "https://files.pythonhosted.org/packages/00/b5/0e8e4b5b081eac6cb3dbb7e60a65907549a1ce035a724368c330112adfdd/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05", size = 2208504, upload-time = "2025-11-04T13:40:06.072Z" }, + { url = "https://files.pythonhosted.org/packages/77/56/87a61aad59c7c5b9dc8caad5a41a5545cba3810c3e828708b3d7404f6cef/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc", size = 2335816, upload-time = "2025-11-04T13:40:07.835Z" }, + { url = "https://files.pythonhosted.org/packages/0d/76/941cc9f73529988688a665a5c0ecff1112b3d95ab48f81db5f7606f522d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c", size = 2075366, upload-time = "2025-11-04T13:40:09.804Z" }, + { url = 
"https://files.pythonhosted.org/packages/d3/43/ebef01f69baa07a482844faaa0a591bad1ef129253ffd0cdaa9d8a7f72d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5", size = 2171698, upload-time = "2025-11-04T13:40:12.004Z" }, + { url = "https://files.pythonhosted.org/packages/b1/87/41f3202e4193e3bacfc2c065fab7706ebe81af46a83d3e27605029c1f5a6/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c", size = 2132603, upload-time = "2025-11-04T13:40:13.868Z" }, + { url = "https://files.pythonhosted.org/packages/49/7d/4c00df99cb12070b6bccdef4a195255e6020a550d572768d92cc54dba91a/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294", size = 2329591, upload-time = "2025-11-04T13:40:15.672Z" }, + { url = "https://files.pythonhosted.org/packages/cc/6a/ebf4b1d65d458f3cda6a7335d141305dfa19bdc61140a884d165a8a1bbc7/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1", size = 2319068, upload-time = "2025-11-04T13:40:17.532Z" }, + { url = "https://files.pythonhosted.org/packages/49/3b/774f2b5cd4192d5ab75870ce4381fd89cf218af999515baf07e7206753f0/pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d", size = 1985908, upload-time = "2025-11-04T13:40:19.309Z" }, + { url = "https://files.pythonhosted.org/packages/86/45/00173a033c801cacf67c190fef088789394feaf88a98a7035b0e40d53dc9/pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815", size = 2020145, upload-time = "2025-11-04T13:40:21.548Z" }, + { url = "https://files.pythonhosted.org/packages/f9/22/91fbc821fa6d261b376a3f73809f907cec5ca6025642c463d3488aad22fb/pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3", size = 1976179, upload-time = "2025-11-04T13:40:23.393Z" }, + { url = "https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403, upload-time = "2025-11-04T13:40:25.248Z" }, + { url = "https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206, upload-time = "2025-11-04T13:40:27.099Z" }, + { url = "https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307, upload-time = "2025-11-04T13:40:29.806Z" }, + { url = "https://files.pythonhosted.org/packages/9a/e3/6324802931ae1d123528988e0e86587c2072ac2e5394b4bc2bc34b61ff6e/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33", size = 2063258, upload-time = "2025-11-04T13:40:33.544Z" }, + { url = 
"https://files.pythonhosted.org/packages/c9/d4/2230d7151d4957dd79c3044ea26346c148c98fbf0ee6ebd41056f2d62ab5/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e", size = 2214917, upload-time = "2025-11-04T13:40:35.479Z" }, + { url = "https://files.pythonhosted.org/packages/e6/9f/eaac5df17a3672fef0081b6c1bb0b82b33ee89aa5cec0d7b05f52fd4a1fa/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2", size = 2332186, upload-time = "2025-11-04T13:40:37.436Z" }, + { url = "https://files.pythonhosted.org/packages/cf/4e/35a80cae583a37cf15604b44240e45c05e04e86f9cfd766623149297e971/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586", size = 2073164, upload-time = "2025-11-04T13:40:40.289Z" }, + { url = "https://files.pythonhosted.org/packages/bf/e3/f6e262673c6140dd3305d144d032f7bd5f7497d3871c1428521f19f9efa2/pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d", size = 2179146, upload-time = "2025-11-04T13:40:42.809Z" }, + { url = "https://files.pythonhosted.org/packages/75/c7/20bd7fc05f0c6ea2056a4565c6f36f8968c0924f19b7d97bbfea55780e73/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740", size = 2137788, upload-time = "2025-11-04T13:40:44.752Z" }, + { url = "https://files.pythonhosted.org/packages/3a/8d/34318ef985c45196e004bc46c6eab2eda437e744c124ef0dbe1ff2c9d06b/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e", size = 2340133, upload-time = "2025-11-04T13:40:46.66Z" }, + { url = "https://files.pythonhosted.org/packages/9c/59/013626bf8c78a5a5d9350d12e7697d3d4de951a75565496abd40ccd46bee/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858", size = 2324852, upload-time = "2025-11-04T13:40:48.575Z" }, + { url = "https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679, upload-time = "2025-11-04T13:40:50.619Z" }, + { url = "https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766, upload-time = "2025-11-04T13:40:52.631Z" }, + { url = "https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005, upload-time = "2025-11-04T13:40:54.734Z" }, + { url = "https://files.pythonhosted.org/packages/ea/28/46b7c5c9635ae96ea0fbb779e271a38129df2550f763937659ee6c5dbc65/pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a", size = 2119622, upload-time = "2025-11-04T13:40:56.68Z" }, + { 
url = "https://files.pythonhosted.org/packages/74/1a/145646e5687e8d9a1e8d09acb278c8535ebe9e972e1f162ed338a622f193/pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14", size = 1891725, upload-time = "2025-11-04T13:40:58.807Z" }, + { url = "https://files.pythonhosted.org/packages/23/04/e89c29e267b8060b40dca97bfc64a19b2a3cf99018167ea1677d96368273/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1", size = 1915040, upload-time = "2025-11-04T13:41:00.853Z" }, + { url = "https://files.pythonhosted.org/packages/84/a3/15a82ac7bd97992a82257f777b3583d3e84bdb06ba6858f745daa2ec8a85/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66", size = 2063691, upload-time = "2025-11-04T13:41:03.504Z" }, + { url = "https://files.pythonhosted.org/packages/74/9b/0046701313c6ef08c0c1cf0e028c67c770a4e1275ca73131563c5f2a310a/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869", size = 2213897, upload-time = "2025-11-04T13:41:05.804Z" }, + { url = "https://files.pythonhosted.org/packages/8a/cd/6bac76ecd1b27e75a95ca3a9a559c643b3afcd2dd62086d4b7a32a18b169/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2", size = 2333302, upload-time = "2025-11-04T13:41:07.809Z" }, + { url = "https://files.pythonhosted.org/packages/4c/d2/ef2074dc020dd6e109611a8be4449b98cd25e1b9b8a303c2f0fca2f2bcf7/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375", size = 2064877, upload-time = "2025-11-04T13:41:09.827Z" }, + { url = "https://files.pythonhosted.org/packages/18/66/e9db17a9a763d72f03de903883c057b2592c09509ccfe468187f2a2eef29/pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553", size = 2180680, upload-time = "2025-11-04T13:41:12.379Z" }, + { url = "https://files.pythonhosted.org/packages/d3/9e/3ce66cebb929f3ced22be85d4c2399b8e85b622db77dad36b73c5387f8f8/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90", size = 2138960, upload-time = "2025-11-04T13:41:14.627Z" }, + { url = "https://files.pythonhosted.org/packages/a6/62/205a998f4327d2079326b01abee48e502ea739d174f0a89295c481a2272e/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07", size = 2339102, upload-time = "2025-11-04T13:41:16.868Z" }, + { url = "https://files.pythonhosted.org/packages/3c/0d/f05e79471e889d74d3d88f5bd20d0ed189ad94c2423d81ff8d0000aab4ff/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb", size = 2326039, upload-time = "2025-11-04T13:41:18.934Z" }, + { url = "https://files.pythonhosted.org/packages/ec/e1/e08a6208bb100da7e0c4b288eed624a703f4d129bde2da475721a80cab32/pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = 
"sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23", size = 1995126, upload-time = "2025-11-04T13:41:21.418Z" }, + { url = "https://files.pythonhosted.org/packages/48/5d/56ba7b24e9557f99c9237e29f5c09913c81eeb2f3217e40e922353668092/pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf", size = 2015489, upload-time = "2025-11-04T13:41:24.076Z" }, + { url = "https://files.pythonhosted.org/packages/4e/bb/f7a190991ec9e3e0ba22e4993d8755bbc4a32925c0b5b42775c03e8148f9/pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0", size = 1977288, upload-time = "2025-11-04T13:41:26.33Z" }, + { url = "https://files.pythonhosted.org/packages/92/ed/77542d0c51538e32e15afe7899d79efce4b81eee631d99850edc2f5e9349/pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a", size = 2120255, upload-time = "2025-11-04T13:41:28.569Z" }, + { url = "https://files.pythonhosted.org/packages/bb/3d/6913dde84d5be21e284439676168b28d8bbba5600d838b9dca99de0fad71/pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3", size = 1863760, upload-time = "2025-11-04T13:41:31.055Z" }, + { url = "https://files.pythonhosted.org/packages/5a/f0/e5e6b99d4191da102f2b0eb9687aaa7f5bea5d9964071a84effc3e40f997/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c", size = 1878092, upload-time = "2025-11-04T13:41:33.21Z" }, + { url = "https://files.pythonhosted.org/packages/71/48/36fb760642d568925953bcc8116455513d6e34c4beaa37544118c36aba6d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612", size = 2053385, upload-time = "2025-11-04T13:41:35.508Z" }, + { url = "https://files.pythonhosted.org/packages/20/25/92dc684dd8eb75a234bc1c764b4210cf2646479d54b47bf46061657292a8/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d", size = 2218832, upload-time = "2025-11-04T13:41:37.732Z" }, + { url = "https://files.pythonhosted.org/packages/e2/09/f53e0b05023d3e30357d82eb35835d0f6340ca344720a4599cd663dca599/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9", size = 2327585, upload-time = "2025-11-04T13:41:40Z" }, + { url = "https://files.pythonhosted.org/packages/aa/4e/2ae1aa85d6af35a39b236b1b1641de73f5a6ac4d5a7509f77b814885760c/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660", size = 2041078, upload-time = "2025-11-04T13:41:42.323Z" }, + { url = "https://files.pythonhosted.org/packages/cd/13/2e215f17f0ef326fc72afe94776edb77525142c693767fc347ed6288728d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9", size = 2173914, upload-time = "2025-11-04T13:41:45.221Z" }, + { url = 
"https://files.pythonhosted.org/packages/02/7a/f999a6dcbcd0e5660bc348a3991c8915ce6599f4f2c6ac22f01d7a10816c/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3", size = 2129560, upload-time = "2025-11-04T13:41:47.474Z" }, + { url = "https://files.pythonhosted.org/packages/3a/b1/6c990ac65e3b4c079a4fb9f5b05f5b013afa0f4ed6780a3dd236d2cbdc64/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf", size = 2329244, upload-time = "2025-11-04T13:41:49.992Z" }, + { url = "https://files.pythonhosted.org/packages/d9/02/3c562f3a51afd4d88fff8dffb1771b30cfdfd79befd9883ee094f5b6c0d8/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470", size = 2331955, upload-time = "2025-11-04T13:41:54.079Z" }, + { url = "https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906, upload-time = "2025-11-04T13:41:56.606Z" }, + { url = "https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607, upload-time = "2025-11-04T13:41:58.889Z" }, + { url = "https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = "2025-11-04T13:42:01.186Z" }, + { url = "https://files.pythonhosted.org/packages/09/32/59b0c7e63e277fa7911c2fc70ccfb45ce4b98991e7ef37110663437005af/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd", size = 2110495, upload-time = "2025-11-04T13:42:49.689Z" }, + { url = "https://files.pythonhosted.org/packages/aa/81/05e400037eaf55ad400bcd318c05bb345b57e708887f07ddb2d20e3f0e98/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc", size = 1915388, upload-time = "2025-11-04T13:42:52.215Z" }, + { url = "https://files.pythonhosted.org/packages/6e/0d/e3549b2399f71d56476b77dbf3cf8937cec5cd70536bdc0e374a421d0599/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56", size = 1942879, upload-time = "2025-11-04T13:42:56.483Z" }, + { url = "https://files.pythonhosted.org/packages/f7/07/34573da085946b6a313d7c42f82f16e8920bfd730665de2d11c0c37a74b5/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b", size = 2139017, upload-time = "2025-11-04T13:42:59.471Z" }, ] [[package]] @@ -1458,28 +1469,28 @@ wheels = [ [[package]] name = "ruff" -version = "0.14.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/ee/34/8218a19b2055b80601e8fd201ec723c74c7fe1ca06d525a43ed07b6d8e85/ruff-0.14.2.tar.gz", hash = "sha256:98da787668f239313d9c902ca7c523fe11b8ec3f39345553a51b25abc4629c96", size = 5539663, upload-time = "2025-10-23T19:37:00.956Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/16/dd/23eb2db5ad9acae7c845700493b72d3ae214dce0b226f27df89216110f2b/ruff-0.14.2-py3-none-linux_armv6l.whl", hash = "sha256:7cbe4e593505bdec5884c2d0a4d791a90301bc23e49a6b1eb642dd85ef9c64f1", size = 12533390, upload-time = "2025-10-23T19:36:18.044Z" }, - { url = "https://files.pythonhosted.org/packages/5a/8c/5f9acff43ddcf3f85130d0146d0477e28ccecc495f9f684f8f7119b74c0d/ruff-0.14.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:8d54b561729cee92f8d89c316ad7a3f9705533f5903b042399b6ae0ddfc62e11", size = 12887187, upload-time = "2025-10-23T19:36:22.664Z" }, - { url = "https://files.pythonhosted.org/packages/99/fa/047646491479074029665022e9f3dc6f0515797f40a4b6014ea8474c539d/ruff-0.14.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:5c8753dfa44ebb2cde10ce5b4d2ef55a41fb9d9b16732a2c5df64620dbda44a3", size = 11925177, upload-time = "2025-10-23T19:36:24.778Z" }, - { url = "https://files.pythonhosted.org/packages/15/8b/c44cf7fe6e59ab24a9d939493a11030b503bdc2a16622cede8b7b1df0114/ruff-0.14.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d0bbeffb8d9f4fccf7b5198d566d0bad99a9cb622f1fc3467af96cb8773c9e3", size = 12358285, upload-time = "2025-10-23T19:36:26.979Z" }, - { url = "https://files.pythonhosted.org/packages/45/01/47701b26254267ef40369aea3acb62a7b23e921c27372d127e0f3af48092/ruff-0.14.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7047f0c5a713a401e43a88d36843d9c83a19c584e63d664474675620aaa634a8", size = 12303832, upload-time = "2025-10-23T19:36:29.192Z" }, - { url = "https://files.pythonhosted.org/packages/2d/5c/ae7244ca4fbdf2bee9d6405dcd5bc6ae51ee1df66eb7a9884b77b8af856d/ruff-0.14.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3bf8d2f9aa1602599217d82e8e0af7fd33e5878c4d98f37906b7c93f46f9a839", size = 13036995, upload-time = "2025-10-23T19:36:31.861Z" }, - { url = "https://files.pythonhosted.org/packages/27/4c/0860a79ce6fd4c709ac01173f76f929d53f59748d0dcdd662519835dae43/ruff-0.14.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:1c505b389e19c57a317cf4b42db824e2fca96ffb3d86766c1c9f8b96d32048a7", size = 14512649, upload-time = "2025-10-23T19:36:33.915Z" }, - { url = "https://files.pythonhosted.org/packages/7f/7f/d365de998069720a3abfc250ddd876fc4b81a403a766c74ff9bde15b5378/ruff-0.14.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a307fc45ebd887b3f26b36d9326bb70bf69b01561950cdcc6c0bdf7bb8e0f7cc", size = 14088182, upload-time = "2025-10-23T19:36:36.983Z" }, - { url = "https://files.pythonhosted.org/packages/6c/ea/d8e3e6b209162000a7be1faa41b0a0c16a133010311edc3329753cc6596a/ruff-0.14.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:61ae91a32c853172f832c2f40bd05fd69f491db7289fb85a9b941ebdd549781a", size = 13599516, upload-time = "2025-10-23T19:36:39.208Z" }, - { url = "https://files.pythonhosted.org/packages/fa/ea/c7810322086db68989fb20a8d5221dd3b79e49e396b01badca07b433ab45/ruff-0.14.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1967e40286f63ee23c615e8e7e98098dedc7301568bd88991f6e544d8ae096", size = 13272690, upload-time = "2025-10-23T19:36:41.453Z" }, - { url = 
"https://files.pythonhosted.org/packages/a9/39/10b05acf8c45786ef501d454e00937e1b97964f846bf28883d1f9619928a/ruff-0.14.2-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:2877f02119cdebf52a632d743a2e302dea422bfae152ebe2f193d3285a3a65df", size = 13496497, upload-time = "2025-10-23T19:36:43.61Z" }, - { url = "https://files.pythonhosted.org/packages/59/a1/1f25f8301e13751c30895092485fada29076e5e14264bdacc37202e85d24/ruff-0.14.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e681c5bc777de5af898decdcb6ba3321d0d466f4cb43c3e7cc2c3b4e7b843a05", size = 12266116, upload-time = "2025-10-23T19:36:45.625Z" }, - { url = "https://files.pythonhosted.org/packages/5c/fa/0029bfc9ce16ae78164e6923ef392e5f173b793b26cc39aa1d8b366cf9dc/ruff-0.14.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:e21be42d72e224736f0c992cdb9959a2fa53c7e943b97ef5d081e13170e3ffc5", size = 12281345, upload-time = "2025-10-23T19:36:47.618Z" }, - { url = "https://files.pythonhosted.org/packages/a5/ab/ece7baa3c0f29b7683be868c024f0838770c16607bea6852e46b202f1ff6/ruff-0.14.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:b8264016f6f209fac16262882dbebf3f8be1629777cf0f37e7aff071b3e9b92e", size = 12629296, upload-time = "2025-10-23T19:36:49.789Z" }, - { url = "https://files.pythonhosted.org/packages/a4/7f/638f54b43f3d4e48c6a68062794e5b367ddac778051806b9e235dfb7aa81/ruff-0.14.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5ca36b4cb4db3067a3b24444463ceea5565ea78b95fe9a07ca7cb7fd16948770", size = 13371610, upload-time = "2025-10-23T19:36:51.882Z" }, - { url = "https://files.pythonhosted.org/packages/8d/35/3654a973ebe5b32e1fd4a08ed2d46755af7267da7ac710d97420d7b8657d/ruff-0.14.2-py3-none-win32.whl", hash = "sha256:41775927d287685e08f48d8eb3f765625ab0b7042cc9377e20e64f4eb0056ee9", size = 12415318, upload-time = "2025-10-23T19:36:53.961Z" }, - { url = "https://files.pythonhosted.org/packages/71/30/3758bcf9e0b6a4193a6f51abf84254aba00887dfa8c20aba18aa366c5f57/ruff-0.14.2-py3-none-win_amd64.whl", hash = "sha256:0df3424aa5c3c08b34ed8ce099df1021e3adaca6e90229273496b839e5a7e1af", size = 13565279, upload-time = "2025-10-23T19:36:56.578Z" }, - { url = "https://files.pythonhosted.org/packages/2e/5d/aa883766f8ef9ffbe6aa24f7192fb71632f31a30e77eb39aa2b0dc4290ac/ruff-0.14.2-py3-none-win_arm64.whl", hash = "sha256:ea9d635e83ba21569fbacda7e78afbfeb94911c9434aff06192d9bc23fd5495a", size = 12554956, upload-time = "2025-10-23T19:36:58.714Z" }, +version = "0.14.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/df/55/cccfca45157a2031dcbb5a462a67f7cf27f8b37d4b3b1cd7438f0f5c1df6/ruff-0.14.4.tar.gz", hash = "sha256:f459a49fe1085a749f15414ca76f61595f1a2cc8778ed7c279b6ca2e1fd19df3", size = 5587844, upload-time = "2025-11-06T22:07:45.033Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/17/b9/67240254166ae1eaa38dec32265e9153ac53645a6c6670ed36ad00722af8/ruff-0.14.4-py3-none-linux_armv6l.whl", hash = "sha256:e6604613ffbcf2297cd5dcba0e0ac9bd0c11dc026442dfbb614504e87c349518", size = 12606781, upload-time = "2025-11-06T22:07:01.841Z" }, + { url = "https://files.pythonhosted.org/packages/46/c8/09b3ab245d8652eafe5256ab59718641429f68681ee713ff06c5c549f156/ruff-0.14.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:d99c0b52b6f0598acede45ee78288e5e9b4409d1ce7f661f0fa36d4cbeadf9a4", size = 12946765, upload-time = "2025-11-06T22:07:05.858Z" }, + { url = 
"https://files.pythonhosted.org/packages/14/bb/1564b000219144bf5eed2359edc94c3590dd49d510751dad26202c18a17d/ruff-0.14.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:9358d490ec030f1b51d048a7fd6ead418ed0826daf6149e95e30aa67c168af33", size = 11928120, upload-time = "2025-11-06T22:07:08.023Z" }, + { url = "https://files.pythonhosted.org/packages/a3/92/d5f1770e9988cc0742fefaa351e840d9aef04ec24ae1be36f333f96d5704/ruff-0.14.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81b40d27924f1f02dfa827b9c0712a13c0e4b108421665322218fc38caf615c2", size = 12370877, upload-time = "2025-11-06T22:07:10.015Z" }, + { url = "https://files.pythonhosted.org/packages/e2/29/e9282efa55f1973d109faf839a63235575519c8ad278cc87a182a366810e/ruff-0.14.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f5e649052a294fe00818650712083cddc6cc02744afaf37202c65df9ea52efa5", size = 12408538, upload-time = "2025-11-06T22:07:13.085Z" }, + { url = "https://files.pythonhosted.org/packages/8e/01/930ed6ecfce130144b32d77d8d69f5c610e6d23e6857927150adf5d7379a/ruff-0.14.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa082a8f878deeba955531f975881828fd6afd90dfa757c2b0808aadb437136e", size = 13141942, upload-time = "2025-11-06T22:07:15.386Z" }, + { url = "https://files.pythonhosted.org/packages/6a/46/a9c89b42b231a9f487233f17a89cbef9d5acd538d9488687a02ad288fa6b/ruff-0.14.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:1043c6811c2419e39011890f14d0a30470f19d47d197c4858b2787dfa698f6c8", size = 14544306, upload-time = "2025-11-06T22:07:17.631Z" }, + { url = "https://files.pythonhosted.org/packages/78/96/9c6cf86491f2a6d52758b830b89b78c2ae61e8ca66b86bf5a20af73d20e6/ruff-0.14.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a9f3a936ac27fb7c2a93e4f4b943a662775879ac579a433291a6f69428722649", size = 14210427, upload-time = "2025-11-06T22:07:19.832Z" }, + { url = "https://files.pythonhosted.org/packages/71/f4/0666fe7769a54f63e66404e8ff698de1dcde733e12e2fd1c9c6efb689cb5/ruff-0.14.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:95643ffd209ce78bc113266b88fba3d39e0461f0cbc8b55fb92505030fb4a850", size = 13658488, upload-time = "2025-11-06T22:07:22.32Z" }, + { url = "https://files.pythonhosted.org/packages/ee/79/6ad4dda2cfd55e41ac9ed6d73ef9ab9475b1eef69f3a85957210c74ba12c/ruff-0.14.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:456daa2fa1021bc86ca857f43fe29d5d8b3f0e55e9f90c58c317c1dcc2afc7b5", size = 13354908, upload-time = "2025-11-06T22:07:24.347Z" }, + { url = "https://files.pythonhosted.org/packages/b5/60/f0b6990f740bb15c1588601d19d21bcc1bd5de4330a07222041678a8e04f/ruff-0.14.4-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:f911bba769e4a9f51af6e70037bb72b70b45a16db5ce73e1f72aefe6f6d62132", size = 13587803, upload-time = "2025-11-06T22:07:26.327Z" }, + { url = "https://files.pythonhosted.org/packages/c9/da/eaaada586f80068728338e0ef7f29ab3e4a08a692f92eb901a4f06bbff24/ruff-0.14.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:76158a7369b3979fa878612c623a7e5430c18b2fd1c73b214945c2d06337db67", size = 12279654, upload-time = "2025-11-06T22:07:28.46Z" }, + { url = "https://files.pythonhosted.org/packages/66/d4/b1d0e82cf9bf8aed10a6d45be47b3f402730aa2c438164424783ac88c0ed/ruff-0.14.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:f3b8f3b442d2b14c246e7aeca2e75915159e06a3540e2f4bed9f50d062d24469", size = 12357520, upload-time = "2025-11-06T22:07:31.468Z" }, + { url = 
"https://files.pythonhosted.org/packages/04/f4/53e2b42cc82804617e5c7950b7079d79996c27e99c4652131c6a1100657f/ruff-0.14.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:c62da9a06779deecf4d17ed04939ae8b31b517643b26370c3be1d26f3ef7dbde", size = 12719431, upload-time = "2025-11-06T22:07:33.831Z" }, + { url = "https://files.pythonhosted.org/packages/a2/94/80e3d74ed9a72d64e94a7b7706b1c1ebaa315ef2076fd33581f6a1cd2f95/ruff-0.14.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5a443a83a1506c684e98acb8cb55abaf3ef725078be40237463dae4463366349", size = 13464394, upload-time = "2025-11-06T22:07:35.905Z" }, + { url = "https://files.pythonhosted.org/packages/54/1a/a49f071f04c42345c793d22f6cf5e0920095e286119ee53a64a3a3004825/ruff-0.14.4-py3-none-win32.whl", hash = "sha256:643b69cb63cd996f1fc7229da726d07ac307eae442dd8974dbc7cf22c1e18fff", size = 12493429, upload-time = "2025-11-06T22:07:38.43Z" }, + { url = "https://files.pythonhosted.org/packages/bc/22/e58c43e641145a2b670328fb98bc384e20679b5774258b1e540207580266/ruff-0.14.4-py3-none-win_amd64.whl", hash = "sha256:26673da283b96fe35fa0c939bf8411abec47111644aa9f7cfbd3c573fb125d2c", size = 13635380, upload-time = "2025-11-06T22:07:40.496Z" }, + { url = "https://files.pythonhosted.org/packages/30/bd/4168a751ddbbf43e86544b4de8b5c3b7be8d7167a2a5cb977d274e04f0a1/ruff-0.14.4-py3-none-win_arm64.whl", hash = "sha256:dd09c292479596b0e6fec8cd95c65c3a6dc68e9ad17b8f2382130f87ff6a75bb", size = 12663065, upload-time = "2025-11-06T22:07:42.603Z" }, ] [[package]] @@ -1529,14 +1540,14 @@ wheels = [ [[package]] name = "sse-starlette" -version = "3.0.2" +version = "3.0.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/42/6f/22ed6e33f8a9e76ca0a412405f31abb844b779d52c5f96660766edcd737c/sse_starlette-3.0.2.tar.gz", hash = "sha256:ccd60b5765ebb3584d0de2d7a6e4f745672581de4f5005ab31c3a25d10b52b3a", size = 20985, upload-time = "2025-07-27T09:07:44.565Z" } +sdist = { url = "https://files.pythonhosted.org/packages/db/3c/fa6517610dc641262b77cc7bf994ecd17465812c1b0585fe33e11be758ab/sse_starlette-3.0.3.tar.gz", hash = "sha256:88cfb08747e16200ea990c8ca876b03910a23b547ab3bd764c0d8eb81019b971", size = 21943, upload-time = "2025-10-30T18:44:20.117Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ef/10/c78f463b4ef22eef8491f218f692be838282cd65480f6e423d7730dfd1fb/sse_starlette-3.0.2-py3-none-any.whl", hash = "sha256:16b7cbfddbcd4eaca11f7b586f3b8a080f1afe952c15813455b162edea619e5a", size = 11297, upload-time = "2025-07-27T09:07:43.268Z" }, + { url = "https://files.pythonhosted.org/packages/23/a0/984525d19ca5c8a6c33911a0c164b11490dd0f90ff7fd689f704f84e9a11/sse_starlette-3.0.3-py3-none-any.whl", hash = "sha256:af5bf5a6f3933df1d9c7f8539633dc8444ca6a97ab2e2a7cd3b6e431ac03a431", size = 11765, upload-time = "2025-10-30T18:44:18.834Z" }, ] [[package]] @@ -1555,15 +1566,15 @@ wheels = [ [[package]] name = "starlette" -version = "0.49.0" +version = "0.49.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, { name = "typing-extensions", marker = "python_full_version < '3.13'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e1/36/70878596a60339cd9f78ee58b9ca8548f235a02d21be649ca73c35ccdd5a/starlette-0.49.0.tar.gz", hash = "sha256:cb75dfe3267b99caf9036db355601f8c6092c4d50d132b3724cb373766cb2ab1", size = 2654292, upload-time = "2025-10-28T08:11:42.379Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/de/1a/608df0b10b53b0beb96a37854ee05864d182ddd4b1156a22f1ad3860425a/starlette-0.49.3.tar.gz", hash = "sha256:1c14546f299b5901a1ea0e34410575bc33bbd741377a10484a54445588d00284", size = 2655031, upload-time = "2025-11-01T15:12:26.13Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/98/c4/da4392a59f3f1c9da2fa7bafd697ed14cf5f2ee2102c5cd4a4a1c5985e42/starlette-0.49.0-py3-none-any.whl", hash = "sha256:6f3988f55dda3fd36e5640aeaceee5b691063e0ebc4d7851e1c3a6695fbdc8ba", size = 74057, upload-time = "2025-10-28T08:11:40.895Z" }, + { url = "https://files.pythonhosted.org/packages/a3/e0/021c772d6a662f43b63044ab481dc6ac7592447605b5b35a957785363122/starlette-0.49.3-py3-none-any.whl", hash = "sha256:b579b99715fdc2980cf88c8ec96d3bf1ce16f5a8051a7c2b84ef9b1cdecaea2f", size = 74340, upload-time = "2025-11-01T15:12:24.387Z" }, ] [[package]] @@ -1861,11 +1872,11 @@ wheels = [ [[package]] name = "webcolors" -version = "24.11.1" +version = "25.10.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7b/29/061ec845fb58521848f3739e466efd8250b4b7b98c1b6c5bf4d40b419b7e/webcolors-24.11.1.tar.gz", hash = "sha256:ecb3d768f32202af770477b8b65f318fa4f566c22948673a977b00d589dd80f6", size = 45064, upload-time = "2024-11-11T07:43:24.224Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1d/7a/eb316761ec35664ea5174709a68bbd3389de60d4a1ebab8808bfc264ed67/webcolors-25.10.0.tar.gz", hash = "sha256:62abae86504f66d0f6364c2a8520de4a0c47b80c03fc3a5f1815fedbef7c19bf", size = 53491, upload-time = "2025-10-31T07:51:03.977Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/60/e8/c0e05e4684d13459f93d312077a9a2efbe04d59c393bc2b8802248c908d4/webcolors-24.11.1-py3-none-any.whl", hash = "sha256:515291393b4cdf0eb19c155749a096f779f7d909f7cceea072791cb9095b92e9", size = 14934, upload-time = "2024-11-11T07:43:22.529Z" }, + { url = "https://files.pythonhosted.org/packages/e2/cc/e097523dd85c9cf5d354f78310927f1656c422bd7b2613b2db3e3f9a0f2c/webcolors-25.10.0-py3-none-any.whl", hash = "sha256:032c727334856fc0b968f63daa252a1ac93d33db2f5267756623c210e57a4f1d", size = 14905, upload-time = "2025-10-31T07:51:01.778Z" }, ] [[package]] diff --git a/webhook_server/app.py b/webhook_server/app.py index d1e0461ac..beac2a2c5 100644 --- a/webhook_server/app.py +++ b/webhook_server/app.py @@ -1,28 +1,32 @@ +import asyncio import ipaddress import json -import logging import os -import sys +from collections.abc import AsyncGenerator from contextlib import asynccontextmanager -from typing import Any, AsyncGenerator +from typing import Any import httpx import requests import urllib3 from fastapi import ( - BackgroundTasks, Depends, FastAPI, HTTPException, + Query, Request, + Response, WebSocket, status, ) -from fastapi.responses import HTMLResponse, StreamingResponse +from fastapi.responses import HTMLResponse, JSONResponse, StreamingResponse from fastapi.staticfiles import StaticFiles # Import for MCP integration from fastapi_mcp import FastApiMCP +from fastapi_mcp.transport.http import FastApiHttpSessionManager +from mcp.server.streamable_http_manager import StreamableHTTPSessionManager +from starlette.datastructures import Headers from webhook_server.libs.config import Config from webhook_server.libs.exceptions import RepositoryNotFoundInConfigError @@ -40,12 +44,14 @@ # Constants APP_URL_ROOT_PATH: str = "/webhook_server" +LOG_SERVER_ENABLED: bool = os.environ.get("ENABLE_LOG_SERVER") == "true" # Global variables ALLOWED_IPS: 
tuple[ipaddress._BaseNetwork, ...] = () LOGGER = get_logger_with_params() _lifespan_http_client: httpx.AsyncClient | None = None +_background_tasks: set[asyncio.Task] = set() # Helper function to wrap the imported gate_by_allowlist_ips with ALLOWED_IPS @@ -54,8 +60,17 @@ async def gate_by_allowlist_ips_dependency(request: Request) -> None: await gate_by_allowlist_ips(request, ALLOWED_IPS) +def require_log_server_enabled() -> None: + """Dependency to ensure log server is enabled before accessing log viewer APIs.""" + if not LOG_SERVER_ENABLED: + raise HTTPException( + status_code=status.HTTP_404_NOT_FOUND, + detail="Log server is disabled. Set ENABLE_LOG_SERVER=true to enable.", + ) + + @asynccontextmanager -async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]: +async def lifespan(_app: FastAPI) -> AsyncGenerator[None, None]: global _lifespan_http_client _lifespan_http_client = httpx.AsyncClient(timeout=HTTP_TIMEOUT_SECONDS) @@ -80,8 +95,8 @@ async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]: config = Config(logger=LOGGER) root_config = config.root_data - verify_github_ips = root_config.get("verify-github-ips") - verify_cloudflare_ips = root_config.get("verify-cloudflare-ips") + verify_github_ips = root_config.get("verify-github-ips", False) + verify_cloudflare_ips = root_config.get("verify-cloudflare-ips", False) disable_ssl_warnings = root_config.get("disable-ssl-warnings", False) # Conditionally disable urllib3 warnings based on config @@ -124,7 +139,13 @@ async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]: ALLOWED_IPS = tuple(networks) LOGGER.info(f"IP allowlist initialized successfully with {len(ALLOWED_IPS)} networks.") elif verify_github_ips or verify_cloudflare_ips: - LOGGER.warning("IP verification enabled but no valid IPs loaded - webhook will accept from any IP") + # Fail-close: If IP verification is enabled but no networks loaded, reject all requests + LOGGER.error("IP verification enabled but no valid IPs loaded - failing closed for security") + raise RuntimeError( + "IP verification enabled but no allowlist loaded. " + "Cannot start server in insecure state. " + "Check network connectivity to GitHub/Cloudflare API endpoints." 
+ ) yield @@ -143,6 +164,20 @@ async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]: await _lifespan_http_client.aclose() LOGGER.debug("HTTP client closed") + # Optionally wait for pending background tasks for graceful shutdown + global _background_tasks + if _background_tasks: + LOGGER.info(f"Waiting for {len(_background_tasks)} pending background task(s) to complete...") + # Wait up to 30 seconds for tasks to complete + done, pending = await asyncio.wait(_background_tasks, timeout=30.0, return_when=asyncio.ALL_COMPLETED) + if pending: + LOGGER.warning(f"{len(pending)} background task(s) did not complete within timeout, cancelling...") + for task in pending: + task.cancel() + # Wait briefly for cancellations to propagate + await asyncio.wait(pending, timeout=5.0) + LOGGER.debug(f"Background tasks cleanup complete: {len(done)} completed, {len(pending)} cancelled") + LOGGER.info("Application shutdown complete.") @@ -164,23 +199,73 @@ def healthcheck() -> dict[str, Any]: dependencies=[Depends(gate_by_allowlist_ips_dependency)], tags=["mcp_exclude"], ) -async def process_webhook(request: Request, background_tasks: BackgroundTasks) -> dict[str, Any]: - # Extract headers early for logging - delivery_id = request.headers.get("X-GitHub-Delivery", "unknown-delivery") - event_type = request.headers.get("X-GitHub-Event", "unknown-event") +async def process_webhook(request: Request) -> JSONResponse: + """Process GitHub webhooks with immediate 200 OK response and background processing. + + **Critical Design Pattern:** + This endpoint returns 200 OK immediately after validating that we have enough + data to process the webhook. This design prevents GitHub webhook timeouts (10 + second limit) while allowing long-running operations to complete asynchronously. + + **Synchronous Validation (must pass to return 200):** + 1. Read request body + 2. Verify signature (if webhook-secret configured) + 3. Parse JSON payload + 4. Validate required fields: repository.name, repository.full_name, X-GitHub-Event + + **Background Processing (errors logged only):** + - Config loading, repository validation, API initialization + - All API calls + - All handler processing + - All errors (missing repos, API failures, etc.) are caught and logged + + **Why Background Processing:** + - GitHub webhook timeout: 10 seconds + - Typical processing time: 5-30 seconds (API calls, builds, notifications) + - Without background processing: Frequent timeouts, webhook retries, duplicates + - With background processing: Instant 200 OK, reliable webhook delivery + + **Implications:** + - HTTP 200 OK means webhook payload was valid and queued for processing + - HTTP 200 OK does NOT mean webhook was processed successfully + - Check logs with delivery_id to verify actual processing results + + Args: + request: FastAPI Request object containing webhook payload and headers + + Returns: + JSONResponse: 200 OK response with delivery_id and event_type for tracking - # Use standardized log prefix format (will get repository info after parsing payload) - log_context = prepare_log_prefix(event_type, delivery_id) + Raises: + HTTPException 400: Missing required fields (X-GitHub-Event, repository.name, + repository.full_name) or invalid JSON payload + HTTPException 401: Signature verification failed (if webhook-secret configured) + HTTPException 500: Configuration errors during signature verification setup + + Note: + All processing errors (missing repos, API failures, etc.) + happen in background and are logged only. 
They do NOT affect the HTTP response. + """ + # Extract headers for validation and logging + delivery_id = request.headers.get("X-GitHub-Delivery", "unknown-delivery") + event_type = request.headers.get("X-GitHub-Event") + log_context = prepare_log_prefix(event_type or "unknown-event", delivery_id) LOGGER.info(f"{log_context} Processing webhook") + # Validate X-GitHub-Event header (required by GithubWebhook.__init__) + if not event_type: + LOGGER.error(f"{log_context} Missing X-GitHub-Event header") + raise HTTPException(status_code=400, detail="Missing X-GitHub-Event header") + + # Read request body try: payload_body = await request.body() except Exception as e: LOGGER.error(f"{log_context} Failed to read request body: {e}") - raise HTTPException(status_code=400, detail="Failed to read request body") + raise HTTPException(status_code=400, detail="Failed to read request body") from e - # Load config and verify signature + # Verify signature if configured try: config = Config(logger=LOGGER) root_config = config.root_data @@ -194,62 +279,90 @@ async def process_webhook(request: Request, background_tasks: BackgroundTasks) - raise except Exception as e: LOGGER.error(f"{log_context} Configuration error: {e}") - raise HTTPException(status_code=500, detail="Configuration error") + raise HTTPException(status_code=500, detail="Configuration error") from e # Parse JSON payload try: hook_data: dict[Any, Any] = json.loads(payload_body) - if "repository" not in hook_data or "name" not in hook_data["repository"]: - raise ValueError("Missing repository information in payload") - except json.JSONDecodeError as e: - LOGGER.error(f"{log_context} Invalid JSON payload: {e}") - raise HTTPException(status_code=400, detail="Invalid JSON payload") - except ValueError as e: - LOGGER.error(f"{log_context} Invalid payload structure: {e}") - raise HTTPException(status_code=400, detail=str(e)) - - # Create repository-specific logger - repository_name = hook_data["repository"]["name"] - logger = get_logger_with_params(repository_name=repository_name) - logger.info(f"{log_context} Processing webhook for repository: {repository_name}") - - async def process_with_error_handling(_api: GithubWebhook, _logger: logging.Logger) -> None: + except json.JSONDecodeError: + LOGGER.exception(f"{log_context} Invalid JSON payload") + raise HTTPException(status_code=400, detail="Invalid JSON payload") from None + + # Validate required fields for GithubWebhook.__init__() + if "repository" not in hook_data: + LOGGER.error(f"{log_context} Missing repository in payload") + raise HTTPException(status_code=400, detail="Missing repository in payload") + if "name" not in hook_data["repository"]: + LOGGER.error(f"{log_context} Missing repository.name in payload") + raise HTTPException(status_code=400, detail="Missing repository.name in payload") + if "full_name" not in hook_data["repository"]: + LOGGER.error(f"{log_context} Missing repository.full_name in payload") + raise HTTPException(status_code=400, detail="Missing repository.full_name in payload") + + # Return 200 immediately - all validation passed, we can process this webhook + LOGGER.info(f"{log_context} Webhook validation passed, queuing for background processing") + + async def process_with_error_handling( + _hook_data: dict[Any, Any], _headers: Headers, _delivery_id: str, _event_type: str + ) -> None: + """Process webhook in background with granular error handling. + + This function runs in a background task after the webhook endpoint has already + returned 200 OK to GitHub. 
Exceptions here do NOT affect the HTTP response, + preventing webhook timeouts while still logging all errors for debugging. + + Args: + _hook_data: Webhook payload data dictionary + _headers: Starlette Headers object from the incoming request + _delivery_id: GitHub delivery ID for logging + _event_type: GitHub event type for logging + """ + # Create repository-specific logger in background + repository_name = _hook_data.get("repository", {}).get("name", "unknown") + _logger = get_logger_with_params(repository_name=repository_name) + _log_context = prepare_log_prefix( + event_type=_event_type, delivery_id=_delivery_id, repository_name=repository_name + ) + _logger.info(f"{_log_context} Processing webhook") + try: + # Initialize GithubWebhook inside background task to avoid blocking webhook response + _api: GithubWebhook = GithubWebhook(hook_data=_hook_data, headers=_headers, logger=_logger) await _api.process() - _logger.success(f"{log_context} Webhook processing completed successfully") # type: ignore - except Exception as e: - _logger.exception(f"{log_context} Error in background task: {e}") - - try: - api: GithubWebhook = GithubWebhook(hook_data=hook_data, headers=request.headers, logger=logger) - background_tasks.add_task(process_with_error_handling, _api=api, _logger=logger) + except RepositoryNotFoundInConfigError: + # Repository-specific error - not exceptional, log as error not exception + _logger.error(f"{_log_context} Repository not found in configuration") + except (httpx.ConnectError, httpx.RequestError, requests.exceptions.ConnectionError): + # Network/connection errors - can be transient + _logger.exception(f"{_log_context} API connection error - check network connectivity") + except Exception: + # Catch-all for unexpected errors + _logger.exception(f"{_log_context} Unexpected error in background webhook processing") + + # Start background task immediately using asyncio.create_task + # This ensures the HTTP response is sent immediately without waiting + # Store task reference for observability and graceful shutdown + task = asyncio.create_task( + process_with_error_handling( + _hook_data=hook_data, + _headers=request.headers, + _delivery_id=delivery_id, + _event_type=event_type, + ) + ) + _background_tasks.add(task) + task.add_done_callback(_background_tasks.discard) - LOGGER.info(f"{log_context} Webhook queued for background processing") - return { + # Return 200 immediately with JSONResponse for fastest serialization + return JSONResponse( + status_code=status.HTTP_200_OK, + content={ "status": status.HTTP_200_OK, "message": "Webhook queued for processing", "delivery_id": delivery_id, "event_type": event_type, - } - - except RepositoryNotFoundInConfigError as e: - logger.error(f"{log_context} Repository not found: {e}") - raise HTTPException(status_code=404, detail=str(e)) - - except ConnectionError as e: - logger.error(f"{log_context} API connection error: {e}") - raise HTTPException(status_code=503, detail=f"API Connection Error: {e}") - - except HTTPException: - raise - - except Exception as e: - logger.exception(f"{log_context} Unexpected error during processing: {e}") - exc_type, _, exc_tb = sys.exc_info() - line_no = exc_tb.tb_lineno if exc_tb else "unknown" - file_name = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1] if exc_tb else "unknown" - error_details = f"Error type: {exc_type.__name__ if exc_type else ''}, File: {file_name}, Line: {line_no}" - raise HTTPException(status_code=500, detail=f"Internal Server Error: {error_details}") + }, + ) # Module-level 
singleton instance @@ -318,7 +431,11 @@ async def _get_log_entries_core( ) -@FASTAPI_APP.get("/logs/api/entries", operation_id="get_log_entries") +@FASTAPI_APP.get( + "/logs/api/entries", + operation_id="get_log_entries", + dependencies=[Depends(require_log_server_enabled)], +) async def get_log_entries( hook_id: str | None = None, pr_number: int | None = None, @@ -329,8 +446,8 @@ async def get_log_entries( start_time: str | None = None, end_time: str | None = None, search: str | None = None, - limit: int = 100, - offset: int = 0, + limit: int = Query(default=100, ge=1, le=10000, description="Maximum entries to return (1-10000)"), + offset: int = Query(default=0, ge=0, description="Number of entries to skip for pagination"), controller: LogViewerController = controller_dependency, ) -> dict[str, Any]: """Retrieve and filter webhook processing logs with advanced pagination and search capabilities. @@ -469,9 +586,17 @@ async def _export_logs_core( ) -@FASTAPI_APP.get("/logs/api/export", operation_id="export_logs") +@FASTAPI_APP.get( + "/logs/api/export", + operation_id="export_logs", + dependencies=[Depends(require_log_server_enabled)], +) async def export_logs( - format_type: str, + format_type: str = Query( + default="json", + pattern="^json$", + description="Export format (currently only 'json' supported)", + ), hook_id: str | None = None, pr_number: int | None = None, repository: str | None = None, @@ -481,7 +606,7 @@ async def export_logs( start_time: str | None = None, end_time: str | None = None, search: str | None = None, - limit: int = 10000, + limit: int = Query(default=10000, ge=1, le=100000, description="Maximum entries to export (1-100000)"), controller: LogViewerController = controller_dependency, ) -> StreamingResponse: """Export filtered webhook logs to downloadable files for offline analysis and reporting. @@ -615,7 +740,11 @@ async def _get_pr_flow_data_core( return controller.get_pr_flow_data(hook_id) -@FASTAPI_APP.get("/logs/api/pr-flow/{hook_id}", operation_id="get_pr_flow_data") +@FASTAPI_APP.get( + "/logs/api/pr-flow/{hook_id}", + operation_id="get_pr_flow_data", + dependencies=[Depends(require_log_server_enabled)], +) async def get_pr_flow_data(hook_id: str, controller: LogViewerController = controller_dependency) -> dict[str, Any]: """Get PR workflow visualization data for process analysis and debugging. @@ -656,7 +785,11 @@ async def _get_workflow_steps_core( return controller.get_workflow_steps(hook_id) -@FASTAPI_APP.get("/logs/api/workflow-steps/{hook_id}", operation_id="get_workflow_steps") +@FASTAPI_APP.get( + "/logs/api/workflow-steps/{hook_id}", + operation_id="get_workflow_steps", + dependencies=[Depends(require_log_server_enabled)], +) async def get_workflow_steps(hook_id: str, controller: LogViewerController = controller_dependency) -> dict[str, Any]: """Retrieve detailed timeline and execution data for individual workflow steps within a webhook processing flow. 
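The process_webhook rewrite above relies on a subtle asyncio detail: a task created with asyncio.create_task can be garbage-collected if nothing holds a strong reference to it, which is why the module keeps a _background_tasks set and prunes it through add_done_callback. Below is a minimal, self-contained sketch of that tracked fire-and-forget pattern together with the bounded-wait shutdown from the lifespan hunk; the names (queue_work, handle, demo) are illustrative and not part of this repository.

```python
import asyncio

# Strong references keep scheduled tasks alive until they finish.
background_tasks: set[asyncio.Task] = set()


async def handle(payload: str) -> None:
    await asyncio.sleep(0.1)  # stand-in for slow GitHub API calls
    print(f"processed {payload}")


def queue_work(payload: str) -> None:
    # create_task schedules the coroutine without awaiting it, so the caller
    # (the HTTP handler in the real code) can return a response immediately.
    task = asyncio.create_task(handle(payload))
    background_tasks.add(task)
    # The done-callback makes the set self-cleaning once a task completes.
    task.add_done_callback(background_tasks.discard)


async def demo() -> None:
    queue_work("delivery-1")
    queue_work("delivery-2")
    # Graceful shutdown: wait up to a deadline, then cancel stragglers,
    # mirroring the lifespan shutdown logic in the diff above.
    if background_tasks:
        _, pending = await asyncio.wait(background_tasks, timeout=30.0)
        for task in pending:
            task.cancel()


asyncio.run(demo())
```

Without the tracking set, the event loop holds only a weak reference to pending tasks, and a burst of webhooks could see some background work silently dropped mid-flight.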
@@ -901,6 +1034,11 @@ async def websocket_log_stream( level: str | None = None, ) -> None: """Handle WebSocket connection for real-time log streaming.""" + # Check if log server is enabled (manual check since WebSocket routes don't support dependencies the same way) + if not LOG_SERVER_ENABLED: + await websocket.close(code=status.WS_1008_POLICY_VIOLATION, reason="Log server is disabled") + return + controller = get_log_viewer_controller() await controller.handle_websocket( websocket=websocket, @@ -914,7 +1052,43 @@ # Create MCP instance with the main app +# NOTE: No authentication configured - MCP server runs without auth mcp = FastApiMCP(FASTAPI_APP, exclude_tags=["mcp_exclude"]) -mcp.mount_http() -LOGGER.info("MCP integration initialized successfully") +# Create stateless HTTP transport to avoid session management issues +# Override with stateless session manager +http_transport = FastApiHttpSessionManager( + mcp_server=mcp.server, + event_store=None, # No event store needed for stateless mode + json_response=True, +) +# Manually patch to use stateless mode +http_transport._session_manager = None # Force recreation with stateless=True + + +# Register the HTTP endpoint manually +@FASTAPI_APP.api_route("/mcp", methods=["GET", "POST", "DELETE"], include_in_schema=False, operation_id="mcp_http") +async def handle_mcp_streamable_http(request: Request) -> Response: + # Ensure session manager is created with stateless=True + if http_transport._session_manager is None: + http_transport._session_manager = StreamableHTTPSessionManager( + app=mcp.server, + event_store=http_transport.event_store, + json_response=True, + stateless=True, # Enable stateless mode - no session management required + ) + # Start the session manager + + async def run_manager() -> None: + async with http_transport._session_manager.run(): + await asyncio.Event().wait() + + http_transport._manager_task = asyncio.create_task(run_manager()) + http_transport._manager_started = True + await asyncio.sleep(0.1) # Give it time to initialize + + return await http_transport.handle_fastapi_request(request) + + +LOGGER.info("MCP integration initialized successfully (no authentication configured)") +LOGGER.debug("MCP HTTP endpoint mounted at: /mcp") diff --git a/webhook_server/libs/config.py b/webhook_server/libs/config.py index 6f8a83452..4686a0541 100644 --- a/webhook_server/libs/config.py +++ b/webhook_server/libs/config.py @@ -33,22 +33,49 @@ def repositories_exists(self) -> None: def root_data(self) -> dict[str, Any]: try: with open(self.config_path) as fd: - return yaml.safe_load(fd) + return yaml.safe_load(fd) or {} + except FileNotFoundError: + # Since existence is validated in __init__, this indicates a race condition. + # Re-raise to propagate the error rather than returning empty dict.
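+ # For example, the file could be removed between the startup existence check and a later reload; + # failing loudly is safer than silently treating a missing config as empty.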
+ self.logger.exception(f"Config file not found: {self.config_path}") + raise + except yaml.YAMLError: + self.logger.exception(f"Config file has invalid YAML syntax: {self.config_path}") + raise + except PermissionError: + self.logger.exception(f"Permission denied reading config file: {self.config_path}") + raise except Exception: - self.logger.error(f"Config file is empty: {self.config_path}") - return {} + self.logger.exception(f"Failed to load config file {self.config_path}") + raise @property def repository_data(self) -> dict[str, Any]: return self.root_data["repositories"].get(self.repository, {}) def repository_local_data(self, github_api: github.Github, repository_full_name: str) -> dict[str, Any]: - if self.repository and repository_full_name: - # Import here to avoid cyclic imports - from webhook_server.utils.helpers import get_github_repo_api + """ + Get repository-specific configuration from .github-webhook-server.yaml file. + + Reads configuration from the repository's .github-webhook-server.yaml file, + which takes precedence over global config.yaml settings. + + Args: + github_api: PyGithub API instance for repository access + repository_full_name: Full repository name (owner/repo-name) + Returns: + Dictionary containing repository configuration, or empty dict if file not found + + Raises: + yaml.YAMLError: If repository config file has invalid YAML syntax + """ + if self.repository and repository_full_name: try: - repo = get_github_repo_api(github_app_api=github_api, repository=repository_full_name) + # Directly use github_api.get_repo instead of importing get_github_repo_api + # to avoid circular dependency with helpers.py + self.logger.debug(f"Get GitHub API for repository {repository_full_name}") + repo = github_api.get_repo(repository_full_name) try: _path = repo.get_contents(".github-webhook-server.yaml") except UnknownObjectException: @@ -58,8 +85,12 @@ def repository_local_data(self, github_api: github.Github, repository_full_name: repo_config = yaml.safe_load(config_file.decoded_content) return repo_config - except Exception as ex: - self.logger.error(f"Repository {repository_full_name} config file not found or error. {ex}") + except yaml.YAMLError: + self.logger.exception(f"Repository {repository_full_name} config has invalid YAML syntax") + raise + + except Exception: + self.logger.exception(f"Repository {repository_full_name} config file not found or error") return {} self.logger.error("self.repository or self.repository_full_name is not defined") @@ -69,20 +100,43 @@ def get_value(self, value: str, return_on_none: Any = None, extra_dict: dict[str """ Get value from config + Supports dot notation for nested values (e.g., "docker.username", "pypi.token") + Order of getting value: 1. Local repository file (.github-webhook-server.yaml) 2. Repository level global config file (config.yaml) 3. Root level global config file (config.yaml) """ - if extra_dict and extra_dict.get(value): - value = extra_dict[value] - if value is not None: - return value + if extra_dict: + result = self._get_nested_value(value, extra_dict) + if result is not None: + return result for scope in (self.repository_data, self.root_data): - if value in scope: - value_data = scope[value] - if value_data is not None: - return value_data + result = self._get_nested_value(value, scope) + if result is not None: + return result return return_on_none + + def _get_nested_value(self, key: str, data: dict[str, Any]) -> Any: + """ + Get value from nested dict using dot notation. 
+ + Args: + key: Key with optional dot notation (e.g., "docker.username", "pypi.token") + data: Dictionary to search + + Returns: + Value if found, None otherwise + """ + keys = key.split(".") + current = data + + for k in keys: + if isinstance(current, dict) and k in current: + current = current[k] + else: + return None + + return current diff --git a/webhook_server/libs/exceptions.py b/webhook_server/libs/exceptions.py index a75dd5c69..b10b62b47 100644 --- a/webhook_server/libs/exceptions.py +++ b/webhook_server/libs/exceptions.py @@ -1,12 +1,10 @@ class RepositoryNotFoundInConfigError(Exception): - pass - + """Raised when a repository is not found in the configuration file.""" -class ProcessGithubWebhookError(Exception): - def __init__(self, err: dict[str, str]): - self.err = err - super().__init__(str(err)) + pass class NoApiTokenError(Exception): + """Raised when no API token is available for GitHub API operations.""" + pass diff --git a/webhook_server/libs/github_api.py b/webhook_server/libs/github_api.py index b23ce068f..d9381e8bb 100644 --- a/webhook_server/libs/github_api.py +++ b/webhook_server/libs/github_api.py @@ -2,11 +2,14 @@ import asyncio import contextlib -import json import logging import os +import shlex +import shutil +import tempfile from typing import Any +import github import requests from github import GithubException from github.Commit import Commit @@ -14,14 +17,14 @@ from github.Repository import Repository from starlette.datastructures import Headers -from webhook_server.libs.check_run_handler import CheckRunHandler from webhook_server.libs.config import Config from webhook_server.libs.exceptions import RepositoryNotFoundInConfigError -from webhook_server.libs.issue_comment_handler import IssueCommentHandler -from webhook_server.libs.owners_files_handler import OwnersFileHandler -from webhook_server.libs.pull_request_handler import PullRequestHandler -from webhook_server.libs.pull_request_review_handler import PullRequestReviewHandler -from webhook_server.libs.push_handler import PushHandler +from webhook_server.libs.handlers.check_run_handler import CheckRunHandler +from webhook_server.libs.handlers.issue_comment_handler import IssueCommentHandler +from webhook_server.libs.handlers.owners_files_handler import OwnersFileHandler +from webhook_server.libs.handlers.pull_request_handler import PullRequestHandler +from webhook_server.libs.handlers.pull_request_review_handler import PullRequestReviewHandler +from webhook_server.libs.handlers.push_handler import PushHandler from webhook_server.utils.constants import ( BUILD_CONTAINER_STR, CAN_BE_MERGED_STR, @@ -35,7 +38,7 @@ get_repository_github_app_api, ) from webhook_server.utils.helpers import ( - extract_key_from_dict, + format_task_fields, get_api_with_highest_rate_limit, get_apis_and_tokes_from_config, get_github_repo_api, @@ -50,6 +53,7 @@ def __init__(self, hook_data: dict[Any, Any], headers: Headers, logger: logging. self.hook_data = hook_data self.repository_name: str = hook_data["repository"]["name"] self.repository_full_name: str = hook_data["repository"]["full_name"] + self._bg_tasks: set[asyncio.Task] = set() self.parent_committer: str = "" self.x_github_delivery: str = headers.get("X-GitHub-Delivery", "") self.github_event: str = headers["X-GitHub-Event"] @@ -61,6 +65,8 @@ def __init__(self, hook_data: dict[Any, Any], headers: Headers, logger: logging. 
self.token: str self.api_user: str self.current_pull_request_supported_retest: list[str] = [] + self.github_api: github.Github | None = None + self.initial_rate_limit_remaining: int | None = None if not self.config.repository_data: raise RepositoryNotFoundInConfigError(f"Repository {self.repository_name} not found in config file") @@ -72,6 +78,14 @@ ) if github_api and self.token: + self.github_api = github_api + # Track initial rate limit for token spend calculation + # Note: log_prefix not set yet, so we can't use it in error messages here + try: + initial_rate_limit = github_api.get_rate_limit() + self.initial_rate_limit_remaining = initial_rate_limit.rate.remaining + except Exception as ex: + self.logger.debug(f"Failed to get initial rate limit: {ex}") self.repository = get_github_repo_api(github_app_api=github_api, repository=self.repository_full_name) # Once we have a repository, we can get the config from .github-webhook-server.yaml local_repository_config = self.config.repository_local_data( @@ -105,90 +119,245 @@ self.logger.error(f"{self.log_prefix} Failed to get repository.") return - self.clone_repo_dir: str = os.path.join("/tmp", f"{self.repository.name}") - self.add_api_users_to_auto_verified_and_merged_users + # Create unique temp directory to avoid collisions and security issues + # Format: /tmp/github-webhook-{repo_name}-{random} (from the tempfile.mkdtemp prefix below) + # This prevents predictable paths and ensures isolation between concurrent webhook handlers + self.clone_repo_dir: str = tempfile.mkdtemp(prefix=f"github-webhook-{self.repository_name}-") + # Initialize auto-verified users from API users + self.add_api_users_to_auto_verified_and_merged_users() self.current_pull_request_supported_retest = self._current_pull_request_supported_retest self.issue_url_for_welcome_msg: str = ( "Report bugs in [Issues](https://github.com/myakove/github-webhook-server/issues)" ) + async def _get_token_metrics(self) -> str: + """Get token metrics (API rate limit consumption) for this webhook. + + Returns: + str: Formatted token metrics string for logging, or empty string if unavailable. + """ + if not self.github_api or self.initial_rate_limit_remaining is None: + return "" + + try: + final_rate_limit = await asyncio.to_thread(self.github_api.get_rate_limit) + final_remaining = final_rate_limit.rate.remaining + + # Calculate token spend (handle case where rate limit reset between checks) + # If final > initial, rate limit reset occurred, so we can't calculate accurately + if final_remaining > self.initial_rate_limit_remaining: + # Rate limit reset happened - log as 0 since we can't determine actual spend + token_spend = 0 + return ( + f"token {self.token[:8]}... {token_spend} API calls " + f"(rate limit reset occurred - initial: {self.initial_rate_limit_remaining}, " + f"final: {final_remaining})" + ) + else: + token_spend = self.initial_rate_limit_remaining - final_remaining + # Return token spend with structured format for parsing + return ( + f"token {self.token[:8]}... {token_spend} API calls " + f"(initial: {self.initial_rate_limit_remaining}, " + f"final: {final_remaining}, remaining: {final_remaining})" + ) + except Exception as ex: + self.logger.debug(f"{self.log_prefix} Failed to get token metrics: {ex}") + return "" + async def process(self) -> Any: event_log: str = f"Event type: {self.github_event}. 
event ID: {self.x_github_delivery}" - self.logger.step(f"{self.log_prefix} Starting webhook processing: {event_log}") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('webhook_processing', 'webhook_routing', 'started')} " + f"Starting webhook processing: {event_log}", + ) if self.github_event == "ping": - self.logger.step(f"{self.log_prefix} Processing ping event") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('webhook_processing', 'webhook_routing', 'processing')} " + f"Processing ping event", + ) self.logger.debug(f"{self.log_prefix} {event_log}") + token_metrics = await self._get_token_metrics() + self.logger.success( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('webhook_processing', 'webhook_routing', 'completed')} " + f"Webhook processing completed successfully: ping - {token_metrics}", + ) return {"status": requests.codes.ok, "message": "pong"} if self.github_event == "push": - self.logger.step(f"{self.log_prefix} Processing push event") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('webhook_processing', 'webhook_routing', 'processing')} " + f"Processing push event", + ) self.logger.debug(f"{self.log_prefix} {event_log}") - return await PushHandler(github_webhook=self).process_push_webhook_data() + await PushHandler(github_webhook=self).process_push_webhook_data() + token_metrics = await self._get_token_metrics() + self.logger.success( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('webhook_processing', 'webhook_routing', 'completed')} " + f"Webhook processing completed successfully: push - {token_metrics}", + ) + return None + + pull_request = await self.get_pull_request() + if pull_request: + # Log how we got the pull request (for workflow tracking) + if self.github_event == "pull_request": + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('webhook_processing', 'webhook_routing', 'processing')} " + f"Initializing pull request from webhook payload", + ) + else: + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('webhook_processing', 'webhook_routing', 'processing')} " + f"Fetched pull request data via API (event: {self.github_event})", + ) - if pull_request := await self.get_pull_request(): self.log_prefix = self.prepare_log_prefix(pull_request=pull_request) - self.logger.step(f"{self.log_prefix} Processing pull request event: {event_log}") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('webhook_processing', 'webhook_routing', 'processing')} " + f"Processing pull request event: {event_log}", + ) self.logger.debug(f"{self.log_prefix} {event_log}") - if pull_request.draft: - self.logger.step(f"{self.log_prefix} Pull request is draft, skipping processing") # type: ignore + if await asyncio.to_thread(lambda: pull_request.draft): + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('webhook_processing', 'webhook_routing', 'processing')} " + f"Pull request is draft, skipping processing", + ) self.logger.debug(f"{self.log_prefix} Pull request is draft, doing nothing") + token_metrics = await self._get_token_metrics() + self.logger.success( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('webhook_processing', 'webhook_routing', 'completed')} " + f"Webhook processing 
completed successfully: draft PR (skipped) - {token_metrics}", + ) return None - self.logger.step(f"{self.log_prefix} Initializing pull request data") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('webhook_processing', 'webhook_routing', 'processing')} " + f"Initializing pull request data", + ) self.last_commit = await self._get_last_commit(pull_request=pull_request) self.parent_committer = pull_request.user.login self.last_committer = getattr(self.last_commit.committer, "login", self.parent_committer) if self.github_event == "issue_comment": - self.logger.step(f"{self.log_prefix} Initializing OWNERS file handler for issue comment") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('webhook_processing', 'webhook_routing', 'processing')} " + f"Initializing OWNERS file handler for issue comment", + ) owners_file_handler = OwnersFileHandler(github_webhook=self) owners_file_handler = await owners_file_handler.initialize(pull_request=pull_request) - self.logger.step(f"{self.log_prefix} Processing issue comment with IssueCommentHandler") # type: ignore - return await IssueCommentHandler( + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('webhook_processing', 'webhook_routing', 'processing')} " + f"Processing issue comment with IssueCommentHandler", + ) + await IssueCommentHandler( github_webhook=self, owners_file_handler=owners_file_handler ).process_comment_webhook_data(pull_request=pull_request) + token_metrics = await self._get_token_metrics() + self.logger.success( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('webhook_processing', 'webhook_routing', 'completed')} " + f"Webhook processing completed successfully: issue_comment - {token_metrics}", + ) + return None elif self.github_event == "pull_request": - self.logger.step(f"{self.log_prefix} Initializing OWNERS file handler for pull request") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('webhook_processing', 'webhook_routing', 'processing')} " + f"Initializing OWNERS file handler for pull request", + ) owners_file_handler = OwnersFileHandler(github_webhook=self) owners_file_handler = await owners_file_handler.initialize(pull_request=pull_request) - self.logger.step(f"{self.log_prefix} Processing pull request with PullRequestHandler") # type: ignore - return await PullRequestHandler( + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('webhook_processing', 'webhook_routing', 'processing')} " + f"Processing pull request with PullRequestHandler", + ) + await PullRequestHandler( github_webhook=self, owners_file_handler=owners_file_handler ).process_pull_request_webhook_data(pull_request=pull_request) + token_metrics = await self._get_token_metrics() + self.logger.success( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('webhook_processing', 'webhook_routing', 'completed')} " + f"Webhook processing completed successfully: pull_request - {token_metrics}", + ) + return None elif self.github_event == "pull_request_review": - self.logger.step(f"{self.log_prefix} Initializing OWNERS file handler for pull request review") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('webhook_processing', 'webhook_routing', 'processing')} " + f"Initializing OWNERS file handler for pull request review", + ) 
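+ # The OWNERS handler is initialized before the review handler so reviewer and approver data is available to it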
owners_file_handler = OwnersFileHandler(github_webhook=self) owners_file_handler = await owners_file_handler.initialize(pull_request=pull_request) - self.logger.step(f"{self.log_prefix} Processing pull request review with PullRequestReviewHandler") # type: ignore - return await PullRequestReviewHandler( + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('webhook_processing', 'webhook_routing', 'processing')} " + f"Processing pull request review with PullRequestReviewHandler", + ) + await PullRequestReviewHandler( github_webhook=self, owners_file_handler=owners_file_handler ).process_pull_request_review_webhook_data( pull_request=pull_request, ) + token_metrics = await self._get_token_metrics() + self.logger.success( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('webhook_processing', 'webhook_routing', 'completed')} " + f"Webhook processing completed successfully: pull_request_review - {token_metrics}", + ) + return None elif self.github_event == "check_run": - self.logger.step(f"{self.log_prefix} Initializing OWNERS file handler for check run") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('webhook_processing', 'webhook_routing', 'processing')} " + f"Initializing OWNERS file handler for check run", + ) owners_file_handler = OwnersFileHandler(github_webhook=self) owners_file_handler = await owners_file_handler.initialize(pull_request=pull_request) - self.logger.step(f"{self.log_prefix} Processing check run with CheckRunHandler") # type: ignore - if await CheckRunHandler( + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('webhook_processing', 'webhook_routing', 'processing')} " + f"Processing check run with CheckRunHandler", + ) + handled = await CheckRunHandler( github_webhook=self, owners_file_handler=owners_file_handler - ).process_pull_request_check_run_webhook_data(pull_request=pull_request): + ).process_pull_request_check_run_webhook_data(pull_request=pull_request) + if handled: if self.hook_data["check_run"]["name"] != CAN_BE_MERGED_STR: - self.logger.step(f"{self.log_prefix} Checking if pull request can be merged after check run") # type: ignore - return await PullRequestHandler( + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} " + f"{format_task_fields('webhook_processing', 'webhook_routing', 'processing')} " + f"Checking if pull request can be merged after check run", + ) + await PullRequestHandler( github_webhook=self, owners_file_handler=owners_file_handler ).check_if_can_be_merged(pull_request=pull_request) + # Log completion regardless of whether check run was processed or skipped + token_metrics = await self._get_token_metrics() + self.logger.success( # type: ignore[attr-defined] + f"{self.log_prefix} " + f"{format_task_fields('webhook_processing', 'webhook_routing', 'completed')} " + f"Webhook processing completed successfully: check_run - {token_metrics}", + ) + return None + + else: + # Log warning when no PR found + self.logger.warning( + f"{self.log_prefix} " + f"{format_task_fields('webhook_processing', 'webhook_routing', 'skipped')} " + f"No pull request found for {self.github_event} event - skipping processing" + ) + token_metrics = await self._get_token_metrics() + self.logger.success( # type: ignore[attr-defined] + f"{self.log_prefix} " + f"{format_task_fields('webhook_processing', 'webhook_routing', 'completed')} " + f"Webhook processing completed: no PR found - {token_metrics}" + 
) + return None - @property def add_api_users_to_auto_verified_and_merged_users(self) -> None: apis_and_tokens = get_apis_and_tokes_from_config(config=self.config) for _api, _ in apis_and_tokens: @@ -231,8 +400,19 @@ def _repo_data_from_config(self, repository_config: dict[str, Any]) -> None: self.container_repository: str = self.build_and_push_container["repository"] self.dockerfile: str = self.build_and_push_container.get("dockerfile", "Dockerfile") self.container_tag: str = self.build_and_push_container.get("tag", "latest") - self.container_build_args: str = self.build_and_push_container.get("build-args", "") - self.container_command_args: str = self.build_and_push_container.get("args", "") + _build_args = self.build_and_push_container.get("build-args", []) + _cmd_args = self.build_and_push_container.get("args", []) + # Normalize to lists + if isinstance(_build_args, str): + _build_args = [a for a in shlex.split(_build_args) if a] + elif not isinstance(_build_args, list): + _build_args = [] + if isinstance(_cmd_args, str): + _cmd_args = [a for a in shlex.split(_cmd_args) if a] + elif not isinstance(_cmd_args, list): + _cmd_args = [] + self.container_build_args: list[str] = [str(a) for a in _build_args] + self.container_command_args: list[str] = [str(a) for a in _cmd_args] self.container_release: bool = self.build_and_push_container.get("release", False) self.pre_commit: bool = self.config.get_value( @@ -266,29 +446,52 @@ def _repo_data_from_config(self, repository_config: dict[str, Any]) -> None: async def get_pull_request(self, number: int | None = None) -> PullRequest | None: if number: + self.logger.debug(f"{self.log_prefix} Attempting to get PR by number: {number}") return await asyncio.to_thread(self.repository.get_pull, number) - for _number in extract_key_from_dict(key="number", _dict=self.hook_data): - try: - return await asyncio.to_thread(self.repository.get_pull, _number) - except GithubException: - continue + # Try to get PR number from hook_data + self.logger.debug(f"{self.log_prefix} Attempting to get PR from webhook payload") + pr_data = self.hook_data.get("pull_request") or self.hook_data.get("issue", {}) + if pr_data and isinstance(pr_data, dict): + pr_number = pr_data.get("number") + if pr_number: + self.logger.debug(f"{self.log_prefix} Found PR number in payload: {pr_number}") + try: + return await asyncio.to_thread(self.repository.get_pull, pr_number) + except GithubException as ex: + self.logger.debug(f"{self.log_prefix} Failed to get PR {pr_number} from payload: {ex}") + else: + self.logger.debug(f"{self.log_prefix} No PR number found in payload") + else: + self.logger.debug(f"{self.log_prefix} No PR data in webhook payload") commit: dict[str, Any] = self.hook_data.get("commit", {}) if commit: + self.logger.debug(f"{self.log_prefix} Attempting to get PR from commit SHA: {commit.get('sha', 'unknown')}") commit_obj = await asyncio.to_thread(self.repository.get_commit, commit["sha"]) with contextlib.suppress(Exception): _pulls = await asyncio.to_thread(commit_obj.get_pulls) - return _pulls[0] + if _pulls: + self.logger.debug(f"{self.log_prefix} Found PR from commit SHA: {_pulls[0].number}") + return _pulls[0] + self.logger.debug(f"{self.log_prefix} No PR found for commit SHA") + else: + self.logger.debug(f"{self.log_prefix} No commit data in webhook payload") if self.github_event == "check_run": + head_sha = self.hook_data["check_run"]["head_sha"] + self.logger.debug(f"{self.log_prefix} Searching open PRs for check_run head SHA: {head_sha}") for _pull_request in await 
asyncio.to_thread(self.repository.get_pulls, state="open"): - if _pull_request.head.sha == self.hook_data["check_run"]["head_sha"]: + if _pull_request.head.sha == head_sha: self.logger.debug( - f"{self.log_prefix} Found pull request {_pull_request.title} [{_pull_request.number}] for check run {self.hook_data['check_run']['name']}" + f"{self.log_prefix} Found pull request {_pull_request.title} " + f"[{_pull_request.number}] for check run " + f"{self.hook_data['check_run']['name']}" ) return _pull_request + self.logger.debug(f"{self.log_prefix} No open PR found matching check_run head SHA") + self.logger.debug(f"{self.log_prefix} All PR lookup strategies exhausted, no PR found") return None async def _get_last_commit(self, pull_request: PullRequest) -> Commit: @@ -328,19 +531,6 @@ def container_repository_and_tag( self.logger.error(f"{self.log_prefix} container tag not found") return None - def send_slack_message(self, message: str, webhook_url: str) -> None: - slack_data: dict[str, str] = {"text": message} - self.logger.info(f"{self.log_prefix} Sending message to slack: {message}") - response: requests.Response = requests.post( - webhook_url, - data=json.dumps(slack_data), - headers={"Content-Type": "application/json"}, - ) - if response.status_code != 200: - raise ValueError( - f"Request to slack returned an error {response.status_code} with the following message: {response.text}" - ) - @property def _current_pull_request_supported_retest(self) -> list[str]: current_pull_request_supported_retest: list[str] = [] @@ -360,3 +550,19 @@ def _current_pull_request_supported_retest(self) -> list[str]: if self.conventional_title: current_pull_request_supported_retest.append(CONVENTIONAL_TITLE_STR) return current_pull_request_supported_retest + + def __del__(self) -> None: + """Cleanup temporary clone directory on object destruction. + + This ensures the base temp directory created by tempfile.mkdtemp() is removed + when the webhook handler is destroyed, preventing temp directory leaks. + The subdirectories (created with -uuid4() suffix) are cleaned up by + _prepare_cloned_repo_dir context manager in handlers. 
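+ + Cleanup is best-effort: rmtree runs with ignore_errors=True and any remaining + exception is swallowed, since __del__ can fire late in interpreter shutdown.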
+ """ + if hasattr(self, "clone_repo_dir") and os.path.exists(self.clone_repo_dir): + try: + shutil.rmtree(self.clone_repo_dir, ignore_errors=True) + if hasattr(self, "logger"): + self.logger.debug(f"Cleaned up temp directory: {self.clone_repo_dir}") + except Exception: + pass # Ignore errors during cleanup diff --git a/webhook_server/libs/handlers/__init__.py b/webhook_server/libs/handlers/__init__.py new file mode 100644 index 000000000..bb6c967ad --- /dev/null +++ b/webhook_server/libs/handlers/__init__.py @@ -0,0 +1 @@ +"""Handler modules for processing GitHub webhook events.""" diff --git a/webhook_server/libs/check_run_handler.py b/webhook_server/libs/handlers/check_run_handler.py similarity index 76% rename from webhook_server/libs/check_run_handler.py rename to webhook_server/libs/handlers/check_run_handler.py index a05a14660..a95b1559f 100644 --- a/webhook_server/libs/check_run_handler.py +++ b/webhook_server/libs/handlers/check_run_handler.py @@ -5,8 +5,8 @@ from github.PullRequest import PullRequest from github.Repository import Repository -from webhook_server.libs.labels_handler import LabelsHandler -from webhook_server.libs.owners_files_handler import OwnersFileHandler +from webhook_server.libs.handlers.labels_handler import LabelsHandler +from webhook_server.libs.handlers.owners_files_handler import OwnersFileHandler from webhook_server.utils.constants import ( AUTOMERGE_LABEL_STR, BUILD_CONTAINER_STR, @@ -22,6 +22,7 @@ TOX_STR, VERIFIED_LABEL_STR, ) +from webhook_server.utils.helpers import format_task_fields, strip_ansi_codes if TYPE_CHECKING: from webhook_server.libs.github_api import GithubWebhook @@ -46,18 +47,28 @@ async def process_pull_request_check_run_webhook_data(self, pull_request: PullRe _check_run: dict[str, Any] = self.hook_data["check_run"] check_run_name: str = _check_run["name"] - self.logger.step(f"{self.log_prefix} Processing check run: {check_run_name}") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('check_run', 'ci_check', 'processing')} " + f"Processing check run: {check_run_name}", + ) if self.hook_data.get("action", "") != "completed": self.logger.debug( - f"{self.log_prefix} check run {check_run_name} action is {self.hook_data.get('action', 'N/A')} and not completed, skipping" + f"{self.log_prefix} check run {check_run_name} action is " + f"{self.hook_data.get('action', 'N/A')} and not completed, skipping" + ) + # Log completion - task_status reflects the result of our action (skipping is acceptable) + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('check_run', 'ci_check', 'completed')} " + f"Processing check run: {check_run_name} (action not completed - skipped)", ) return False check_run_status: str = _check_run["status"] check_run_conclusion: str = _check_run["conclusion"] self.logger.debug( - f"{self.log_prefix} processing check_run - Name: {check_run_name} Status: {check_run_status} Conclusion: {check_run_conclusion}" + f"{self.log_prefix} processing check_run - Name: {check_run_name} " + f"Status: {check_run_status} Conclusion: {check_run_conclusion}" ) if check_run_name == CAN_BE_MERGED_STR: @@ -66,24 +77,55 @@ async def process_pull_request_check_run_webhook_data(self, pull_request: PullRe label=AUTOMERGE_LABEL_STR, pull_request=pull_request ): try: - self.logger.step(f"{self.log_prefix} Executing auto-merge for PR #{pull_request.number}") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} 
{format_task_fields('check_run', 'automerge', 'processing')} " + f"Executing auto-merge for PR #{pull_request.number}", + ) await asyncio.to_thread(pull_request.merge, merge_method="SQUASH") - self.logger.step(f"{self.log_prefix} Auto-merge completed successfully") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('check_run', 'automerge', 'completed')} " + f"Auto-merge completed successfully", + ) self.logger.info( f"{self.log_prefix} Successfully auto-merged pull request #{pull_request.number}" ) + # Log completion for main check_run processing + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('check_run', 'ci_check', 'completed')} " + f"Processing check run: {check_run_name} (auto-merged)", + ) return False except Exception as ex: self.logger.error( f"{self.log_prefix} Failed to auto-merge pull request #{pull_request.number}: {ex}" ) + # Log failure for automerge + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('check_run', 'automerge', 'failed')} " + f"Failed to auto-merge PR #{pull_request.number}: {ex}", + ) # Continue processing to allow manual intervention + # Log completion for main check_run processing (continuing after failed automerge) + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('check_run', 'ci_check', 'completed')} " + f"Processing check run: {check_run_name} (auto-merge failed, continuing)", + ) return True else: self.logger.debug(f"{self.log_prefix} check run is {CAN_BE_MERGED_STR}, skipping") + # Log completion - task_status reflects the result of our action + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('check_run', 'ci_check', 'completed')} " + f"Processing check run: {check_run_name} (skipped - conditions not met)", + ) return False + # Log completion - task_status reflects the result of our action + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('check_run', 'ci_check', 'completed')} " + f"Processing check run: {check_run_name} (completed)", + ) return True async def set_verify_check_queued(self) -> None: @@ -218,14 +260,27 @@ async def set_check_run_status( msg: str = f"{self.log_prefix} check run {check_run} status: {status or conclusion}" # Log workflow steps for check run status changes + # task_status reflects the result of our action, not what we're setting the check to if status == QUEUED_STR: - self.logger.step(f"{self.log_prefix} Setting {check_run} check to queued") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('check_run', 'ci_check', 'completed')} " + f"Setting {check_run} check to queued", + ) elif status == IN_PROGRESS_STR: - self.logger.step(f"{self.log_prefix} Setting {check_run} check to in-progress") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('check_run', 'ci_check', 'completed')} " + f"Setting {check_run} check to in-progress", + ) elif conclusion == SUCCESS_STR: - self.logger.step(f"{self.log_prefix} Setting {check_run} check to success") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('check_run', 'ci_check', 'completed')} " + f"Setting {check_run} check to success", + ) elif conclusion == FAILURE_STR: - self.logger.step(f"{self.log_prefix} Setting {check_run} check to failure") # type: ignore + 
self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('check_run', 'ci_check', 'failed')} " + f"Setting {check_run} check to failure", + ) try: self.logger.debug(f"{self.log_prefix} Set check run status with {kwargs}") @@ -240,12 +295,16 @@ async def set_check_run_status( await asyncio.to_thread(self.github_webhook.repository_by_github_app.create_check_run, **kwargs) def get_check_run_text(self, err: str, out: str) -> str: - total_len: int = len(err) + len(out) + # Strip ANSI escape codes from output to prevent scrambled characters in GitHub UI + err_clean = strip_ansi_codes(err) + out_clean = strip_ansi_codes(out) + + total_len: int = len(err_clean) + len(out_clean) if total_len > 65534: # GitHub limit is 65535 characters - _output = f"```\n{err}\n\n{out}\n```"[:65534] + _output = f"```\n{err_clean}\n\n{out_clean}\n```"[:65534] else: - _output = f"```\n{err}\n\n{out}\n```" + _output = f"```\n{err_clean}\n\n{out_clean}\n```" _hased_str = "*****" @@ -302,11 +361,11 @@ async def required_check_failed_or_no_status( if failed_check_run not in check_runs_in_progress ] msg += f"Some check runs failed: {', '.join(exclude_in_progress)}\n" - self.logger.debug(f"failed_check_runs: {failed_check_runs}") + self.logger.debug(f"{self.log_prefix} failed_check_runs: {failed_check_runs}") if no_status_check_runs: msg += f"Some check runs not started: {', '.join(no_status_check_runs)}\n" - self.logger.debug(f"no_status_check_runs: {no_status_check_runs}") + self.logger.debug(f"{self.log_prefix} no_status_check_runs: {no_status_check_runs}") return msg @@ -343,7 +402,7 @@ async def get_branch_required_status_checks(self, pull_request: PullRequest) -> pull_request_branch = await asyncio.to_thread(self.repository.get_branch, pull_request.base.ref) branch_protection = await asyncio.to_thread(pull_request_branch.get_protection) branch_required_status_checks = branch_protection.required_status_checks.contexts - self.logger.debug(f"branch_required_status_checks: {branch_required_status_checks}") + self.logger.debug(f"{self.log_prefix} branch_required_status_checks: {branch_required_status_checks}") return branch_required_status_checks async def required_check_in_progress( diff --git a/webhook_server/libs/issue_comment_handler.py b/webhook_server/libs/handlers/issue_comment_handler.py similarity index 73% rename from webhook_server/libs/issue_comment_handler.py rename to webhook_server/libs/handlers/issue_comment_handler.py index 2f9877b77..19aa8ba7d 100644 --- a/webhook_server/libs/issue_comment_handler.py +++ b/webhook_server/libs/handlers/issue_comment_handler.py @@ -2,16 +2,17 @@ import asyncio from asyncio import Task -from typing import TYPE_CHECKING, Any, Callable, Coroutine, Union +from collections.abc import Callable, Coroutine +from typing import TYPE_CHECKING, Any from github.PullRequest import PullRequest from github.Repository import Repository -from webhook_server.libs.check_run_handler import CheckRunHandler -from webhook_server.libs.labels_handler import LabelsHandler -from webhook_server.libs.owners_files_handler import OwnersFileHandler -from webhook_server.libs.pull_request_handler import PullRequestHandler -from webhook_server.libs.runner_handler import RunnerHandler +from webhook_server.libs.handlers.check_run_handler import CheckRunHandler +from webhook_server.libs.handlers.labels_handler import LabelsHandler +from webhook_server.libs.handlers.owners_files_handler import OwnersFileHandler +from webhook_server.libs.handlers.pull_request_handler import 
PullRequestHandler +from webhook_server.libs.handlers.runner_handler import RunnerHandler from webhook_server.utils.constants import ( AUTOMERGE_LABEL_STR, BUILD_AND_PUSH_CONTAINER_STR, @@ -33,13 +34,14 @@ VERIFIED_LABEL_STR, WIP_STR, ) +from webhook_server.utils.helpers import format_task_fields if TYPE_CHECKING: from webhook_server.libs.github_api import GithubWebhook class IssueCommentHandler: - def __init__(self, github_webhook: "GithubWebhook", owners_file_handler: OwnersFileHandler): + def __init__(self, github_webhook: GithubWebhook, owners_file_handler: OwnersFileHandler): self.github_webhook = github_webhook self.owners_file_handler = owners_file_handler @@ -60,35 +62,97 @@ def __init__(self, github_webhook: "GithubWebhook", owners_file_handler: OwnersF async def process_comment_webhook_data(self, pull_request: PullRequest) -> None: comment_action = self.hook_data["action"] - self.logger.step(f"{self.log_prefix} Starting issue comment processing: action={comment_action}") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('issue_comment', 'pr_management', 'started')} " + f"Starting issue comment processing: action={comment_action}", + ) if comment_action in ("edited", "deleted"): - self.logger.step(f"{self.log_prefix} Skipping comment processing: action is {comment_action}") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('issue_comment', 'pr_management', 'processing')} " + f"Skipping comment processing: action is {comment_action}", + ) self.logger.debug(f"{self.log_prefix} Not processing comment. action is {comment_action}") + # Log completion - task_status reflects the result of our action (skipping is acceptable) + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('issue_comment', 'pr_management', 'completed')} " + f"Skipping comment processing: action is {comment_action} (completed)", + ) return - self.logger.step(f"{self.log_prefix} Processing issue comment for issue {self.hook_data['issue']['number']}") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('issue_comment', 'pr_management', 'processing')} " + f"Processing issue comment for issue {self.hook_data['issue']['number']}", + ) self.logger.info(f"{self.log_prefix} Processing issue {self.hook_data['issue']['number']}") body: str = self.hook_data["comment"]["body"] if self.github_webhook.issue_url_for_welcome_msg in body: self.logger.debug(f"{self.log_prefix} Welcome message found in issue {pull_request.title}. 
Not processing") + # Log completion - task_status reflects the result of our action (skipping welcome message is acceptable) + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('issue_comment', 'pr_management', 'completed')} " + f"Processing issue comment for issue {self.hook_data['issue']['number']} (welcome message - skipped)", + ) return _user_commands: list[str] = [_cmd.strip("/") for _cmd in body.strip().splitlines() if _cmd.startswith("/")] if _user_commands: - self.logger.step(f"{self.log_prefix} Found {len(_user_commands)} user commands: {_user_commands}") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('issue_comment', 'pr_management', 'processing')} " + f"Found {len(_user_commands)} user commands: {_user_commands}", + ) user_login: str = self.hook_data["sender"]["login"] - for user_command in _user_commands: - self.logger.step(f"{self.log_prefix} Executing user command: /{user_command} by {user_login}") # type: ignore - await self.user_commands( - pull_request=pull_request, - command=user_command, - reviewed_user=user_login, - issue_comment_id=self.hook_data["comment"]["id"], + + # Execute all commands in parallel + if _user_commands: + tasks: list[Coroutine[Any, Any, Any] | Task[Any]] = [] + for user_command in _user_commands: + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} " + f"{format_task_fields('issue_comment', 'pr_management', 'processing')} " + f"Executing user command: /{user_command} by {user_login}", + ) + task = asyncio.create_task( + self.user_commands( + pull_request=pull_request, + command=user_command, + reviewed_user=user_login, + issue_comment_id=self.hook_data["comment"]["id"], + ) + ) + tasks.append(task) + + # Execute all commands concurrently + results = await asyncio.gather(*tasks, return_exceptions=True) + + # Log results and handle exceptions + for idx, result in enumerate(results): + user_command = _user_commands[idx] + if isinstance(result, Exception): + self.logger.error(f"{self.log_prefix} Command execution failed: /{user_command} - {result}") + else: + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('issue_comment', 'pr_management', 'completed')} " + f"Executed user command: /{user_command} by {user_login}", + ) + + # Log completion for main processing - task_status reflects the result of our action + if not _user_commands: + # No commands found, log completion + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('issue_comment', 'pr_management', 'completed')} " + f"Processing issue comment for issue {self.hook_data['issue']['number']} (no commands found)", + ) + else: + # Commands were processed, log completion + issue_num = self.hook_data["issue"]["number"] + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('issue_comment', 'pr_management', 'completed')} " + f"Processing issue comment for issue {issue_num} (processed {len(_user_commands)} commands)", ) async def user_commands( @@ -194,10 +258,12 @@ async def user_commands( wip_for_title: str = f"{WIP_STR.upper()}:" if remove: await self.labels_handler._remove_label(pull_request=pull_request, label=WIP_STR) - await asyncio.to_thread(pull_request.edit, title=pull_request.title.replace(wip_for_title, "")) + pr_title = await asyncio.to_thread(lambda: pull_request.title) + await asyncio.to_thread(pull_request.edit, title=pr_title.replace(wip_for_title, "")) else: await 
self.labels_handler._add_label(pull_request=pull_request, label=WIP_STR) - await asyncio.to_thread(pull_request.edit, title=f"{wip_for_title} {pull_request.title}") + pr_title = await asyncio.to_thread(lambda: pull_request.title) + await asyncio.to_thread(pull_request.edit, title=f"{wip_for_title} {pr_title}") elif _command == HOLD_LABEL_STR: if reviewed_user not in self.owners_file_handler.all_pull_request_approvers: @@ -240,7 +306,7 @@ async def _add_reviewer_by_user_comment(self, pull_request: PullRequest, reviewe reviewer = reviewer.strip("@") self.logger.info(f"{self.log_prefix} Adding reviewer {reviewer} by user comment") repo_contributors = list(await asyncio.to_thread(self.repository.get_contributors)) - self.logger.debug(f"Repo contributors are: {repo_contributors}") + self.logger.debug(f"{self.log_prefix} Repo contributors are: {repo_contributors}") for contributer in repo_contributors: if contributer.login == reviewer: @@ -266,7 +332,8 @@ async def process_cherry_pick_command( except Exception: _non_exits_target_branches_msg += f"Target branch `{_target_branch}` does not exist\n" self.logger.debug( - f"{self.log_prefix} Found target branches {_exits_target_branches} and not found {_non_exits_target_branches_msg}" + f"{self.log_prefix} Found target branches {_exits_target_branches} " + f"and not found {_non_exits_target_branches_msg}" ) if _non_exits_target_branches_msg: @@ -313,7 +380,7 @@ async def process_retest_command( PYTHON_MODULE_INSTALL_STR: self.runner_handler.run_install_python_module, CONVENTIONAL_TITLE_STR: self.runner_handler.run_conventional_title_check, } - self.logger.debug(f"Retest map is {_retests_to_func_map}") + self.logger.debug(f"{self.log_prefix} Retest map is {_retests_to_func_map}") if not _target_tests: msg = "No test defined to retest" @@ -341,8 +408,8 @@ async def process_retest_command( else: _not_supported_retests.append(_test) - self.logger.debug(f"Supported retests are {_supported_retests}") - self.logger.debug(f"Not supported retests are {_not_supported_retests}") + self.logger.debug(f"{self.log_prefix} Supported retests are {_supported_retests}") + self.logger.debug(f"{self.log_prefix} Not supported retests are {_not_supported_retests}") if _not_supported_retests: msg = f"No {' '.join(_not_supported_retests)} configured for this repository" @@ -351,7 +418,7 @@ async def process_retest_command( await asyncio.to_thread(pull_request.create_issue_comment, msg) if _supported_retests: - tasks: list[Union[Coroutine[Any, Any, Any], Task[Any]]] = [] + tasks: list[Coroutine[Any, Any, Any] | Task[Any]] = [] for _test in _supported_retests: self.logger.debug(f"{self.log_prefix} running retest {_test}") task = asyncio.create_task(_retests_to_func_map[_test](pull_request=pull_request)) diff --git a/webhook_server/libs/labels_handler.py b/webhook_server/libs/handlers/labels_handler.py similarity index 78% rename from webhook_server/libs/labels_handler.py rename to webhook_server/libs/handlers/labels_handler.py index 3493f1d70..f9aa59578 100644 --- a/webhook_server/libs/labels_handler.py +++ b/webhook_server/libs/handlers/labels_handler.py @@ -7,7 +7,7 @@ from github.Repository import Repository from timeout_sampler import TimeoutWatch -from webhook_server.libs.owners_files_handler import OwnersFileHandler +from webhook_server.libs.handlers.owners_files_handler import OwnersFileHandler from webhook_server.utils.constants import ( ADD_STR, APPROVE_STR, @@ -23,6 +23,7 @@ STATIC_LABELS_DICT, WIP_STR, ) +from webhook_server.utils.helpers import format_task_fields 
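
Nearly every hunk in these handler diffs threads `format_task_fields(...)` into the `logger.step` lines. The helper itself is not part of this diff; judging from its call sites it takes an event name, a task category, and a task status and renders them into a parseable log prefix. A minimal sketch of such a helper, assuming a simple bracketed key=value layout (the actual implementation in `webhook_server/utils/helpers.py` may format differently):

```python
# Hypothetical sketch only - parameter names and output layout are assumptions
# inferred from call sites such as format_task_fields("labels", "pr_management",
# "processing"); the real helper in webhook_server/utils/helpers.py is not shown here.
def format_task_fields(event_type: str, task_category: str, task_status: str) -> str:
    """Render structured task metadata for step-log lines."""
    return f"[event={event_type} category={task_category} status={task_status}]"


# Example: prints "[event=labels category=pr_management status=processing]"
print(format_task_fields("labels", "pr_management", "processing"))
```
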
if TYPE_CHECKING: from webhook_server.libs.github_api import GithubWebhook @@ -46,35 +47,79 @@ async def pull_request_labels_names(self, pull_request: PullRequest) -> list[str return [lb.name for lb in labels] async def _remove_label(self, pull_request: PullRequest, label: str) -> bool: - self.logger.step(f"{self.log_prefix} Removing label '{label}' from PR") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('labels', 'pr_management', 'processing')} " + f"Removing label '{label}' from PR", + ) self.logger.debug(f"{self.log_prefix} Removing label {label}") try: if await self.label_exists_in_pull_request(pull_request=pull_request, label=label): self.logger.info(f"{self.log_prefix} Removing label {label}") await asyncio.to_thread(pull_request.remove_from_labels, label) - return await self.wait_for_label(pull_request=pull_request, label=label, exists=False) + success = await self.wait_for_label(pull_request=pull_request, label=label, exists=False) + # Log completion - task_status reflects the result of our action + if success: + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('labels', 'pr_management', 'completed')} " + f"Removed label '{label}' from PR", + ) + else: + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('labels', 'pr_management', 'failed')} " + f"Failed to remove label '{label}' from PR (timeout waiting for removal)", + ) + return success except Exception as exp: self.logger.debug(f"{self.log_prefix} Failed to remove {label} label. Exception: {exp}") + # Log failure - task_status reflects the result of our action + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('labels', 'pr_management', 'failed')} " + f"Failed to remove label '{label}' from PR (exception: {exp})", + ) return False + # Label doesn't exist - this is an acceptable outcome (we don't check first to save API calls) + # Log completion - task_status reflects the result of our action (attempting to remove completed successfully) self.logger.debug(f"{self.log_prefix} Label {label} not found and cannot be removed") + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('labels', 'pr_management', 'completed')} " + f"Removing label '{label}' from PR (label does not exist - acceptable)", + ) return False async def _add_label(self, pull_request: PullRequest, label: str) -> None: label = label.strip() - self.logger.step(f"{self.log_prefix} Adding label '{label}' to PR") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('labels', 'pr_management', 'processing')} " + f"Adding label '{label}' to PR", + ) self.logger.debug(f"{self.log_prefix} Adding label {label}") if len(label) > 49: self.logger.debug(f"{label} is too long, not adding.") + # Log completion - task_status reflects the result of our action + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('labels', 'pr_management', 'completed')} " + f"Adding label '{label}' to PR (label too long - skipped)", + ) return if await self.label_exists_in_pull_request(pull_request=pull_request, label=label): self.logger.debug(f"{self.log_prefix} Label {label} already assign") + # Log completion - task_status reflects the result of our action (label already exists is acceptable) + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('labels', 
'pr_management', 'completed')} " + f"Adding label '{label}' to PR (label already exists - acceptable)", + ) return if label in STATIC_LABELS_DICT: self.logger.info(f"{self.log_prefix} Adding pull request label {label}") await asyncio.to_thread(pull_request.add_to_labels, label) + # Log completion - task_status reflects the result of our action + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('labels', 'pr_management', 'completed')} " + f"Added label '{label}' to PR", + ) return color = self._get_label_color(label) @@ -92,6 +137,11 @@ async def _add_label(self, pull_request: PullRequest, label: str) -> None: self.logger.info(f"{self.log_prefix} Adding pull request label {label}") await asyncio.to_thread(pull_request.add_to_labels, label) await self.wait_for_label(pull_request=pull_request, label=label, exists=True) + # Log completion - task_status reflects the result of our action + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('labels', 'pr_management', 'completed')} " + f"Added label '{label}' to PR", + ) async def wait_for_label(self, pull_request: PullRequest, label: str, exists: bool) -> bool: self.logger.debug(f"{self.log_prefix} waiting for label {label} to {'exists' if exists else 'not exists'}") @@ -116,7 +166,7 @@ def _get_label_color(self, label: str) -> str: size_name = label[len(SIZE_LABEL_PREFIX) :] thresholds = self._get_custom_pr_size_thresholds() - for threshold, label_name, color_hex in thresholds: + for _threshold, label_name, color_hex in thresholds: if label_name == size_name: return color_hex @@ -211,7 +261,10 @@ def get_size(self, pull_request: PullRequest) -> str: async def add_size_label(self, pull_request: PullRequest) -> None: """Add a size label to the pull request based on its additions and deletions.""" - self.logger.step(f"{self.log_prefix} Calculating and applying PR size label") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('labels', 'pr_management', 'processing')} " + f"Calculating and applying PR size label", + ) size_label = self.get_size(pull_request=pull_request) self.logger.debug(f"{self.log_prefix} size label is {size_label}") if not size_label: @@ -232,7 +285,10 @@ async def add_size_label(self, pull_request: PullRequest) -> None: await self._remove_label(pull_request=pull_request, label=exists_size_label[0]) await self._add_label(pull_request=pull_request, label=size_label) - self.logger.step(f"{self.log_prefix} Applied size label '{size_label}' to PR") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('labels', 'pr_management', 'completed')} " + f"Applied size label '{size_label}' to PR", + ) async def label_by_user_comment( self, @@ -328,7 +384,7 @@ async def manage_reviewed_by_label( f"{self.log_prefix} PR {pull_request.number} got unsupported review state: {review_state}" ) - def wip_or_hold_lables_exists(self, labels: list[str]) -> str: + def wip_or_hold_labels_exists(self, labels: list[str]) -> str: failure_output = "" if HOLD_LABEL_STR in labels: diff --git a/webhook_server/libs/owners_files_handler.py b/webhook_server/libs/handlers/owners_files_handler.py similarity index 70% rename from webhook_server/libs/owners_files_handler.py rename to webhook_server/libs/handlers/owners_files_handler.py index d90645c0c..79a37026f 100644 --- a/webhook_server/libs/owners_files_handler.py +++ b/webhook_server/libs/handlers/owners_files_handler.py @@ -1,6 
+1,7 @@ import asyncio +from collections.abc import Coroutine from pathlib import Path -from typing import TYPE_CHECKING, Any, Coroutine +from typing import TYPE_CHECKING, Any import yaml from asyncstdlib import functools @@ -8,10 +9,12 @@ from github.GithubException import GithubException from github.NamedUser import NamedUser from github.PaginatedList import PaginatedList +from github.Permissions import Permissions from github.PullRequest import PullRequest from github.Repository import Repository -from webhook_server.utils.constants import COMMAND_ADD_ALLOWED_USER_STR +from webhook_server.utils.constants import COMMAND_ADD_ALLOWED_USER_STR, ROOT_APPROVERS_KEY +from webhook_server.utils.helpers import format_task_fields if TYPE_CHECKING: from webhook_server.libs.github_api import GithubWebhook @@ -25,14 +28,29 @@ def __init__(self, github_webhook: "GithubWebhook") -> None: self.repository: Repository = self.github_webhook.repository async def initialize(self, pull_request: PullRequest) -> "OwnersFileHandler": - self.changed_files = await self.list_changed_files(pull_request=pull_request) - self.all_repository_approvers_and_reviewers = await self.get_all_repository_approvers_and_reviewers( - pull_request=pull_request + """Initialize handler with PR data (optimized with parallel operations). + + Phase 1: Fetch independent data in parallel (changed files + OWNERS data) + Phase 2: Process derived data in parallel (approvers + reviewers) + """ + # Phase 1: Parallel data fetching - independent GitHub API operations + self.changed_files, self.all_repository_approvers_and_reviewers = await asyncio.gather( + self.list_changed_files(pull_request=pull_request), + self.get_all_repository_approvers_and_reviewers(pull_request=pull_request), + ) + + # Phase 2: Parallel data processing - all depend on phase 1 but independent of each other + ( + self.all_repository_approvers, + self.all_repository_reviewers, + self.all_pull_request_approvers, + self.all_pull_request_reviewers, + ) = await asyncio.gather( + self.get_all_repository_approvers(), + self.get_all_repository_reviewers(), + self.get_all_pull_request_approvers(), + self.get_all_pull_request_reviewers(), ) - self.all_repository_approvers = await self.get_all_repository_approvers() - self.all_repository_reviewers = await self.get_all_repository_reviewers() - self.all_pull_request_approvers = await self.get_all_pull_request_approvers() - self.all_pull_request_reviewers = await self.get_all_pull_request_reviewers() return self @@ -99,7 +117,6 @@ async def _get_file_content(self, content_path: str, pull_request: PullRequest) return _path, content_path - @functools.lru_cache async def get_all_repository_approvers_and_reviewers(self, pull_request: PullRequest) -> dict[str, dict[str, Any]]: # Dictionary mapping OWNERS file paths to their approvers and reviewers _owners: dict[str, dict[str, Any]] = {} @@ -169,7 +186,7 @@ async def get_all_repository_reviewers(self) -> list[str]: async def get_all_pull_request_approvers(self) -> list[str]: _approvers: list[str] = [] - changed_files = await self.owners_data_for_changed_files() + changed_files = await self.owners_data_for_changed_files for list_of_approvers in changed_files.values(): for _approver in list_of_approvers.get("approvers", []): @@ -182,7 +199,7 @@ async def get_all_pull_request_approvers(self) -> list[str]: async def get_all_pull_request_reviewers(self) -> list[str]: _reviewers: list[str] = [] - changed_files = await self.owners_data_for_changed_files() + changed_files = await 
self.owners_data_for_changed_files for list_of_reviewers in changed_files.values(): for _reviewer in list_of_reviewers.get("reviewers", []): @@ -190,16 +207,22 @@ async def get_all_pull_request_reviewers(self) -> list[str]: _reviewers = list(set(_reviewers)) _reviewers.sort() - self.logger.debug(f"Pull request reviewers are: {_reviewers}") + self.logger.debug(f"{self.log_prefix} Pull request reviewers are: {_reviewers}") return _reviewers + @functools.cached_property async def owners_data_for_changed_files(self) -> dict[str, dict[str, Any]]: + """Get OWNERS data for directories containing changed files. + + Uses @functools.cached_property to cache results and avoid redundant computation + of folder matching logic across multiple calls during initialization. + """ self._ensure_initialized() data: dict[str, dict[str, Any]] = {} changed_folders = {Path(cf).parent for cf in self.changed_files} - self.logger.debug(f"Changed folders: {changed_folders}") + self.logger.debug(f"{self.log_prefix} Changed folders: {changed_folders}") changed_folder_match: list[Path] = [] @@ -219,7 +242,7 @@ async def owners_data_for_changed_files(self) -> dict[str, dict[str, Any]]: f"{self.log_prefix} Matched changed folder: {changed_folder} with owners dir: {_owners_dir}" ) if require_root_approvers is None: - require_root_approvers = owners_data.get("root-approvers", True) + require_root_approvers = owners_data.get(ROOT_APPROVERS_KEY, True) if require_root_approvers or require_root_approvers is None: self.logger.debug(f"{self.log_prefix} require root_approvers") @@ -231,43 +254,78 @@ async def owners_data_for_changed_files(self) -> dict[str, dict[str, Any]]: if _folder == _changed_path or _changed_path in _folder.parents: continue else: - self.logger.debug(f"Adding root approvers for {_folder}") + self.logger.debug(f"{self.log_prefix} Adding root approvers for {_folder}") data["."] = self.all_repository_approvers_and_reviewers.get(".", {}) break - self.logger.debug(f"Final owners data for changed files: {data}") + self.logger.debug(f"{self.log_prefix} Final owners data for changed files: {data}") + return data async def assign_reviewers(self, pull_request: PullRequest) -> None: self._ensure_initialized() - self.logger.step(f"{self.log_prefix} Starting reviewer assignment based on OWNERS files") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('owners', 'pr_management', 'started')} " + f"Starting reviewer assignment based on OWNERS files", + ) self.logger.info(f"{self.log_prefix} Assign reviewers") _to_add: list[str] = list(set(self.all_pull_request_reviewers)) self.logger.debug(f"{self.log_prefix} Reviewers to add: {', '.join(_to_add)}") if _to_add: - self.logger.step(f"{self.log_prefix} Assigning {len(_to_add)} reviewers to PR") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('owners', 'pr_management', 'processing')} " + f"Assigning {len(_to_add)} reviewers to PR", + ) else: - self.logger.step(f"{self.log_prefix} No reviewers to assign") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('owners', 'pr_management', 'processing')} " + f"No reviewers to assign", + ) + # Log completion - task_status reflects the result of our action (no reviewers to assign is acceptable) + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('owners', 'pr_management', 'completed')} " + f"No reviewers to assign (completed)", + ) 
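+            # Nothing to assign: the terminal 'completed' step above gives log
+            # scrapers a matching end state for the 'started' task, then we exit.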
return + assigned_count = 0 + failed_count = 0 for reviewer in _to_add: if reviewer != pull_request.user.login: self.logger.debug(f"{self.log_prefix} Adding reviewer {reviewer}") try: await asyncio.to_thread(pull_request.create_review_request, [reviewer]) - self.logger.step(f"{self.log_prefix} Successfully assigned reviewer {reviewer}") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('owners', 'pr_management', 'processing')} " + f"Successfully assigned reviewer {reviewer}", + ) + assigned_count += 1 except GithubException as ex: - self.logger.step(f"{self.log_prefix} Failed to assign reviewer {reviewer}") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('owners', 'pr_management', 'failed')} " + f"Failed to assign reviewer {reviewer}", + ) self.logger.debug(f"{self.log_prefix} Failed to add reviewer {reviewer}. {ex}") await asyncio.to_thread( pull_request.create_issue_comment, f"{reviewer} can not be added as reviewer. {ex}" ) - - self.logger.step(f"{self.log_prefix} Reviewer assignment completed") # type: ignore + failed_count += 1 + + # Log completion - task_status reflects the result of our action + if failed_count > 0: + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('owners', 'pr_management', 'failed')} " + f"Assigned {assigned_count} reviewers to PR ({failed_count} failed)", + ) + else: + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('owners', 'pr_management', 'completed')} " + f"Assigned {assigned_count} reviewers to PR", + ) async def is_user_valid_to_run_commands(self, pull_request: PullRequest, reviewed_user: str) -> bool: self._ensure_initialized() @@ -283,7 +341,7 @@ async def is_user_valid_to_run_commands(self, pull_request: PullRequest, reviewe - {"\n - ".join(allowed_user_to_approve)} """ valid_users = await self.valid_users_to_run_commands - self.logger.debug(f"Valid users to run commands: {valid_users}") + self.logger.debug(f"{self.log_prefix} Valid users to run commands: {valid_users}") if reviewed_user not in valid_users: for comment in [ @@ -319,23 +377,31 @@ async def valid_users_to_run_commands(self) -> set[str]: async def get_all_repository_contributors(self) -> list[str]: contributors = await self.repository_contributors - return [val.login for val in contributors] + return await asyncio.to_thread(lambda: [val.login for val in contributors]) async def get_all_repository_collaborators(self) -> list[str]: collaborators = await self.repository_collaborators - return [val.login for val in collaborators] + return await asyncio.to_thread(lambda: [val.login for val in collaborators]) async def get_all_repository_maintainers(self) -> list[str]: maintainers: list[str] = [] - for user in await self.repository_collaborators: - permissions = user.permissions - self.logger.debug(f"User {user.login} permissions: {permissions}") + # Fix #1: Convert PaginatedList to list in thread pool to avoid blocking during iteration + collaborators = await self.repository_collaborators + collaborators_list = await asyncio.to_thread(lambda: list(collaborators)) + + for user in collaborators_list: + # Fix #2: Wrap permissions access in thread pool (property makes blocking API call) + def get_user_permissions(u: NamedUser = user) -> Permissions: + return u.permissions + + permissions = await asyncio.to_thread(get_user_permissions) + self.logger.debug(f"{self.log_prefix} User {user.login} permissions: 
{permissions}") if permissions.admin or permissions.maintain: maintainers.append(user.login) - self.logger.debug(f"Maintainers: {maintainers}") + self.logger.debug(f"{self.log_prefix} Maintainers: {maintainers}") return maintainers @functools.cached_property diff --git a/webhook_server/libs/pull_request_handler.py b/webhook_server/libs/handlers/pull_request_handler.py similarity index 61% rename from webhook_server/libs/pull_request_handler.py rename to webhook_server/libs/handlers/pull_request_handler.py index 34151e28f..d0d73dd78 100644 --- a/webhook_server/libs/pull_request_handler.py +++ b/webhook_server/libs/handlers/pull_request_handler.py @@ -1,15 +1,17 @@ from __future__ import annotations import asyncio -from typing import TYPE_CHECKING, Any, Coroutine +from collections.abc import Coroutine +from typing import TYPE_CHECKING, Any +from github import GithubException from github.PullRequest import PullRequest from github.Repository import Repository -from webhook_server.libs.check_run_handler import CheckRunHandler -from webhook_server.libs.labels_handler import LabelsHandler -from webhook_server.libs.owners_files_handler import OwnersFileHandler -from webhook_server.libs.runner_handler import RunnerHandler +from webhook_server.libs.handlers.check_run_handler import CheckRunHandler +from webhook_server.libs.handlers.labels_handler import LabelsHandler +from webhook_server.libs.handlers.owners_files_handler import OwnersFileHandler +from webhook_server.libs.handlers.runner_handler import RunnerHandler from webhook_server.utils.constants import ( APPROVED_BY_LABEL_PREFIX, AUTOMERGE_LABEL_STR, @@ -34,13 +36,14 @@ VERIFIED_LABEL_STR, WIP_STR, ) +from webhook_server.utils.helpers import format_task_fields if TYPE_CHECKING: from webhook_server.libs.github_api import GithubWebhook class PullRequestHandler: - def __init__(self, github_webhook: "GithubWebhook", owners_file_handler: OwnersFileHandler): + def __init__(self, github_webhook: GithubWebhook, owners_file_handler: OwnersFileHandler): self.github_webhook = github_webhook self.owners_file_handler = owners_file_handler @@ -60,7 +63,10 @@ def __init__(self, github_webhook: "GithubWebhook", owners_file_handler: OwnersF async def process_pull_request_webhook_data(self, pull_request: PullRequest) -> None: hook_action: str = self.hook_data["action"] - self.logger.step(f"{self.log_prefix} Starting pull request processing: action={hook_action}") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('pr_handler', 'pr_management', 'started')} " + f"Starting pull request processing: action={hook_action}", + ) self.logger.info(f"{self.log_prefix} hook_action is: {hook_action}") self.logger.debug(f"{self.log_prefix} pull_request: {pull_request.title} ({pull_request.number})") @@ -71,9 +77,18 @@ async def process_pull_request_webhook_data(self, pull_request: PullRequest) -> if self.github_webhook.conventional_title and self.hook_data["changes"].get("title"): self.logger.info(f"{self.log_prefix} PR title changed, running conventional title check") await self.runner_handler.run_conventional_title_check(pull_request=pull_request) + # Log completion - task_status reflects the result of our action + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('pr_handler', 'pr_management', 'completed')} " + f"Starting pull request processing: action={hook_action} (completed)", + ) + return if hook_action in ("opened", "reopened", "ready_for_review"): - 
self.logger.step(f"{self.log_prefix} Processing PR {hook_action} event: initializing new pull request") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('pr_handler', 'pr_management', 'processing')} " + f"Processing PR {hook_action} event: initializing new pull request", + ) tasks: list[Coroutine[Any, Any, Any]] = [] if hook_action in ("opened", "ready_for_review"): @@ -91,9 +106,18 @@ async def process_pull_request_webhook_data(self, pull_request: PullRequest) -> # Set auto merge only after all initialization of a new PR is done. await self.set_pull_request_automerge(pull_request=pull_request) + # Log completion - task_status reflects the result of our action + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('pr_handler', 'pr_management', 'completed')} " + f"Starting pull request processing: action={hook_action} (completed)", + ) + return if hook_action == "synchronize": - self.logger.step(f"{self.log_prefix} Processing PR synchronize event: handling new commits") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('pr_handler', 'pr_management', 'processing')} " + f"Processing PR synchronize event: handling new commits", + ) sync_tasks: list[Coroutine[Any, Any, Any]] = [] sync_tasks.append(self.process_opened_or_synchronize_pull_request(pull_request=pull_request)) @@ -104,16 +128,29 @@ async def process_pull_request_webhook_data(self, pull_request: PullRequest) -> for result in results: if isinstance(result, Exception): self.logger.error(f"{self.log_prefix} Async task failed: {result}") + # Log completion - task_status reflects the result of our action + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('pr_handler', 'pr_management', 'completed')} " + f"Starting pull request processing: action={hook_action} (completed)", + ) + return if hook_action == "closed": - self.logger.step(f"{self.log_prefix} Processing PR closed event: cleaning up resources") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('pr_handler', 'pr_management', 'processing')} " + f"Processing PR closed event: cleaning up resources", + ) await self.close_issue_for_merged_or_closed_pr(pull_request=pull_request, hook_action=hook_action) await self.delete_remote_tag_for_merged_or_closed_pr(pull_request=pull_request) if is_merged := pull_request_data.get("merged", False): - self.logger.step(f"{self.log_prefix} PR was merged: processing post-merge tasks") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('pr_handler', 'pr_management', 'processing')} " + f"PR was merged: processing post-merge tasks", + ) self.logger.info(f"{self.log_prefix} PR is merged") - for _label in pull_request.labels: + labels = await asyncio.to_thread(lambda: list(pull_request.labels)) + for _label in labels: _label_name = _label.name if _label_name.startswith(CHERRY_PICK_LABEL_PREFIX): await self.runner_handler.cherry_pick( @@ -128,6 +165,12 @@ async def process_pull_request_webhook_data(self, pull_request: PullRequest) -> ) await self.label_all_opened_pull_requests_merge_state_after_merged() + # Log completion - task_status reflects the result of our action + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('pr_handler', 'pr_management', 'completed')} " + f"Starting pull request processing: action={hook_action} 
(completed)", + ) + return if hook_action in ("labeled", "unlabeled"): _check_for_merge: bool = False @@ -136,13 +179,22 @@ async def process_pull_request_webhook_data(self, pull_request: PullRequest) -> labeled = self.hook_data["label"]["name"] labeled_lower = labeled.lower() - self.logger.step(f"{self.log_prefix} Processing label {hook_action} event: {labeled}") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('pr_handler', 'pr_management', 'processing')} " + f"Processing label {hook_action} event: {labeled}", + ) if labeled_lower == CAN_BE_MERGED_STR: + # Log completion - task_status reflects the result of our action (skipping is acceptable) + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('pr_handler', 'pr_management', 'completed')} " + f"Starting pull request processing: action={hook_action} (skipped - can-be-merged label)", + ) return self.logger.info(f"{self.log_prefix} PR {pull_request.number} {hook_action} with {labeled}") - self.logger.debug(f"PR labels are {pull_request.labels}") + labels = await asyncio.to_thread(lambda: list(pull_request.labels)) + self.logger.debug(f"{self.log_prefix} PR labels are {labels}") _split_label = labeled.split(LABELS_SEPARATOR, 1) @@ -161,11 +213,15 @@ async def process_pull_request_webhook_data(self, pull_request: PullRequest) -> + self.owners_file_handler.root_approvers ): _check_for_merge = True - self.logger.debug(f"PR approved by label action, will check for merge. user: {_user}") + self.logger.debug( + f"{self.log_prefix} PR approved by label action, will check for merge. user: {_user}" + ) if self.github_webhook.verified_job and labeled_lower == VERIFIED_LABEL_STR: _check_for_merge = True - self.logger.debug(f"PR verified label action, will check for merge. label: {labeled_lower}") + self.logger.debug( + f"{self.log_prefix} PR verified label action, will check for merge. label: {labeled_lower}" + ) if action_labeled: await self.check_run_handler.set_verify_check_success() @@ -174,10 +230,22 @@ async def process_pull_request_webhook_data(self, pull_request: PullRequest) -> if labeled_lower in (WIP_STR, HOLD_LABEL_STR, AUTOMERGE_LABEL_STR): _check_for_merge = True - self.logger.debug(f"PR has {labeled_lower} label, will check for merge.") + self.logger.debug(f"{self.log_prefix} PR has {labeled_lower} label, will check for merge.") if _check_for_merge: await self.check_if_can_be_merged(pull_request=pull_request) + # Log completion - task_status reflects the result of our action + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('pr_handler', 'pr_management', 'completed')} " + f"Starting pull request processing: action={hook_action} (completed)", + ) + return + + # Log completion for any unhandled actions - task_status reflects the result of our action + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('pr_handler', 'pr_management', 'completed')} " + f"Starting pull request processing: action={hook_action} (no action handler - completed)", + ) async def set_wip_label_based_on_title(self, pull_request: PullRequest) -> None: if pull_request.title.lower().startswith(f"{WIP_STR}:"): @@ -198,15 +266,19 @@ def _prepare_welcome_comment(self) -> str: is_auto_verified = self.github_webhook.parent_committer in self.github_webhook.auto_verified_and_merged_users auto_verified_note = "" if is_auto_verified: - auto_verified_note = """ - -> **Note**: You are an auto-verified user. 
Your PRs will be automatically verified and may be auto-merged when all requirements are met.
-"""
+            auto_verified_note = (
+                "\n"
+                "> **Note**: You are an auto-verified user. Your PRs will be automatically verified "
+                "and may be auto-merged when all requirements are met.\n"
+            )

         # Check if issue creation is enabled
         issue_creation_note = ""
         if self.github_webhook.create_issue_for_new_pr:
-            issue_creation_note = "* **Issue Creation**: A tracking issue is created for this PR and will be closed when the PR is merged or closed\n"
+            issue_creation_note = (
+                "* **Issue Creation**: A tracking issue is created for this PR "
+                "and will be closed when the PR is merged or closed\n"
+            )
         else:
             issue_creation_note = "* **Issue Creation**: Disabled for this repository\n"
@@ -218,9 +290,13 @@ def _prepare_welcome_comment(self) -> str:

 This pull request will be automatically processed with the following features:{auto_verified_note}

 ### 🔄 Automatic Actions
 * **Reviewer Assignment**: Reviewers are automatically assigned based on the OWNERS file in the repository root
 * **Size Labeling**: PR size labels (XS, S, M, L, XL, XXL) are automatically applied based on changes
 {issue_creation_note}* **Pre-commit Checks**: [pre-commit](https://pre-commit.ci/) runs automatically if `.pre-commit-config.yaml` exists
 * **Branch Labeling**: Branch-specific labels are applied to track the target branch
 * **Auto-verification**: Auto-verified users have their PRs automatically marked as verified
@@ -344,18 +420,33 @@ async def label_all_opened_pull_requests_merge_state_after_merged(self) -> None:
         self.logger.info(f"{self.log_prefix} Sleep for {time_sleep} seconds before getting all opened PRs")
         await asyncio.sleep(time_sleep)

-        for pull_request in self.repository.get_pulls(state="open"):
+        pulls = await asyncio.to_thread(lambda: list(self.repository.get_pulls(state="open")))
+        for pull_request in pulls:
             self.logger.info(f"{self.log_prefix} check label pull request after merge")
             await self.label_pull_request_by_merge_state(pull_request=pull_request)

     async def delete_remote_tag_for_merged_or_closed_pr(self, pull_request: PullRequest) -> None:
+        self.logger.step(  # type: ignore[attr-defined]
+            f"{self.log_prefix} {format_task_fields('tag_deletion', 'pr_management', 'processing')} "
+            f"Deleting remote tag for PR #{pull_request.number}",
+        )
         self.logger.debug(f"{self.log_prefix} Checking if need to delete remote tag for {pull_request.number}")
         if not self.github_webhook.build_and_push_container:
             self.logger.info(f"{self.log_prefix} repository do not have container configured")
+            # Log completion - task_status reflects the result of our action (skipping is acceptable)
+            self.logger.step(  # type: ignore[attr-defined]
+                f"{self.log_prefix} {format_task_fields('tag_deletion', 'pr_management', 'completed')} "
+                f"Deleting remote tag for PR #{pull_request.number} (skipped - container not configured)",
+            )
             return

         repository_full_tag = self.github_webhook.container_repository_and_tag(pull_request=pull_request)
         if not repository_full_tag:
+            # Log completion - task_status reflects the result of our action (no tag to delete)
+            self.logger.step(  # type: 
ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('tag_deletion', 'pr_management', 'completed')} " + f"Deleting remote tag for PR #{pull_request.number} (no tag configured)", + ) return pr_tag = repository_full_tag.split(":")[-1] @@ -367,9 +458,157 @@ async def delete_remote_tag_for_merged_or_closed_pr(self, pull_request: PullRequ f"{self.log_prefix} No registry host found in " f"{self.github_webhook.container_repository}; skipping tag deletion" ) + # Log completion - task_status reflects the result of our action (skipping is acceptable) + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('tag_deletion', 'pr_management', 'completed')} " + f"Deleting remote tag for PR #{pull_request.number} (skipped - no registry host)", + ) return registry_url = registry_info[0] + + # Check if this is GitHub Container Registry (GHCR) + if registry_url == "ghcr.io": + # Use GitHub Packages API for GHCR + await self._delete_ghcr_tag_via_github_api( + pull_request=pull_request, repository_full_tag=repository_full_tag, pr_tag=pr_tag + ) + else: + # Use regctl for other registries (Quay, Docker Hub, etc.) + await self._delete_registry_tag_via_regctl( + pull_request=pull_request, + repository_full_tag=repository_full_tag, + pr_tag=pr_tag, + registry_url=registry_url, + ) + + async def _delete_ghcr_tag_via_github_api( + self, pull_request: PullRequest, repository_full_tag: str, pr_tag: str + ) -> None: + """Delete GHCR tag using GitHub Packages REST API.""" + if not self.github_webhook.github_api or not self.github_webhook.token: + # Log failure - task_status reflects the result of our action + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('tag_deletion', 'pr_management', 'failed')} " + f"Failed to delete tag: {repository_full_tag} (GitHub API not available)", + ) + self.logger.error(f"{self.log_prefix} GitHub API or token not available for tag deletion") + return + + # Extract organization and package name from container repository + # Format: ghcr.io/org/package-name -> org, package-name + # Format: ghcr.io/org/services/api-server -> org, services/api-server + registry_info = self.github_webhook.container_repository.split("/") + if len(registry_info) < 3: + # Log failure - task_status reflects the result of our action + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('tag_deletion', 'pr_management', 'failed')} " + f"Failed to delete tag: {repository_full_tag} (invalid repository format)", + ) + self.logger.error( + f"{self.log_prefix} Invalid container repository format: {self.github_webhook.container_repository}" + ) + return + + owner_name = registry_info[1] + # Join all segments after the owner to support nested paths + package_name = "/".join(registry_info[2:]) + + try: + package_api_base: str | None = None + versions: list[dict[str, Any]] | None = None + + # GHCR packages can live under organisations *and* personal scopes - try both. 
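+            # Probed endpoints (GitHub Packages REST API):
+            #   GET /orgs/{org}/packages/container/{package}/versions
+            #   GET /users/{username}/packages/container/{package}/versions
+            # A 404 from one scope simply means the package lives under the other.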
+ for scope in ("orgs", "users"): + candidate_base = f"/{scope}/{owner_name}/packages/container/{package_name}" + try: + _, versions = await asyncio.to_thread( + self.github_webhook.github_api.requester.requestJsonAndCheck, + "GET", + f"{candidate_base}/versions", + ) + package_api_base = candidate_base + break + except GithubException as ex: + if ex.status == 404: + continue + raise + + if not versions or not package_api_base: + # Log completion - task_status reflects the result of our action (package not found is acceptable) + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('tag_deletion', 'pr_management', 'completed')} " + f"Deleting remote tag for PR #{pull_request.number} (package not found)", + ) + self.logger.warning( + f"{self.log_prefix} Package {package_name} not found for owner {owner_name} on GHCR" + ) + return + # Find version with matching tag + version_to_delete_id: int | None = None + for version in versions: + # Check metadata.tags for the tag we're looking for + metadata = version.get("metadata", {}) + container_metadata = metadata.get("container", {}) + version_tags = container_metadata.get("tags", []) + if pr_tag in version_tags: + version_to_delete_id = version["id"] + break + + if not version_to_delete_id: + # Log completion - task_status reflects the result of our action (tag not found is acceptable) + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('tag_deletion', 'pr_management', 'completed')} " + f"Deleting remote tag for PR #{pull_request.number} (tag not found in package)", + ) + self.logger.warning(f"{self.log_prefix} Tag {pr_tag} not found in package {package_name} versions") + return + + # Delete the package version + # DELETE /{scope}/{owner}/packages/{package_type}/{package_name}/versions/{package_version_id} + delete_url = f"{package_api_base}/versions/{version_to_delete_id}" + try: + await asyncio.to_thread( + self.github_webhook.github_api.requester.requestJsonAndCheck, "DELETE", delete_url + ) + except GithubException as ex: + if ex.status == 404: + # Version already deleted or doesn't exist - treat as success + self.logger.warning( + f"{self.log_prefix} Package version {version_to_delete_id} not found " + "(may have been already deleted)" + ) + else: + raise + + await asyncio.to_thread( + pull_request.create_issue_comment, f"Successfully removed PR tag: {repository_full_tag}." 
+            )
+            # Log completion - task_status reflects the result of our action
+            self.logger.step(  # type: ignore[attr-defined]
+                f"{self.log_prefix} {format_task_fields('tag_deletion', 'pr_management', 'completed')} "
+                f"Deleted remote tag: {repository_full_tag}",
+            )
+
+        except Exception:
+            # A single handler suffices: the former GithubException branch was
+            # identical to this one. Log failure - task_status reflects the result of our action
+            self.logger.step(  # type: ignore[attr-defined]
+                f"{self.log_prefix} {format_task_fields('tag_deletion', 'pr_management', 'failed')} "
+                f"Failed to delete tag: {repository_full_tag}",
+            )
+            self.logger.exception(f"{self.log_prefix} Failed to delete GHCR tag: {repository_full_tag}")
+
+    async def _delete_registry_tag_via_regctl(
+        self, pull_request: PullRequest, repository_full_tag: str, pr_tag: str, registry_url: str
+    ) -> None:
+        """Delete registry tag using regctl (for non-GHCR registries like Quay, Docker Hub)."""
         reg_login_cmd = (
             f"regctl registry login {registry_url} "
             f"-u {self.github_webhook.container_repository_username} "
@@ -386,24 +625,46 @@
         if rc and out:
             tag_del_cmd = f"regctl tag delete {repository_full_tag}"
-            rc, _, _ = await self.runner_handler.run_podman_command(command=tag_del_cmd)
+            rc, del_out, del_err = await self.runner_handler.run_podman_command(command=tag_del_cmd)

             if rc:
                 await asyncio.to_thread(
                     pull_request.create_issue_comment, f"Successfully removed PR tag: {repository_full_tag}."
                 )
+                # Log completion - task_status reflects the result of our action
+                self.logger.step(  # type: ignore[attr-defined]
+                    f"{self.log_prefix} {format_task_fields('tag_deletion', 'pr_management', 'completed')} "
+                    f"Deleted remote tag: {repository_full_tag}",
+                )
+            else:
+                # Log failure - task_status reflects the result of our action
+                self.logger.step(  # type: ignore[attr-defined]
+                    f"{self.log_prefix} {format_task_fields('tag_deletion', 'pr_management', 'failed')} "
+                    f"Failed to delete tag: {repository_full_tag}",
+                )
                 self.logger.error(
-                    f"{self.log_prefix} Failed to delete tag: {repository_full_tag}. OUT:{out}. ERR:{err}"
+                    f"{self.log_prefix} Failed to delete tag: {repository_full_tag}. "
+                    f"OUT:{del_out}. ERR:{del_err}"
+                )
         else:
+            # Log completion - task_status reflects the result of our action (tag not found is acceptable)
+            self.logger.step(  # type: ignore[attr-defined]
+                f"{self.log_prefix} {format_task_fields('tag_deletion', 'pr_management', 'completed')} "
+                f"Deleting remote tag for PR #{pull_request.number} (tag not found in registry)",
+            )
             self.logger.warning(
-                f"{self.log_prefix} {pr_tag} tag not found in registry {self.github_webhook.container_repository}. "
+                f"{self.log_prefix} {pr_tag} tag not found in registry "
+                f"{self.github_webhook.container_repository}. "
+                f"OUT:{out}. 
ERR:{err}" ) finally: await self.runner_handler.run_podman_command(command="regctl registry logout") else: + # Log failure - task_status reflects the result of our action + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('tag_deletion', 'pr_management', 'failed')} " + f"Failed to delete tag: {repository_full_tag} (registry login failed)", + ) await asyncio.to_thread( pull_request.create_issue_comment, f"Failed to delete tag: {repository_full_tag}. Please delete it manually.", @@ -423,10 +684,16 @@ async def close_issue_for_merged_or_closed_pr(self, pull_request: PullRequest, h break async def process_opened_or_synchronize_pull_request(self, pull_request: PullRequest) -> None: - self.logger.step(f"{self.log_prefix} Starting PR processing workflow") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('pr_handler', 'pr_management', 'started')} " + f"Starting PR processing workflow", + ) # Stage 1: Initial setup and check queue tasks - self.logger.step(f"{self.log_prefix} Stage: Initial setup and check queuing") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('pr_handler', 'pr_management', 'processing')} " + f"Stage: Initial setup and check queuing", + ) setup_tasks: list[Coroutine[Any, Any, Any]] = [] setup_tasks.append(self.owners_file_handler.assign_reviewers(pull_request=pull_request)) @@ -449,17 +716,24 @@ async def process_opened_or_synchronize_pull_request(self, pull_request: PullReq if self.github_webhook.conventional_title: setup_tasks.append(self.check_run_handler.set_conventional_title_queued()) - self.logger.step(f"{self.log_prefix} Executing setup tasks") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('pr_handler', 'pr_management', 'processing')} Executing setup tasks" + ) setup_results = await asyncio.gather(*setup_tasks, return_exceptions=True) for result in setup_results: if isinstance(result, Exception): self.logger.error(f"{self.log_prefix} Setup task failed: {result}") - self.logger.step(f"{self.log_prefix} Setup tasks completed") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('pr_handler', 'pr_management', 'completed')} Setup tasks completed" + ) # Stage 2: CI/CD execution tasks - self.logger.step(f"{self.log_prefix} Stage: CI/CD execution") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('pr_handler', 'pr_management', 'processing')} " + f"Stage: CI/CD execution", + ) ci_tasks: list[Coroutine[Any, Any, Any]] = [] ci_tasks.append(self.runner_handler.run_tox(pull_request=pull_request)) @@ -470,14 +744,20 @@ async def process_opened_or_synchronize_pull_request(self, pull_request: PullReq if self.github_webhook.conventional_title: ci_tasks.append(self.runner_handler.run_conventional_title_check(pull_request=pull_request)) - self.logger.step(f"{self.log_prefix} Executing CI/CD tasks") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('pr_handler', 'pr_management', 'processing')} " + f"Executing CI/CD tasks", + ) ci_results = await asyncio.gather(*ci_tasks, return_exceptions=True) for result in ci_results: if isinstance(result, Exception): self.logger.error(f"{self.log_prefix} CI/CD task failed: {result}") - self.logger.step(f"{self.log_prefix} PR processing workflow completed") # type: ignore + 
self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('pr_handler', 'pr_management', 'completed')} " + f"PR processing workflow completed", + ) async def create_issue_for_new_pull_request(self, pull_request: PullRequest) -> None: if not self.github_webhook.create_issue_for_new_pr: @@ -524,7 +804,8 @@ async def set_pull_request_automerge(self, pull_request: PullRequest) -> None: try: if not pull_request.raw_data.get("auto_merge"): self.logger.info( - f"{self.log_prefix} will be merged automatically. owner: {self.github_webhook.parent_committer} " + f"{self.log_prefix} will be merged automatically. " + f"owner: {self.github_webhook.parent_committer} " f"is part of auto merge enabled rules" ) @@ -537,7 +818,8 @@ async def set_pull_request_automerge(self, pull_request: PullRequest) -> None: async def remove_labels_when_pull_request_sync(self, pull_request: PullRequest) -> None: tasks: list[Coroutine[Any, Any, Any]] = [] - for _label in pull_request.labels: + labels = await asyncio.to_thread(lambda: list(pull_request.labels)) + for _label in labels: _label_name = _label.name if ( _label_name.startswith(APPROVED_BY_LABEL_PREFIX) @@ -559,7 +841,7 @@ async def remove_labels_when_pull_request_sync(self, pull_request: PullRequest) self.logger.error(f"{self.log_prefix} Async task failed: {result}") async def label_pull_request_by_merge_state(self, pull_request: PullRequest) -> None: - merge_state = pull_request.mergeable_state + merge_state = await asyncio.to_thread(lambda: pull_request.mergeable_state) self.logger.debug(f"{self.log_prefix} Mergeable state is {merge_state}") if merge_state == "unknown": return @@ -593,8 +875,9 @@ async def _process_verified_for_update_or_new_pull_request(self, pull_request: P if self.github_webhook.parent_committer in self.github_webhook.auto_verified_and_merged_users: self.logger.info( - f"{self.log_prefix} Committer {self.github_webhook.parent_committer} is part of {self.github_webhook.auto_verified_and_merged_users}" - ", Setting verified label" + f"{self.log_prefix} Committer {self.github_webhook.parent_committer} " + f"is part of {self.github_webhook.auto_verified_and_merged_users}, " + f"Setting verified label" ) await self.labels_handler._add_label(pull_request=pull_request, label=VERIFIED_LABEL_STR) await self.check_run_handler.set_verify_check_success() @@ -607,13 +890,13 @@ async def _process_verified_for_update_or_new_pull_request(self, pull_request: P async def add_pull_request_owner_as_assingee(self, pull_request: PullRequest) -> None: try: self.logger.info(f"{self.log_prefix} Adding PR owner as assignee") - pull_request.add_to_assignees(pull_request.user.login) + await asyncio.to_thread(pull_request.add_to_assignees, pull_request.user.login) except Exception as exp: self.logger.debug(f"{self.log_prefix} Exception while adding PR owner as assignee: {exp}") if self.owners_file_handler.root_approvers: self.logger.debug(f"{self.log_prefix} Falling back to first approver as assignee") - pull_request.add_to_assignees(self.owners_file_handler.root_approvers[0]) + await asyncio.to_thread(pull_request.add_to_assignees, self.owners_file_handler.root_approvers[0]) async def check_if_can_be_merged(self, pull_request: PullRequest) -> None: """ @@ -627,8 +910,12 @@ async def check_if_can_be_merged(self, pull_request: PullRequest) -> None: PR status is not 'dirty'. PR has no changed requests from approvers. 
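+        Each unmet condition appends a line to the merge check run's failure
+        output; the can-be-merged label is applied only when that output is empty.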
""" - self.logger.step(f"{self.log_prefix} Starting merge eligibility check") # type: ignore - if self.skip_if_pull_request_already_merged(pull_request=pull_request): + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} " + f"{format_task_fields('pr_handler', 'pr_management', 'started')} " + f"Starting merge eligibility check" + ) + if await self.skip_if_pull_request_already_merged(pull_request=pull_request): self.logger.debug(f"{self.log_prefix} Pull request already merged") return @@ -647,7 +934,7 @@ async def check_if_can_be_merged(self, pull_request: PullRequest) -> None: _labels = await self.labels_handler.pull_request_labels_names(pull_request=pull_request) self.logger.debug(f"{self.log_prefix} check if can be merged. PR labels are: {_labels}") - is_pr_mergable = pull_request.mergeable + is_pr_mergable = await asyncio.to_thread(lambda: pull_request.mergeable) self.logger.debug(f"{self.log_prefix} PR mergeable is {is_pr_mergable}") if not is_pr_mergable: failure_output += f"PR is not mergeable: {is_pr_mergable}\n" @@ -662,10 +949,10 @@ async def check_if_can_be_merged(self, pull_request: PullRequest) -> None: failure_output += required_check_in_progress_failure_output self.logger.debug(f"{self.log_prefix} required_check_in_progress_failure_output: {failure_output}") - labels_failure_output = self.labels_handler.wip_or_hold_lables_exists(labels=_labels) + labels_failure_output = self.labels_handler.wip_or_hold_labels_exists(labels=_labels) if labels_failure_output: failure_output += labels_failure_output - self.logger.debug(f"{self.log_prefix} wip_or_hold_lables_exists: {failure_output}") + self.logger.debug(f"{self.log_prefix} wip_or_hold_labels_exists: {failure_output}") required_check_failed_failure_output = await self.check_run_handler.required_check_failed_or_no_status( pull_request=pull_request, @@ -689,16 +976,27 @@ async def check_if_can_be_merged(self, pull_request: PullRequest) -> None: if not failure_output: await self.labels_handler._add_label(pull_request=pull_request, label=CAN_BE_MERGED_STR) await self.check_run_handler.set_merge_check_success() - + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('pr_handler', 'pr_management', 'completed')} " + f"Merge eligibility check completed successfully", + ) self.logger.info(f"{self.log_prefix} Pull request can be merged") return + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('pr_handler', 'pr_management', 'failed')} " + f"Merge eligibility check failed", + ) self.logger.debug(f"{self.log_prefix} cannot be merged: {failure_output}") output["text"] = failure_output await self.labels_handler._remove_label(pull_request=pull_request, label=CAN_BE_MERGED_STR) await self.check_run_handler.set_merge_check_failure(output=output) except Exception as ex: + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('pr_handler', 'pr_management', 'failed')} " + f"Merge eligibility check failed with exception", + ) self.logger.error( f"{self.log_prefix} Failed to check if can be merged, set check run to {FAILURE_STR} {ex}" ) @@ -709,7 +1007,7 @@ async def check_if_can_be_merged(self, pull_request: PullRequest) -> None: async def _check_if_pr_approved(self, labels: list[str]) -> str: self.logger.info(f"{self.log_prefix} Check if pull request is approved by pull request labels.") - self.logger.debug(f"labels are {labels}") + self.logger.debug(f"{self.log_prefix} labels are {labels}") error: str = "" approved_by = 
[]
@@ -720,11 +1018,11 @@ async def _check_if_pr_approved(self, labels: list[str]) -> str:
             + self.owners_file_handler.root_approvers.copy()
             + self.owners_file_handler.root_reviewers.copy()
         )
-        self.logger.debug(f"all_reviewers: {all_reviewers}")
+        self.logger.debug(f"{self.log_prefix} all_reviewers: {all_reviewers}")
         all_reviewers_without_pr_owner = {
             _reviewer for _reviewer in all_reviewers if _reviewer != self.github_webhook.parent_committer
         }
-        self.logger.debug(f"all_reviewers_without_pr_owner: {all_reviewers_without_pr_owner}")
+        self.logger.debug(f"{self.log_prefix} all_reviewers_without_pr_owner: {all_reviewers_without_pr_owner}")

         all_reviewers_without_pr_owner_and_lgtmed = all_reviewers_without_pr_owner.copy()

@@ -734,17 +1032,17 @@ async def _check_if_pr_approved(self, labels: list[str]) -> str:
             if LGTM_BY_LABEL_PREFIX.lower() in _label.lower() and reviewer in all_reviewers_without_pr_owner:
                 lgtm_count += 1
                 all_reviewers_without_pr_owner_and_lgtmed.remove(reviewer)
-        self.logger.debug(f"lgtm_count: {lgtm_count}")
+        self.logger.debug(f"{self.log_prefix} lgtm_count: {lgtm_count}")

         for _label in labels:
             if APPROVED_BY_LABEL_PREFIX.lower() in _label.lower():
                 approved_by.append(_label.split(LABELS_SEPARATOR)[-1])
-        self.logger.debug(f"approved_by: {approved_by}")
+        self.logger.debug(f"{self.log_prefix} approved_by: {approved_by}")

         missing_approvers = list(set(self.owners_file_handler.all_pull_request_approvers.copy()))
-        self.logger.debug(f"missing_approvers: {missing_approvers}")
-        owners_data_changed_files = await self.owners_file_handler.owners_data_for_changed_files()
-        self.logger.debug(f"owners_data_changed_files: {owners_data_changed_files}")
+        self.logger.debug(f"{self.log_prefix} missing_approvers: {missing_approvers}")
+        owners_data_changed_files = await self.owners_file_handler.owners_data_for_changed_files
+        self.logger.debug(f"{self.log_prefix} owners_data_changed_files: {owners_data_changed_files}")

         # If any of root approvers is in approved_by list, the pull request is approved
         for _approver in approved_by:
@@ -758,7 +1056,8 @@ async def _check_if_pr_approved(self, labels: list[str]) -> str:

             for required_pr_approver in required_pr_approvers:
                 if required_pr_approver in approved_by:
-                    # Once we found approver in approved_by list, we remove all approvers from missing_approvers list for this owners file
+                    # Once we find an approver in the approved_by list, we remove all
+                    # approvers from the missing_approvers list for this owners file
                    for _approver in required_pr_approvers:
                         if _approver in missing_approvers:
                             missing_approvers.remove(_approver)
@@ -766,7 +1065,7 @@ async def _check_if_pr_approved(self, labels: list[str]) -> str:
                    break

         missing_approvers = list(set(missing_approvers))
-        self.logger.debug(f"missing_approvers after check: {missing_approvers}")
+        self.logger.debug(f"{self.log_prefix} missing_approvers after check: {missing_approvers}")

         if missing_approvers:
             error += f"Missing approval from approvers: {', '.join(missing_approvers)}\n"

@@ -774,12 +1073,16 @@ async def _check_if_pr_approved(self, labels: list[str]) -> str:
         if lgtm_count < self.github_webhook.minimum_lgtm:
             if lgtm_count == len(all_reviewers_without_pr_owner):
                 self.logger.debug(
-                    f"{self.log_prefix} minimum_lgtm is {self.github_webhook.minimum_lgtm}, but number of reviewers is {len(all_reviewers_without_pr_owner)}. PR approved."
+                    f"{self.log_prefix} minimum_lgtm is {self.github_webhook.minimum_lgtm}, "
+                    f"but number of reviewers is {len(all_reviewers_without_pr_owner)}. "
+                    f"PR approved."
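+                    # Waiver: every eligible reviewer (PR owner excluded) has already
+                    # added LGTM, so minimum_lgtm is treated as satisfied and small
+                    # reviewer pools cannot block a PR indefinitely.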
) else: + reviewers_str = ", ".join(all_reviewers_without_pr_owner) error += ( "Missing lgtm from reviewers. " - f"Minimum {self.github_webhook.minimum_lgtm} required, ({lgtm_count} given). Reviewers: {', '.join(all_reviewers_without_pr_owner)}.\n" + f"Minimum {self.github_webhook.minimum_lgtm} required, " + f"({lgtm_count} given). Reviewers: {reviewers_str}.\n" ) return error @@ -793,21 +1096,21 @@ def _check_labels_for_can_be_merged(self, labels: list[str]) -> str: change_request_user = _label.split(LABELS_SEPARATOR)[-1] if change_request_user in self.owners_file_handler.all_pull_request_approvers: failure_output += "PR has changed requests from approvers\n" - self.logger.debug(f"Found changed request by {change_request_user}") + self.logger.debug(f"{self.log_prefix} Found changed request by {change_request_user}") missing_required_labels = [] for _req_label in self.github_webhook.can_be_merged_required_labels: if _req_label not in labels: missing_required_labels.append(_req_label) - self.logger.debug(f"Missing required label {_req_label}") + self.logger.debug(f"{self.log_prefix} Missing required label {_req_label}") if missing_required_labels: failure_output += f"Missing required labels: {', '.join(missing_required_labels)}\n" return failure_output - def skip_if_pull_request_already_merged(self, pull_request: PullRequest) -> bool: - if pull_request and pull_request.is_merged(): + async def skip_if_pull_request_already_merged(self, pull_request: PullRequest) -> bool: + if pull_request and await asyncio.to_thread(lambda: pull_request.is_merged()): self.logger.info(f"{self.log_prefix}: PR is merged, not processing") return True diff --git a/webhook_server/libs/pull_request_review_handler.py b/webhook_server/libs/handlers/pull_request_review_handler.py similarity index 93% rename from webhook_server/libs/pull_request_review_handler.py rename to webhook_server/libs/handlers/pull_request_review_handler.py index e9617c395..619118665 100644 --- a/webhook_server/libs/pull_request_review_handler.py +++ b/webhook_server/libs/handlers/pull_request_review_handler.py @@ -2,8 +2,8 @@ from github.PullRequest import PullRequest -from webhook_server.libs.labels_handler import LabelsHandler -from webhook_server.libs.owners_files_handler import OwnersFileHandler +from webhook_server.libs.handlers.labels_handler import LabelsHandler +from webhook_server.libs.handlers.owners_files_handler import OwnersFileHandler from webhook_server.utils.constants import ADD_STR, APPROVE_STR if TYPE_CHECKING: diff --git a/webhook_server/libs/handlers/push_handler.py b/webhook_server/libs/handlers/push_handler.py new file mode 100644 index 000000000..ae3071935 --- /dev/null +++ b/webhook_server/libs/handlers/push_handler.py @@ -0,0 +1,174 @@ +import asyncio +import re +from typing import TYPE_CHECKING +from uuid import uuid4 + +from github.Repository import Repository + +from webhook_server.libs.handlers.check_run_handler import CheckRunHandler +from webhook_server.libs.handlers.runner_handler import RunnerHandler +from webhook_server.utils.helpers import format_task_fields, run_command +from webhook_server.utils.notification_utils import send_slack_message + +if TYPE_CHECKING: + from webhook_server.libs.github_api import GithubWebhook + + +class PushHandler: + def __init__(self, github_webhook: "GithubWebhook"): + self.github_webhook = github_webhook + + self.hook_data = self.github_webhook.hook_data + self.logger = self.github_webhook.logger + self.log_prefix: str = self.github_webhook.log_prefix + self.repository: 
Repository = self.github_webhook.repository + self.check_run_handler = CheckRunHandler(github_webhook=self.github_webhook) + self.runner_handler = RunnerHandler(github_webhook=self.github_webhook) + + async def process_push_webhook_data(self) -> None: + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('push_processing', 'webhook_event', 'started')} " + f"Starting push webhook processing", # pragma: allowlist secret + ) + tag = re.search(r"^refs/tags/(.+)$", self.hook_data["ref"]) + if tag: + tag_name = tag.group(1) + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('push_processing', 'webhook_event', 'processing')} " + f"Processing tag push: {tag_name}", + ) + self.logger.info(f"{self.log_prefix} Processing push for tag: {tag.group(1)}") + self.logger.debug(f"{self.log_prefix} Tag: {tag_name}") + if self.github_webhook.pypi: + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('push_processing', 'webhook_event', 'started')} " + f"Starting PyPI upload for tag: {tag_name}", + ) + self.logger.info(f"{self.log_prefix} Processing upload to pypi for tag: {tag_name}") + try: + await self.upload_to_pypi(tag_name=tag_name) + except Exception: + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('push_processing', 'webhook_event', 'failed')} " + f"PyPI upload failed with exception", + ) + self.logger.exception(f"{self.log_prefix} PyPI upload failed") + + if self.github_webhook.build_and_push_container and self.github_webhook.container_release: + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('push_processing', 'webhook_event', 'started')} " + f"Starting container build and push for tag: {tag_name}", + ) + self.logger.info(f"{self.log_prefix} Processing build and push container for tag: {tag_name}") + try: + await self.runner_handler.run_build_container(push=True, set_check=False, tag=tag_name) + # Note: run_build_container logs completion/failure internally + except Exception as ex: + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('push_processing', 'webhook_event', 'failed')} " + f"Container build and push failed with exception", + ) + self.logger.exception(f"{self.log_prefix} Container build and push failed: {ex}") + else: + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('push_processing', 'webhook_event', 'processing')} " + f"Non-tag push detected, skipping processing", + ) + + async def upload_to_pypi(self, tag_name: str) -> None: + async def _issue_on_error(_error: str) -> None: + # Sanitize title: replace newlines, remove backticks, strip whitespace, truncate + sanitized_title = _error.replace("\n", " ").replace("`", "").replace("\r", "").strip() + # Truncate to safe length (GitHub issue title limit is ~256 chars, use 250 for safety) + if len(sanitized_title) > 250: + sanitized_title = sanitized_title[:247] + "..." 
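+            # Illustration (hypothetical input): a 400-character, newline-free
+            # traceback becomes its first 247 characters plus "...", keeping the
+            # issue title within GitHub's ~256-character limit.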
+ await asyncio.to_thread( + self.repository.create_issue, + title=sanitized_title, + body=f""" +Publish to PYPI failed: `{_error}` +""", + ) + + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('push_processing', 'webhook_event', 'started')} " + f"Starting PyPI upload process for tag: {tag_name}", + ) + clone_repo_dir = f"{self.github_webhook.clone_repo_dir}-{uuid4()}" + uv_cmd_dir = f"--directory {clone_repo_dir}" + self.logger.info(f"{self.log_prefix} Start uploading to pypi") + self.logger.debug(f"{self.log_prefix} Clone repo dir: {clone_repo_dir}") + _dist_dir: str = f"{clone_repo_dir}/pypi-dist" + + async with self.runner_handler._prepare_cloned_repo_dir( + checkout=tag_name, clone_repo_dir=clone_repo_dir + ) as _res: + if not _res[0]: + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('push_processing', 'webhook_event', 'failed')} " + f"PyPI upload failed: repository preparation failed", + ) + _error = self.check_run_handler.get_check_run_text(out=_res[1], err=_res[2]) + await _issue_on_error(_error=_error) + return + + rc, out, err = await run_command( + command=f"uv {uv_cmd_dir} build --sdist --out-dir {_dist_dir}", log_prefix=self.log_prefix + ) + if not rc: + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('push_processing', 'webhook_event', 'failed')} " + f"PyPI upload failed: build command failed", + ) + _error = self.check_run_handler.get_check_run_text(out=out, err=err) + await _issue_on_error(_error=_error) + return + + rc, tar_gz_file, err = await run_command(command=f"ls {_dist_dir}", log_prefix=self.log_prefix) + if not rc: + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('push_processing', 'webhook_event', 'failed')} " + f"PyPI upload failed: listing dist directory failed", + ) + _error = self.check_run_handler.get_check_run_text(out=tar_gz_file, err=err) + await _issue_on_error(_error=_error) + return + + tar_gz_file = tar_gz_file.strip() + + pypi_token = self.github_webhook.pypi["token"] + commands: list[str] = [ + f"uvx {uv_cmd_dir} twine check {_dist_dir}/{tar_gz_file}", + f"uvx {uv_cmd_dir} twine upload --username __token__ " + f"--password {pypi_token} " + f"{_dist_dir}/{tar_gz_file} --skip-existing", + ] + + for cmd in commands: + rc, out, err = await run_command(command=cmd, log_prefix=self.log_prefix, redact_secrets=[pypi_token]) + if not rc: + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('push_processing', 'webhook_event', 'failed')} " + f"PyPI upload failed: command execution failed", + ) + _error = self.check_run_handler.get_check_run_text(out=out, err=err) + await _issue_on_error(_error=_error) + return + + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('push_processing', 'webhook_event', 'completed')} " + f"PyPI upload completed successfully for tag: {tag_name}", + ) + self.logger.info(f"{self.log_prefix} Publish to pypi finished") + if self.github_webhook.slack_webhook_url: + message: str = f""" +``` +{self.github_webhook.repository_name} Version {tag_name} published to PYPI. 
+``` +""" + send_slack_message( + message=message, + webhook_url=self.github_webhook.slack_webhook_url, + logger=self.logger, + log_prefix=self.log_prefix, + ) diff --git a/webhook_server/libs/runner_handler.py b/webhook_server/libs/handlers/runner_handler.py similarity index 56% rename from webhook_server/libs/runner_handler.py rename to webhook_server/libs/handlers/runner_handler.py index 9b88d0c6e..95dd48aa4 100644 --- a/webhook_server/libs/runner_handler.py +++ b/webhook_server/libs/handlers/runner_handler.py @@ -2,7 +2,8 @@ import contextlib import re import shutil -from typing import TYPE_CHECKING, Any, AsyncGenerator +from collections.abc import AsyncGenerator +from typing import TYPE_CHECKING, Any from uuid import uuid4 import shortuuid @@ -10,8 +11,8 @@ from github.PullRequest import PullRequest from github.Repository import Repository -from webhook_server.libs.check_run_handler import CheckRunHandler -from webhook_server.libs.owners_files_handler import OwnersFileHandler +from webhook_server.libs.handlers.check_run_handler import CheckRunHandler +from webhook_server.libs.handlers.owners_files_handler import OwnersFileHandler from webhook_server.utils.constants import ( BUILD_CONTAINER_STR, CHERRY_PICKED_LABEL_PREFIX, @@ -21,7 +22,8 @@ PYTHON_MODULE_INSTALL_STR, TOX_STR, ) -from webhook_server.utils.helpers import run_command +from webhook_server.utils.helpers import _redact_secrets, format_task_fields, run_command +from webhook_server.utils.notification_utils import send_slack_message if TYPE_CHECKING: from webhook_server.libs.github_api import GithubWebhook @@ -40,6 +42,11 @@ def __init__(self, github_webhook: "GithubWebhook", owners_file_handler: OwnersF github_webhook=self.github_webhook, owners_file_handler=self.owners_file_handler ) + @property + def mask_sensitive(self) -> bool: + """Get mask_sensitive configuration value.""" + return self.github_webhook.config.get_value("mask-sensitive-data", return_on_none=True) + @contextlib.asynccontextmanager async def _prepare_cloned_repo_dir( self, @@ -56,10 +63,13 @@ async def _prepare_cloned_repo_dir( try: # Clone the repository + github_token = self.github_webhook.token + clone_url_with_token = self.repository.clone_url.replace("https://", f"https://{github_token}@") rc, out, err = await run_command( - command=f"git clone {self.repository.clone_url.replace('https://', f'https://{self.github_webhook.token}@')} " - f"{clone_repo_dir}", + command=(f"git clone {clone_url_with_token} {clone_repo_dir}"), log_prefix=self.log_prefix, + redact_secrets=[github_token], + mask_sensitive=self.mask_sensitive, ) if not rc: result = (rc, out, err) @@ -67,7 +77,9 @@ async def _prepare_cloned_repo_dir( if success: rc, out, err = await run_command( - command=f"{git_cmd} config user.name '{self.repository.owner.login}'", log_prefix=self.log_prefix + command=f"{git_cmd} config user.name '{self.repository.owner.login}'", + log_prefix=self.log_prefix, + mask_sensitive=self.mask_sensitive, ) if not rc: result = (rc, out, err) @@ -75,7 +87,9 @@ async def _prepare_cloned_repo_dir( if success: rc, out, err = await run_command( - f"{git_cmd} config user.email '{self.repository.owner.email}'", log_prefix=self.log_prefix + command=f"{git_cmd} config user.email '{self.repository.owner.email}'", + log_prefix=self.log_prefix, + mask_sensitive=self.mask_sensitive, ) if not rc: result = (rc, out, err) @@ -83,30 +97,42 @@ async def _prepare_cloned_repo_dir( if success: rc, out, err = await run_command( - command=f"{git_cmd} config --local --add remote.origin.fetch 
+refs/pull/*/head:refs/remotes/origin/pr/*", + command=( + f"{git_cmd} config --local --add remote.origin.fetch +refs/pull/*/head:refs/remotes/origin/pr/*" + ), log_prefix=self.log_prefix, + mask_sensitive=self.mask_sensitive, ) if not rc: result = (rc, out, err) success = False if success: - rc, out, err = await run_command(command=f"{git_cmd} remote update", log_prefix=self.log_prefix) + rc, out, err = await run_command( + command=f"{git_cmd} remote update", + log_prefix=self.log_prefix, + mask_sensitive=self.mask_sensitive, + ) if not rc: result = (rc, out, err) success = False # Checkout to requested branch/tag if checkout and success: - rc, out, err = await run_command(f"{git_cmd} checkout {checkout}", log_prefix=self.log_prefix) + rc, out, err = await run_command( + command=f"{git_cmd} checkout {checkout}", + log_prefix=self.log_prefix, + mask_sensitive=self.mask_sensitive, + ) if not rc: result = (rc, out, err) success = False if success and pull_request: rc, out, err = await run_command( - f"{git_cmd} merge origin/{pull_request.base.ref} -m 'Merge {pull_request.base.ref}'", + command=f"{git_cmd} merge origin/{pull_request.base.ref} -m 'Merge {pull_request.base.ref}'", log_prefix=self.log_prefix, + mask_sensitive=self.mask_sensitive, ) if not rc: result = (rc, out, err) @@ -119,6 +145,7 @@ async def _prepare_cloned_repo_dir( rc, out, err = await run_command( command=f"{git_cmd} checkout {pull_request.base.ref}", log_prefix=self.log_prefix, + mask_sensitive=self.mask_sensitive, ) if not rc: result = (rc, out, err) @@ -126,7 +153,9 @@ async def _prepare_cloned_repo_dir( elif tag_name: rc, out, err = await run_command( - command=f"{git_cmd} checkout {tag_name}", log_prefix=self.log_prefix + command=f"{git_cmd} checkout {tag_name}", + log_prefix=self.log_prefix, + mask_sensitive=self.mask_sensitive, ) if not rc: result = (rc, out, err) @@ -138,6 +167,7 @@ async def _prepare_cloned_repo_dir( rc, out, err = await run_command( command=f"{git_cmd} checkout origin/pr/{_pull_request.number}", log_prefix=self.log_prefix, + mask_sensitive=self.mask_sensitive, ) if not rc: result = (rc, out, err) @@ -145,8 +175,12 @@ async def _prepare_cloned_repo_dir( if pull_request and success: rc, out, err = await run_command( - f"{git_cmd} merge origin/{pull_request.base.ref} -m 'Merge {pull_request.base.ref}'", + command=( + f"{git_cmd} merge origin/{pull_request.base.ref} " + f"-m 'Merge {pull_request.base.ref}'" + ), log_prefix=self.log_prefix, + mask_sensitive=self.mask_sensitive, ) if not rc: result = (rc, out, err) @@ -154,7 +188,7 @@ async def _prepare_cloned_repo_dir( finally: yield result self.logger.debug(f"{self.log_prefix} Deleting {clone_repo_dir}") - shutil.rmtree(clone_repo_dir) + shutil.rmtree(clone_repo_dir, ignore_errors=True) def is_podman_bug(self, err: str) -> bool: _err = "Error: current system boot ID differs from cached boot ID; an unhandled reboot has occurred" @@ -165,15 +199,24 @@ def fix_podman_bug(self) -> None: shutil.rmtree("/tmp/storage-run-1000/containers", ignore_errors=True) shutil.rmtree("/tmp/storage-run-1000/libpod/tmp", ignore_errors=True) - async def run_podman_command(self, command: str) -> tuple[bool, str, str]: - rc, out, err = await run_command(command=command, log_prefix=self.log_prefix) + async def run_podman_command( + self, command: str, redact_secrets: list[str] | None = None, mask_sensitive: bool = True + ) -> tuple[bool, str, str]: + rc, out, err = await run_command( + command=command, log_prefix=self.log_prefix, redact_secrets=redact_secrets, 
mask_sensitive=mask_sensitive + ) if rc: return rc, out, err if self.is_podman_bug(err=err): self.fix_podman_bug() - return await run_command(command=command, log_prefix=self.log_prefix) + return await run_command( + command=command, + log_prefix=self.log_prefix, + redact_secrets=redact_secrets, + mask_sensitive=mask_sensitive, + ) return rc, out, err @@ -182,7 +225,9 @@ async def run_tox(self, pull_request: PullRequest) -> None: self.logger.debug(f"{self.log_prefix} Tox not configured for this repository") return - self.logger.step(f"{self.log_prefix} Starting tox tests execution") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'started')} Starting tox tests execution" + ) if await self.check_run_handler.is_check_run_in_progress(check_run=TOX_STR): self.logger.debug(f"{self.log_prefix} Check run is in progress, re-running {TOX_STR}.") @@ -198,11 +243,17 @@ async def run_tox(self, pull_request: PullRequest) -> None: tests = _tox_tests.replace(" ", "") cmd += f" -e {tests}" - self.logger.step(f"{self.log_prefix} Setting tox check status to in-progress") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} " + f"Setting tox check status to in-progress", + ) await self.check_run_handler.set_run_tox_check_in_progress() self.logger.debug(f"{self.log_prefix} Tox command to run: {cmd}") - self.logger.step(f"{self.log_prefix} Preparing repository clone for tox execution") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} " + f"Preparing repository clone for tox execution", + ) async with self._prepare_cloned_repo_dir(clone_repo_dir=clone_repo_dir, pull_request=pull_request) as _res: output: dict[str, Any] = { "title": "Tox", @@ -210,20 +261,33 @@ async def run_tox(self, pull_request: PullRequest) -> None: "text": None, } if not _res[0]: + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'failed')} " + f"Repository preparation failed for tox", + ) self.logger.error(f"{self.log_prefix} Repository preparation failed for tox") output["text"] = self.check_run_handler.get_check_run_text(out=_res[1], err=_res[2]) return await self.check_run_handler.set_run_tox_check_failure(output=output) - self.logger.step(f"{self.log_prefix} Executing tox command") # type: ignore - rc, out, err = await run_command(command=cmd, log_prefix=self.log_prefix) + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} Executing tox command" + ) + rc, out, err = await run_command( + command=cmd, log_prefix=self.log_prefix, mask_sensitive=self.mask_sensitive + ) output["text"] = self.check_run_handler.get_check_run_text(err=err, out=out) if rc: - self.logger.step(f"{self.log_prefix} Tox tests completed successfully") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'completed')} " + f"Tox tests completed successfully", + ) return await self.check_run_handler.set_run_tox_check_success(output=output) else: - self.logger.step(f"{self.log_prefix} Tox tests failed") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'failed')} Tox tests failed" + ) return await 
self.check_run_handler.set_run_tox_check_failure(output=output) async def run_pre_commit(self, pull_request: PullRequest) -> None: @@ -231,7 +295,10 @@ async def run_pre_commit(self, pull_request: PullRequest) -> None: self.logger.debug(f"{self.log_prefix} Pre-commit not configured for this repository") return - self.logger.step(f"{self.log_prefix} Starting pre-commit checks execution") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'started')} " + f"Starting pre-commit checks execution", + ) if await self.check_run_handler.is_check_run_in_progress(check_run=PRE_COMMIT_STR): self.logger.debug(f"{self.log_prefix} Check run is in progress, re-running {PRE_COMMIT_STR}.") @@ -239,10 +306,16 @@ async def run_pre_commit(self, pull_request: PullRequest) -> None: clone_repo_dir = f"{self.github_webhook.clone_repo_dir}-{uuid4()}" cmd = f" uvx --directory {clone_repo_dir} {PREK_STR} run --all-files" - self.logger.step(f"{self.log_prefix} Setting pre-commit check status to in-progress") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} " + f"Setting pre-commit check status to in-progress", + ) await self.check_run_handler.set_run_pre_commit_check_in_progress() - self.logger.step(f"{self.log_prefix} Preparing repository clone for pre-commit execution") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} " + f"Preparing repository clone for pre-commit execution", + ) async with self._prepare_cloned_repo_dir(pull_request=pull_request, clone_repo_dir=clone_repo_dir) as _res: output: dict[str, Any] = { "title": "Pre-Commit", @@ -250,20 +323,34 @@ async def run_pre_commit(self, pull_request: PullRequest) -> None: "text": None, } if not _res[0]: + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'failed')} " + f"Repository preparation failed for pre-commit", + ) self.logger.error(f"{self.log_prefix} Repository preparation failed for pre-commit") output["text"] = self.check_run_handler.get_check_run_text(out=_res[1], err=_res[2]) return await self.check_run_handler.set_run_pre_commit_check_failure(output=output) - self.logger.step(f"{self.log_prefix} Executing pre-commit command") # type: ignore - rc, out, err = await run_command(command=cmd, log_prefix=self.log_prefix) + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} " + f"Executing pre-commit command", + ) + rc, out, err = await run_command( + command=cmd, log_prefix=self.log_prefix, mask_sensitive=self.mask_sensitive + ) output["text"] = self.check_run_handler.get_check_run_text(err=err, out=out) if rc: - self.logger.step(f"{self.log_prefix} Pre-commit checks completed successfully") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'completed')} " + f"Pre-commit checks completed successfully", + ) return await self.check_run_handler.set_run_pre_commit_check_success(output=output) else: - self.logger.step(f"{self.log_prefix} Pre-commit checks failed") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'failed')} Pre-commit checks failed" + ) return await 
self.check_run_handler.set_run_pre_commit_check_failure(output=output) async def run_build_container( @@ -279,7 +366,9 @@ async def run_build_container( if not self.github_webhook.build_and_push_container: return - self.logger.step(f"{self.log_prefix} Starting container build process") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'started')} Starting container build process", + ) if ( self.owners_file_handler @@ -297,14 +386,21 @@ async def run_build_container( if await self.check_run_handler.is_check_run_in_progress(check_run=BUILD_CONTAINER_STR) and not is_merged: self.logger.info(f"{self.log_prefix} Check run is in progress, re-running {BUILD_CONTAINER_STR}.") - self.logger.step(f"{self.log_prefix} Setting container build check status to in-progress") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} " + f"Setting container build check status to in-progress", + ) await self.check_run_handler.set_container_build_in_progress() _container_repository_and_tag = self.github_webhook.container_repository_and_tag( pull_request=pull_request, is_merged=is_merged, tag=tag ) no_cache: str = " --no-cache" if is_merged else "" - build_cmd: str = f"--network=host {no_cache} -f {clone_repo_dir}/{self.github_webhook.dockerfile} {clone_repo_dir} -t {_container_repository_and_tag}" + build_cmd: str = ( + f"--network=host {no_cache} -f " + f"{clone_repo_dir}/{self.github_webhook.dockerfile} " + f"{clone_repo_dir} -t {_container_repository_and_tag}" + ) if self.github_webhook.container_build_args: build_args = " ".join(f"--build-arg {arg}" for arg in self.github_webhook.container_build_args) @@ -318,7 +414,10 @@ async def run_build_container( podman_build_cmd: str = f"podman build {build_cmd}" self.logger.debug(f"{self.log_prefix} Podman build command to run: {podman_build_cmd}") - self.logger.step(f"{self.log_prefix} Preparing repository clone for container build") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} " + f"Preparing repository clone for container build", + ) async with self._prepare_cloned_repo_dir( pull_request=pull_request, is_merged=is_merged, @@ -331,31 +430,63 @@ async def run_build_container( "text": None, } if not _res[0]: + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'failed')} " + f"Repository preparation failed for container build", + ) output["text"] = self.check_run_handler.get_check_run_text(out=_res[1], err=_res[2]) if pull_request and set_check: return await self.check_run_handler.set_container_build_failure(output=output) - self.logger.step(f"{self.log_prefix} Executing container build command") # type: ignore - build_rc, build_out, build_err = await self.run_podman_command(command=podman_build_cmd) + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} " + f"Executing container build command", + ) + build_rc, build_out, build_err = await self.run_podman_command( + command=podman_build_cmd, mask_sensitive=self.mask_sensitive + ) output["text"] = self.check_run_handler.get_check_run_text(err=build_err, out=build_out) if build_rc: - self.logger.step(f"{self.log_prefix} Container build completed successfully") # type: ignore + self.logger.step( # type: ignore[attr-defined] + 
f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'completed')} " + f"Container build completed successfully", + ) self.logger.info(f"{self.log_prefix} Done building {_container_repository_and_tag}") if pull_request and set_check: return await self.check_run_handler.set_container_build_success(output=output) else: - self.logger.step(f"{self.log_prefix} Container build failed") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'failed')} Container build failed" + ) self.logger.error(f"{self.log_prefix} Failed to build {_container_repository_and_tag}") if pull_request and set_check: return await self.check_run_handler.set_container_build_failure(output=output) if push and build_rc: - self.logger.step(f"{self.log_prefix} Starting container push to registry") # type: ignore - cmd = f"podman push --creds {self.github_webhook.container_repository_username}:{self.github_webhook.container_repository_password} {_container_repository_and_tag}" - push_rc, _, _ = await self.run_podman_command(command=cmd) + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'started')} " + f"Starting container push to registry", + ) + cmd = ( + f"podman push --creds " + f"{self.github_webhook.container_repository_username}:" + f"{self.github_webhook.container_repository_password} " + f"{_container_repository_and_tag}" + ) + push_rc, _, _ = await self.run_podman_command( + command=cmd, + redact_secrets=[ + self.github_webhook.container_repository_username, + self.github_webhook.container_repository_password, + ], + mask_sensitive=self.mask_sensitive, + ) if push_rc: - self.logger.step(f"{self.log_prefix} Container push completed successfully") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'completed')} " + f"Container push completed successfully", + ) push_msg: str = f"New container for {_container_repository_and_tag} published" if pull_request: await asyncio.to_thread(pull_request.create_issue_comment, push_msg) @@ -366,13 +497,19 @@ async def run_build_container( {self.github_webhook.repository_full_name} {push_msg}. ``` """ - self.github_webhook.send_slack_message( - message=message, webhook_url=self.github_webhook.slack_webhook_url + send_slack_message( + message=message, + webhook_url=self.github_webhook.slack_webhook_url, + logger=self.logger, + log_prefix=self.log_prefix, ) self.logger.info(f"{self.log_prefix} Done push {_container_repository_and_tag}") else: err_msg: str = f"Failed to build and push {_container_repository_and_tag}" + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'failed')} Container push failed", + ) if pull_request: await asyncio.to_thread(pull_request.create_issue_comment, err_msg) @@ -382,24 +519,37 @@ async def run_build_container( {self.github_webhook.repository_full_name} {err_msg}. 
``` """ - self.github_webhook.send_slack_message( - message=message, webhook_url=self.github_webhook.slack_webhook_url + send_slack_message( + message=message, + webhook_url=self.github_webhook.slack_webhook_url, + logger=self.logger, + log_prefix=self.log_prefix, ) async def run_install_python_module(self, pull_request: PullRequest) -> None: if not self.github_webhook.pypi: return - self.logger.step(f"{self.log_prefix} Starting Python module installation") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} " + f"{format_task_fields('runner', 'ci_check', 'started')} " + f"Starting Python module installation" + ) if await self.check_run_handler.is_check_run_in_progress(check_run=PYTHON_MODULE_INSTALL_STR): self.logger.info(f"{self.log_prefix} Check run is in progress, re-running {PYTHON_MODULE_INSTALL_STR}.") clone_repo_dir = f"{self.github_webhook.clone_repo_dir}-{uuid4()}" self.logger.info(f"{self.log_prefix} Installing python module") - self.logger.step(f"{self.log_prefix} Setting Python module install check status to in-progress") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} " + f"Setting Python module install check status to in-progress", + ) await self.check_run_handler.set_python_module_install_in_progress() - self.logger.step(f"{self.log_prefix} Preparing repository clone for Python module installation") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} " + f"Preparing repository clone for Python module installation", + ) async with self._prepare_cloned_repo_dir( pull_request=pull_request, clone_repo_dir=clone_repo_dir, @@ -410,29 +560,46 @@ async def run_install_python_module(self, pull_request: PullRequest) -> None: "text": None, } if not _res[0]: + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'failed')} " + f"Repository preparation failed for Python module installation", + ) output["text"] = self.check_run_handler.get_check_run_text(out=_res[1], err=_res[2]) return await self.check_run_handler.set_python_module_install_failure(output=output) - self.logger.step(f"{self.log_prefix} Executing Python module installation command") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} " + f"Executing Python module installation command", + ) rc, out, err = await run_command( command=f"uvx pip wheel --no-cache-dir -w {clone_repo_dir}/dist {clone_repo_dir}", log_prefix=self.log_prefix, + mask_sensitive=self.mask_sensitive, ) output["text"] = self.check_run_handler.get_check_run_text(err=err, out=out) if rc: - self.logger.step(f"{self.log_prefix} Python module installation completed successfully") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'completed')} " + f"Python module installation completed successfully", + ) return await self.check_run_handler.set_python_module_install_success(output=output) - self.logger.step(f"{self.log_prefix} Python module installation failed") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} " + f"{format_task_fields('runner', 'ci_check', 'failed')} " + f"Python module installation failed" + ) return await 
self.check_run_handler.set_python_module_install_failure(output=output)

     async def run_conventional_title_check(self, pull_request: PullRequest) -> None:
         if not self.github_webhook.conventional_title:
             return
-        self.logger.step(f"{self.log_prefix} Starting conventional title check")  # type: ignore
+        self.logger.step(  # type: ignore[attr-defined]
+            f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'started')} Starting conventional title check"
+        )

         output: dict[str, str] = {
             "title": "Conventional Title",
@@ -443,17 +610,27 @@ async def run_conventional_title_check(self, pull_request: PullRequest) -> None:
         if await self.check_run_handler.is_check_run_in_progress(check_run=CONVENTIONAL_TITLE_STR):
             self.logger.info(f"{self.log_prefix} Check run is in progress, re-running {CONVENTIONAL_TITLE_STR}.")

-        self.logger.step(f"{self.log_prefix} Setting conventional title check status to in-progress")  # type: ignore
+        self.logger.step(  # type: ignore[attr-defined]
+            f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} "
+            f"Setting conventional title check status to in-progress",
+        )
         await self.check_run_handler.set_conventional_title_in_progress()
         allowed_names = self.github_webhook.conventional_title.split(",")
         title = pull_request.title
         self.logger.debug(f"{self.log_prefix} Conventional title check for title: {title}, allowed: {allowed_names}")
         if any([re.search(rf"{_name}(.*):", title) for _name in allowed_names]):
-            self.logger.step(f"{self.log_prefix} Conventional title check completed successfully")  # type: ignore
+            self.logger.step(  # type: ignore[attr-defined]
+                f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'completed')} "
+                f"Conventional title check completed successfully",
+            )
             await self.check_run_handler.set_conventional_title_success(output=output)
         else:
-            self.logger.step(f"{self.log_prefix} Conventional title check failed")  # type: ignore
+            self.logger.step(  # type: ignore[attr-defined]
+                f"{self.log_prefix} "
+                f"{format_task_fields('runner', 'ci_check', 'failed')} "
+                f"Conventional title check failed"
+            )
             output["summary"] = "Failed"
             output["text"] = f"Pull request title must start with one of the allowed prefixes: {', '.join(f'{name}:' for name in allowed_names)}"
             await self.check_run_handler.set_conventional_title_failure(output=output)

@@ -463,34 +640,50 @@ async def is_branch_exists(self, branch: str) -> Branch:

     async def cherry_pick(self, pull_request: PullRequest, target_branch: str, reviewed_user: str = "") -> None:
         requested_by = reviewed_user or "by target-branch label"
-        self.logger.step(f"{self.log_prefix} Starting cherry-pick process to {target_branch}")  # type: ignore
+        self.logger.step(  # type: ignore[attr-defined]
+            f"{self.log_prefix} "
+            f"{format_task_fields('runner', 'ci_check', 'started')} "
+            f"Starting cherry-pick process to {target_branch}"
+        )
         self.logger.info(f"{self.log_prefix} Cherry-pick requested by user: {requested_by}")

         new_branch_name = f"{CHERRY_PICKED_LABEL_PREFIX}-{pull_request.head.ref}-{shortuuid.uuid()[:5]}"
         if not await self.is_branch_exists(branch=target_branch):
             err_msg = f"cherry-pick failed: {target_branch} does not exist"
-            self.logger.step(f"{self.log_prefix} Cherry-pick failed: target branch does not exist")  # type: ignore
+            self.logger.step(  # type: ignore[attr-defined]
+                f"{self.log_prefix} "
+                f"{format_task_fields('runner', 'ci_check', 'failed')} "
+                f"Cherry-pick failed: target branch does not exist"
+            )
             self.logger.error(err_msg)
             await asyncio.to_thread(pull_request.create_issue_comment, err_msg)

         else:
-
self.logger.step(f"{self.log_prefix} Setting cherry-pick check status to in-progress") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} " + f"{format_task_fields('runner', 'ci_check', 'processing')} " + f"Setting cherry-pick check status to in-progress" + ) await self.check_run_handler.set_cherry_pick_in_progress() commit_hash = pull_request.merge_commit_sha commit_msg_striped = pull_request.title.replace("'", "") pull_request_url = pull_request.html_url clone_repo_dir = f"{self.github_webhook.clone_repo_dir}-{uuid4()}" git_cmd = f"git --work-tree={clone_repo_dir} --git-dir={clone_repo_dir}/.git" - hub_cmd = f"GITHUB_TOKEN={self.github_webhook.token} hub --work-tree={clone_repo_dir} --git-dir={clone_repo_dir}/.git" + github_token = self.github_webhook.token + hub_cmd = f"GITHUB_TOKEN={github_token} hub --work-tree={clone_repo_dir} --git-dir={clone_repo_dir}/.git" commands: list[str] = [ f"{git_cmd} checkout {target_branch}", f"{git_cmd} pull origin {target_branch}", f"{git_cmd} checkout -b {new_branch_name} origin/{target_branch}", f"{git_cmd} cherry-pick {commit_hash}", f"{git_cmd} push origin {new_branch_name}", - f"bash -c \"{hub_cmd} pull-request -b {target_branch} -h {new_branch_name} -l {CHERRY_PICKED_LABEL_PREFIX} -m '{CHERRY_PICKED_LABEL_PREFIX}: [{target_branch}] {commit_msg_striped}' -m 'cherry-pick {pull_request_url} into {target_branch}' -m 'requested-by {requested_by}'\"", + f'bash -c "{hub_cmd} pull-request -b {target_branch} ' + f"-h {new_branch_name} -l {CHERRY_PICKED_LABEL_PREFIX} " + f"-m '{CHERRY_PICKED_LABEL_PREFIX}: [{target_branch}] " + f"{commit_msg_striped}' -m 'cherry-pick {pull_request_url} " + f"into {target_branch}' -m 'requested-by {requested_by}'\"", ] - self.logger.debug(f"{self.log_prefix} Cherry pick commands to run: {commands}") rc, out, err = None, "", "" async with self._prepare_cloned_repo_dir(pull_request=pull_request, clone_repo_dir=clone_repo_dir) as _res: @@ -500,17 +693,36 @@ async def cherry_pick(self, pull_request: PullRequest, target_branch: str, revie "text": None, } if not _res[0]: + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'failed')} " + f"Repository preparation failed for cherry-pick", + ) output["text"] = self.check_run_handler.get_check_run_text(out=_res[1], err=_res[2]) await self.check_run_handler.set_cherry_pick_failure(output=output) - self.logger.step(f"{self.log_prefix} Executing cherry-pick commands") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} " + f"{format_task_fields('runner', 'ci_check', 'processing')} " + f"Executing cherry-pick commands" + ) for cmd in commands: - rc, out, err = await run_command(command=cmd, log_prefix=self.log_prefix) + rc, out, err = await run_command( + command=cmd, + log_prefix=self.log_prefix, + redact_secrets=[github_token], + mask_sensitive=self.mask_sensitive, + ) if not rc: - self.logger.step(f"{self.log_prefix} Cherry-pick command failed") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} " + f"{format_task_fields('runner', 'ci_check', 'failed')} " + f"Cherry-pick command failed" + ) output["text"] = self.check_run_handler.get_check_run_text(err=err, out=out) await self.check_run_handler.set_cherry_pick_failure(output=output) - self.logger.error(f"{self.log_prefix} Cherry pick failed: {out} --- {err}") + redacted_out = _redact_secrets(out, [github_token], mask_sensitive=self.mask_sensitive) + redacted_err = 
_redact_secrets(err, [github_token], mask_sensitive=self.mask_sensitive) + self.logger.error(f"{self.log_prefix} Cherry pick failed: {redacted_out} --- {redacted_err}") local_branch_name = f"{pull_request.head.ref}-{target_branch}" await asyncio.to_thread( pull_request.create_issue_comment, @@ -530,7 +742,10 @@ async def cherry_pick(self, pull_request: PullRequest, target_branch: str, revie output["text"] = self.check_run_handler.get_check_run_text(err=err, out=out) - self.logger.step(f"{self.log_prefix} Cherry-pick completed successfully") # type: ignore + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'completed')} " + f"Cherry-pick completed successfully", + ) await self.check_run_handler.set_cherry_pick_success(output=output) await asyncio.to_thread( pull_request.create_issue_comment, f"Cherry-picked PR {pull_request.title} into {target_branch}" diff --git a/webhook_server/libs/log_parser.py b/webhook_server/libs/log_parser.py index 21c2b1f3f..b05610d24 100644 --- a/webhook_server/libs/log_parser.py +++ b/webhook_server/libs/log_parser.py @@ -3,9 +3,10 @@ import asyncio import datetime import re +from collections.abc import AsyncGenerator from dataclasses import dataclass from pathlib import Path -from typing import Any, AsyncGenerator +from typing import Any from simple_logger.logger import get_logger @@ -23,6 +24,10 @@ class LogEntry: repository: str | None = None pr_number: int | None = None github_user: str | None = None + task_id: str | None = None + task_type: str | None = None + task_status: str | None = None + token_spend: int | None = None def to_dict(self) -> dict[str, Any]: """Convert LogEntry to dictionary for JSON serialization.""" @@ -36,6 +41,10 @@ def to_dict(self) -> dict[str, Any]: "repository": self.repository, "pr_number": self.pr_number, "github_user": self.github_user, + "task_id": self.task_id, + "task_type": self.task_type, + "task_status": self.task_status, + "token_spend": self.token_spend, } @@ -57,30 +66,51 @@ def __init__(self) -> None: # With PR: "{colored_repo} [{event}][{delivery_id}][{user}][PR {number}]: {message}" # Without PR: "{colored_repo} [{event}][{delivery_id}][{user}]: {message}" # Full log format: "timestamp logger level colored_repo [event][delivery_id][user][PR number]: message" - # Example: "2025-07-31T10:30:00.123000 GithubWebhook INFO repo-name [pull_request][abc123][user][PR 123]: Processing webhook" + # Example: "2025-07-31T10:30:00.123000 GithubWebhook INFO repo-name + # [pull_request][abc123][user][PR 123]: Processing webhook" + # Supports: + # - Optional fractional seconds + # - Optional timezone (Z or ±HH:MM format, e.g., +00:00, -05:00) + # - Flexible whitespace between fields + # - Logger names with dots/hyphens LOG_PATTERN = re.compile( - r"^(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+) (\w+) (?:\x1b\[[\d;]*m)?(\w+)(?:\x1b\[[\d;]*m)? (.+)$" + r"^(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(?:\.\d+)?(?:Z|[+-]\d{2}:\d{2})?)\s+" + r"([\w.-]+)\s+(?:\x1b\[[\d;]*m)?([\w.-]+)(?:\x1b\[[\d;]*m)?\s+(.+)$" ) # Pattern to extract GitHub context from prepare_log_prefix format # Matches: colored_repo [event][delivery_id][user][PR number]: message GITHUB_CONTEXT_PATTERN = re.compile( - r"(?:\x1b\[[0-9;]*m)?([^\x1b\[\s]+)(?:\x1b\[[0-9;]*m)? \[([^\]]+)\]\[([^\]]+)\]\[([^\]]+)\](?:\[PR (\d+)\])?: (.+)" + r"(?:\x1b\[[0-9;]*m)?([^\x1b\[\s]+)(?:\x1b\[[0-9;]*m)? 
" + r"\[([^\]]+)\]\[([^\]]+)\]\[([^\]]+)\](?:\[PR (\d+)\])?: (.+)" ) ANSI_ESCAPE_PATTERN = re.compile(r"\x1b\[[0-9;]*m") + # Precompiled patterns for task field extraction (performance optimization) + # Handle escaped brackets: (?:\\.|[^\]])+ matches escaped chars or non-] chars + TASK_ID_PATTERN = re.compile(r"\[task_id=((?:\\.|[^\]])+)\]") + TASK_TYPE_PATTERN = re.compile(r"\[task_type=((?:\\.|[^\]])+)\]") + TASK_STATUS_PATTERN = re.compile(r"\[task_status=((?:\\.|[^\]])+)\]") + # Pattern for token spend: handles both original and masked formats + # Original: "Token spend: 35 API calls" + # Masked: "token ***** 35 API calls" (when "token" is redacted by secret masking) + TOKEN_SPEND_PATTERN = re.compile(r"(?:Token spend|token\s+\*+)\s*:?\s*(\d+)\s+API calls") + def is_workflow_step(self, entry: LogEntry) -> bool: """ - Check if a log entry is a workflow step (logger.step call). + Check if a log entry is a workflow milestone step. + + Only entries with task_id AND task_status are considered workflow milestones. + This filters out internal/initialization steps and only shows meaningful business events. Args: entry: LogEntry to check Returns: - True if this is a workflow step entry + True if this is a workflow milestone entry (has task_id and task_status) """ - return entry.level.upper() == "STEP" + return bool(entry.task_id and entry.task_status) def extract_workflow_steps(self, entries: list[LogEntry], hook_id: str) -> list[LogEntry]: """ @@ -116,24 +146,39 @@ def parse_log_entry(self, log_line: str) -> LogEntry | None: timestamp_str, logger_name, level, message = match.groups() # Parse ISO timestamp format: "2025-07-31T10:30:00.123000" + # Handle 'Z' timezone suffix which fromisoformat doesn't accept try: - timestamp = datetime.datetime.fromisoformat(timestamp_str) + normalized_timestamp = timestamp_str.replace("Z", "+00:00") + timestamp = datetime.datetime.fromisoformat(normalized_timestamp) + # Ensure timestamp is timezone-aware (assume UTC if naive) + if timestamp.tzinfo is None: + timestamp = timestamp.replace(tzinfo=datetime.UTC) except ValueError: return None # Extract GitHub webhook context from prepare_log_prefix format repository, event_type, hook_id, github_user, pr_number, cleaned_message = self._extract_github_context(message) + # Extract task correlation fields from message and strip them from the message + task_id, task_type, task_status, final_message = self._extract_task_fields(cleaned_message) + + # Extract token spend from message + token_spend = self.extract_token_spend(final_message) + return LogEntry( timestamp=timestamp, level=level, logger_name=logger_name, - message=cleaned_message, + message=final_message, hook_id=hook_id, event_type=event_type, repository=repository, pr_number=pr_number, github_user=github_user, + task_id=task_id, + task_type=task_type, + task_status=task_status, + token_spend=token_spend, ) def _extract_github_context( @@ -170,6 +215,69 @@ def _extract_github_context( cleaned_message = self.ANSI_ESCAPE_PATTERN.sub("", message) return None, None, None, None, None, cleaned_message + def _extract_task_fields(self, message: str) -> tuple[str | None, str | None, str | None, str]: + """Extract task correlation fields from log message. + + Extracts task_id, task_type, and task_status from patterns like: + [task_id=check_tox] [task_type=ci_check] [task_status=started] + + The task tokens are removed from the returned message to avoid duplication + and improve free-text search, as these values are stored in dedicated fields. 
+ + Args: + message: Log message to extract from + + Returns: + Tuple of (task_id, task_type, task_status, cleaned_message) + """ + task_id = None + task_type = None + task_status = None + cleaned_message = message + + # Extract task_id using precompiled pattern + if task_id_match := self.TASK_ID_PATTERN.search(cleaned_message): + task_id = self._unescape_task_value(task_id_match.group(1)) + cleaned_message = self.TASK_ID_PATTERN.sub("", cleaned_message, count=1).strip() + + # Extract task_type using precompiled pattern + if task_type_match := self.TASK_TYPE_PATTERN.search(cleaned_message): + task_type = self._unescape_task_value(task_type_match.group(1)) + cleaned_message = self.TASK_TYPE_PATTERN.sub("", cleaned_message, count=1).strip() + + # Extract task_status using precompiled pattern + if task_status_match := self.TASK_STATUS_PATTERN.search(cleaned_message): + task_status = self._unescape_task_value(task_status_match.group(1)) + cleaned_message = self.TASK_STATUS_PATTERN.sub("", cleaned_message, count=1).strip() + + return task_id, task_type, task_status, cleaned_message + + @staticmethod + def _unescape_task_value(value: str) -> str: + """Unescape brackets in task field values.""" + return value.replace("\\]", "]").replace("\\[", "[") + + def extract_token_spend(self, message: str) -> int | None: + """Extract token spend from log message. + + Parses messages like: + - "Token spend: 35 API calls (initial: 2831, final: 2796, remaining: 2796)" + - "token ***** 35 API calls (initial: 2831, final: 2796, remaining: 2796)" (when masked) + + Args: + message: Log message to extract from + + Returns: + Token spend as integer, or None if not found + """ + match = self.TOKEN_SPEND_PATTERN.search(message) + if match: + try: + return int(match.group(1)) + except ValueError: + return None + return None + def parse_log_file(self, file_path: Path) -> list[LogEntry]: """ Parse an entire log file and return list of LogEntry objects. 
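The task-correlation and token-spend patterns added in this hunk can be exercised standalone. A minimal sketch (the compiled patterns are copied from the diff; the sample message and variable names are illustrative only, not part of the patch):

import re

# Escaped-bracket handling: (?:\\.|[^\]])+ accepts "\]" or "\[" inside a value.
TASK_ID_PATTERN = re.compile(r"\[task_id=((?:\\.|[^\]])+)\]")
TASK_STATUS_PATTERN = re.compile(r"\[task_status=((?:\\.|[^\]])+)\]")
# Accepts both "Token spend: 35 API calls" and the masked "token ***** 35 API calls".
TOKEN_SPEND_PATTERN = re.compile(r"(?:Token spend|token\s+\*+)\s*:?\s*(\d+)\s+API calls")

msg = "[task_id=check_tox] [task_status=started] Token spend: 35 API calls (initial: 2831)"
task_id = m.group(1) if (m := TASK_ID_PATTERN.search(msg)) else None  # "check_tox"
task_status = m.group(1) if (m := TASK_STATUS_PATTERN.search(msg)) else None  # "started"
token_spend = int(m.group(1)) if (m := TOKEN_SPEND_PATTERN.search(msg)) else None  # 35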
@@ -185,8 +293,8 @@ def parse_log_file(self, file_path: Path) -> list[LogEntry]: failed_lines = 0 try: - with open(file_path, "r", encoding="utf-8") as f: - for line_num, line in enumerate(f, 1): + with open(file_path, encoding="utf-8") as f: + for _line_num, line in enumerate(f, 1): total_lines += 1 entry = self.parse_log_entry(line) if entry: @@ -216,7 +324,7 @@ async def tail_log_file(self, file_path: Path, follow: bool = True) -> AsyncGene if not file_path.exists(): return - with open(file_path, "r", encoding="utf-8") as f: + with open(file_path, encoding="utf-8") as f: # Move to end of file f.seek(0, 2) @@ -249,7 +357,7 @@ async def monitor_log_directory(self, log_dir: Path, pattern: str = "*.log") -> # Find all existing log files including rotated ones log_files: list[Path] = [] - log_files.extend(log_dir.glob("*.log")) + log_files.extend(log_dir.glob(pattern)) # Only monitor current log file, not rotated ones for real-time current_log_files = [ f for f in log_files if not any(f.name.endswith(ext) for ext in [".1", ".2", ".3", ".4", ".5"]) diff --git a/webhook_server/libs/push_handler.py b/webhook_server/libs/push_handler.py deleted file mode 100644 index 24f544905..000000000 --- a/webhook_server/libs/push_handler.py +++ /dev/null @@ -1,105 +0,0 @@ -import re -from typing import TYPE_CHECKING -from uuid import uuid4 - -from github.Repository import Repository - -from webhook_server.libs.check_run_handler import CheckRunHandler -from webhook_server.libs.runner_handler import RunnerHandler -from webhook_server.utils.helpers import run_command - -if TYPE_CHECKING: - from webhook_server.libs.github_api import GithubWebhook - - -class PushHandler: - def __init__(self, github_webhook: "GithubWebhook"): - self.github_webhook = github_webhook - - self.hook_data = self.github_webhook.hook_data - self.logger = self.github_webhook.logger - self.log_prefix: str = self.github_webhook.log_prefix - self.repository: Repository = self.github_webhook.repository - self.check_run_handler = CheckRunHandler(github_webhook=self.github_webhook) - self.runner_handler = RunnerHandler(github_webhook=self.github_webhook) - - async def process_push_webhook_data(self) -> None: - self.logger.step(f"{self.log_prefix} Starting push webhook processing") # type: ignore - tag = re.search(r"refs/tags/?(.*)", self.hook_data["ref"]) - if tag: - tag_name = tag.group(1) - self.logger.step(f"{self.log_prefix} Processing tag push: {tag_name}") # type: ignore - self.logger.info(f"{self.log_prefix} Processing push for tag: {tag.group(1)}") - self.logger.debug(f"{self.log_prefix} Tag: {tag_name}") - if self.github_webhook.pypi: - self.logger.step(f"{self.log_prefix} Starting PyPI upload for tag: {tag_name}") # type: ignore - self.logger.info(f"{self.log_prefix} Processing upload to pypi for tag: {tag_name}") - await self.upload_to_pypi(tag_name=tag_name) - - if self.github_webhook.build_and_push_container and self.github_webhook.container_release: - self.logger.step(f"{self.log_prefix} Starting container build and push for tag: {tag_name}") # type: ignore - self.logger.info(f"{self.log_prefix} Processing build and push container for tag: {tag_name}") - await self.runner_handler.run_build_container(push=True, set_check=False, tag=tag_name) - else: - self.logger.step(f"{self.log_prefix} Non-tag push detected, skipping processing") # type: ignore - - async def upload_to_pypi(self, tag_name: str) -> None: - def _issue_on_error(_error: str) -> None: - self.repository.create_issue( - title=_error, - body=f""" -Publish to PYPI failed: 
`{_error}` -""", - ) - - self.logger.step(f"{self.log_prefix} Starting PyPI upload process for tag: {tag_name}") # type: ignore - clone_repo_dir = f"{self.github_webhook.clone_repo_dir}-{uuid4()}" - uv_cmd_dir = f"--directory {clone_repo_dir}" - self.logger.info(f"{self.log_prefix} Start uploading to pypi") - self.logger.debug(f"{self.log_prefix} Clone repo dir: {clone_repo_dir}") - _dist_dir: str = f"{clone_repo_dir}/pypi-dist" - - async with self.runner_handler._prepare_cloned_repo_dir( - checkout=tag_name, clone_repo_dir=clone_repo_dir - ) as _res: - if not _res[0]: - _error = self.check_run_handler.get_check_run_text(out=_res[1], err=_res[2]) - return _issue_on_error(_error=_error) - - rc, out, err = await run_command( - command=f"uv {uv_cmd_dir} build --sdist --out-dir {_dist_dir}", log_prefix=self.log_prefix - ) - if not rc: - _error = self.check_run_handler.get_check_run_text(out=out, err=err) - return _issue_on_error(_error=_error) - - rc, tar_gz_file, err = await run_command(command=f"ls {_dist_dir}", log_prefix=self.log_prefix) - if not rc: - _error = self.check_run_handler.get_check_run_text(out=tar_gz_file, err=err) - return _issue_on_error(_error=_error) - - tar_gz_file = tar_gz_file.strip() - - commands: list[str] = [ - f"uvx {uv_cmd_dir} twine check {_dist_dir}/{tar_gz_file}", - f"uvx {uv_cmd_dir} twine upload --username __token__ --password {self.github_webhook.pypi['token']} {_dist_dir}/{tar_gz_file} --skip-existing", - ] - self.logger.debug(f"Commands to run: {commands}") - - for cmd in commands: - rc, out, err = await run_command(command=cmd, log_prefix=self.log_prefix) - if not rc: - _error = self.check_run_handler.get_check_run_text(out=out, err=err) - return _issue_on_error(_error=_error) - - self.logger.step(f"{self.log_prefix} PyPI upload completed successfully for tag: {tag_name}") # type: ignore - self.logger.info(f"{self.log_prefix} Publish to pypi finished") - if self.github_webhook.slack_webhook_url: - message: str = f""" -``` -{self.github_webhook.repository_name} Version {tag_name} published to PYPI. 
-``` -""" - self.github_webhook.send_slack_message( - message=message, webhook_url=self.github_webhook.slack_webhook_url - ) diff --git a/webhook_server/tests/conftest.py b/webhook_server/tests/conftest.py index d81b3cea9..f2d30dc46 100644 --- a/webhook_server/tests/conftest.py +++ b/webhook_server/tests/conftest.py @@ -1,10 +1,14 @@ +import logging as python_logging import os +from dataclasses import dataclass +from datetime import datetime, timedelta import pytest import yaml from starlette.datastructures import Headers -from webhook_server.libs.owners_files_handler import OwnersFileHandler +from webhook_server.libs.handlers.owners_files_handler import OwnersFileHandler +from webhook_server.libs.log_parser import LogEntry os.environ["WEBHOOK_SERVER_DATA_DIR"] = "webhook_server/tests/manifests" os.environ["ENABLE_LOG_SERVER"] = "true" @@ -87,9 +91,9 @@ def get_contents(self, path: str, ref: str): return ContentFile(folder5_owners_data) +@dataclass class Label: - def __init__(self, name: str): - self.name = name + name: str class PullRequest: @@ -123,8 +127,6 @@ def github_webhook(mocker, request): mocker.patch(f"{base_import_path}.GithubWebhook.add_api_users_to_auto_verified_and_merged_users", return_value=None) # Use standard Python logger for caplog compatibility - import logging as python_logging - test_logger = python_logging.getLogger("GithubWebhook") test_logger.setLevel(python_logging.DEBUG) @@ -154,10 +156,6 @@ def owners_file_handler(github_webhook): @pytest.fixture def sample_log_entries(): """Pre-generated sample log entries for performance tests.""" - from datetime import datetime, timedelta - - from webhook_server.libs.log_parser import LogEntry - entries = [] base_time = datetime(2025, 7, 31, 10, 0, 0) @@ -182,7 +180,6 @@ def sample_log_entries(): @pytest.fixture(autouse=True) def optimize_test_environment(): """Auto-applied fixture to optimize test environment.""" - import logging as python_logging # Disable unnecessary logging during tests python_logging.getLogger("httpx").setLevel(python_logging.WARNING) diff --git a/webhook_server/tests/test_add_reviewer_action.py b/webhook_server/tests/test_add_reviewer_action.py index 6ee951203..7a0e033b5 100644 --- a/webhook_server/tests/test_add_reviewer_action.py +++ b/webhook_server/tests/test_add_reviewer_action.py @@ -2,7 +2,7 @@ import pytest -from webhook_server.libs.issue_comment_handler import IssueCommentHandler +from webhook_server.libs.handlers.issue_comment_handler import IssueCommentHandler class User: diff --git a/webhook_server/tests/test_app.py b/webhook_server/tests/test_app.py index 0ca18d036..bb8f7f9f6 100644 --- a/webhook_server/tests/test_app.py +++ b/webhook_server/tests/test_app.py @@ -10,6 +10,7 @@ import pytest from fastapi.testclient import TestClient +from webhook_server import app as app_module from webhook_server.app import FASTAPI_APP from webhook_server.libs.exceptions import RepositoryNotFoundInConfigError from webhook_server.utils.app_utils import ( @@ -120,14 +121,19 @@ def test_process_webhook_missing_repository(self, client: TestClient, webhook_se response = client.post("/webhook_server", content=payload_json, headers=headers) assert response.status_code == 400 - assert "Missing repository information" in response.json()["detail"] + assert "Missing repository in payload" in response.json()["detail"] @patch.dict(os.environ, {"WEBHOOK_SERVER_DATA_DIR": "webhook_server/tests/manifests"}) @patch("webhook_server.app.GithubWebhook") def test_process_webhook_repository_not_found( self, 
mock_github_webhook: Mock, client: TestClient, valid_webhook_payload: dict[str, Any], webhook_secret: str ) -> None: - """Test webhook processing when repository is not found in config.""" + """Test webhook processing when repository is not found in config. + + Note: RepositoryNotFoundInConfigError is now handled in background task, + so the HTTP response is 200 OK. The error is logged but doesn't affect + the webhook response to prevent GitHub webhook timeouts. + """ # Mock GithubWebhook to raise RepositoryNotFoundError mock_github_webhook.side_effect = RepositoryNotFoundInConfigError("Repository not found in configuration") @@ -143,8 +149,9 @@ def test_process_webhook_repository_not_found( response = client.post("/webhook_server", content=payload_json, headers=headers) - assert response.status_code == 404 - assert "Repository not found in configuration" in response.json()["detail"] + # Returns 200 OK immediately - error is handled in background + assert response.status_code == 200 + assert response.json()["message"] == "Webhook queued for processing" @patch.dict(os.environ, {"WEBHOOK_SERVER_DATA_DIR": "webhook_server/tests/manifests"}) def test_process_webhook_signature_verification_failure( @@ -171,7 +178,12 @@ def test_process_webhook_signature_verification_failure( def test_process_webhook_connection_error( self, mock_github_webhook: Mock, client: TestClient, valid_webhook_payload: dict[str, Any], webhook_secret: str ) -> None: - """Test webhook processing when connection error occurs.""" + """Test webhook processing when connection error occurs. + + Note: Connection errors are now handled in background task, + so the HTTP response is 200 OK. The error is logged but doesn't affect + the webhook response to prevent GitHub webhook timeouts. + """ mock_github_webhook.side_effect = ConnectionError("API connection failed") payload_json = json.dumps(valid_webhook_payload) @@ -186,15 +198,21 @@ def test_process_webhook_connection_error( response = client.post("/webhook_server", content=payload_json, headers=headers) - assert response.status_code == 503 - assert "API Connection Error" in response.json()["detail"] + # Returns 200 OK immediately - error is handled in background + assert response.status_code == 200 + assert response.json()["message"] == "Webhook queued for processing" @patch.dict(os.environ, {"WEBHOOK_SERVER_DATA_DIR": "webhook_server/tests/manifests"}) @patch("webhook_server.app.GithubWebhook") def test_process_webhook_unexpected_error( self, mock_github_webhook: Mock, client: TestClient, valid_webhook_payload: dict[str, Any], webhook_secret: str ) -> None: - """Test webhook processing when unexpected error occurs.""" + """Test webhook processing when unexpected error occurs. + + Note: Unexpected errors are now handled in background task, + so the HTTP response is 200 OK. The error is logged but doesn't affect + the webhook response to prevent GitHub webhook timeouts. 
+ """ mock_github_webhook.side_effect = Exception("Unexpected error") payload_json = json.dumps(valid_webhook_payload) @@ -209,8 +227,9 @@ def test_process_webhook_unexpected_error( response = client.post("/webhook_server", content=payload_json, headers=headers) - assert response.status_code == 500 - assert "Internal Server Error" in response.json()["detail"] + # Returns 200 OK immediately - error is handled in background + assert response.status_code == 200 + assert response.json()["message"] == "Webhook queued for processing" @patch("webhook_server.app.get_github_allowlist") @patch("webhook_server.app.get_cloudflare_allowlist") @@ -236,8 +255,6 @@ async def test_get_github_allowlist_success(self, mock_get: Mock) -> None: mock_response.json.return_value = {"hooks": ["192.30.252.0/22", "185.199.108.0/22"]} mock_response.raise_for_status.return_value = None # Use AsyncMock for the client - from unittest.mock import AsyncMock - async_client = AsyncMock() async_client.get.return_value = mock_response @@ -248,8 +265,6 @@ async def test_get_github_allowlist_success(self, mock_get: Mock) -> None: @patch("httpx.AsyncClient.get") async def test_get_github_allowlist_error(self, mock_get: Mock) -> None: """Test GitHub allowlist fetching with error.""" - from unittest.mock import AsyncMock - async_client = AsyncMock() async_client.get.side_effect = httpx.RequestError("Network error") @@ -264,8 +279,6 @@ async def test_get_cloudflare_allowlist_success(self, mock_get: Mock) -> None: "result": {"ipv4_cidrs": ["103.21.244.0/22"], "ipv6_cidrs": ["2400:cb00::/32"]} } mock_response.raise_for_status.return_value = None - from unittest.mock import AsyncMock - async_client = AsyncMock() async_client.get.return_value = mock_response @@ -384,18 +397,14 @@ def test_process_webhook_no_webhook_secret( @patch("httpx.AsyncClient.get") async def test_get_github_allowlist_unexpected_error(self, mock_get: Mock) -> None: """Test GitHub allowlist fetching with unexpected error.""" - from unittest.mock import AsyncMock - async_client = AsyncMock() async_client.get.side_effect = Exception("Unexpected error") - with pytest.raises(Exception): + with pytest.raises(Exception, match="Unexpected error"): await get_github_allowlist(async_client) async def test_get_cloudflare_allowlist_request_error(self) -> None: """Test Cloudflare allowlist fetching with request error.""" - from unittest.mock import AsyncMock - async_client = AsyncMock() async_client.get.side_effect = httpx.RequestError("Network error") @@ -405,21 +414,15 @@ async def test_get_cloudflare_allowlist_request_error(self) -> None: @patch("httpx.AsyncClient.get") async def test_get_cloudflare_allowlist_unexpected_error(self, mock_get: Mock) -> None: """Test Cloudflare allowlist fetching with unexpected error.""" - from unittest.mock import AsyncMock - async_client = AsyncMock() async_client.get.side_effect = Exception("Unexpected error") - with pytest.raises(Exception): + with pytest.raises(Exception, match="Unexpected error"): await get_cloudflare_allowlist(async_client) @patch("httpx.AsyncClient.get") async def test_get_cloudflare_allowlist_http_error(self, mock_get: Mock) -> None: """Test Cloudflare allowlist fetching with HTTP error.""" - from unittest.mock import AsyncMock - - import httpx - async_client = AsyncMock() mock_response = Mock() req = httpx.Request("GET", "https://api.cloudflare.com/client/v4/ips") @@ -434,10 +437,6 @@ async def test_get_cloudflare_allowlist_http_error(self, mock_get: Mock) -> None @patch("httpx.AsyncClient.get") async def 
test_get_github_allowlist_http_error(self, mock_get: Mock) -> None: """Test GitHub allowlist fetching with HTTP error.""" - from unittest.mock import AsyncMock - - import httpx - async_client = AsyncMock() mock_response = Mock() req = httpx.Request("GET", "https://api.github.com/meta") @@ -457,10 +456,6 @@ async def test_lifespan_success( self, mock_urllib3: Mock, mock_config: Mock, mock_cf_allowlist: Mock, mock_gh_allowlist: Mock ) -> None: """Test successful lifespan function execution.""" - from unittest.mock import AsyncMock - from unittest.mock import patch as patcher - - from webhook_server import app as app_module # Mock config mock_config_instance = Mock() @@ -475,7 +470,7 @@ async def test_lifespan_success( mock_cf_allowlist.return_value = ["103.21.244.0/22"] # Mock HTTP client mock_client = AsyncMock() - with patcher("httpx.AsyncClient", return_value=mock_client): + with patch("httpx.AsyncClient", return_value=mock_client): async with app_module.lifespan(FASTAPI_APP): pass mock_client.aclose.assert_called_once() @@ -488,7 +483,6 @@ async def test_lifespan_with_ssl_warnings_disabled( self, mock_urllib3: Mock, mock_config: Mock, mock_cf_allowlist: Mock, mock_gh_allowlist: Mock ) -> None: """Test lifespan function with SSL warnings disabled.""" - from webhook_server import app as app_module # Mock config with SSL warnings disabled mock_config_instance = Mock() @@ -515,8 +509,12 @@ async def test_lifespan_with_ssl_warnings_disabled( async def test_lifespan_with_invalid_cidr( self, mock_config: Mock, mock_cf_allowlist: Mock, mock_gh_allowlist: Mock ) -> None: - """Test lifespan function with invalid CIDR addresses.""" - from webhook_server import app as app_module + """Test lifespan function with invalid CIDR addresses. + + Note: Invalid CIDR addresses are filtered out, so if IP verification + is enabled but no valid networks are loaded, the server will fail-close + with RuntimeError for security. + """ # Mock config mock_config_instance = Mock() @@ -527,7 +525,7 @@ async def test_lifespan_with_invalid_cidr( } mock_config.return_value = mock_config_instance - # Mock allowlist responses with invalid CIDR + # Mock allowlist responses with invalid CIDR (will be filtered out) mock_gh_allowlist.return_value = ["invalid-cidr"] mock_cf_allowlist.return_value = ["also-invalid"] @@ -535,10 +533,10 @@ async def test_lifespan_with_invalid_cidr( mock_client = AsyncMock() with patch.object(app_module, "_lifespan_http_client", mock_client): - async with app_module.lifespan(FASTAPI_APP): - pass - - # Should handle invalid CIDR gracefully + # Should raise RuntimeError because IP verification is enabled but no valid networks loaded + with pytest.raises(RuntimeError, match="IP verification enabled but no allowlist loaded"): + async with app_module.lifespan(FASTAPI_APP): + pass @patch("webhook_server.app.get_github_allowlist") @patch("webhook_server.app.get_cloudflare_allowlist") @@ -546,8 +544,12 @@ async def test_lifespan_with_invalid_cidr( async def test_lifespan_with_allowlist_errors( self, mock_config: Mock, mock_cf_allowlist: Mock, mock_gh_allowlist: Mock ) -> None: - """Test lifespan function when allowlist fetching fails.""" - from webhook_server import app as app_module + """Test lifespan function when allowlist fetching fails. + + Note: If IP verification is enabled but allowlist fetching fails, + the server will fail-close with RuntimeError for security (fail-close + behavior prevents insecure state). 
+ """ # Mock config mock_config_instance = Mock() @@ -563,15 +565,13 @@ async def test_lifespan_with_allowlist_errors( # Mock HTTP client mock_client = AsyncMock() with patch.object(app_module, "_lifespan_http_client", mock_client): - # Should not raise, just log warnings - async with app_module.lifespan(FASTAPI_APP): - pass - # Should handle both allowlist failures gracefully - # (You could add log assertion here if desired) + # Should raise RuntimeError because IP verification is enabled but no networks loaded + with pytest.raises(RuntimeError, match="IP verification enabled but no allowlist loaded"): + async with app_module.lifespan(FASTAPI_APP): + pass def test_static_files_path_construction(self) -> None: """Test that the static files path is constructed correctly.""" - from webhook_server import app as app_module # The static_files_path should point to webhook_server/web/static expected_suffix = os.path.join("webhook_server", "web", "static") @@ -593,7 +593,6 @@ def test_static_files_path_construction(self) -> None: @patch("webhook_server.app.os.path.isdir") def test_static_files_validation_logic(self, mock_isdir: Mock, mock_exists: Mock) -> None: """Test static files validation logic without lifespan.""" - from webhook_server import app as app_module # Test case 1: Directory exists and is valid mock_exists.return_value = True diff --git a/webhook_server/tests/test_app_utils.py b/webhook_server/tests/test_app_utils.py new file mode 100644 index 000000000..7a169d594 --- /dev/null +++ b/webhook_server/tests/test_app_utils.py @@ -0,0 +1,92 @@ +"""Tests for webhook_server.utils.app_utils module.""" + +import datetime +import hashlib +import hmac + +import pytest +from fastapi import HTTPException + +from webhook_server.utils.app_utils import parse_datetime_string, verify_signature + + +class TestVerifySignature: + """Test suite for verify_signature function.""" + + def test_verify_signature_missing_header(self) -> None: + """Test verify_signature raises HTTPException when signature_header is None.""" + payload_body = b"test payload" + secret_token = "test_secret" # pragma: allowlist secret + + with pytest.raises(HTTPException) as exc_info: + verify_signature(payload_body, secret_token, signature_header=None) + + assert exc_info.value.status_code == 403 + assert "x-hub-signature-256 header is missing" in exc_info.value.detail + + def test_verify_signature_valid(self) -> None: + """Test verify_signature with valid signature.""" + payload_body = b"test payload" + secret_token = "test_secret" # pragma: allowlist secret + + hash_object = hmac.new(secret_token.encode("utf-8"), msg=payload_body, digestmod=hashlib.sha256) + expected_signature = "sha256=" + hash_object.hexdigest() + + # Should not raise exception + verify_signature(payload_body, secret_token, signature_header=expected_signature) + + def test_verify_signature_invalid(self) -> None: + """Test verify_signature with invalid signature.""" + payload_body = b"test payload" + secret_token = "test_secret" # pragma: allowlist secret + invalid_signature = "sha256=invalid_signature" + + with pytest.raises(HTTPException) as exc_info: + verify_signature(payload_body, secret_token, signature_header=invalid_signature) + + assert exc_info.value.status_code == 403 + assert "Request signatures didn't match" in exc_info.value.detail + + +class TestParseDatetimeString: + """Test suite for parse_datetime_string function.""" + + def test_parse_datetime_string_none(self) -> None: + """Test parse_datetime_string with None input.""" + result = 
parse_datetime_string(None, "test_field") + assert result is None + + def test_parse_datetime_string_valid_iso(self) -> None: + """Test parse_datetime_string with valid ISO format.""" + datetime_str = "2024-01-01T12:00:00Z" + result = parse_datetime_string(datetime_str, "test_field") + assert isinstance(result, datetime.datetime) + assert result.year == 2024 + assert result.month == 1 + assert result.day == 1 + + def test_parse_datetime_string_valid_with_timezone(self) -> None: + """Test parse_datetime_string with valid ISO format with timezone.""" + datetime_str = "2024-01-01T12:00:00+00:00" + result = parse_datetime_string(datetime_str, "test_field") + assert isinstance(result, datetime.datetime) + + def test_parse_datetime_string_invalid_format(self) -> None: + """Test parse_datetime_string with invalid format raises HTTPException.""" + datetime_str = "invalid-datetime-format" + field_name = "test_field" + + with pytest.raises(HTTPException) as exc_info: + parse_datetime_string(datetime_str, field_name) + + assert exc_info.value.status_code == 400 + assert f"Invalid {field_name} format" in exc_info.value.detail + assert datetime_str in exc_info.value.detail + assert "Expected ISO 8601 format" in exc_info.value.detail + + def test_parse_datetime_string_empty_string(self) -> None: + """Test parse_datetime_string with empty string returns None.""" + datetime_str = "" + result = parse_datetime_string(datetime_str, "test_field") + # Empty string is falsy, so it returns None (same as None input) + assert result is None diff --git a/webhook_server/tests/test_branch_protection.py b/webhook_server/tests/test_branch_protection.py index f88de8b1f..a1ec77e06 100644 --- a/webhook_server/tests/test_branch_protection.py +++ b/webhook_server/tests/test_branch_protection.py @@ -2,6 +2,7 @@ from typing import Any import pytest + from webhook_server.libs.config import Config from webhook_server.utils.github_repository_settings import ( DEFAULT_BRANCH_PROTECTION, diff --git a/webhook_server/tests/test_check_run_handler.py b/webhook_server/tests/test_check_run_handler.py index aa485f697..2401f56ad 100644 --- a/webhook_server/tests/test_check_run_handler.py +++ b/webhook_server/tests/test_check_run_handler.py @@ -2,7 +2,7 @@ import pytest -from webhook_server.libs.check_run_handler import CheckRunHandler +from webhook_server.libs.handlers.check_run_handler import CheckRunHandler from webhook_server.utils.constants import ( BUILD_CONTAINER_STR, CAN_BE_MERGED_STR, @@ -74,6 +74,8 @@ async def test_process_pull_request_check_run_webhook_data_not_completed( result = await check_run_handler.process_pull_request_check_run_webhook_data() assert result is False + # Verify completion log was called (skipping is acceptable) + assert check_run_handler.logger.step.called # type: ignore[attr-defined] @pytest.mark.asyncio async def test_process_pull_request_check_run_webhook_data_can_be_merged( @@ -87,6 +89,23 @@ async def test_process_pull_request_check_run_webhook_data_can_be_merged( result = await check_run_handler.process_pull_request_check_run_webhook_data() assert result is False + # Verify completion log was called + assert check_run_handler.logger.step.called # type: ignore[attr-defined] + + @pytest.mark.asyncio + async def test_process_pull_request_check_run_webhook_data_completed_normal( + self, check_run_handler: CheckRunHandler + ) -> None: + """Test processing check run webhook data when action is completed (normal check run).""" + check_run_handler.hook_data = { + "action": "completed", + "check_run": 
{"name": "test-check", "status": "completed", "conclusion": "success"}, + } + + result = await check_run_handler.process_pull_request_check_run_webhook_data() + assert result is True + # Verify completion log was called + assert check_run_handler.logger.step.called # type: ignore[attr-defined] @pytest.mark.asyncio async def test_set_verify_check_queued(self, check_run_handler: CheckRunHandler) -> None: diff --git a/webhook_server/tests/test_comment_utils.py b/webhook_server/tests/test_comment_utils.py new file mode 100644 index 000000000..780433fd5 --- /dev/null +++ b/webhook_server/tests/test_comment_utils.py @@ -0,0 +1,250 @@ +"""Tests for comment_utils module.""" + +from webhook_server.utils.comment_utils import comment_with_details + + +class TestCommentWithDetails: + """Test suite for comment_with_details function.""" + + def test_basic_comment_formatting(self) -> None: + """Test basic comment with simple title and body.""" + result = comment_with_details("Summary", "Details go here") + + assert "
" in result + assert "Summary" in result + assert "Details go here" in result + assert "
" in result + + def test_comment_with_code_block(self) -> None: + """Test comment containing code block.""" + body = """```python +def hello(): + print("Hello, world!") +```""" + result = comment_with_details("Code Example", body) + + assert "Code Example" in result + assert "```python" in result + assert 'print("Hello, world!")' in result + + def test_comment_with_markdown_list(self) -> None: + """Test comment containing markdown list.""" + body = """- Item 1 +- Item 2 +- Item 3""" + result = comment_with_details("List Example", body) + + assert "- Item 1" in result + assert "- Item 2" in result + assert "- Item 3" in result + + def test_comment_with_links(self) -> None: + """Test comment containing markdown links.""" + body = "See [documentation](https://example.com) for details" + result = comment_with_details("Reference", body) + + assert "[documentation](https://example.com)" in result + + def test_comment_with_html_tags(self) -> None: + """Test comment containing HTML tags in body.""" + body = "Bold text and italic text" + result = comment_with_details("HTML Content", body) + + assert "Bold text" in result + assert "italic text" in result + + def test_comment_with_empty_body(self) -> None: + """Test comment with empty body.""" + result = comment_with_details("Empty Details", "") + + assert "Empty Details" in result + assert "
" in result + assert "
" in result + + def test_comment_with_empty_title(self) -> None: + """Test comment with empty title.""" + result = comment_with_details("", "Some content") + + assert "" in result + assert "Some content" in result + + def test_comment_with_special_characters_in_title(self) -> None: + """Test title containing special characters.""" + result = comment_with_details("Build Failed! ⚠️", "Error details") + + assert "Build Failed! ⚠️" in result + + def test_comment_with_special_characters_in_body(self) -> None: + """Test body containing special characters.""" + body = 'Error: "timeout" & connection failed @ 10:30 AM' + result = comment_with_details("Error Report", body) + + assert 'Error: "timeout" & connection failed @ 10:30 AM' in result + + def test_comment_with_multiline_body(self) -> None: + """Test comment with multiline body.""" + body = """Line 1 +Line 2 +Line 3 +Line 4""" + result = comment_with_details("Multiline", body) + + assert "Line 1" in result + assert "Line 2" in result + assert "Line 3" in result + assert "Line 4" in result + + def test_comment_with_table(self) -> None: + """Test comment containing markdown table.""" + body = """| Column 1 | Column 2 | +|----------|----------| +| Value 1 | Value 2 |""" + result = comment_with_details("Table Data", body) + + assert "| Column 1 | Column 2 |" in result + assert "| Value 1 | Value 2 |" in result + + def test_comment_with_headers(self) -> None: + """Test comment containing markdown headers.""" + body = """# Header 1 +## Header 2 +### Header 3""" + result = comment_with_details("Headers", body) + + assert "# Header 1" in result + assert "## Header 2" in result + assert "### Header 3" in result + + def test_comment_with_blockquote(self) -> None: + """Test comment containing blockquote.""" + body = """> This is a quote +> from someone""" + result = comment_with_details("Quote", body) + + assert "> This is a quote" in result + assert "> from someone" in result + + def test_comment_with_emoji(self) -> None: + """Test comment containing emoji.""" + body = "Build succeeded! 🎉 ✅ 🚀" + result = comment_with_details("Success", body) + + assert "🎉" in result + assert "✅" in result + assert "🚀" in result + + def test_comment_with_inline_code(self) -> None: + """Test comment with inline code.""" + body = "Use the `get_container_repository_and_tag()` function" + result = comment_with_details("Usage", body) + + assert "`get_container_repository_and_tag()`" in result + + def test_comment_with_unicode_characters(self) -> None: + """Test comment with Unicode characters.""" + body = "Unicode test: 测试 тест ทดสอบ テスト" + result = comment_with_details("Unicode", body) + + assert "测试" in result + assert "тест" in result + assert "ทดสอบ" in result + assert "テスト" in result + + def test_comment_with_very_long_body(self) -> None: + """Test comment with very long body text.""" + long_body = "A" * 10000 + result = comment_with_details("Long Content", long_body) + + assert "Long Content" in result + assert long_body in result + assert len(result) > 10000 + + def test_comment_with_nested_details(self) -> None: + """Test comment with nested details/summary in body.""" + body = """
<details> +<summary>Nested</summary> +Nested content +</details>
""" + result = comment_with_details("Outer", body) + + assert "Outer" in result + assert "Nested" in result + assert "Nested content" in result + + def test_comment_structure_format(self) -> None: + """Test that the comment structure follows expected format.""" + result = comment_with_details("Title", "Body") + + # Should have newline after opening details tag + assert result.startswith("\n
") + # Should have proper indentation + assert " Body" in result + # Should end with closing details tag and newline + assert result.rstrip().endswith("
") + + def test_comment_with_mixed_content(self) -> None: + """Test comment with mixed markdown, HTML, and special chars.""" + body = """**Build Results:** + +- ✅ Tests passed +- ❌ Linting failed + +```bash +npm run lint +``` + +See logs for details.""" + result = comment_with_details("CI Results", body) + + assert "**Build Results:**" in result + assert "- ✅ Tests passed" in result + assert "```bash" in result + assert 'logs' in result + + def test_comment_preserves_whitespace(self) -> None: + """Test that whitespace in body is preserved.""" + body = """Line with multiple spaces + Indented line + More indented""" + result = comment_with_details("Whitespace", body) + + assert "multiple spaces" in result + assert " Indented line" in result + assert " More indented" in result + + def test_comment_with_escape_sequences(self) -> None: + """Test comment with escape sequences.""" + body = r"Path: C:\Users\test\file.txt\n\tNext line" + result = comment_with_details("Paths", body) + + assert r"C:\Users\test\file.txt" in result + + def test_comment_with_backticks_in_title(self) -> None: + """Test title containing backticks.""" + result = comment_with_details("`function()` failed", "Error details") + + assert "`function()` failed" in result + + def test_comment_return_type(self) -> None: + """Test that function returns a string.""" + result = comment_with_details("Test", "Test") + + assert isinstance(result, str) + + def test_comment_with_image_markdown(self) -> None: + """Test comment with markdown image syntax.""" + body = "![Alt text](https://example.com/image.png)" + result = comment_with_details("Image", body) + + assert "![Alt text](https://example.com/image.png)" in result + + def test_comment_with_horizontal_rule(self) -> None: + """Test comment with horizontal rule.""" + body = """Section 1 +--- +Section 2""" + result = comment_with_details("Sections", body) + + assert "---" in result + assert "Section 1" in result + assert "Section 2" in result diff --git a/webhook_server/tests/test_config.py b/webhook_server/tests/test_config.py index 1771173e5..e310ea142 100644 --- a/webhook_server/tests/test_config.py +++ b/webhook_server/tests/test_config.py @@ -1,7 +1,8 @@ import os +import shutil import tempfile -from unittest.mock import Mock, patch from typing import Any +from unittest.mock import Mock, patch import pytest import yaml @@ -75,8 +76,6 @@ def test_init_with_custom_data_dir( assert config.data_dir == custom_dir assert config.config_path == os.path.join(custom_dir, "config.yaml") finally: - import shutil - shutil.rmtree(custom_dir) def test_exists_file_not_found(self, monkeypatch: pytest.MonkeyPatch) -> None: @@ -88,8 +87,6 @@ def test_exists_file_not_found(self, monkeypatch: pytest.MonkeyPatch) -> None: with pytest.raises(FileNotFoundError, match="Config file .* not found"): Config() finally: - import shutil - shutil.rmtree(temp_dir) def test_repositories_exists_missing_repositories( @@ -153,8 +150,9 @@ def test_root_data_corrupted_file(self, temp_config_dir: str, monkeypatch: pytes config.config_path = config_file config.logger = Mock() - root_data = config.root_data - assert root_data == {} + # Corrupted YAML should raise yaml.YAMLError + with pytest.raises(yaml.YAMLError): + _ = config.root_data def test_repository_data_with_repository(self, temp_config_dir: str, monkeypatch: pytest.MonkeyPatch) -> None: """Test repository_data property when repository is specified.""" @@ -185,10 +183,7 @@ def test_repository_data_nonexistent_repository( assert repo_data == {} - 
@patch("webhook_server.utils.helpers.get_github_repo_api") - def test_repository_local_data_success( - self, mock_get_repo_api: Mock, temp_config_dir: str, monkeypatch: pytest.MonkeyPatch - ) -> None: + def test_repository_local_data_success(self, temp_config_dir: str, monkeypatch: pytest.MonkeyPatch) -> None: """Test repository_local_data method with successful config file retrieval.""" monkeypatch.setenv("WEBHOOK_SERVER_DATA_DIR", temp_config_dir) @@ -197,21 +192,18 @@ def test_repository_local_data_success( mock_config_file = Mock() mock_config_file.decoded_content = yaml.dump({"local-setting": "value"}).encode() mock_repo.get_contents.return_value = mock_config_file - mock_get_repo_api.return_value = mock_repo config = Config(repository="test-repo") mock_github_api = Mock() + mock_github_api.get_repo.return_value = mock_repo result = config.repository_local_data(mock_github_api, "org/test-repo") assert result == {"local-setting": "value"} - mock_get_repo_api.assert_called_once_with(github_app_api=mock_github_api, repository="org/test-repo") + mock_github_api.get_repo.assert_called_once_with("org/test-repo") mock_repo.get_contents.assert_called_once_with(".github-webhook-server.yaml") - @patch("webhook_server.utils.helpers.get_github_repo_api") - def test_repository_local_data_list_result( - self, mock_get_repo_api: Mock, temp_config_dir: str, monkeypatch: pytest.MonkeyPatch - ) -> None: + def test_repository_local_data_list_result(self, temp_config_dir: str, monkeypatch: pytest.MonkeyPatch) -> None: """Test repository_local_data method when get_contents returns a list.""" monkeypatch.setenv("WEBHOOK_SERVER_DATA_DIR", temp_config_dir) @@ -220,10 +212,10 @@ def test_repository_local_data_list_result( mock_config_file = Mock() mock_config_file.decoded_content = yaml.dump({"local-setting": "value"}).encode() mock_repo.get_contents.return_value = [mock_config_file] # List result - mock_get_repo_api.return_value = mock_repo config = Config(repository="test-repo") mock_github_api = Mock() + mock_github_api.get_repo.return_value = mock_repo result = config.repository_local_data(mock_github_api, "org/test-repo") diff --git a/webhook_server/tests/test_config_schema.py b/webhook_server/tests/test_config_schema.py index 1ceb7ad89..d76947a7c 100644 --- a/webhook_server/tests/test_config_schema.py +++ b/webhook_server/tests/test_config_schema.py @@ -1,4 +1,5 @@ import os +import shutil import tempfile from typing import Any @@ -106,7 +107,6 @@ def test_valid_minimal_config_loads( assert "test-repo" in config.root_data["repositories"] finally: # Clean up - import shutil shutil.rmtree(temp_dir) @@ -132,8 +132,6 @@ def test_valid_full_config_loads(self, valid_full_config: dict[str, Any], monkey assert repo_data["minimum-lgtm"] == 2 assert repo_data["conventional-title"] == "feat,fix,docs" finally: - import shutil - shutil.rmtree(temp_dir) def test_log_level_enum_validation(self, valid_minimal_config: dict[str, Any]) -> None: @@ -146,12 +144,10 @@ def test_log_level_enum_validation(self, valid_minimal_config: dict[str, Any]) - try: config_file = os.path.join(temp_dir, "config.yaml") - with open(config_file, "r") as file_handle: + with open(config_file) as file_handle: data = yaml.safe_load(file_handle) assert data["log-level"] == level finally: - import shutil - shutil.rmtree(temp_dir) def test_required_fields_validation(self, monkeypatch: pytest.MonkeyPatch) -> None: @@ -171,8 +167,6 @@ def test_required_fields_validation(self, monkeypatch: pytest.MonkeyPatch) -> No with pytest.raises(ValueError, 
match="does not have `repositories`"): Config() finally: - import shutil - shutil.rmtree(temp_dir) def test_array_fields_validation(self, valid_minimal_config: dict[str, Any]) -> None: @@ -186,14 +180,12 @@ def test_array_fields_validation(self, valid_minimal_config: dict[str, Any]) -> try: config_file = os.path.join(temp_dir, "config.yaml") - with open(config_file, "r") as file_handle: + with open(config_file) as file_handle: data = yaml.safe_load(file_handle) assert len(data["github-tokens"]) == 3 assert len(data["default-status-checks"]) == 3 assert len(data["auto-verified-and-merged-users"]) == 2 finally: - import shutil - shutil.rmtree(temp_dir) def test_docker_object_validation(self, valid_minimal_config: dict[str, Any]) -> None: @@ -205,13 +197,11 @@ def test_docker_object_validation(self, valid_minimal_config: dict[str, Any]) -> try: config_file = os.path.join(temp_dir, "config.yaml") - with open(config_file, "r") as file_handle: + with open(config_file) as file_handle: data = yaml.safe_load(file_handle) assert data["docker"]["username"] == "testuser" assert data["docker"]["password"] == "testpass" # pragma: allowlist secret finally: - import shutil - shutil.rmtree(temp_dir) def test_branch_protection_object_validation(self, valid_minimal_config: dict[str, Any]) -> None: @@ -230,15 +220,13 @@ def test_branch_protection_object_validation(self, valid_minimal_config: dict[st try: config_file = os.path.join(temp_dir, "config.yaml") - with open(config_file, "r") as file_handle: + with open(config_file) as file_handle: data = yaml.safe_load(file_handle) branch_protection = data["branch-protection"] assert branch_protection["strict"] is True assert branch_protection["require_code_owner_reviews"] is False assert branch_protection["required_approving_review_count"] == 2 finally: - import shutil - shutil.rmtree(temp_dir) def test_repository_structure_validation(self, valid_minimal_config: dict[str, Any]) -> None: @@ -253,14 +241,12 @@ def test_repository_structure_validation(self, valid_minimal_config: dict[str, A try: config_file = os.path.join(temp_dir, "config.yaml") - with open(config_file, "r") as file_handle: + with open(config_file) as file_handle: data = yaml.safe_load(file_handle) assert "repo1" in data["repositories"] assert "repo2" in data["repositories"] assert data["repositories"]["repo2"]["minimum-lgtm"] == 1 finally: - import shutil - shutil.rmtree(temp_dir) def test_tox_configuration_flexibility(self, valid_minimal_config: dict[str, Any]) -> None: @@ -276,15 +262,13 @@ def test_tox_configuration_flexibility(self, valid_minimal_config: dict[str, Any try: config_file = os.path.join(temp_dir, "config.yaml") - with open(config_file, "r") as file_handle: + with open(config_file) as file_handle: data = yaml.safe_load(file_handle) tox_config = data["repositories"]["test-repo"]["tox"] assert tox_config["main"] == "all" assert tox_config["dev"] == ["test1", "test2"] assert tox_config["feature"] == "specific-test" finally: - import shutil - shutil.rmtree(temp_dir) def test_protected_branches_flexibility(self, valid_minimal_config: dict[str, Any]) -> None: @@ -300,15 +284,13 @@ def test_protected_branches_flexibility(self, valid_minimal_config: dict[str, An try: config_file = os.path.join(temp_dir, "config.yaml") - with open(config_file, "r") as file_handle: + with open(config_file) as file_handle: data = yaml.safe_load(file_handle) protected_branches = data["repositories"]["test-repo"]["protected-branches"] assert "include-runs" in protected_branches["main"] assert 
protected_branches["dev"] == [] assert protected_branches["feature"] == ["simple-array"] finally: - import shutil - shutil.rmtree(temp_dir) def test_container_configuration_complete(self, valid_minimal_config: dict[str, Any]) -> None: @@ -328,7 +310,7 @@ def test_container_configuration_complete(self, valid_minimal_config: dict[str, try: config_file = os.path.join(temp_dir, "config.yaml") - with open(config_file, "r") as file_handle: + with open(config_file) as file_handle: data = yaml.safe_load(file_handle) container = data["repositories"]["test-repo"]["container"] assert container["username"] == "reguser" @@ -336,8 +318,6 @@ def test_container_configuration_complete(self, valid_minimal_config: dict[str, assert len(container["build-args"]) == 2 assert len(container["args"]) == 2 finally: - import shutil - shutil.rmtree(temp_dir) def test_boolean_fields_validation(self, valid_minimal_config: dict[str, Any]) -> None: @@ -350,7 +330,7 @@ def test_boolean_fields_validation(self, valid_minimal_config: dict[str, Any]) - try: config_file = os.path.join(temp_dir, "config.yaml") - with open(config_file, "r") as file_handle: + with open(config_file) as file_handle: data = yaml.safe_load(file_handle) assert data["verify-github-ips"] is True assert data["verify-cloudflare-ips"] is False @@ -358,8 +338,6 @@ def test_boolean_fields_validation(self, valid_minimal_config: dict[str, Any]) - assert data["repositories"]["test-repo"]["verified-job"] is False assert data["repositories"]["test-repo"]["pre-commit"] is True finally: - import shutil - shutil.rmtree(temp_dir) def test_integer_fields_validation(self, valid_minimal_config: dict[str, Any]) -> None: @@ -372,14 +350,12 @@ def test_integer_fields_validation(self, valid_minimal_config: dict[str, Any]) - try: config_file = os.path.join(temp_dir, "config.yaml") - with open(config_file, "r") as file_handle: + with open(config_file) as file_handle: data = yaml.safe_load(file_handle) assert data["port"] == 8080 assert data["max-workers"] == 20 assert data["repositories"]["test-repo"]["minimum-lgtm"] == 3 finally: - import shutil - shutil.rmtree(temp_dir) def test_disable_ssl_warnings_configuration(self, valid_minimal_config: dict[str, Any]) -> None: @@ -391,12 +367,10 @@ def test_disable_ssl_warnings_configuration(self, valid_minimal_config: dict[str try: config_file = os.path.join(temp_dir, "config.yaml") - with open(config_file, "r") as file_handle: + with open(config_file) as file_handle: data = yaml.safe_load(file_handle) assert data["disable-ssl-warnings"] is True finally: - import shutil - shutil.rmtree(temp_dir) def test_empty_configuration_handling(self, monkeypatch: pytest.MonkeyPatch) -> None: @@ -410,8 +384,6 @@ def test_empty_configuration_handling(self, monkeypatch: pytest.MonkeyPatch) -> with pytest.raises(ValueError): Config() finally: - import shutil - shutil.rmtree(temp_dir) def test_malformed_yaml_handling(self, monkeypatch: pytest.MonkeyPatch) -> None: @@ -426,12 +398,10 @@ def test_malformed_yaml_handling(self, monkeypatch: pytest.MonkeyPatch) -> None: try: monkeypatch.setenv("WEBHOOK_SERVER_DATA_DIR", temp_dir) - # Malformed YAML should result in empty config and fail repositories validation - with pytest.raises(ValueError, match="does not have `repositories`"): + # Malformed YAML should raise yaml.YAMLError during parsing + with pytest.raises(yaml.YAMLError): Config() finally: - import shutil - shutil.rmtree(temp_dir) def test_default_values_behavior( @@ -452,8 +422,6 @@ def test_default_values_behavior( assert "verify-github-ips" not in 
config_obj.root_data assert "minimum-lgtm" not in config_obj.root_data["repositories"]["test-repo"] finally: - import shutil - shutil.rmtree(temp_dir) def test_create_issue_for_new_pr_configuration(self, monkeypatch: pytest.MonkeyPatch) -> None: @@ -478,8 +446,6 @@ def test_create_issue_for_new_pr_configuration(self, monkeypatch: pytest.MonkeyP config = Config() assert config.root_data["create-issue-for-new-pr"] is False finally: - import shutil - shutil.rmtree(temp_dir) # Test repository-specific override @@ -503,8 +469,6 @@ def test_create_issue_for_new_pr_configuration(self, monkeypatch: pytest.MonkeyP assert config.root_data["create-issue-for-new-pr"] is False assert config.root_data["repositories"]["test-repo"]["create-issue-for-new-pr"] is True finally: - import shutil - shutil.rmtree(temp_dir) def test_pr_size_thresholds_valid_configuration(self, valid_minimal_config: dict[str, Any]) -> None: @@ -519,7 +483,7 @@ def test_pr_size_thresholds_valid_configuration(self, valid_minimal_config: dict try: config_file = os.path.join(temp_dir, "config.yaml") - with open(config_file, "r") as file_handle: + with open(config_file) as file_handle: data = yaml.safe_load(file_handle) pr_thresholds = data["pr-size-thresholds"] assert pr_thresholds["Small"]["threshold"] == 100 @@ -527,8 +491,6 @@ def test_pr_size_thresholds_valid_configuration(self, valid_minimal_config: dict assert pr_thresholds["Large"]["threshold"] == 500 assert pr_thresholds["Large"]["color"] == "red" finally: - import shutil - shutil.rmtree(temp_dir) def test_pr_size_thresholds_repository_level(self, valid_minimal_config: dict[str, Any]) -> None: @@ -544,7 +506,7 @@ def test_pr_size_thresholds_repository_level(self, valid_minimal_config: dict[st try: config_file = os.path.join(temp_dir, "config.yaml") - with open(config_file, "r") as file_handle: + with open(config_file) as file_handle: data = yaml.safe_load(file_handle) repo_thresholds = data["repositories"]["test-repo"]["pr-size-thresholds"] assert repo_thresholds["Express"]["threshold"] == 25 @@ -552,8 +514,6 @@ def test_pr_size_thresholds_repository_level(self, valid_minimal_config: dict[st assert repo_thresholds["Extended"]["threshold"] == 300 assert repo_thresholds["Extended"]["color"] == "orange" finally: - import shutil - shutil.rmtree(temp_dir) def test_pr_size_thresholds_various_color_names(self, valid_minimal_config: dict[str, Any]) -> None: @@ -572,15 +532,13 @@ def test_pr_size_thresholds_various_color_names(self, valid_minimal_config: dict try: config_file = os.path.join(temp_dir, "config.yaml") - with open(config_file, "r") as file_handle: + with open(config_file) as file_handle: data = yaml.safe_load(file_handle) pr_thresholds = data["pr-size-thresholds"] assert len(pr_thresholds) == 6 assert pr_thresholds["Tiny"]["color"] == "lightgray" assert pr_thresholds["Massive"]["threshold"] == 2000 finally: - import shutil - shutil.rmtree(temp_dir) def test_pr_size_thresholds_missing_fields(self, valid_minimal_config: dict[str, Any]) -> None: @@ -595,13 +553,11 @@ def test_pr_size_thresholds_missing_fields(self, valid_minimal_config: dict[str, try: config_file = os.path.join(temp_dir, "config.yaml") - with open(config_file, "r") as file_handle: + with open(config_file) as file_handle: data = yaml.safe_load(file_handle) # Should still load, validation will happen at runtime assert "pr-size-thresholds" in data finally: - import shutil - shutil.rmtree(temp_dir) # Test missing color (should be acceptable with fallback) @@ -614,12 +570,10 @@ def 
test_pr_size_thresholds_missing_fields(self, valid_minimal_config: dict[str, try: config_file = os.path.join(temp_dir2, "config.yaml") - with open(config_file, "r") as file_handle: + with open(config_file) as file_handle: data = yaml.safe_load(file_handle) assert data["pr-size-thresholds"]["Small"]["threshold"] == 100 finally: - import shutil - shutil.rmtree(temp_dir2) def test_pr_size_thresholds_invalid_threshold_values(self, valid_minimal_config: dict[str, Any]) -> None: @@ -634,13 +588,11 @@ def test_pr_size_thresholds_invalid_threshold_values(self, valid_minimal_config: try: config_file = os.path.join(temp_dir, "config.yaml") - with open(config_file, "r") as file_handle: + with open(config_file) as file_handle: data = yaml.safe_load(file_handle) # Config loads, but validation should catch this at runtime assert data["pr-size-thresholds"]["Small"]["threshold"] == -10 finally: - import shutil - shutil.rmtree(temp_dir) # Test zero threshold @@ -653,12 +605,10 @@ def test_pr_size_thresholds_invalid_threshold_values(self, valid_minimal_config: try: config_file = os.path.join(temp_dir2, "config.yaml") - with open(config_file, "r") as file_handle: + with open(config_file) as file_handle: data = yaml.safe_load(file_handle) assert data["pr-size-thresholds"]["Small"]["threshold"] == 0 finally: - import shutil - shutil.rmtree(temp_dir2) # Test non-integer threshold @@ -671,12 +621,10 @@ def test_pr_size_thresholds_invalid_threshold_values(self, valid_minimal_config: try: config_file = os.path.join(temp_dir3, "config.yaml") - with open(config_file, "r") as file_handle: + with open(config_file) as file_handle: data = yaml.safe_load(file_handle) assert data["pr-size-thresholds"]["Small"]["threshold"] == "not-a-number" finally: - import shutil - shutil.rmtree(temp_dir3) def test_pr_size_thresholds_empty_configuration(self, valid_minimal_config: dict[str, Any]) -> None: @@ -688,10 +636,8 @@ def test_pr_size_thresholds_empty_configuration(self, valid_minimal_config: dict try: config_file = os.path.join(temp_dir, "config.yaml") - with open(config_file, "r") as file_handle: + with open(config_file) as file_handle: data = yaml.safe_load(file_handle) assert data["pr-size-thresholds"] == {} finally: - import shutil - shutil.rmtree(temp_dir) diff --git a/webhook_server/tests/test_container_utils.py b/webhook_server/tests/test_container_utils.py new file mode 100644 index 000000000..268c5ce9a --- /dev/null +++ b/webhook_server/tests/test_container_utils.py @@ -0,0 +1,266 @@ +"""Tests for container_utils module.""" + +from unittest.mock import Mock + +import pytest +from github.PullRequest import PullRequest + +from webhook_server.utils.container_utils import get_container_repository_and_tag + + +class TestGetContainerRepositoryAndTag: + """Test suite for get_container_repository_and_tag function.""" + + @pytest.fixture + def mock_logger(self) -> Mock: + """Create a mock logger.""" + return Mock() + + @pytest.fixture + def mock_pull_request(self) -> Mock: + """Create a mock PyGithub PullRequest.""" + pr = Mock(spec=PullRequest) + pr.number = 123 + pr.base = Mock() + pr.base.ref = "main" + return pr + + def test_explicit_tag_provided(self, mock_logger: Mock) -> None: + """Test with explicit tag provided.""" + result = get_container_repository_and_tag( + container_repository="quay.io/myorg/myimage", + container_tag="latest", + tag="v1.2.3", + logger=mock_logger, + log_prefix="[TEST]", + ) + + assert result == "quay.io/myorg/myimage:v1.2.3" + mock_logger.debug.assert_called_once_with("[TEST] container tag is: 
v1.2.3") + + def test_explicit_tag_with_hash(self, mock_logger: Mock) -> None: + """Test with hash-based tag.""" + result = get_container_repository_and_tag( + container_repository="docker.io/myorg/myimage", + container_tag="latest", + tag="abc123def456", # pragma: allowlist secret + logger=mock_logger, + ) + + assert result == "docker.io/myorg/myimage:abc123def456" + + def test_merged_pr_main_branch(self, mock_pull_request: Mock, mock_logger: Mock) -> None: + """Test merged PR on main branch uses default container tag.""" + mock_pull_request.base.ref = "main" + + result = get_container_repository_and_tag( + container_repository="ghcr.io/myorg/myimage", + container_tag="latest", + is_merged=True, + pull_request=mock_pull_request, + logger=mock_logger, + ) + + assert result == "ghcr.io/myorg/myimage:latest" + + def test_merged_pr_master_branch(self, mock_pull_request: Mock, mock_logger: Mock) -> None: + """Test merged PR on master branch uses default container tag.""" + mock_pull_request.base.ref = "master" + + result = get_container_repository_and_tag( + container_repository="quay.io/myorg/myimage", + container_tag="stable", + is_merged=True, + pull_request=mock_pull_request, + logger=mock_logger, + ) + + assert result == "quay.io/myorg/myimage:stable" + + def test_merged_pr_feature_branch(self, mock_pull_request: Mock, mock_logger: Mock) -> None: + """Test merged PR on feature branch uses branch name as tag.""" + mock_pull_request.base.ref = "feature/new-api" + + result = get_container_repository_and_tag( + container_repository="quay.io/myorg/myimage", + container_tag="latest", + is_merged=True, + pull_request=mock_pull_request, + logger=mock_logger, + ) + + assert result == "quay.io/myorg/myimage:feature/new-api" + + def test_merged_pr_release_branch(self, mock_pull_request: Mock, mock_logger: Mock) -> None: + """Test merged PR on release branch uses branch name as tag.""" + mock_pull_request.base.ref = "release-v2.0" + + result = get_container_repository_and_tag( + container_repository="quay.io/myorg/myimage", + container_tag="latest", + is_merged=True, + pull_request=mock_pull_request, + logger=mock_logger, + ) + + assert result == "quay.io/myorg/myimage:release-v2.0" + + def test_unmerged_pr(self, mock_pull_request: Mock, mock_logger: Mock) -> None: + """Test unmerged PR uses pr-{number} tag format.""" + mock_pull_request.number = 456 + + result = get_container_repository_and_tag( + container_repository="quay.io/myorg/myimage", + container_tag="latest", + is_merged=False, + pull_request=mock_pull_request, + logger=mock_logger, + ) + + assert result == "quay.io/myorg/myimage:pr-456" + + def test_no_tag_no_pull_request(self, mock_logger: Mock) -> None: + """Test returns None when no tag and no PR provided.""" + result = get_container_repository_and_tag( + container_repository="quay.io/myorg/myimage", + container_tag="latest", + logger=mock_logger, + log_prefix="[ERROR]", + ) + + assert result is None + mock_logger.error.assert_called_once_with("[ERROR] No pull request provided and no tag specified") + + def test_repository_with_port(self, mock_logger: Mock) -> None: + """Test repository URL with port number.""" + result = get_container_repository_and_tag( + container_repository="registry.example.com:5000/myorg/myimage", + container_tag="latest", + tag="v2.0.0", + logger=mock_logger, + ) + + assert result == "registry.example.com:5000/myorg/myimage:v2.0.0" + + def test_repository_with_nested_path(self, mock_logger: Mock) -> None: + """Test repository with nested path.""" + result = 
get_container_repository_and_tag( + container_repository="quay.io/myorg/team/myimage", + container_tag="latest", + tag="dev", + logger=mock_logger, + ) + + assert result == "quay.io/myorg/team/myimage:dev" + + def test_tag_with_special_characters(self, mock_logger: Mock) -> None: + """Test tag with special characters like dots and hyphens.""" + result = get_container_repository_and_tag( + container_repository="quay.io/myorg/myimage", + container_tag="latest", + tag="v1.2.3-rc.1", + logger=mock_logger, + ) + + assert result == "quay.io/myorg/myimage:v1.2.3-rc.1" + + def test_without_logger(self) -> None: + """Test function works without logger (logger is optional).""" + result = get_container_repository_and_tag( + container_repository="quay.io/myorg/myimage", + container_tag="latest", + tag="v1.0.0", + ) + + assert result == "quay.io/myorg/myimage:v1.0.0" + + def test_without_logger_no_tag_no_pr(self) -> None: + """Test returns None without logger when no tag and no PR.""" + result = get_container_repository_and_tag( + container_repository="quay.io/myorg/myimage", + container_tag="latest", + ) + + assert result is None + + def test_without_log_prefix(self, mock_logger: Mock) -> None: + """Test function works without log_prefix (uses empty string by default).""" + result = get_container_repository_and_tag( + container_repository="quay.io/myorg/myimage", + container_tag="latest", + tag="test", + logger=mock_logger, + ) + + assert result == "quay.io/myorg/myimage:test" + mock_logger.debug.assert_called_once_with(" container tag is: test") + + def test_pr_number_zero(self, mock_pull_request: Mock, mock_logger: Mock) -> None: + """Test PR with number 0 (edge case).""" + mock_pull_request.number = 0 + + result = get_container_repository_and_tag( + container_repository="quay.io/myorg/myimage", + container_tag="latest", + is_merged=False, + pull_request=mock_pull_request, + logger=mock_logger, + ) + + assert result == "quay.io/myorg/myimage:pr-0" + + def test_very_long_branch_name(self, mock_pull_request: Mock, mock_logger: Mock) -> None: + """Test with very long branch name.""" + long_branch = "feature/" + "x" * 100 + mock_pull_request.base.ref = long_branch + + result = get_container_repository_and_tag( + container_repository="quay.io/myorg/myimage", + container_tag="latest", + is_merged=True, + pull_request=mock_pull_request, + logger=mock_logger, + ) + + assert result == f"quay.io/myorg/myimage:{long_branch}" + + def test_empty_container_repository(self, mock_logger: Mock) -> None: + """Test with empty container repository string.""" + result = get_container_repository_and_tag( + container_repository="", + container_tag="latest", + tag="v1.0.0", + logger=mock_logger, + ) + + assert result == ":v1.0.0" + + def test_empty_tag_string(self, mock_pull_request: Mock, mock_logger: Mock) -> None: + """Test with explicitly empty tag string (should use PR logic).""" + result = get_container_repository_and_tag( + container_repository="quay.io/myorg/myimage", + container_tag="latest", + tag="", + is_merged=False, + pull_request=mock_pull_request, + logger=mock_logger, + ) + + assert result == "quay.io/myorg/myimage:pr-123" + + def test_merged_pr_main_with_empty_container_tag(self, mock_pull_request: Mock, mock_logger: Mock) -> None: + """Test merged PR on main with empty default container tag.""" + mock_pull_request.base.ref = "main" + + result = get_container_repository_and_tag( + container_repository="quay.io/myorg/myimage", + container_tag="", # Empty default tag + is_merged=True, + 
pull_request=mock_pull_request, + logger=mock_logger, + ) + + # When merged to main with empty container_tag, tag becomes empty string + # This triggers the final error path + assert result is None + mock_logger.error.assert_called_with(" container tag not found") diff --git a/webhook_server/tests/test_edge_cases_validation.py b/webhook_server/tests/test_edge_cases_validation.py index 66b28647a..d694918f5 100644 --- a/webhook_server/tests/test_edge_cases_validation.py +++ b/webhook_server/tests/test_edge_cases_validation.py @@ -1,15 +1,18 @@ """Edge case validation tests for webhook server log functionality.""" import asyncio +import concurrent.futures import datetime import os import tempfile +import time +from collections.abc import Generator from pathlib import Path -from typing import Generator -from unittest.mock import Mock, patch +from unittest.mock import AsyncMock, Mock, patch import pytest from fastapi import HTTPException +from fastapi.websockets import WebSocketDisconnect from simple_logger.logger import get_logger try: @@ -143,7 +146,11 @@ def test_malformed_log_entries_handling(self): {"json": "object", "instead": "of log line"} 2025-07-31T10:00:01.000000 GithubWebhook DEBUG Another valid entry Line with unicode characters: 🚀 💻 ✅ - Very long line that exceeds normal expectations and might cause buffer overflow issues in poorly implemented parsers with limited memory allocation strategies and insufficient bounds checking mechanisms that could potentially lead to security vulnerabilities or performance degradation + Very long line that exceeds normal expectations and might cause " + "buffer overflow issues in poorly implemented parsers with " + "limited memory allocation strategies and insufficient bounds " + "checking mechanisms that could potentially lead to security " + "vulnerabilities or performance degradation 2025-07-31T10:00:02.000000 GithubWebhook ERROR Final valid entry """ @@ -172,8 +179,6 @@ def test_concurrent_file_access(self, temp_log_file): def parse_file(): return parser.parse_log_file(log_path) - import concurrent.futures - with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor: futures = [executor.submit(parse_file) for _ in range(10)] results = [future.result() for future in futures] @@ -240,7 +245,7 @@ async def run_test(): asyncio.gather(monitor_task, rotation_task, return_exceptions=True), timeout=1.0, # Reduced from 5.0 to 1.0 second ) - except asyncio.TimeoutError: + except TimeoutError: monitor_task.cancel() rotation_task.cancel() @@ -249,8 +254,9 @@ async def run_test(): # Should handle rotation gracefully and capture at least some entries # The monitor should capture at least the "Before rotation" entry since it's added after monitoring starts # During rotation, some entries might be missed, but the monitor should capture at least 1 entry + entry_messages = [e.message for e in monitored_entries] assert len(monitored_entries) >= 1, ( - f"Expected at least 1 monitored entry, got {len(monitored_entries)}. Entries: {[e.message for e in monitored_entries]}" + f"Expected at least 1 monitored entry, got {len(monitored_entries)}. 
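Taken together, the test_container_utils.py cases above pin down the tag-resolution rules: an explicit tag always wins, merges to main/master reuse the configured container_tag, merges to other base branches use the branch name, open PRs get pr-{number}, and an unresolvable tag logs an error and returns None. A minimal sketch consistent with those assertions (the shipped webhook_server/utils/container_utils.py is authoritative and may differ in detail):

import logging

from github.PullRequest import PullRequest


def get_container_repository_and_tag(
    container_repository: str,
    container_tag: str,
    tag: str = "",
    is_merged: bool = False,
    pull_request: PullRequest | None = None,
    logger: logging.Logger | None = None,
    log_prefix: str = "",
) -> str | None:
    if not tag:
        if not pull_request:
            if logger:
                logger.error(f"{log_prefix} No pull request provided and no tag specified")
            return None
        if is_merged:
            # Default branches reuse the configured tag; other base branches
            # are tagged with the branch name itself.
            tag = container_tag if pull_request.base.ref in ("main", "master") else pull_request.base.ref
        else:
            tag = f"pr-{pull_request.number}"
    if not tag:
        # Reached when a PR merged to main carries an empty container_tag.
        if logger:
            logger.error(f"{log_prefix} container tag not found")
        return None
    if logger:
        logger.debug(f"{log_prefix} container tag is: {tag}")
    return f"{container_repository}:{tag}"
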
Entries: {entry_messages}" ) # Verify that captured entries are valid LogEntry objects with expected content @@ -261,13 +267,22 @@ async def run_test(): def test_unicode_and_special_characters(self): """Test handling of unicode and special characters in log entries.""" - unicode_content = """2025-07-31T10:00:00.000000 GithubWebhook INFO test-repo [push][hook-1][user]: Message with unicode: 🚀 ✅ 💻 -2025-07-31T10:00:01.000000 GithubWebhook INFO test-repo [push][hook-2][user]: ASCII and émojis: café naïve résumé -2025-07-31T10:00:02.000000 GithubWebhook INFO test-repo [push][hook-3][user]: Chinese characters: 你好世界 -2025-07-31T10:00:03.000000 GithubWebhook INFO test-repo [push][hook-4][user]: Arabic: مرحبا بالعالم -2025-07-31T10:00:04.000000 GithubWebhook INFO test-repo [push][hook-5][user]: Special chars: @#$%^&*(){}[]|\\:";'<>?,./ -2025-07-31T10:00:05.000000 GithubWebhook INFO test-repo [push][hook-6][user]: Newlines and tabs: Message\\nwith\\ttabs -2025-07-31T10:00:06.000000 GithubWebhook INFO test-repo [push][hook-7][user]: Quote handling: 'single' "double" `backtick`""" + unicode_content = ( + "2025-07-31T10:00:00.000000 GithubWebhook INFO test-repo " + "[push][hook-1][user]: Message with unicode: 🚀 ✅ 💻\n" + "2025-07-31T10:00:01.000000 GithubWebhook INFO test-repo " + "[push][hook-2][user]: ASCII and émojis: café naïve résumé\n" + "2025-07-31T10:00:02.000000 GithubWebhook INFO test-repo " + "[push][hook-3][user]: Chinese characters: 你好世界\n" + "2025-07-31T10:00:03.000000 GithubWebhook INFO test-repo " + "[push][hook-4][user]: Arabic: مرحبا بالعالم\n" + "2025-07-31T10:00:04.000000 GithubWebhook INFO test-repo " + "[push][hook-5][user]: Special chars: @#$%^&*(){}[]|\\:\";'<>?,./\n" + "2025-07-31T10:00:05.000000 GithubWebhook INFO test-repo " + "[push][hook-6][user]: Newlines and tabs: Message\\nwith\\ttabs\n" + "2025-07-31T10:00:06.000000 GithubWebhook INFO test-repo " + "[push][hook-7][user]: Quote handling: 'single' \"double\" `backtick`" + ) entries = parse_log_content_helper(unicode_content, encoding="utf-8") @@ -295,7 +310,7 @@ def test_empty_and_whitespace_only_files(self): " \n \t \n ", # Mixed whitespace ] - for i, content in enumerate(test_cases): + for _i, content in enumerate(test_cases): entries = parse_log_content_helper(content) # Should handle gracefully without errors @@ -307,9 +322,14 @@ def test_very_long_individual_log_lines(self): # Generate very long message long_message = "Very long message: " + "A" * 100000 # 100KB message - long_line_content = f"""2025-07-31T10:00:00.000000 GithubWebhook INFO test-repo [push][hook-1][user]: Normal message -2025-07-31T10:00:01.000000 GithubWebhook INFO test-repo [push][hook-2][user]: {long_message} -2025-07-31T10:00:02.000000 GithubWebhook INFO test-repo [push][hook-3][user]: Another normal message""" + long_line_content = ( + f"2025-07-31T10:00:00.000000 GithubWebhook INFO test-repo " + f"[push][hook-1][user]: Normal message\n" + f"2025-07-31T10:00:01.000000 GithubWebhook INFO test-repo " + f"[push][hook-2][user]: {long_message}\n" + f"2025-07-31T10:00:02.000000 GithubWebhook INFO test-repo " + f"[push][hook-3][user]: Another normal message" + ) entries = parse_log_content_helper(long_line_content) @@ -462,8 +482,6 @@ def test_filtering_performance_with_large_strings(self): entries = self.create_complex_test_dataset() log_filter = LogFilter() - import time - # Test search in very long content start_time = time.perf_counter() long_string_filtered = log_filter.filter_entries(entries, search_text="X" * 100) @@ -548,9 +566,7 @@ async def 
test_websocket_connection_limits(self): # Mock multiple WebSocket connections mock_websockets = [] - for i in range(100): # Simulate many connections - from unittest.mock import AsyncMock - + for _i in range(100): # Simulate many connections mock_ws = AsyncMock() mock_ws.accept = AsyncMock() mock_ws.send_json = AsyncMock() @@ -602,15 +618,11 @@ async def mock_monitor(): @pytest.mark.asyncio async def test_websocket_with_rapid_disconnections(self): """Test WebSocket handling with rapid connect/disconnect cycles.""" - from fastapi.websockets import WebSocketDisconnect - mock_logger = Mock() controller = LogViewerController(logger=mock_logger) # Test rapid disconnection scenarios - for i in range(10): - from unittest.mock import AsyncMock - + for _i in range(10): mock_ws = AsyncMock() mock_ws.accept = AsyncMock() @@ -646,8 +658,6 @@ async def test_websocket_with_corrupted_data_streams(self): ), ] - from unittest.mock import AsyncMock - mock_ws = AsyncMock() mock_ws.accept = AsyncMock() mock_ws.send_json = AsyncMock() @@ -704,31 +714,32 @@ def test_api_with_malformed_parameters(self): mock_logger = Mock() controller = LogViewerController(logger=mock_logger) - # Test malformed parameters - malformed_params = [ - {"limit": "not_a_number"}, - {"offset": -1}, - {"pr_number": "not_a_number"}, - {"start_time": "invalid_date"}, - {"end_time": "invalid_date"}, - {"hook_id": None}, # None value - {"repository": ""}, # Empty string - ] - - for params in malformed_params: - try: - # This would normally be called through FastAPI with parameter validation - # Here we test the controller's parameter handling - if "limit" in params and not isinstance(params["limit"], int): - with pytest.raises((ValueError, TypeError, HTTPException)): + with patch.object(controller, "_stream_log_entries", return_value=iter([])): + with patch.object(controller, "_estimate_total_log_count", return_value=0): + # Test truly malformed parameters that should raise exceptions + invalid_params = [ + {"limit": 0}, # Below minimum + {"limit": 10001}, # Above maximum + {"offset": -1}, # Negative offset + ] + + for params in invalid_params: + with pytest.raises((ValueError, HTTPException)): controller.get_log_entries(**params) - else: - # For other malformed params, should handle gracefully + + # Test valid edge cases that should succeed + valid_edge_cases = [ + {"hook_id": None}, # None means no filtering + {"repository": ""}, # Empty string means no filtering + {"limit": 1}, # Minimum valid + {"limit": 10000}, # Maximum valid + {"offset": 0}, # Minimum valid + ] + + for params in valid_edge_cases: result = controller.get_log_entries(**params) assert isinstance(result, dict) - except Exception as e: - # Some malformed parameters should raise exceptions - assert isinstance(e, (ValueError, TypeError, HTTPException)) + assert "entries" in result def test_api_with_extremely_large_responses(self): """Test API behavior with extremely large response datasets.""" @@ -851,7 +862,7 @@ async def test_multiple_users_different_filters(self): # Simulate multiple users with different controllers users = [] - for i in range(5): + for _i in range(5): controller = LogViewerController(logger=mock_logger) users.append(controller) @@ -871,7 +882,7 @@ def user_request(controller, filters): # Execute concurrent requests tasks = [] - for controller, filters in zip(users, user_filters): + for controller, filters in zip(users, user_filters, strict=True): task = asyncio.create_task(asyncio.to_thread(user_request, controller, filters)) tasks.append(task) @@ -897,9 
+908,7 @@ async def test_concurrent_websocket_connections_with_filters(self): # Mock WebSocket connections for each user mock_websockets = [] - for i in range(3): - from unittest.mock import AsyncMock - + for _i in range(3): mock_ws = AsyncMock() mock_ws.accept = AsyncMock() mock_ws.send_json = AsyncMock() @@ -926,7 +935,7 @@ async def mock_monitor(user_id): # Start WebSocket connections for all users tasks = [] - for i, (controller, ws) in enumerate(zip(controllers, mock_websockets)): + for i, (controller, ws) in enumerate(zip(controllers, mock_websockets, strict=True)): with patch.object(controller.log_parser, "monitor_log_directory", return_value=mock_monitor(i)): task = asyncio.create_task(controller.handle_websocket(ws)) tasks.append(task) diff --git a/webhook_server/tests/test_exceptions.py b/webhook_server/tests/test_exceptions.py new file mode 100644 index 000000000..f00972813 --- /dev/null +++ b/webhook_server/tests/test_exceptions.py @@ -0,0 +1,20 @@ +"""Tests for custom exceptions.""" + +import pytest + +from webhook_server.libs.exceptions import ( + NoApiTokenError, + RepositoryNotFoundInConfigError, +) + + +def test_repository_not_found_error(): + """Test RepositoryNotFoundInConfigError can be raised.""" + with pytest.raises(RepositoryNotFoundInConfigError): + raise RepositoryNotFoundInConfigError() + + +def test_no_api_token_error(): + """Test NoApiTokenError can be raised.""" + with pytest.raises(NoApiTokenError): + raise NoApiTokenError() diff --git a/webhook_server/tests/test_github_api.py b/webhook_server/tests/test_github_api.py index a66ca2148..fc0c993b9 100644 --- a/webhook_server/tests/test_github_api.py +++ b/webhook_server/tests/test_github_api.py @@ -5,6 +5,7 @@ from unittest.mock import AsyncMock, Mock, patch import pytest +from github.GithubException import GithubException from simple_logger.logger import get_logger from starlette.datastructures import Headers @@ -203,13 +204,10 @@ def test_process_ping_event( @patch.dict(os.environ, {"WEBHOOK_SERVER_DATA_DIR": "webhook_server/tests/manifests"}) @patch("webhook_server.libs.github_api.get_repository_github_app_api") @patch("webhook_server.libs.github_api.get_api_with_highest_rate_limit") - @patch("webhook_server.libs.pull_request_handler.PullRequestHandler.process_pull_request_webhook_data") + @patch("webhook_server.libs.handlers.pull_request_handler.PullRequestHandler.process_pull_request_webhook_data") @patch("webhook_server.utils.helpers.get_apis_and_tokes_from_config") @patch("webhook_server.libs.config.Config.repository_local_data") - @patch( - "webhook_server.libs.github_api.GithubWebhook.add_api_users_to_auto_verified_and_merged_users", - new_callable=lambda: property(lambda self: None), - ) + @patch("webhook_server.libs.github_api.GithubWebhook.add_api_users_to_auto_verified_and_merged_users") async def test_process_pull_request_event( self, mock_auto_verified_prop: Mock, @@ -270,13 +268,10 @@ async def test_process_pull_request_event( @patch.dict(os.environ, {"WEBHOOK_SERVER_DATA_DIR": "webhook_server/tests/manifests"}) @patch("webhook_server.libs.github_api.get_repository_github_app_api") @patch("webhook_server.libs.github_api.get_api_with_highest_rate_limit") - @patch("webhook_server.libs.push_handler.PushHandler.process_push_webhook_data") + @patch("webhook_server.libs.handlers.push_handler.PushHandler.process_push_webhook_data") @patch("webhook_server.utils.helpers.get_apis_and_tokes_from_config") @patch("webhook_server.libs.config.Config.repository_local_data") - @patch( - 
"webhook_server.libs.github_api.GithubWebhook.add_api_users_to_auto_verified_and_merged_users", - new_callable=lambda: property(lambda self: None), - ) + @patch("webhook_server.libs.github_api.GithubWebhook.add_api_users_to_auto_verified_and_merged_users") async def test_process_push_event( self, mock_auto_verified_prop: Mock, @@ -310,13 +305,10 @@ async def test_process_push_event( @patch.dict(os.environ, {"WEBHOOK_SERVER_DATA_DIR": "webhook_server/tests/manifests"}) @patch("webhook_server.libs.github_api.get_repository_github_app_api") @patch("webhook_server.libs.github_api.get_api_with_highest_rate_limit") - @patch("webhook_server.libs.issue_comment_handler.IssueCommentHandler.process_comment_webhook_data") + @patch("webhook_server.libs.handlers.issue_comment_handler.IssueCommentHandler.process_comment_webhook_data") @patch("webhook_server.utils.helpers.get_apis_and_tokes_from_config") @patch("webhook_server.libs.config.Config.repository_local_data") - @patch( - "webhook_server.libs.github_api.GithubWebhook.add_api_users_to_auto_verified_and_merged_users", - new_callable=lambda: property(lambda self: None), - ) + @patch("webhook_server.libs.github_api.GithubWebhook.add_api_users_to_auto_verified_and_merged_users") async def test_process_issue_comment_event( self, mock_auto_verified_prop: Mock, @@ -379,10 +371,7 @@ async def test_process_issue_comment_event( @patch("webhook_server.libs.github_api.get_api_with_highest_rate_limit") @patch("webhook_server.utils.helpers.get_apis_and_tokes_from_config") @patch("webhook_server.libs.config.Config.repository_local_data") - @patch( - "webhook_server.libs.github_api.GithubWebhook.add_api_users_to_auto_verified_and_merged_users", - new_callable=lambda: property(lambda self: None), - ) + @patch("webhook_server.libs.github_api.GithubWebhook.add_api_users_to_auto_verified_and_merged_users") async def test_process_unsupported_event( self, mock_auto_verified_prop: Mock, @@ -645,7 +634,7 @@ def get_value_side_effect(value, *args, **kwargs): mock_api.get_user.return_value = mock_user mock_get_apis.return_value = [(mock_api, "token")] gh = GithubWebhook(minimal_hook_data, minimal_headers, logger) - gh.add_api_users_to_auto_verified_and_merged_users + _ = gh.add_api_users_to_auto_verified_and_merged_users assert "test-user" in gh.auto_verified_and_merged_users @patch("webhook_server.libs.github_api.get_apis_and_tokes_from_config") @@ -797,7 +786,6 @@ async def test_get_pull_request_github_exception( self, minimal_hook_data: dict, minimal_headers: dict, logger: Mock ) -> None: """Test getting pull request with GithubException.""" - from github import GithubException with patch("webhook_server.libs.github_api.Config") as mock_config: mock_config.return_value.repository = True @@ -969,70 +957,6 @@ def test_container_repository_and_tag_no_pull_request( result = gh.container_repository_and_tag() assert result is None - @patch("webhook_server.libs.github_api.requests.post") - def test_send_slack_message_success( - self, mock_post: Mock, minimal_hook_data: dict, minimal_headers: dict, logger: Mock - ) -> None: - """Test sending slack message successfully.""" - with patch("webhook_server.libs.github_api.Config") as mock_config: - mock_config.return_value.repository = True - mock_config.return_value.repository_local_data.return_value = {} - - with patch("webhook_server.libs.github_api.get_api_with_highest_rate_limit") as mock_get_api: - mock_get_api.return_value = (Mock(), "token", "apiuser") - - with patch("webhook_server.libs.github_api.get_github_repo_api") as 
mock_get_repo_api: - mock_get_repo_api.return_value = Mock() - - with patch("webhook_server.libs.github_api.get_repository_github_app_api") as mock_get_app_api: - mock_get_app_api.return_value = Mock() - - with patch("webhook_server.utils.helpers.get_repository_color_for_log_prefix") as mock_color: - mock_color.return_value = "test-repo" - - mock_response = Mock() - mock_response.status_code = 200 - mock_post.return_value = mock_response - - gh = GithubWebhook(minimal_hook_data, minimal_headers, logger) - gh.send_slack_message("Test message", "https://hooks.slack.com/test") - - mock_post.assert_called_once() - call_args = mock_post.call_args - assert call_args[0][0] == "https://hooks.slack.com/test" - assert "Test message" in call_args[1]["data"] - - @patch("webhook_server.libs.github_api.requests.post") - def test_send_slack_message_failure( - self, mock_post: Mock, minimal_hook_data: dict, minimal_headers: dict, logger: Mock - ) -> None: - """Test sending slack message with failure.""" - with patch("webhook_server.libs.github_api.Config") as mock_config: - mock_config.return_value.repository = True - mock_config.return_value.repository_local_data.return_value = {} - - with patch("webhook_server.libs.github_api.get_api_with_highest_rate_limit") as mock_get_api: - mock_get_api.return_value = (Mock(), "token", "apiuser") - - with patch("webhook_server.libs.github_api.get_github_repo_api") as mock_get_repo_api: - mock_get_repo_api.return_value = Mock() - - with patch("webhook_server.libs.github_api.get_repository_github_app_api") as mock_get_app_api: - mock_get_app_api.return_value = Mock() - - with patch("webhook_server.utils.helpers.get_repository_color_for_log_prefix") as mock_color: - mock_color.return_value = "test-repo" - - mock_response = Mock() - mock_response.status_code = 400 - mock_response.text = "Bad Request" - mock_post.return_value = mock_response - - gh = GithubWebhook(minimal_hook_data, minimal_headers, logger) - - with pytest.raises(ValueError, match="Request to slack returned an error 400"): - gh.send_slack_message("Test message", "https://hooks.slack.com/test") - def test_current_pull_request_supported_retest_property( self, minimal_hook_data: dict, minimal_headers: dict, logger: Mock ) -> None: diff --git a/webhook_server/tests/test_github_repository_settings.py b/webhook_server/tests/test_github_repository_settings.py index d8cc0796e..d6f88cf21 100644 --- a/webhook_server/tests/test_github_repository_settings.py +++ b/webhook_server/tests/test_github_repository_settings.py @@ -145,7 +145,7 @@ def test_get_required_status_checks_basic(self) -> None: """Test getting required status checks with basic configuration.""" mock_repo = Mock() # Patch get_contents to raise exception so 'pre-commit.ci - pr' is not added - mock_repo.get_contents.side_effect = Exception() + mock_repo.get_contents.side_effect = UnknownObjectException(status=404, data={}, headers={}) data: dict = {} default_status_checks: list[str] = ["basic-check"] exclude_status_checks: list[str] = [] @@ -231,7 +231,7 @@ def test_get_required_status_checks_with_exclusions(self) -> None: """Test getting required status checks with exclusions.""" mock_repo = Mock() # Patch get_contents to raise exception so 'pre-commit.ci - pr' is not added - mock_repo.get_contents.side_effect = Exception() + mock_repo.get_contents.side_effect = UnknownObjectException(status=404, data={}, headers={}) data: dict = {"tox": True} default_status_checks: list[str] = ["tox", "verified"] exclude_status_checks: list[str] = ["tox"] diff --git 
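Swapping the bare Exception() for UnknownObjectException(status=404, data={}, headers={}) matches what PyGithub actually raises for a missing file, so the mock now exercises the narrow handler instead of a catch-all. A hedged sketch of the guarded lookup these mocks imply (has_pre_commit_config and the probed path are illustrative assumptions, not the project's code):

from github.GithubException import UnknownObjectException
from github.Repository import Repository


def has_pre_commit_config(repo: Repository) -> bool:
    # Probe for the file; PyGithub signals "not found" with a 404
    # UnknownObjectException rather than a generic error.
    try:
        repo.get_contents(".pre-commit-config.yaml")
        return True
    except UnknownObjectException:
        return False
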
a/webhook_server/tests/test_helpers.py b/webhook_server/tests/test_helpers.py index 4f037b29f..8252b36a7 100644 --- a/webhook_server/tests/test_helpers.py +++ b/webhook_server/tests/test_helpers.py @@ -1,66 +1,27 @@ +import datetime import logging import os import sys from unittest.mock import Mock, patch + import pytest +from webhook_server.libs.config import Config +from webhook_server.libs.exceptions import NoApiTokenError from webhook_server.utils.helpers import ( - extract_key_from_dict, - get_logger_with_params, get_api_with_highest_rate_limit, get_apis_and_tokes_from_config, + get_future_results, get_github_repo_api, - run_command, + get_logger_with_params, log_rate_limit, - get_future_results, + run_command, ) class TestHelpers: """Test suite for utility helper functions.""" - def test_extract_key_from_dict_simple(self) -> None: - """Test extracting key from simple dictionary.""" - test_dict = {"key1": "value1", "key2": "value2"} - result = list(extract_key_from_dict(key="key1", _dict=test_dict)) - assert result == ["value1"] - - def test_extract_key_from_dict_nested(self) -> None: - """Test extracting key from nested dictionary.""" - test_dict = {"level1": {"key1": "nested_value1", "level2": {"key1": "nested_value2"}}, "key1": "root_value"} - result = list(extract_key_from_dict(key="key1", _dict=test_dict)) - assert set(result) == {"nested_value1", "nested_value2", "root_value"} - - def test_extract_key_from_dict_with_lists(self) -> None: - """Test extracting key from dictionary containing lists.""" - test_dict = { - "items": [{"key1": "list_value1"}, {"key1": "list_value2", "other": "ignored"}], - "key1": "root_value", - } - result = list(extract_key_from_dict(key="key1", _dict=test_dict)) - assert set(result) == {"list_value1", "list_value2", "root_value"} - - def test_extract_key_from_dict_not_found(self) -> None: - """Test extracting non-existent key returns empty list.""" - test_dict = {"key1": "value1", "key2": "value2"} - result = list(extract_key_from_dict(key="nonexistent", _dict=test_dict)) - assert result == [] - - def test_extract_key_from_dict_empty_dict(self) -> None: - """Test extracting key from empty dictionary.""" - result = list(extract_key_from_dict(key="any_key", _dict={})) - assert result == [] - - def test_extract_key_from_dict_complex_nested(self) -> None: - """Test extracting key from complex nested structure.""" - test_dict = { - "pull_request": {"number": 123}, - "issue": {"number": 456}, - "commits": [{"commit": {"message": "test", "number": 789}}, {"commit": {"message": "test2"}}], - } - result = list(extract_key_from_dict(key="number", _dict=test_dict)) - assert set(result) == {123, 456, 789} - def test_get_logger_with_params_default(self) -> None: """Test logger creation with default parameters.""" logger = get_logger_with_params() @@ -77,7 +38,6 @@ def test_get_logger_with_params_with_repository(self) -> None: @patch.dict(os.environ, {"WEBHOOK_SERVER_DATA_DIR": "webhook_server/tests/manifests"}) def test_get_apis_and_tokes_from_config(self) -> None: """Test getting APIs and tokens from configuration.""" - from webhook_server.libs.config import Config config = Config(repository="test-repo") apis_and_tokens = get_apis_and_tokes_from_config(config=config) @@ -95,7 +55,6 @@ def test_get_apis_and_tokes_from_config(self) -> None: @patch("webhook_server.utils.helpers.log_rate_limit") def test_get_api_with_highest_rate_limit(self, mock_log_rate_limit: Mock, mock_get_apis: Mock) -> None: """Test getting API with highest rate limit.""" - from 
webhook_server.libs.config import Config # Mock APIs with different rate limits mock_api1 = Mock() @@ -130,8 +89,6 @@ def test_get_api_with_highest_rate_limit(self, mock_log_rate_limit: Mock, mock_g @patch("webhook_server.utils.helpers.get_apis_and_tokes_from_config") def test_get_api_with_highest_rate_limit_no_apis(self, mock_get_apis: Mock) -> None: """Test getting API when no APIs available.""" - from webhook_server.libs.config import Config - from webhook_server.libs.exceptions import NoApiTokenError mock_get_apis.return_value = [] @@ -164,34 +121,12 @@ def test_get_github_repo_api_exception(self) -> None: with pytest.raises(Exception, match="Repository not found"): get_github_repo_api(github_app_api=mock_github_api, repository=repository_name) - def test_extract_key_from_dict_with_none_values(self) -> None: - """Test extracting key from dictionary with None values.""" - test_dict = {"key1": None, "nested": {"key1": "value1", "key2": None}} - result = list(extract_key_from_dict(key="key1", _dict=test_dict)) - # Should return all values including None - assert result == [None, "value1"] - - def test_extract_key_from_dict_with_boolean_values(self) -> None: - """Test extracting key from dictionary with boolean values.""" - test_dict = {"key1": True, "nested": {"key1": False, "key2": "string_value"}} - result = list(extract_key_from_dict(key="key1", _dict=test_dict)) - # Should include boolean values - assert set(result) == {True, False} - - def test_extract_key_from_dict_with_numeric_values(self) -> None: - """Test extracting key from dictionary with numeric values.""" - test_dict = {"key1": 42, "nested": {"key1": 3.14, "key2": "ignored"}, "list": [{"key1": 0}]} - result = list(extract_key_from_dict(key="key1", _dict=test_dict)) - # Should include all numeric values - assert set(result) == {42, 3.14, 0} - @patch("webhook_server.utils.helpers.get_apis_and_tokes_from_config") @patch("webhook_server.utils.helpers.log_rate_limit") def test_get_api_with_highest_rate_limit_invalid_tokens( self, mock_log_rate_limit: Mock, mock_get_apis: Mock ) -> None: """Test getting API with invalid tokens (rate limit 60).""" - from webhook_server.libs.config import Config # Mock API with invalid token (rate limit 60) mock_api1 = Mock() @@ -332,13 +267,12 @@ async def test_run_command_exception(self): def test_log_rate_limit_all_branches(self): """Test log_rate_limit for all color/warning branches.""" - import datetime # Patch logger to capture logs with patch("webhook_server.utils.helpers.get_logger_with_params") as mock_get_logger: mock_logger = Mock() mock_get_logger.return_value = mock_logger - now = datetime.datetime.now(datetime.timezone.utc) + now = datetime.datetime.now(datetime.UTC) # RED branch (below_minimum) rate_core = Mock() rate_core.remaining = 600 diff --git a/webhook_server/tests/test_helpers_sanitization.py b/webhook_server/tests/test_helpers_sanitization.py new file mode 100644 index 000000000..f297e26c4 --- /dev/null +++ b/webhook_server/tests/test_helpers_sanitization.py @@ -0,0 +1,436 @@ +"""Tests for log sanitization in helpers module.""" + +from __future__ import annotations + +from unittest.mock import MagicMock, patch + +import pytest + +from webhook_server.utils.helpers import ( + _redact_secrets, + _sanitize_log_value, + _truncate_output, + format_task_fields, + run_command, + strip_ansi_codes, +) + + +class TestSanitizeLogValue: + """Test the _sanitize_log_value helper function.""" + + def test_sanitize_newlines(self) -> None: + """Test that newlines are removed.""" + value = 
"test\nvalue\nwith\nnewlines" + result = _sanitize_log_value(value) + assert "\n" not in result + assert result == "test value with newlines" + + def test_sanitize_carriage_returns(self) -> None: + """Test that carriage returns are removed.""" + value = "test\rvalue\r\nwith\rreturns" + result = _sanitize_log_value(value) + assert "\r" not in result + assert "\n" not in result + assert result == "test value with returns" + + def test_sanitize_tabs(self) -> None: + """Test handling of tabs (currently preserved by implementation).""" + value = "test\tvalue\twith\ttabs" + result = _sanitize_log_value(value) + # Note: Current implementation doesn't remove tabs, only newlines and carriage returns + # This test documents current behavior and can be updated if tabs should be sanitized + assert result == "test\tvalue\twith\ttabs" + + def test_sanitize_control_characters(self) -> None: + """Test handling of control characters (currently preserved by implementation).""" + value = "test\x00value\x01with\x02control" + result = _sanitize_log_value(value) + # Note: Current implementation doesn't remove control characters + # This test documents current behavior and can be updated if control chars should be sanitized + assert "test" in result and "value" in result + # Verify the function doesn't break with control characters + assert isinstance(result, str) + + def test_escape_brackets(self) -> None: + """Test that brackets are escaped.""" + value = "value[with]brackets" + result = _sanitize_log_value(value) + assert result == "value\\[with\\]brackets" + + def test_combined_injection_attempt(self) -> None: + """Test sanitization of complex injection attempt.""" + # Simulates log injection: task_id=normal] [task_id=injected + value = "normal] [task_id=injected" + result = _sanitize_log_value(value) + # Should escape brackets to prevent breaking out of structured field + assert result == "normal\\] \\[task_id=injected" + + def test_newline_injection_attempt(self) -> None: + """Test sanitization of newline injection attempt.""" + # Simulates log injection with newline to insert fake log entry + value = "normal\n[ERROR] Fake log entry" + result = _sanitize_log_value(value) + assert "\n" not in result + # Brackets should also be escaped to prevent fake structured log entries + assert result == "normal \\[ERROR\\] Fake log entry" + + def test_empty_string(self) -> None: + """Test sanitization of empty string.""" + result = _sanitize_log_value("") + assert result == "" + + def test_clean_value_unchanged_content(self) -> None: + """Test that clean values have same content (just escaped brackets).""" + value = "clean_task_id_123" + result = _sanitize_log_value(value) + assert result == value # No brackets, newlines, or returns to sanitize + + +class TestFormatTaskFields: + """Test the format_task_fields function with sanitization.""" + + def test_format_task_fields_normal(self) -> None: + """Test normal task field formatting.""" + result = format_task_fields( + task_id="check_tox", + task_type="ci_check", + task_status="started", + ) + assert result == "[task_id=check_tox] [task_type=ci_check] [task_status=started]" + + def test_format_task_fields_with_injection(self) -> None: + """Test task field formatting with injection attempt.""" + # Try to inject additional fields via bracket manipulation + result = format_task_fields( + task_id="normal] [task_id=injected", + task_type="ci_check", + task_status="started", + ) + # Brackets should be escaped to prevent injection + assert result == "[task_id=normal\\] 
\\[task_id=injected] [task_type=ci_check] [task_status=started]" + + def test_format_task_fields_with_newlines(self) -> None: + """Test task field formatting with newline injection attempt.""" + result = format_task_fields( + task_id="check_tox\nFAKE_LOG_ENTRY", + task_type="ci_check", + task_status="started", + ) + # Newlines should be replaced with spaces + assert "\n" not in result + assert result == "[task_id=check_tox FAKE_LOG_ENTRY] [task_type=ci_check] [task_status=started]" + + def test_format_task_fields_with_tabs_and_control_chars(self) -> None: + """Test task field formatting with tabs and control characters.""" + result = format_task_fields( + task_id="check\ttox\x00test", + task_type="ci_check", + task_status="started", + ) + # Note: Current implementation preserves tabs and control chars (only sanitizes \n, \r, and brackets) + # This test documents current behavior + assert isinstance(result, str) + assert "task_id=" in result + assert "task_type=ci_check" in result + assert "task_status=started" in result + + def test_format_task_fields_partial(self) -> None: + """Test formatting with only some fields provided.""" + result = format_task_fields(task_id="check_tox") + assert result == "[task_id=check_tox]" + + result = format_task_fields(task_type="ci_check", task_status="started") + assert result == "[task_type=ci_check] [task_status=started]" + + def test_format_task_fields_empty(self) -> None: + """Test formatting with no fields provided.""" + result = format_task_fields() + assert result == "" + + def test_format_task_fields_all_injections(self) -> None: + """Test formatting with injection attempts in all fields.""" + result = format_task_fields( + task_id="id]\n[fake=field", + task_type="type]\r\n[fake=log", + task_status="status[bracket]test", + ) + # All dangerous characters should be sanitized + assert "\n" not in result + assert "\r" not in result + # Brackets should be escaped + assert "\\[" in result + assert "\\]" in result + + +class TestRedactSecrets: + """Test the _redact_secrets function.""" + + def test_redact_single_secret(self) -> None: + """Test redacting a single secret.""" + text = "Password is secret123" + secrets = ["secret123"] + result = _redact_secrets(text, secrets) + assert "secret123" not in result + assert "***REDACTED***" in result + + def test_redact_multiple_secrets(self) -> None: + """Test redacting multiple secrets.""" + text = "Token: abc123 Password: xyz789" + secrets = ["abc123", "xyz789"] + result = _redact_secrets(text, secrets) + assert "abc123" not in result + assert "xyz789" not in result + assert result.count("***REDACTED***") == 2 + + def test_redact_empty_secrets_list(self) -> None: + """Test with empty secrets list.""" + text = "No secrets here" + result = _redact_secrets(text, None) + assert result == text + + def test_redact_no_secrets_in_text(self) -> None: + """Test when no secrets are found in text.""" + text = "No secrets here" + secrets = ["secret123"] + result = _redact_secrets(text, secrets) + assert result == text + + def test_redact_secret_with_special_regex_chars(self) -> None: + """Test redacting secrets containing regex special characters.""" + text = "Password: test[123].*" + secrets = ["test[123].*"] + result = _redact_secrets(text, secrets) + assert "test[123].*" not in result + assert "***REDACTED***" in result + + def test_redact_case_sensitive(self) -> None: + """Test case-sensitive redaction (default).""" + text = "Token: ABC123" + secrets = ["abc123"] + result = _redact_secrets(text, secrets, 
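The expected strings in these cases fully determine a small sanitizer: CR/LF runs collapse to one space, brackets are backslash-escaped, and tabs and other control characters pass through. One shape that satisfies every assertion above (assumed; the real helpers in webhook_server/utils/helpers.py may differ):

import re


def _sanitize_log_value(value: str) -> str:
    # Collapse CR/LF runs into a single space, then escape brackets so a
    # value cannot close its own [field=...] wrapper or forge a new one.
    value = re.sub(r"[\r\n]+", " ", value)
    return value.replace("[", "\\[").replace("]", "\\]")


def format_task_fields(
    task_id: str | None = None,
    task_type: str | None = None,
    task_status: str | None = None,
) -> str:
    # Only the provided fields are rendered, joined by single spaces.
    pairs = (("task_id", task_id), ("task_type", task_type), ("task_status", task_status))
    return " ".join(f"[{name}={_sanitize_log_value(value)}]" for name, value in pairs if value is not None)
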
case_insensitive=False) + # Should not match due to case difference + assert "ABC123" in result + + def test_redact_case_insensitive(self) -> None: + """Test case-insensitive redaction.""" + text = "Token: ABC123" + secrets = ["abc123"] + result = _redact_secrets(text, secrets, case_insensitive=True) + assert "ABC123" not in result + assert "***REDACTED***" in result + + def test_redact_substring_prevention(self) -> None: + """Test that longer secrets are matched first to prevent substring leaks.""" + text = "Secret: abcdef" + secrets = ["abc", "abcdef"] + result = _redact_secrets(text, secrets) + # Should match "abcdef" first, not "abc" + assert result.count("***REDACTED***") == 1 + assert "abc" not in result + + def test_redact_empty_strings_filtered(self) -> None: + """Test that empty strings in secrets list are filtered out.""" + text = "No secrets" + secrets = ["", " ", "secret"] + result = _redact_secrets(text, secrets) + # Empty strings should be filtered, but "secret" should still be redacted + assert result == "No ***REDACTED***s" + # Verify empty strings don't cause issues (they're filtered in the function) + result2 = _redact_secrets(text, ["", " "]) + assert result2 == text # No non-empty secrets, so no redaction + + def test_redact_with_mask_sensitive_false(self) -> None: + """Test that when mask_sensitive=False, secrets are NOT redacted.""" + text = "Token: secret123 Password: abc456" + secrets = ["secret123", "abc456"] + result = _redact_secrets(text, secrets, mask_sensitive=False) + # Secrets should still be present + assert "secret123" in result + assert "abc456" in result + # No redaction marker should be present + assert "***REDACTED***" not in result + # Result should be unchanged + assert result == text + + def test_redact_with_mask_sensitive_true(self) -> None: + """Test that when mask_sensitive=True, secrets ARE redacted (explicit parameter).""" + text = "Token: secret123" + secrets = ["secret123"] + result = _redact_secrets(text, secrets, mask_sensitive=True) + # Secret should be redacted + assert "secret123" not in result + # Redaction marker should be present + assert "***REDACTED***" in result + + def test_redact_mask_sensitive_default(self) -> None: + """Test that default behavior is to mask (backward compatibility).""" + text = "Token: secret123" + secrets = ["secret123"] + # Call without mask_sensitive parameter - should default to True + result = _redact_secrets(text, secrets) + # Secret should be redacted by default + assert "secret123" not in result + # Redaction marker should be present + assert "***REDACTED***" in result + + +class TestTruncateOutput: + """Test the _truncate_output function.""" + + def test_truncate_long_text(self) -> None: + """Test truncating text longer than max_length.""" + text = "A" * 1000 + result = _truncate_output(text, max_length=500) + assert len(result) < len(text) + assert "... [truncated" in result + assert "500 chars]" in result + + def test_truncate_short_text(self) -> None: + """Test that short text is not truncated.""" + text = "Short text" + result = _truncate_output(text, max_length=500) + assert result == text + + def test_truncate_exact_length(self) -> None: + """Test text exactly at max_length.""" + text = "A" * 500 + result = _truncate_output(text, max_length=500) + assert result == text + + def test_truncate_custom_max_length(self) -> None: + """Test with custom max_length.""" + text = "A" * 200 + result = _truncate_output(text, max_length=100) + assert len(result) < len(text) + assert "... 
[truncated 100 chars]" in result + + def test_truncate_empty_string(self) -> None: + """Test truncating empty string.""" + result = _truncate_output("", max_length=500) + assert result == "" + + +class TestStripAnsiCodes: + """Test the strip_ansi_codes function.""" + + def test_strip_color_codes(self) -> None: + """Test stripping ANSI color codes.""" + text = "\x1b[31mRed text\x1b[0m" + result = strip_ansi_codes(text) + assert result == "Red text" + assert "\x1b" not in result + + def test_strip_bold_codes(self) -> None: + """Test stripping ANSI bold codes.""" + text = "\x1b[1mBold text\x1b[0m" + result = strip_ansi_codes(text) + assert result == "Bold text" + + def test_strip_multiple_codes(self) -> None: + """Test stripping multiple ANSI codes.""" + text = "\x1b[1m\x1b[32mBold green\x1b[0m" + result = strip_ansi_codes(text) + assert result == "Bold green" + + def test_strip_no_ansi_codes(self) -> None: + """Test text with no ANSI codes.""" + text = "Plain text" + result = strip_ansi_codes(text) + assert result == text + + def test_strip_cursor_movement_codes(self) -> None: + """Test stripping cursor movement codes.""" + text = "\x1b[2J\x1b[HClear screen" + result = strip_ansi_codes(text) + assert result == "Clear screen" + + def test_strip_mixed_content(self) -> None: + """Test stripping ANSI codes from mixed content.""" + text = "Start \x1b[31mred\x1b[0m middle \x1b[32mgreen\x1b[0m end" + result = strip_ansi_codes(text) + assert result == "Start red middle green end" + + def test_strip_empty_string(self) -> None: + """Test stripping ANSI codes from empty string.""" + result = strip_ansi_codes("") + assert result == "" + + def test_strip_escape_sequences_only(self) -> None: + """Test that only ANSI escape sequences are removed.""" + text = "Text with [brackets] and (parentheses)" + result = strip_ansi_codes(text) + assert result == text # Should remain unchanged + + +class TestRunCommandMaskSensitive: + """Test the run_command function with mask_sensitive parameter.""" + + @pytest.mark.asyncio + async def test_run_command_mask_sensitive_false(self) -> None: + """Test that run_command respects mask_sensitive=False. + + When mask_sensitive=False, secrets should NOT be redacted in logs. + The function should log the unredacted command and output. + """ + # Mock the logger to capture log messages + with patch("webhook_server.utils.helpers.get_logger_with_params") as mock_get_logger: + mock_logger = MagicMock() + mock_get_logger.return_value = mock_logger + + # Run command with a secret and mask_sensitive=False + success, stdout, stderr = await run_command( + command="echo 'token: ghp_test123'", + log_prefix="test", + redact_secrets=["ghp_test123"], + mask_sensitive=False, + ) + + # Verify command succeeded + assert success is True + + # Verify the token appears in stdout (unredacted return value) + assert "ghp_test123" in stdout + + # Verify logger.debug was called with unredacted command + debug_calls = [call for call in mock_logger.debug.call_args_list] + assert any("ghp_test123" in str(call) for call in debug_calls), ( + "Token should appear in debug logs when mask_sensitive=False" + ) + + @pytest.mark.asyncio + async def test_run_command_mask_sensitive_true(self) -> None: + """Test that run_command respects mask_sensitive=True. + + When mask_sensitive=True, secrets should be redacted in logs. + However, the returned stdout/stderr should remain unredacted. 
+ """ + # Mock the logger to capture log messages + with patch("webhook_server.utils.helpers.get_logger_with_params") as mock_get_logger: + mock_logger = MagicMock() + mock_get_logger.return_value = mock_logger + + # Run command with a secret and mask_sensitive=True + success, stdout, stderr = await run_command( + command="echo 'token: ghp_test456'", + log_prefix="test", + redact_secrets=["ghp_test456"], + mask_sensitive=True, + ) + + # Verify command succeeded + assert success is True + + # Verify the token appears in stdout (unredacted return value) + # Per function design: "stdout and stderr are UNREDACTED strings" + assert "ghp_test456" in stdout + + # Verify logger.debug was called with REDACTED command + debug_calls = [call for call in mock_logger.debug.call_args_list] + assert any("***REDACTED***" in str(call) for call in debug_calls), ( + "Redaction marker should appear in debug logs when mask_sensitive=True" + ) + # Ensure the secret does NOT appear in logs + assert not any("ghp_test456" in str(call) for call in debug_calls), ( + "Token should NOT appear in debug logs when mask_sensitive=True" + ) diff --git a/webhook_server/tests/test_issue_comment_handler.py b/webhook_server/tests/test_issue_comment_handler.py index 5bb1e84f5..35e3c420e 100644 --- a/webhook_server/tests/test_issue_comment_handler.py +++ b/webhook_server/tests/test_issue_comment_handler.py @@ -1,8 +1,10 @@ +import asyncio +import time from unittest.mock import AsyncMock, Mock, patch import pytest -from webhook_server.libs.issue_comment_handler import IssueCommentHandler +from webhook_server.libs.handlers.issue_comment_handler import IssueCommentHandler from webhook_server.utils.constants import ( BUILD_AND_PUSH_CONTAINER_STR, COMMAND_ASSIGN_REVIEWER_STR, @@ -60,6 +62,10 @@ async def test_process_comment_webhook_data_edited_action(self, issue_comment_ha with patch.object(issue_comment_handler, "user_commands") as mock_user_commands: await issue_comment_handler.process_comment_webhook_data(Mock()) mock_user_commands.assert_not_called() + # Verify completion log was emitted with task_status='completed' + assert any( + "task_status=completed" in call.args[0] for call in issue_comment_handler.logger.step.call_args_list + ), "Expected a completion log with task_status='completed'" @pytest.mark.asyncio async def test_process_comment_webhook_data_deleted_action( @@ -71,6 +77,10 @@ async def test_process_comment_webhook_data_deleted_action( with patch.object(issue_comment_handler, "user_commands") as mock_user_commands: await issue_comment_handler.process_comment_webhook_data(Mock()) mock_user_commands.assert_not_called() + # Verify completion log was emitted with task_status='completed' + assert any( + "task_status=completed" in call.args[0] for call in issue_comment_handler.logger.step.call_args_list + ), "Expected a completion log with task_status='completed'" @pytest.mark.asyncio async def test_process_comment_webhook_data_welcome_message( @@ -82,6 +92,10 @@ async def test_process_comment_webhook_data_welcome_message( with patch.object(issue_comment_handler, "user_commands") as mock_user_commands: await issue_comment_handler.process_comment_webhook_data(Mock()) mock_user_commands.assert_not_called() + # Verify completion log was emitted with task_status='completed' + assert any( + "task_status=completed" in call.args[0] for call in issue_comment_handler.logger.step.call_args_list + ), "Expected a completion log with task_status='completed'" @pytest.mark.asyncio async def 
test_process_comment_webhook_data_normal_comment( @@ -93,6 +107,23 @@ async def test_process_comment_webhook_data_normal_comment( with patch.object(issue_comment_handler, "user_commands") as mock_user_commands: await issue_comment_handler.process_comment_webhook_data(Mock()) mock_user_commands.assert_called_once() + # Verify completion log was emitted with task_status='completed' + assert any( + "task_status=completed" in call.args[0] for call in issue_comment_handler.logger.step.call_args_list + ), "Expected a completion log with task_status='completed'" + + @pytest.mark.asyncio + async def test_process_comment_webhook_data_no_commands(self, issue_comment_handler: IssueCommentHandler) -> None: + """Test processing comment webhook data with no commands.""" + issue_comment_handler.hook_data["comment"]["body"] = "Just a regular comment" + + with patch.object(issue_comment_handler, "user_commands") as mock_user_commands: + await issue_comment_handler.process_comment_webhook_data(Mock()) + mock_user_commands.assert_not_called() + # Verify completion log was emitted with task_status='completed' + assert any( + "task_status=completed" in call.args[0] for call in issue_comment_handler.logger.step.call_args_list + ), "Expected a completion log with task_status='completed'" @pytest.mark.asyncio async def test_process_comment_webhook_data_multiple_commands( @@ -105,6 +136,93 @@ async def test_process_comment_webhook_data_multiple_commands( await issue_comment_handler.process_comment_webhook_data(Mock()) assert mock_user_commands.call_count == 2 + @pytest.mark.asyncio + async def test_process_comment_webhook_data_parallel_execution( + self, issue_comment_handler: IssueCommentHandler + ) -> None: + """Test that multiple commands execute in parallel, not sequentially. + + This test verifies: + 1. Multiple commands start concurrently (not one-after-another) + 2. Parallel execution is significantly faster than sequential + 3. Exception in one command doesn't block others + 4. 
All commands complete even if one fails + """ + issue_comment_handler.hook_data["comment"]["body"] = "/verified\n/approved\n/hold" + + # Track execution order and timing + execution_events: list[tuple[str, str, float]] = [] # (command, event, timestamp) + + async def mock_command(pull_request, command, reviewed_user, issue_comment_id): + """Mock command that simulates real work and tracks execution.""" + start_time = time.time() + execution_events.append((command, "start", start_time)) + + # Simulate work (50ms per command) + await asyncio.sleep(0.05) + + # Simulate exception for second command to test exception handling + if command == "approved": + execution_events.append((command, "error", time.time())) + raise ValueError(f"Simulated error in {command}") + + execution_events.append((command, "end", time.time())) + + with patch.object(issue_comment_handler, "user_commands", side_effect=mock_command): + # Execute commands + start = time.time() + await issue_comment_handler.process_comment_webhook_data(Mock()) + total_duration = time.time() - start + + # VERIFICATION 1: All three commands should have started + start_events = [e for e in execution_events if e[1] == "start"] + assert len(start_events) == 3, f"Expected 3 commands to start, got {len(start_events)}" + + # VERIFICATION 2: Commands started concurrently (within 10ms of each other) + # In sequential execution, commands would start 50ms apart + # In parallel execution, all start nearly simultaneously + first_start = start_events[0][2] + last_start = start_events[-1][2] + start_time_spread = last_start - first_start + + # All commands should start within 10ms (parallel) + # vs 100ms+ for sequential execution (50ms * 2 delays) + assert start_time_spread < 0.015, f"Commands did not start concurrently (spread: {start_time_spread:.3f}s)" + + # VERIFICATION 3: Total execution time indicates parallel execution + # Sequential: 3 commands * 50ms = 150ms minimum + # Parallel: max(50ms) = 50ms (plus overhead) + # Allow 100ms for parallel (generous overhead buffer) + assert total_duration < 0.1, f"Execution took {total_duration:.3f}s, expected < 0.1s (parallel execution)" + + # Sequential would take at least 150ms + assert total_duration < 0.12, f"Commands appear to run sequentially ({total_duration:.3f}s >= 0.12s)" + + # VERIFICATION 4: Exception in one command didn't stop others + # verified and hold should complete successfully + successful_completions = [e for e in execution_events if e[1] == "end"] + assert len(successful_completions) == 2, ( + f"Expected 2 successful completions (verified, hold), got {len(successful_completions)}" + ) + + # VERIFICATION 5: Error was recorded for failed command + error_events = [e for e in execution_events if e[1] == "error"] + assert len(error_events) == 1, f"Expected 1 error event (approved), got {len(error_events)}" + assert error_events[0][0] == "approved", "Error should be for 'approved' command" + + # VERIFICATION 6: Commands completed in overlapping time windows + # This proves they ran concurrently, not sequentially + verified_start = next(e[2] for e in execution_events if e[0] == "verified" and e[1] == "start") + hold_end = next(e[2] for e in execution_events if e[0] == "hold" and e[1] == "end") + + # Both commands (verified and hold) should overlap in execution + # If sequential: hold would start AFTER verified ends (100ms gap) + # If parallel: hold starts immediately, both execute simultaneously + execution_overlap = hold_end - verified_start + + # Overlap should be ~50ms (parallel) not ~100ms 
(sequential) + assert execution_overlap < 0.08, f"Execution overlap {execution_overlap:.3f}s suggests sequential execution" + @pytest.mark.asyncio async def test_user_commands_unsupported_command(self, issue_comment_handler: IssueCommentHandler) -> None: """Test user commands with unsupported command.""" @@ -154,7 +272,9 @@ async def test_user_commands_assign_reviewer_with_args(self, issue_comment_handl mock_pull_request = Mock() with patch.object(issue_comment_handler, "create_comment_reaction") as mock_reaction: - with patch.object(issue_comment_handler, "_add_reviewer_by_user_comment") as mock_add_reviewer: + with patch.object( + issue_comment_handler, "_add_reviewer_by_user_comment", new_callable=AsyncMock + ) as mock_add_reviewer: await issue_comment_handler.user_commands( pull_request=mock_pull_request, command=f"{COMMAND_ASSIGN_REVIEWER_STR} reviewer1", @@ -188,7 +308,9 @@ async def test_user_commands_check_can_merge(self, issue_comment_handler: IssueC mock_pull_request = Mock() with patch.object(issue_comment_handler, "create_comment_reaction") as mock_reaction: - with patch.object(issue_comment_handler.pull_request_handler, "check_if_can_be_merged") as mock_check: + with patch.object( + issue_comment_handler.pull_request_handler, "check_if_can_be_merged", new_callable=AsyncMock + ) as mock_check: await issue_comment_handler.user_commands( pull_request=mock_pull_request, command=COMMAND_CHECK_CAN_MERGE_STR, @@ -204,7 +326,9 @@ async def test_user_commands_cherry_pick(self, issue_comment_handler: IssueComme mock_pull_request = Mock() with patch.object(issue_comment_handler, "create_comment_reaction") as mock_reaction: - with patch.object(issue_comment_handler, "process_cherry_pick_command") as mock_cherry_pick: + with patch.object( + issue_comment_handler, "process_cherry_pick_command", new_callable=AsyncMock + ) as mock_cherry_pick: await issue_comment_handler.user_commands( pull_request=mock_pull_request, command=f"{COMMAND_CHERRY_PICK_STR} branch1 branch2", @@ -240,7 +364,9 @@ async def test_user_commands_build_container_enabled(self, issue_comment_handler mock_pull_request = Mock() with patch.object(issue_comment_handler, "create_comment_reaction") as mock_reaction: - with patch.object(issue_comment_handler.runner_handler, "run_build_container") as mock_build: + with patch.object( + issue_comment_handler.runner_handler, "run_build_container", new_callable=AsyncMock + ) as mock_build: await issue_comment_handler.user_commands( pull_request=mock_pull_request, command=f"{BUILD_AND_PUSH_CONTAINER_STR} args", @@ -280,7 +406,9 @@ async def test_user_commands_wip_add(self, issue_comment_handler: IssueCommentHa mock_pull_request.title = "Test PR" with patch.object(issue_comment_handler, "create_comment_reaction") as mock_reaction: - with patch.object(issue_comment_handler.labels_handler, "_add_label") as mock_add_label: + with patch.object( + issue_comment_handler.labels_handler, "_add_label", new_callable=AsyncMock + ) as mock_add_label: with patch.object(mock_pull_request, "edit") as mock_edit: await issue_comment_handler.user_commands( pull_request=mock_pull_request, command=WIP_STR, reviewed_user="test-user", issue_comment_id=123 @@ -296,7 +424,9 @@ async def test_user_commands_wip_remove(self, issue_comment_handler: IssueCommen mock_pull_request.title = "WIP: Test PR" with patch.object(issue_comment_handler, "create_comment_reaction") as mock_reaction: - with patch.object(issue_comment_handler.labels_handler, "_remove_label") as mock_remove_label: + with patch.object( + 
issue_comment_handler.labels_handler, "_remove_label", new_callable=AsyncMock + ) as mock_remove_label: with patch.object(mock_pull_request, "edit") as mock_edit: await issue_comment_handler.user_commands( pull_request=mock_pull_request, @@ -332,8 +462,12 @@ async def test_user_commands_hold_authorized_user_add(self, issue_comment_handle mock_pull_request = Mock() with patch.object(issue_comment_handler, "create_comment_reaction") as mock_reaction: - with patch.object(issue_comment_handler.labels_handler, "_add_label") as mock_add_label: - with patch.object(issue_comment_handler.pull_request_handler, "check_if_can_be_merged") as mock_check: + with patch.object( + issue_comment_handler.labels_handler, "_add_label", new_callable=AsyncMock + ) as mock_add_label: + with patch.object( + issue_comment_handler.pull_request_handler, "check_if_can_be_merged", new_callable=AsyncMock + ) as mock_check: await issue_comment_handler.user_commands( pull_request=mock_pull_request, command=HOLD_LABEL_STR, @@ -350,8 +484,12 @@ async def test_user_commands_hold_authorized_user_remove(self, issue_comment_han mock_pull_request = Mock() with patch.object(issue_comment_handler, "create_comment_reaction") as mock_reaction: - with patch.object(issue_comment_handler.labels_handler, "_remove_label") as mock_remove_label: - with patch.object(issue_comment_handler.pull_request_handler, "check_if_can_be_merged") as mock_check: + with patch.object( + issue_comment_handler.labels_handler, "_remove_label", new_callable=AsyncMock + ) as mock_remove_label: + with patch.object( + issue_comment_handler.pull_request_handler, "check_if_can_be_merged", new_callable=AsyncMock + ) as mock_check: await issue_comment_handler.user_commands( pull_request=mock_pull_request, command=f"{HOLD_LABEL_STR} cancel", @@ -368,8 +506,12 @@ async def test_user_commands_verified_add(self, issue_comment_handler: IssueComm mock_pull_request = Mock() with patch.object(issue_comment_handler, "create_comment_reaction") as mock_reaction: - with patch.object(issue_comment_handler.labels_handler, "_add_label") as mock_add_label: - with patch.object(issue_comment_handler.check_run_handler, "set_verify_check_success") as mock_success: + with patch.object( + issue_comment_handler.labels_handler, "_add_label", new_callable=AsyncMock + ) as mock_add_label: + with patch.object( + issue_comment_handler.check_run_handler, "set_verify_check_success", new_callable=AsyncMock + ) as mock_success: await issue_comment_handler.user_commands( pull_request=mock_pull_request, command=VERIFIED_LABEL_STR, @@ -386,8 +528,12 @@ async def test_user_commands_verified_remove(self, issue_comment_handler: IssueC mock_pull_request = Mock() with patch.object(issue_comment_handler, "create_comment_reaction") as mock_reaction: - with patch.object(issue_comment_handler.labels_handler, "_remove_label") as mock_remove_label: - with patch.object(issue_comment_handler.check_run_handler, "set_verify_check_queued") as mock_queued: + with patch.object( + issue_comment_handler.labels_handler, "_remove_label", new_callable=AsyncMock + ) as mock_remove_label: + with patch.object( + issue_comment_handler.check_run_handler, "set_verify_check_queued", new_callable=AsyncMock + ) as mock_queued: await issue_comment_handler.user_commands( pull_request=mock_pull_request, command=f"{VERIFIED_LABEL_STR} cancel", @@ -403,7 +549,7 @@ async def test_user_commands_custom_label(self, issue_comment_handler: IssueComm """Test user commands with custom label command.""" mock_pull_request = Mock() # Patch 
USER_LABELS_DICT to include 'bug' - with patch("webhook_server.libs.issue_comment_handler.USER_LABELS_DICT", {"bug": "Bug label"}): + with patch("webhook_server.libs.handlers.issue_comment_handler.USER_LABELS_DICT", {"bug": "Bug label"}): with patch.object(issue_comment_handler, "create_comment_reaction") as mock_reaction: with patch.object( issue_comment_handler.labels_handler, "label_by_user_comment", new_callable=AsyncMock @@ -474,7 +620,9 @@ async def test_process_cherry_pick_command_existing_branches( with patch.object(mock_pull_request, "is_merged", new=Mock(return_value=False)): with patch.object(issue_comment_handler.repository, "get_branch") as mock_get_branch: with patch.object(mock_pull_request, "create_issue_comment") as mock_comment: - with patch.object(issue_comment_handler.labels_handler, "_add_label") as mock_add_label: + with patch.object( + issue_comment_handler.labels_handler, "_add_label", new_callable=AsyncMock + ) as mock_add_label: await issue_comment_handler.process_cherry_pick_command( pull_request=mock_pull_request, command_args="branch1 branch2", reviewed_user="test-user" ) @@ -504,7 +652,9 @@ async def test_process_cherry_pick_command_merged_pr(self, issue_comment_handler # Patch is_merged as a method with patch.object(mock_pull_request, "is_merged", new=Mock(return_value=True)): with patch.object(issue_comment_handler.repository, "get_branch"): - with patch.object(issue_comment_handler.runner_handler, "cherry_pick") as mock_cherry_pick: + with patch.object( + issue_comment_handler.runner_handler, "cherry_pick", new_callable=AsyncMock + ) as mock_cherry_pick: await issue_comment_handler.process_cherry_pick_command( pull_request=mock_pull_request, command_args="branch1", reviewed_user="test-user" ) @@ -541,18 +691,22 @@ async def test_process_retest_command_all_only(self, issue_comment_handler: Issu """Test processing retest command with 'all' only.""" mock_pull_request = Mock() - with patch.object(issue_comment_handler.runner_handler, "run_tox") as mock_run_tox: - await issue_comment_handler.process_retest_command( - pull_request=mock_pull_request, command_args="all", reviewed_user="test-user" - ) - mock_run_tox.assert_called_once_with(pull_request=mock_pull_request) + with patch.object(issue_comment_handler.runner_handler, "run_tox", new_callable=AsyncMock) as mock_run_tox: + with patch.object( + issue_comment_handler.runner_handler, "run_pre_commit", new_callable=AsyncMock + ) as mock_run_pre_commit: + await issue_comment_handler.process_retest_command( + pull_request=mock_pull_request, command_args="all", reviewed_user="test-user" + ) + mock_run_tox.assert_awaited_once_with(pull_request=mock_pull_request) + mock_run_pre_commit.assert_awaited_once_with(pull_request=mock_pull_request) @pytest.mark.asyncio async def test_process_retest_command_specific_tests(self, issue_comment_handler: IssueCommentHandler) -> None: """Test processing retest command with specific tests.""" mock_pull_request = Mock() - with patch.object(issue_comment_handler.runner_handler, "run_tox") as mock_run_tox: + with patch.object(issue_comment_handler.runner_handler, "run_tox", new_callable=AsyncMock) as mock_run_tox: with patch.object(mock_pull_request, "create_issue_comment") as mock_comment: await issue_comment_handler.process_retest_command( pull_request=mock_pull_request, command_args="tox unsupported-test", reviewed_user="test-user" @@ -596,7 +750,9 @@ async def test_process_retest_command_async_task_exception( """Test processing retest command with async task exception.""" 
mock_pull_request = Mock() - with patch.object(issue_comment_handler.runner_handler, "run_tox", side_effect=Exception("Test error")): + with patch.object( + issue_comment_handler.runner_handler, "run_tox", new_callable=AsyncMock, side_effect=Exception("Test error") + ): with patch.object(issue_comment_handler.logger, "error") as mock_error: await issue_comment_handler.process_retest_command( pull_request=mock_pull_request, command_args="tox", reviewed_user="test-user" diff --git a/webhook_server/tests/test_labels_handler.py b/webhook_server/tests/test_labels_handler.py index a7665e4c2..62ed697e4 100644 --- a/webhook_server/tests/test_labels_handler.py +++ b/webhook_server/tests/test_labels_handler.py @@ -1,10 +1,11 @@ +import asyncio from unittest.mock import AsyncMock, Mock, patch import pytest from github.GithubException import UnknownObjectException from github.PullRequest import PullRequest -from webhook_server.libs.labels_handler import LabelsHandler +from webhook_server.libs.handlers.labels_handler import LabelsHandler from webhook_server.utils.constants import ( ADD_STR, APPROVE_STR, @@ -128,9 +129,44 @@ async def test_add_label_success(self, labels_handler: LabelsHandler, mock_pull_ with patch("timeout_sampler.TimeoutWatch") as mock_timeout: mock_timeout.return_value.remaining_time.side_effect = [10, 10, 0] with patch("asyncio.sleep", new_callable=AsyncMock): - with patch.object(labels_handler, "label_exists_in_pull_request", side_effect=[False, True]): + with patch.object( + labels_handler, "label_exists_in_pull_request", new_callable=AsyncMock, side_effect=[False, True] + ): await labels_handler._add_label(mock_pull_request, "test-label") mock_pull_request.add_to_labels.assert_called_once_with("test-label") + # Verify completion log was called + assert labels_handler.logger.step.called # type: ignore[attr-defined] + + @pytest.mark.asyncio + async def test_add_label_too_long(self, labels_handler: LabelsHandler, mock_pull_request: Mock) -> None: + """Test _add_label when label is too long (> 49 chars).""" + long_label = "a" * 50 # 50 characters + await labels_handler._add_label(mock_pull_request, long_label) + # Verify label was not added + mock_pull_request.add_to_labels.assert_not_called() + # Verify completion log was called (label too long is acceptable) + assert labels_handler.logger.step.called # type: ignore[attr-defined] + + @pytest.mark.asyncio + async def test_add_label_already_exists(self, labels_handler: LabelsHandler, mock_pull_request: Mock) -> None: + """Test _add_label when label already exists.""" + with patch.object(labels_handler, "label_exists_in_pull_request", new_callable=AsyncMock, return_value=True): + await labels_handler._add_label(mock_pull_request, "existing-label") + # Verify label was not added (already exists) + mock_pull_request.add_to_labels.assert_not_called() + # Verify completion log was called (label already exists is acceptable) + assert labels_handler.logger.step.called # type: ignore[attr-defined] + + @pytest.mark.asyncio + async def test_add_label_static_label(self, labels_handler: LabelsHandler, mock_pull_request: Mock) -> None: + """Test _add_label with static label.""" + static_label = next(iter(STATIC_LABELS_DICT.keys())) + with patch.object(labels_handler, "label_exists_in_pull_request", new_callable=AsyncMock, return_value=False): + await labels_handler._add_label(mock_pull_request, static_label) + # Verify label was added + mock_pull_request.add_to_labels.assert_called_once_with(static_label) + # Verify completion log was called + 
assert labels_handler.logger.step.called # type: ignore[attr-defined] @pytest.mark.asyncio async def test_add_label_exception_handling(self, labels_handler: LabelsHandler, mock_pull_request: Mock) -> None: @@ -138,7 +174,9 @@ async def test_add_label_exception_handling(self, labels_handler: LabelsHandler, with patch("timeout_sampler.TimeoutWatch") as mock_timeout: mock_timeout.return_value.remaining_time.side_effect = [10, 10, 0] with patch("asyncio.sleep", new_callable=AsyncMock): - with patch.object(labels_handler, "label_exists_in_pull_request", side_effect=[False, True]): + with patch.object( + labels_handler, "label_exists_in_pull_request", new_callable=AsyncMock, side_effect=[False, True] + ): with patch.object(mock_pull_request, "add_to_labels", side_effect=Exception("Test error")): # Should not raise exception - the method should handle it gracefully try: @@ -153,7 +191,9 @@ async def test_remove_label_success(self, labels_handler: LabelsHandler, mock_pu with patch("timeout_sampler.TimeoutWatch") as mock_timeout: mock_timeout.return_value.remaining_time.side_effect = [10, 10, 0] with patch("asyncio.sleep", new_callable=AsyncMock): - with patch.object(labels_handler, "label_exists_in_pull_request", side_effect=[True, False]): + with patch.object( + labels_handler, "label_exists_in_pull_request", new_callable=AsyncMock, side_effect=[True, False] + ): result = await labels_handler._remove_label(mock_pull_request, "test-label") assert result is True mock_pull_request.remove_from_labels.assert_called_once_with("test-label") @@ -166,7 +206,9 @@ async def test_remove_label_exception_handling( with patch("timeout_sampler.TimeoutWatch") as mock_timeout: mock_timeout.return_value.remaining_time.side_effect = [10, 10, 0] with patch("asyncio.sleep", new_callable=AsyncMock): - with patch.object(labels_handler, "label_exists_in_pull_request", side_effect=[True, False]): + with patch.object( + labels_handler, "label_exists_in_pull_request", new_callable=AsyncMock, side_effect=[True, False] + ): with patch.object(mock_pull_request, "remove_from_labels", side_effect=Exception("Test error")): result = await labels_handler._remove_label(mock_pull_request, "test-label") assert result is False @@ -179,8 +221,12 @@ async def test_remove_label_exception_during_wait( with patch("timeout_sampler.TimeoutWatch") as mock_timeout: mock_timeout.return_value.remaining_time.side_effect = [10, 10, 0] with patch("asyncio.sleep", new_callable=AsyncMock): - with patch.object(labels_handler, "label_exists_in_pull_request", side_effect=[True, False]): - with patch.object(labels_handler, "wait_for_label", side_effect=Exception("Wait failed")): + with patch.object( + labels_handler, "label_exists_in_pull_request", new_callable=AsyncMock, side_effect=[True, False] + ): + with patch.object( + labels_handler, "wait_for_label", new_callable=AsyncMock, side_effect=Exception("Wait failed") + ): result = await labels_handler._remove_label(mock_pull_request, "test-label") assert result is False @@ -192,11 +238,43 @@ async def test_remove_label_wait_for_label_exception( with patch("timeout_sampler.TimeoutWatch") as mock_timeout: mock_timeout.return_value.remaining_time.side_effect = [10, 10, 0] with patch("asyncio.sleep", new_callable=AsyncMock): - with patch.object(labels_handler, "label_exists_in_pull_request", side_effect=[True, False]): - with patch.object(labels_handler, "wait_for_label", side_effect=Exception("Wait failed")): + with patch.object( + labels_handler, "label_exists_in_pull_request", new_callable=AsyncMock, 
side_effect=[True, False] + ): + with patch.object( + labels_handler, "wait_for_label", new_callable=AsyncMock, side_effect=Exception("Wait failed") + ): result = await labels_handler._remove_label(mock_pull_request, "test-label") assert result is False + @pytest.mark.asyncio + async def test_remove_label_not_exists(self, labels_handler: LabelsHandler, mock_pull_request: Mock) -> None: + """Test _remove_label when label doesn't exist (acceptable outcome).""" + with patch.object(labels_handler, "label_exists_in_pull_request", new_callable=AsyncMock, return_value=False): + result = await labels_handler._remove_label(mock_pull_request, "non-existent-label") + assert result is False + # Verify that remove_from_labels was not called (we don't check first to save API calls) + mock_pull_request.remove_from_labels.assert_not_called() + # Verify completion log was called (label doesn't exist is acceptable) + assert labels_handler.logger.step.called # type: ignore[attr-defined] + + @pytest.mark.asyncio + async def test_remove_label_wait_timeout(self, labels_handler: LabelsHandler, mock_pull_request: Mock) -> None: + """Test _remove_label when removal succeeds but wait_for_label times out.""" + with patch("timeout_sampler.TimeoutWatch") as mock_timeout: + mock_timeout.return_value.remaining_time.side_effect = [10, 10, 0] + with patch("asyncio.sleep", new_callable=AsyncMock): + with patch.object( + labels_handler, "label_exists_in_pull_request", new_callable=AsyncMock, side_effect=[True, True] + ): + # wait_for_label returns False (timeout) + with patch.object(labels_handler, "wait_for_label", new_callable=AsyncMock, return_value=False): + result = await labels_handler._remove_label(mock_pull_request, "test-label") + assert result is False + mock_pull_request.remove_from_labels.assert_called_once_with("test-label") + # Verify failure log was called (timeout waiting for removal) + assert labels_handler.logger.step.called # type: ignore[attr-defined] + @pytest.mark.asyncio async def test_add_label_dynamic_label_wait_exception( self, labels_handler: LabelsHandler, mock_pull_request: Mock @@ -206,12 +284,19 @@ async def test_add_label_dynamic_label_wait_exception( with patch("timeout_sampler.TimeoutWatch") as mock_timeout: mock_timeout.return_value.remaining_time.side_effect = [10, 10, 0] with patch("asyncio.sleep", new_callable=AsyncMock): - with patch.object(labels_handler, "label_exists_in_pull_request", side_effect=[False, True]): + with patch.object( + labels_handler, "label_exists_in_pull_request", new_callable=AsyncMock, side_effect=[False, True] + ): with patch.object( labels_handler.repository, "get_label", side_effect=Exception("Get label failed") ): with patch.object(labels_handler.repository, "create_label"): - with patch.object(labels_handler, "wait_for_label", side_effect=Exception("Wait failed")): + with patch.object( + labels_handler, + "wait_for_label", + new_callable=AsyncMock, + side_effect=Exception("Wait failed"), + ): # Should not raise exception try: await labels_handler._add_label(mock_pull_request, dynamic_label) @@ -224,7 +309,7 @@ async def test_add_label_static_label_wait_exception( self, labels_handler: LabelsHandler, mock_pull_request: Mock ) -> None: """Test _add_label with exception during wait for static label.""" - static_label = list(STATIC_LABELS_DICT.keys())[0] + static_label = next(iter(STATIC_LABELS_DICT.keys())) with patch("timeout_sampler.TimeoutWatch") as mock_timeout: mock_timeout.return_value.remaining_time.side_effect = [10, 10, 0] with patch("asyncio.sleep", 
new_callable=AsyncMock): @@ -239,7 +324,9 @@ async def test_wait_for_label_success(self, labels_handler: LabelsHandler, mock_ with patch("timeout_sampler.TimeoutWatch") as mock_timeout: mock_timeout.return_value.remaining_time.side_effect = [10, 10, 0] with patch("asyncio.sleep", new_callable=AsyncMock): - with patch.object(labels_handler, "label_exists_in_pull_request", side_effect=[True]): + with patch.object( + labels_handler, "label_exists_in_pull_request", new_callable=AsyncMock, side_effect=[True] + ): result = await labels_handler.wait_for_label(mock_pull_request, "test-label", exists=True) assert result is True @@ -252,7 +339,10 @@ async def test_wait_for_label_exception_during_check( mock_timeout.return_value.remaining_time.side_effect = [10, 10, 0] with patch("asyncio.sleep", new_callable=AsyncMock): with patch.object( - labels_handler, "label_exists_in_pull_request", side_effect=Exception("Check failed") + labels_handler, + "label_exists_in_pull_request", + new_callable=AsyncMock, + side_effect=Exception("Check failed"), ): with pytest.raises(Exception, match="Check failed"): await labels_handler.wait_for_label(mock_pull_request, "test-label", exists=True) @@ -264,8 +354,8 @@ async def test_label_by_user_comment_authorized_user(self, labels_handler: Label user = "approver1" # User in the approvers list with ( - patch.object(labels_handler, "_add_label") as mock_add, - patch.object(labels_handler, "wait_for_label", return_value=True), + patch.object(labels_handler, "_add_label", new_callable=AsyncMock) as mock_add, + patch.object(labels_handler, "wait_for_label", new_callable=AsyncMock, return_value=True), ): await labels_handler.label_by_user_comment( pull_request=pull_request, user_requested_label=label_name, remove=False, reviewed_user=user @@ -280,8 +370,8 @@ async def test_label_by_user_comment_unauthorized_user(self, labels_handler: Lab user = "unauthorized_user" # User not in approvers list with ( - patch.object(labels_handler, "_add_label") as mock_add, - patch.object(labels_handler, "wait_for_label", return_value=True), + patch.object(labels_handler, "_add_label", new_callable=AsyncMock) as mock_add, + patch.object(labels_handler, "wait_for_label", new_callable=AsyncMock, return_value=True), ): await labels_handler.label_by_user_comment( pull_request=pull_request, user_requested_label=label_name, remove=False, reviewed_user=user @@ -297,8 +387,8 @@ async def test_label_by_user_comment_remove_label(self, labels_handler: LabelsHa user = "approver1" with ( - patch.object(labels_handler, "_remove_label") as mock_remove, - patch.object(labels_handler, "wait_for_label", return_value=True), + patch.object(labels_handler, "_remove_label", new_callable=AsyncMock) as mock_remove, + patch.object(labels_handler, "wait_for_label", new_callable=AsyncMock, return_value=True), ): await labels_handler.label_by_user_comment( pull_request=pull_request, user_requested_label=label_name, remove=True, reviewed_user=user @@ -321,9 +411,9 @@ async def test_size_label_management(self, labels_handler: LabelsHandler) -> Non with ( patch.object(pull_request, "get_labels", return_value=existing_labels), - patch.object(labels_handler, "_remove_label") as mock_remove, - patch.object(labels_handler, "_add_label") as mock_add, - patch.object(labels_handler, "wait_for_label", return_value=True), + patch.object(labels_handler, "_remove_label", new_callable=AsyncMock) as mock_remove, + patch.object(labels_handler, "_add_label", new_callable=AsyncMock) as mock_add, + patch.object(labels_handler, 
"wait_for_label", new_callable=AsyncMock, return_value=True), ): await labels_handler.add_size_label(pull_request=pull_request) @@ -346,9 +436,9 @@ async def test_size_label_no_existing_size_label(self, labels_handler: LabelsHan with ( patch.object(pull_request, "get_labels", return_value=existing_labels), - patch.object(labels_handler, "_remove_label") as mock_remove, - patch.object(labels_handler, "_add_label") as mock_add, - patch.object(labels_handler, "wait_for_label", return_value=True), + patch.object(labels_handler, "_remove_label", new_callable=AsyncMock) as mock_remove, + patch.object(labels_handler, "_add_label", new_callable=AsyncMock) as mock_add, + patch.object(labels_handler, "wait_for_label", new_callable=AsyncMock, return_value=True), ): await labels_handler.add_size_label(pull_request=pull_request) @@ -386,12 +476,10 @@ async def test_concurrent_label_operations(self, labels_handler: LabelsHandler) # Simulate concurrent add and remove operations with ( - patch.object(labels_handler, "_add_label") as mock_add, - patch.object(labels_handler, "_remove_label") as mock_remove, - patch.object(labels_handler, "wait_for_label", return_value=True), + patch.object(labels_handler, "_add_label", new_callable=AsyncMock) as mock_add, + patch.object(labels_handler, "_remove_label", new_callable=AsyncMock) as mock_remove, + patch.object(labels_handler, "wait_for_label", new_callable=AsyncMock, return_value=True), ): - import asyncio - # Run concurrent operations await asyncio.gather( labels_handler._add_label(pull_request=pull_request, label="bug"), @@ -409,7 +497,7 @@ async def test_add_label_dynamic_label_edit_exception( self, labels_handler: LabelsHandler, mock_pull_request: Mock ) -> None: """Test _add_label with dynamic label where edit raises exception and label is created.""" - with patch.object(labels_handler, "label_exists_in_pull_request", return_value=False): + with patch.object(labels_handler, "label_exists_in_pull_request", new_callable=AsyncMock, return_value=False): with patch.object(mock_pull_request, "get_labels", return_value=[]): with patch("asyncio.to_thread") as mock_to_thread: # get_label raises UnknownObjectException, create_label raises Exception @@ -426,9 +514,9 @@ async def test_add_label_dynamic_label_edit_success( self, labels_handler: LabelsHandler, mock_pull_request: Mock ) -> None: """Test _add_label with dynamic label where edit succeeds.""" - with patch.object(labels_handler, "label_exists_in_pull_request", return_value=False): + with patch.object(labels_handler, "label_exists_in_pull_request", new_callable=AsyncMock, return_value=False): with patch.object(mock_pull_request, "get_labels", return_value=[]): - with patch.object(labels_handler, "wait_for_label", return_value=True): + with patch.object(labels_handler, "wait_for_label", new_callable=AsyncMock, return_value=True): with patch("asyncio.to_thread") as mock_to_thread: # get_label returns label, edit succeeds, add_to_labels succeeds mock_label = Mock() @@ -445,8 +533,8 @@ async def test_manage_reviewed_by_label_approve_not_in_approvers( # Mock root_approvers as a list to avoid concatenation error with patch.object(labels_handler.owners_file_handler, "root_approvers", []): with ( - patch.object(labels_handler, "_add_label") as mock_add, - patch.object(labels_handler, "_remove_label") as mock_remove, + patch.object(labels_handler, "_add_label", new_callable=AsyncMock) as mock_add, + patch.object(labels_handler, "_remove_label", new_callable=AsyncMock) as mock_remove, ): await 
labels_handler.manage_reviewed_by_label(mock_pull_request, APPROVE_STR, ADD_STR, "not_approver") mock_add.assert_not_called() @@ -470,7 +558,7 @@ async def test_manage_reviewed_by_label_commented( self, labels_handler: LabelsHandler, mock_pull_request: Mock ) -> None: """Test manage_reviewed_by_label with commented state.""" - with patch.object(labels_handler, "_add_label") as mock_add: + with patch.object(labels_handler, "_add_label", new_callable=AsyncMock) as mock_add: await labels_handler.manage_reviewed_by_label(mock_pull_request, "commented", ADD_STR, "reviewer1") mock_add.assert_called_once() @@ -479,14 +567,14 @@ async def test_manage_reviewed_by_label_unsupported_state( self, labels_handler: LabelsHandler, mock_pull_request: Mock ) -> None: """Test manage_reviewed_by_label with unsupported review state.""" - with patch.object(labels_handler, "_add_label") as mock_add: + with patch.object(labels_handler, "_add_label", new_callable=AsyncMock) as mock_add: await labels_handler.manage_reviewed_by_label(mock_pull_request, "unsupported", ADD_STR, "reviewer1") mock_add.assert_not_called() @pytest.mark.asyncio async def test_label_by_user_comment_remove(self, labels_handler: LabelsHandler, mock_pull_request: Mock) -> None: """Test label_by_user_comment with remove=True for regular label.""" - with patch.object(labels_handler, "_remove_label") as mock_remove: + with patch.object(labels_handler, "_remove_label", new_callable=AsyncMock) as mock_remove: await labels_handler.label_by_user_comment(mock_pull_request, "bug", True, "user1") mock_remove.assert_called_once_with(pull_request=mock_pull_request, label="bug") @@ -503,7 +591,9 @@ async def test_label_exists_in_pull_request_exception( self, labels_handler: LabelsHandler, mock_pull_request: Mock ) -> None: """Test label_exists_in_pull_request with exception.""" - with patch.object(labels_handler, "pull_request_labels_names", side_effect=Exception("Test error")): + with patch.object( + labels_handler, "pull_request_labels_names", new_callable=AsyncMock, side_effect=Exception("Test error") + ): with pytest.raises(Exception, match="Test error"): await labels_handler.label_exists_in_pull_request(mock_pull_request, "test-label") @@ -516,8 +606,10 @@ async def test_add_size_label_remove_existing_exception( mock_pull_request.deletions = 5 existing_size_label = f"{SIZE_LABEL_PREFIX}L" with patch.object(labels_handler, "pull_request_labels_names", return_value=[existing_size_label]): - with patch.object(labels_handler, "_remove_label", side_effect=Exception("Remove failed")): - with patch.object(labels_handler, "_add_label"): + with patch.object( + labels_handler, "_remove_label", new_callable=AsyncMock, side_effect=Exception("Remove failed") + ): + with patch.object(labels_handler, "_add_label", new_callable=AsyncMock): with pytest.raises(Exception, match="Remove failed"): await labels_handler.add_size_label(mock_pull_request) @@ -526,7 +618,7 @@ async def test_label_by_user_comment_lgtm_remove( self, labels_handler: LabelsHandler, mock_pull_request: Mock ) -> None: """Test label_by_user_comment for LGTM removal.""" - with patch.object(labels_handler, "manage_reviewed_by_label") as mock_manage: + with patch.object(labels_handler, "manage_reviewed_by_label", new_callable=AsyncMock) as mock_manage: await labels_handler.label_by_user_comment( pull_request=mock_pull_request, user_requested_label=LGTM_STR, remove=True, reviewed_user="test-user" ) @@ -537,7 +629,7 @@ async def test_label_by_user_comment_approve_remove( self, labels_handler: 
LabelsHandler, mock_pull_request: Mock ) -> None: """Test label_by_user_comment for approve removal.""" - with patch.object(labels_handler, "manage_reviewed_by_label") as mock_manage: + with patch.object(labels_handler, "manage_reviewed_by_label", new_callable=AsyncMock) as mock_manage: await labels_handler.label_by_user_comment( pull_request=mock_pull_request, user_requested_label=APPROVE_STR, remove=True, reviewed_user="test-user" ) @@ -560,7 +652,7 @@ async def test_label_by_user_comment_approve_add( @pytest.mark.asyncio async def test_label_by_user_comment_lgtm_add(self, labels_handler: LabelsHandler, mock_pull_request: Mock) -> None: """Test label_by_user_comment for LGTM addition.""" - with patch.object(labels_handler, "manage_reviewed_by_label") as mock_manage: + with patch.object(labels_handler, "manage_reviewed_by_label", new_callable=AsyncMock) as mock_manage: await labels_handler.label_by_user_comment( pull_request=mock_pull_request, user_requested_label=LGTM_STR, remove=False, reviewed_user="test-user" ) @@ -571,7 +663,7 @@ async def test_label_by_user_comment_other_label_add( self, labels_handler: LabelsHandler, mock_pull_request: Mock ) -> None: """Test label_by_user_comment for other label addition.""" - with patch.object(labels_handler, "_add_label") as mock_add: + with patch.object(labels_handler, "_add_label", new_callable=AsyncMock) as mock_add: await labels_handler.label_by_user_comment( pull_request=mock_pull_request, user_requested_label="other-label", @@ -585,7 +677,7 @@ async def test_label_by_user_comment_other_label_remove( self, labels_handler: LabelsHandler, mock_pull_request: Mock ) -> None: """Test label_by_user_comment for other label removal.""" - with patch.object(labels_handler, "_remove_label") as mock_remove: + with patch.object(labels_handler, "_remove_label", new_callable=AsyncMock) as mock_remove: await labels_handler.label_by_user_comment( pull_request=mock_pull_request, user_requested_label="other-label", @@ -602,8 +694,8 @@ async def test_manage_reviewed_by_label_approved_by_approver_add( # Ensure the owners_file_handler has the expected attributes with patch.object(labels_handler.owners_file_handler, "all_pull_request_approvers", ["approver1", "approver2"]): with patch.object(labels_handler.owners_file_handler, "root_approvers", ["root-approver"]): - with patch.object(labels_handler, "_add_label") as mock_add: - with patch.object(labels_handler, "_remove_label") as mock_remove: + with patch.object(labels_handler, "_add_label", new_callable=AsyncMock) as mock_add: + with patch.object(labels_handler, "_remove_label", new_callable=AsyncMock) as mock_remove: await labels_handler.manage_reviewed_by_label( pull_request=mock_pull_request, review_state=APPROVE_STR, @@ -621,8 +713,8 @@ async def test_manage_reviewed_by_label_approved_by_root_approver_add( # Ensure the owners_file_handler has the expected attributes with patch.object(labels_handler.owners_file_handler, "all_pull_request_approvers", ["approver1", "approver2"]): with patch.object(labels_handler.owners_file_handler, "root_approvers", ["root-approver"]): - with patch.object(labels_handler, "_add_label") as mock_add: - with patch.object(labels_handler, "_remove_label") as mock_remove: + with patch.object(labels_handler, "_add_label", new_callable=AsyncMock) as mock_add: + with patch.object(labels_handler, "_remove_label", new_callable=AsyncMock) as mock_remove: await labels_handler.manage_reviewed_by_label( pull_request=mock_pull_request, review_state=APPROVE_STR, @@ -643,7 +735,7 @@ async def 
test_manage_reviewed_by_label_lgtm_by_owner_add( "pull_request": {"user": {"login": "test-user"}}, } - with patch.object(labels_handler, "_add_label") as mock_add: + with patch.object(labels_handler, "_add_label", new_callable=AsyncMock) as mock_add: await labels_handler.manage_reviewed_by_label( pull_request=mock_pull_request, review_state=LGTM_STR, @@ -663,8 +755,8 @@ async def test_manage_reviewed_by_label_lgtm_by_non_owner_add( "pull_request": {"user": {"login": "test-user"}}, } - with patch.object(labels_handler, "_add_label") as mock_add: - with patch.object(labels_handler, "_remove_label") as mock_remove: + with patch.object(labels_handler, "_add_label", new_callable=AsyncMock) as mock_add: + with patch.object(labels_handler, "_remove_label", new_callable=AsyncMock) as mock_remove: await labels_handler.manage_reviewed_by_label( pull_request=mock_pull_request, review_state=LGTM_STR, action=ADD_STR, reviewed_user="other-user" ) @@ -676,8 +768,8 @@ async def test_manage_reviewed_by_label_changes_requested_add( self, labels_handler: LabelsHandler, mock_pull_request: Mock ) -> None: """Test manage_reviewed_by_label for changes requested with add action.""" - with patch.object(labels_handler, "_add_label") as mock_add: - with patch.object(labels_handler, "_remove_label") as mock_remove: + with patch.object(labels_handler, "_add_label", new_callable=AsyncMock) as mock_add: + with patch.object(labels_handler, "_remove_label", new_callable=AsyncMock) as mock_remove: await labels_handler.manage_reviewed_by_label( pull_request=mock_pull_request, review_state="changes_requested", @@ -692,37 +784,37 @@ async def test_manage_reviewed_by_label_commented_add( self, labels_handler: LabelsHandler, mock_pull_request: Mock ) -> None: """Test manage_reviewed_by_label for commented with add action.""" - with patch.object(labels_handler, "_add_label") as mock_add: + with patch.object(labels_handler, "_add_label", new_callable=AsyncMock) as mock_add: await labels_handler.manage_reviewed_by_label( pull_request=mock_pull_request, review_state="commented", action=ADD_STR, reviewed_user="test-user" ) mock_add.assert_called_once() - def test_wip_or_hold_lables_exists_both(self, labels_handler: LabelsHandler) -> None: - """Test wip_or_hold_lables_exists with both WIP and HOLD labels.""" + def test_wip_or_hold_labels_exists_both(self, labels_handler: LabelsHandler) -> None: + """Test wip_or_hold_labels_exists with both WIP and HOLD labels.""" labels = [WIP_STR, HOLD_LABEL_STR, "other-label"] - result = labels_handler.wip_or_hold_lables_exists(labels) + result = labels_handler.wip_or_hold_labels_exists(labels) assert "Hold label exists." in result assert "WIP label exists." in result - def test_wip_or_hold_lables_exists_hold_only(self, labels_handler: LabelsHandler) -> None: - """Test wip_or_hold_lables_exists with only HOLD label.""" + def test_wip_or_hold_labels_exists_hold_only(self, labels_handler: LabelsHandler) -> None: + """Test wip_or_hold_labels_exists with only HOLD label.""" labels = [HOLD_LABEL_STR, "other-label"] - result = labels_handler.wip_or_hold_lables_exists(labels) + result = labels_handler.wip_or_hold_labels_exists(labels) assert "Hold label exists." in result assert "WIP label exists." 
not in result - def test_wip_or_hold_lables_exists_wip_only(self, labels_handler: LabelsHandler) -> None: - """Test wip_or_hold_lables_exists with only WIP label.""" + def test_wip_or_hold_labels_exists_wip_only(self, labels_handler: LabelsHandler) -> None: + """Test wip_or_hold_labels_exists with only WIP label.""" labels = [WIP_STR, "other-label"] - result = labels_handler.wip_or_hold_lables_exists(labels) + result = labels_handler.wip_or_hold_labels_exists(labels) assert "WIP label exists." in result assert "Hold label exists." not in result - def test_wip_or_hold_lables_exists_neither(self, labels_handler: LabelsHandler) -> None: - """Test wip_or_hold_lables_exists with neither WIP nor HOLD labels.""" + def test_wip_or_hold_labels_exists_neither(self, labels_handler: LabelsHandler) -> None: + """Test wip_or_hold_labels_exists with neither WIP nor HOLD labels.""" labels = ["other-label1", "other-label2"] - result = labels_handler.wip_or_hold_lables_exists(labels) + result = labels_handler.wip_or_hold_labels_exists(labels) assert result == "" def test_get_custom_pr_size_thresholds_config_available(self, mock_github_webhook: Mock) -> None: diff --git a/webhook_server/tests/test_log_api.py b/webhook_server/tests/test_log_api.py index 0d203f9e3..39908e35a 100644 --- a/webhook_server/tests/test_log_api.py +++ b/webhook_server/tests/test_log_api.py @@ -10,10 +10,13 @@ import pytest from fastapi import HTTPException +from fastapi.responses import HTMLResponse from fastapi.testclient import TestClient from fastapi.websockets import WebSocketDisconnect +from webhook_server.app import FASTAPI_APP from webhook_server.libs.log_parser import LogEntry +from webhook_server.web.log_viewer import LogViewerController class TestLogViewerController: @@ -27,7 +30,6 @@ def mock_logger(self): @pytest.fixture def controller(self, mock_logger): """Create a LogViewerController instance for testing.""" - from webhook_server.web.log_viewer import LogViewerController with patch("webhook_server.web.log_viewer.Config") as mock_config: mock_config_instance = Mock() @@ -79,11 +81,12 @@ def test_get_log_page_success(self, controller): assert "Test" in response.body.decode() def test_get_log_page_file_not_found(self, controller): - """Test log page when template file not found.""" - with patch.object(controller, "_get_log_viewer_html", side_effect=FileNotFoundError): - with pytest.raises(HTTPException) as exc: - controller.get_log_page() - assert exc.value.status_code == 404 + """Test log page when template file not found - should return fallback HTML.""" + # _get_log_viewer_html now returns fallback HTML instead of raising FileNotFoundError + with patch.object(controller, "_get_log_viewer_html", return_value="fallback"): + result = controller.get_log_page() + assert isinstance(result, HTMLResponse) + assert result.body.decode() == "fallback" def test_get_log_page_error(self, controller): """Test log page with generic error.""" @@ -284,6 +287,114 @@ def test_get_workflow_steps_success(self, controller, sample_log_entries): result = controller.get_workflow_steps("hook1") assert result == {"test": "data"} + def test_get_workflow_steps_with_token_spend(self, controller): + """Test workflow steps with token spend logging.""" + hook_id = "test-hook-123" + entries_with_context = [ + LogEntry( + timestamp=datetime.datetime(2025, 7, 31, 10, 0, 0), + level="STEP", + logger_name="main", + message="Step 1", + hook_id=hook_id, + repository="test-repo", + event_type="pull_request", + github_user="test-user", + pr_number=123, + ), + 
LogEntry( + timestamp=datetime.datetime(2025, 7, 31, 10, 0, 1), + level="INFO", + logger_name="main", + message="token spend log", + hook_id=hook_id, + repository="test-repo", + event_type="pull_request", + github_user="test-user", + pr_number=123, + token_spend=25, + ), + ] + workflow_steps = [entries_with_context[0]] + + with patch.object(controller, "_stream_log_entries", return_value=entries_with_context): + with patch.object(controller.log_parser, "extract_workflow_steps", return_value=workflow_steps): + with patch.object(controller, "_build_workflow_timeline", return_value={"test": "data"}): + result = controller.get_workflow_steps(hook_id) + assert result == {"test": "data", "token_spend": 25} + # Verify logger.info was called with structured format + assert controller.logger.info.called + call_args = controller.logger.info.call_args[0][0] + assert hook_id in call_args + assert "test-repo" in call_args or "[pull_request]" in call_args + + def test_get_workflow_steps_token_spend_extraction_fallback(self, controller): + """Test token spend extraction fallback when token_spend is None.""" + hook_id = "test-hook-456" + entries_with_keywords = [ + LogEntry( + timestamp=datetime.datetime(2025, 7, 31, 10, 0, 0), + level="STEP", + logger_name="main", + message="Step 1", + hook_id=hook_id, + repository="test-repo", + event_type="check_run", + github_user="test-user", + pr_number=456, + ), + LogEntry( + timestamp=datetime.datetime(2025, 7, 31, 10, 0, 1), + level="INFO", + logger_name="main", + message="token ***** 30 API calls (initial: 1000, final: 970, remaining: 970)", + hook_id=hook_id, + repository="test-repo", + event_type="check_run", + github_user="test-user", + pr_number=456, + token_spend=None, # Not parsed initially + ), + ] + workflow_steps = [entries_with_keywords[0]] + + with patch.object(controller, "_stream_log_entries", return_value=entries_with_keywords): + with patch.object(controller.log_parser, "extract_workflow_steps", return_value=workflow_steps): + with patch.object(controller.log_parser, "extract_token_spend", return_value=30): + with patch.object(controller, "_build_workflow_timeline", return_value={"test": "data"}): + result = controller.get_workflow_steps(hook_id) + assert result == {"test": "data", "token_spend": 30} + # Verify logger.warning and logger.info were called + assert controller.logger.warning.called + assert controller.logger.info.called + + def test_get_workflow_steps_token_spend_no_context(self, controller): + """Test token spend logging when context is missing.""" + hook_id = "test-hook-789" + entries_minimal = [ + LogEntry( + timestamp=datetime.datetime(2025, 7, 31, 10, 0, 0), + level="STEP", + logger_name="main", + message="Step 1", + hook_id=hook_id, + repository=None, + event_type=None, + github_user=None, + pr_number=None, + token_spend=15, + ), + ] + workflow_steps = [entries_minimal[0]] + + with patch.object(controller, "_stream_log_entries", return_value=entries_minimal): + with patch.object(controller.log_parser, "extract_workflow_steps", return_value=workflow_steps): + with patch.object(controller, "_build_workflow_timeline", return_value={"test": "data"}): + result = controller.get_workflow_steps(hook_id) + assert result == {"test": "data", "token_spend": 15} + # Should still log even without full context + assert controller.logger.info.called + def test_get_workflow_steps_not_found(self, controller): """Test workflow steps when not found.""" with patch.object(controller, "_stream_log_entries", return_value=[]): @@ -491,7 +602,6 @@ def 
test_get_logs_page(self) -> None: with patch("webhook_server.web.log_viewer.LogViewerController") as mock_controller: mock_instance = Mock() mock_controller.return_value = mock_instance - from fastapi.responses import HTMLResponse mock_instance.get_log_page.return_value = HTMLResponse(content="Log Viewer") mock_instance.shutdown = AsyncMock() # Add async shutdown method @@ -511,8 +621,6 @@ def test_get_logs_page(self) -> None: mock_github.return_value = [] mock_cloudflare.return_value = [] - from webhook_server.app import FASTAPI_APP - with TestClient(FASTAPI_APP) as client: response = client.get("/logs") assert response.status_code == 200 @@ -835,9 +943,6 @@ async def mock_handle_websocket_error(websocket): @pytest.mark.asyncio async def test_websocket_handle_real_implementation(self): """Test actual WebSocket handler implementation.""" - from unittest.mock import Mock - - from webhook_server.web.log_viewer import LogViewerController mock_logger = Mock() controller = LogViewerController(logger=mock_logger) @@ -860,7 +965,6 @@ async def test_websocket_handle_real_implementation(self): @pytest.mark.asyncio async def test_websocket_handle_with_log_monitoring(self): """Test WebSocket handler with log monitoring.""" - from webhook_server.web.log_viewer import LogViewerController mock_logger = Mock() controller = LogViewerController(logger=mock_logger) @@ -897,7 +1001,6 @@ async def mock_monitor(): @pytest.mark.asyncio async def test_shutdown_websocket_cleanup(self): """Test shutdown method properly closes all WebSocket connections.""" - from webhook_server.web.log_viewer import LogViewerController mock_logger = Mock() controller = LogViewerController(logger=mock_logger) @@ -932,7 +1035,6 @@ async def test_shutdown_websocket_cleanup(self): @pytest.mark.asyncio async def test_shutdown_websocket_close_error_handling(self): """Test shutdown method handles WebSocket close errors gracefully.""" - from webhook_server.web.log_viewer import LogViewerController mock_logger = Mock() controller = LogViewerController(logger=mock_logger) @@ -970,7 +1072,6 @@ async def test_shutdown_websocket_close_error_handling(self): @pytest.mark.asyncio async def test_shutdown_empty_connections(self): """Test shutdown method works correctly with no active connections.""" - from webhook_server.web.log_viewer import LogViewerController mock_logger = Mock() controller = LogViewerController(logger=mock_logger) @@ -1117,7 +1218,6 @@ class TestWorkflowStepsAPI: def test_get_workflow_steps_success(self) -> None: """Test successful workflow steps retrieval.""" # Import modules and patch before creating test client - from unittest.mock import AsyncMock, Mock # Mock workflow steps data mock_workflow_data = { @@ -1160,10 +1260,6 @@ def test_get_workflow_steps_success(self) -> None: with patch("webhook_server.app.get_log_viewer_controller", return_value=mock_instance): # Also patch the singleton variable itself with patch("webhook_server.app._log_viewer_controller_singleton", mock_instance): - from fastapi.testclient import TestClient - - from webhook_server.app import FASTAPI_APP - client = TestClient(FASTAPI_APP) # Make the request @@ -1184,7 +1280,6 @@ def test_get_workflow_steps_success(self) -> None: def test_get_workflow_steps_no_steps_found(self) -> None: """Test workflow steps when no steps are found.""" # Import modules and patch before creating test client - from unittest.mock import AsyncMock, Mock # Mock empty workflow data mock_workflow_data = { @@ -1208,10 +1303,6 @@ def test_get_workflow_steps_no_steps_found(self) -> 
None: with patch("webhook_server.app.get_log_viewer_controller", return_value=mock_instance): # Also patch the singleton variable itself with patch("webhook_server.app._log_viewer_controller_singleton", mock_instance): - from fastapi.testclient import TestClient - - from webhook_server.app import FASTAPI_APP - client = TestClient(FASTAPI_APP) # Make the request diff --git a/webhook_server/tests/test_log_parser.py b/webhook_server/tests/test_log_parser.py index c612e9d77..764fb07ed 100644 --- a/webhook_server/tests/test_log_parser.py +++ b/webhook_server/tests/test_log_parser.py @@ -3,7 +3,9 @@ import asyncio import contextlib import datetime +import logging import tempfile +import unittest.mock from pathlib import Path import pytest @@ -25,7 +27,7 @@ def test_parse_log_entry_with_hook_context(self) -> None: entry = parser.parse_log_entry(log_line) assert entry is not None - assert entry.timestamp == datetime.datetime(2025, 7, 31, 10, 30, 0, 123000) + assert entry.timestamp == datetime.datetime(2025, 7, 31, 10, 30, 0, 123000, tzinfo=datetime.UTC) assert entry.level == "INFO" assert entry.logger_name == "GithubWebhook" assert entry.hook_id == "abc123-def456" @@ -60,7 +62,7 @@ def test_parse_log_entry_without_hook_context(self) -> None: entry = parser.parse_log_entry(log_line) assert entry is not None - assert entry.timestamp == datetime.datetime(2025, 7, 31, 12, 45, 0, 789000) + assert entry.timestamp == datetime.datetime(2025, 7, 31, 12, 45, 0, 789000, tzinfo=datetime.UTC) assert entry.level == "WARNING" assert entry.logger_name == "helpers" assert entry.hook_id is None @@ -72,14 +74,16 @@ def test_parse_production_log_entry_with_ansi_colors(self) -> None: """Test parsing production log entry with ANSI color codes from prepare_log_prefix format.""" log_line = ( "2025-07-21T06:05:48.278206 GithubWebhook \x1b[32mINFO\x1b[0m " - "\x1b[38;5;160mgithub-webhook-server\x1b[0m [check_run][9948e8d0-65df-11f0-9e82-d8c2969b6368][myakove-bot]: Processing webhook\x1b[0m" + "\x1b[38;5;160mgithub-webhook-server\x1b[0m " + "[check_run][9948e8d0-65df-11f0-9e82-d8c2969b6368][myakove-bot]: " + "Processing webhook\x1b[0m" ) parser = LogParser() entry = parser.parse_log_entry(log_line) assert entry is not None - assert entry.timestamp == datetime.datetime(2025, 7, 21, 6, 5, 48, 278206) + assert entry.timestamp == datetime.datetime(2025, 7, 21, 6, 5, 48, 278206, tzinfo=datetime.UTC) assert entry.level == "INFO" assert entry.logger_name == "GithubWebhook" assert entry.hook_id == "9948e8d0-65df-11f0-9e82-d8c2969b6368" @@ -93,14 +97,16 @@ def test_parse_production_log_entry_ansi_debug(self) -> None: """Test parsing production DEBUG log entry with ANSI color codes from prepare_log_prefix format.""" log_line = ( "2025-07-21T06:05:48.290851 GithubWebhook \x1b[36mDEBUG\x1b[0m " - "\x1b[38;5;160mgithub-webhook-server\x1b[0m [check_run][9948e8d0-65df-11f0-9e82-d8c2969b6368][myakove-bot]: Signature verification successful\x1b[0m" + "\x1b[38;5;160mgithub-webhook-server\x1b[0m " + "[check_run][9948e8d0-65df-11f0-9e82-d8c2969b6368][myakove-bot]: " + "Signature verification successful\x1b[0m" ) parser = LogParser() entry = parser.parse_log_entry(log_line) assert entry is not None - assert entry.timestamp == datetime.datetime(2025, 7, 21, 6, 5, 48, 290851) + assert entry.timestamp == datetime.datetime(2025, 7, 21, 6, 5, 48, 290851, tzinfo=datetime.UTC) assert entry.level == "DEBUG" assert entry.logger_name == "GithubWebhook" assert entry.hook_id == "9948e8d0-65df-11f0-9e82-d8c2969b6368" @@ -121,7 +127,7 @@ def 
test_parse_production_log_with_complex_ansi(self) -> None: entry = parser.parse_log_entry(log_line) assert entry is not None - assert entry.timestamp == datetime.datetime(2025, 7, 21, 6, 5, 53, 415209) + assert entry.timestamp == datetime.datetime(2025, 7, 21, 6, 5, 53, 415209, tzinfo=datetime.UTC) assert entry.level == "DEBUG" assert entry.logger_name == "GithubWebhook" assert entry.hook_id == "96d21c70-65df-11f0-89ca-d82effeb540d" @@ -150,12 +156,19 @@ def test_parse_malformed_log_entry(self) -> None: def test_parse_log_file(self) -> None: """Test parsing multiple log entries from a file.""" - log_content = """2025-07-31T10:00:00.000000 GithubWebhook INFO test-repo [push][delivery1][user1]: Start processing -2025-07-31T10:00:01.000000 GithubWebhook DEBUG test-repo [push][delivery1][user1]: Validating signature -2025-07-31T10:00:02.000000 GithubWebhook INFO test-repo [push][delivery1][user1]: Processing complete -2025-07-31T10:01:00.000000 GithubWebhook INFO test-repo [pull_request][delivery2][user2][PR 456]: Processing webhook -Invalid log line -2025-07-31T10:01:05.000000 GithubWebhook ERROR test-repo [pull_request][delivery2][user2][PR 456]: Processing failed""" + log_content = ( + "2025-07-31T10:00:00.000000 GithubWebhook INFO test-repo " + "[push][delivery1][user1]: Start processing\n" + "2025-07-31T10:00:01.000000 GithubWebhook DEBUG test-repo " + "[push][delivery1][user1]: Validating signature\n" + "2025-07-31T10:00:02.000000 GithubWebhook INFO test-repo " + "[push][delivery1][user1]: Processing complete\n" + "2025-07-31T10:01:00.000000 GithubWebhook INFO test-repo " + "[pull_request][delivery2][user2][PR 456]: Processing webhook\n" + "Invalid log line\n" + "2025-07-31T10:01:05.000000 GithubWebhook ERROR test-repo " + "[pull_request][delivery2][user2][PR 456]: Processing failed" + ) with tempfile.NamedTemporaryFile(mode="w", suffix=".log", delete=False) as f: f.write(log_content) @@ -176,8 +189,6 @@ def test_parse_log_file(self) -> None: def test_parse_log_file_error_logging(self, caplog) -> None: """Test that OSError and UnicodeDecodeError are properly logged.""" - import logging - import unittest.mock # Set log level to capture ERROR messages caplog.set_level(logging.ERROR) @@ -245,7 +256,7 @@ async def test_tail_log_file_with_new_content(self) -> None: # Wait for the tail to collect entries with timeout try: await asyncio.wait_for(tail_task, timeout=2.0) - except asyncio.TimeoutError: + except TimeoutError: # Cancel the task and wait for it to complete tail_task.cancel() with contextlib.suppress(asyncio.CancelledError): @@ -520,6 +531,10 @@ def test_log_entry_to_dict(self) -> None: "repository": "org/repo", "pr_number": None, "github_user": None, + "task_id": None, + "task_type": None, + "task_status": None, + "token_spend": None, } assert result == expected @@ -557,25 +572,27 @@ class TestWorkflowSteps: """Test class for workflow step related functionality.""" def test_is_workflow_step_true(self) -> None: - """Test is_workflow_step method with STEP level entries.""" + """Test is_workflow_step method with entries that have task_id and task_status.""" parser = LogParser() step_entry = LogEntry( - timestamp="2025-07-31T12:00:00", - level="STEP", + timestamp=datetime.datetime(2025, 7, 31, 12, 0, 0), + level="INFO", logger_name="test_logger", message="Starting CI/CD workflow", hook_id="hook-123", + task_id="webhook_processing", + task_status="started", ) assert parser.is_workflow_step(step_entry) is True def test_is_workflow_step_false(self) -> None: - """Test is_workflow_step method 
with non-STEP level entries.""" + """Test is_workflow_step method with entries that don't have task_id and task_status.""" parser = LogParser() info_entry = LogEntry( - timestamp="2025-07-31T12:00:00", + timestamp=datetime.datetime(2025, 7, 31, 12, 0, 0), level="INFO", logger_name="test_logger", message="Regular info message", @@ -583,7 +600,7 @@ def test_is_workflow_step_false(self) -> None: ) debug_entry = LogEntry( - timestamp="2025-07-31T12:00:00", + timestamp=datetime.datetime(2025, 7, 31, 12, 0, 0), level="DEBUG", logger_name="test_logger", message="Debug message", @@ -594,46 +611,52 @@ def test_is_workflow_step_false(self) -> None: assert parser.is_workflow_step(debug_entry) is False def test_extract_workflow_steps_with_matching_hook_id(self) -> None: - """Test extract_workflow_steps with entries matching hook_id.""" + """Test extract_workflow_steps with entries matching hook_id and having task fields.""" parser = LogParser() target_hook_id = "hook-123" entries = [ LogEntry( - timestamp="2025-07-31T12:00:00", - level="STEP", + timestamp=datetime.datetime(2025, 7, 31, 12, 0, 0), + level="INFO", logger_name="test_logger", message="Starting workflow", hook_id=target_hook_id, + task_id="webhook_processing", + task_status="started", ), LogEntry( - timestamp="2025-07-31T12:00:01", + timestamp=datetime.datetime(2025, 7, 31, 12, 0, 1), level="INFO", logger_name="test_logger", message="Regular info message", hook_id=target_hook_id, ), LogEntry( - timestamp="2025-07-31T12:00:02", - level="STEP", + timestamp=datetime.datetime(2025, 7, 31, 12, 0, 2), + level="INFO", logger_name="test_logger", message="Processing stage", hook_id=target_hook_id, + task_id="webhook_processing", + task_status="processing", ), LogEntry( - timestamp="2025-07-31T12:00:03", - level="STEP", + timestamp=datetime.datetime(2025, 7, 31, 12, 0, 3), + level="INFO", logger_name="test_logger", message="Different hook workflow", hook_id="hook-456", + task_id="webhook_processing", + task_status="started", ), ] workflow_steps = parser.extract_workflow_steps(entries, target_hook_id) assert len(workflow_steps) == 2 - assert all(step.level == "STEP" for step in workflow_steps) assert all(step.hook_id == target_hook_id for step in workflow_steps) + assert all(step.task_id is not None and step.task_status is not None for step in workflow_steps) assert workflow_steps[0].message == "Starting workflow" assert workflow_steps[1].message == "Processing stage" @@ -644,18 +667,20 @@ def test_extract_workflow_steps_no_matching_entries(self) -> None: entries = [ LogEntry( - timestamp="2025-07-31T12:00:00", + timestamp=datetime.datetime(2025, 7, 31, 12, 0, 0), level="INFO", logger_name="test_logger", message="Regular info message", hook_id=target_hook_id, ), LogEntry( - timestamp="2025-07-31T12:00:01", - level="STEP", + timestamp=datetime.datetime(2025, 7, 31, 12, 0, 1), + level="INFO", logger_name="test_logger", message="Different hook workflow", hook_id="hook-456", + task_id="webhook_processing", + task_status="started", ), ] @@ -670,3 +695,66 @@ def test_extract_workflow_steps_empty_entries(self) -> None: workflow_steps = parser.extract_workflow_steps([], "hook-123") assert len(workflow_steps) == 0 + + def test_extract_token_spend_original_format(self) -> None: + """Test extracting token spend from original log format.""" + parser = LogParser() + message = "Token spend: 35 API calls (initial: 2831, final: 2796, remaining: 2796)" + + result = parser.extract_token_spend(message) + + assert result == 35 + + def 
test_extract_token_spend_masked_format(self) -> None: + """Test extracting token spend from masked log format (when 'token' is redacted).""" + parser = LogParser() + message = "token ***** 23 API calls (initial: 2103, final: 2080, remaining: 2080)" + + result = parser.extract_token_spend(message) + + assert result == 23 + + def test_extract_token_spend_masked_format_with_colon(self) -> None: + """Test extracting token spend from masked format with colon.""" + parser = LogParser() + message = "token *****: 50 API calls (initial: 2269, final: 2219, remaining: 2219)" + + result = parser.extract_token_spend(message) + + assert result == 50 + + def test_extract_token_spend_not_found(self) -> None: + """Test extracting token spend when pattern is not found.""" + parser = LogParser() + message = "Some other log message without token spend" + + result = parser.extract_token_spend(message) + + assert result is None + + def test_extract_token_spend_invalid_number(self) -> None: + """Test extracting token spend with a non-numeric spend value.""" + parser = LogParser() + # A non-numeric value shouldn't appear in practice; the numeric capture group + # simply fails to match it, so the defensive ValueError handling stays unreached + message = "Token spend: abc API calls" + + result = parser.extract_token_spend(message) + + # The regex won't match "abc" as a number, so it should return None + assert result is None + + def test_parse_log_entry_with_token_spend(self) -> None: + """Test parsing log entry that contains token spend information.""" + parser = LogParser() + log_line = ( + "2025-11-07T14:43:56.299809 GithubWebhook INFO " + "github-webhook-server [issue_comment][6143a030-bbd7-11f0-95bd-b07354b8711c][myakove-bot][PR 890]: " + "token ***** 23 API calls (initial: 2103, final: 2080, remaining: 2080)" + ) + + entry = parser.parse_log_entry(log_line) + + assert entry is not None + assert entry.token_spend == 23 + assert entry.hook_id == "6143a030-bbd7-11f0-95bd-b07354b8711c" diff --git a/webhook_server/tests/test_memory_optimization.py b/webhook_server/tests/test_memory_optimization.py index 29c605249..4c540b553 100644 --- a/webhook_server/tests/test_memory_optimization.py +++ b/webhook_server/tests/test_memory_optimization.py @@ -1,16 +1,17 @@ """Memory optimization tests for log viewer streaming functionality.""" -import tempfile +import asyncio import datetime +import shutil +import tempfile import time -import asyncio from pathlib import Path -from unittest.mock import Mock -import pytest +from unittest.mock import Mock, patch +import pytest -from webhook_server.web.log_viewer import LogViewerController from webhook_server.libs.log_parser import LogEntry +from webhook_server.web.log_viewer import LogViewerController class TestStreamingMemoryOptimization: @@ -18,7 +19,6 @@ class TestStreamingMemoryOptimization: def setup_method(self): """Set up test environment.""" - from unittest.mock import patch self.mock_logger = Mock() @@ -96,7 +96,7 @@ def test_chunked_processing_efficiency(self): start_time = time.perf_counter() entries_processed = 0 - for entry in self.controller._stream_log_entries(chunk_size=500, max_entries=5000): + for _entry in self.controller._stream_log_entries(max_entries=5000): entries_processed += 1 if entries_processed >= 2000: # Stop after processing 2000 entries break @@ -247,7 +247,6 @@ async def stream_entries(): def teardown_method(self): """Clean up test environment.""" - import shutil if Path(self.temp_dir).exists(): shutil.rmtree(self.temp_dir) @@ -258,7 +257,6 @@ class 
TestMemoryRegressionPrevention: def test_streaming_functionality_baseline(self): """Establish baseline functionality for regression testing.""" - from unittest.mock import patch mock_logger = Mock() @@ -287,7 +285,8 @@ def test_streaming_functionality_baseline(self): microseconds = 1000 + (i * 1000) % 999000 timestamp = base_time + datetime.timedelta(seconds=i, microseconds=microseconds) f.write( - f"{timestamp.isoformat()} GithubWebhook INFO test-repo [push][hook-{i:04d}][user]: Message {i}\n" + f"{timestamp.isoformat()} GithubWebhook INFO test-repo " + f"[push][hook-{i:04d}][user]: Message {i}\n" ) # Test streaming functionality diff --git a/webhook_server/tests/test_notification_utils.py b/webhook_server/tests/test_notification_utils.py new file mode 100644 index 000000000..42311f046 --- /dev/null +++ b/webhook_server/tests/test_notification_utils.py @@ -0,0 +1,283 @@ +"""Tests for notification_utils module.""" + +from unittest.mock import Mock, patch + +import pytest +import requests + +from webhook_server.utils.notification_utils import send_slack_message + + +class TestSendSlackMessage: + """Test suite for send_slack_message function.""" + + @pytest.fixture + def mock_logger(self) -> Mock: + """Create a mock logger.""" + return Mock() + + @pytest.fixture + def webhook_url(self) -> str: + """Slack webhook URL for testing.""" + return "https://hooks.slack.com/services/TEST/WEBHOOK/URL" # pragma: allowlist secret + + @patch("webhook_server.utils.notification_utils.requests.post") + def test_successful_message_send(self, mock_post: Mock, webhook_url: str, mock_logger: Mock) -> None: + """Test successful Slack message send with 200 response.""" + mock_response = Mock(spec=requests.Response) + mock_response.status_code = 200 + mock_post.return_value = mock_response + + send_slack_message( + message="Test notification", + webhook_url=webhook_url, + logger=mock_logger, + log_prefix="[TEST]", + ) + + mock_logger.info.assert_called_once_with("[TEST] Sending message to slack: Test notification") + mock_post.assert_called_once() + + # Verify the call arguments + call_args = mock_post.call_args + assert call_args.kwargs["timeout"] == 10 + assert call_args.kwargs["headers"] == {"Content-Type": "application/json"} + assert '"text": "Test notification"' in call_args.kwargs["data"] + + @patch("webhook_server.utils.notification_utils.requests.post") + def test_message_send_with_500_error(self, mock_post: Mock, webhook_url: str, mock_logger: Mock) -> None: + """Test Slack message send with 500 server error.""" + mock_response = Mock(spec=requests.Response) + mock_response.status_code = 500 + mock_response.text = "Internal Server Error" + mock_post.return_value = mock_response + + with pytest.raises(ValueError) as exc_info: + send_slack_message( + message="Test message", + webhook_url=webhook_url, + logger=mock_logger, + ) + + assert "Request to slack returned an error 500" in str(exc_info.value) + assert "Internal Server Error" in str(exc_info.value) + + @patch("webhook_server.utils.notification_utils.requests.post") + def test_message_send_with_404_error(self, mock_post: Mock, webhook_url: str, mock_logger: Mock) -> None: + """Test Slack message send with 404 not found error.""" + mock_response = Mock(spec=requests.Response) + mock_response.status_code = 404 + mock_response.text = "Not Found" + mock_post.return_value = mock_response + + with pytest.raises(ValueError) as exc_info: + send_slack_message( + message="Test message", + webhook_url=webhook_url, + logger=mock_logger, + ) + + assert "Request to 
slack returned an error 404" in str(exc_info.value) + assert "Not Found" in str(exc_info.value) + + @patch("webhook_server.utils.notification_utils.requests.post") + def test_message_send_with_401_unauthorized(self, mock_post: Mock, webhook_url: str, mock_logger: Mock) -> None: + """Test Slack message send with 401 unauthorized error.""" + mock_response = Mock(spec=requests.Response) + mock_response.status_code = 401 + mock_response.text = "Unauthorized - Invalid token" + mock_post.return_value = mock_response + + with pytest.raises(ValueError) as exc_info: + send_slack_message( + message="Confidential alert", + webhook_url=webhook_url, + logger=mock_logger, + ) + + assert "Request to slack returned an error 401" in str(exc_info.value) + assert "Unauthorized - Invalid token" in str(exc_info.value) + + @patch("webhook_server.utils.notification_utils.requests.post") + def test_message_send_with_timeout(self, mock_post: Mock, webhook_url: str, mock_logger: Mock) -> None: + """Test Slack message send with connection timeout.""" + mock_post.side_effect = requests.exceptions.Timeout("Connection timeout") + + with pytest.raises(requests.exceptions.Timeout): + send_slack_message( + message="Test message", + webhook_url=webhook_url, + logger=mock_logger, + ) + + mock_post.assert_called_once() + + @patch("webhook_server.utils.notification_utils.requests.post") + def test_message_send_with_connection_error(self, mock_post: Mock, webhook_url: str, mock_logger: Mock) -> None: + """Test Slack message send with connection error.""" + mock_post.side_effect = requests.exceptions.ConnectionError("Failed to establish connection") + + with pytest.raises(requests.exceptions.ConnectionError): + send_slack_message( + message="Test message", + webhook_url=webhook_url, + logger=mock_logger, + ) + + @patch("webhook_server.utils.notification_utils.requests.post") + def test_empty_message(self, mock_post: Mock, webhook_url: str, mock_logger: Mock) -> None: + """Test sending empty message.""" + mock_response = Mock(spec=requests.Response) + mock_response.status_code = 200 + mock_post.return_value = mock_response + + send_slack_message( + message="", + webhook_url=webhook_url, + logger=mock_logger, + ) + + mock_logger.info.assert_called_once() + assert '"text": ""' in mock_post.call_args.kwargs["data"] + + @patch("webhook_server.utils.notification_utils.requests.post") + def test_message_with_special_characters(self, mock_post: Mock, webhook_url: str, mock_logger: Mock) -> None: + """Test message with special characters and emoji.""" + mock_response = Mock(spec=requests.Response) + mock_response.status_code = 200 + mock_post.return_value = mock_response + + message = 'Build failed! 
🔥\nError: "timeout"\n\tDetails: \\n\\t' + send_slack_message( + message=message, + webhook_url=webhook_url, + logger=mock_logger, + ) + + mock_post.assert_called_once() + + @patch("webhook_server.utils.notification_utils.requests.post") + def test_message_with_json_characters(self, mock_post: Mock, webhook_url: str, mock_logger: Mock) -> None: + """Test message with JSON special characters.""" + mock_response = Mock(spec=requests.Response) + mock_response.status_code = 200 + mock_post.return_value = mock_response + + message = '{"key": "value", "nested": {"data": true}}' + send_slack_message( + message=message, + webhook_url=webhook_url, + logger=mock_logger, + ) + + mock_post.assert_called_once() + + @patch("webhook_server.utils.notification_utils.requests.post") + def test_long_message(self, mock_post: Mock, webhook_url: str, mock_logger: Mock) -> None: + """Test sending very long message.""" + mock_response = Mock(spec=requests.Response) + mock_response.status_code = 200 + mock_post.return_value = mock_response + + long_message = "A" * 10000 + send_slack_message( + message=long_message, + webhook_url=webhook_url, + logger=mock_logger, + ) + + mock_post.assert_called_once() + + @patch("webhook_server.utils.notification_utils.requests.post") + def test_without_log_prefix(self, mock_post: Mock, webhook_url: str, mock_logger: Mock) -> None: + """Test message send without log prefix (uses empty string by default).""" + mock_response = Mock(spec=requests.Response) + mock_response.status_code = 200 + mock_post.return_value = mock_response + + send_slack_message( + message="Test message", + webhook_url=webhook_url, + logger=mock_logger, + ) + + mock_logger.info.assert_called_once_with(" Sending message to slack: Test message") + + @patch("webhook_server.utils.notification_utils.requests.post") + def test_request_headers_correct(self, mock_post: Mock, webhook_url: str, mock_logger: Mock) -> None: + """Test that request includes correct headers.""" + mock_response = Mock(spec=requests.Response) + mock_response.status_code = 200 + mock_post.return_value = mock_response + + send_slack_message( + message="Test", + webhook_url=webhook_url, + logger=mock_logger, + ) + + assert mock_post.call_args.kwargs["headers"]["Content-Type"] == "application/json" + + @patch("webhook_server.utils.notification_utils.requests.post") + def test_request_timeout_value(self, mock_post: Mock, webhook_url: str, mock_logger: Mock) -> None: + """Test that request uses correct timeout value.""" + mock_response = Mock(spec=requests.Response) + mock_response.status_code = 200 + mock_post.return_value = mock_response + + send_slack_message( + message="Test", + webhook_url=webhook_url, + logger=mock_logger, + ) + + assert mock_post.call_args.kwargs["timeout"] == 10 + + @patch("webhook_server.utils.notification_utils.requests.post") + def test_invalid_webhook_url(self, mock_post: Mock, mock_logger: Mock) -> None: + """Test with malformed webhook URL.""" + mock_response = Mock(spec=requests.Response) + mock_response.status_code = 400 + mock_response.text = "Invalid URL" + mock_post.return_value = mock_response + + with pytest.raises(ValueError): + send_slack_message( + message="Test", + webhook_url="not-a-valid-url", + logger=mock_logger, + ) + + @patch("webhook_server.utils.notification_utils.requests.post") + def test_multiline_message(self, mock_post: Mock, webhook_url: str, mock_logger: Mock) -> None: + """Test multiline message formatting.""" + mock_response = Mock(spec=requests.Response) + mock_response.status_code = 200 + 
mock_post.return_value = mock_response + + message = """Line 1 +Line 2 +Line 3""" + send_slack_message( + message=message, + webhook_url=webhook_url, + logger=mock_logger, + ) + + mock_post.assert_called_once() + + @patch("webhook_server.utils.notification_utils.requests.post") + def test_message_with_unicode(self, mock_post: Mock, webhook_url: str, mock_logger: Mock) -> None: + """Test message with Unicode characters.""" + mock_response = Mock(spec=requests.Response) + mock_response.status_code = 200 + mock_post.return_value = mock_response + + message = "Test 测试 тест ทดสอบ 🚀" + send_slack_message( + message=message, + webhook_url=webhook_url, + logger=mock_logger, + ) + + mock_post.assert_called_once() diff --git a/webhook_server/tests/test_owners_files_handler.py b/webhook_server/tests/test_owners_files_handler.py index c8381dc5a..2b0a53efe 100644 --- a/webhook_server/tests/test_owners_files_handler.py +++ b/webhook_server/tests/test_owners_files_handler.py @@ -2,8 +2,9 @@ import pytest import yaml +from github.GithubException import GithubException -from webhook_server.libs.owners_files_handler import OwnersFileHandler +from webhook_server.libs.handlers.owners_files_handler import OwnersFileHandler from webhook_server.tests.conftest import ContentFile @@ -305,32 +306,28 @@ async def test_get_all_repository_reviewers(self, owners_file_handler: OwnersFil @pytest.mark.asyncio async def test_get_all_pull_request_approvers(self, owners_file_handler: OwnersFileHandler) -> None: """Test get_all_pull_request_approvers method.""" - owners_file_handler.changed_files = ["file1.py"] - - with patch.object(owners_file_handler, "owners_data_for_changed_files") as mock_owners_data: - mock_owners_data.return_value = { - ".": {"approvers": ["user1", "user2"], "reviewers": ["user3"]}, - "folder1": {"approvers": ["user4"], "reviewers": ["user5"]}, - } + owners_file_handler.changed_files = ["file1.py", "folder1/file2.py"] + owners_file_handler.all_repository_approvers_and_reviewers = { + ".": {"approvers": ["user1", "user2"], "reviewers": ["user3"]}, + "folder1": {"approvers": ["user4"], "reviewers": ["user5"]}, + } - result = await owners_file_handler.get_all_pull_request_approvers() + result = await owners_file_handler.get_all_pull_request_approvers() - assert result == ["user1", "user2", "user4"] + assert result == ["user1", "user2", "user4"] @pytest.mark.asyncio async def test_get_all_pull_request_reviewers(self, owners_file_handler: OwnersFileHandler) -> None: """Test get_all_pull_request_reviewers method.""" - owners_file_handler.changed_files = ["file1.py"] - - with patch.object(owners_file_handler, "owners_data_for_changed_files") as mock_owners_data: - mock_owners_data.return_value = { - ".": {"approvers": ["user1"], "reviewers": ["user2", "user3"]}, - "folder1": {"approvers": ["user4"], "reviewers": ["user5"]}, - } + owners_file_handler.changed_files = ["file1.py", "folder1/file2.py"] + owners_file_handler.all_repository_approvers_and_reviewers = { + ".": {"approvers": ["user1"], "reviewers": ["user2", "user3"]}, + "folder1": {"approvers": ["user4"], "reviewers": ["user5"]}, + } - result = await owners_file_handler.get_all_pull_request_reviewers() + result = await owners_file_handler.get_all_pull_request_reviewers() - assert result == ["user2", "user3", "user5"] + assert result == ["user2", "user3", "user5"] @pytest.mark.asyncio async def test_owners_data_for_changed_files(self, owners_file_handler: OwnersFileHandler) -> None: @@ -354,7 +351,7 @@ async def test_owners_data_for_changed_files(self, 
owners_file_handler: OwnersFi }, } - result = await owners_file_handler.owners_data_for_changed_files() + result = await owners_file_handler.owners_data_for_changed_files expected = { "folder1": {"approvers": ["folder1_approver1"], "reviewers": ["folder1_reviewer1"]}, @@ -384,7 +381,7 @@ async def test_owners_data_for_changed_files_no_root_approvers( }, } - result = await owners_file_handler.owners_data_for_changed_files() + result = await owners_file_handler.owners_data_for_changed_files expected = { "folder5": { @@ -396,6 +393,72 @@ async def test_owners_data_for_changed_files_no_root_approvers( } assert result == expected + @pytest.mark.asyncio + async def test_owners_data_for_changed_files_caching(self, owners_file_handler: OwnersFileHandler) -> None: + """Test that owners_data_for_changed_files caches results using @functools.cached_property.""" + # Set up test data + owners_file_handler.changed_files = [ + "folder1/file1.py", + "folder2/file2.py", + ] + owners_file_handler.all_repository_approvers_and_reviewers = { + ".": {"approvers": ["root_approver1"], "reviewers": ["root_reviewer1"]}, + "folder1": {"approvers": ["folder1_approver1"], "reviewers": ["folder1_reviewer1"]}, + "folder2": {}, + } + + # First call - computes and caches + result1 = await owners_file_handler.owners_data_for_changed_files + + # Verify result is correct + expected = { + "folder1": {"approvers": ["folder1_approver1"], "reviewers": ["folder1_reviewer1"]}, + "folder2": {}, + ".": {"approvers": ["root_approver1"], "reviewers": ["root_reviewer1"]}, + } + assert result1 == expected + + # Second call - should return cached result (same object reference) + result2 = await owners_file_handler.owners_data_for_changed_files + + # Verify cache returns same result and same object reference + assert result2 == result1 + assert result2 is result1 # Same object reference (cached by @functools.cached_property) + + @pytest.mark.asyncio + async def test_owners_data_for_changed_files_cache_independence(self, mock_github_webhook: Mock) -> None: + """Test that different OwnersFileHandler instances have independent caches using @functools.cached_property.""" + # Create two separate instances + handler1 = OwnersFileHandler(mock_github_webhook) + handler2 = OwnersFileHandler(mock_github_webhook) + + # Set up different data for each handler + handler1.changed_files = ["folder1/file1.py"] + handler1.all_repository_approvers_and_reviewers = { + ".": {"approvers": ["approver1"], "reviewers": ["reviewer1"]}, + "folder1": {"approvers": ["folder1_approver"], "reviewers": ["folder1_reviewer"]}, + } + + handler2.changed_files = ["folder2/file2.py"] + handler2.all_repository_approvers_and_reviewers = { + ".": {"approvers": ["approver2"], "reviewers": ["reviewer2"]}, + "folder2": {"approvers": ["folder2_approver"], "reviewers": ["folder2_reviewer"]}, + } + + # Get results from both handlers + result1 = await handler1.owners_data_for_changed_files + result2 = await handler2.owners_data_for_changed_files + + # Verify they have independent results + assert result1 != result2 + assert "folder1" in result1 + assert "folder2" in result2 + assert "folder2" not in result1 + assert "folder1" not in result2 + + # Verify results are not the same object (independent caches) + assert result1 is not result2 + @pytest.mark.asyncio async def test_assign_reviewers(self, owners_file_handler: OwnersFileHandler, mock_pull_request: Mock) -> None: owners_file_handler.changed_files = ["file1.py"] @@ -408,6 +471,20 @@ async def test_assign_reviewers(self, 
owners_file_handler: OwnersFileHandler, mo expected_calls = [call(["reviewer1"]), call(["reviewer2"])] actual_calls = mock_create_request.call_args_list assert sorted(actual_calls, key=str) == sorted(expected_calls, key=str) + # Verify completion log was called + assert owners_file_handler.logger.step.called # type: ignore[attr-defined] + + @pytest.mark.asyncio + async def test_assign_reviewers_no_reviewers( + self, owners_file_handler: OwnersFileHandler, mock_pull_request: Mock + ) -> None: + """Test assigning reviewers when no reviewers to assign.""" + owners_file_handler.changed_files = ["file1.py"] + owners_file_handler.all_pull_request_reviewers = [] + + await owners_file_handler.assign_reviewers(mock_pull_request) + # Verify completion log was called (no reviewers to assign is acceptable) + assert owners_file_handler.logger.step.called # type: ignore[attr-defined] @pytest.mark.asyncio async def test_assign_reviewers_github_exception( @@ -418,8 +495,6 @@ async def test_assign_reviewers_github_exception( owners_file_handler.all_pull_request_reviewers = ["reviewer1"] mock_pull_request.user.login = "test-user" - from github.GithubException import GithubException - with patch.object(mock_pull_request, "create_review_request", side_effect=GithubException(404, "Not found")): with patch.object(mock_pull_request, "create_issue_comment") as mock_comment: await owners_file_handler.assign_reviewers(mock_pull_request) diff --git a/webhook_server/tests/test_performance_benchmarks.py b/webhook_server/tests/test_performance_benchmarks.py index 7c2676462..f6fb9fa23 100644 --- a/webhook_server/tests/test_performance_benchmarks.py +++ b/webhook_server/tests/test_performance_benchmarks.py @@ -2,6 +2,7 @@ import asyncio import datetime +import gc import json import os import random @@ -204,7 +205,10 @@ def test_memory_efficiency_large_dataset(self): parser = LogParser() content = "" for i in range(10000): - content += f"2025-07-31T10:{i // 600:02d}:{i % 60:02d}.000000 GithubWebhook INFO test-repo [push][hook-{i}][user]: Message {i}\n" + content += ( + f"2025-07-31T10:{i // 600:02d}:{i % 60:02d}.000000 GithubWebhook INFO " + f"test-repo [push][hook-{i}][user]: Message {i}\n" + ) with tempfile.NamedTemporaryFile(mode="w", suffix=".log", delete=False) as f: f.write(content) @@ -229,9 +233,6 @@ def test_memory_cleanup_after_processing(self): if not PSUTIL_AVAILABLE: pytest.skip("psutil not available for memory monitoring") - import gc - import os - process = psutil.Process(os.getpid()) initial_memory = process.memory_info().rss / 1024 / 1024 # MB @@ -273,7 +274,7 @@ async def test_concurrent_parsing_performance(self): """Test performance of concurrent parsing operations.""" # Create multiple log files files = [] - for i in range(5): + for _i in range(5): content = self._generate_test_content(2000) with tempfile.NamedTemporaryFile(mode="w", suffix=".log", delete=False) as f: f.write(content) @@ -448,13 +449,13 @@ async def monitor_logs(): # Wait for monitoring to complete try: await asyncio.wait_for(monitor_task, timeout=2.0) - except asyncio.TimeoutError: + except TimeoutError: monitor_task.cancel() # Analyze latency if len(entries_received) >= 3: latencies = [] - for i, (receive_time, entry) in enumerate(entries_received): + for i, (receive_time, _entry) in enumerate(entries_received): if i < len(write_times): latency = receive_time - write_times[i] latencies.append(latency) diff --git a/webhook_server/tests/test_prepare_retest_wellcome_comment.py 
b/webhook_server/tests/test_prepare_retest_welcome_comment.py similarity index 63% rename from webhook_server/tests/test_prepare_retest_wellcome_comment.py rename to webhook_server/tests/test_prepare_retest_welcome_comment.py index eb0f61567..6f0a79345 100644 --- a/webhook_server/tests/test_prepare_retest_wellcome_comment.py +++ b/webhook_server/tests/test_prepare_retest_welcome_comment.py @@ -1,9 +1,9 @@ import pytest -from webhook_server.libs.pull_request_handler import PullRequestHandler +from webhook_server.libs.handlers.pull_request_handler import PullRequestHandler -class TestPrepareRetestWellcomeMsg: +class TestPrepareRetestWelcomeMsg: @pytest.mark.parametrize( "tox, build_and_push_container, pypi, pre_commit, conventional_title, expected", [ @@ -22,7 +22,10 @@ class TestPrepareRetestWellcomeMsg: False, False, False, - " * `/retest build-container` - Rebuild and test container image\n * `/retest all` - Run all available tests\n", + ( + " * `/retest build-container` - Rebuild and test container image\n " + "* `/retest all` - Run all available tests\n" + ), ), ( False, @@ -30,7 +33,8 @@ class TestPrepareRetestWellcomeMsg: True, False, False, - " * `/retest python-module-install` - Test Python package installation\n * `/retest all` - Run all available tests\n", + " * `/retest python-module-install` - Test Python package installation\n " + "* `/retest all` - Run all available tests\n", ), ( False, @@ -38,7 +42,10 @@ class TestPrepareRetestWellcomeMsg: False, True, False, - " * `/retest pre-commit` - Run pre-commit hooks and checks\n * `/retest all` - Run all available tests\n", + ( + " * `/retest pre-commit` - Run pre-commit hooks and checks\n " + "* `/retest all` - Run all available tests\n" + ), ), ( True, @@ -46,7 +53,14 @@ class TestPrepareRetestWellcomeMsg: True, True, True, - " * `/retest tox` - Run Python test suite with tox\n * `/retest build-container` - Rebuild and test container image\n * `/retest python-module-install` - Test Python package installation\n * `/retest pre-commit` - Run pre-commit hooks and checks\n * `/retest conventional-title` - Validate commit message format\n * `/retest all` - Run all available tests\n", + ( + " * `/retest tox` - Run Python test suite with tox\n " + "* `/retest build-container` - Rebuild and test container image\n " + "* `/retest python-module-install` - Test Python package installation\n " + "* `/retest pre-commit` - Run pre-commit hooks and checks\n " + "* `/retest conventional-title` - Validate commit message format\n " + "* `/retest all` - Run all available tests\n" + ), ), ( False, @@ -54,11 +68,12 @@ class TestPrepareRetestWellcomeMsg: False, False, True, - " * `/retest conventional-title` - Validate commit message format\n * `/retest all` - Run all available tests\n", + " * `/retest conventional-title` - Validate commit message format\n " + "* `/retest all` - Run all available tests\n", ), ], ) - def test_prepare_retest_wellcome_comment( + def test_prepare_retest_welcome_comment( self, process_github_webhook, owners_file_handler, diff --git a/webhook_server/tests/test_pull_request_handler.py b/webhook_server/tests/test_pull_request_handler.py index e4cbebdd7..9d3edb100 100644 --- a/webhook_server/tests/test_pull_request_handler.py +++ b/webhook_server/tests/test_pull_request_handler.py @@ -1,13 +1,16 @@ -import pytest from unittest.mock import AsyncMock, Mock, patch + +import pytest +from github import GithubException from github.PullRequest import PullRequest -from webhook_server.libs.pull_request_handler import PullRequestHandler 
+from webhook_server.libs.handlers.pull_request_handler import PullRequestHandler from webhook_server.utils.constants import ( APPROVED_BY_LABEL_PREFIX, CAN_BE_MERGED_STR, CHANGED_REQUESTED_BY_LABEL_PREFIX, CHERRY_PICK_LABEL_PREFIX, + CHERRY_PICKED_LABEL_PREFIX, COMMENTED_BY_LABEL_PREFIX, HAS_CONFLICTS_LABEL_STR, LGTM_BY_LABEL_PREFIX, @@ -18,6 +21,11 @@ ) +# Helper async function for mocking async cached property +async def _mock_owners_data_for_changed_files() -> dict: + return {} + + class TestPullRequestHandler: """Test suite for PullRequestHandler class.""" @@ -202,8 +210,12 @@ async def test_process_pull_request_webhook_data_closed_action_merged( with patch.object(pull_request_handler, "close_issue_for_merged_or_closed_pr") as mock_close_issue: with patch.object(pull_request_handler, "delete_remote_tag_for_merged_or_closed_pr") as mock_delete_tag: - with patch.object(pull_request_handler.runner_handler, "cherry_pick") as mock_cherry_pick: - with patch.object(pull_request_handler.runner_handler, "run_build_container") as mock_build: + with patch.object( + pull_request_handler.runner_handler, "cherry_pick", new_callable=AsyncMock + ) as mock_cherry_pick: + with patch.object( + pull_request_handler.runner_handler, "run_build_container", new_callable=AsyncMock + ) as mock_build: with patch.object( pull_request_handler, "label_all_opened_pull_requests_merge_state_after_merged" ) as mock_label_all: @@ -287,7 +299,9 @@ async def test_set_wip_label_based_on_title_without_wip( """Test removing WIP label when title doesn't contain WIP.""" mock_pull_request.title = "Test PR" - with patch.object(pull_request_handler.labels_handler, "_remove_label") as mock_remove_label: + with patch.object( + pull_request_handler.labels_handler, "_remove_label", new_callable=AsyncMock + ) as mock_remove_label: await pull_request_handler.set_wip_label_based_on_title(pull_request=mock_pull_request) mock_remove_label.assert_called_once_with(pull_request=mock_pull_request, label=WIP_STR) @@ -344,6 +358,7 @@ async def test_delete_remote_tag_for_merged_or_closed_pr_with_tag( self, pull_request_handler: PullRequestHandler, mock_pull_request: Mock ) -> None: mock_pull_request.title = "Test PR" + mock_pull_request.number = 123 with ( patch.object(pull_request_handler.github_webhook, "build_and_push_container", True), patch.object( @@ -357,10 +372,13 @@ async def test_delete_remote_tag_for_merged_or_closed_pr_with_tag( patch.object( pull_request_handler.runner_handler, "run_podman_command", - new=AsyncMock(side_effect=[(0, "", ""), (1, "tag exists", ""), (0, "", "")]), + new=AsyncMock(side_effect=[(True, "", ""), (True, "tag exists", ""), (True, "", ""), (True, "", "")]), ), + patch.object(mock_pull_request, "create_issue_comment", new=Mock()), ): await pull_request_handler.delete_remote_tag_for_merged_or_closed_pr(pull_request=mock_pull_request) + # Verify step logging was called + assert pull_request_handler.logger.step.called # The method uses runner_handler.run_podman_command, not repository.delete_tag @pytest.mark.asyncio @@ -470,7 +488,7 @@ async def test_label_pull_request_by_merge_state_has_conflicts( mock_pull_request.mergeable = False mock_pull_request.mergeable_state = "dirty" - with patch.object(pull_request_handler.labels_handler, "_add_label") as mock_add_label: + with patch.object(pull_request_handler.labels_handler, "_add_label", new_callable=AsyncMock) as mock_add_label: await pull_request_handler.label_pull_request_by_merge_state(pull_request=mock_pull_request) 
mock_add_label.assert_called_once_with(pull_request=mock_pull_request, label=HAS_CONFLICTS_LABEL_STR) @@ -479,7 +497,7 @@ async def test_process_verified_for_update_or_new_pull_request_auto_verified( self, pull_request_handler: PullRequestHandler, mock_pull_request: Mock ) -> None: """Test processing verified for update or new pull request for auto-verified user.""" - with patch.object(pull_request_handler.labels_handler, "_add_label") as mock_add_label: + with patch.object(pull_request_handler.labels_handler, "_add_label", new_callable=AsyncMock) as mock_add_label: with patch.object(pull_request_handler.check_run_handler, "set_verify_check_success") as mock_success: await pull_request_handler._process_verified_for_update_or_new_pull_request( pull_request=mock_pull_request @@ -494,7 +512,7 @@ async def test_process_verified_for_update_or_new_pull_request_not_auto_verified """Test processing verified for update or new pull request for non-auto-verified user.""" pull_request_handler.github_webhook.parent_committer = "other-user" - with patch.object(pull_request_handler.labels_handler, "_add_label") as mock_add_label: + with patch.object(pull_request_handler.labels_handler, "_add_label", new_callable=AsyncMock) as mock_add_label: with patch.object(pull_request_handler.check_run_handler, "set_verify_check_success") as mock_success: await pull_request_handler._process_verified_for_update_or_new_pull_request( pull_request=mock_pull_request @@ -507,7 +525,6 @@ async def test_process_verified_cherry_picked_pr_auto_verify_enabled( self, pull_request_handler: PullRequestHandler ) -> None: """Test cherry-picked PR with auto-verify enabled (default behavior).""" - from webhook_server.utils.constants import CHERRY_PICKED_LABEL_PREFIX mock_pull_request = Mock(spec=PullRequest) mock_label = Mock() @@ -529,7 +546,6 @@ async def test_process_verified_cherry_picked_pr_auto_verify_disabled( self, pull_request_handler: PullRequestHandler ) -> None: """Test cherry-picked PR with auto-verify disabled.""" - from webhook_server.utils.constants import CHERRY_PICKED_LABEL_PREFIX mock_pull_request = Mock(spec=PullRequest) mock_label = Mock() @@ -578,7 +594,9 @@ async def test_check_if_can_be_merged_not_approved( mock_pull_request.labels = [] with patch.object(pull_request_handler, "_check_if_pr_approved", return_value="not_approved"): - with patch.object(pull_request_handler.labels_handler, "_remove_label") as mock_remove_label: + with patch.object( + pull_request_handler.labels_handler, "_remove_label", new_callable=AsyncMock + ) as mock_remove_label: await pull_request_handler.check_if_can_be_merged(pull_request=mock_pull_request) mock_remove_label.assert_called_once_with(pull_request=mock_pull_request, label=CAN_BE_MERGED_STR) @@ -595,7 +613,7 @@ async def test_check_if_can_be_merged_approved( patch.object( pull_request_handler.owners_file_handler, "owners_data_for_changed_files", - new=AsyncMock(return_value={}), + _mock_owners_data_for_changed_files(), ), patch.object(pull_request_handler.github_webhook, "minimum_lgtm", 0), patch.object(pull_request_handler.check_run_handler, "set_merge_check_in_progress", new=AsyncMock()), @@ -609,7 +627,7 @@ async def test_check_if_can_be_merged_approved( "required_check_failed_or_no_status", new=AsyncMock(return_value=""), ), - patch.object(pull_request_handler.labels_handler, "wip_or_hold_lables_exists", return_value=""), + patch.object(pull_request_handler.labels_handler, "wip_or_hold_labels_exists", return_value=""), patch.object( pull_request_handler.labels_handler, 
"pull_request_labels_names", new=AsyncMock(return_value=[]) ), @@ -626,7 +644,7 @@ async def test_check_if_pr_approved_no_labels(self, pull_request_handler: PullRe patch.object( pull_request_handler.owners_file_handler, "owners_data_for_changed_files", - new=AsyncMock(return_value={}), + _mock_owners_data_for_changed_files(), ), patch.object(pull_request_handler.github_webhook, "minimum_lgtm", 0), patch.object(pull_request_handler.owners_file_handler, "all_pull_request_approvers", []), @@ -643,7 +661,7 @@ async def test_check_if_pr_approved_approved_label(self, pull_request_handler: P patch.object( pull_request_handler.owners_file_handler, "owners_data_for_changed_files", - new=AsyncMock(return_value={}), + _mock_owners_data_for_changed_files(), ), patch.object(pull_request_handler.github_webhook, "minimum_lgtm", 0), patch.object(pull_request_handler.owners_file_handler, "all_pull_request_approvers", []), @@ -660,7 +678,7 @@ async def test_check_if_pr_approved_lgtm_label(self, pull_request_handler: PullR patch.object( pull_request_handler.owners_file_handler, "owners_data_for_changed_files", - new=AsyncMock(return_value={}), + _mock_owners_data_for_changed_files(), ), patch.object(pull_request_handler.github_webhook, "minimum_lgtm", 0), patch.object(pull_request_handler.owners_file_handler, "all_pull_request_approvers", []), @@ -677,7 +695,7 @@ async def test_check_if_pr_approved_changes_requested(self, pull_request_handler patch.object( pull_request_handler.owners_file_handler, "owners_data_for_changed_files", - new=AsyncMock(return_value={}), + _mock_owners_data_for_changed_files(), ), patch.object(pull_request_handler.github_webhook, "minimum_lgtm", 0), patch.object(pull_request_handler.owners_file_handler, "all_pull_request_approvers", []), @@ -696,7 +714,7 @@ async def test_check_if_pr_approved_commented(self, pull_request_handler: PullRe patch.object( pull_request_handler.owners_file_handler, "owners_data_for_changed_files", - new=AsyncMock(return_value={}), + _mock_owners_data_for_changed_files(), ), patch.object(pull_request_handler.github_webhook, "minimum_lgtm", 0), patch.object(pull_request_handler.owners_file_handler, "all_pull_request_approvers", []), @@ -737,22 +755,24 @@ def test_check_labels_for_can_be_merged_not_approved(self, pull_request_handler: result = pull_request_handler._check_labels_for_can_be_merged(labels=["other-label"]) assert result == "" # Empty string means no errors - def test_skip_if_pull_request_already_merged_merged( + @pytest.mark.asyncio + async def test_skip_if_pull_request_already_merged_merged( self, pull_request_handler: PullRequestHandler, mock_pull_request: Mock ) -> None: """Test skipping if pull request is already merged.""" # Patch is_merged as a method that returns True with patch.object(mock_pull_request, "is_merged", new=Mock(return_value=True)): - result = pull_request_handler.skip_if_pull_request_already_merged(pull_request=mock_pull_request) + result = await pull_request_handler.skip_if_pull_request_already_merged(pull_request=mock_pull_request) assert result is True - def test_skip_if_pull_request_already_merged_not_merged( + @pytest.mark.asyncio + async def test_skip_if_pull_request_already_merged_not_merged( self, pull_request_handler: PullRequestHandler, mock_pull_request: Mock ) -> None: """Test skipping if pull request is not merged.""" # Patch is_merged as a method that returns False with patch.object(mock_pull_request, "is_merged", new=Mock(return_value=False)): - result = 
pull_request_handler.skip_if_pull_request_already_merged(pull_request=mock_pull_request) + result = await pull_request_handler.skip_if_pull_request_already_merged(pull_request=mock_pull_request) assert result is False @pytest.mark.asyncio @@ -761,11 +781,300 @@ async def test_delete_remote_tag_for_merged_or_closed_pr_without_tag( ) -> None: """Test deleting remote tag for merged or closed PR without tag.""" mock_pull_request.title = "Test PR" + mock_pull_request.number = 123 with patch.object(pull_request_handler.github_webhook, "build_and_push_container", False): await pull_request_handler.delete_remote_tag_for_merged_or_closed_pr(pull_request=mock_pull_request) + # Verify step logging was called (processing + completed) + assert pull_request_handler.logger.step.call_count >= 2 # Should return early when build_and_push_container is False + @pytest.mark.asyncio + async def test_delete_remote_tag_for_merged_or_closed_pr_failed_deletion( + self, pull_request_handler: PullRequestHandler, mock_pull_request: Mock + ) -> None: + """Test deleting remote tag when deletion fails.""" + mock_pull_request.title = "Test PR" + mock_pull_request.number = 123 + with ( + patch.object(pull_request_handler.github_webhook, "build_and_push_container", True), + patch.object( + pull_request_handler.github_webhook, + "container_repository_and_tag", + return_value="docker.io/org/repo:pr-123", + ), + patch.object(pull_request_handler.github_webhook, "container_repository", "docker.io/org/repo"), + patch.object(pull_request_handler.github_webhook, "container_repository_username", "test"), + patch.object(pull_request_handler.github_webhook, "container_repository_password", "test"), + patch.object( + pull_request_handler.runner_handler, + "run_podman_command", + new=AsyncMock( + side_effect=[(True, "", ""), (True, "tag exists", ""), (False, "out", "err"), (True, "", "")] + ), + ), + ): + await pull_request_handler.delete_remote_tag_for_merged_or_closed_pr(pull_request=mock_pull_request) + # Verify step logging was called (processing + failed) + assert pull_request_handler.logger.step.called + # Verify error was logged + assert pull_request_handler.logger.error.called + + @pytest.mark.asyncio + async def test_delete_remote_tag_for_merged_or_closed_pr_login_failed( + self, pull_request_handler: PullRequestHandler, mock_pull_request: Mock + ) -> None: + """Test deleting remote tag when registry login fails.""" + mock_pull_request.title = "Test PR" + mock_pull_request.number = 123 + with ( + patch.object(pull_request_handler.github_webhook, "build_and_push_container", True), + patch.object( + pull_request_handler.github_webhook, + "container_repository_and_tag", + return_value="docker.io/org/repo:pr-123", + ), + patch.object(pull_request_handler.github_webhook, "container_repository", "docker.io/org/repo"), + patch.object(pull_request_handler.github_webhook, "container_repository_username", "test"), + patch.object(pull_request_handler.github_webhook, "container_repository_password", "test"), + patch.object( + pull_request_handler.runner_handler, + "run_podman_command", + new=AsyncMock(return_value=(False, "login failed", "error")), + ), + patch.object(mock_pull_request, "create_issue_comment", new=Mock()), + ): + await pull_request_handler.delete_remote_tag_for_merged_or_closed_pr(pull_request=mock_pull_request) + # Verify step logging was called (processing + failed) + assert pull_request_handler.logger.step.called + # Verify error was logged + assert pull_request_handler.logger.error.called + + @pytest.mark.asyncio + 
async def test_delete_remote_tag_for_merged_or_closed_pr_ghcr_success( + self, pull_request_handler: PullRequestHandler, mock_pull_request: Mock + ) -> None: + """Test deleting GHCR tag successfully.""" + mock_pull_request.title = "Test PR" + mock_pull_request.number = 123 + mock_requester = Mock() + mock_requester.requestJsonAndCheck = Mock( + side_effect=[ + ({}, [{"id": 1, "metadata": {"container": {"tags": ["pr-123"]}}}]), + None, # DELETE call returns None + ] + ) + with ( + patch.object(pull_request_handler.github_webhook, "build_and_push_container", True), + patch.object( + pull_request_handler.github_webhook, + "container_repository_and_tag", + return_value="ghcr.io/org/repo:pr-123", + ), + patch.object(pull_request_handler.github_webhook, "container_repository", "ghcr.io/org/repo"), + patch.object(pull_request_handler.github_webhook, "github_api", Mock(requester=mock_requester)), + patch.object(pull_request_handler.github_webhook, "token", "test-token"), + patch.object(mock_pull_request, "create_issue_comment", new=Mock()), + ): + await pull_request_handler.delete_remote_tag_for_merged_or_closed_pr(pull_request=mock_pull_request) + assert pull_request_handler.logger.step.called + assert mock_pull_request.create_issue_comment.called + + @pytest.mark.asyncio + async def test_delete_remote_tag_for_merged_or_closed_pr_ghcr_users_scope_fallback( + self, pull_request_handler: PullRequestHandler, mock_pull_request: Mock + ) -> None: + """Test deleting GHCR tag when package is found under /users/{owner} scope (not /orgs/{owner}).""" + mock_pull_request.title = "Test PR" + mock_pull_request.number = 123 + mock_requester = Mock() + # First call to /orgs/{owner}/packages/... returns 404 (not found) + # Second call to /users/{owner}/packages/... returns versions (found) + # Third call is the DELETE operation + org_404_exception = GithubException(404, {}, {}) + mock_requester.requestJsonAndCheck = Mock( + side_effect=[ + org_404_exception, # /orgs/{owner}/packages/... returns 404 + ({}, [{"id": 1, "metadata": {"container": {"tags": ["pr-123"]}}}]), # /users/{owner}/packages/... 
+ None, # DELETE call returns None + ] + ) + with ( + patch.object(pull_request_handler.github_webhook, "build_and_push_container", True), + patch.object( + pull_request_handler.github_webhook, + "container_repository_and_tag", + return_value="ghcr.io/org/repo:pr-123", + ), + patch.object(pull_request_handler.github_webhook, "container_repository", "ghcr.io/org/repo"), + patch.object(pull_request_handler.github_webhook, "github_api", Mock(requester=mock_requester)), + patch.object(pull_request_handler.github_webhook, "token", "test-token"), + patch.object(mock_pull_request, "create_issue_comment", new=Mock()), + ): + await pull_request_handler.delete_remote_tag_for_merged_or_closed_pr(pull_request=mock_pull_request) + # Verify the deletion was successful + assert pull_request_handler.logger.step.called + assert mock_pull_request.create_issue_comment.called + # Verify requestJsonAndCheck was called 3 times (orgs GET, users GET, DELETE) + assert mock_requester.requestJsonAndCheck.call_count == 3 + + @pytest.mark.asyncio + async def test_delete_remote_tag_for_merged_or_closed_pr_ghcr_package_not_found( + self, pull_request_handler: PullRequestHandler, mock_pull_request: Mock + ) -> None: + """Test deleting GHCR tag when package is not found (404).""" + mock_pull_request.title = "Test PR" + mock_pull_request.number = 123 + mock_requester = Mock() + ex = GithubException(404, {}, {}) + mock_requester.requestJsonAndCheck = Mock(side_effect=ex) + with ( + patch.object(pull_request_handler.github_webhook, "build_and_push_container", True), + patch.object( + pull_request_handler.github_webhook, + "container_repository_and_tag", + return_value="ghcr.io/org/repo:pr-123", + ), + patch.object(pull_request_handler.github_webhook, "container_repository", "ghcr.io/org/repo"), + patch.object(pull_request_handler.github_webhook, "github_api", Mock(requester=mock_requester)), + patch.object(pull_request_handler.github_webhook, "token", "test-token"), + ): + await pull_request_handler.delete_remote_tag_for_merged_or_closed_pr(pull_request=mock_pull_request) + assert pull_request_handler.logger.step.called + assert pull_request_handler.logger.warning.called + + @pytest.mark.asyncio + async def test_delete_remote_tag_for_merged_or_closed_pr_ghcr_tag_not_found( + self, pull_request_handler: PullRequestHandler, mock_pull_request: Mock + ) -> None: + """Test deleting GHCR tag when tag is not found in package versions.""" + mock_pull_request.title = "Test PR" + mock_pull_request.number = 123 + mock_requester = Mock() + mock_requester.requestJsonAndCheck = Mock( + return_value=({}, [{"id": 1, "metadata": {"container": {"tags": ["other-tag"]}}}]) + ) + with ( + patch.object(pull_request_handler.github_webhook, "build_and_push_container", True), + patch.object( + pull_request_handler.github_webhook, + "container_repository_and_tag", + return_value="ghcr.io/org/repo:pr-123", + ), + patch.object(pull_request_handler.github_webhook, "container_repository", "ghcr.io/org/repo"), + patch.object(pull_request_handler.github_webhook, "github_api", Mock(requester=mock_requester)), + patch.object(pull_request_handler.github_webhook, "token", "test-token"), + ): + await pull_request_handler.delete_remote_tag_for_merged_or_closed_pr(pull_request=mock_pull_request) + assert pull_request_handler.logger.step.called + assert pull_request_handler.logger.warning.called + + @pytest.mark.asyncio + async def test_delete_remote_tag_for_merged_or_closed_pr_ghcr_api_failure( + self, pull_request_handler: PullRequestHandler, mock_pull_request: 
Mock + ) -> None: + """Test deleting GHCR tag when API call fails.""" + mock_pull_request.title = "Test PR" + mock_pull_request.number = 123 + mock_requester = Mock() + ex = GithubException(500, {}, {}) + mock_requester.requestJsonAndCheck = Mock(side_effect=ex) + with ( + patch.object(pull_request_handler.github_webhook, "build_and_push_container", True), + patch.object( + pull_request_handler.github_webhook, + "container_repository_and_tag", + return_value="ghcr.io/org/repo:pr-123", + ), + patch.object(pull_request_handler.github_webhook, "container_repository", "ghcr.io/org/repo"), + patch.object(pull_request_handler.github_webhook, "github_api", Mock(requester=mock_requester)), + patch.object(pull_request_handler.github_webhook, "token", "test-token"), + ): + await pull_request_handler.delete_remote_tag_for_merged_or_closed_pr(pull_request=mock_pull_request) + assert pull_request_handler.logger.step.called + assert pull_request_handler.logger.exception.called + + @pytest.mark.asyncio + async def test_delete_remote_tag_for_merged_or_closed_pr_ghcr_no_api( + self, pull_request_handler: PullRequestHandler, mock_pull_request: Mock + ) -> None: + """Test deleting GHCR tag when GitHub API is not available.""" + mock_pull_request.title = "Test PR" + mock_pull_request.number = 123 + with ( + patch.object(pull_request_handler.github_webhook, "build_and_push_container", True), + patch.object( + pull_request_handler.github_webhook, + "container_repository_and_tag", + return_value="ghcr.io/org/repo:pr-123", + ), + patch.object(pull_request_handler.github_webhook, "container_repository", "ghcr.io/org/repo"), + patch.object(pull_request_handler.github_webhook, "github_api", None), + patch.object(pull_request_handler.github_webhook, "token", "test-token"), + ): + await pull_request_handler.delete_remote_tag_for_merged_or_closed_pr(pull_request=mock_pull_request) + assert pull_request_handler.logger.step.called + assert pull_request_handler.logger.error.called + + @pytest.mark.asyncio + async def test_delete_remote_tag_for_merged_or_closed_pr_ghcr_invalid_format( + self, pull_request_handler: PullRequestHandler, mock_pull_request: Mock + ) -> None: + """Test deleting GHCR tag with invalid repository format.""" + mock_pull_request.title = "Test PR" + mock_pull_request.number = 123 + mock_requester = Mock() + with ( + patch.object(pull_request_handler.github_webhook, "build_and_push_container", True), + patch.object( + pull_request_handler.github_webhook, + "container_repository_and_tag", + return_value="ghcr.io/invalid:pr-123", + ), + patch.object(pull_request_handler.github_webhook, "container_repository", "ghcr.io/invalid"), + patch.object(pull_request_handler.github_webhook, "github_api", Mock(requester=mock_requester)), + patch.object(pull_request_handler.github_webhook, "token", "test-token"), + ): + # Directly call _delete_ghcr_tag_via_github_api to test invalid format check + await pull_request_handler._delete_ghcr_tag_via_github_api( + pull_request=mock_pull_request, + repository_full_tag="ghcr.io/invalid:pr-123", + pr_tag="pr-123", + ) + assert pull_request_handler.logger.step.called + assert pull_request_handler.logger.error.called + + @pytest.mark.asyncio + async def test_delete_remote_tag_for_merged_or_closed_pr_ghcr_delete_404( + self, pull_request_handler: PullRequestHandler, mock_pull_request: Mock + ) -> None: + """Test deleting GHCR tag when version deletion returns 404 (already deleted).""" + mock_pull_request.title = "Test PR" + mock_pull_request.number = 123 + mock_requester = 
Mock() + ex = GithubException(404, {}, {}) + mock_requester.requestJsonAndCheck = Mock( + side_effect=[ + ({}, [{"id": 1, "metadata": {"container": {"tags": ["pr-123"]}}}]), + ex, # DELETE call returns 404 + ] + ) + with ( + patch.object(pull_request_handler.github_webhook, "build_and_push_container", True), + patch.object( + pull_request_handler.github_webhook, + "container_repository_and_tag", + return_value="ghcr.io/org/repo:pr-123", + ), + patch.object(pull_request_handler.github_webhook, "container_repository", "ghcr.io/org/repo"), + patch.object(pull_request_handler.github_webhook, "github_api", Mock(requester=mock_requester)), + patch.object(pull_request_handler.github_webhook, "token", "test-token"), + patch.object(mock_pull_request, "create_issue_comment", new=Mock()), + ): + await pull_request_handler.delete_remote_tag_for_merged_or_closed_pr(pull_request=mock_pull_request) + assert pull_request_handler.logger.step.called + assert pull_request_handler.logger.warning.called + @pytest.mark.asyncio async def test_close_issue_for_merged_or_closed_pr_without_issue( self, pull_request_handler: PullRequestHandler, mock_pull_request: Mock diff --git a/webhook_server/tests/test_pull_request_owners.py b/webhook_server/tests/test_pull_request_owners.py index d99a80749..f045e075e 100644 --- a/webhook_server/tests/test_pull_request_owners.py +++ b/webhook_server/tests/test_pull_request_owners.py @@ -1,7 +1,7 @@ import pytest import yaml -from webhook_server.libs.pull_request_handler import PullRequestHandler +from webhook_server.libs.handlers.pull_request_handler import PullRequestHandler from webhook_server.tests.conftest import ContentFile, Tree from webhook_server.utils.constants import APPROVED_BY_LABEL_PREFIX @@ -135,7 +135,7 @@ async def test_get_all_repository_approvers_and_reviewers( async def test_owners_data_for_changed_files( changed_files, process_github_webhook, owners_file_handler, all_repository_approvers_and_reviewers ): - owners_data_changed_files_result = await owners_file_handler.owners_data_for_changed_files() + owners_data_changed_files_result = await owners_file_handler.owners_data_for_changed_files owners_data_chaged_files_expected = { "folder5": { "approvers": ["folder5_approver1", "folder5_approver2"], diff --git a/webhook_server/tests/test_pull_request_review_handler.py b/webhook_server/tests/test_pull_request_review_handler.py index 1d25c0489..a977de361 100644 --- a/webhook_server/tests/test_pull_request_review_handler.py +++ b/webhook_server/tests/test_pull_request_review_handler.py @@ -1,10 +1,11 @@ -"""Tests for webhook_server.libs.pull_request_review_handler module.""" +"""Tests for webhook_server.libs.handlers.pull_request_review_handler module.""" -import pytest from unittest.mock import AsyncMock, Mock, patch + +import pytest from github.PullRequest import PullRequest -from webhook_server.libs.pull_request_review_handler import PullRequestReviewHandler +from webhook_server.libs.handlers.pull_request_review_handler import PullRequestReviewHandler from webhook_server.utils.constants import ADD_STR, APPROVE_STR @@ -45,7 +46,9 @@ async def test_process_pull_request_review_webhook_data_submitted_action( """Test processing pull request review webhook data with submitted action.""" mock_pull_request = Mock(spec=PullRequest) - with patch.object(pull_request_review_handler.labels_handler, "manage_reviewed_by_label") as mock_manage_label: + with patch.object( + pull_request_review_handler.labels_handler, "manage_reviewed_by_label", new_callable=AsyncMock + ) as 
mock_manage_label: with patch.object( pull_request_review_handler.labels_handler, "label_by_user_comment" ) as mock_label_comment: @@ -72,7 +75,9 @@ async def test_process_pull_request_review_webhook_data_non_submitted_action( mock_pull_request = Mock(spec=PullRequest) pull_request_review_handler.hook_data["action"] = "edited" - with patch.object(pull_request_review_handler.labels_handler, "manage_reviewed_by_label") as mock_manage_label: + with patch.object( + pull_request_review_handler.labels_handler, "manage_reviewed_by_label", new_callable=AsyncMock + ) as mock_manage_label: with patch.object( pull_request_review_handler.labels_handler, "label_by_user_comment" ) as mock_label_comment: @@ -89,7 +94,9 @@ async def test_process_pull_request_review_webhook_data_no_body( mock_pull_request = Mock(spec=PullRequest) pull_request_review_handler.hook_data["review"]["body"] = None - with patch.object(pull_request_review_handler.labels_handler, "manage_reviewed_by_label") as mock_manage_label: + with patch.object( + pull_request_review_handler.labels_handler, "manage_reviewed_by_label", new_callable=AsyncMock + ) as mock_manage_label: with patch.object( pull_request_review_handler.labels_handler, "label_by_user_comment" ) as mock_label_comment: @@ -111,7 +118,9 @@ async def test_process_pull_request_review_webhook_data_empty_body( mock_pull_request = Mock(spec=PullRequest) pull_request_review_handler.hook_data["review"]["body"] = "" - with patch.object(pull_request_review_handler.labels_handler, "manage_reviewed_by_label") as mock_manage_label: + with patch.object( + pull_request_review_handler.labels_handler, "manage_reviewed_by_label", new_callable=AsyncMock + ) as mock_manage_label: with patch.object( pull_request_review_handler.labels_handler, "label_by_user_comment" ) as mock_label_comment: @@ -133,7 +142,9 @@ async def test_process_pull_request_review_webhook_data_body_without_approve( mock_pull_request = Mock(spec=PullRequest) pull_request_review_handler.hook_data["review"]["body"] = "Good work, but needs some changes" - with patch.object(pull_request_review_handler.labels_handler, "manage_reviewed_by_label") as mock_manage_label: + with patch.object( + pull_request_review_handler.labels_handler, "manage_reviewed_by_label", new_callable=AsyncMock + ) as mock_manage_label: with patch.object( pull_request_review_handler.labels_handler, "label_by_user_comment" ) as mock_label_comment: diff --git a/webhook_server/tests/test_pull_request_size.py b/webhook_server/tests/test_pull_request_size.py index e0728e2da..5fbfd7737 100644 --- a/webhook_server/tests/test_pull_request_size.py +++ b/webhook_server/tests/test_pull_request_size.py @@ -1,6 +1,6 @@ import pytest -from webhook_server.libs.labels_handler import LabelsHandler +from webhook_server.libs.handlers.labels_handler import LabelsHandler from webhook_server.tests.conftest import PullRequest from webhook_server.utils.constants import SIZE_LABEL_PREFIX @@ -19,7 +19,7 @@ ) def test_get_size_thresholds(process_github_webhook, owners_file_handler, additions, deletions, expected_label): pull_request = PullRequest(additions=additions, deletions=deletions) - lables_handler = LabelsHandler(github_webhook=process_github_webhook, owners_file_handler=owners_file_handler) - result = lables_handler.get_size(pull_request=pull_request) + labels_handler = LabelsHandler(github_webhook=process_github_webhook, owners_file_handler=owners_file_handler) + result = labels_handler.get_size(pull_request=pull_request) assert result == 
f"{SIZE_LABEL_PREFIX}{expected_label}" diff --git a/webhook_server/tests/test_push_handler.py b/webhook_server/tests/test_push_handler.py index c928ecc11..b1cdc2005 100644 --- a/webhook_server/tests/test_push_handler.py +++ b/webhook_server/tests/test_push_handler.py @@ -1,10 +1,10 @@ -"""Tests for webhook_server.libs.push_handler module.""" +"""Tests for webhook_server.libs.handlers.push_handler module.""" -from unittest.mock import Mock, patch +from unittest.mock import AsyncMock, Mock, patch import pytest -from webhook_server.libs.push_handler import PushHandler +from webhook_server.libs.handlers.push_handler import PushHandler class TestPushHandler: @@ -24,7 +24,6 @@ def mock_github_webhook(self) -> Mock: mock_webhook.clone_repo_dir = "/tmp/test-repo" mock_webhook.slack_webhook_url = "https://hooks.slack.com/test" mock_webhook.repository_name = "test-repo" - mock_webhook.send_slack_message = Mock() mock_webhook.container_repository_username = "test-user" # Always a string mock_webhook.container_repository_password = "test-password" # Always a string # pragma: allowlist secret mock_webhook.token = "test-token" # Always a string @@ -38,8 +37,8 @@ def push_handler(self, mock_github_webhook: Mock) -> PushHandler: @pytest.mark.asyncio async def test_process_push_webhook_data_with_tag_and_pypi(self, push_handler: PushHandler) -> None: """Test processing push webhook data with tag and pypi enabled.""" - with patch.object(push_handler, "upload_to_pypi") as mock_upload: - with patch.object(push_handler.runner_handler, "run_build_container") as mock_build: + with patch.object(push_handler, "upload_to_pypi", new_callable=AsyncMock) as mock_upload: + with patch.object(push_handler.runner_handler, "run_build_container", new_callable=AsyncMock) as mock_build: await push_handler.process_push_webhook_data() mock_upload.assert_called_once_with(tag_name="v1.0.0") @@ -50,8 +49,8 @@ async def test_process_push_webhook_data_with_tag_no_pypi(self, push_handler: Pu """Test processing push webhook data with tag but no pypi.""" push_handler.github_webhook.pypi = {} # Empty dict instead of None - with patch.object(push_handler, "upload_to_pypi") as mock_upload: - with patch.object(push_handler.runner_handler, "run_build_container") as mock_build: + with patch.object(push_handler, "upload_to_pypi", new_callable=AsyncMock) as mock_upload: + with patch.object(push_handler.runner_handler, "run_build_container", new_callable=AsyncMock) as mock_build: await push_handler.process_push_webhook_data() mock_upload.assert_not_called() @@ -62,8 +61,8 @@ async def test_process_push_webhook_data_with_tag_no_container(self, push_handle """Test processing push webhook data with tag but no container build.""" push_handler.github_webhook.build_and_push_container = False - with patch.object(push_handler, "upload_to_pypi") as mock_upload: - with patch.object(push_handler.runner_handler, "run_build_container") as mock_build: + with patch.object(push_handler, "upload_to_pypi", new_callable=AsyncMock) as mock_upload: + with patch.object(push_handler.runner_handler, "run_build_container", new_callable=AsyncMock) as mock_build: await push_handler.process_push_webhook_data() mock_upload.assert_called_once_with(tag_name="v1.0.0") @@ -74,8 +73,8 @@ async def test_process_push_webhook_data_with_tag_no_container_release(self, pus """Test processing push webhook data with tag but no container release.""" push_handler.github_webhook.container_release = False - with patch.object(push_handler, "upload_to_pypi") as mock_upload: - with 
patch.object(push_handler.runner_handler, "run_build_container") as mock_build: + with patch.object(push_handler, "upload_to_pypi", new_callable=AsyncMock) as mock_upload: + with patch.object(push_handler.runner_handler, "run_build_container", new_callable=AsyncMock) as mock_build: await push_handler.process_push_webhook_data() mock_upload.assert_called_once_with(tag_name="v1.0.0") @@ -86,8 +85,8 @@ async def test_process_push_webhook_data_no_tag(self, push_handler: PushHandler) """Test processing push webhook data without tag.""" push_handler.hook_data["ref"] = "refs/heads/main" - with patch.object(push_handler, "upload_to_pypi") as mock_upload: - with patch.object(push_handler.runner_handler, "run_build_container") as mock_build: + with patch.object(push_handler, "upload_to_pypi", new_callable=AsyncMock) as mock_upload: + with patch.object(push_handler.runner_handler, "run_build_container", new_callable=AsyncMock) as mock_build: await push_handler.process_push_webhook_data() mock_upload.assert_not_called() @@ -98,8 +97,8 @@ async def test_process_push_webhook_data_tag_with_slash(self, push_handler: Push """Test processing push webhook data with tag containing slash.""" push_handler.hook_data["ref"] = "refs/tags/release/v1.0.0" - with patch.object(push_handler, "upload_to_pypi") as mock_upload: - with patch.object(push_handler.runner_handler, "run_build_container") as mock_build: + with patch.object(push_handler, "upload_to_pypi", new_callable=AsyncMock) as mock_upload: + with patch.object(push_handler.runner_handler, "run_build_container", new_callable=AsyncMock) as mock_build: await push_handler.process_push_webhook_data() mock_upload.assert_called_once_with(tag_name="release/v1.0.0") @@ -109,31 +108,34 @@ async def test_process_push_webhook_data_tag_with_slash(self, push_handler: Push async def test_upload_to_pypi_success(self, push_handler: PushHandler) -> None: """Test successful upload to pypi.""" with patch.object(push_handler.runner_handler, "_prepare_cloned_repo_dir") as mock_prepare: - with patch("webhook_server.libs.push_handler.run_command") as mock_run_command: - with patch("webhook_server.libs.push_handler.uuid4") as mock_uuid: - # Mock successful clone - mock_prepare.return_value.__aenter__.return_value = (True, "", "") + with patch( + "webhook_server.libs.handlers.push_handler.run_command", new_callable=AsyncMock + ) as mock_run_command: + with patch("webhook_server.libs.handlers.push_handler.uuid4") as mock_uuid: + with patch("webhook_server.libs.handlers.push_handler.send_slack_message") as mock_slack: + # Mock successful clone + mock_prepare.return_value.__aenter__.return_value = (True, "", "") - # Mock successful build - mock_run_command.side_effect = [ - (True, "", ""), # uv build - (True, "package-1.0.0.tar.gz", ""), # ls command - (True, "", ""), # twine check - (True, "", ""), # twine upload - ] + # Mock successful build + mock_run_command.side_effect = [ + (True, "", ""), # uv build + (True, "package-1.0.0.tar.gz", ""), # ls command + (True, "", ""), # twine check + (True, "", ""), # twine upload + ] - mock_uuid.return_value = "test-uuid" + mock_uuid.return_value = "test-uuid" - await push_handler.upload_to_pypi(tag_name="v1.0.0") + await push_handler.upload_to_pypi(tag_name="v1.0.0") - # Verify clone was called - mock_prepare.assert_called_once() + # Verify clone was called + mock_prepare.assert_called_once() - # Verify build command was called - assert mock_run_command.call_count == 4 + # Verify build command was called + assert mock_run_command.call_count == 4 
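Aside: the test edits in this file all follow the same two rules, sketched minimally below. Patch send_slack_message where push_handler looks it up (the handler module, not notification_utils where it is defined), and pass new_callable=AsyncMock so the stand-in can be awaited. The handler body is elided here; names come from this patch.

from unittest.mock import AsyncMock, patch

# Patch the name at its point of use: push_handler imports send_slack_message,
# so the reference inside the handler module is what must be replaced.
# new_callable=AsyncMock makes the replacement awaitable, matching the awaited calls.
with (
    patch("webhook_server.libs.handlers.push_handler.send_slack_message") as mock_slack,
    patch(
        "webhook_server.libs.handlers.push_handler.run_command",
        new_callable=AsyncMock,
        return_value=(True, "", ""),
    ) as mock_run,
):
    ...  # exercise the handler under test here
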
- # Verify slack message was sent - push_handler.github_webhook.send_slack_message.assert_called_once() + # Verify slack message was sent + mock_slack.assert_called_once() @pytest.mark.asyncio async def test_upload_to_pypi_clone_failure(self, push_handler: PushHandler) -> None: @@ -154,7 +156,9 @@ async def test_upload_to_pypi_clone_failure(self, push_handler: PushHandler) -> async def test_upload_to_pypi_build_failure(self, push_handler: PushHandler) -> None: """Test upload to pypi when build fails.""" with patch.object(push_handler.runner_handler, "_prepare_cloned_repo_dir") as mock_prepare: - with patch("webhook_server.libs.push_handler.run_command") as mock_run_command: + with patch( + "webhook_server.libs.handlers.push_handler.run_command", new_callable=AsyncMock + ) as mock_run_command: with patch.object(push_handler.repository, "create_issue") as mock_create_issue: # Mock successful clone mock_prepare.return_value.__aenter__.return_value = (True, "", "") @@ -173,7 +177,9 @@ async def test_upload_to_pypi_build_failure(self, push_handler: PushHandler) -> async def test_upload_to_pypi_ls_failure(self, push_handler: PushHandler) -> None: """Test upload to pypi when ls command fails.""" with patch.object(push_handler.runner_handler, "_prepare_cloned_repo_dir") as mock_prepare: - with patch("webhook_server.libs.push_handler.run_command") as mock_run_command: + with patch( + "webhook_server.libs.handlers.push_handler.run_command", new_callable=AsyncMock + ) as mock_run_command: with patch.object(push_handler.repository, "create_issue") as mock_create_issue: # Mock successful clone mock_prepare.return_value.__aenter__.return_value = (True, "", "") @@ -195,7 +201,9 @@ async def test_upload_to_pypi_ls_failure(self, push_handler: PushHandler) -> Non async def test_upload_to_pypi_twine_check_failure(self, push_handler: PushHandler) -> None: """Test upload to pypi when twine check fails.""" with patch.object(push_handler.runner_handler, "_prepare_cloned_repo_dir") as mock_prepare: - with patch("webhook_server.libs.push_handler.run_command") as mock_run_command: + with patch( + "webhook_server.libs.handlers.push_handler.run_command", new_callable=AsyncMock + ) as mock_run_command: with patch.object(push_handler.repository, "create_issue") as mock_create_issue: # Mock successful clone mock_prepare.return_value.__aenter__.return_value = (True, "", "") @@ -218,7 +226,9 @@ async def test_upload_to_pypi_twine_check_failure(self, push_handler: PushHandle async def test_upload_to_pypi_twine_upload_failure(self, push_handler: PushHandler) -> None: """Test upload to pypi when twine upload fails.""" with patch.object(push_handler.runner_handler, "_prepare_cloned_repo_dir") as mock_prepare: - with patch("webhook_server.libs.push_handler.run_command") as mock_run_command: + with patch( + "webhook_server.libs.handlers.push_handler.run_command", new_callable=AsyncMock + ) as mock_run_command: with patch.object(push_handler.repository, "create_issue") as mock_create_issue: # Mock successful clone mock_prepare.return_value.__aenter__.return_value = (True, "", "") @@ -244,8 +254,10 @@ async def test_upload_to_pypi_success_no_slack(self, push_handler: PushHandler) push_handler.github_webhook.slack_webhook_url = "" # Empty string instead of None with patch.object(push_handler.runner_handler, "_prepare_cloned_repo_dir") as mock_prepare: - with patch("webhook_server.libs.push_handler.run_command") as mock_run_command: - with patch("webhook_server.libs.push_handler.uuid4") as mock_uuid: + with patch( + 
"webhook_server.libs.handlers.push_handler.run_command", new_callable=AsyncMock + ) as mock_run_command: + with patch("webhook_server.libs.handlers.push_handler.uuid4") as mock_uuid: # Mock successful clone mock_prepare.return_value.__aenter__.return_value = (True, "", "") @@ -259,17 +271,20 @@ async def test_upload_to_pypi_success_no_slack(self, push_handler: PushHandler) mock_uuid.return_value = "test-uuid" - await push_handler.upload_to_pypi(tag_name="v1.0.0") + with patch("webhook_server.libs.handlers.push_handler.send_slack_message") as mock_slack: + await push_handler.upload_to_pypi(tag_name="v1.0.0") - # Verify slack message was not sent - push_handler.github_webhook.send_slack_message.assert_not_called() + # Verify slack message was not sent + mock_slack.assert_not_called() @pytest.mark.asyncio async def test_upload_to_pypi_commands_execution_order(self, push_handler: PushHandler) -> None: """Test that commands are executed in the correct order.""" with patch.object(push_handler.runner_handler, "_prepare_cloned_repo_dir") as mock_prepare: - with patch("webhook_server.libs.push_handler.run_command") as mock_run_command: - with patch("webhook_server.libs.push_handler.uuid4") as mock_uuid: + with patch( + "webhook_server.libs.handlers.push_handler.run_command", new_callable=AsyncMock + ) as mock_run_command: + with patch("webhook_server.libs.handlers.push_handler.uuid4") as mock_uuid: # Mock successful clone mock_prepare.return_value.__aenter__.return_value = (True, "", "") @@ -300,8 +315,10 @@ async def test_upload_to_pypi_commands_execution_order(self, push_handler: PushH async def test_upload_to_pypi_unique_clone_directory(self, push_handler: PushHandler) -> None: """Test that each upload uses a unique clone directory.""" with patch.object(push_handler.runner_handler, "_prepare_cloned_repo_dir") as mock_prepare: - with patch("webhook_server.libs.push_handler.run_command") as mock_run_command: - with patch("webhook_server.libs.push_handler.uuid4") as mock_uuid: + with patch( + "webhook_server.libs.handlers.push_handler.run_command", new_callable=AsyncMock + ) as mock_run_command: + with patch("webhook_server.libs.handlers.push_handler.uuid4") as mock_uuid: # Mock successful clone mock_prepare.return_value.__aenter__.return_value = (True, "", "") @@ -337,36 +354,43 @@ async def test_upload_to_pypi_issue_creation_format(self, push_handler: PushHand mock_create_issue.assert_called_once() call_args = mock_create_issue.call_args - # The title should be the full formatted error text from get_check_run_text - expected_title = "```\nError details\n\nClone failed\n```" + # The title should be sanitized (newlines replaced, backticks removed) + # Original: "```\nError details\n\nClone failed\n```" + # Sanitized: "Error details Clone failed" + expected_title = "Error details Clone failed" assert call_args[1]["title"] == expected_title @pytest.mark.asyncio async def test_upload_to_pypi_slack_message_format(self, push_handler: PushHandler) -> None: """Test that slack messages are sent with proper format.""" with patch.object(push_handler.runner_handler, "_prepare_cloned_repo_dir") as mock_prepare: - with patch("webhook_server.libs.push_handler.run_command") as mock_run_command: - with patch("webhook_server.libs.push_handler.uuid4") as mock_uuid: - # Mock successful clone - mock_prepare.return_value.__aenter__.return_value = (True, "", "") - - # Mock successful build - mock_run_command.side_effect = [ - (True, "", ""), # uv build - (True, "package-1.0.0.tar.gz", ""), # ls command - (True, "", ""), 
# twine check - (True, "", ""), # twine upload - ] - - mock_uuid.return_value = "test-uuid" - - await push_handler.upload_to_pypi(tag_name="v1.0.0") - - # Verify slack message format - push_handler.github_webhook.send_slack_message.assert_called_once() - call_args = push_handler.github_webhook.send_slack_message.call_args - - assert call_args[1]["webhook_url"] == "https://hooks.slack.com/test" - assert "test-repo" in call_args[1]["message"] - assert "v1.0.0" in call_args[1]["message"] - assert "published to PYPI" in call_args[1]["message"] + with patch( + "webhook_server.libs.handlers.push_handler.run_command", new_callable=AsyncMock + ) as mock_run_command: + with patch("webhook_server.libs.handlers.push_handler.uuid4") as mock_uuid: + with patch("webhook_server.libs.handlers.push_handler.send_slack_message") as mock_slack: + # Mock successful clone + mock_prepare.return_value.__aenter__.return_value = (True, "", "") + + # Mock successful build + mock_run_command.side_effect = [ + (True, "", ""), # uv build + (True, "package-1.0.0.tar.gz", ""), # ls command + (True, "", ""), # twine check + (True, "", ""), # twine upload + ] + + mock_uuid.return_value = "test-uuid" + + await push_handler.upload_to_pypi(tag_name="v1.0.0") + + # Verify slack message format + mock_slack.assert_called_once() + call_args = mock_slack.call_args + + assert call_args[1]["webhook_url"] == "https://hooks.slack.com/test" + assert "test-repo" in call_args[1]["message"] + assert "v1.0.0" in call_args[1]["message"] + assert "published to PYPI" in call_args[1]["message"] + assert call_args[1]["logger"] == push_handler.logger + assert call_args[1]["log_prefix"] == push_handler.log_prefix diff --git a/webhook_server/tests/test_runner_handler.py b/webhook_server/tests/test_runner_handler.py index 6572c3038..1bbb83656 100644 --- a/webhook_server/tests/test_runner_handler.py +++ b/webhook_server/tests/test_runner_handler.py @@ -1,9 +1,9 @@ -from typing import Generator +from collections.abc import Generator from unittest.mock import AsyncMock, Mock, patch import pytest -from webhook_server.libs.runner_handler import RunnerHandler +from webhook_server.libs.handlers.runner_handler import RunnerHandler class TestRunnerHandler: @@ -65,7 +65,8 @@ def mock_pull_request(self) -> Mock: @pytest.fixture(autouse=True) def patch_check_run_text(self) -> Generator[None, None, None]: with patch( - "webhook_server.libs.check_run_handler.CheckRunHandler.get_check_run_text", return_value="dummy output" + "webhook_server.libs.handlers.check_run_handler.CheckRunHandler.get_check_run_text", + return_value="dummy output", ): yield @@ -95,7 +96,9 @@ def test_fix_podman_bug(self, mock_rmtree: Mock, runner_handler: RunnerHandler) @pytest.mark.asyncio async def test_run_podman_command_success(self, runner_handler: RunnerHandler) -> None: """Test run_podman_command with successful command.""" - with patch("webhook_server.libs.runner_handler.run_command", new=AsyncMock(return_value=(True, "success", ""))): + with patch( + "webhook_server.libs.handlers.runner_handler.run_command", new=AsyncMock(return_value=(True, "success", "")) + ): rc, out, err = await runner_handler.run_podman_command("podman build .") assert rc is True assert "success" in out # Relaxed assertion @@ -104,7 +107,7 @@ async def test_run_podman_command_success(self, runner_handler: RunnerHandler) - async def test_run_podman_command_podman_bug(self, runner_handler: RunnerHandler) -> None: """Test run_podman_command with podman bug error.""" podman_bug_err = "Error: current system boot 
ID differs from cached boot ID; an unhandled reboot has occurred" - with patch("webhook_server.libs.runner_handler.run_command", new=AsyncMock()) as mock_run: + with patch("webhook_server.libs.handlers.runner_handler.run_command", new=AsyncMock()) as mock_run: mock_run.side_effect = [(False, "output", podman_bug_err), (True, "success after fix", "")] with patch.object(runner_handler, "fix_podman_bug") as mock_fix: rc, out, err = await runner_handler.run_podman_command("podman build .") @@ -114,7 +117,7 @@ async def test_run_podman_command_podman_bug(self, runner_handler: RunnerHandler async def test_run_podman_command_other_error(self, runner_handler: RunnerHandler) -> None: """Test run_podman_command with other error.""" with patch( - "webhook_server.libs.runner_handler.run_command", + "webhook_server.libs.handlers.runner_handler.run_command", new=AsyncMock(return_value=(False, "output", "other error")), ): rc, out, err = await runner_handler.run_podman_command("podman build .") @@ -171,14 +174,18 @@ async def test_run_tox_success(self, runner_handler: RunnerHandler, mock_pull_re with patch.object( runner_handler.check_run_handler, "is_check_run_in_progress", new=AsyncMock(return_value=False) ): - with patch.object(runner_handler.check_run_handler, "set_run_tox_check_in_progress") as mock_set_progress: - with patch.object(runner_handler.check_run_handler, "set_run_tox_check_success") as mock_set_success: + with patch.object( + runner_handler.check_run_handler, "set_run_tox_check_in_progress", new_callable=AsyncMock + ) as mock_set_progress: + with patch.object( + runner_handler.check_run_handler, "set_run_tox_check_success", new_callable=AsyncMock + ) as mock_set_success: with patch.object(runner_handler, "_prepare_cloned_repo_dir") as mock_prepare: mock_prepare.return_value = AsyncMock() mock_prepare.return_value.__aenter__ = AsyncMock(return_value=(True, "", "")) mock_prepare.return_value.__aexit__ = AsyncMock(return_value=None) with patch( - "webhook_server.libs.runner_handler.run_command", + "webhook_server.libs.handlers.runner_handler.run_command", new=AsyncMock(return_value=(True, "success", "")), ): await runner_handler.run_tox(mock_pull_request) @@ -231,7 +238,7 @@ async def test_run_pre_commit_success(self, runner_handler: RunnerHandler, mock_ mock_prepare.return_value.__aenter__ = AsyncMock(return_value=(True, "", "")) mock_prepare.return_value.__aexit__ = AsyncMock(return_value=None) with patch( - "webhook_server.libs.runner_handler.run_command", + "webhook_server.libs.handlers.runner_handler.run_command", new=AsyncMock(return_value=(True, "success", "")), ): await runner_handler.run_pre_commit(mock_pull_request) @@ -347,7 +354,7 @@ async def test_run_install_python_module_success( mock_prepare.return_value.__aenter__ = AsyncMock(return_value=(True, "", "")) mock_prepare.return_value.__aexit__ = AsyncMock(return_value=None) with patch( - "webhook_server.libs.runner_handler.run_command", + "webhook_server.libs.handlers.runner_handler.run_command", new=AsyncMock(return_value=(True, "success", "")), ): await runner_handler.run_install_python_module(mock_pull_request) @@ -481,7 +488,7 @@ async def test_cherry_pick_success(self, runner_handler: RunnerHandler, mock_pul mock_prepare.return_value.__aenter__ = AsyncMock(return_value=(True, "", "")) mock_prepare.return_value.__aexit__ = AsyncMock(return_value=None) with patch( - "webhook_server.libs.runner_handler.run_command", + "webhook_server.libs.handlers.runner_handler.run_command", new=AsyncMock(return_value=(True, "success", "")), 
): with patch("asyncio.to_thread"): @@ -494,7 +501,9 @@ async def test_prepare_cloned_repo_dir_success( self, runner_handler: RunnerHandler, mock_pull_request: Mock ) -> None: """Test _prepare_cloned_repo_dir with successful preparation.""" - with patch("webhook_server.libs.runner_handler.run_command", new=AsyncMock(return_value=(True, "success", ""))): + with patch( + "webhook_server.libs.handlers.runner_handler.run_command", new=AsyncMock(return_value=(True, "success", "")) + ): with patch.object( runner_handler.github_webhook, "get_pull_request", new=AsyncMock(return_value=mock_pull_request) ): @@ -508,7 +517,8 @@ async def test_prepare_cloned_repo_dir_success( async def test_prepare_cloned_repo_dir_clone_failure(self, runner_handler: RunnerHandler) -> None: """Test _prepare_cloned_repo_dir when clone fails.""" with patch( - "webhook_server.libs.runner_handler.run_command", new=AsyncMock(return_value=(False, "output", "error")) + "webhook_server.libs.handlers.runner_handler.run_command", + new=AsyncMock(return_value=(False, "output", "error")), ): async with runner_handler._prepare_cloned_repo_dir("/tmp/test-repo-unique2") as result: success, out, err = result @@ -520,7 +530,9 @@ async def test_prepare_cloned_repo_dir_with_checkout( self, runner_handler: RunnerHandler, mock_pull_request: Mock ) -> None: """Test _prepare_cloned_repo_dir with checkout parameter.""" - with patch("webhook_server.libs.runner_handler.run_command", new=AsyncMock(return_value=(True, "success", ""))): + with patch( + "webhook_server.libs.handlers.runner_handler.run_command", new=AsyncMock(return_value=(True, "success", "")) + ): async with runner_handler._prepare_cloned_repo_dir( "/tmp/test-repo-unique3", mock_pull_request, checkout="feature-branch" ) as result: @@ -532,7 +544,9 @@ async def test_prepare_cloned_repo_dir_with_tag( self, runner_handler: RunnerHandler, mock_pull_request: Mock ) -> None: """Test _prepare_cloned_repo_dir with tag_name parameter.""" - with patch("webhook_server.libs.runner_handler.run_command", new=AsyncMock(return_value=(True, "success", ""))): + with patch( + "webhook_server.libs.handlers.runner_handler.run_command", new=AsyncMock(return_value=(True, "success", "")) + ): async with runner_handler._prepare_cloned_repo_dir( "/tmp/test-repo-unique4", mock_pull_request, tag_name="v1.0.0" ) as result: @@ -544,7 +558,9 @@ async def test_prepare_cloned_repo_dir_merged_pr( self, runner_handler: RunnerHandler, mock_pull_request: Mock ) -> None: """Test _prepare_cloned_repo_dir with merged pull request.""" - with patch("webhook_server.libs.runner_handler.run_command", new=AsyncMock(return_value=(True, "success", ""))): + with patch( + "webhook_server.libs.handlers.runner_handler.run_command", new=AsyncMock(return_value=(True, "success", "")) + ): async with runner_handler._prepare_cloned_repo_dir( "/tmp/test-repo-unique5", mock_pull_request, is_merged=True ) as result: @@ -563,7 +579,8 @@ async def run_command_side_effect(*args, **kwargs): return (True, "ok", "") with patch( - "webhook_server.libs.runner_handler.run_command", new=AsyncMock(side_effect=run_command_side_effect) + "webhook_server.libs.handlers.runner_handler.run_command", + new=AsyncMock(side_effect=run_command_side_effect), ): async with runner_handler._prepare_cloned_repo_dir("/tmp/test-repo-x", mock_pull_request) as result: success, out, err = result @@ -584,7 +601,8 @@ async def run_command_side_effect(*args, **kwargs): return (True, "ok", "") with patch( - "webhook_server.libs.runner_handler.run_command", 
new=AsyncMock(side_effect=run_command_side_effect) + "webhook_server.libs.handlers.runner_handler.run_command", + new=AsyncMock(side_effect=run_command_side_effect), ): async with runner_handler._prepare_cloned_repo_dir("/tmp/test-repo-x", mock_pull_request) as result: success, out, err = result @@ -605,7 +623,8 @@ async def run_command_side_effect(*args, **kwargs): return (True, "ok", "") with patch( - "webhook_server.libs.runner_handler.run_command", new=AsyncMock(side_effect=run_command_side_effect) + "webhook_server.libs.handlers.runner_handler.run_command", + new=AsyncMock(side_effect=run_command_side_effect), ): async with runner_handler._prepare_cloned_repo_dir("/tmp/test-repo-x", mock_pull_request) as result: success, out, err = result @@ -630,7 +649,8 @@ async def run_command_side_effect(*args, **kwargs): return (True, "ok", "") with patch( - "webhook_server.libs.runner_handler.run_command", new=AsyncMock(side_effect=run_command_side_effect) + "webhook_server.libs.handlers.runner_handler.run_command", + new=AsyncMock(side_effect=run_command_side_effect), ): async with runner_handler._prepare_cloned_repo_dir("/tmp/test-repo-x", mock_pull_request) as result: success, out, err = result @@ -672,8 +692,8 @@ async def test_run_build_container_push_failure(self, runner_handler, mock_pull_ with patch.object( runner_handler.github_webhook, "slack_webhook_url", "http://slack" ): - with patch.object( - runner_handler.github_webhook, "send_slack_message" + with patch( + "webhook_server.libs.handlers.runner_handler.send_slack_message" ) as mock_slack: with patch("asyncio.to_thread") as mock_to_thread: # Set set_check=False to avoid early return after build success diff --git a/webhook_server/tests/test_schema_validator.py b/webhook_server/tests/test_schema_validator.py index 4260b0617..eb5353a6b 100644 --- a/webhook_server/tests/test_schema_validator.py +++ b/webhook_server/tests/test_schema_validator.py @@ -8,7 +8,7 @@ import sys from pathlib import Path -from typing import Any, Union +from typing import Any import yaml # type: ignore from simple_logger.logger import get_logger @@ -239,7 +239,7 @@ def _validate_tox_config(self, repo_name: str, tox_config: Any) -> None: self.errors.append(f"Repository '{repo_name}' tox branch '{branch}' must be a string or array") -def validate_config_file(config_path: Union[str, Path]) -> bool: +def validate_config_file(config_path: str | Path) -> bool: """ Validate a configuration file. 
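The Union-to-pipe rewrite in the hunk above recurs throughout this patch; a minimal before/after sketch of the modernization, using a hypothetical helper for illustration:

# Before: typing-module generics and Union
from pathlib import Path
from typing import Generator, Union

def iter_lines_old(path: Union[str, Path]) -> Generator[str, None, None]:
    yield from open(path)

# After: PEP 604 unions and collections.abc, as adopted across this patch
from collections.abc import Generator

def iter_lines_new(path: str | Path) -> Generator[str, None, None]:
    yield from open(path)
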
@@ -250,7 +250,7 @@ def validate_config_file(config_path: Union[str, Path]) -> bool: True if valid, False otherwise """ try: - with open(config_path, "r") as file_handle: + with open(config_path) as file_handle: config_data = yaml.safe_load(file_handle) except Exception as exception: logger = get_logger(name="test_schema_validator") diff --git a/webhook_server/utils/app_utils.py b/webhook_server/utils/app_utils.py index 5786d025c..3c63fbf75 100644 --- a/webhook_server/utils/app_utils.py +++ b/webhook_server/utils/app_utils.py @@ -81,8 +81,8 @@ async def gate_by_allowlist_ips(request: Request, allowed_ips: tuple[ipaddress._ try: src_ip = ipaddress.ip_address(request.client.host) - except ValueError: - raise HTTPException(status.HTTP_400_BAD_REQUEST, "Could not parse client IP address") + except ValueError as e: + raise HTTPException(status.HTTP_400_BAD_REQUEST, "Could not parse client IP address") from e for valid_ip_range in allowed_ips: if src_ip in valid_ip_range: @@ -116,4 +116,4 @@ def parse_datetime_string(datetime_str: str | None, field_name: str) -> datetime raise HTTPException( status_code=400, detail=f"Invalid {field_name} format: {datetime_str}. Expected ISO 8601 format. Error: {str(e)}", - ) + ) from e diff --git a/webhook_server/utils/comment_utils.py b/webhook_server/utils/comment_utils.py new file mode 100644 index 000000000..48ab02ae6 --- /dev/null +++ b/webhook_server/utils/comment_utils.py @@ -0,0 +1,20 @@ +"""Comment formatting utilities.""" + + +def comment_with_details(title: str, body: str) -> str: + """ + Format comment with collapsible details section. + + Args: + title: Summary text shown when collapsed + body: Detailed content shown when expanded + + Returns: + Formatted HTML comment with details/summary tags + """ + return f""" +
<details> +<summary>{title}</summary> + {body} +</details>
+ """ diff --git a/webhook_server/utils/constants.py b/webhook_server/utils/constants.py index ef6f75bb9..78ee56dbd 100644 --- a/webhook_server/utils/constants.py +++ b/webhook_server/utils/constants.py @@ -37,6 +37,7 @@ COMMAND_ADD_ALLOWED_USER_STR: str = "add-allowed-user" COMMAND_AUTOMERGE_STR: str = "automerge" AUTOMERGE_LABEL_STR: str = "automerge" +ROOT_APPROVERS_KEY: str = "root-approvers" # Gitlab colors require a '#' prefix; e.g: # USER_LABELS_DICT: dict[str, str] = { diff --git a/webhook_server/utils/container_utils.py b/webhook_server/utils/container_utils.py new file mode 100644 index 000000000..d928ceef0 --- /dev/null +++ b/webhook_server/utils/container_utils.py @@ -0,0 +1,55 @@ +"""Container build utilities.""" + +from __future__ import annotations + +from logging import Logger + +from github.PullRequest import PullRequest + +from webhook_server.utils.constants import OTHER_MAIN_BRANCH + + +def get_container_repository_and_tag( + container_repository: str, + container_tag: str, + is_merged: bool = False, + tag: str = "", + pull_request: PullRequest | None = None, + logger: Logger | None = None, + log_prefix: str = "", +) -> str | None: + """ + Get container repository and tag for build. + + Args: + container_repository: Base container repository URL + container_tag: Default tag to use + is_merged: Whether PR is merged + tag: Optional explicit tag override + pull_request: Pull request object (PyGithub PullRequest, needed if tag not provided) + logger: Logger instance for debug output + log_prefix: Prefix for log messages + + Returns: + Full container repository:tag string, or None if tag cannot be determined + """ + if not tag: + if not pull_request: + if logger: + logger.error(f"{log_prefix} No pull request provided and no tag specified") + return None + + if is_merged: + pull_request_branch = pull_request.base.ref + tag = pull_request_branch if pull_request_branch not in (OTHER_MAIN_BRANCH, "main") else container_tag + else: + tag = f"pr-{pull_request.number}" + + if tag: + if logger: + logger.debug(f"{log_prefix} container tag is: {tag}") + return f"{container_repository}:{tag}" + + if logger: + logger.error(f"{log_prefix} container tag not found") + return None diff --git a/webhook_server/utils/github_repository_settings.py b/webhook_server/utils/github_repository_settings.py index 6457dfd07..b7cbec793 100644 --- a/webhook_server/utils/github_repository_settings.py +++ b/webhook_server/utils/github_repository_settings.py @@ -1,16 +1,16 @@ -import contextlib import copy import os +from collections.abc import Callable from concurrent.futures import Future, ThreadPoolExecutor, as_completed from copy import deepcopy -from typing import Any, Callable +from typing import Any import github from github import Auth, Github, GithubIntegration from github.Auth import AppAuth from github.Branch import Branch from github.Commit import Commit -from github.GithubException import UnknownObjectException +from github.GithubException import GithubException, UnknownObjectException from github.Label import Label from github.PullRequest import PullRequest from github.Repository import Repository @@ -70,7 +70,8 @@ def set_branch_protection( api_user: str, ) -> bool: LOGGER.info( - f"[API user {api_user}] - Set branch {branch} setting for {repository.name}. enabled checks: {required_status_checks}" + f"[API user {api_user}] - Set branch {branch} setting for {repository.name}. 
" + f"enabled checks: {required_status_checks}" ) branch.edit_protection( strict=strict, @@ -139,15 +140,30 @@ def get_required_status_checks( if data.get(CONVENTIONAL_TITLE_STR): default_status_checks.append(CONVENTIONAL_TITLE_STR) - with contextlib.suppress(Exception): + try: repo.get_contents(".pre-commit-config.yaml") default_status_checks.append("pre-commit.ci - pr") - + except UnknownObjectException: + # 404 is expected if file doesn't exist + pass + except GithubException as ex: + # Handle other GitHub API errors (rate limits, permissions, etc.) + LOGGER.warning(f"Failed to check .pre-commit-config.yaml for {repo.name}: {ex}") + + # Deduplicate status checks while preserving order + seen: set[str] = set() + deduplicated: list[str] = [] + for status_check in default_status_checks: + if status_check not in seen: + seen.add(status_check) + deduplicated.append(status_check) + + # Remove excluded status checks for status_check in exclude_status_checks: - while status_check in default_status_checks: - default_status_checks.remove(status_check) + while status_check in deduplicated: + deduplicated.remove(status_check) - return default_status_checks + return deduplicated def get_user_configures_status_checks(status_checks: dict[str, Any]) -> tuple[list[str], list[str]]: @@ -201,7 +217,12 @@ async def set_repositories_settings(config: Config, apis_dict: dict[str, dict[st LOGGER.info("Login in to docker.io") docker_username: str = docker["username"] docker_password: str = docker["password"] - await run_command(log_prefix="", command=f"podman login -u {docker_username} -p {docker_password} docker.io") + await run_command( + log_prefix="docker-login", + command=f"podman login -u {docker_username} --password-stdin docker.io", + stdin_input=docker_password, + redact_secrets=[docker_username, docker_password], + ) futures = [] with ThreadPoolExecutor() as executor: @@ -255,7 +276,7 @@ def set_repository( LOGGER.warning, ) - futures: list["Future"] = [] + futures: list[Future] = [] with ThreadPoolExecutor() as executor: for branch_name, status_checks in protected_branches.items(): @@ -318,7 +339,7 @@ def set_all_in_progress_check_runs_to_queued(repo_config: Config, apis_dict: dic BUILD_CONTAINER_STR, PRE_COMMIT_STR, ) - futures: list["Future"] = [] + futures: list[Future] = [] with ThreadPoolExecutor() as executor: for repo, data in repo_config.root_data["repositories"].items(): @@ -347,11 +368,20 @@ def set_repository_check_runs_to_queued( api_user: str, ) -> tuple[bool, str, Callable]: def _set_checkrun_queued(_api: Repository, _pull_request: PullRequest) -> None: - last_commit: Commit = list(_pull_request.get_commits())[-1] + # Avoid materializing all commits - use single-pass iteration to find last commit + # This is O(1) memory instead of O(N) for large PRs + last_commit: Commit | None = None + for commit in _pull_request.get_commits(): + last_commit = commit # Assign on each iteration to get final value + if last_commit is None: + LOGGER.error(f"[API user {api_user}] - {repository}: [PR:{_pull_request.number}] No commits found") + return + # Use REST API method directly (this is REST-only code) for check_run in last_commit.get_check_runs(): if check_run.name in check_runs and check_run.status == IN_PROGRESS_STR: LOGGER.warning( - f"[API user {api_user}] - {repository}: [PR:{pull_request.number}] {check_run.name} status is {IN_PROGRESS_STR}, " + f"[API user {api_user}] - {repository}: [PR:{_pull_request.number}] " + f"{check_run.name} status is {IN_PROGRESS_STR}, " f"Setting check run 
{check_run.name} to {QUEUED_STR}" ) _api.create_check_run(name=check_run.name, head_sha=last_commit.sha, status=QUEUED_STR) diff --git a/webhook_server/utils/helpers.py b/webhook_server/utils/helpers.py index 968773a2b..1a72e7d10 100644 --- a/webhook_server/utils/helpers.py +++ b/webhook_server/utils/helpers.py @@ -5,6 +5,7 @@ import json import os import random +import re import shlex import subprocess from concurrent.futures import Future, as_completed @@ -13,6 +14,7 @@ import github from colorama import Fore +from github import GithubException from github.RateLimitOverview import RateLimitOverview from github.Repository import Repository from simple_logger.logger import get_logger @@ -86,21 +88,172 @@ def get_logger_with_params( file_max_bytes=1024 * 1024 * 10, mask_sensitive=mask_sensitive, mask_sensitive_patterns=mask_sensitive_patterns, + console=True, # Enable console output for docker logs with FORCE_COLOR support ) -def extract_key_from_dict(key: Any, _dict: dict[Any, Any]) -> Any: - if isinstance(_dict, dict): - for _key, _val in _dict.items(): - if _key == key: - yield _val - if isinstance(_val, dict): - for result in extract_key_from_dict(key, _val): - yield result - elif isinstance(_val, list): - for _item in _val: - for result in extract_key_from_dict(key, _item): - yield result +def _sanitize_log_value(value: str) -> str: + """Sanitize value for safe inclusion in structured log messages. + + Prevents log injection by removing newlines and escaping brackets. + + Args: + value: Raw value to sanitize + + Returns: + Sanitized value safe for log formatting + """ + # Remove newlines and carriage returns to prevent log injection + sanitized = value.replace("\n", " ").replace("\r", " ") + # Escape brackets to prevent breaking structured log parsing + sanitized = sanitized.replace("[", "\\[").replace("]", "\\]") + return sanitized + + +def format_task_fields(task_id: str | None = None, task_type: str | None = None, task_status: str | None = None) -> str: + """Format task correlation fields for log messages. + + Args: + task_id: Task identifier (e.g., "check_tox", "webhook_processing") + task_type: Task type category (e.g., "ci_check", "webhook_routing") + task_status: Task status (e.g., "started", "completed", "failed") + + Returns: + Formatted string with task fields in brackets, or empty string if no fields provided. + Example: "[task_id=check_tox] [task_type=ci_check] [task_status=started]" + """ + parts = [] + if task_id: + parts.append(f"[task_id={_sanitize_log_value(task_id)}]") + if task_type: + parts.append(f"[task_type={_sanitize_log_value(task_type)}]") + if task_status: + parts.append(f"[task_status={_sanitize_log_value(task_status)}]") + return " ".join(parts) + + +# Global cache for compiled regex patterns +# Cache key: (tuple of secrets, case_insensitive flag) +_REDACT_REGEX_CACHE: dict[tuple[tuple[str, ...], bool], re.Pattern[str]] = {} + + +def _redact_secrets( + text: str, secrets: list[str] | None, case_insensitive: bool = False, mask_sensitive: bool = True +) -> str: + """ + Redact sensitive strings from text for logging using compiled regex for performance. + + Uses regex with escaped patterns for safer matching and better scalability. + For large secret lists or frequent calls, this is significantly faster than + multiple string.replace() operations. 
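+ Example (illustrative doctest; the values are hypothetical and matching is case-sensitive by default):

    >>> _redact_secrets("podman login -p hunter2", ["hunter2"])
    'podman login -p ***REDACTED***'
    >>> _redact_secrets("token abc then abcdef", ["abc", "abcdef"])
    'token ***REDACTED*** then ***REDACTED***'
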
+ + Args: + text: The text to redact secrets from + secrets: List of sensitive strings to redact (empty strings are filtered out) + case_insensitive: Enable case-insensitive matching (default: False for security) + mask_sensitive: Whether to mask sensitive data (default: True). If False, returns text unchanged. + + Returns: + Text with secrets replaced by ***REDACTED*** (if mask_sensitive=True), otherwise unchanged text + + Performance: + - O(n) where n = len(text) instead of O(s*n) where s = len(secrets) + - Compiles single regex pattern from all secrets + - Uses re.escape() to handle special regex characters safely + - Caches compiled regex by (secrets, case_insensitive) to reduce CPU in hot paths + + Security Note: + - Default case-sensitive matching prevents accidental false positives + - Enable case_insensitive only when secrets may vary in case (e.g., base64 tokens) + """ + # Early return if masking is disabled + if not mask_sensitive: + return text + + if not secrets: + return text + + # Filter out empty secrets, deduplicate, and escape special regex characters + # Sort by length descending to prevent substring leaks + # (e.g., if "abc" and "abcdef" are both secrets, match "abcdef" first) + escaped_secrets = sorted( + {re.escape(secret) for secret in secrets if secret}, + key=len, + reverse=True, + ) + if not escaped_secrets: + return text + + # Create cache key from tuple of sorted secrets and case_insensitive flag + cache_key = (tuple(escaped_secrets), case_insensitive) + + # Check cache for existing compiled regex + if cache_key in _REDACT_REGEX_CACHE: + regex = _REDACT_REGEX_CACHE[cache_key] + else: + # Build single regex pattern with non-capturing group: (?:secret1|secret2|secret3) + # Non-capturing group for alternation without word boundaries + # (tokens can appear anywhere in strings, not just as whole words) + # Longer secrets first prevents partial redaction + pattern = f"(?:{'|'.join(escaped_secrets)})" + + # Compile regex with optional case-insensitive flag + flags = re.IGNORECASE if case_insensitive else 0 + regex = re.compile(pattern, flags) + + # Store in cache + _REDACT_REGEX_CACHE[cache_key] = regex + + # Replace all matches with single sub() call - much faster than loop + return regex.sub("***REDACTED***", text) + + +def _truncate_output(text: str, max_length: int = 500) -> str: + """ + Truncate output text for logging to prevent log explosion. + + Args: + text: The text to truncate + max_length: Maximum length before truncation (default: 500) + + Returns: + Truncated text with ellipsis if exceeds max_length + """ + if len(text) <= max_length: + return text + + return f"{text[:max_length]}... [truncated {len(text) - max_length} chars]" + + +def strip_ansi_codes(text: str) -> str: + """ + Remove ANSI escape codes from text. + + ANSI escape codes are special character sequences used for terminal formatting + (colors, bold, underline, etc.) that appear as scrambled characters when displayed + in non-terminal contexts like GitHub check-run details. 
+ + Args: + text: Text potentially containing ANSI escape codes + + Returns: + Clean text with all ANSI escape codes removed + + Examples: + >>> strip_ansi_codes("\\x1b[31mRed text\\x1b[0m") + 'Red text' + >>> strip_ansi_codes("\\x1b[1m\\x1b[32mBold green\\x1b[0m") + 'Bold green' + >>> strip_ansi_codes("No ANSI codes here") + 'No ANSI codes here' + """ + # Comprehensive regex pattern for ANSI escape sequences: + # \x1B = ESC character (can also be \033) + # (?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~]) = matches all ANSI escape sequences: + # - [@-Z\\-_] = single-character sequences (ESC followed by one char) + # - \[[0-?]*[ -/]*[@-~] = CSI sequences (colors, cursor movement, etc.) + ansi_escape = re.compile(r"\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])") + return ansi_escape.sub("", text) def get_github_repo_api(github_app_api: github.Github, repository: int | str) -> Repository: @@ -114,27 +267,50 @@ async def run_command( command: str, log_prefix: str, verify_stderr: bool = False, + redact_secrets: list[str] | None = None, + stdin_input: str | bytes | None = None, + timeout: int | None = None, + mask_sensitive: bool = True, **kwargs: Any, -) -> tuple[bool, Any, Any]: +) -> tuple[bool, str, str]: """ - Run command locally. + Run command locally using create_subprocess_exec (safe from shell injection). Args: - command (str): Command to run + command (str): Command to run (will be split with shlex.split for safety) log_prefix (str): Prefix for log messages verify_stderr (bool, default False): Check command stderr + redact_secrets (list[str], optional): List of sensitive strings to redact from logs only + stdin_input (str | bytes | None, optional): Input to pass to command via stdin (for passwords, etc.) + timeout (int | None, optional): Timeout in seconds for command execution. None means no timeout. + mask_sensitive (bool, default True): Whether to mask sensitive data in logs. If False, logs unredacted output. Returns: - tuple: True, out if command succeeded, False, err otherwise. + tuple[bool, str, str]: (success, stdout, stderr) where stdout and stderr are UNREDACTED strings. + Redaction is ONLY applied to log output, not return values. + Callers may need to parse unredacted output for command results. + + Security: + Uses asyncio.create_subprocess_exec (NOT shell=True) to prevent command injection. + stdin_input is passed via pipe, not command line arguments. + Secrets are redacted in logs but NOT in return values - callers must handle sensitive data. 
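+ Example (a minimal sketch mirroring the podman-login call site elsewhere in this patch; the credentials here are placeholders):

    ok, out, err = await run_command(
        command="podman login -u bot --password-stdin docker.io",
        log_prefix="docker-login",
        stdin_input="s3cr3t",       # delivered over a pipe, never on argv
        redact_secrets=["s3cr3t"],  # masked in logs only; out/err stay unredacted
        timeout=60,
    )
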
""" logger = get_logger_with_params() out_decoded: str = "" err_decoded: str = "" - kwargs["stdout"] = subprocess.PIPE - kwargs["stderr"] = subprocess.PIPE + # Don't override caller-provided pipes - use setdefault to respect provided kwargs + kwargs.setdefault("stdout", subprocess.PIPE) + kwargs.setdefault("stderr", subprocess.PIPE) + + # Set up stdin pipe if input is provided + if stdin_input is not None: + kwargs.setdefault("stdin", subprocess.PIPE) + + # Redact sensitive data from command for logging + logged_command = _redact_secrets(command, redact_secrets, mask_sensitive=mask_sensitive) try: - logger.debug(f"{log_prefix} Running '{command}' command") + logger.debug(f"{log_prefix} Running '{logged_command}' command") command_list = shlex.split(command) sub_process = await asyncio.create_subprocess_exec( @@ -142,13 +318,41 @@ async def run_command( **kwargs, ) - stdout, stderr = await sub_process.communicate() - out_decoded = stdout.decode(errors="ignore") if isinstance(stdout, bytes) else stdout - err_decoded = stderr.decode(errors="ignore") if isinstance(stderr, bytes) else stderr + # Prepare stdin (convert str to bytes if needed) + stdin_bytes = None + if stdin_input is not None: + stdin_bytes = stdin_input.encode("utf-8") if isinstance(stdin_input, str) else stdin_input + + # Execute with optional timeout + try: + if timeout: + stdout, stderr = await asyncio.wait_for(sub_process.communicate(input=stdin_bytes), timeout=timeout) + else: + stdout, stderr = await sub_process.communicate(input=stdin_bytes) + except TimeoutError: + logger.error(f"{log_prefix} Command '{logged_command}' timed out after {timeout}s") + try: + sub_process.kill() + await sub_process.wait() + except Exception: + pass # Process may already be dead + return False, "", f"Command timed out after {timeout}s" + # Ensure we always have strings, never None or bytes + out_decoded = stdout.decode(errors="ignore") if isinstance(stdout, bytes) else (stdout or "") + err_decoded = stderr.decode(errors="ignore") if isinstance(stderr, bytes) else (stderr or "") + + # Redact secrets ONLY for logging, keep original for return value + # Callers may need to parse unredacted output + out_redacted = _redact_secrets(out_decoded, redact_secrets, mask_sensitive=mask_sensitive) + err_redacted = _redact_secrets(err_decoded, redact_secrets, mask_sensitive=mask_sensitive) + + # Truncate output for error messages to prevent log explosion (logging only) + truncated_out = _truncate_output(out_redacted) + truncated_err = _truncate_output(err_redacted) error_msg = ( - f"{log_prefix} Failed to run '{command}'. " - f"rc: {sub_process.returncode}, out: {out_decoded}, error: {err_decoded}" + f"{log_prefix} Failed to run '{logged_command}'. 
" + f"rc: {sub_process.returncode}, out: {truncated_out}, error: {truncated_err}" ) if sub_process.returncode != 0: @@ -162,14 +366,18 @@ async def run_command( return True, out_decoded, err_decoded - except Exception as ex: - logger.error(f"{log_prefix} Failed to run '{command}' command: {ex}") + except asyncio.CancelledError: + logger.debug(f"{log_prefix} Command '{logged_command}' cancelled") + raise + except (OSError, subprocess.SubprocessError, ValueError): + logger.exception(f"{log_prefix} Failed to run '{logged_command}' command") return False, out_decoded, err_decoded def get_apis_and_tokes_from_config(config: Config) -> list[tuple[github.Github, str]]: apis_and_tokens: list[tuple[github.Github, str]] = [] - tokens = config.get_value(value="github-tokens") + # Guard against None tokens from config - default to empty list + tokens = config.get_value(value="github-tokens") or [] for _token in tokens: apis_and_tokens.append((github.Github(auth=github.Auth.Token(_token)), _token)) @@ -193,7 +401,6 @@ def get_api_with_highest_rate_limit(config: Config, repository_name: str = "") - api: github.Github | None = None token: str | None = None _api_user: str = "" - rate_limit: RateLimitOverview | None = None remaining = 0 @@ -205,6 +412,7 @@ def get_api_with_highest_rate_limit(config: Config, repository_name: str = "") - logger.debug(msg) apis_and_tokens = get_apis_and_tokes_from_config(config=config) + logger.debug(f"Checking {len(apis_and_tokens)} API(s) for highest rate limit") for _api, _token in apis_and_tokens: if _api.rate_limiting[-1] == 60: @@ -213,18 +421,18 @@ def get_api_with_highest_rate_limit(config: Config, repository_name: str = "") - try: _api_user = _api.get_user().login - except Exception as ex: + except GithubException as ex: + # This catches RateLimitExceededException as it's a subclass of GithubException logger.warning(f"Failed to get API user for API {_api}, skipping. {ex}") continue _rate_limit = _api.get_rate_limit() + log_rate_limit(rate_limit=_rate_limit, api_user=_api_user) if _rate_limit.rate.remaining > remaining: remaining = _rate_limit.rate.remaining - api, token, _api_user, rate_limit = _api, _token, _api_user, _rate_limit - - if rate_limit: - log_rate_limit(rate_limit=rate_limit, api_user=_api_user) + api, token, _api_user = _api, _token, _api_user + logger.debug(f"API user {_api_user} has higher rate limit ({remaining}), updating selection") if not _api_user or not api or not token: raise NoApiTokenError("Failed to get API with highest rate limit") @@ -237,7 +445,8 @@ def log_rate_limit(rate_limit: RateLimitOverview, api_user: str) -> None: logger = get_logger_with_params() rate_limit_str: str - time_for_limit_reset: int = (rate_limit.rate.reset - datetime.datetime.now(tz=datetime.timezone.utc)).seconds + delta = rate_limit.rate.reset - datetime.datetime.now(tz=datetime.UTC) + time_for_limit_reset = max(int(delta.total_seconds()), 0) below_minimum: bool = rate_limit.rate.remaining < 700 if below_minimum: @@ -252,14 +461,14 @@ def log_rate_limit(rate_limit: RateLimitOverview, api_user: str) -> None: msg = ( f"{Fore.CYAN}[{api_user}] API rate limit:{Fore.RESET} Current {rate_limit_str} of {rate_limit.rate.limit}. 
" f"Reset in {rate_limit.rate.reset} [{datetime.timedelta(seconds=time_for_limit_reset)}] " - f"(UTC time is {datetime.datetime.now(tz=datetime.timezone.utc)})" + f"(UTC time is {datetime.datetime.now(tz=datetime.UTC)})" ) logger.debug(msg) if below_minimum: logger.warning(msg) -def get_future_results(futures: list["Future"]) -> None: +def get_future_results(futures: list[Future]) -> None: """ result must return tuple[bool, str, Callable] when the Callable is Logger function (LOGGER.info, LOGGER.error, etc) """ @@ -357,10 +566,10 @@ def prepare_log_prefix( else: repository_color = repository_name or "" - # Build prefix components - components = [event_type, delivery_id] + # Build prefix components (sanitize to prevent log injection) + components = [_sanitize_log_value(event_type), _sanitize_log_value(delivery_id)] if api_user: - components.append(api_user) + components.append(_sanitize_log_value(api_user)) prefix = f"{repository_color} [{']['.join(components)}]" diff --git a/webhook_server/utils/notification_utils.py b/webhook_server/utils/notification_utils.py new file mode 100644 index 000000000..8334f0a34 --- /dev/null +++ b/webhook_server/utils/notification_utils.py @@ -0,0 +1,35 @@ +"""Notification utilities.""" + +from __future__ import annotations + +import json +from logging import Logger + +import requests + + +def send_slack_message(message: str, webhook_url: str, logger: Logger, log_prefix: str = "") -> None: + """ + Send message to Slack webhook. + + Args: + message: Message text to send + webhook_url: Slack webhook URL + logger: Logger instance + log_prefix: Prefix for log messages + + Raises: + ValueError: If Slack webhook returns error status code + """ + slack_data: dict[str, str] = {"text": message} + logger.info(f"{log_prefix} Sending message to slack: {message}") + response: requests.Response = requests.post( + webhook_url, + data=json.dumps(slack_data), + headers={"Content-Type": "application/json"}, + timeout=10, + ) + if response.status_code != 200: + raise ValueError( + f"Request to slack returned an error {response.status_code} with the following message: {response.text}" + ) diff --git a/webhook_server/utils/webhook.py b/webhook_server/utils/webhook.py index 302141064..f905bc0d8 100644 --- a/webhook_server/utils/webhook.py +++ b/webhook_server/utils/webhook.py @@ -1,5 +1,6 @@ +from collections.abc import Callable from concurrent.futures import ThreadPoolExecutor -from typing import Any, Callable +from typing import Any from github.Hook import Hook diff --git a/webhook_server/web/log_viewer.py b/webhook_server/web/log_viewer.py index d08173c22..19b88dc74 100644 --- a/webhook_server/web/log_viewer.py +++ b/webhook_server/web/log_viewer.py @@ -5,8 +5,10 @@ import logging import os import re +from collections import deque +from collections.abc import Generator, Iterator from pathlib import Path -from typing import Any, Generator, Iterator +from typing import Any from fastapi import HTTPException, WebSocket, WebSocketDisconnect from fastapi.responses import HTMLResponse, StreamingResponse @@ -75,17 +77,14 @@ def get_log_page(self) -> HTMLResponse: HTML response with log viewer interface Raises: - HTTPException: 404 if template not found, 500 for other errors + HTTPException: 500 for other errors """ try: html_content = self._get_log_viewer_html() return HTMLResponse(content=html_content) - except FileNotFoundError: - self.logger.error("Log viewer HTML template not found") - raise HTTPException(status_code=404, detail="Log viewer template not found") except Exception as 
e: - self.logger.error(f"Error serving log viewer page: {e}") - raise HTTPException(status_code=500, detail="Internal server error") + self.logger.exception("Error serving log viewer page") + raise HTTPException(status_code=500, detail="Internal server error") from e def get_log_entries( self, @@ -211,13 +210,13 @@ def get_log_entries( except ValueError as e: self.logger.warning(f"Invalid parameters for log entries request: {e}") - raise HTTPException(status_code=400, detail=str(e)) + raise HTTPException(status_code=400, detail=str(e)) from e except (OSError, PermissionError) as e: - self.logger.error(f"File access error loading log entries: {e}") - raise HTTPException(status_code=500, detail="Error accessing log files") + self.logger.exception("File access error loading log entries") + raise HTTPException(status_code=500, detail="Error accessing log files") from e except Exception as e: - self.logger.error(f"Unexpected error getting log entries: {e}") - raise HTTPException(status_code=500, detail="Internal server error") + self.logger.exception("Unexpected error getting log entries") + raise HTTPException(status_code=500, detail="Internal server error") from e def _entry_matches_filters( self, @@ -322,7 +321,7 @@ def export_logs( end_time, search, ]) - max_entries_to_process = min(limit + 20000, 50000) if has_filters else limit + 1000 + max_entries_to_process = min(limit + 20000, 100000) if has_filters else limit + 1000 for entry in self._stream_log_entries(max_files=25, max_entries=max_entries_to_process): if not self._entry_matches_filters( @@ -353,13 +352,13 @@ def generate() -> Generator[bytes, None, None]: except ValueError as e: if "Result set too large" in str(e): self.logger.warning(f"Export request too large: {e}") - raise HTTPException(status_code=413, detail=str(e)) + raise HTTPException(status_code=413, detail=str(e)) from e else: self.logger.warning(f"Invalid export parameters: {e}") - raise HTTPException(status_code=400, detail=str(e)) + raise HTTPException(status_code=400, detail=str(e)) from e except Exception as e: self.logger.error(f"Error generating export: {e}") - raise HTTPException(status_code=500, detail="Export generation failed") + raise HTTPException(status_code=500, detail="Export generation failed") from e async def handle_websocket( self, @@ -479,13 +478,44 @@ def get_pr_flow_data(self, hook_id: str) -> dict[str, Any]: except ValueError as e: if "No data found" in str(e): self.logger.warning(f"PR flow data not found: {e}") - raise HTTPException(status_code=404, detail=str(e)) + raise HTTPException(status_code=404, detail=str(e)) from e else: self.logger.warning(f"Invalid PR flow hook_id: {e}") - raise HTTPException(status_code=400, detail=str(e)) + raise HTTPException(status_code=400, detail=str(e)) from e except Exception as e: self.logger.error(f"Error getting PR flow data: {e}") - raise HTTPException(status_code=500, detail="Internal server error") + raise HTTPException(status_code=500, detail="Internal server error") from e + + def _build_log_prefix_from_context( + self, + repository: str | None, + event_type: str | None, + hook_id: str | None, + github_user: str | None, + pr_number: int | None, + ) -> str: + """Build log prefix from context variables for structured logging. 
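+ Example (illustrative; the output follows directly from the concatenation below):

    _build_log_prefix_from_context("org/repo", "pull_request", "abc123", "octocat", 42)
    -> "org/repo [pull_request][abc123] [octocat] [PR 42]: "
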
+ + Args: + repository: Repository name + event_type: Event type (e.g., 'pull_request', 'check_run') + hook_id: Hook ID + github_user: GitHub user + pr_number: PR number + + Returns: + Formatted log prefix string + """ + log_prefix_parts = [] + if repository: + log_prefix_parts.append(repository) + if event_type and hook_id: + log_prefix_parts.append(f"[{event_type}][{hook_id}]") + if github_user: + log_prefix_parts.append(f"[{github_user}]") + if pr_number: + log_prefix_parts.append(f"[PR {pr_number}]") + return " ".join(log_prefix_parts) + ": " if log_prefix_parts else "" def get_workflow_steps(self, hook_id: str) -> dict[str, Any]: """Get workflow step timeline data for a specific hook ID. @@ -504,7 +534,9 @@ def get_workflow_steps(self, hook_id: str) -> dict[str, Any]: filtered_entries: list[LogEntry] = [] # Stream entries and filter by hook ID - for entry in self._stream_log_entries(max_files=15, max_entries=10000): + # Increase max_files and max_entries to ensure we capture token spend logs + # Token spend is logged at the end of webhook processing, so we need to read enough entries + for entry in self._stream_log_entries(max_files=25, max_entries=50000): if not self._entry_matches_filters(entry, hook_id=hook_id): continue filtered_entries.append(entry) @@ -518,20 +550,76 @@ def get_workflow_steps(self, hook_id: str) -> dict[str, Any]: if not workflow_steps: raise ValueError(f"No workflow steps found for hook ID: {hook_id}") + # Extract token spend from all entries (not just workflow steps) + # Search in reverse order (newest first) since token spend is logged at the end + token_spend = None + entries_with_token_spend = [e for e in filtered_entries if e.token_spend is not None] + + # Extract context from first entry for structured logging (all entries have same hook_id) + # filtered_entries is guaranteed to be non-empty at this point + context_entry = filtered_entries[0] + repository = context_entry.repository + event_type = context_entry.event_type + github_user = context_entry.github_user + pr_number = context_entry.pr_number + + if entries_with_token_spend: + # Take the most recent token spend entry (should be only one per webhook, but take latest to be safe) + token_spend = entries_with_token_spend[-1].token_spend + # Format log message using prepare_log_prefix format so it's parseable and clickable + log_prefix = self._build_log_prefix_from_context( + repository, event_type, hook_id, github_user, pr_number + ) + self.logger.info( + f"{log_prefix}Found token spend {token_spend} for hook {hook_id} " + f"from {len(entries_with_token_spend)} entries" + ) + else: + # Check if any entries contain "token" or "API calls" in message (for debugging) + entries_with_token_keywords = [ + e for e in filtered_entries if "token" in e.message.lower() or "API calls" in e.message + ] + if entries_with_token_keywords: + # Format log message using prepare_log_prefix format + log_prefix = self._build_log_prefix_from_context( + repository, event_type, hook_id, github_user, pr_number + ) + self.logger.warning( + f"{log_prefix}Found {len(entries_with_token_keywords)} entries with token keywords " + f"for hook {hook_id}, but token_spend is None. 
" + f"Sample: {entries_with_token_keywords[0].message[:150]}" + ) + # Try to extract token spend directly from the message as fallback + for entry in reversed(entries_with_token_keywords): + extracted = self.log_parser.extract_token_spend(entry.message) + if extracted is not None: + token_spend = extracted + # Format log message using prepare_log_prefix format + log_prefix = self._build_log_prefix_from_context( + repository, event_type, hook_id, github_user, pr_number + ) + self.logger.info( + f"{log_prefix}Extracted token spend {token_spend} directly from message " + f"for hook {hook_id}" + ) + break + # Build timeline data timeline_data = self._build_workflow_timeline(workflow_steps, hook_id) + if token_spend is not None: + timeline_data["token_spend"] = token_spend return timeline_data except ValueError as e: if "No data found" in str(e) or "No workflow steps found" in str(e): self.logger.warning(f"Workflow steps not found: {e}") - raise HTTPException(status_code=404, detail=str(e)) + raise HTTPException(status_code=404, detail=str(e)) from e else: self.logger.warning(f"Invalid hook ID: {e}") - raise HTTPException(status_code=400, detail=str(e)) + raise HTTPException(status_code=400, detail=str(e)) from e except Exception as e: self.logger.error(f"Error getting workflow steps: {e}") - raise HTTPException(status_code=500, detail="Internal server error") + raise HTTPException(status_code=500, detail="Internal server error") from e def _build_workflow_timeline(self, workflow_steps: list[LogEntry], hook_id: str) -> dict[str, Any]: """Build timeline data from workflow step entries. @@ -541,7 +629,7 @@ def _build_workflow_timeline(self, workflow_steps: list[LogEntry], hook_id: str) hook_id: The hook ID for this timeline Returns: - Dictionary with timeline data structure + Dictionary with timeline data structure including task correlation fields """ # Sort steps by timestamp sorted_steps = sorted(workflow_steps, key=lambda x: x.timestamp) @@ -564,6 +652,9 @@ def _build_workflow_timeline(self, workflow_steps: list[LogEntry], hook_id: str) "repository": step.repository, "event_type": step.event_type, "pr_number": step.pr_number, + "task_id": step.task_id, + "task_type": step.task_type, + "task_status": step.task_status, }) # Calculate total duration @@ -580,7 +671,7 @@ def _build_workflow_timeline(self, workflow_steps: list[LogEntry], hook_id: str) } def _stream_log_entries( - self, max_files: int = 10, chunk_size: int = 1000, max_entries: int = 50000 + self, max_files: int = 10, _chunk_size: int = 1000, max_entries: int = 50000 ) -> Iterator[LogEntry]: """Stream log entries from configured log files in chunks to reduce memory usage. 
@@ -589,7 +680,7 @@ def _stream_log_entries( Args: max_files: Maximum number of log files to process (newest first) - chunk_size: Number of entries to yield per chunk from each file + _chunk_size: Number of entries to yield per chunk from each file (unused, reserved for future) max_entries: Maximum total entries to yield (safety limit) Yields: @@ -629,37 +720,23 @@ def sort_key(f: Path) -> tuple: break try: - file_entries: list[LogEntry] = [] + remaining_capacity = max_entries - total_yielded + if remaining_capacity <= 0: + break - # Parse file in one go (files are typically reasonable size individually) - with open(log_file, "r", encoding="utf-8") as f: - for line_num, line in enumerate(f, 1): - if total_yielded >= max_entries: - break + buffer: deque[LogEntry] = deque(maxlen=remaining_capacity) + with open(log_file, encoding="utf-8") as f: + for line in f: entry = self.log_parser.parse_log_entry(line) if entry: - file_entries.append(entry) - - # Process in chunks to avoid memory buildup for large files - if len(file_entries) >= chunk_size: - # Sort chunk by timestamp (newest first) and yield - file_entries.sort(key=lambda x: x.timestamp, reverse=True) - for entry in file_entries: - yield entry - total_yielded += 1 - if total_yielded >= max_entries: - break - file_entries.clear() # Free memory - - # Yield remaining entries from this file - if file_entries and total_yielded < max_entries: - file_entries.sort(key=lambda x: x.timestamp, reverse=True) - for entry in file_entries: - if total_yielded >= max_entries: - break - yield entry - total_yielded += 1 + buffer.append(entry) + + for entry in reversed(buffer): + if total_yielded >= max_entries: + break + yield entry + total_yielded += 1 self.logger.debug(f"Streamed entries from {log_file.name}, total so far: {total_yielded}") @@ -703,13 +780,13 @@ def _get_log_viewer_html(self) -> str: template_path = Path(__file__).parent / "templates" / "log_viewer.html" try: - with open(template_path, "r", encoding="utf-8") as f: + with open(template_path, encoding="utf-8") as f: return f.read() except FileNotFoundError: - self.logger.error(f"Log viewer template not found at {template_path}") + self.logger.exception(f"Log viewer template not found at {template_path}") return self._get_fallback_html() - except IOError as e: - self.logger.error(f"Failed to read log viewer template: {e}") + except OSError: + self.logger.exception("Failed to read log viewer template") return self._get_fallback_html() def _get_fallback_html(self) -> str: diff --git a/webhook_server/web/static/css/log_viewer.css b/webhook_server/web/static/css/log_viewer.css index d596c7d89..7a550d1b6 100644 --- a/webhook_server/web/static/css/log_viewer.css +++ b/webhook_server/web/static/css/log_viewer.css @@ -3,6 +3,7 @@ --bg-color: #f5f5f5; --container-bg: #ffffff; --text-color: #333333; + --text-secondary: #666666; --border-color: #dddddd; --input-bg: #ffffff; --input-border: #dddddd; @@ -15,12 +16,16 @@ --status-disconnected-text: #721c24; --status-disconnected-border: #f5c6cb; --log-entry-border: #eeeeee; + /* Log level colors */ --log-info-bg: #d4f8d4; --log-error-bg: #ffd6d6; --log-warning-bg: #fff3cd; --log-debug-bg: #f8f9fa; --log-step-bg: #e3f2fd; --log-success-bg: #d1f2d1; + /* Level badge colors */ + --level-info-bg: #d1ecf1; + --level-info-border: #17a2b8; --tag-bg: #e9ecef; --timestamp-color: #666666; } @@ -30,6 +35,7 @@ --bg-color: #1a1a1a; --container-bg: #2d2d2d; --text-color: #e0e0e0; + --text-secondary: #999999; --border-color: #404040; --input-bg: #3d3d3d; 
--input-border: #555555; @@ -42,12 +48,16 @@ --status-disconnected-text: #f8d7da; --status-disconnected-border: #f5c6cb; --log-entry-border: #404040; + /* Log level colors */ --log-info-bg: #1e4a1e; --log-error-bg: #5a1e1e; --log-warning-bg: #5a4a1e; --log-debug-bg: #2a2a2a; --log-step-bg: #1a237e; --log-success-bg: #1e4a1e; + /* Level badge colors */ + --level-info-bg: #0c4a5a; + --level-info-border: #3ebdcc; --tag-bg: #4a4a4a; --timestamp-color: #888888; } @@ -58,7 +68,9 @@ body { padding: 20px; background-color: var(--bg-color); color: var(--text-color); - transition: background-color 0.3s ease, color 0.3s ease; + transition: + background-color 0.3s ease, + color 0.3s ease; } .container { max-width: 95vw; @@ -66,7 +78,7 @@ body { background: var(--container-bg); padding: 20px; border-radius: 8px; - box-shadow: 0 2px 4px rgba(0,0,0,0.1); + box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1); transition: background-color 0.3s ease; } .header { @@ -77,7 +89,9 @@ body { justify-content: space-between; align-items: center; } -.header h1 { margin: 0; } +.header h1 { + margin: 0; +} .theme-toggle { background: var(--button-bg); color: white; @@ -87,19 +101,41 @@ body { cursor: pointer; transition: background-color 0.3s ease; } -.theme-toggle:hover { background: var(--button-hover); } -.filters { display: grid; grid-template-columns: repeat(auto-fit, minmax(180px, 1fr)); gap: 10px; margin-bottom: 20px; } -.filter-group { display: flex; flex-direction: column; } -.filter-group label { font-weight: bold; margin-bottom: 3px; font-size: 14px; color: var(--text-color); } -.filter-group input, .filter-group select { +.theme-toggle:hover { + background: var(--button-hover); +} +.filters { + display: grid; + grid-template-columns: repeat(auto-fit, minmax(180px, 1fr)); + gap: 10px; + margin-bottom: 20px; +} +.filter-group { + display: flex; + flex-direction: column; +} +.filter-group label { + font-weight: bold; + margin-bottom: 3px; + font-size: 14px; + color: var(--text-color); +} +.filter-group input, +.filter-group select { padding: 8px; border: 1px solid var(--input-border); border-radius: 4px; background: var(--input-bg); color: var(--text-color); - transition: background-color 0.3s ease, border-color 0.3s ease; + transition: + background-color 0.3s ease, + border-color 0.3s ease; +} +.log-entries { + border: 1px solid var(--border-color); + border-radius: 4px; + min-height: 200px; } -.log-entries { border: 1px solid var(--border-color); border-radius: 4px; min-height: 200px; } /* Loading skeleton styles */ .loading-skeleton { @@ -114,14 +150,27 @@ body { height: 14px; margin: 4px 0; border-radius: 3px; - background: linear-gradient(90deg, var(--border-color) 25%, var(--input-bg) 50%, var(--border-color) 75%); + background: linear-gradient( + 90deg, + var(--border-color) 25%, + var(--input-bg) 50%, + var(--border-color) 75% + ); background-size: 200% 100%; animation: shimmer 1.5s infinite; } -.skeleton-timestamp { width: 20%; } -.skeleton-level { width: 10%; } -.skeleton-message { width: 60%; } -.skeleton-meta { width: 30%; } +.skeleton-timestamp { + width: 20%; +} +.skeleton-level { + width: 10%; +} +.skeleton-message { + width: 60%; +} +.skeleton-meta { + width: 30%; +} .loading-text { text-align: center; color: var(--timestamp-color); @@ -164,12 +213,20 @@ body { /* Animations */ @keyframes pulse { - 0% { opacity: 1; } - 100% { opacity: 0.6; } + 0% { + opacity: 1; + } + 100% { + opacity: 0.6; + } } @keyframes shimmer { - 0% { background-position: -200% 0; } - 100% { background-position: 200% 0; } + 0% { + 
background-position: -200% 0; + } + 100% { + background-position: 200% 0; + } } /* Timeline styles */ @@ -329,7 +386,7 @@ body { border-radius: 4px; padding: 8px; font-size: 12px; - box-shadow: 0 2px 8px rgba(0,0,0,0.1); + box-shadow: 0 2px 8px rgba(0, 0, 0, 0.1); z-index: 1000; pointer-events: none; display: none; @@ -342,17 +399,40 @@ body { font-size: 14px; transition: background-color 0.3s ease; } -.log-entry:last-child { border-bottom: none; } -.log-entry.INFO { background-color: var(--log-info-bg); } -.log-entry.ERROR { background-color: var(--log-error-bg); } -.log-entry.WARNING { background-color: var(--log-warning-bg); } -.log-entry.DEBUG { background-color: var(--log-debug-bg); } -.log-entry.STEP { background-color: var(--log-step-bg); } -.log-entry.SUCCESS { background-color: var(--log-success-bg); } -.timestamp { color: var(--timestamp-color); } -.level { font-weight: bold; } -.message { margin-left: 10px; } -.hook-id, .pr-number, .repository, .user { +.log-entry:last-child { + border-bottom: none; +} +.log-entry.INFO { + background-color: var(--log-info-bg); +} +.log-entry.ERROR { + background-color: var(--log-error-bg); +} +.log-entry.WARNING { + background-color: var(--log-warning-bg); +} +.log-entry.DEBUG { + background-color: var(--log-debug-bg); +} +.log-entry.STEP { + background-color: var(--log-step-bg); +} +.log-entry.SUCCESS { + background-color: var(--log-success-bg); +} +.timestamp { + color: var(--timestamp-color); +} +.level { + font-weight: bold; +} +.message { + margin-left: 10px; +} +.hook-id, +.pr-number, +.repository, +.user { margin-left: 10px; padding: 2px 6px; background-color: var(--tag-bg); @@ -360,7 +440,9 @@ body { font-size: 12px; transition: background-color 0.3s ease; } -.controls { margin-bottom: 20px; } +.controls { + margin-bottom: 20px; +} .btn { padding: 10px 20px; background-color: var(--button-bg); @@ -371,8 +453,29 @@ body { margin-right: 10px; transition: background-color 0.3s ease; } -.btn:hover { background-color: var(--button-hover); } -.status { padding: 10px; margin-bottom: 20px; border-radius: 4px; } +.btn:hover { + background-color: var(--button-hover); +} +.btn-secondary { + padding: 8px 16px; + background-color: transparent; + color: var(--button-bg); + border: 1px solid var(--button-bg); + border-radius: 4px; + cursor: pointer; + transition: + background-color 0.3s ease, + color 0.3s ease; +} +.btn-secondary:hover { + background-color: var(--button-bg); + color: white; +} +.status { + padding: 10px; + margin-bottom: 20px; + border-radius: 4px; +} .status.connected { background-color: var(--status-connected-bg); color: var(--status-connected-text); @@ -401,11 +504,491 @@ body { align-items: center; } +/* Flow Modal Styles */ +.modal { + position: fixed; + z-index: 1000; + left: 0; + top: 0; + width: 100%; + height: 100%; + background-color: rgba(0, 0, 0, 0.7); + display: flex; + align-items: center; + justify-content: center; + animation: fadeIn 0.3s ease; +} + +@keyframes fadeIn { + from { + opacity: 0; + } + to { + opacity: 1; + } +} + +.modal-content { + background-color: var(--container-bg); + border-radius: 12px; + box-shadow: 0 8px 32px rgba(0, 0, 0, 0.3); + width: 90%; + max-width: 800px; + max-height: 90vh; + display: flex; + flex-direction: column; + animation: slideIn 0.3s ease; +} + +@keyframes slideIn { + from { + transform: translateY(-50px); + opacity: 0; + } + to { + transform: translateY(0); + opacity: 1; + } +} + +.modal-header { + display: flex; + justify-content: space-between; + align-items: center; + padding: 
20px 24px; + border-bottom: 2px solid var(--border-color); +} + +.modal-header h2 { + margin: 0; + font-size: 24px; + color: var(--text-color); +} + +.modal-close { + background: none; + border: none; + font-size: 32px; + color: var(--text-secondary); + cursor: pointer; + padding: 0; + width: 40px; + height: 40px; + display: flex; + align-items: center; + justify-content: center; + border-radius: 50%; + transition: all 0.2s ease; +} + +.modal-close:hover { + background-color: rgba(255, 0, 0, 0.1); + color: #ff4444; +} + +.modal-body { + padding: 24px; + overflow-y: auto; + flex: 1; +} + +.flow-summary { + background: var(--level-info-bg); + border-left: 4px solid var(--level-info-border); + padding: 16px; + border-radius: 8px; + margin-bottom: 24px; +} + +.flow-summary h3 { + margin: 0 0 12px 0; + font-size: 18px; + color: var(--level-info-border); +} + +.flow-summary-grid { + display: grid; + grid-template-columns: repeat(auto-fit, minmax(200px, 1fr)); + gap: 16px; + margin-top: 12px; +} + +.flow-summary-item { + display: flex; + flex-direction: column; + gap: 4px; +} + +.flow-summary-label { + font-size: 12px; + color: var(--text-secondary); + text-transform: uppercase; + font-weight: 600; +} + +.flow-summary-value { + font-size: 18px; + font-weight: 700; + color: var(--text-color); +} + +.flow-visualization { + position: relative; +} + +.flow-step-container { + position: relative; + margin-bottom: 8px; +} + +.flow-step-container:not(:last-child)::before { + content: ""; + position: absolute; + left: 19px; + top: 40px; + bottom: 0; + width: 2px; + background: var(--border-color); + z-index: 0; +} + +.flow-step { + display: flex; + gap: 16px; + position: relative; +} + +.flow-step-number { + flex-shrink: 0; + width: 40px; + height: 40px; + border-radius: 50%; + background: var(--level-info-border); + color: white; + display: flex; + align-items: center; + justify-content: center; + font-weight: 700; + font-size: 16px; + z-index: 1; + position: relative; +} + +.flow-step.success .flow-step-number { + background: #28a745; +} + +.flow-step.error .flow-step-number { + background: #dc3545; +} + +.flow-step.warning .flow-step-number { + background: #ffc107; +} + +.flow-step-content { + flex: 1; + background: var(--container-bg); + border: 1px solid var(--border-color); + border-radius: 8px; + padding: 12px 16px; + transition: all 0.2s ease; +} + +.flow-step-content:hover { + box-shadow: 0 4px 12px rgba(0, 0, 0, 0.1); + border-color: var(--level-info-border); + transform: translateX(4px); +} + +.flow-step-title { + font-weight: 600; + color: var(--text-color); + margin-bottom: 4px; + font-size: 15px; +} + +.flow-step-time { + font-size: 12px; + color: var(--text-secondary); + display: flex; + gap: 12px; + margin-top: 4px; +} + +.flow-step-duration { + font-weight: 600; + color: var(--level-info-border); +} + +.flow-success { + background: rgba(40, 167, 69, 0.1); + border-color: #28a745; + padding: 16px; + border-radius: 8px; + text-align: center; + margin-top: 24px; +} + +.flow-success h3 { + color: #28a745; + margin: 0; + font-size: 18px; +} + +.flow-error { + background: rgba(220, 53, 69, 0.1); + border: 1px solid #dc3545; + border-left: 4px solid #dc3545; + padding: 16px; + border-radius: 8px; + margin-top: 24px; +} + +.flow-error h3 { + color: #dc3545; + margin: 0 0 8px 0; + font-size: 16px; +} + +.flow-error-message { + font-size: 14px; + color: var(--text-color); + font-family: monospace; + background: rgba(0, 0, 0, 0.2); + padding: 8px; + border-radius: 4px; + margin-top: 8px; +} + /* 
Responsive adjustments */ @media (max-width: 768px) { - .filters { grid-template-columns: repeat(auto-fit, minmax(150px, 1fr)); gap: 8px; } - .filter-group label { font-size: 13px; } - .filter-group input, .filter-group select { padding: 6px; font-size: 14px; } - .controls { display: flex; flex-wrap: wrap; gap: 8px; } - .btn { padding: 8px 16px; font-size: 14px; } + .filters { + grid-template-columns: repeat(auto-fit, minmax(150px, 1fr)); + gap: 8px; + } + .filter-group label { + font-size: 13px; + } + .filter-group input, + .filter-group select { + padding: 6px; + font-size: 14px; + } + .controls { + display: flex; + flex-wrap: wrap; + gap: 8px; + } + .btn { + padding: 8px 16px; + font-size: 14px; + } + + .modal-content { + width: 95%; + max-height: 95vh; + } + + .modal-header { + padding: 16px; + } + + .modal-body { + padding: 16px; + } + + .flow-summary-grid { + grid-template-columns: 1fr; + } +} + +.step-logs-container { + margin-top: 12px; + margin-left: 56px; + margin-bottom: 12px; + max-height: 300px; + overflow-y: auto; + background: var(--log-debug-bg); + border: 1px solid var(--border-color); + border-left: 3px solid var(--level-info-border); + border-radius: 4px; + padding: 12px; +} + +.step-logs-container .log-entry { + padding: 8px; + margin-bottom: 8px; + border-bottom: 1px solid var(--log-entry-border); + font-family: monospace; + font-size: 13px; +} + +.step-logs-container .log-entry:last-child { + border-bottom: none; + margin-bottom: 0; +} + +.hook-id-link { + cursor: pointer; + color: var(--button-bg); + text-decoration: underline; + font-weight: bold; +} + +.hook-id-link:hover { + color: var(--button-hover); +} + +.pr-number-link { + cursor: pointer; + color: var(--button-bg); + text-decoration: underline; + font-weight: bold; +} + +.pr-number-link:hover { + color: var(--button-hover); +} + +/* PR Modal Styles */ +.pr-summary { + background: var(--level-info-bg); + border-left: 4px solid var(--level-info-border); + padding: 16px; + border-radius: 8px; + margin-bottom: 24px; +} + +.pr-summary h3 { + margin: 0 0 12px 0; + font-size: 18px; + color: var(--level-info-border); +} + +.pr-hook-list { + display: flex; + flex-direction: column; + gap: 12px; +} + +.pr-hook-item { + display: flex; + align-items: center; + gap: 12px; + padding: 12px 16px; + background: var(--container-bg); + border: 1px solid var(--border-color); + border-radius: 8px; + cursor: pointer; + transition: all 0.2s ease; +} + +.pr-hook-item:hover { + box-shadow: 0 4px 12px rgba(0, 0, 0, 0.1); + border-color: var(--button-bg); + transform: translateX(4px); +} + +.pr-hook-icon { + font-size: 20px; + color: var(--button-bg); +} + +.pr-hook-id { + flex: 1; + font-family: monospace; + font-size: 14px; + color: var(--text-color); + font-weight: 600; +} + +/* Task Group Styles */ +.task-group { + margin-bottom: 16px; + border: 1px solid var(--border-color); + border-radius: 8px; + overflow: hidden; + background: var(--container-bg); +} + +.task-group-header { + display: flex; + align-items: center; + gap: 12px; + padding: 12px 16px; + background: var(--log-debug-bg); + border-bottom: 1px solid var(--border-color); + cursor: pointer; + transition: all 0.2s ease; + user-select: none; +} + +.task-group-header:hover { + background: var(--log-entry-border); +} + +.task-group-arrow { + font-size: 14px; + transition: transform 0.3s ease; + display: inline-block; + width: 16px; + text-align: center; +} + +.task-group-arrow.expanded { + transform: rotate(90deg); +} + +.task-group-arrow.collapsed { + transform: 
rotate(0deg); +} + +.task-group-status { + font-size: 18px; + font-weight: bold; + width: 24px; + text-align: center; +} + +.task-group-success { + color: #28a745; +} + +.task-group-error { + color: #dc3545; +} + +.task-group-in_progress { + color: #007bff; +} + +.task-group-title { + flex: 1; + font-weight: 600; + font-size: 15px; + color: var(--text-color); +} + +.task-group-duration { + font-size: 13px; + color: var(--timestamp-color); + font-weight: 600; + padding: 4px 8px; + background: var(--tag-bg); + border-radius: 4px; +} + +.task-group-steps { + padding: 8px; + background: var(--container-bg); +} + +.task-group-steps .flow-step-container.nested { + margin-left: 20px; + position: relative; +} + +.task-group-steps .flow-step-container.nested::before { + left: -1px; } diff --git a/webhook_server/web/static/js/log_viewer.js b/webhook_server/web/static/js/log_viewer.js index fdcaaaed2..679c45ffa 100644 --- a/webhook_server/web/static/js/log_viewer.js +++ b/webhook_server/web/static/js/log_viewer.js @@ -1,16 +1,23 @@ let ws = null; let logEntries = []; +// Configuration constants +const CONFIG = { + // Maximum number of entries to fetch when loading PR details + // This prevents performance issues with very large datasets + PR_FETCH_LIMIT: 10000, +}; + function updateConnectionStatus(connected) { - const status = document.getElementById('connectionStatus'); - const statusText = document.getElementById('statusText'); + const status = document.getElementById("connectionStatus"); + const statusText = document.getElementById("statusText"); if (connected) { - status.className = 'status connected'; - statusText.textContent = 'Connected - Real-time updates active'; + status.className = "status connected"; + statusText.textContent = "Connected - Real-time updates active"; } else { - status.className = 'status disconnected'; - statusText.textContent = 'Disconnected - Real-time updates inactive'; + status.className = "status disconnected"; + statusText.textContent = "Disconnected - Real-time updates inactive"; } } @@ -19,46 +26,48 @@ function connectWebSocket() { ws.close(); } - const protocol = window.location.protocol === 'https:' ? 'wss:' : 'ws:'; + const protocol = window.location.protocol === "https:" ? "wss:" : "ws:"; // Build WebSocket URL with current filter parameters const filters = new URLSearchParams(); - const hookId = document.getElementById('hookIdFilter').value.trim(); - const prNumber = document.getElementById('prNumberFilter').value.trim(); - const repository = document.getElementById('repositoryFilter').value.trim(); - const user = document.getElementById('userFilter').value.trim(); - const level = document.getElementById('levelFilter').value; - const search = document.getElementById('searchFilter').value.trim(); - - if (hookId) filters.append('hook_id', hookId); - if (prNumber) filters.append('pr_number', prNumber); - if (repository) filters.append('repository', repository); - if (user) filters.append('github_user', user); - if (level) filters.append('level', level); - if (search) filters.append('search', search); - - const wsUrl = `${protocol}//${window.location.host}/logs/ws${filters.toString() ? '?' 
+ filters.toString() : ''}`; + const hookId = document.getElementById("hookIdFilter").value.trim(); + const prNumber = document.getElementById("prNumberFilter").value.trim(); + const repository = document.getElementById("repositoryFilter").value.trim(); + const user = document.getElementById("userFilter").value.trim(); + const level = document.getElementById("levelFilter").value; + const search = document.getElementById("searchFilter").value.trim(); + + if (hookId) filters.append("hook_id", hookId); + if (prNumber) filters.append("pr_number", prNumber); + if (repository) filters.append("repository", repository); + if (user) filters.append("github_user", user); + if (level) filters.append("level", level); + if (search) filters.append("search", search); + + const wsUrl = `${protocol}//${window.location.host}/logs/ws${ + filters.toString() ? "?" + filters.toString() : "" + }`; ws = new WebSocket(wsUrl); - ws.onopen = function() { + ws.onopen = function () { updateConnectionStatus(true); - console.log('WebSocket connected'); + console.log("WebSocket connected"); }; - ws.onmessage = function(event) { + ws.onmessage = function (event) { const logEntry = JSON.parse(event.data); addLogEntry(logEntry); }; - ws.onclose = function() { + ws.onclose = function () { updateConnectionStatus(false); - console.log('WebSocket disconnected'); + console.log("WebSocket disconnected"); }; - ws.onerror = function(error) { + ws.onerror = function (error) { updateConnectionStatus(false); - console.error('WebSocket error:', error); + console.error("WebSocket error:", error); }; } @@ -75,7 +84,8 @@ function disconnectWebSocket() { // Helper function to apply memory bounding to logEntries array function applyMemoryBounding() { - const maxEntries = parseInt(document.getElementById('limitFilter').value); + const parsed = parseInt(document.getElementById("limitFilter").value); + const maxEntries = Number.isFinite(parsed) ? parsed : 1000; if (logEntries.length > maxEntries) { // Remove oldest entries to keep array size bounded logEntries = logEntries.slice(0, maxEntries); @@ -96,13 +106,13 @@ function addLogEntry(entry) { } function updateDisplayedCount() { - const displayedCount = document.getElementById('displayedCount'); + const displayedCount = document.getElementById("displayedCount"); const filteredEntries = filterLogEntries(logEntries); displayedCount.textContent = filteredEntries.length; } function renderLogEntriesOptimized() { - const container = document.getElementById('logEntries'); + const container = document.getElementById("logEntries"); const filteredEntries = filterLogEntries(logEntries); // Always use direct rendering to prevent any scrollbar flashing @@ -114,7 +124,7 @@ function renderLogEntriesDirect(container, entries) { // Use DocumentFragment for efficient DOM manipulation to minimize reflows const fragment = document.createDocumentFragment(); - entries.forEach(entry => { + entries.forEach((entry) => { const entryElement = createLogEntryElement(entry); fragment.appendChild(entryElement); }); @@ -131,32 +141,96 @@ function renderLogEntriesDirect(container, entries) { // All rendering now uses direct DOM manipulation only function createLogEntryElement(entry) { - const div = document.createElement('div'); + const div = document.createElement("div"); // Whitelist of allowed log levels to prevent class-name injection - const allowedLevels = ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'STEP', 'SUCCESS']; - const safeLevel = allowedLevels.includes(entry.level) ? 
entry.level : 'INFO'; // Default fallback + const allowedLevels = [ + "DEBUG", + "INFO", + "WARNING", + "ERROR", + "STEP", + "SUCCESS", + ]; + const safeLevel = allowedLevels.includes(entry.level) ? entry.level : "INFO"; // Default fallback div.className = `log-entry ${safeLevel}`; - // Use efficient string template - div.innerHTML = ` - <span class="timestamp">${new Date(entry.timestamp).toLocaleString()}</span> - <span class="level">[${entry.level}]</span> - <span class="message">${escapeHtml(entry.message)}</span> - ${entry.hook_id ? `<span class="hook-id">[Hook: ${escapeHtml(entry.hook_id)}]</span>` : ''} - ${entry.pr_number ? `<span class="pr-number">[PR: #${entry.pr_number}]</span>` : ''} - ${entry.repository ? `<span class="repository">[${escapeHtml(entry.repository)}]</span>` : ''} - ${entry.github_user ? `<span class="user">[User: ${escapeHtml(entry.github_user)}]</span>` : ''} - `; + // Create timestamp + const timestamp = document.createElement("span"); + timestamp.className = "timestamp"; + timestamp.textContent = new Date(entry.timestamp).toLocaleString(); + div.appendChild(timestamp); + + // Create level + const level = document.createElement("span"); + level.className = "level"; + level.textContent = `[${entry.level}]`; + div.appendChild(level); + + // Create message + const message = document.createElement("span"); + message.className = "message"; + message.textContent = entry.message; + div.appendChild(message); + + // Create clickable hook ID link if present + if (entry.hook_id) { + const hookIdSpan = document.createElement("span"); + hookIdSpan.className = "hook-id"; + hookIdSpan.textContent = "[Hook: "; + + const hookLink = document.createElement("span"); + hookLink.className = "hook-id-link"; + hookLink.textContent = entry.hook_id; + hookLink.title = "Click to view workflow"; + hookLink.style.cursor = "pointer"; + hookLink.addEventListener("click", () => { + showFlowModal(entry.hook_id); + }); - return div; -} + hookIdSpan.appendChild(hookLink); + const closeBracket = document.createTextNode("]"); + hookIdSpan.appendChild(closeBracket); + div.appendChild(hookIdSpan); + } + + // Add other metadata - make PR number clickable + if (entry.pr_number) { + const prSpan = document.createElement("span"); + prSpan.className = "pr-number"; + prSpan.textContent = "[PR: #"; + + const prLink = document.createElement("span"); + prLink.className = "pr-number-link"; + prLink.textContent = entry.pr_number; + prLink.title = "Click to view all webhook flows for this PR"; + prLink.style.cursor = "pointer"; + prLink.addEventListener("click", () => { + showPrModal(entry.pr_number); + }); -function escapeHtml(text) { - const div = document.createElement('div'); - div.textContent = text; - return div.innerHTML; + prSpan.appendChild(prLink); + const closeBracket = document.createTextNode("]"); + prSpan.appendChild(closeBracket); + div.appendChild(prSpan); + } + + if (entry.repository) { + const repoSpan = document.createElement("span"); + repoSpan.className = "repository"; + repoSpan.textContent = `[${entry.repository}]`; + div.appendChild(repoSpan); + } + + if (entry.github_user) { + const userSpan = document.createElement("span"); + userSpan.className = "user"; + userSpan.textContent = `[User: ${entry.github_user}]`; + div.appendChild(userSpan); + } + + return div; } // Alias for backward compatibility @@ -165,23 +239,26 @@ function renderLogEntries() { } function renderLogEntriesDirectly(entries) { - const container = document.getElementById('logEntries'); + const container = document.getElementById("logEntries"); // Always use direct rendering for backend-filtered data to ensure all entries show renderLogEntriesDirect(container, entries); } // Optimized filtering with caching and early exit
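The comment above heads the client-side filter cache: filtered results are memoized under a hash built from the active filter values plus the entry count, so repeated renders with unchanged filters skip the scan, and text search runs last because it is the most expensive check. A minimal Python sketch of the same memoization pattern (names illustrative, not part of this codebase):

```python
# Cache the last filter result, keyed by the filter inputs and entry count.
_last_filter_key: str = ""
_cached_result: list[dict] = []


def filter_entries(entries: list[dict], level: str, search: str) -> list[dict]:
    global _last_filter_key, _cached_result
    key = f"{level}-{search}-{len(entries)}"
    if key == _last_filter_key:
        return _cached_result  # cache hit: same filters, same entry count
    terms = [t for t in search.lower().split() if t]
    result = [
        e
        for e in entries
        # Cheap exact match first, expensive text search last.
        if (not level or e["level"] == level)
        and all(t in e["message"].lower() for t in terms)
    ]
    _last_filter_key, _cached_result = key, result
    return result


if __name__ == "__main__":
    logs = [{"level": "ERROR", "message": "Export generation failed"}]
    print(filter_entries(logs, "ERROR", "export failed"))  # matches both terms
```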
-let lastFilterHash = ''; +let lastFilterHash = ""; let cachedFilteredEntries = []; function filterLogEntries(entries) { - const hookId = document.getElementById('hookIdFilter').value.trim(); - const prNumber = document.getElementById('prNumberFilter').value.trim(); - const repository = document.getElementById('repositoryFilter').value.trim(); - const user = document.getElementById('userFilter').value.trim(); - const level = document.getElementById('levelFilter').value; - const search = document.getElementById('searchFilter').value.trim().toLowerCase(); + const hookId = document.getElementById("hookIdFilter").value.trim(); + const prNumber = document.getElementById("prNumberFilter").value.trim(); + const repository = document.getElementById("repositoryFilter").value.trim(); + const user = document.getElementById("userFilter").value.trim(); + const level = document.getElementById("levelFilter").value; + const search = document + .getElementById("searchFilter") + .value.trim() + .toLowerCase(); // Create hash of current filters for caching const filterHash = `${hookId}-${prNumber}-${repository}-${user}-${level}-${search}-${entries.length}`; @@ -192,11 +269,13 @@ function filterLogEntries(entries) { } // Pre-compile search terms for better performance - const searchTerms = search ? search.split(' ').filter(term => term.length > 0) : []; + const searchTerms = search + ? search.split(" ").filter((term) => term.length > 0) + : []; const prNumberInt = prNumber ? parseInt(prNumber) : null; // Use optimized filtering with early exits - const filtered = entries.filter(entry => { + const filtered = entries.filter((entry) => { // Exact matches first (fastest) if (hookId && entry.hook_id !== hookId) return false; if (prNumberInt && entry.pr_number !== prNumberInt) return false; @@ -207,7 +286,7 @@ function filterLogEntries(entries) { // Text search last (slowest) if (searchTerms.length > 0) { const messageText = entry.message.toLowerCase(); - return searchTerms.every(term => messageText.includes(term)); + return searchTerms.every((term) => messageText.includes(term)); } return true; @@ -222,7 +301,7 @@ function filterLogEntries(entries) { // Clear filter cache when entries change function clearFilterCache() { - lastFilterHash = ''; + lastFilterHash = ""; cachedFilteredEntries = []; } @@ -233,22 +312,22 @@ async function loadHistoricalLogs() { // Build API URL with current filter parameters const filters = new URLSearchParams(); - const hookId = document.getElementById('hookIdFilter').value.trim(); - const prNumber = document.getElementById('prNumberFilter').value.trim(); - const repository = document.getElementById('repositoryFilter').value.trim(); - const user = document.getElementById('userFilter').value.trim(); - const level = document.getElementById('levelFilter').value; - const search = document.getElementById('searchFilter').value.trim(); - const limit = document.getElementById('limitFilter').value; + const hookId = document.getElementById("hookIdFilter").value.trim(); + const prNumber = document.getElementById("prNumberFilter").value.trim(); + const repository = document.getElementById("repositoryFilter").value.trim(); + const user = document.getElementById("userFilter").value.trim(); + const level = document.getElementById("levelFilter").value; + const search = document.getElementById("searchFilter").value.trim(); + const limit = document.getElementById("limitFilter").value; // Use user-configured limit - filters.append('limit', limit); - if (hookId) filters.append('hook_id', hookId); - if 
(prNumber) filters.append('pr_number', prNumber); - if (repository) filters.append('repository', repository); - if (user) filters.append('github_user', user); - if (level) filters.append('level', level); - if (search) filters.append('search', search); + filters.append("limit", limit); + if (hookId) filters.append("hook_id", hookId); + if (prNumber) filters.append("pr_number", prNumber); + if (repository) filters.append("repository", repository); + if (user) filters.append("github_user", user); + if (level) filters.append("level", level); + if (search) filters.append("search", search); const response = await fetch(`/logs/api/entries?${filters.toString()}`); @@ -259,9 +338,10 @@ async function loadHistoricalLogs() { // Try to parse error message from response body const errorData = await response.json(); if (errorData.detail || errorData.message || errorData.error) { - errorMessage = errorData.detail || errorData.message || errorData.error; + errorMessage = + errorData.detail || errorData.message || errorData.error; } - } catch (parseError) { + } catch { // If JSON parsing fails, use the status text } throw new Error(errorMessage); @@ -274,7 +354,7 @@ async function loadHistoricalLogs() { // Progressive loading for large datasets if (data.entries.length > 200) { - await loadEntriesProgressivelyDirect(data.entries); + await loadEntriesDirectly(data.entries); } else { logEntries = data.entries; // Apply memory bounding after loading entries @@ -286,44 +366,28 @@ async function loadHistoricalLogs() { hideLoadingSkeleton(); } catch (error) { - console.error('Error loading historical logs:', error); + console.error("Error loading historical logs:", error); hideLoadingSkeleton(); - showErrorMessage('Failed to load log entries'); + showErrorMessage("Failed to load log entries"); } } -async function loadEntriesProgressively(entries) { - const chunkSize = 50; - logEntries = []; - clearFilterCache(); // Clear cache when loading new entries - - for (let i = 0; i < entries.length; i += chunkSize) { - const chunk = entries.slice(i, i + chunkSize); - logEntries.push(...chunk); - // Apply memory bounding after each chunk to prevent unbounded growth - applyMemoryBounding(); - clearFilterCache(); // Clear cache for each chunk - renderLogEntries(); - - // Add small delay to prevent UI blocking - if (i + chunkSize < entries.length) { - await new Promise(resolve => setTimeout(resolve, 10)); - } - } -} - -async function loadEntriesProgressivelyDirect(entries) { - // For backend-filtered data, just render all entries at once - // Progressive loading isn't needed since data is already filtered and limited +async function loadEntriesDirectly(entries) { + // Backend-filtered entries are assigned and rendered all at once + // All entries are displayed immediately - backend handles chunked streaming logEntries = entries; // Apply memory bounding after direct assignment applyMemoryBounding(); + clearFilterCache(); // Clear cache when loading new entries to prevent stale results + hideLoadingSkeleton(); renderLogEntriesDirectly(logEntries); - console.log(`Loaded ${entries.length} backend-filtered entries`); + console.log( + `Loaded and rendered ${entries.length} backend-filtered entries at once`, + ); } function showLoadingSkeleton() { - const container = document.getElementById('logEntries'); + const container = document.getElementById("logEntries"); container.innerHTML = `
${createSkeletonEntry()} @@ -348,48 +412,71 @@ function createSkeletonEntry() { } function hideLoadingSkeleton() { - const skeleton = document.querySelector('.loading-skeleton'); + const skeleton = document.querySelector(".loading-skeleton"); if (skeleton) { skeleton.remove(); } } function showErrorMessage(message) { - const container = document.getElementById('logEntries'); - container.innerHTML = ` -
<div class="error-message"> - <span class="error-icon">⚠️</span> - <span>${message}</span> - <button id="retryBtn" class="retry-btn">Retry</button> - </div>
- `; + const container = document.getElementById("logEntries"); - // Add event listener to the dynamically created retry button - const retryBtn = document.getElementById('retryBtn'); - if (retryBtn) { - retryBtn.addEventListener('click', loadHistoricalLogs); - } + // Create error message structure safely using DOM methods to prevent XSS + const errorDiv = document.createElement("div"); + errorDiv.className = "error-message"; + + const iconSpan = document.createElement("span"); + iconSpan.className = "error-icon"; + iconSpan.textContent = "⚠️"; + + const messageSpan = document.createElement("span"); + messageSpan.textContent = message; // Safe - rendered as plain text, never parsed as HTML + + const retryBtn = document.createElement("button"); + retryBtn.id = "retryBtn"; + retryBtn.className = "retry-btn"; + retryBtn.textContent = "Retry"; + retryBtn.addEventListener("click", loadHistoricalLogs); + + errorDiv.appendChild(iconSpan); + errorDiv.appendChild(messageSpan); + errorDiv.appendChild(retryBtn); + + container.replaceChildren(errorDiv); } function updateLogStatistics(data) { - const statsPanel = document.getElementById('logStats'); - const displayedCount = document.getElementById('displayedCount'); - const totalCount = document.getElementById('totalCount'); - const processedCount = document.getElementById('processedCount'); + const statsPanel = document.getElementById("logStats"); + const displayedCount = document.getElementById("displayedCount"); + const totalCount = document.getElementById("totalCount"); + const processedCount = document.getElementById("processedCount"); // Update counts from API response displayedCount.textContent = data.entries ? data.entries.length : 0; - processedCount.textContent = data.entries_processed || '0'; + processedCount.textContent = data.entries_processed || "0"; // Use the total log count estimate for better user information - totalCount.textContent = data.total_log_count_estimate || 'Unknown'; + totalCount.textContent = data.total_log_count_estimate || "Unknown"; // Show the statistics panel - statsPanel.style.display = 'block'; + statsPanel.style.display = "block"; // Add indicator for partial scans if (data.is_partial_scan) { - processedCount.innerHTML = `${data.entries_processed} <small style="color: var(--timestamp-color)">(partial scan)</small>`; + // Clear existing content and rebuild safely to prevent XSS + processedCount.textContent = ""; // Clear first + + // Add the count as safe text + const countText = document.createTextNode( + String(data.entries_processed || "0") + " ", + ); + processedCount.appendChild(countText); + + // Add the partial scan indicator + const partialIndicator = document.createElement("small"); + partialIndicator.style.color = "var(--timestamp-color)"; + partialIndicator.textContent = "(partial scan)"; + processedCount.appendChild(partialIndicator); } } @@ -398,34 +485,35 @@ function clearLogs() { clearFilterCache(); // Clear cache when clearing entries // Clear the container directly to avoid any scrollbar flashing - const container = document.getElementById('logEntries'); + const container = document.getElementById("logEntries"); container.replaceChildren(); // More efficient than innerHTML = '' // Hide stats panel when no entries - document.getElementById('logStats').style.display = 'none'; + document.getElementById("logStats").style.display = "none"; } function exportLogs(format) { const filters = new URLSearchParams(); - const hookId = document.getElementById('hookIdFilter').value.trim(); - const prNumber = document.getElementById('prNumberFilter').value.trim(); - const repository =
document.getElementById('repositoryFilter').value.trim(); - const user = document.getElementById('userFilter').value.trim(); - const level = document.getElementById('levelFilter').value; - const search = document.getElementById('searchFilter').value.trim(); - const limit = document.getElementById('limitFilter').value; - - if (hookId) filters.append('hook_id', hookId); - if (prNumber) filters.append('pr_number', prNumber); - if (repository) filters.append('repository', repository); - if (user) filters.append('github_user', user); - if (level) filters.append('level', level); - if (search) filters.append('search', search); - filters.append('limit', limit); - filters.append('format', format); + const hookId = document.getElementById("hookIdFilter").value.trim(); + const prNumber = document.getElementById("prNumberFilter").value.trim(); + const repository = document.getElementById("repositoryFilter").value.trim(); + const user = document.getElementById("userFilter").value.trim(); + const level = document.getElementById("levelFilter").value; + const search = document.getElementById("searchFilter").value.trim(); + const limit = document.getElementById("limitFilter").value; + + if (hookId) filters.append("hook_id", hookId); + if (prNumber) filters.append("pr_number", prNumber); + if (repository) filters.append("repository", repository); + if (user) filters.append("github_user", user); + if (level) filters.append("level", level); + if (search) filters.append("search", search); + filters.append("limit", limit); + filters.append("format", format); const url = `/logs/api/export?${filters.toString()}`; - window.open(url, '_blank'); + const w = window.open(url, "_blank"); + if (w) w.opener = null; } function applyFilters() { @@ -442,7 +530,7 @@ function applyFilters() { let filterTimeout; function debounceFilter() { // Clear only filter cache, not entry cache - lastFilterHash = ''; + lastFilterHash = ""; // Immediate client-side filtering for fast feedback renderLogEntries(); @@ -455,114 +543,151 @@ function debounceFilter() { } function clearFilters() { - document.getElementById('hookIdFilter').value = ''; - document.getElementById('prNumberFilter').value = ''; - document.getElementById('repositoryFilter').value = ''; - document.getElementById('userFilter').value = ''; - document.getElementById('levelFilter').value = ''; - document.getElementById('searchFilter').value = ''; - document.getElementById('limitFilter').value = '1000'; // Reset to default + document.getElementById("hookIdFilter").value = ""; + document.getElementById("prNumberFilter").value = ""; + document.getElementById("repositoryFilter").value = ""; + document.getElementById("userFilter").value = ""; + document.getElementById("levelFilter").value = ""; + document.getElementById("searchFilter").value = ""; + document.getElementById("limitFilter").value = "1000"; // Reset to default // Reload data with cleared filters applyFilters(); } -document.getElementById('hookIdFilter').addEventListener('input', debounceFilter); -document.getElementById('prNumberFilter').addEventListener('input', debounceFilter); -document.getElementById('repositoryFilter').addEventListener('input', debounceFilter); -document.getElementById('userFilter').addEventListener('input', debounceFilter); -document.getElementById('levelFilter').addEventListener('change', debounceFilter); -document.getElementById('searchFilter').addEventListener('input', debounceFilter); -document.getElementById('limitFilter').addEventListener('change', debounceFilter); +document + 
.getElementById("hookIdFilter") + .addEventListener("input", debounceFilter); +document + .getElementById("prNumberFilter") + .addEventListener("input", debounceFilter); +document + .getElementById("repositoryFilter") + .addEventListener("input", debounceFilter); +document.getElementById("userFilter").addEventListener("input", debounceFilter); +document + .getElementById("levelFilter") + .addEventListener("change", debounceFilter); +document + .getElementById("searchFilter") + .addEventListener("input", debounceFilter); +document + .getElementById("limitFilter") + .addEventListener("change", debounceFilter); // Theme management function toggleTheme() { - const currentTheme = document.documentElement.getAttribute('data-theme'); - const newTheme = currentTheme === 'dark' ? 'light' : 'dark'; + const currentTheme = document.documentElement.getAttribute("data-theme"); + const newTheme = currentTheme === "dark" ? "light" : "dark"; - document.documentElement.setAttribute('data-theme', newTheme); + document.documentElement.setAttribute("data-theme", newTheme); // Update theme toggle button icon and accessibility attributes - const themeToggle = document.querySelector('.theme-toggle'); - themeToggle.textContent = newTheme === 'dark' ? '☀️' : '🌙'; - themeToggle.setAttribute('aria-label', newTheme === 'dark' ? 'Switch to light theme' : 'Switch to dark theme'); - themeToggle.setAttribute('title', newTheme === 'dark' ? 'Switch to light theme' : 'Switch to dark theme'); + const themeToggle = document.querySelector(".theme-toggle"); + themeToggle.textContent = newTheme === "dark" ? "☀️" : "🌙"; + themeToggle.setAttribute( + "aria-label", + newTheme === "dark" ? "Switch to light theme" : "Switch to dark theme", + ); + themeToggle.setAttribute( + "title", + newTheme === "dark" ? "Switch to light theme" : "Switch to dark theme", + ); // Store theme preference in localStorage - localStorage.setItem('log-viewer-theme', newTheme); + localStorage.setItem("log-viewer-theme", newTheme); } // Initialize theme from localStorage or default to light function initializeTheme() { - const savedTheme = localStorage.getItem('log-viewer-theme') || 'light'; - document.documentElement.setAttribute('data-theme', savedTheme); + const savedTheme = localStorage.getItem("log-viewer-theme") || "light"; + document.documentElement.setAttribute("data-theme", savedTheme); // Update theme toggle button icon and accessibility attributes - const themeToggle = document.querySelector('.theme-toggle'); - themeToggle.textContent = savedTheme === 'dark' ? '☀️' : '🌙'; - themeToggle.setAttribute('aria-label', savedTheme === 'dark' ? 'Switch to light theme' : 'Switch to dark theme'); - themeToggle.setAttribute('title', savedTheme === 'dark' ? 'Switch to light theme' : 'Switch to dark theme'); + const themeToggle = document.querySelector(".theme-toggle"); + themeToggle.textContent = savedTheme === "dark" ? "☀️" : "🌙"; + themeToggle.setAttribute( + "aria-label", + savedTheme === "dark" ? "Switch to light theme" : "Switch to dark theme", + ); + themeToggle.setAttribute( + "title", + savedTheme === "dark" ? 
"Switch to light theme" : "Switch to dark theme", + ); } // Initialize theme on page load initializeTheme(); -// Initialize timeline collapse state -initializeTimelineState(); - // Initialize connection status updateConnectionStatus(false); // Initialize event listeners when DOM is ready function initializeEventListeners() { // Theme toggle button - const themeToggleBtn = document.getElementById('themeToggleBtn'); + const themeToggleBtn = document.getElementById("themeToggleBtn"); if (themeToggleBtn) { - themeToggleBtn.addEventListener('click', toggleTheme); + themeToggleBtn.addEventListener("click", toggleTheme); } // Control buttons - const connectBtn = document.getElementById('connectBtn'); + const connectBtn = document.getElementById("connectBtn"); if (connectBtn) { - connectBtn.addEventListener('click', connectWebSocket); + connectBtn.addEventListener("click", connectWebSocket); } - const disconnectBtn = document.getElementById('disconnectBtn'); + const disconnectBtn = document.getElementById("disconnectBtn"); if (disconnectBtn) { - disconnectBtn.addEventListener('click', disconnectWebSocket); + disconnectBtn.addEventListener("click", disconnectWebSocket); } - const refreshBtn = document.getElementById('refreshBtn'); + const refreshBtn = document.getElementById("refreshBtn"); if (refreshBtn) { - refreshBtn.addEventListener('click', loadHistoricalLogs); + refreshBtn.addEventListener("click", loadHistoricalLogs); } - const clearFiltersBtn = document.getElementById('clearFiltersBtn'); + const clearFiltersBtn = document.getElementById("clearFiltersBtn"); if (clearFiltersBtn) { - clearFiltersBtn.addEventListener('click', clearFilters); + clearFiltersBtn.addEventListener("click", clearFilters); } - const clearLogsBtn = document.getElementById('clearLogsBtn'); + const clearLogsBtn = document.getElementById("clearLogsBtn"); if (clearLogsBtn) { - clearLogsBtn.addEventListener('click', clearLogs); + clearLogsBtn.addEventListener("click", clearLogs); } - const exportBtn = document.getElementById('exportBtn'); + const exportBtn = document.getElementById("exportBtn"); if (exportBtn) { - exportBtn.addEventListener('click', () => exportLogs('json')); + exportBtn.addEventListener("click", () => exportLogs("json")); } - // Timeline header and toggle button - const timelineHeader = document.getElementById('timelineHeader'); - if (timelineHeader) { - timelineHeader.addEventListener('click', toggleTimeline); + // Flow modal event listeners + const closeModalBtn = document.getElementById("closeFlowModal"); + if (closeModalBtn) { + closeModalBtn.addEventListener("click", closeFlowModal); } - const timelineToggle = document.getElementById('timelineToggle'); - if (timelineToggle) { - timelineToggle.addEventListener('click', (event) => { - event.stopPropagation(); - toggleTimeline(); + const flowModal = document.getElementById("flowModal"); + if (flowModal) { + flowModal.addEventListener("click", (e) => { + if (e.target === flowModal) { + closeFlowModal(); + } + }); + } + + // PR modal event listeners + const closePrModalBtn = document.getElementById("closePrModal"); + if (closePrModalBtn) { + closePrModalBtn.addEventListener("click", closePrModal); + } + + const prModal = document.getElementById("prModal"); + if (prModal) { + prModal.addEventListener("click", (e) => { + if (e.target === prModal) { + closePrModal(); + } }); } } @@ -573,332 +698,995 @@ initializeEventListeners(); // Load initial data loadHistoricalLogs(); -// Timeline functionality -let currentTimelineData = null; +// Flow Modal functionality 
+let currentFlowData = null; +let currentFlowController = null; +let flowModalKeydownHandler = null; +let flowModalPreviousFocus = null; +let currentStepLogsController = null; +// eslint-disable-next-line no-unused-vars function showTimeline(hookId) { + // Redirect old timeline calls to new modal (backward compatibility shim) + showFlowModal(hookId); +} + +function showFlowModal(hookId) { if (!hookId) { - hideTimeline(); + closeFlowModal(); return; } + // Hide step logs section when opening new modal + const flowLogsSection = document.getElementById("flowLogs"); + if (flowLogsSection) { + flowLogsSection.style.display = "none"; + } + + // Cancel previous fetch if still in progress + if (currentFlowController) { + currentFlowController.abort(); + } + + // Create new AbortController for this fetch + currentFlowController = new AbortController(); + + // Show modal with loading indicator + const modal = document.getElementById("flowModal"); + modal.style.display = "flex"; + showFlowModalLoading(); // Fetch workflow steps data - fetch(`/logs/api/workflow-steps/${hookId}`) - .then(response => { + fetch(`/logs/api/workflow-steps/${encodeURIComponent(hookId)}`, { + signal: currentFlowController.signal, + }) + .then((response) => { if (!response.ok) { if (response.status === 404) { - hideTimeline(); + console.log("No flow data found for hook ID:", hookId); + showFlowModalError("No workflow data found for this hook"); return; } - throw new Error('Failed to fetch workflow steps'); + throw new Error(`HTTP ${response.status}: ${response.statusText}`); } return response.json(); }) - .then(data => { - currentTimelineData = data; - renderTimeline(data); - document.getElementById('timelineSection').style.display = 'block'; - - // Ensure the correct collapse state is maintained when showing timeline - initializeTimelineState(); + .then((data) => { + if (data) { + currentFlowData = data; + renderFlowModal(data); + setupFlowModalAccessibility(); + } }) - .catch(error => { - hideTimeline(); + .catch((error) => { + if (error.name === "AbortError") { + // Request was cancelled, ignore silently + return; + } + console.error("Error fetching flow data:", error); + showFlowModalError("Failed to load workflow data. 
Please try again."); }); } -function hideTimeline() { - document.getElementById('timelineSection').style.display = 'none'; - currentTimelineData = null; -} +function closeFlowModal() { + const modal = document.getElementById("flowModal"); + if (modal) { + modal.style.display = "none"; + } + if (currentFlowController) { + currentFlowController.abort(); + currentFlowController = null; + } + if (currentStepLogsController) { + currentStepLogsController.abort(); + currentStepLogsController = null; + } + currentFlowData = null; -function toggleTimeline() { - const content = document.getElementById('timelineContent'); - const toggle = document.getElementById('timelineToggle'); + // Remove keyboard event listener + if (flowModalKeydownHandler) { + document.removeEventListener("keydown", flowModalKeydownHandler); + flowModalKeydownHandler = null; + } - if (content.classList.contains('expanded')) { - // Collapse - content.classList.remove('expanded'); - content.classList.add('collapsed'); - toggle.textContent = '▶ Expand'; + // Restore focus to the element that opened the modal + if (flowModalPreviousFocus) { + flowModalPreviousFocus.focus(); + flowModalPreviousFocus = null; + } +} - // Store collapse state in localStorage - localStorage.setItem('timeline-collapsed', 'true'); - } else { - // Expand - content.classList.remove('collapsed'); - content.classList.add('expanded'); - toggle.textContent = '▼ Collapse'; +// PR Modal functionality +let currentPrController = null; +let prModalKeydownHandler = null; +let prModalPreviousFocus = null; - // Store expand state in localStorage - localStorage.setItem('timeline-collapsed', 'false'); +function showPrModal(prNumber) { + if (!prNumber) { + closePrModal(); + return; } + + // Cancel previous fetch if still in progress + if (currentPrController) { + currentPrController.abort(); + } + + // Create new AbortController for this fetch + currentPrController = new AbortController(); + + // Show modal with loading indicator + const modal = document.getElementById("prModal"); + modal.style.display = "flex"; + showPrModalLoading(); + + // Fetch all log entries for this PR number + const params = new URLSearchParams({ + pr_number: prNumber, + limit: CONFIG.PR_FETCH_LIMIT.toString(), + }); + + fetch(`/logs/api/entries?${params}`, { signal: currentPrController.signal }) + .then((response) => { + if (!response.ok) { + throw new Error(`HTTP ${response.status}: ${response.statusText}`); + } + return response.json(); + }) + .then((data) => { + if (data.entries && data.entries.length > 0) { + // Extract unique hook IDs (deduplicate) + const hookIds = data.entries + .map((e) => e.hook_id) + .filter((id) => id !== null && id !== undefined); + const uniqueHookIds = [...new Set(hookIds)]; + + if (uniqueHookIds.length === 0) { + console.log("No hook IDs found for PR:", prNumber); + showPrModalError(`No workflow events found for PR #${prNumber}`); + return; + } + + renderPrModal(prNumber, uniqueHookIds, data.entries[0].repository); + setupPrModalAccessibility(); + } else { + showPrModalError(`No log entries found for PR #${prNumber}`); + } + }) + .catch((error) => { + if (error.name === "AbortError") { + // Request was cancelled, ignore silently + return; + } + console.error("Error fetching PR data:", error); + showPrModalError("Failed to load PR data. 
Please try again."); + }); } -function initializeTimelineState() { - // Initialize timeline collapse state from localStorage - default to collapsed - const timelineState = localStorage.getItem('timeline-collapsed'); - const isCollapsed = timelineState === null ? true : timelineState === 'true'; // Default collapsed if no preference set - const content = document.getElementById('timelineContent'); - const toggle = document.getElementById('timelineToggle'); +function closePrModal() { + const modal = document.getElementById("prModal"); + if (modal) { + modal.style.display = "none"; + } - if (isCollapsed) { - content.classList.remove('expanded'); - content.classList.add('collapsed'); - toggle.textContent = '▶ Expand'; - } else { - content.classList.remove('collapsed'); - content.classList.add('expanded'); - toggle.textContent = '▼ Collapse'; + // Remove keyboard event listener + if (prModalKeydownHandler) { + document.removeEventListener("keydown", prModalKeydownHandler); + prModalKeydownHandler = null; + } + + // Restore focus to the element that opened the modal + if (prModalPreviousFocus) { + prModalPreviousFocus.focus(); + prModalPreviousFocus = null; } } -function updateTimelineInfo(data) { - const info = document.getElementById('timelineInfo'); - const duration = data.total_duration_ms > 0 ? `${(data.total_duration_ms / 1000).toFixed(2)}s` : '< 1s'; - info.innerHTML = ` -
<span>Hook ID: ${data.hook_id}</span>
-    <span>Steps: ${data.step_count}</span>
-    <span>Duration: ${duration}</span>
- `; +// Keyboard accessibility for Flow Modal +function setupFlowModalAccessibility() { + const modal = document.getElementById("flowModal"); + if (!modal) return; + + // Set ARIA attributes for screen reader support + modal.setAttribute("role", "dialog"); + modal.setAttribute("aria-modal", "true"); + modal.setAttribute("aria-labelledby", "flowModalTitle"); + modal.setAttribute("aria-describedby", "flowSummary"); + + // Save the element that had focus before modal opened + flowModalPreviousFocus = document.activeElement; + + // Find all focusable elements in the modal + const focusableElements = modal.querySelectorAll( + 'button, [href], input, select, textarea, [tabindex]:not([tabindex="-1"])', + ); + const firstFocusable = focusableElements[0]; + const lastFocusable = focusableElements[focusableElements.length - 1]; + + // Move focus to first interactive element in modal + if (firstFocusable) { + firstFocusable.focus(); + } + + // Create and attach keyboard handler + flowModalKeydownHandler = function (e) { + // Close modal on Escape key + if (e.key === "Escape") { + e.preventDefault(); + closeFlowModal(); + return; + } + + // Trap focus within modal using Tab + if (e.key === "Tab") { + if (e.shiftKey) { + // Shift+Tab: moving backwards + if (document.activeElement === firstFocusable) { + e.preventDefault(); + lastFocusable.focus(); + } + } else { + // Tab: moving forwards + if (document.activeElement === lastFocusable) { + e.preventDefault(); + firstFocusable.focus(); + } + } + } + }; + + document.addEventListener("keydown", flowModalKeydownHandler); } -function renderEmptyTimeline() { - const svg = document.getElementById('timelineSvg'); - svg.innerHTML = 'No workflow steps found'; +// Keyboard accessibility for PR Modal +function setupPrModalAccessibility() { + const modal = document.getElementById("prModal"); + if (!modal) return; + + // Set ARIA attributes for screen reader support + modal.setAttribute("role", "dialog"); + modal.setAttribute("aria-modal", "true"); + modal.setAttribute("aria-labelledby", "prModalTitle"); + modal.setAttribute("aria-describedby", "prSummary"); + + // Save the element that had focus before modal opened + prModalPreviousFocus = document.activeElement; + + // Find all focusable elements in the modal + const focusableElements = modal.querySelectorAll( + 'button, [href], input, select, textarea, [tabindex]:not([tabindex="-1"])', + ); + const firstFocusable = focusableElements[0]; + const lastFocusable = focusableElements[focusableElements.length - 1]; + + // Move focus to first interactive element in modal + if (firstFocusable) { + firstFocusable.focus(); + } + + // Create and attach keyboard handler + prModalKeydownHandler = function (e) { + // Close modal on Escape key + if (e.key === "Escape") { + e.preventDefault(); + closePrModal(); + return; + } + + // Trap focus within modal using Tab + if (e.key === "Tab") { + if (e.shiftKey) { + // Shift+Tab: moving backwards + if (document.activeElement === firstFocusable) { + e.preventDefault(); + lastFocusable.focus(); + } + } else { + // Tab: moving forwards + if (document.activeElement === lastFocusable) { + e.preventDefault(); + firstFocusable.focus(); + } + } + } + }; + + document.addEventListener("keydown", prModalKeydownHandler); } -function renderTimelineVisualization(layout, data) { - const svg = document.getElementById('timelineSvg'); +function renderPrModal(prNumber, hookIds, repository) { + // Render summary section + const summaryElement = document.getElementById("prSummary"); + if (!summaryElement) 
return; // Clear existing content - svg.innerHTML = ''; - - // SVG dimensions - much larger and adaptive - const width = Math.max(1400, layout.totalWidth + 200); - const height = layout.totalHeight + 150; - const margin = { left: 75, right: 75, top: 75, bottom: 75 }; - - // Update SVG size - svg.setAttribute('width', width); - svg.setAttribute('height', height); - - // Draw timeline lines and steps - layout.lines.forEach((line, lineIndex) => { - const lineY = margin.top + (lineIndex * layout.lineHeight) + layout.lineHeight / 2; - - // Draw horizontal timeline line for this row - if (line.steps.length > 0) { - const lineElement = document.createElementNS('http://www.w3.org/2000/svg', 'line'); - lineElement.setAttribute('class', 'step-line'); - lineElement.setAttribute('x1', margin.left); - lineElement.setAttribute('y1', lineY); - lineElement.setAttribute('x2', margin.left + layout.lineWidth); - lineElement.setAttribute('y2', lineY); - svg.appendChild(lineElement); - } + while (summaryElement.firstChild) { + summaryElement.removeChild(summaryElement.firstChild); + } + + const title = document.createElement("h3"); + title.textContent = `PR #${prNumber} Workflow Overview`; + summaryElement.appendChild(title); - // Draw steps for this line - line.steps.forEach((step, stepIndex) => { - const stepX = margin.left + (stepIndex * layout.stepSpacing) + layout.stepSpacing / 2; - - const group = document.createElementNS('http://www.w3.org/2000/svg', 'g'); - group.setAttribute('class', 'timeline-step'); - group.setAttribute('data-step-index', step.originalIndex); - - // Step circle - larger - const circle = document.createElementNS('http://www.w3.org/2000/svg', 'circle'); - circle.setAttribute('class', `step-circle ${getStepType(step.message)}`); - circle.setAttribute('cx', stepX); - circle.setAttribute('cy', lineY); - circle.setAttribute('r', 12); // Larger circle - svg.appendChild(circle); - group.appendChild(circle); - - // Step label - with multi-line text wrapping - const labelLines = wrapTextToLines(step.message, 25); // Longer text allowed - labelLines.forEach((line, lineIndex) => { - const label = document.createElementNS('http://www.w3.org/2000/svg', 'text'); - label.setAttribute('class', 'step-label'); - label.setAttribute('x', stepX); - label.setAttribute('y', lineY - 35 + (lineIndex * 14)); // Multi-line spacing - label.setAttribute('text-anchor', 'middle'); - label.setAttribute('font-size', '12'); // Larger font - label.textContent = line; - svg.appendChild(label); - group.appendChild(label); - }); - - // Time label - larger and positioned better - const timeLabel = document.createElementNS('http://www.w3.org/2000/svg', 'text'); - timeLabel.setAttribute('class', 'step-time'); - timeLabel.setAttribute('x', stepX); - timeLabel.setAttribute('y', lineY + 35); - timeLabel.setAttribute('text-anchor', 'middle'); - timeLabel.setAttribute('font-size', '11'); // Larger time font - timeLabel.textContent = `+${(step.relative_time_ms / 1000).toFixed(1)}s`; - svg.appendChild(timeLabel); - group.appendChild(timeLabel); - - // Step index number - larger and better positioned - const indexLabel = document.createElementNS('http://www.w3.org/2000/svg', 'text'); - indexLabel.setAttribute('class', 'step-index'); - indexLabel.setAttribute('x', stepX); - indexLabel.setAttribute('y', lineY + 5); - indexLabel.setAttribute('text-anchor', 'middle'); - indexLabel.setAttribute('font-size', '13'); // Larger index font - indexLabel.setAttribute('font-weight', 'bold'); - indexLabel.setAttribute('fill', 'white'); // White 
text for better contrast - indexLabel.textContent = (step.originalIndex + 1).toString(); - svg.appendChild(indexLabel); - group.appendChild(indexLabel); - - // Add hover events - group.addEventListener('mouseenter', (e) => showTooltip(e, step)); - group.addEventListener('mouseleave', hideTooltip); - group.addEventListener('click', () => filterByStep(step)); - - svg.appendChild(group); + const info = document.createElement("p"); + info.textContent = `Found ${hookIds.length} unique webhook event${ + hookIds.length !== 1 ? "s" : "" + }${repository ? ` for ${repository}` : ""}`; + info.style.margin = "8px 0 0 0"; + info.style.color = "var(--timestamp-color)"; + summaryElement.appendChild(info); + + // Render hook ID list + const listElement = document.getElementById("prHookList"); + if (!listElement) return; + + // Clear existing content + while (listElement.firstChild) { + listElement.removeChild(listElement.firstChild); + } + + if (hookIds.length === 0) { + const emptyMsg = document.createElement("p"); + emptyMsg.style.textAlign = "center"; + emptyMsg.style.color = "var(--timestamp-color)"; + emptyMsg.textContent = "No webhook events found"; + listElement.appendChild(emptyMsg); + return; + } + + // Create clickable list items for each hook ID + hookIds.forEach((hookId, index) => { + const hookItem = document.createElement("div"); + hookItem.className = "pr-hook-item"; + hookItem.addEventListener("click", () => { + closePrModal(); + showFlowModal(hookId); }); + + const icon = document.createElement("span"); + icon.className = "pr-hook-icon"; + icon.textContent = "🔗"; + + const hookIdSpan = document.createElement("span"); + hookIdSpan.className = "pr-hook-id"; + hookIdSpan.textContent = `Event ${index + 1}: ${hookId}`; + + hookItem.appendChild(icon); + hookItem.appendChild(hookIdSpan); + listElement.appendChild(hookItem); }); } -function renderTimeline(data) { - // Update timeline information - updateTimelineInfo(data); +// Flow Modal loading and error helper functions +function showFlowModalLoading() { + const summaryElement = document.getElementById("flowSummary"); + const vizElement = document.getElementById("flowVisualization"); - // Handle empty state - if (data.steps.length === 0) { - renderEmptyTimeline(); - return; + if (summaryElement) { + while (summaryElement.firstChild) { + summaryElement.removeChild(summaryElement.firstChild); + } + const loadingDiv = document.createElement("div"); + loadingDiv.className = "modal-loading"; + loadingDiv.style.textAlign = "center"; + loadingDiv.style.padding = "24px"; + loadingDiv.style.color = "var(--timestamp-color)"; + + const spinner = document.createElement("div"); + spinner.className = "loading-spinner"; + spinner.textContent = "⏳"; + spinner.style.fontSize = "32px"; + spinner.style.marginBottom = "12px"; + + const text = document.createElement("div"); + text.textContent = "Loading workflow data..."; + + loadingDiv.appendChild(spinner); + loadingDiv.appendChild(text); + summaryElement.appendChild(loadingDiv); } - // Calculate layout for multi-line timeline - const layout = calculateMultiLineLayout(data.steps, data.total_duration_ms); + if (vizElement) { + while (vizElement.firstChild) { + vizElement.removeChild(vizElement.firstChild); + } + } +} + +function showFlowModalError(errorMessage) { + const summaryElement = document.getElementById("flowSummary"); + const vizElement = document.getElementById("flowVisualization"); - // Render the timeline visualization - renderTimelineVisualization(layout, data); + if (summaryElement) { + while 
(summaryElement.firstChild) { + summaryElement.removeChild(summaryElement.firstChild); + } + const errorDiv = document.createElement("div"); + errorDiv.className = "modal-error"; + errorDiv.style.textAlign = "center"; + errorDiv.style.padding = "24px"; + + const icon = document.createElement("div"); + icon.style.fontSize = "48px"; + icon.style.marginBottom = "12px"; + icon.textContent = "⚠️"; + + const message = document.createElement("div"); + message.style.color = "var(--error-color, #dc3545)"; + message.style.fontSize = "16px"; + message.style.marginBottom = "16px"; + message.textContent = errorMessage; + + const closeBtn = document.createElement("button"); + closeBtn.textContent = "Close"; + closeBtn.className = "btn-secondary"; + closeBtn.style.padding = "8px 16px"; + closeBtn.style.cursor = "pointer"; + closeBtn.addEventListener("click", closeFlowModal); + + errorDiv.appendChild(icon); + errorDiv.appendChild(message); + errorDiv.appendChild(closeBtn); + summaryElement.appendChild(errorDiv); + } + + if (vizElement) { + while (vizElement.firstChild) { + vizElement.removeChild(vizElement.firstChild); + } + } } -function getStepType(message) { - if (message.includes('completed successfully') || message.includes('success')) { - return 'success'; - } else if (message.includes('failed') || message.includes('error')) { - return 'failure'; - } else if (message.includes('Starting') || message.includes('Executing')) { - return 'progress'; - } else { - return 'info'; +// PR Modal loading and error helper functions +function showPrModalLoading() { + const summaryElement = document.getElementById("prSummary"); + const listElement = document.getElementById("prHookList"); + + if (summaryElement) { + while (summaryElement.firstChild) { + summaryElement.removeChild(summaryElement.firstChild); + } + const loadingDiv = document.createElement("div"); + loadingDiv.className = "modal-loading"; + loadingDiv.style.textAlign = "center"; + loadingDiv.style.padding = "24px"; + loadingDiv.style.color = "var(--timestamp-color)"; + + const spinner = document.createElement("div"); + spinner.className = "loading-spinner"; + spinner.textContent = "⏳"; + spinner.style.fontSize = "32px"; + spinner.style.marginBottom = "12px"; + + const text = document.createElement("div"); + text.textContent = "Loading PR data..."; + + loadingDiv.appendChild(spinner); + loadingDiv.appendChild(text); + summaryElement.appendChild(loadingDiv); + } + + if (listElement) { + while (listElement.firstChild) { + listElement.removeChild(listElement.firstChild); + } } } -function truncateText(text, maxLength) { - return text.length > maxLength ? text.substring(0, maxLength) + '...' 
: text; +function showPrModalError(errorMessage) { + const summaryElement = document.getElementById("prSummary"); + const listElement = document.getElementById("prHookList"); + + if (summaryElement) { + while (summaryElement.firstChild) { + summaryElement.removeChild(summaryElement.firstChild); + } + const errorDiv = document.createElement("div"); + errorDiv.className = "modal-error"; + errorDiv.style.textAlign = "center"; + errorDiv.style.padding = "24px"; + + const icon = document.createElement("div"); + icon.style.fontSize = "48px"; + icon.style.marginBottom = "12px"; + icon.textContent = "⚠️"; + + const message = document.createElement("div"); + message.style.color = "var(--error-color, #dc3545)"; + message.style.fontSize = "16px"; + message.style.marginBottom = "16px"; + message.textContent = errorMessage; + + const closeBtn = document.createElement("button"); + closeBtn.textContent = "Close"; + closeBtn.className = "btn-secondary"; + closeBtn.style.padding = "8px 16px"; + closeBtn.style.cursor = "pointer"; + closeBtn.addEventListener("click", closePrModal); + + errorDiv.appendChild(icon); + errorDiv.appendChild(message); + errorDiv.appendChild(closeBtn); + summaryElement.appendChild(errorDiv); + } + + if (listElement) { + while (listElement.firstChild) { + listElement.removeChild(listElement.firstChild); + } + } } -function calculateMultiLineLayout(steps, totalDuration) { - // Layout configuration - much larger for better readability - const stepsPerLine = 6; // Fewer steps per line for more space - const stepSpacing = 200; // Much larger horizontal space between steps - const lineHeight = 120; // Much larger vertical space between lines - const lineWidth = stepsPerLine * stepSpacing; +function groupStepsByTaskId(steps, flowCompletedSuccessfully = false) { + const redundantPatterns = [ + "signature verification successful", + "processing webhook for repository:", + ]; + + const groups = []; + const ungrouped = []; + const taskMap = new Map(); + + const filteredSteps = steps + .map((step, originalIndex) => ({ step, originalIndex })) + .filter(({ step }) => { + const message = step.message ? 
step.message.toLowerCase() : ""; + return !redundantPatterns.some((pattern) => message.includes(pattern)); + }); - // Organize steps into lines - const lines = []; - for (let i = 0; i < steps.length; i += stepsPerLine) { - const lineSteps = steps.slice(i, i + stepsPerLine).map((step, index) => ({ - ...step, - originalIndex: i + index - })); - lines.push({ steps: lineSteps }); + filteredSteps.forEach(({ step, originalIndex }) => { + const stepWithIndex = { ...step, original_index: originalIndex }; + + if (step.task_id) { + if (!taskMap.has(step.task_id)) { + taskMap.set(step.task_id, { + task_id: step.task_id, + task_title: step.task_title || step.task_id, + steps: [], + start_time: step.timestamp, + end_time: step.timestamp, + start_index: originalIndex, + }); + } + const group = taskMap.get(step.task_id); + group.steps.push(stepWithIndex); + if (new Date(step.timestamp) > new Date(group.end_time)) { + group.end_time = step.timestamp; + } + } else { + ungrouped.push(stepWithIndex); + } + }); + + // Calculate duration and status for each group + taskMap.forEach((group) => { + const startMs = new Date(group.start_time).getTime(); + const endMs = new Date(group.end_time).getTime(); + group.duration_ms = endMs - startMs; + + // Determine group status based on step levels and task_status field + // Priority: task_status field > level field > default based on flow completion + const hasErrorLevel = group.steps.some((s) => s.level === "ERROR"); + const hasSuccessLevel = group.steps.some((s) => s.level === "SUCCESS"); + + // Check task_status field from log entries (more reliable than message text) + const finalTaskStatus = group.steps[group.steps.length - 1]?.task_status; + + if (hasErrorLevel || finalTaskStatus === "failed") { + group.status = "error"; + } else if (hasSuccessLevel || finalTaskStatus === "completed") { + group.status = "success"; + } else if (finalTaskStatus === "in_progress" || finalTaskStatus === "processing") { + // task_status="processing" means the task is still running + // Only show as in-progress if flow hasn't completed (still running) + group.status = "in_progress"; + } else { + // Default: if flow completed successfully overall, mark as success + group.status = flowCompletedSuccessfully ? 
"success" : "in_progress"; + } + + groups.push(group); + }); + + // Sort groups by start index to maintain chronological order + groups.sort((a, b) => a.start_index - b.start_index); + + return { groups, ungrouped }; +} + +function renderFlowModal(data) { + // Render summary section using safe DOM methods + const summaryElement = document.getElementById("flowSummary"); + if (!summaryElement) return; + + // Clear existing content + while (summaryElement.firstChild) { + summaryElement.removeChild(summaryElement.firstChild); } - return { - lines, - lineHeight, - lineWidth, - stepSpacing, - totalWidth: lineWidth, - totalHeight: lines.length * lineHeight + const title = document.createElement("h3"); + title.textContent = "Flow Overview"; + summaryElement.appendChild(title); + + const grid = document.createElement("div"); + grid.className = "flow-summary-grid"; + + // Helper to create summary items safely + const createSummaryItem = (label, value) => { + const item = document.createElement("div"); + item.className = "flow-summary-item"; + + const labelDiv = document.createElement("div"); + labelDiv.className = "flow-summary-label"; + labelDiv.textContent = label; + + const valueDiv = document.createElement("div"); + valueDiv.className = "flow-summary-value"; + valueDiv.textContent = value; + + item.appendChild(labelDiv); + item.appendChild(valueDiv); + return item; }; -} + const duration = + data.total_duration_ms > 0 + ? `${(data.total_duration_ms / 1000).toFixed(2)}s` + : "< 1s"; + + grid.appendChild(createSummaryItem("Hook ID", data.hook_id)); + grid.appendChild( + createSummaryItem("Total Steps", data.step_count.toString()), + ); + grid.appendChild(createSummaryItem("Duration", duration)); + // Token spend is only available for webhooks processed after token tracking was added + if (data.token_spend !== undefined && data.token_spend !== null) { + grid.appendChild( + createSummaryItem("Token Spend", `${data.token_spend} API calls`), + ); + } else { + // Show "N/A" for older webhooks that don't have token spend data + grid.appendChild(createSummaryItem("Token Spend", "N/A (older webhook)")); + } + + if (data.steps[0] && data.steps[0].repository) { + grid.appendChild(createSummaryItem("Repository", data.steps[0].repository)); + } + + summaryElement.appendChild(grid); + + // Render vertical flow visualization using safe DOM methods + const vizElement = document.getElementById("flowVisualization"); + if (!vizElement) return; + + // Clear existing content + while (vizElement.firstChild) { + vizElement.removeChild(vizElement.firstChild); + } -function wrapTextToLines(text, maxCharacters) { - // Smart text wrapping for timeline labels - const words = text.split(' '); - const lines = []; - let currentLine = ''; + if (data.steps.length === 0) { + const emptyMsg = document.createElement("p"); + emptyMsg.style.textAlign = "center"; + emptyMsg.style.color = "var(--timestamp-color)"; + emptyMsg.textContent = "No workflow steps found"; + vizElement.appendChild(emptyMsg); + return; + } - for (const word of words) { - const testLine = currentLine ? 
`${currentLine} ${word}` : word; - if (testLine.length <= maxCharacters) { - currentLine = testLine; + // Check if flow completed successfully (no errors or failed tasks) + const hasFailedTasks = data.steps.some( + (step) => step.level === "ERROR" || step.task_status === "failed", + ); + const hasActiveTasks = data.steps.some( + (step) => + step.task_status === "processing" || step.task_status === "in_progress", + ); + const flowCompletedSuccessfully = !hasFailedTasks && !hasActiveTasks; + + // Group steps by task_id and get groups and ungrouped steps + const { groups, ungrouped } = groupStepsByTaskId(data.steps, flowCompletedSuccessfully); + + // Merge groups and ungrouped steps into a single array with original_index + const combinedEntries = [ + // Map groups to entries with type "group" and original_index from start_index + ...groups.map((group) => ({ + type: "group", + data: group, + original_index: group.start_index, + })), + // Map ungrouped steps to entries with type "step" and original_index + ...ungrouped.map((step) => ({ + type: "step", + data: step, + original_index: step.original_index, + })), + ]; + + // Sort combined entries by original_index to preserve chronological order + combinedEntries.sort((a, b) => a.original_index - b.original_index); + + // Render entries in chronological order + combinedEntries.forEach((entry) => { + if (entry.type === "group") { + renderTaskGroup(entry.data, vizElement); } else { - if (currentLine) { - lines.push(currentLine); - currentLine = word; - } else { - // Single word is too long, truncate it - lines.push(word.substring(0, maxCharacters - 3) + '...'); - currentLine = ''; - } + renderSingleStep(entry.data, vizElement); } + }); + + // Add final status (hasFailedTasks and hasActiveTasks already declared above) + const finalStatus = document.createElement("div"); + finalStatus.className = hasFailedTasks + ? "flow-error" + : hasActiveTasks + ? "flow-in-progress" + : "flow-success"; + + const statusTitle = document.createElement("h3"); + statusTitle.textContent = hasFailedTasks + ? "⚠️ Flow Completed with Errors" + : hasActiveTasks + ? "◷ Flow Still Running" + : "✓ Flow Completed Successfully"; + finalStatus.appendChild(statusTitle); + + if (hasFailedTasks) { + const errorMsg = document.createElement("div"); + errorMsg.className = "flow-error-message"; + errorMsg.textContent = + "Some steps encountered errors. 
Check the logs for details."; + finalStatus.appendChild(errorMsg); } - if (currentLine) { - lines.push(currentLine); + vizElement.appendChild(finalStatus); +} + +function renderTaskGroup(group, parentElement) { + const taskGroupContainer = document.createElement("div"); + taskGroupContainer.className = "task-group"; + + // Create group header + const groupHeader = document.createElement("div"); + groupHeader.className = "task-group-header"; + groupHeader.style.cursor = "pointer"; + + // Collapse arrow + const arrow = document.createElement("span"); + arrow.className = "task-group-arrow collapsed"; + arrow.textContent = "►"; + + // Status icon + const statusIcon = document.createElement("span"); + statusIcon.className = `task-group-status task-group-${group.status}`; + if (group.status === "success") { + statusIcon.textContent = "✓"; + } else if (group.status === "error") { + statusIcon.textContent = "✗"; + } else { + statusIcon.textContent = "◷"; } - // Return max 2 lines to prevent overcrowding - return lines.slice(0, 2); + // Task title + const taskTitle = document.createElement("span"); + taskTitle.className = "task-group-title"; + taskTitle.textContent = group.task_title; + + // Duration + const duration = document.createElement("span"); + duration.className = "task-group-duration"; + duration.textContent = `${(group.duration_ms / 1000).toFixed(2)}s`; + + groupHeader.appendChild(arrow); + groupHeader.appendChild(statusIcon); + groupHeader.appendChild(taskTitle); + groupHeader.appendChild(duration); + + // Create nested steps container + const stepsContainer = document.createElement("div"); + stepsContainer.className = "task-group-steps"; + stepsContainer.style.display = "none"; // Start collapsed + + group.steps.forEach((step) => { + renderSingleStep(step, stepsContainer, true); + }); + + // Toggle expand/collapse + groupHeader.addEventListener("click", () => { + const isCollapsed = stepsContainer.style.display === "none"; + stepsContainer.style.display = isCollapsed ? "block" : "none"; + arrow.className = isCollapsed + ? "task-group-arrow expanded" + : "task-group-arrow collapsed"; + }); + + taskGroupContainer.appendChild(groupHeader); + taskGroupContainer.appendChild(stepsContainer); + parentElement.appendChild(taskGroupContainer); } -function showTooltip(event, step) { - const tooltip = document.getElementById('timelineTooltip'); +function renderSingleStep(step, parentElement, isNested = false) { + const stepType = getStepType(step.level); const timeFromStart = `+${(step.relative_time_ms / 1000).toFixed(2)}s`; + const timestamp = new Date(step.timestamp).toLocaleTimeString(); - tooltip.innerHTML = ` -
<div>Step: ${step.message}</div>
-    <div>Time: ${timeFromStart}</div>
-    <div>Timestamp: ${new Date(step.timestamp).toLocaleTimeString()}</div>
-    ${step.pr_number ? `<div>PR: #${step.pr_number}</div>` : ''}
-    <div>Click to filter logs by this step</div>
- `; + const flowStepContainer = document.createElement("div"); + flowStepContainer.className = isNested + ? "flow-step-container nested" + : "flow-step-container"; - const rect = event.target.getBoundingClientRect(); - const containerRect = document.getElementById('timelineSection').getBoundingClientRect(); + const flowStep = document.createElement("div"); + flowStep.className = `flow-step ${stepType}`; + flowStep.setAttribute("data-step-index", step.original_index.toString()); + flowStep.style.cursor = "pointer"; + flowStep.addEventListener("click", () => filterByStep(step.original_index)); - tooltip.style.left = (rect.left - containerRect.left + rect.width / 2) + 'px'; - tooltip.style.top = (rect.top - containerRect.top - tooltip.offsetHeight - 10) + 'px'; - tooltip.style.display = 'block'; -} + const stepNumber = document.createElement("div"); + stepNumber.className = "flow-step-number"; + stepNumber.textContent = (step.original_index + 1).toString(); -function hideTooltip() { - document.getElementById('timelineTooltip').style.display = 'none'; -} + const stepContent = document.createElement("div"); + stepContent.className = "flow-step-content"; -function filterByStep(step) { - // Set search filter to find this specific step message - document.getElementById('searchFilter').value = step.message.substring(0, 30); - debounceFilter(); + const stepTitle = document.createElement("div"); + stepTitle.className = "flow-step-title"; + stepTitle.textContent = step.message; + + const stepTime = document.createElement("div"); + stepTime.className = "flow-step-time"; + + const timestampSpan = document.createElement("span"); + timestampSpan.textContent = timestamp; + + const durationSpan = document.createElement("span"); + durationSpan.className = "flow-step-duration"; + durationSpan.textContent = timeFromStart; + + stepTime.appendChild(timestampSpan); + stepTime.appendChild(durationSpan); + + stepContent.appendChild(stepTitle); + stepContent.appendChild(stepTime); + + flowStep.appendChild(stepNumber); + flowStep.appendChild(stepContent); + + // Create logs container for this step (hidden by default) + const stepLogsContainer = document.createElement("div"); + stepLogsContainer.className = "step-logs-container"; + stepLogsContainer.style.display = "none"; + stepLogsContainer.setAttribute( + "data-step-logs", + step.original_index.toString(), + ); + + flowStepContainer.appendChild(flowStep); + flowStepContainer.appendChild(stepLogsContainer); + + parentElement.appendChild(flowStepContainer); } -// Auto-show timeline when hook ID filter is applied -function checkForTimelineDisplay() { - const hookId = document.getElementById('hookIdFilter').value.trim(); - if (hookId) { - showTimeline(hookId); +function getStepType(level) { + // Accept level parameter to determine step type based on log level + const levelUpper = typeof level === "string" ? 
level.toUpperCase() : ""; + + if (levelUpper === "SUCCESS") { + return "success"; + } else if (levelUpper === "ERROR") { + return "error"; + } else if (levelUpper === "WARNING") { + return "warning"; } else { - hideTimeline(); + return "info"; } } -// Add timeline check to hook ID filter specifically -document.getElementById('hookIdFilter').addEventListener('input', () => { - setTimeout(checkForTimelineDisplay, 300); // Small delay to let the value settle -}); +async function filterByStep(stepIndex) { + if (!currentFlowData || !currentFlowData.steps[stepIndex]) return; + + const step = currentFlowData.steps[stepIndex]; + const logsContainer = document.querySelector( + `[data-step-logs="${stepIndex}"]`, + ); + + if (!logsContainer) return; + + // Toggle: if this step's logs are already showing, hide them + if (logsContainer.style.display === "block") { + logsContainer.style.display = "none"; + logsContainer.innerHTML = ""; + return; + } -// Also check on initial load -setTimeout(checkForTimelineDisplay, 1000); + // Hide all other step logs + document.querySelectorAll(".step-logs-container").forEach((container) => { + container.style.display = "none"; + container.innerHTML = ""; + }); + + // Show logs for this step + await showStepLogsInModal(step, logsContainer); +} + +async function showStepLogsInModal(step, logsContainer) { + if (!logsContainer) return; + + // Show loading state + logsContainer.style.display = "block"; + logsContainer.textContent = "Loading logs..."; + + // Cancel previous fetch if still in progress + if (currentStepLogsController) { + currentStepLogsController.abort(); + } + + // Create new AbortController for this fetch + currentStepLogsController = new AbortController(); + + try { + // Using full message for precision to avoid ambiguous matches + const searchText = step.message; + const hookId = currentFlowData.hook_id; + + const params = new URLSearchParams({ + hook_id: hookId, + search: searchText, + limit: "100", + }); + + const response = await fetch(`/logs/api/entries?${params}`, { + signal: currentStepLogsController.signal, + }); + if (!response.ok) throw new Error("Failed to fetch logs"); + + const data = await response.json(); + + // Clear and display logs using safe DOM methods + logsContainer.textContent = ""; + + if (data.entries.length === 0) { + const emptyMsg = document.createElement("div"); + emptyMsg.textContent = "No logs found for this step"; + emptyMsg.style.textAlign = "center"; + emptyMsg.style.color = "var(--timestamp-color)"; + emptyMsg.style.padding = "12px"; + logsContainer.appendChild(emptyMsg); + return; + } + + // Render log entries + data.entries.forEach((entry) => { + const logEntry = document.createElement("div"); + const allowed = ["DEBUG", "INFO", "WARNING", "ERROR", "STEP", "SUCCESS"]; + const safeLevel = allowed.includes(entry.level) ? 
entry.level : "INFO"; + logEntry.className = `log-entry ${safeLevel}`; + + const timestamp = document.createElement("span"); + timestamp.className = "timestamp"; + timestamp.textContent = new Date(entry.timestamp).toLocaleString(); + + const level = document.createElement("span"); + level.className = "level"; + level.textContent = ` [${entry.level}] `; + + const message = document.createElement("span"); + message.className = "message"; + message.textContent = entry.message; + + logEntry.appendChild(timestamp); + logEntry.appendChild(level); + logEntry.appendChild(message); + + logsContainer.appendChild(logEntry); + }); + + // Scroll to the logs container + logsContainer.scrollIntoView({ behavior: "smooth", block: "nearest" }); + } catch (error) { + if (error.name === "AbortError") { + // Request was cancelled, ignore silently + return; + } + console.error("Error fetching step logs:", error); + logsContainer.textContent = "Error loading logs"; + } +} diff --git a/webhook_server/web/templates/log_viewer.html b/webhook_server/web/templates/log_viewer.html index 4fbcab23f..315a3a349 100644 --- a/webhook_server/web/templates/log_viewer.html +++ b/webhook_server/web/templates/log_viewer.html @@ -1,117 +1,153 @@ - + - - - + + + GitHub Webhook Server - Log Viewer - - - + + +
[The markup in this template hunk was lost in extraction. What remains shows the page shell being reformatted with double-quoted attributes: the header ("GitHub Webhook Server - Log Viewer" with the subtitle "Real-time log monitoring and filtering for webhook events"), the "Connecting..." connection-status indicator, the stats bar (Displayed / Total Available / Processed entry counts), and the filter and control groups rebuilt as labeled field wrappers. The hunk also adds the flow-modal and PR-modal containers targeted by the log-viewer script above.]