diff --git a/Dockerfile b/Dockerfile index afc861d6..40808bd9 100644 --- a/Dockerfile +++ b/Dockerfile @@ -39,7 +39,7 @@ RUN mkdir -p $BIN_DIR \ && mkdir -p $DATA_DIR \ && mkdir -p $DATA_DIR/logs -COPY entrypoint.py pyproject.toml uv.lock README.md $APP_DIR/ +COPY entrypoint.py pyproject.toml uv.lock README.md alembic.ini $APP_DIR/ COPY webhook_server $APP_DIR/webhook_server/ COPY scripts $APP_DIR/scripts/ diff --git a/alembic.ini b/alembic.ini new file mode 100644 index 00000000..32993ef5 --- /dev/null +++ b/alembic.ini @@ -0,0 +1,94 @@ +# Alembic configuration for GitHub Webhook Server metrics database +# See: https://alembic.sqlalchemy.org/en/latest/tutorial.html + +[alembic] +# Path to migration scripts directory +script_location = webhook_server/migrations + +# Template used to generate migration files +# %%(year)d%%(month).2d%%(day).2d_%%(hour).2d%%(minute).2d_%%(rev)s_%%(slug)s +# Example: 20250123_1430_abc123def456_add_metrics_table +file_template = %%(year)d%%(month).2d%%(day).2d_%%(hour).2d%%(minute).2d_%%(rev)s_%%(slug)s + +# Path prepended to sys.path if present; defaults to the current working directory +# prepend_sys_path = . + +# Timezone to use when rendering the date within the migration file +# as well as the filename. +# If specified, requires the python-dateutil library. 
+# timezone = UTC + +# Max length of characters to apply to the "slug" field +# truncate_slug_length = 40 + +# Set to 'true' to run the environment during the 'revision' command +# revision_environment = false + +# Set to 'true' to allow .pyc and .pyo files without a .py file to be detected +# sourceless = false + +# Version table name - should match across all databases +version_table = alembic_version + +# Version location specification +# Determines where Alembic stores version information +# IMPORTANT: This value is OVERRIDDEN dynamically in env.py +# env.py sets version_locations based on WEBHOOK_SERVER_DATA_DIR environment variable +# Default: {WEBHOOK_SERVER_DATA_DIR}/migrations/versions +# Container default: /home/podman/data/migrations/versions +# This placeholder is kept for reference only - actual path is set in env.py +version_locations = migrations/versions + +# Version path separator (used if version_locations is specified) +# version_path_separator = os # Use os.pathsep. Default is ':' + +# Database URL - loaded dynamically from config.yaml via env.py +# IMPORTANT: Do NOT set sqlalchemy.url here - it's loaded from config.yaml +# sqlalchemy.url = + +# Logging configuration for Alembic migrations +# This section controls Alembic's own logging during migration operations +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = INFO +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S + +# Metrics-specific sections +# These are custom sections for webhook server metrics feature + +[metrics] +# Feature flags for metrics migration +enable_postgres = true + +# Migration 
behavior configuration +auto_migrate_on_startup = false +validate_schema_on_startup = true diff --git a/entrypoint.py b/entrypoint.py index a44bdf8c..dbb4a8be 100644 --- a/entrypoint.py +++ b/entrypoint.py @@ -1,4 +1,5 @@ import asyncio +import os import subprocess import sys from pathlib import Path @@ -41,10 +42,66 @@ def run_podman_cleanup() -> None: print(f"ℹ️ Podman cleanup script not found at {cleanup_script}") +def run_database_migrations() -> None: + """Run Alembic database migrations. + + Only runs if ENABLE_METRICS_SERVER environment variable is set to "true". + Applies pending migrations with 'alembic upgrade head'. + + Note: Migrations must be generated manually by developers: + alembic revision --autogenerate -m "Description" + + Raises: + SystemExit: If migration fails (fail-fast behavior) + """ + metrics_enabled = os.environ.get("ENABLE_METRICS_SERVER") == "true" + + if not metrics_enabled: + print("ℹ️ Metrics server disabled - skipping database migrations") + return + + try: + alembic_ini = Path(__file__).parent / "alembic.ini" + versions_dir = Path(_config.data_dir) / "migrations" / "versions" + + # Ensure versions directory exists (required for Alembic) + versions_dir.mkdir(parents=True, exist_ok=True) + + print("⬆️ Applying database migrations...") + result = subprocess.run( + ["uv", "run", "alembic", "-c", str(alembic_ini), "upgrade", "head"], + check=True, + capture_output=True, + text=True, + timeout=60, + cwd=Path(__file__).parent, + ) + print(result.stdout) + if result.stderr: + print(result.stderr, file=sys.stderr) + print("✅ Database migrations completed successfully") + except subprocess.CalledProcessError as e: + print(f"❌ FATAL: Database migration failed: {e}", file=sys.stderr) + if e.stdout: + print(f"stdout: {e.stdout}", file=sys.stderr) + if e.stderr: + print(f"stderr: {e.stderr}", file=sys.stderr) + sys.exit(1) + except subprocess.TimeoutExpired: + print("❌ FATAL: Database migration timed out after 60 seconds", file=sys.stderr) + 
sys.exit(1) + except Exception as e: + print(f"❌ FATAL: Unexpected error during database migration: {e}", file=sys.stderr) + sys.exit(1) + + if __name__ == "__main__": # Run Podman cleanup before starting the application run_podman_cleanup() + # Run database migrations if metrics server is enabled + run_database_migrations() + result = asyncio.run(repository_and_webhook_settings(webhook_secret=_webhook_secret)) # Logging Configuration: diff --git a/eslint.config.js b/eslint.config.js index 9f845d8c..1b4c4204 100644 --- a/eslint.config.js +++ b/eslint.config.js @@ -4,7 +4,7 @@ module.exports = [ files: ["webhook_server/web/static/**/*.js"], languageOptions: { ecmaVersion: 2022, - sourceType: "script", + sourceType: "module", globals: { // Browser environment globals window: "readonly", @@ -23,6 +23,12 @@ module.exports = [ clearInterval: "readonly", URLSearchParams: "readonly", AbortController: "readonly", + URL: "readonly", + Blob: "readonly", + // CommonJS globals for conditional exports + module: "readonly", + // Chart.js global + Chart: "readonly", }, }, rules: { diff --git a/examples/config.yaml b/examples/config.yaml index 86f6c39b..9e6fc69d 100644 --- a/examples/config.yaml +++ b/examples/config.yaml @@ -4,8 +4,36 @@ log-level: INFO # Set global log level, change take effect immediately without s log-file: webhook-server.log # Set global log file, change take effect immediately without server restart mcp-log-file: mcp_server.log # Set global MCP log file, change take effect immediately without server restart logs-server-log-file: logs_server.log # Set global Logs Server log file, change take effect immediately without server restart +metrics-server-log-file: metrics_server.log # Set global Metrics Server log file, change take effect immediately without server restart mask-sensitive-data: true # Mask sensitive data in logs (default: true). 
Set to false for debugging (NOT recommended in production) +# Metrics Server Configuration (requires ENABLE_METRICS_SERVER=true environment variable) +# Provides PostgreSQL-based historical analytics and AI-powered natural language queries +# NOTE: For docker-compose deployments, use service name as hostname: +# - metrics-database host: github-webhook-server-postgres (defined in examples/docker-compose.yaml) +# +# ⚠️ SECURITY WARNING - TRUSTED NETWORK ONLY ⚠️ +# The metrics dashboard and API endpoints (/api/v1/metrics/*) are UNAUTHENTICATED. +# These endpoints expose sensitive webhook data including user activity and repository statistics. +# REQUIREMENTS: +# - Deploy ONLY on trusted/private networks (VPN, internal network, localhost) +# - NEVER expose metrics endpoints directly to the public internet +# - Use a reverse proxy with authentication (OAuth, API keys) for external access +# - Enable SSL/TLS connections to PostgreSQL (sslmode=require) in production +metrics-database: + host: localhost # PostgreSQL server hostname (use 'github-webhook-server-postgres' in docker-compose) + port: 5432 # PostgreSQL server port + database: webhook_metrics # Database name for metrics + username: webhook_user # Database username + password: # Database password + pool-size: 20 # Connection pool size (default: 20) + +# AI Query Configuration (optional - enables natural language queries in dashboard) +# Requires a valid Gemini API key - set ai-query-enabled to true only after configuring the key +# WARNING: Enabling ai-query-enabled without a valid gemini-api-key will cause AI query failures +gemini-api-key: # Google Gemini API key for AI queries +ai-query-enabled: false # Enable AI-powered queries (default: false, requires valid API key) + # Server configuration disable-ssl-warnings: true # Disable SSL warnings (useful in production to reduce log noise from SSL certificate issues) diff --git a/examples/docker-compose.yaml b/examples/docker-compose.yaml index 99dc04e0..78de7ef8 
100644 --- a/examples/docker-compose.yaml +++ b/examples/docker-compose.yaml @@ -1,4 +1,34 @@ +# ⚠️ SECURITY WARNING - TRUSTED NETWORK ONLY ⚠️ +# The metrics dashboard and API endpoints (/api/v1/metrics/*) are UNAUTHENTICATED. +# These endpoints expose sensitive webhook data including user activity and repository statistics. +# REQUIREMENTS: +# - Deploy ONLY on trusted/private networks (VPN, internal network, localhost) +# - NEVER expose metrics endpoints directly to the public internet +# - Use a reverse proxy with authentication (OAuth, API keys) for external access +# - Consider using Docker network isolation to restrict access to metrics endpoints + services: + # PostgreSQL database for metrics storage + github-webhook-server-postgres: + image: postgres:16-alpine + container_name: github-webhook-server-postgres + environment: + - POSTGRES_DB=webhook_metrics + - POSTGRES_USER=webhook_user + - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} # Set POSTGRES_PASSWORD in .env or environment + volumes: + - postgres-data:/var/lib/postgresql/data + ports: + # Bind to localhost only - prevents external network access to database + # For production, consider removing ports entirely and using Docker network only + - "127.0.0.1:5432:5432" + healthcheck: + test: ["CMD-SHELL", "pg_isready -U webhook_user -d webhook_metrics"] + interval: 10s + timeout: 5s + retries: 5 + restart: unless-stopped + github-webhook-server: container_name: github-webhook-server build: ghcr.io/myk-org/github-webhook-server:latest @@ -18,7 +48,15 @@ services: - VERIFY_CLOUDFLARE_IPS=1 # Verify hook request is from Cloudflare IPs - ENABLE_LOG_SERVER=true # Enable log viewer endpoints (default: false) - ENABLE_MCP_SERVER=false # Enable MCP server for AI agent integration (default: false) + - ENABLE_METRICS_SERVER=true # Enable metrics server with PostgreSQL (default: false) ports: - "5000:5000" privileged: true + depends_on: + github-webhook-server-postgres: + condition: service_healthy restart: unless-stopped + 
+volumes: + postgres-data: + driver: local diff --git a/pyproject.toml b/pyproject.toml index 8f3016ce..d2c26a2f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -18,7 +18,7 @@ output-format = "grouped" select = ["E", "F", "W", "I", "B", "UP", "PLC0415", "ARG", "RUF059"] [tool.ruff.lint.per-file-ignores] -"webhook_server/tests/*" = ["ARG"] +"webhook_server/tests/*" = ["ARG", "PLC0415"] [tool.ruff.format] exclude = [".git", ".venv", ".mypy_cache", ".tox", "__pycache__"] @@ -75,6 +75,11 @@ dependencies = [ "pydantic>=2.8.0", "psutil>=7.0.0", "fastapi-mcp>=0.4.0", + "asyncpg>=0.29.0", + "alembic>=1.13.0", + "sqlalchemy[asyncio]>=2.0.0", + "google-genai>=0.1.0", + "aiosqlite>=0.21.0", ] [[project.authors]] @@ -90,8 +95,6 @@ homepage = "https://github.com/myakove/github-webhook-server" repository = "https://github.com/myakove/github-webhook-server" "Bug Tracker" = "https://github.com/myakove/github-webhook-server/issues" -[project.optional-dependencies] -tests = ["pytest-asyncio>=0.26.0", "pytest-xdist>=3.7.0"] [build-system] requires = ["hatchling"] diff --git a/tox.toml b/tox.toml index 8a11e557..974eb746 100644 --- a/tox.toml +++ b/tox.toml @@ -18,7 +18,7 @@ commands = [ [ "uv", "run", - "--extra", + "--group", "tests", "pytest", "-n", diff --git a/uv.lock b/uv.lock index 15f3d9a4..2c56bbb5 100644 --- a/uv.lock +++ b/uv.lock @@ -2,6 +2,32 @@ version = 1 revision = 3 requires-python = "==3.13.*" +[[package]] +name = "aiosqlite" +version = "0.21.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/13/7d/8bca2bf9a247c2c5dfeec1d7a5f40db6518f88d314b8bca9da29670d2671/aiosqlite-0.21.0.tar.gz", hash = "sha256:131bb8056daa3bc875608c631c678cda73922a2d4ba8aec373b19f18c17e7aa3", size = 13454, upload-time = "2025-02-03T07:30:16.235Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/f5/10/6c25ed6de94c49f88a91fa5018cb4c0f3625f31d5be9f771ebe5cc7cd506/aiosqlite-0.21.0-py3-none-any.whl", hash = "sha256:2549cf4057f95f53dcba16f2b64e8e2791d7e1adedb13197dd8ed77bb226d7d0", size = 15792, upload-time = "2025-02-03T07:30:13.6Z" }, +] + +[[package]] +name = "alembic" +version = "1.17.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mako" }, + { name = "sqlalchemy" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/02/a6/74c8cadc2882977d80ad756a13857857dbcf9bd405bc80b662eb10651282/alembic-1.17.2.tar.gz", hash = "sha256:bbe9751705c5e0f14877f02d46c53d10885e377e3d90eda810a016f9baa19e8e", size = 1988064, upload-time = "2025-11-14T20:35:04.057Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ba/88/6237e97e3385b57b5f1528647addea5cc03d4d65d5979ab24327d41fb00d/alembic-1.17.2-py3-none-any.whl", hash = "sha256:f483dd1fe93f6c5d49217055e4d15b905b425b6af906746abb35b69c1996c4e6", size = 248554, upload-time = "2025-11-14T20:35:05.699Z" }, +] + [[package]] name = "annotated-doc" version = "0.0.4" @@ -42,6 +68,22 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d2/39/e7eaf1799466a4aef85b6a4fe7bd175ad2b1c6345066aa33f1f58d4b18d0/asttokens-3.0.1-py3-none-any.whl", hash = "sha256:15a3ebc0f43c2d0a50eeafea25e19046c68398e487b9f1f5b517f7c0f40f976a", size = 27047, upload-time = "2025-11-15T16:43:16.109Z" }, ] +[[package]] +name = "asyncpg" +version = "0.30.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2f/4c/7c991e080e106d854809030d8584e15b2e996e26f16aee6d757e387bc17d/asyncpg-0.30.0.tar.gz", hash = "sha256:c551e9928ab6707602f44811817f82ba3c446e018bfe1d3abecc8ba5f3eac851", size = 957746, upload-time = "2024-10-20T00:30:41.127Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/3a/22/e20602e1218dc07692acf70d5b902be820168d6282e69ef0d3cb920dc36f/asyncpg-0.30.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:05b185ebb8083c8568ea8a40e896d5f7af4b8554b64d7719c0eaa1eb5a5c3a70", size = 670373, upload-time = "2024-10-20T00:29:55.165Z" }, + { url = "https://files.pythonhosted.org/packages/3d/b3/0cf269a9d647852a95c06eb00b815d0b95a4eb4b55aa2d6ba680971733b9/asyncpg-0.30.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c47806b1a8cbb0a0db896f4cd34d89942effe353a5035c62734ab13b9f938da3", size = 634745, upload-time = "2024-10-20T00:29:57.14Z" }, + { url = "https://files.pythonhosted.org/packages/8e/6d/a4f31bf358ce8491d2a31bfe0d7bcf25269e80481e49de4d8616c4295a34/asyncpg-0.30.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b6fde867a74e8c76c71e2f64f80c64c0f3163e687f1763cfaf21633ec24ec33", size = 3512103, upload-time = "2024-10-20T00:29:58.499Z" }, + { url = "https://files.pythonhosted.org/packages/96/19/139227a6e67f407b9c386cb594d9628c6c78c9024f26df87c912fabd4368/asyncpg-0.30.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46973045b567972128a27d40001124fbc821c87a6cade040cfcd4fa8a30bcdc4", size = 3592471, upload-time = "2024-10-20T00:30:00.354Z" }, + { url = "https://files.pythonhosted.org/packages/67/e4/ab3ca38f628f53f0fd28d3ff20edff1c975dd1cb22482e0061916b4b9a74/asyncpg-0.30.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9110df111cabc2ed81aad2f35394a00cadf4f2e0635603db6ebbd0fc896f46a4", size = 3496253, upload-time = "2024-10-20T00:30:02.794Z" }, + { url = "https://files.pythonhosted.org/packages/ef/5f/0bf65511d4eeac3a1f41c54034a492515a707c6edbc642174ae79034d3ba/asyncpg-0.30.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:04ff0785ae7eed6cc138e73fc67b8e51d54ee7a3ce9b63666ce55a0bf095f7ba", size = 3662720, upload-time = "2024-10-20T00:30:04.501Z" }, + { url = 
"https://files.pythonhosted.org/packages/e7/31/1513d5a6412b98052c3ed9158d783b1e09d0910f51fbe0e05f56cc370bc4/asyncpg-0.30.0-cp313-cp313-win32.whl", hash = "sha256:ae374585f51c2b444510cdf3595b97ece4f233fde739aa14b50e0d64e8a7a590", size = 560404, upload-time = "2024-10-20T00:30:06.537Z" }, + { url = "https://files.pythonhosted.org/packages/c8/a4/cec76b3389c4c5ff66301cd100fe88c318563ec8a520e0b2e792b5b84972/asyncpg-0.30.0-cp313-cp313-win_amd64.whl", hash = "sha256:f59b430b8e27557c3fb9869222559f7417ced18688375825f8f12302c34e915e", size = 621623, upload-time = "2024-10-20T00:30:09.024Z" }, +] + [[package]] name = "asyncstdlib" version = "3.13.1" @@ -127,6 +169,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/cb/8c/2b30c12155ad8de0cf641d76a8b396a16d2c36bc6d50b621a62b7c4567c1/build-1.3.0-py3-none-any.whl", hash = "sha256:7145f0b5061ba90a1500d60bd1b13ca0a8a4cebdd0cc16ed8adf1c0e739f43b4", size = 23382, upload-time = "2025-08-01T21:27:07.844Z" }, ] +[[package]] +name = "cachetools" +version = "6.2.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fb/44/ca1675be2a83aeee1886ab745b28cda92093066590233cc501890eb8417a/cachetools-6.2.2.tar.gz", hash = "sha256:8e6d266b25e539df852251cfd6f990b4bc3a141db73b939058d809ebd2590fc6", size = 31571, upload-time = "2025-11-13T17:42:51.465Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e6/46/eb6eca305c77a4489affe1c5d8f4cae82f285d9addd8de4ec084a7184221/cachetools-6.2.2-py3-none-any.whl", hash = "sha256:6c09c98183bf58560c97b2abfcedcbaf6a896a490f534b031b661d3723b45ace", size = 11503, upload-time = "2025-11-13T17:42:50.232Z" }, +] + [[package]] name = "certifi" version = "2025.11.12" @@ -374,12 +425,16 @@ name = "github-webhook-server" version = "4.0.0" source = { editable = "." 
} dependencies = [ + { name = "aiosqlite" }, + { name = "alembic" }, + { name = "asyncpg" }, { name = "asyncstdlib" }, { name = "build" }, { name = "colorama" }, { name = "colorlog" }, { name = "fastapi" }, { name = "fastapi-mcp" }, + { name = "google-genai" }, { name = "httpx" }, { name = "psutil" }, { name = "pydantic" }, @@ -394,18 +449,13 @@ dependencies = [ { name = "requests" }, { name = "ruff" }, { name = "shortuuid" }, + { name = "sqlalchemy", extra = ["asyncio"] }, { name = "string-color" }, { name = "timeout-sampler" }, { name = "uvicorn", extra = ["standard"] }, { name = "webcolors" }, ] -[package.optional-dependencies] -tests = [ - { name = "pytest-asyncio" }, - { name = "pytest-xdist" }, -] - [package.dev-dependencies] dev = [ { name = "ipdb" }, @@ -422,12 +472,16 @@ tests = [ [package.metadata] requires-dist = [ + { name = "aiosqlite", specifier = ">=0.21.0" }, + { name = "alembic", specifier = ">=1.13.0" }, + { name = "asyncpg", specifier = ">=0.29.0" }, { name = "asyncstdlib", specifier = ">=3.13.1" }, { name = "build", specifier = ">=1.2.2.post1" }, { name = "colorama", specifier = ">=0.4.6" }, { name = "colorlog", specifier = ">=6.8.2" }, { name = "fastapi", specifier = ">=0.115.0" }, { name = "fastapi-mcp", specifier = ">=0.4.0" }, + { name = "google-genai", specifier = ">=0.1.0" }, { name = "httpx", specifier = ">=0.28.1" }, { name = "psutil", specifier = ">=7.0.0" }, { name = "pydantic", specifier = ">=2.8.0" }, @@ -435,21 +489,19 @@ requires-dist = [ { name = "pyhelper-utils", specifier = ">=0.0.42" }, { name = "pyjwt", specifier = ">=2.8.0" }, { name = "pytest", specifier = ">=8.3.3" }, - { name = "pytest-asyncio", marker = "extra == 'tests'", specifier = ">=0.26.0" }, { name = "pytest-cov", specifier = ">=6.0.0" }, { name = "pytest-mock", specifier = ">=3.14.0" }, - { name = "pytest-xdist", marker = "extra == 'tests'", specifier = ">=3.7.0" }, { name = "python-simple-logger", specifier = ">=1.0.40" }, { name = "pyyaml", specifier = ">=6.0.2" 
}, { name = "requests", specifier = ">=2.32.3" }, { name = "ruff", specifier = ">=0.6.9" }, { name = "shortuuid", specifier = ">=1.0.13" }, + { name = "sqlalchemy", extras = ["asyncio"], specifier = ">=2.0.0" }, { name = "string-color", specifier = ">=1.2.3" }, { name = "timeout-sampler", specifier = ">=0.0.46" }, { name = "uvicorn", extras = ["standard"], specifier = ">=0.32.0" }, { name = "webcolors", specifier = ">=24.11.1" }, ] -provides-extras = ["tests"] [package.metadata.requires-dev] dev = [ @@ -465,6 +517,58 @@ tests = [ { name = "pytest-xdist", specifier = ">=3.7.0" }, ] +[[package]] +name = "google-auth" +version = "2.43.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cachetools" }, + { name = "pyasn1-modules" }, + { name = "rsa" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ff/ef/66d14cf0e01b08d2d51ffc3c20410c4e134a1548fc246a6081eae585a4fe/google_auth-2.43.0.tar.gz", hash = "sha256:88228eee5fc21b62a1b5fe773ca15e67778cb07dc8363adcb4a8827b52d81483", size = 296359, upload-time = "2025-11-06T00:13:36.587Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6f/d1/385110a9ae86d91cc14c5282c61fe9f4dc41c0b9f7d423c6ad77038c4448/google_auth-2.43.0-py2.py3-none-any.whl", hash = "sha256:af628ba6fa493f75c7e9dbe9373d148ca9f4399b5ea29976519e0a3848eddd16", size = 223114, upload-time = "2025-11-06T00:13:35.209Z" }, +] + +[[package]] +name = "google-genai" +version = "1.52.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "google-auth" }, + { name = "httpx" }, + { name = "pydantic" }, + { name = "requests" }, + { name = "tenacity" }, + { name = "typing-extensions" }, + { name = "websockets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/09/4e/0ad8585d05312074bb69711b2d81cfed69ce0ae441913d57bf169bed20a7/google_genai-1.52.0.tar.gz", hash = "sha256:a74e8a4b3025f23aa98d6a0f84783119012ca6c336fd68f73c5d2b11465d7fc5", size = 258743, 
upload-time = "2025-11-21T02:18:55.742Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/66/03f663e7bca7abe9ccfebe6cb3fe7da9a118fd723a5abb278d6117e7990e/google_genai-1.52.0-py3-none-any.whl", hash = "sha256:c8352b9f065ae14b9322b949c7debab8562982f03bf71d44130cd2b798c20743", size = 261219, upload-time = "2025-11-21T02:18:54.515Z" }, +] + +[[package]] +name = "greenlet" +version = "3.2.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/03/b8/704d753a5a45507a7aab61f18db9509302ed3d0a27ac7e0359ec2905b1a6/greenlet-3.2.4.tar.gz", hash = "sha256:0dca0d95ff849f9a364385f36ab49f50065d76964944638be9691e1832e9f86d", size = 188260, upload-time = "2025-08-07T13:24:33.51Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/49/e8/58c7f85958bda41dafea50497cbd59738c5c43dbbea5ee83d651234398f4/greenlet-3.2.4-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:1a921e542453fe531144e91e1feedf12e07351b1cf6c9e8a3325ea600a715a31", size = 272814, upload-time = "2025-08-07T13:15:50.011Z" }, + { url = "https://files.pythonhosted.org/packages/62/dd/b9f59862e9e257a16e4e610480cfffd29e3fae018a68c2332090b53aac3d/greenlet-3.2.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd3c8e693bff0fff6ba55f140bf390fa92c994083f838fece0f63be121334945", size = 641073, upload-time = "2025-08-07T13:42:57.23Z" }, + { url = "https://files.pythonhosted.org/packages/f7/0b/bc13f787394920b23073ca3b6c4a7a21396301ed75a655bcb47196b50e6e/greenlet-3.2.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:710638eb93b1fa52823aa91bf75326f9ecdfd5e0466f00789246a5280f4ba0fc", size = 655191, upload-time = "2025-08-07T13:45:29.752Z" }, + { url = "https://files.pythonhosted.org/packages/f2/d6/6adde57d1345a8d0f14d31e4ab9c23cfe8e2cd39c3baf7674b4b0338d266/greenlet-3.2.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = 
"sha256:c5111ccdc9c88f423426df3fd1811bfc40ed66264d35aa373420a34377efc98a", size = 649516, upload-time = "2025-08-07T13:53:16.314Z" }, + { url = "https://files.pythonhosted.org/packages/7f/3b/3a3328a788d4a473889a2d403199932be55b1b0060f4ddd96ee7cdfcad10/greenlet-3.2.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d76383238584e9711e20ebe14db6c88ddcedc1829a9ad31a584389463b5aa504", size = 652169, upload-time = "2025-08-07T13:18:32.861Z" }, + { url = "https://files.pythonhosted.org/packages/ee/43/3cecdc0349359e1a527cbf2e3e28e5f8f06d3343aaf82ca13437a9aa290f/greenlet-3.2.4-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:23768528f2911bcd7e475210822ffb5254ed10d71f4028387e5a99b4c6699671", size = 610497, upload-time = "2025-08-07T13:18:31.636Z" }, + { url = "https://files.pythonhosted.org/packages/b8/19/06b6cf5d604e2c382a6f31cafafd6f33d5dea706f4db7bdab184bad2b21d/greenlet-3.2.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:00fadb3fedccc447f517ee0d3fd8fe49eae949e1cd0f6a611818f4f6fb7dc83b", size = 1121662, upload-time = "2025-08-07T13:42:41.117Z" }, + { url = "https://files.pythonhosted.org/packages/a2/15/0d5e4e1a66fab130d98168fe984c509249c833c1a3c16806b90f253ce7b9/greenlet-3.2.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:d25c5091190f2dc0eaa3f950252122edbbadbb682aa7b1ef2f8af0f8c0afefae", size = 1149210, upload-time = "2025-08-07T13:18:24.072Z" }, + { url = "https://files.pythonhosted.org/packages/1c/53/f9c440463b3057485b8594d7a638bed53ba531165ef0ca0e6c364b5cc807/greenlet-3.2.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6e343822feb58ac4d0a1211bd9399de2b3a04963ddeec21530fc426cc121f19b", size = 1564759, upload-time = "2025-11-04T12:42:19.395Z" }, + { url = "https://files.pythonhosted.org/packages/47/e4/3bb4240abdd0a8d23f4f88adec746a3099f0d86bfedb623f063b2e3b4df0/greenlet-3.2.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:ca7f6f1f2649b89ce02f6f229d7c19f680a6238af656f61e0115b24857917929", size = 1634288, upload-time = "2025-11-04T12:42:21.174Z" }, + { url = "https://files.pythonhosted.org/packages/0b/55/2321e43595e6801e105fcfdee02b34c0f996eb71e6ddffca6b10b7e1d771/greenlet-3.2.4-cp313-cp313-win_amd64.whl", hash = "sha256:554b03b6e73aaabec3745364d6239e9e012d64c68ccd0b8430c64ccc14939a8b", size = 299685, upload-time = "2025-08-07T13:24:38.824Z" }, +] + [[package]] name = "h11" version = "0.16.0" @@ -638,6 +742,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl", hash = "sha256:98802fee3a11ee76ecaca44429fda8a41bff98b00a0f2838151b113f210cc6fe", size = 18437, upload-time = "2025-09-08T01:34:57.871Z" }, ] +[[package]] +name = "mako" +version = "1.3.10" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9e/38/bd5b78a920a64d708fe6bc8e0a2c075e1389d53bef8413725c63ba041535/mako-1.3.10.tar.gz", hash = "sha256:99579a6f39583fa7e5630a28c3c1f440e4e97a414b80372649c0ce338da2ea28", size = 392474, upload-time = "2025-04-10T12:44:31.16Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/87/fb/99f81ac72ae23375f22b7afdb7642aba97c00a713c217124420147681a2f/mako-1.3.10-py3-none-any.whl", hash = "sha256:baef24a52fc4fc514a0887ac600f9f1cff3d82c61d4d700a1fa84d597b88db59", size = 78509, upload-time = "2025-04-10T12:50:53.297Z" }, +] + [[package]] name = "markdown-it-py" version = "4.0.0" @@ -650,6 +766,36 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" }, ] +[[package]] +name = "markupsafe" +version = 
"3.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313, upload-time = "2025-09-27T18:37:40.426Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/38/2f/907b9c7bbba283e68f20259574b13d005c121a0fa4c175f9bed27c4597ff/markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795", size = 11622, upload-time = "2025-09-27T18:36:41.777Z" }, + { url = "https://files.pythonhosted.org/packages/9c/d9/5f7756922cdd676869eca1c4e3c0cd0df60ed30199ffd775e319089cb3ed/markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219", size = 12029, upload-time = "2025-09-27T18:36:43.257Z" }, + { url = "https://files.pythonhosted.org/packages/00/07/575a68c754943058c78f30db02ee03a64b3c638586fba6a6dd56830b30a3/markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6", size = 24374, upload-time = "2025-09-27T18:36:44.508Z" }, + { url = "https://files.pythonhosted.org/packages/a9/21/9b05698b46f218fc0e118e1f8168395c65c8a2c750ae2bab54fc4bd4e0e8/markupsafe-3.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676", size = 22980, upload-time = "2025-09-27T18:36:45.385Z" }, + { url = "https://files.pythonhosted.org/packages/7f/71/544260864f893f18b6827315b988c146b559391e6e7e8f7252839b1b846a/markupsafe-3.0.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9", size = 21990, 
upload-time = "2025-09-27T18:36:46.916Z" }, + { url = "https://files.pythonhosted.org/packages/c2/28/b50fc2f74d1ad761af2f5dcce7492648b983d00a65b8c0e0cb457c82ebbe/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1", size = 23784, upload-time = "2025-09-27T18:36:47.884Z" }, + { url = "https://files.pythonhosted.org/packages/ed/76/104b2aa106a208da8b17a2fb72e033a5a9d7073c68f7e508b94916ed47a9/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc", size = 21588, upload-time = "2025-09-27T18:36:48.82Z" }, + { url = "https://files.pythonhosted.org/packages/b5/99/16a5eb2d140087ebd97180d95249b00a03aa87e29cc224056274f2e45fd6/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12", size = 23041, upload-time = "2025-09-27T18:36:49.797Z" }, + { url = "https://files.pythonhosted.org/packages/19/bc/e7140ed90c5d61d77cea142eed9f9c303f4c4806f60a1044c13e3f1471d0/markupsafe-3.0.3-cp313-cp313-win32.whl", hash = "sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed", size = 14543, upload-time = "2025-09-27T18:36:51.584Z" }, + { url = "https://files.pythonhosted.org/packages/05/73/c4abe620b841b6b791f2edc248f556900667a5a1cf023a6646967ae98335/markupsafe-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5", size = 15113, upload-time = "2025-09-27T18:36:52.537Z" }, + { url = "https://files.pythonhosted.org/packages/f0/3a/fa34a0f7cfef23cf9500d68cb7c32dd64ffd58a12b09225fb03dd37d5b80/markupsafe-3.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485", size = 13911, upload-time = "2025-09-27T18:36:53.513Z" }, + { url = 
"https://files.pythonhosted.org/packages/e4/d7/e05cd7efe43a88a17a37b3ae96e79a19e846f3f456fe79c57ca61356ef01/markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73", size = 11658, upload-time = "2025-09-27T18:36:54.819Z" }, + { url = "https://files.pythonhosted.org/packages/99/9e/e412117548182ce2148bdeacdda3bb494260c0b0184360fe0d56389b523b/markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37", size = 12066, upload-time = "2025-09-27T18:36:55.714Z" }, + { url = "https://files.pythonhosted.org/packages/bc/e6/fa0ffcda717ef64a5108eaa7b4f5ed28d56122c9a6d70ab8b72f9f715c80/markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19", size = 25639, upload-time = "2025-09-27T18:36:56.908Z" }, + { url = "https://files.pythonhosted.org/packages/96/ec/2102e881fe9d25fc16cb4b25d5f5cde50970967ffa5dddafdb771237062d/markupsafe-3.0.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025", size = 23569, upload-time = "2025-09-27T18:36:57.913Z" }, + { url = "https://files.pythonhosted.org/packages/4b/30/6f2fce1f1f205fc9323255b216ca8a235b15860c34b6798f810f05828e32/markupsafe-3.0.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6", size = 23284, upload-time = "2025-09-27T18:36:58.833Z" }, + { url = "https://files.pythonhosted.org/packages/58/47/4a0ccea4ab9f5dcb6f79c0236d954acb382202721e704223a8aafa38b5c8/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f", size = 24801, upload-time = "2025-09-27T18:36:59.739Z" }, + { 
url = "https://files.pythonhosted.org/packages/6a/70/3780e9b72180b6fecb83a4814d84c3bf4b4ae4bf0b19c27196104149734c/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb", size = 22769, upload-time = "2025-09-27T18:37:00.719Z" }, + { url = "https://files.pythonhosted.org/packages/98/c5/c03c7f4125180fc215220c035beac6b9cb684bc7a067c84fc69414d315f5/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009", size = 23642, upload-time = "2025-09-27T18:37:01.673Z" }, + { url = "https://files.pythonhosted.org/packages/80/d6/2d1b89f6ca4bff1036499b1e29a1d02d282259f3681540e16563f27ebc23/markupsafe-3.0.3-cp313-cp313t-win32.whl", hash = "sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354", size = 14612, upload-time = "2025-09-27T18:37:02.639Z" }, + { url = "https://files.pythonhosted.org/packages/2b/98/e48a4bfba0a0ffcf9925fe2d69240bfaa19c6f7507b8cd09c70684a53c1e/markupsafe-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218", size = 15200, upload-time = "2025-09-27T18:37:03.582Z" }, + { url = "https://files.pythonhosted.org/packages/0e/72/e3cc540f351f316e9ed0f092757459afbc595824ca724cbc5a5d4263713f/markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287", size = 13973, upload-time = "2025-09-27T18:37:04.929Z" }, +] + [[package]] name = "matplotlib-inline" version = "0.2.1" @@ -809,6 +955,27 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/8e/37/efad0257dc6e593a18957422533ff0f87ede7c9c6ea010a2177d738fb82f/pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0", size = 11842, upload-time = "2024-07-21T12:58:20.04Z" }, ] +[[package]] +name = "pyasn1" +version = "0.6.1" +source = { registry = 
"https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ba/e9/01f1a64245b89f039897cb0130016d79f77d52669aae6ee7b159a6c4c018/pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034", size = 145322, upload-time = "2024-09-10T22:41:42.55Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c8/f1/d6a797abb14f6283c0ddff96bbdd46937f64122b8c925cab503dd37f8214/pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629", size = 83135, upload-time = "2024-09-11T16:00:36.122Z" }, +] + +[[package]] +name = "pyasn1-modules" +version = "0.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyasn1" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e9/e6/78ebbb10a8c8e4b61a59249394a4a594c1a7af95593dc933a349c8d00964/pyasn1_modules-0.4.2.tar.gz", hash = "sha256:677091de870a80aae844b1ca6134f54652fa2c8c5a52aa396440ac3106e941e6", size = 307892, upload-time = "2025-03-28T02:41:22.17Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/47/8d/d529b5d697919ba8c11ad626e835d4039be708a35b0d22de83a269a6682c/pyasn1_modules-0.4.2-py3-none-any.whl", hash = "sha256:29253a9207ce32b64c3ac6600edc75368f98473906e8fd1043bd6b5b1de2c14a", size = 181259, upload-time = "2025-03-28T02:41:19.028Z" }, +] + [[package]] name = "pycparser" version = "2.23" @@ -1172,6 +1339,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/3b/3a/12dc43f13594a54ea0c9d7e9d43002116557330e3ad45bc56097ddf266e2/rpds_py-0.29.0-cp313-cp313t-win_amd64.whl", hash = "sha256:f49196aec7c4b406495f60e6f947ad71f317a765f956d74bbd83996b9edc0352", size = 225248, upload-time = "2025-11-16T14:49:24.841Z" }, ] +[[package]] +name = "rsa" +version = "4.9.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyasn1" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/da/8a/22b7beea3ee0d44b1916c0c1cb0ee3af23b700b6da9f04991899d0c555d4/rsa-4.9.1.tar.gz", hash = "sha256:e7bdbfdb5497da4c07dfd35530e1a902659db6ff241e39d9953cad06ebd0ae75", size = 29034, upload-time = "2025-04-16T09:51:18.218Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/64/8d/0133e4eb4beed9e425d9a98ed6e081a55d195481b7632472be1af08d2f6b/rsa-4.9.1-py3-none-any.whl", hash = "sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762", size = 34696, upload-time = "2025-04-16T09:51:17.142Z" }, +] + [[package]] name = "ruff" version = "0.14.6" @@ -1243,6 +1422,32 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, ] +[[package]] +name = "sqlalchemy" +version = "2.0.44" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "greenlet", marker = "platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64'" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f0/f2/840d7b9496825333f532d2e3976b8eadbf52034178aac53630d09fe6e1ef/sqlalchemy-2.0.44.tar.gz", hash = "sha256:0ae7454e1ab1d780aee69fd2aae7d6b8670a581d8847f2d1e0f7ddfbf47e5a22", size = 9819830, upload-time = "2025-10-10T14:39:12.935Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/45/d3/c67077a2249fdb455246e6853166360054c331db4613cda3e31ab1cadbef/sqlalchemy-2.0.44-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ff486e183d151e51b1d694c7aa1695747599bb00b9f5f604092b54b74c64a8e1", size = 2135479, upload-time = "2025-10-10T16:03:37.671Z" }, + { url = 
"https://files.pythonhosted.org/packages/2b/91/eabd0688330d6fd114f5f12c4f89b0d02929f525e6bf7ff80aa17ca802af/sqlalchemy-2.0.44-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0b1af8392eb27b372ddb783b317dea0f650241cea5bd29199b22235299ca2e45", size = 2123212, upload-time = "2025-10-10T16:03:41.755Z" }, + { url = "https://files.pythonhosted.org/packages/b0/bb/43e246cfe0e81c018076a16036d9b548c4cc649de241fa27d8d9ca6f85ab/sqlalchemy-2.0.44-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b61188657e3a2b9ac4e8f04d6cf8e51046e28175f79464c67f2fd35bceb0976", size = 3255353, upload-time = "2025-10-10T15:35:31.221Z" }, + { url = "https://files.pythonhosted.org/packages/b9/96/c6105ed9a880abe346b64d3b6ddef269ddfcab04f7f3d90a0bf3c5a88e82/sqlalchemy-2.0.44-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b87e7b91a5d5973dda5f00cd61ef72ad75a1db73a386b62877d4875a8840959c", size = 3260222, upload-time = "2025-10-10T15:43:50.124Z" }, + { url = "https://files.pythonhosted.org/packages/44/16/1857e35a47155b5ad927272fee81ae49d398959cb749edca6eaa399b582f/sqlalchemy-2.0.44-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:15f3326f7f0b2bfe406ee562e17f43f36e16167af99c4c0df61db668de20002d", size = 3189614, upload-time = "2025-10-10T15:35:32.578Z" }, + { url = "https://files.pythonhosted.org/packages/88/ee/4afb39a8ee4fc786e2d716c20ab87b5b1fb33d4ac4129a1aaa574ae8a585/sqlalchemy-2.0.44-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1e77faf6ff919aa8cd63f1c4e561cac1d9a454a191bb864d5dd5e545935e5a40", size = 3226248, upload-time = "2025-10-10T15:43:51.862Z" }, + { url = "https://files.pythonhosted.org/packages/32/d5/0e66097fc64fa266f29a7963296b40a80d6a997b7ac13806183700676f86/sqlalchemy-2.0.44-cp313-cp313-win32.whl", hash = "sha256:ee51625c2d51f8baadf2829fae817ad0b66b140573939dd69284d2ba3553ae73", size = 2101275, upload-time = "2025-10-10T15:03:26.096Z" }, + { url = 
"https://files.pythonhosted.org/packages/03/51/665617fe4f8c6450f42a6d8d69243f9420f5677395572c2fe9d21b493b7b/sqlalchemy-2.0.44-cp313-cp313-win_amd64.whl", hash = "sha256:c1c80faaee1a6c3428cecf40d16a2365bcf56c424c92c2b6f0f9ad204b899e9e", size = 2127901, upload-time = "2025-10-10T15:03:27.548Z" }, + { url = "https://files.pythonhosted.org/packages/9c/5e/6a29fa884d9fb7ddadf6b69490a9d45fded3b38541713010dad16b77d015/sqlalchemy-2.0.44-py3-none-any.whl", hash = "sha256:19de7ca1246fbef9f9d1bff8f1ab25641569df226364a0e40457dc5457c54b05", size = 1928718, upload-time = "2025-10-10T15:29:45.32Z" }, +] + +[package.optional-dependencies] +asyncio = [ + { name = "greenlet" }, +] + [[package]] name = "sse-starlette" version = "3.0.3" @@ -1295,6 +1500,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c3/59/7b7c77303c7003667d4ebea8a96bd2e08d17b6c1a16e807bf92edb3a645f/string_color-1.3.0-py3-none-any.whl", hash = "sha256:cf16bbf0b2e4d11789570799f6827cf7f946b94dee5ec1016605561715942742", size = 12391, upload-time = "2025-06-30T18:15:43.547Z" }, ] +[[package]] +name = "tenacity" +version = "9.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0a/d4/2b0cd0fe285e14b36db076e78c93766ff1d529d70408bd1d2a5a84f1d929/tenacity-9.1.2.tar.gz", hash = "sha256:1169d376c297e7de388d18b4481760d478b0e99a777cad3a9c86e556f4b697cb", size = 48036, upload-time = "2025-04-02T08:25:09.966Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/30/643397144bfbfec6f6ef821f36f33e57d35946c44a2352d3c9f0ae847619/tenacity-9.1.2-py3-none-any.whl", hash = "sha256:f77bf36710d8b73a50b2dd155c97b870017ad21afe6ab300326b0371b3b05138", size = 28248, upload-time = "2025-04-02T08:25:07.678Z" }, +] + [[package]] name = "timeout-sampler" version = "1.0.22" diff --git a/webhook_server/app.py b/webhook_server/app.py index 2e7711ee..ffa46692 100644 --- a/webhook_server/app.py +++ b/webhook_server/app.py @@ -1,10 +1,13 @@ import asyncio +import 
base64 import ipaddress import json import logging +import math import os from collections.abc import AsyncGenerator from contextlib import asynccontextmanager +from datetime import UTC, datetime from typing import Any import httpx @@ -18,7 +21,9 @@ Request, Response, WebSocket, - status, +) +from fastapi import ( + status as http_status, ) from fastapi.responses import HTMLResponse, JSONResponse, StreamingResponse from fastapi.staticfiles import StaticFiles @@ -30,8 +35,10 @@ from starlette.datastructures import Headers from webhook_server.libs.config import Config +from webhook_server.libs.database import DatabaseManager from webhook_server.libs.exceptions import RepositoryNotFoundInConfigError from webhook_server.libs.github_api import GithubWebhook +from webhook_server.libs.metrics_tracker import MetricsTracker from webhook_server.utils.app_utils import ( HTTP_TIMEOUT_SECONDS, gate_by_allowlist_ips, @@ -45,11 +52,13 @@ prepare_log_prefix, ) from webhook_server.web.log_viewer import LogViewerController +from webhook_server.web.metrics_dashboard import MetricsDashboardController # Constants APP_URL_ROOT_PATH: str = "/webhook_server" LOG_SERVER_ENABLED: bool = os.environ.get("ENABLE_LOG_SERVER") == "true" MCP_SERVER_ENABLED: bool = os.environ.get("ENABLE_MCP_SERVER") == "true" +METRICS_SERVER_ENABLED: bool = os.environ.get("ENABLE_METRICS_SERVER") == "true" # Global variables ALLOWED_IPS: tuple[ipaddress._BaseNetwork, ...] 
= () @@ -62,6 +71,10 @@ http_transport: Any | None = None mcp: Any | None = None +# Metrics Server Globals +db_manager: Any | None = None +metrics_tracker: Any | None = None + class IgnoreMCPClosedResourceErrorFilter(logging.Filter): """Filter to suppress ClosedResourceError logs from MCP server.""" @@ -87,14 +100,24 @@ def require_log_server_enabled() -> None: """Dependency to ensure log server is enabled before accessing log viewer APIs.""" if not LOG_SERVER_ENABLED: raise HTTPException( - status_code=status.HTTP_404_NOT_FOUND, + status_code=http_status.HTTP_404_NOT_FOUND, detail="Log server is disabled. Set ENABLE_LOG_SERVER=true to enable.", ) +def require_metrics_server_enabled() -> None: + """Dependency to ensure metrics server is enabled before accessing metrics APIs.""" + if not METRICS_SERVER_ENABLED: + raise HTTPException( + status_code=http_status.HTTP_404_NOT_FOUND, + detail="Metrics server is disabled. Set ENABLE_METRICS_SERVER=true to enable.", + ) + + @asynccontextmanager async def lifespan(_app: FastAPI) -> AsyncGenerator[None]: - global _lifespan_http_client + global _lifespan_http_client, ALLOWED_IPS, http_transport, mcp, db_manager + global metrics_tracker, _log_viewer_controller_singleton, _metrics_dashboard_controller_singleton, _background_tasks _lifespan_http_client = httpx.AsyncClient(timeout=HTTP_TIMEOUT_SECONDS) # Apply filter to MCP logger to suppress client disconnect noise @@ -142,6 +165,28 @@ async def lifespan(_app: FastAPI) -> AsyncGenerator[None]: mcp_logger.propagate = False LOGGER.info(f"MCP logging configured to: {mcp_log_file} via handlers from {mcp_file_logger.name}") + # Configure Metrics Server logging separation + if METRICS_SERVER_ENABLED: + metrics_log_file = root_config.get("metrics-server-log-file", "metrics_server.log") + + # Use get_logger_with_params to reuse existing logging configuration logic + # (rotation, sensitive data masking, formatting) + # This returns a logger configured for the specific file + 
metrics_file_logger = get_logger_with_params(log_file_name=metrics_log_file) + + # Create dedicated logger for metrics server and stop propagation + # This ensures Metrics logs go ONLY to metrics_server.log and not webhook_server.log + metrics_logger = logging.getLogger("webhook_server.metrics") + if metrics_file_logger.handlers and not metrics_logger.handlers: + for handler in metrics_file_logger.handlers: + metrics_logger.addHandler(handler) + + metrics_logger.propagate = False + LOGGER.info( + f"Metrics Server logging configured to: {metrics_log_file} " + f"via handlers from {metrics_file_logger.name}" + ) + verify_github_ips = root_config.get("verify-github-ips", False) verify_cloudflare_ips = root_config.get("verify-cloudflare-ips", False) disable_ssl_warnings = root_config.get("disable-ssl-warnings", False) @@ -153,7 +198,6 @@ async def lifespan(_app: FastAPI) -> AsyncGenerator[None]: LOGGER.debug(f"verify_github_ips: {verify_github_ips}, verify_cloudflare_ips: {verify_cloudflare_ips}") - global ALLOWED_IPS networks: set[ipaddress._BaseNetwork] = set() if verify_cloudflare_ips: @@ -195,7 +239,6 @@ async def lifespan(_app: FastAPI) -> AsyncGenerator[None]: ) # Initialize MCP session manager if enabled and configured - global http_transport, mcp if MCP_SERVER_ENABLED and http_transport is not None and mcp is not None: if http_transport._session_manager is None: http_transport._session_manager = StreamableHTTPSessionManager( @@ -214,25 +257,47 @@ async def run_manager() -> None: http_transport._manager_started = True LOGGER.info("MCP session manager initialized in lifespan") + # Initialize database managers if metrics server is enabled + if METRICS_SERVER_ENABLED: + metrics_logger = logging.getLogger("webhook_server.metrics") + db_manager = DatabaseManager(config, metrics_logger) + + await db_manager.connect() + LOGGER.info("Metrics Server database manager initialized successfully") + + # Initialize metrics tracker + metrics_tracker = MetricsTracker(db_manager, 
metrics_logger) + LOGGER.info("Metrics tracker initialized successfully") + yield - except Exception as ex: - LOGGER.error(f"Application failed during lifespan management: {ex}") + except Exception: + LOGGER.exception("Application failed during lifespan management") raise finally: # Shutdown LogViewerController singleton and close WebSocket connections - global _log_viewer_controller_singleton if _log_viewer_controller_singleton is not None: await _log_viewer_controller_singleton.shutdown() LOGGER.debug("LogViewerController singleton shutdown complete") + # Shutdown MetricsDashboardController singleton and close WebSocket connections + if _metrics_dashboard_controller_singleton is not None: + await _metrics_dashboard_controller_singleton.shutdown() + LOGGER.debug("MetricsDashboardController singleton shutdown complete") + + # Shutdown database manager (metrics server) - must be before HTTP client close + # as database operations might use the HTTP client + if db_manager is not None: + await db_manager.disconnect() + LOGGER.debug("Database manager disconnected") + LOGGER.info("Metrics Server database manager shutdown complete") + if _lifespan_http_client: await _lifespan_http_client.aclose() LOGGER.debug("HTTP client closed") # Optionally wait for pending background tasks for graceful shutdown - global _background_tasks if _background_tasks: LOGGER.info(f"Waiting for {len(_background_tasks)} pending background task(s) to complete...") # Wait up to 30 seconds for tasks to complete @@ -384,6 +449,9 @@ async def process_with_error_handling( _delivery_id: GitHub delivery ID for logging _event_type: GitHub event type for logging """ + # Track processing start time for metrics + start_time = datetime.now(UTC) + # Create repository-specific logger in background repository_name = _hook_data.get("repository", {}).get("name", "unknown") _logger = get_logger_with_params(repository_name=repository_name) @@ -392,23 +460,114 @@ async def process_with_error_handling( ) 
_logger.info(f"{_log_context} Processing webhook") + # Extract common webhook metadata for metrics tracking + _repository = _hook_data.get("repository", {}).get("full_name", "unknown") + _action = _hook_data.get("action") + _sender = _hook_data.get("sender", {}).get("login") + + # Extract PR number from multiple sources depending on event type + _pr_number = _hook_data.get("pull_request", {}).get("number") # pull_request events + + # For issue_comment events on PRs: issue has pull_request key + if not _pr_number and "issue" in _hook_data: + issue = _hook_data["issue"] + # If issue has pull_request key, it's actually a PR comment + if "pull_request" in issue: + _pr_number = issue.get("number") + + # For check_run events: extract from pull_requests array + if not _pr_number and "check_run" in _hook_data: + check_run = _hook_data["check_run"] + pull_requests = check_run.get("pull_requests", []) + if pull_requests and len(pull_requests) > 0: + _pr_number = pull_requests[0].get("number") + + async def track_metrics_safe( + status: str, + error_message: str | None = None, + api_calls_count: int = 0, + token_spend: int = 0, + token_remaining: int = 0, + metrics_available: bool = True, + ) -> None: + """Track webhook metrics in best-effort manner - never fail webhook processing. 
+ + Args: + status: Processing status (success, error, partial) + error_message: Optional error message for failures + api_calls_count: Number of GitHub API calls made + token_spend: Rate limit tokens consumed + token_remaining: Remaining rate limit tokens + metrics_available: Whether API metrics are available (False = no tracking) + """ + if not (METRICS_SERVER_ENABLED and metrics_tracker): + return + + try: + processing_time = (datetime.now(UTC) - start_time).total_seconds() * 1000 + await metrics_tracker.track_webhook_event( + delivery_id=_delivery_id, + repository=_repository, + event_type=_event_type, + action=_action, + sender=_sender, + payload=_hook_data, + processing_time_ms=int(processing_time), + status=status, + pr_number=_pr_number, + error_message=error_message, + api_calls_count=api_calls_count, + token_spend=token_spend, + token_remaining=token_remaining, + metrics_available=metrics_available, + ) + except Exception: + # Metrics tracking failures should never affect webhook processing + # Log the failure but don't re-raise + _logger.exception(f"{_log_context} Metrics tracking failed (non-critical)") + try: # Initialize GithubWebhook inside background task to avoid blocking webhook response _api: GithubWebhook = GithubWebhook(hook_data=_hook_data, headers=_headers, logger=_logger) try: await _api.process() + + # Extract API usage metrics for database tracking (defensive - use .get() for safety) + api_metrics = _api.get_api_metrics() + + # Track successful webhook event with API metrics (best-effort) + # Use .get() with defaults since metrics tracking is best-effort and shouldn't break on partial dict + await track_metrics_safe( + status="success", + api_calls_count=int(api_metrics.get("api_calls_count", 0)), + token_spend=int(api_metrics.get("token_spend", 0)), + token_remaining=int(api_metrics.get("token_remaining", 0)), + metrics_available=bool(api_metrics.get("metrics_available", False)), + ) finally: await _api.cleanup() - except 
RepositoryNotFoundInConfigError: + except RepositoryNotFoundInConfigError as ex: # Repository-specific error - not exceptional, log as error not exception _logger.error(f"{_log_context} Repository not found in configuration") - except (httpx.ConnectError, httpx.RequestError, requests.exceptions.ConnectionError): + + # Track failed webhook event (best-effort) + # Note: No API metrics available - error happened before GithubWebhook processing + await track_metrics_safe(status="error", error_message=str(ex), metrics_available=False) + except (httpx.ConnectError, httpx.RequestError, requests.exceptions.ConnectionError) as ex: # Network/connection errors - can be transient _logger.exception(f"{_log_context} API connection error - check network connectivity") - except Exception: + + # Track failed webhook event (best-effort) + # Note: No API metrics available - error happened during GithubWebhook processing + await track_metrics_safe(status="error", error_message=str(ex), metrics_available=False) + except Exception as ex: # Catch-all for unexpected errors _logger.exception(f"{_log_context} Unexpected error in background webhook processing") + # Track failed webhook event (best-effort) + # Note: No API metrics available - error happened during GithubWebhook processing + await track_metrics_safe(status="error", error_message=str(ex), metrics_available=False) + # Start background task immediately using asyncio.create_task # This ensures the HTTP response is sent immediately without waiting # Store task reference for observability and graceful shutdown @@ -425,9 +584,9 @@ async def process_with_error_handling( # Return 200 immediately with JSONResponse for fastest serialization return JSONResponse( - status_code=status.HTTP_200_OK, + status_code=http_status.HTTP_200_OK, content={ - "status": status.HTTP_200_OK, + "status": http_status.HTTP_200_OK, "message": "Webhook queued for processing", "delivery_id": delivery_id, "event_type": event_type, @@ -435,8 +594,9 @@ async def 
process_with_error_handling( ) -# Module-level singleton instance +# Module-level singleton instances _log_viewer_controller_singleton: LogViewerController | None = None +_metrics_dashboard_controller_singleton: MetricsDashboardController | None = None def get_log_viewer_controller() -> LogViewerController: @@ -464,6 +624,30 @@ def get_log_viewer_controller() -> LogViewerController: controller_dependency = Depends(get_log_viewer_controller) +def get_metrics_dashboard_controller() -> MetricsDashboardController: + """Dependency to provide a singleton MetricsDashboardController instance. + + Returns the same MetricsDashboardController instance across all requests to ensure + proper WebSocket connection tracking and shared state management. + + Returns: + MetricsDashboardController: The singleton instance + """ + global _metrics_dashboard_controller_singleton + if _metrics_dashboard_controller_singleton is None: + # Metrics dashboard requires database manager and logger + if db_manager is None: + raise RuntimeError("Metrics database not available - metrics server not enabled") + + metrics_logger = logging.getLogger("webhook_server.metrics") + _metrics_dashboard_controller_singleton = MetricsDashboardController(db_manager, metrics_logger) + return _metrics_dashboard_controller_singleton + + +# Create dependency instance to avoid flake8 M511 warnings +metrics_dashboard_dependency = Depends(get_metrics_dashboard_controller) + + # Log Viewer Endpoints - Only register if ENABLE_LOG_SERVER=true if LOG_SERVER_ENABLED: @@ -1112,7 +1296,7 @@ async def websocket_log_stream( """Handle WebSocket connection for real-time log streaming.""" # Check if log server is enabled (manual check since WebSocket doesn't support dependencies same way) if not LOG_SERVER_ENABLED: - await websocket.close(code=status.WS_1008_POLICY_VIOLATION, reason="Log server is disabled") + await websocket.close(code=http_status.WS_1008_POLICY_VIOLATION, reason="Log server is disabled") return controller = 
get_log_viewer_controller() @@ -1127,6 +1311,1887 @@ async def websocket_log_stream( ) +# Metrics Dashboard Endpoints - Only register if ENABLE_METRICS_SERVER=true +if METRICS_SERVER_ENABLED: + + @FASTAPI_APP.get("/metrics", operation_id="get_metrics_dashboard_page", response_class=HTMLResponse) + def get_metrics_dashboard_page( + controller: MetricsDashboardController = metrics_dashboard_dependency, + ) -> HTMLResponse: + """Serve the metrics dashboard HTML page.""" + return controller.get_dashboard_page() + + @FASTAPI_APP.websocket("/metrics/ws") + async def websocket_metrics_stream( + websocket: WebSocket, + repository: str | None = None, + event_type: str | None = None, + status: str | None = None, + ) -> None: + """Handle WebSocket connection for real-time metrics streaming.""" + # Check if metrics server is enabled (manual check since WebSocket doesn't support dependencies same way) + if not METRICS_SERVER_ENABLED: + await websocket.close(code=http_status.WS_1008_POLICY_VIOLATION, reason="Metrics server is disabled") + return + + controller = get_metrics_dashboard_controller() + await controller.handle_websocket( + websocket=websocket, + repository=repository, + event_type=event_type, + status=status, + ) + + +@FASTAPI_APP.get("/favicon.ico", include_in_schema=False) +async def favicon() -> Response: + """Serve favicon.ico to prevent 404 errors. + + Returns a minimal 1x1 transparent PNG as favicon to eliminate browser 404 errors + without requiring an actual favicon file. This is a lightweight solution that + satisfies browser favicon requests with minimal overhead. 
+ """ + # 1x1 transparent PNG (base64 encoded) + transparent_png = base64.b64decode( + "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNk+M9QDwADhgGAWjR9awAAAABJRU5ErkJggg==" + ) + + return Response(content=transparent_png, media_type="image/png") + + +# Metrics API Endpoints - Only functional if ENABLE_METRICS_SERVER=true (guarded by dependency) +@FASTAPI_APP.get( + "/api/metrics/webhooks", + operation_id="get_webhook_events", + dependencies=[Depends(require_metrics_server_enabled)], +) +async def get_webhook_events( + repository: str | None = Query(default=None, description="Filter by repository (org/repo format)"), + event_type: str | None = Query( + default=None, description="Filter by event type (pull_request, issue_comment, etc.)" + ), + status: str | None = Query(default=None, description="Filter by status (success, error, partial)"), + start_time: str | None = Query( + default=None, description="Start time in ISO 8601 format (e.g., 2024-01-15T00:00:00Z)" + ), + end_time: str | None = Query(default=None, description="End time in ISO 8601 format (e.g., 2024-01-31T23:59:59Z)"), + page: int = Query(default=1, ge=1, description="Page number (1-indexed)"), + page_size: int = Query(default=100, ge=1, le=1000, description="Items per page (1-1000)"), +) -> dict[str, Any]: + """Retrieve recent webhook events with filtering and pagination. + + This endpoint provides comprehensive access to webhook event history for monitoring, + debugging, and analytics. It supports multiple filtering dimensions and is optimized + for memory-efficient querying of large datasets. 
+ + **Primary Use Cases:** + - Monitor webhook processing status and identify failures + - Analyze webhook traffic patterns by repository or event type + - Debug specific webhook delivery issues + - Generate reports on webhook processing performance + - Track webhook event trends over time + - Audit webhook activity for specific repositories + + **Parameters:** + - `repository` (str, optional): Repository name in "owner/repo" format. + Example: "myakove/github-webhook-server" + - `event_type` (str, optional): GitHub webhook event type. + Common values: "pull_request", "push", "issues", "issue_comment", "pull_request_review" + - `status` (str, optional): Processing status filter. + Values: "success", "error", "partial" + - `start_time` (str, optional): Start of time range in ISO 8601 format. + Example: "2024-01-15T10:00:00Z" or "2024-01-15T10:00:00.123456" + - `end_time` (str, optional): End of time range in ISO 8601 format. + Example: "2024-01-15T18:00:00Z" + - `page` (int, default=1): Page number (1-indexed). + - `page_size` (int, default=100): Items per page (1-1000). 
+ + **Pagination:** + - Response includes pagination metadata with total count, page info, and navigation flags + - Use `page` and `page_size` to navigate through results + - `has_next` and `has_prev` indicate if more pages are available + + **Return Structure:** + ```json + { + "data": [ + { + "delivery_id": "f4b3c2d1-a9b8-4c5d-9e8f-1a2b3c4d5e6f", + "repository": "myakove/test-repo", + "event_type": "pull_request", + "action": "opened", + "pr_number": 42, + "sender": "contributor123", + "status": "success", + "created_at": "2024-01-15T14:30:25.123456Z", + "processed_at": "2024-01-15T14:30:30.456789Z", + "duration_ms": 5333, + "api_calls_count": 12, + "token_spend": 12, + "token_remaining": 4988, + "error_message": null + } + ], + "pagination": { + "total": 1542, + "page": 1, + "page_size": 100, + "total_pages": 16, + "has_next": true, + "has_prev": false + } + } + ``` + + **Common Filtering Scenarios:** + - Recent errors: `status=error&start_time=2024-01-15T00:00:00Z` + - Repository-specific events: `repository=owner/repo&page=1&page_size=50` + - Event type analysis: `event_type=pull_request&start_time=2024-01-01T00:00:00Z&page=1&page_size=100` + - Failed webhooks: `status=error&event_type=pull_request&page=1&page_size=100` + + **Note:** `page` is 1-indexed, and `page_size` is capped at 1000. 
+ + **Error Conditions:** + - 400: Invalid datetime format in start_time/end_time parameters + - 404: Metrics server disabled (ENABLE_METRICS_SERVER=false) + - 500: Database connection errors or query failures + + **Performance Notes:** + - Response times increase with larger date ranges + - Use specific filters (repository, event_type) for fastest queries + - Pagination recommended for large result sets + """ + # Validate database manager is available + if db_manager is None: + LOGGER.error("Database manager not initialized - metrics server may not be properly configured") + raise HTTPException( + status_code=http_status.HTTP_500_INTERNAL_SERVER_ERROR, + detail="Metrics database not available", + ) + + # Parse datetime strings + start_datetime = parse_datetime_string(start_time, "start_time") + end_datetime = parse_datetime_string(end_time, "end_time") + + # Build query with filters + query = """ + SELECT + delivery_id, + repository, + event_type, + action, + pr_number, + sender, + status, + created_at, + processed_at, + duration_ms, + api_calls_count, + token_spend, + token_remaining, + error_message + FROM webhooks + WHERE 1=1 + """ + params: list[Any] = [] + param_idx = 1 + + if repository: + query += f" AND repository = ${param_idx}" + params.append(repository) + param_idx += 1 + + if event_type: + query += f" AND event_type = ${param_idx}" + params.append(event_type) + param_idx += 1 + + if status: + query += f" AND status = ${param_idx}" + params.append(status) + param_idx += 1 + + if start_datetime: + query += f" AND created_at >= ${param_idx}" + params.append(start_datetime) + param_idx += 1 + + if end_datetime: + query += f" AND created_at <= ${param_idx}" + params.append(end_datetime) + param_idx += 1 + + # Calculate offset for pagination + offset = (page - 1) * page_size + + # Get total count for pagination + # Safe: query is built with parameterized WHERE clauses, no user input in SQL string + count_query = f"SELECT COUNT(*) FROM ({query}) AS filtered" 
# noqa: S608 + query += f" ORDER BY created_at DESC LIMIT ${param_idx} OFFSET ${param_idx + 1}" + params.extend([page_size, offset]) + + try: + # Get total count using DatabaseManager helper + total_count = await db_manager.fetchval(count_query, *params[:-2]) + + # Get paginated results using DatabaseManager helper + rows = await db_manager.fetch(query, *params) + + events = [ + { + "delivery_id": row["delivery_id"], + "repository": row["repository"], + "event_type": row["event_type"], + "action": row["action"], + "pr_number": row["pr_number"], + "sender": row["sender"], + "status": row["status"], + "created_at": row["created_at"].isoformat() if row["created_at"] else None, + "processed_at": row["processed_at"].isoformat() if row["processed_at"] else None, + "duration_ms": row["duration_ms"], + "api_calls_count": row["api_calls_count"], + "token_spend": row["token_spend"], + "token_remaining": row["token_remaining"], + "error_message": row["error_message"], + } + for row in rows + ] + + total_pages = math.ceil(total_count / page_size) if total_count > 0 else 0 + has_next = page < total_pages + has_prev = page > 1 + + return { + "data": events, + "pagination": { + "total": total_count, + "page": page, + "page_size": page_size, + "total_pages": total_pages, + "has_next": has_next, + "has_prev": has_prev, + }, + } + except asyncio.CancelledError: + raise + except HTTPException: + raise + except Exception as ex: + LOGGER.exception("Failed to fetch webhook events from database") + raise HTTPException( + status_code=http_status.HTTP_500_INTERNAL_SERVER_ERROR, + detail="Failed to fetch webhook events", + ) from ex + + +@FASTAPI_APP.get( + "/api/metrics/webhooks/{delivery_id}", + operation_id="get_webhook_event_by_id", + dependencies=[Depends(require_metrics_server_enabled)], +) +async def get_webhook_event_by_id(delivery_id: str) -> dict[str, Any]: + """Get specific webhook event details including full payload. 
+ + Retrieve comprehensive details for a specific webhook event, including the complete + GitHub webhook payload, processing metrics, and related metadata. Essential for + debugging specific webhook deliveries and analyzing event processing. + + **Primary Use Cases:** + - Debug specific webhook delivery failures + - Inspect complete webhook payload for analysis + - Verify webhook processing metrics and timing + - Audit specific webhook events for compliance + - Troubleshoot GitHub API integration issues + + **Parameters:** + - `delivery_id` (str, required): GitHub webhook delivery ID (X-GitHub-Delivery header). + Example: "f4b3c2d1-a9b8-4c5d-9e8f-1a2b3c4d5e6f" + + **Return Structure:** + ```json + { + "delivery_id": "f4b3c2d1-a9b8-4c5d-9e8f-1a2b3c4d5e6f", + "repository": "myakove/test-repo", + "event_type": "pull_request", + "action": "opened", + "pr_number": 42, + "sender": "contributor123", + "status": "success", + "created_at": "2024-01-15T14:30:25.123456Z", + "processed_at": "2024-01-15T14:30:30.456789Z", + "duration_ms": 5333, + "api_calls_count": 12, + "token_spend": 12, + "token_remaining": 4988, + "error_message": null, + "payload": { + "action": "opened", + "number": 42, + "pull_request": {...}, + "repository": {...}, + "sender": {...} + } + } + ``` + + **Error Conditions:** + - 404: Webhook event not found for the specified delivery_id + - 404: Metrics server disabled (ENABLE_METRICS_SERVER=false) + - 500: Database connection errors or query failures + + **AI Agent Usage Examples:** + - "Get webhook details for delivery abc123 to debug processing failure" + - "Show full payload for webhook xyz789 to analyze event structure" + - "Retrieve webhook event def456 to verify API call metrics" + """ + # Validate database manager is available + if db_manager is None: + LOGGER.error("Database manager not initialized - metrics server may not be properly configured") + raise HTTPException( + status_code=http_status.HTTP_500_INTERNAL_SERVER_ERROR, + detail="Metrics 
database not available", + ) + + query = """ + SELECT + delivery_id, + repository, + event_type, + action, + pr_number, + sender, + payload, + status, + created_at, + processed_at, + duration_ms, + api_calls_count, + token_spend, + token_remaining, + error_message + FROM webhooks + WHERE delivery_id = $1 + """ + + try: + # Fetch single row using DatabaseManager helper + row = await db_manager.fetchrow(query, delivery_id) + + if not row: + raise HTTPException( + status_code=http_status.HTTP_404_NOT_FOUND, + detail=f"Webhook event not found: {delivery_id}", + ) + + return { + "delivery_id": row["delivery_id"], + "repository": row["repository"], + "event_type": row["event_type"], + "action": row["action"], + "pr_number": row["pr_number"], + "sender": row["sender"], + "status": row["status"], + "created_at": row["created_at"].isoformat() if row["created_at"] else None, + "processed_at": row["processed_at"].isoformat() if row["processed_at"] else None, + "duration_ms": row["duration_ms"], + "api_calls_count": row["api_calls_count"], + "token_spend": row["token_spend"], + "token_remaining": row["token_remaining"], + "error_message": row["error_message"], + "payload": row["payload"], + } + except asyncio.CancelledError: + raise + except HTTPException: + raise + except Exception as ex: + LOGGER.exception(f"Failed to fetch webhook event {delivery_id} from database") + raise HTTPException( + status_code=http_status.HTTP_500_INTERNAL_SERVER_ERROR, + detail="Failed to fetch webhook event", + ) from ex + + +@FASTAPI_APP.get( + "/api/metrics/repositories", + operation_id="get_repository_statistics", + dependencies=[Depends(require_metrics_server_enabled)], +) +async def get_repository_statistics( + start_time: str | None = Query( + default=None, description="Start time in ISO 8601 format (e.g., 2024-01-01T00:00:00Z)" + ), + end_time: str | None = Query(default=None, description="End time in ISO 8601 format (e.g., 2024-01-31T23:59:59Z)"), + page: int = Query(default=1, ge=1, 
description="Page number (1-indexed)"), + page_size: int = Query(default=10, ge=1, le=100, description="Items per page (1-100)"), +) -> dict[str, Any]: + """Get aggregated statistics per repository. + + Provides comprehensive repository-level metrics including event counts, processing + performance, success rates, and API usage. Essential for identifying high-traffic + repositories, performance bottlenecks, and operational trends. + + **Primary Use Cases:** + - Identify repositories with highest webhook traffic + - Analyze repository-specific processing performance + - Monitor success rates and error patterns by repository + - Track API usage and rate limiting by repository + - Generate repository-level operational reports + - Optimize webhook processing for high-volume repositories + + **Parameters:** + - `start_time` (str, optional): Start of time range in ISO 8601 format. + Example: "2024-01-01T00:00:00Z" + Default: No time filter (all-time stats) + - `end_time` (str, optional): End of time range in ISO 8601 format. 
+ Example: "2024-01-31T23:59:59Z" + Default: No time filter (up to current time) + - `page` (int, default=1): Page number (1-indexed) + - `page_size` (int, default=10): Items per page (1-100) + + **Pagination:** + - Response includes pagination metadata + - `total`: Total number of repositories + - `total_pages`: Total number of pages + - `has_next`: Whether there's a next page + - `has_prev`: Whether there's a previous page + + **Return Structure:** + ```json + { + "time_range": { + "start_time": "2024-01-01T00:00:00Z", + "end_time": "2024-01-31T23:59:59Z" + }, + "repositories": [ + { + "repository": "myakove/test-repo", + "total_events": 1542, + "successful_events": 1489, + "failed_events": 53, + "success_rate": 96.56, + "avg_processing_time_ms": 5234, + "median_processing_time_ms": 4123, + "p95_processing_time_ms": 12456, + "max_processing_time_ms": 45230, + "total_api_calls": 18504, + "avg_api_calls_per_event": 12.0, + "total_token_spend": 18504, + "event_type_breakdown": { + "pull_request": 856, + "issue_comment": 423, + "check_run": 263 + } + } + ], + "pagination": { + "total": 150, + "page": 1, + "page_size": 10, + "total_pages": 15, + "has_next": true, + "has_prev": false + } + } + ``` + + **Metrics Explained:** + - `total_events`: Total webhook events processed for this repository + - `successful_events`: Events that completed successfully + - `failed_events`: Events that failed or partially failed + - `success_rate`: Percentage of successful events (0-100) + - `avg_processing_time_ms`: Average processing duration in milliseconds + - `median_processing_time_ms`: Median processing duration (50th percentile) + - `p95_processing_time_ms`: 95th percentile processing time (performance SLA) + - `max_processing_time_ms`: Maximum processing time (worst case) + - `total_api_calls`: Total GitHub API calls made + - `avg_api_calls_per_event`: Average API calls per webhook event + - `total_token_spend`: Total rate limit tokens consumed + - `event_type_breakdown`: Event 
count distribution by type + + **Common Analysis Scenarios:** + - Monthly repository metrics: `start_time=2024-01-01&end_time=2024-01-31&page=1&page_size=10` + - High-traffic repositories: Sort by `total_events` descending + - Performance issues: Analyze `p95_processing_time_ms` and `max_processing_time_ms` + - Error-prone repositories: Sort by `failed_events` descending or `success_rate` ascending + - API usage optimization: Analyze `avg_api_calls_per_event` and `total_token_spend` + + **Note:** `page` is 1-indexed, and `page_size` is capped at 100 for this endpoint. + + **Error Conditions:** + - 400: Invalid datetime format in start_time/end_time parameters + - 404: Metrics server disabled (ENABLE_METRICS_SERVER=false) + - 500: Database connection errors or query failures + + **AI Agent Usage Examples:** + - "Show repository statistics for last month to identify high-traffic repos" + - "Get repository performance metrics to find slow processing repositories" + - "Analyze repository error rates to identify problematic configurations" + - "Review API usage by repository to optimize rate limiting strategy" + + **Performance Notes:** + - Statistics are computed in real-time from webhook events table + - Queries with time filters are optimized using indexed created_at column + - Large date ranges may increase query time + - Results ordered by total events (highest traffic first) + """ + # Validate database manager is available + if db_manager is None: + LOGGER.error("Database manager not initialized - metrics server may not be properly configured") + raise HTTPException( + status_code=http_status.HTTP_500_INTERNAL_SERVER_ERROR, + detail="Metrics database not available", + ) + + # Parse datetime strings + start_datetime = parse_datetime_string(start_time, "start_time") + end_datetime = parse_datetime_string(end_time, "end_time") + + # Build query with time filters + where_clause = "WHERE 1=1" + params: list[Any] = [] + param_idx = 1 + + if start_datetime: + 
where_clause += f" AND created_at >= ${param_idx}" + params.append(start_datetime) + param_idx += 1 + + if end_datetime: + where_clause += f" AND created_at <= ${param_idx}" + params.append(end_datetime) + param_idx += 1 + + # Calculate offset for pagination + offset = (page - 1) * page_size + + # Count total repositories for pagination + count_query = f""" + SELECT COUNT(DISTINCT repository) as total + FROM webhooks + {where_clause} + """ # noqa: S608 + + query = f""" + SELECT + repository, + COUNT(*) as total_events, + COUNT(*) FILTER (WHERE status = 'success') as successful_events, + COUNT(*) FILTER (WHERE status IN ('error', 'partial')) as failed_events, + ROUND( + (COUNT(*) FILTER (WHERE status = 'success')::numeric / COUNT(*)::numeric * 100)::numeric, + 2 + ) as success_rate, + ROUND(AVG(duration_ms)) as avg_processing_time_ms, + PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY duration_ms) as median_processing_time_ms, + PERCENTILE_CONT(0.95) WITHIN GROUP (ORDER BY duration_ms) as p95_processing_time_ms, + MAX(duration_ms) as max_processing_time_ms, + SUM(api_calls_count) as total_api_calls, + ROUND(AVG(api_calls_count), 2) as avg_api_calls_per_event, + SUM(token_spend) as total_token_spend, + jsonb_object_agg(event_type, event_count) as event_type_breakdown + FROM ( + SELECT + repository, + event_type, + status, + duration_ms, + api_calls_count, + token_spend, + COUNT(*) OVER (PARTITION BY repository, event_type) as event_count + FROM webhooks + {where_clause} + ) as events_with_counts + GROUP BY repository + ORDER BY total_events DESC + LIMIT ${param_idx} OFFSET ${param_idx + 1} + """ # noqa: S608 + params.extend([page_size, offset]) + + try: + # Get total count for pagination (params without LIMIT/OFFSET) + total_count = await db_manager.fetchval(count_query, *params[:-2]) + + # Fetch repository statistics using DatabaseManager helper + rows = await db_manager.fetch(query, *params) + + repositories = [ + { + "repository": row["repository"], + "total_events": 
row["total_events"], + "successful_events": row["successful_events"], + "failed_events": row["failed_events"], + "success_rate": float(row["success_rate"]) if row["success_rate"] is not None else 0.0, + "avg_processing_time_ms": int(row["avg_processing_time_ms"]) + if row["avg_processing_time_ms"] is not None + else 0, + "median_processing_time_ms": int(row["median_processing_time_ms"]) + if row["median_processing_time_ms"] is not None + else 0, + "p95_processing_time_ms": int(row["p95_processing_time_ms"]) + if row["p95_processing_time_ms"] is not None + else 0, + "max_processing_time_ms": row["max_processing_time_ms"] or 0, + "total_api_calls": row["total_api_calls"] or 0, + "avg_api_calls_per_event": float(row["avg_api_calls_per_event"]) + if row["avg_api_calls_per_event"] is not None + else 0.0, + "total_token_spend": row["total_token_spend"] or 0, + "event_type_breakdown": row["event_type_breakdown"] or {}, + } + for row in rows + ] + + total_pages = math.ceil(total_count / page_size) if total_count > 0 else 0 + has_next = page < total_pages + has_prev = page > 1 + + return { + "time_range": { + "start_time": start_datetime.isoformat() if start_datetime else None, + "end_time": end_datetime.isoformat() if end_datetime else None, + }, + "repositories": repositories, + "pagination": { + "total": total_count, + "page": page, + "page_size": page_size, + "total_pages": total_pages, + "has_next": has_next, + "has_prev": has_prev, + }, + } + except asyncio.CancelledError: + raise + except HTTPException: + raise + except Exception as ex: + LOGGER.exception("Failed to fetch repository statistics from database") + raise HTTPException( + status_code=http_status.HTTP_500_INTERNAL_SERVER_ERROR, + detail="Failed to fetch repository statistics", + ) from ex + + +@FASTAPI_APP.get( + "/api/metrics/contributors", + operation_id="get_metrics_contributors", + dependencies=[Depends(require_metrics_server_enabled)], +) +async def get_metrics_contributors( + start_time: str | None = 
Query( + default=None, description="Start time in ISO 8601 format (e.g., 2024-01-01T00:00:00Z)" + ), + end_time: str | None = Query(default=None, description="End time in ISO 8601 format (e.g., 2024-01-31T23:59:59Z)"), + user: str | None = Query(default=None, description="Filter by username"), + repository: str | None = Query(default=None, description="Filter by repository (org/repo format)"), + page: int = Query(default=1, ge=1, description="Page number (1-indexed)"), + page_size: int = Query(default=10, ge=1, le=100, description="Items per page (1-100)"), +) -> dict[str, Any]: + """Get PR contributors statistics (creators, reviewers, approvers, LGTM). + + Analyzes webhook payloads to extract contributor activity including PR creation, + code review, approval, and LGTM metrics. Essential for understanding team contributions + and identifying active contributors. + + **Primary Use Cases:** + - Track who is creating PRs and how many + - Monitor code review participation + - Identify approval patterns and bottlenecks + - Track LGTM activity separate from approvals + - Measure team collaboration and engagement + - Generate contributor leaderboards + + **Parameters:** + - `start_time` (str, optional): Start of time range in ISO 8601 format + - `end_time` (str, optional): End of time range in ISO 8601 format + - `user` (str, optional): Filter by username + - `repository` (str, optional): Filter by repository (org/repo format) + - `page` (int, default=1): Page number (1-indexed) + - `page_size` (int, default=10): Items per page (1-100) + + **Pagination:** + - Each category (pr_creators, pr_reviewers, pr_approvers, pr_lgtm) includes pagination metadata + - `total`: Total number of contributors in this category + - `total_pages`: Total number of pages + - `has_next`: Whether there's a next page + - `has_prev`: Whether there's a previous page + + **Return Structure:** + ```json + { + "time_range": { + "start_time": "2024-01-01T00:00:00Z", + "end_time": 
"2024-01-31T23:59:59Z" + }, + "pr_creators": { + "data": [ + { + "user": "john-doe", + "total_prs": 45, + "merged_prs": 42, + "closed_prs": 3, + "avg_commits_per_pr": 3.0 + } + ], + "pagination": { + "total": 150, + "page": 1, + "page_size": 10, + "total_pages": 15, + "has_next": true, + "has_prev": false + } + }, + "pr_reviewers": { + "data": [ + { + "user": "jane-smith", + "total_reviews": 78, + "prs_reviewed": 65, + "avg_reviews_per_pr": 1.2 + } + ], + "pagination": { + "total": 120, + "page": 1, + "page_size": 10, + "total_pages": 12, + "has_next": true, + "has_prev": false + } + }, + "pr_approvers": { + "data": [ + { + "user": "bob-wilson", + "total_approvals": 56, + "prs_approved": 54 + } + ], + "pagination": { + "total": 95, + "page": 1, + "page_size": 10, + "total_pages": 10, + "has_next": true, + "has_prev": false + } + }, + "pr_lgtm": { + "data": [ + { + "user": "alice-jones", + "total_lgtm": 42, + "prs_lgtm": 40 + } + ], + "pagination": { + "total": 78, + "page": 1, + "page_size": 10, + "total_pages": 8, + "has_next": true, + "has_prev": false + } + } + } + ``` + + **Notes:** + - PR Approvers: Tracks /approve commands (approved- labels) + - PR LGTM: Tracks /lgtm commands (lgtm- labels) + - LGTM is separate from approvals in this workflow + + **Errors:** + - 500: Database connection error or metrics server disabled + """ + if db_manager is None: + raise HTTPException( + status_code=http_status.HTTP_500_INTERNAL_SERVER_ERROR, + detail="Metrics database not available", + ) + + start_datetime = parse_datetime_string(start_time, "start_time") + end_datetime = parse_datetime_string(end_time, "end_time") + + # Build filter clause with time, user, and repository filters + time_filter = "" + params: list[Any] = [] + param_count = 0 + + if start_datetime: + param_count += 1 + time_filter += f" AND created_at >= ${param_count}" + params.append(start_datetime) + + if end_datetime: + param_count += 1 + time_filter += f" AND created_at <= ${param_count}" + 
params.append(end_datetime) + + # Add repository filter if provided + repository_filter = "" + if repository: + param_count += 1 + repository_filter = f" AND repository = ${param_count}" + params.append(repository) + + # Build category-specific user filters to align with per-category "user" semantics + # PR Creators: user = COALESCE(CASE event_type WHEN 'pull_request'/'pull_request_review'/'issue_comment'..., sender) + # PR Reviewers: user = sender + # PR Approvers: user = SUBSTRING(payload->'label'->>'name' FROM 10) + # PR LGTM: user = SUBSTRING(payload->'label'->>'name' FROM 6) + user_filter_reviewers = "" + user_filter_approvers = "" + user_filter_lgtm = "" + + if user: + param_count += 1 + user_param_idx = param_count + params.append(user) + + # PR Reviewers: filter on sender (correct as-is) + user_filter_reviewers = f" AND sender = ${user_param_idx}" + # PR Approvers: filter on extracted username from 'approved-' label + user_filter_approvers = f" AND SUBSTRING(payload->'label'->>'name' FROM 10) = ${user_param_idx}" + # PR LGTM: filter on extracted username from 'lgtm-' label + user_filter_lgtm = f" AND SUBSTRING(payload->'label'->>'name' FROM 6) = ${user_param_idx}" + + # Calculate offset for pagination + offset = (page - 1) * page_size + + # Add page_size and offset to params + param_count += 1 + page_size_param = param_count + param_count += 1 + offset_param = param_count + params.extend([page_size, offset]) + + # Count query for PR Creators + pr_creators_count_query = f""" + WITH pr_creators AS ( + SELECT DISTINCT ON (pr_number) + pr_number, + CASE event_type + WHEN 'pull_request' THEN payload->'pull_request'->'user'->>'login' + WHEN 'pull_request_review' THEN payload->'pull_request'->'user'->>'login' + WHEN 'pull_request_review_comment' + THEN payload->'pull_request'->'user'->>'login' + WHEN 'issue_comment' THEN COALESCE( + payload->'pull_request'->'user'->>'login', + payload->'issue'->'user'->>'login' + ) + END as pr_creator + FROM webhooks + WHERE 
pr_number IS NOT NULL + AND event_type IN ( + 'pull_request', + 'pull_request_review', + 'pull_request_review_comment', + 'issue_comment' + ) + {time_filter} + {repository_filter} + ORDER BY pr_number, created_at ASC + ) + SELECT COUNT(DISTINCT pr_creator) as total + FROM pr_creators + WHERE pr_creator IS NOT NULL{f" AND pr_creator = ${user_param_idx}" if user else ""} + """ # noqa: S608 + + # Query PR Creators (from any event with pr_number) + pr_creators_query = f""" + WITH pr_creators AS ( + SELECT DISTINCT ON (pr_number) + pr_number, + CASE event_type + WHEN 'pull_request' THEN payload->'pull_request'->'user'->>'login' + WHEN 'pull_request_review' THEN payload->'pull_request'->'user'->>'login' + WHEN 'pull_request_review_comment' + THEN payload->'pull_request'->'user'->>'login' + WHEN 'issue_comment' THEN COALESCE( + payload->'pull_request'->'user'->>'login', + payload->'issue'->'user'->>'login' + ) + END as pr_creator + FROM webhooks + WHERE pr_number IS NOT NULL + AND event_type IN ( + 'pull_request', + 'pull_request_review', + 'pull_request_review_comment', + 'issue_comment' + ) + {time_filter} + {repository_filter} + ORDER BY pr_number, created_at ASC + ), + user_prs AS ( + SELECT + pc.pr_creator, + w.pr_number, + COALESCE((w.payload->'pull_request'->>'commits')::int, 0) as commits, + (w.payload->'pull_request'->>'merged' = 'true') as is_merged, + ( + w.payload->'pull_request'->>'state' = 'closed' + AND w.payload->'pull_request'->>'merged' = 'false' + ) as is_closed + FROM webhooks w + INNER JOIN pr_creators pc ON w.pr_number = pc.pr_number + WHERE w.pr_number IS NOT NULL + {time_filter} + {repository_filter} + ) + SELECT + pr_creator as user, + COUNT(DISTINCT pr_number) as total_prs, + COUNT(DISTINCT pr_number) FILTER (WHERE is_merged) as merged_prs, + COUNT(DISTINCT pr_number) FILTER (WHERE is_closed) as closed_prs, + ROUND(AVG(max_commits), 1) as avg_commits + FROM ( + SELECT + pr_creator, + pr_number, + MAX(commits) as max_commits, + BOOL_OR(is_merged) 
as is_merged, + BOOL_OR(is_closed) as is_closed + FROM user_prs + WHERE pr_creator IS NOT NULL + GROUP BY pr_creator, pr_number + ) pr_stats + WHERE 1=1{f" AND pr_creator = ${user_param_idx}" if user else ""} + GROUP BY pr_creator + ORDER BY total_prs DESC + LIMIT ${page_size_param} OFFSET ${offset_param} + """ # noqa: S608 + + # Count query for PR Reviewers + pr_reviewers_count_query = f""" + SELECT COUNT(DISTINCT sender) as total + FROM webhooks + WHERE event_type = 'pull_request_review' + AND action = 'submitted' + AND sender != payload->'pull_request'->'user'->>'login' + {time_filter} + {user_filter_reviewers} + {repository_filter} + """ # noqa: S608 + + # Query PR Reviewers (from pull_request_review events) + pr_reviewers_query = f""" + SELECT + sender as user, + COUNT(*) as total_reviews, + COUNT(DISTINCT pr_number) as prs_reviewed + FROM webhooks + WHERE event_type = 'pull_request_review' + AND action = 'submitted' + AND sender != payload->'pull_request'->'user'->>'login' + {time_filter} + {user_filter_reviewers} + {repository_filter} + GROUP BY sender + ORDER BY total_reviews DESC + LIMIT ${page_size_param} OFFSET ${offset_param} + """ # noqa: S608 + + # Count query for PR Approvers + pr_approvers_count_query = f""" + SELECT COUNT(DISTINCT SUBSTRING(payload->'label'->>'name' FROM 10)) as total + FROM webhooks + WHERE event_type = 'pull_request' + AND action = 'labeled' + AND payload->'label'->>'name' LIKE 'approved-%' + {time_filter} + {user_filter_approvers} + {repository_filter} + """ # noqa: S608 + + # Query PR Approvers (from pull_request labeled events with 'approved-' prefix only) + # Custom approval workflow: /approve comment triggers 'approved-' label + # Note: LGTM is separate from approval - tracked separately + pr_approvers_query = f""" + SELECT + SUBSTRING(payload->'label'->>'name' FROM 10) as user, + COUNT(*) as total_approvals, + COUNT(DISTINCT pr_number) as prs_approved + FROM webhooks + WHERE event_type = 'pull_request' + AND action = 
'labeled' + AND payload->'label'->>'name' LIKE 'approved-%' + {time_filter} + {user_filter_approvers} + {repository_filter} + GROUP BY SUBSTRING(payload->'label'->>'name' FROM 10) + ORDER BY total_approvals DESC + LIMIT ${page_size_param} OFFSET ${offset_param} + """ # noqa: S608 + + # Count query for LGTM + pr_lgtm_count_query = f""" + SELECT COUNT(DISTINCT SUBSTRING(payload->'label'->>'name' FROM 6)) as total + FROM webhooks + WHERE event_type = 'pull_request' + AND action = 'labeled' + AND payload->'label'->>'name' LIKE 'lgtm-%' + {time_filter} + {user_filter_lgtm} + {repository_filter} + """ # noqa: S608 + + # Query LGTM (from pull_request labeled events with 'lgtm-' prefix) + # Custom LGTM workflow: /lgtm comment triggers 'lgtm-' label + pr_lgtm_query = f""" + SELECT + SUBSTRING(payload->'label'->>'name' FROM 6) as user, + COUNT(*) as total_lgtm, + COUNT(DISTINCT pr_number) as prs_lgtm + FROM webhooks + WHERE event_type = 'pull_request' + AND action = 'labeled' + AND payload->'label'->>'name' LIKE 'lgtm-%' + {time_filter} + {user_filter_lgtm} + {repository_filter} + GROUP BY SUBSTRING(payload->'label'->>'name' FROM 6) + ORDER BY total_lgtm DESC + LIMIT ${page_size_param} OFFSET ${offset_param} + """ # noqa: S608 + + try: + # Execute all count queries in parallel (params without LIMIT/OFFSET) + params_without_pagination = params[:-2] + ( + pr_creators_total, + pr_reviewers_total, + pr_approvers_total, + pr_lgtm_total, + ) = await asyncio.gather( + db_manager.fetchval(pr_creators_count_query, *params_without_pagination), + db_manager.fetchval(pr_reviewers_count_query, *params_without_pagination), + db_manager.fetchval(pr_approvers_count_query, *params_without_pagination), + db_manager.fetchval(pr_lgtm_count_query, *params_without_pagination), + ) + + # Execute all data queries in parallel for better performance + pr_creators_rows, pr_reviewers_rows, pr_approvers_rows, pr_lgtm_rows = await asyncio.gather( + db_manager.fetch(pr_creators_query, *params), + 
db_manager.fetch(pr_reviewers_query, *params), + db_manager.fetch(pr_approvers_query, *params), + db_manager.fetch(pr_lgtm_query, *params), + ) + + # Format PR creators + pr_creators = [ + { + "user": row["user"], + "total_prs": row["total_prs"], + "merged_prs": row["merged_prs"] or 0, + "closed_prs": row["closed_prs"] or 0, + "avg_commits_per_pr": round(row["avg_commits"] or 0, 1), + } + for row in pr_creators_rows + ] + + # Format PR reviewers + pr_reviewers = [ + { + "user": row["user"], + "total_reviews": row["total_reviews"], + "prs_reviewed": row["prs_reviewed"], + "avg_reviews_per_pr": round(row["total_reviews"] / max(row["prs_reviewed"], 1), 2), + } + for row in pr_reviewers_rows + ] + + # Format PR approvers + pr_approvers = [ + { + "user": row["user"], + "total_approvals": row["total_approvals"], + "prs_approved": row["prs_approved"], + } + for row in pr_approvers_rows + ] + + # Format LGTM + pr_lgtm = [ + { + "user": row["user"], + "total_lgtm": row["total_lgtm"], + "prs_lgtm": row["prs_lgtm"], + } + for row in pr_lgtm_rows + ] + + # Calculate pagination metadata for each category + total_pages_creators = math.ceil(pr_creators_total / page_size) if pr_creators_total > 0 else 0 + total_pages_reviewers = math.ceil(pr_reviewers_total / page_size) if pr_reviewers_total > 0 else 0 + total_pages_approvers = math.ceil(pr_approvers_total / page_size) if pr_approvers_total > 0 else 0 + total_pages_lgtm = math.ceil(pr_lgtm_total / page_size) if pr_lgtm_total > 0 else 0 + + return { + "time_range": { + "start_time": start_datetime.isoformat() if start_datetime else None, + "end_time": end_datetime.isoformat() if end_datetime else None, + }, + "pr_creators": { + "data": pr_creators, + "pagination": { + "total": pr_creators_total, + "page": page, + "page_size": page_size, + "total_pages": total_pages_creators, + "has_next": page < total_pages_creators, + "has_prev": page > 1, + }, + }, + "pr_reviewers": { + "data": pr_reviewers, + "pagination": { + "total": 
pr_reviewers_total, + "page": page, + "page_size": page_size, + "total_pages": total_pages_reviewers, + "has_next": page < total_pages_reviewers, + "has_prev": page > 1, + }, + }, + "pr_approvers": { + "data": pr_approvers, + "pagination": { + "total": pr_approvers_total, + "page": page, + "page_size": page_size, + "total_pages": total_pages_approvers, + "has_next": page < total_pages_approvers, + "has_prev": page > 1, + }, + }, + "pr_lgtm": { + "data": pr_lgtm, + "pagination": { + "total": pr_lgtm_total, + "page": page, + "page_size": page_size, + "total_pages": total_pages_lgtm, + "has_next": page < total_pages_lgtm, + "has_prev": page > 1, + }, + }, + } + except asyncio.CancelledError: + raise + except HTTPException: + raise + except Exception: + LOGGER.exception("Failed to fetch contributor metrics from database") + raise HTTPException( + status_code=http_status.HTTP_500_INTERNAL_SERVER_ERROR, + detail="Failed to fetch contributor metrics", + ) from None + + +@FASTAPI_APP.get( + "/api/metrics/user-prs", + operation_id="get_user_pull_requests", + dependencies=[Depends(require_metrics_server_enabled)], +) +async def get_user_pull_requests( + user: str | None = Query(None, description="GitHub username (optional - shows all PRs if not specified)"), + repository: str | None = Query(None, description="Filter by repository (org/repo)"), + start_time: str | None = Query( + default=None, description="Start time in ISO 8601 format (e.g., 2024-01-01T00:00:00Z)" + ), + end_time: str | None = Query(default=None, description="End time in ISO 8601 format (e.g., 2024-01-31T23:59:59Z)"), + page: int = Query(1, ge=1, description="Page number"), + page_size: int = Query(10, ge=1, le=100, description="Items per page"), +) -> dict[str, Any]: + """Get pull requests with optional user filtering and commit details. + + Retrieves pull requests with pagination. Can show all PRs or filter by user. + Includes detailed commit information for each PR. 
Supports filtering by repository + and time range. + + **Primary Use Cases:** + - View all PRs across repositories with pagination + - Filter PRs by specific user to track contributions + - Analyze commit patterns per PR + - Monitor PR lifecycle (created, merged, closed) + - Filter PR activity by repository or time period + + **Parameters:** + - `user` (str, optional): GitHub username to filter by (shows all PRs if not specified) + - `repository` (str, optional): Filter by specific repository (format: org/repo) + - `start_time` (str, optional): Start of time range in ISO 8601 format + - `end_time` (str, optional): End of time range in ISO 8601 format + - `page` (int, optional): Page number for pagination (default: 1) + - `page_size` (int, optional): Items per page, 1-100 (default: 10) + + **Return Structure:** + ```json + { + "data": [ + { + "number": 123, + "title": "Add feature X", + "repository": "org/repo1", + "state": "closed", + "merged": true, + "url": "https://github.com/org/repo1/pull/123", + "created_at": "2024-11-20T10:00:00Z", + "updated_at": "2024-11-21T15:30:00Z", + "commits_count": 5, + "head_sha": "abc123def456" # pragma: allowlist secret + } + ], + "pagination": { + "total": 45, + "page": 1, + "page_size": 10, + "total_pages": 5, + "has_next": true, + "has_prev": false + } + } + ``` + + **Errors:** + - 500: Database connection error or metrics server disabled + """ + if db_manager is None: + raise HTTPException( + status_code=http_status.HTTP_500_INTERNAL_SERVER_ERROR, + detail="Metrics database not available", + ) + + # Parse datetime strings + start_datetime = parse_datetime_string(start_time, "start_time") + end_datetime = parse_datetime_string(end_time, "end_time") + + # Build filter clauses + filters = [] + params: list[Any] = [] + param_count = 0 + + # Add user filter if provided + if user and user.strip(): + param_count += 1 + filters.append(f"(payload->'pull_request'->'user'->>'login' = ${param_count} OR sender = ${param_count})") + 
params.append(user.strip()) + + if start_datetime: + param_count += 1 + filters.append(f"created_at >= ${param_count}") + params.append(start_datetime) + + if end_datetime: + param_count += 1 + filters.append(f"created_at <= ${param_count}") + params.append(end_datetime) + + if repository: + param_count += 1 + filters.append(f"repository = ${param_count}") + params.append(repository) + + where_clause = " AND ".join(filters) if filters else "1=1" + + # Count total matching PRs + count_query = f""" + SELECT COUNT(DISTINCT (payload->'pull_request'->>'number')::int) as total + FROM webhooks + WHERE event_type = 'pull_request' + AND {where_clause} + """ # noqa: S608 + + # Calculate pagination + offset = (page - 1) * page_size + param_count += 1 + limit_param_idx = param_count + param_count += 1 + offset_param_idx = param_count + + # Query for PR data with pagination + data_query = f""" + SELECT DISTINCT ON (pr_number) + (payload->'pull_request'->>'number')::int as pr_number, + payload->'pull_request'->>'title' as title, + repository, + payload->'pull_request'->>'state' as state, + (payload->'pull_request'->>'merged')::boolean as merged, + payload->'pull_request'->>'html_url' as url, + payload->'pull_request'->>'created_at' as created_at, + payload->'pull_request'->>'updated_at' as updated_at, + (payload->'pull_request'->>'commits')::int as commits_count, + payload->'pull_request'->'head'->>'sha' as head_sha + FROM webhooks + WHERE event_type = 'pull_request' + AND {where_clause} + ORDER BY pr_number DESC, created_at DESC + LIMIT ${limit_param_idx} OFFSET ${offset_param_idx} + """ # noqa: S608 + + try: + # Execute count and data queries in parallel + count_result, pr_rows = await asyncio.gather( + db_manager.fetchrow(count_query, *params), + db_manager.fetch(data_query, *params, page_size, offset), + ) + + total = count_result["total"] if count_result else 0 + total_pages = (total + page_size - 1) // page_size if total > 0 else 0 + + # Format PR data + prs = [ + { + 
"number": row["pr_number"], + "title": row["title"], + "repository": row["repository"], + "state": row["state"], + "merged": row["merged"] or False, + "url": row["url"], + "created_at": row["created_at"], + "updated_at": row["updated_at"], + "commits_count": row["commits_count"] or 0, + "head_sha": row["head_sha"], + } + for row in pr_rows + ] + + return { + "data": prs, + "pagination": { + "total": total, + "page": page, + "page_size": page_size, + "total_pages": total_pages, + "has_next": page < total_pages, + "has_prev": page > 1, + }, + } + except HTTPException: + raise + except asyncio.CancelledError: + LOGGER.debug("User pull requests request was cancelled") + raise + except Exception: + LOGGER.exception("Failed to fetch user pull requests from database") + raise HTTPException( + status_code=http_status.HTTP_500_INTERNAL_SERVER_ERROR, + detail="Failed to fetch user pull requests", + ) from None + + +@FASTAPI_APP.get( + "/api/metrics/trends", + operation_id="get_metrics_trends", + dependencies=[Depends(require_metrics_server_enabled)], +) +async def get_metrics_trends( + start_time: str | None = Query( + default=None, description="Start time in ISO 8601 format (e.g., 2024-01-01T00:00:00Z)" + ), + end_time: str | None = Query(default=None, description="End time in ISO 8601 format (e.g., 2024-01-31T23:59:59Z)"), + bucket: str = Query(default="hour", pattern="^(hour|day)$", description="Time bucket ('hour', 'day')"), +) -> dict[str, Any]: + """Get aggregated event trends over time. + + Returns aggregated event counts (total, success, error) grouped by time bucket. + Essential for visualizing event volume and success rates over time on charts. + + **Parameters:** + - `start_time`: Start of time range in ISO format. + - `end_time`: End of time range in ISO format. + - `bucket`: Time aggregation bucket ('hour' or 'day'). + + **Return Structure:** + ```json + { + "time_range": { + "start_time": "...", + "end_time": "..." 
+ }, + "trends": [ + { + "bucket": "2024-01-15T14:00:00Z", + "total_events": 120, + "successful_events": 115, + "failed_events": 5 + }, + ... + ] + } + ``` + """ + if db_manager is None: + raise HTTPException( + status_code=http_status.HTTP_500_INTERNAL_SERVER_ERROR, + detail="Metrics database not available", + ) + + start_datetime = parse_datetime_string(start_time, "start_time") + end_datetime = parse_datetime_string(end_time, "end_time") + + where_clause = "WHERE 1=1" + params: list[Any] = [] + param_idx = 1 + + if start_datetime: + where_clause += f" AND created_at >= ${param_idx}" + params.append(start_datetime) + param_idx += 1 + + if end_datetime: + where_clause += f" AND created_at <= ${param_idx}" + params.append(end_datetime) + param_idx += 1 + + # Add bucket parameter + params.append(bucket) + bucket_param_idx = param_idx + + query = f""" + SELECT + date_trunc(${bucket_param_idx}, created_at) as bucket, + COUNT(*) as total_events, + COUNT(*) FILTER (WHERE status = 'success') as successful_events, + COUNT(*) FILTER (WHERE status IN ('error', 'partial')) as failed_events + FROM webhooks + {where_clause} + GROUP BY bucket + ORDER BY bucket + """ # noqa: S608 + + try: + rows = await db_manager.fetch(query, *params) + + trends = [ + { + "bucket": row["bucket"].isoformat() if row["bucket"] else None, + "total_events": row["total_events"], + "successful_events": row["successful_events"], + "failed_events": row["failed_events"], + } + for row in rows + ] + + return { + "time_range": { + "start_time": start_datetime.isoformat() if start_datetime else None, + "end_time": end_datetime.isoformat() if end_datetime else None, + }, + "trends": trends, + } + except asyncio.CancelledError: + raise + except Exception as ex: + LOGGER.exception("Failed to fetch metrics trends from database") + raise HTTPException( + status_code=http_status.HTTP_500_INTERNAL_SERVER_ERROR, + detail="Failed to fetch metrics trends", + ) from ex + + +@FASTAPI_APP.get( + "/api/metrics/summary", 
+ operation_id="get_metrics_summary", + dependencies=[Depends(require_metrics_server_enabled)], +) +async def get_metrics_summary( + start_time: str | None = Query( + default=None, description="Start time in ISO 8601 format (e.g., 2024-01-01T00:00:00Z)" + ), + end_time: str | None = Query(default=None, description="End time in ISO 8601 format (e.g., 2024-01-31T23:59:59Z)"), +) -> dict[str, Any]: + """Get overall metrics summary for webhook processing. + + Provides high-level overview of webhook processing metrics including total events, + performance statistics, success rates, and top repositories. Essential for operational + dashboards, executive reporting, and system health monitoring. + + **Primary Use Cases:** + - Generate executive dashboards and summary reports + - Monitor overall system health and performance + - Track webhook processing trends over time + - Identify system-wide performance issues + - Analyze API usage patterns across all repositories + - Quick health check for webhook processing system + + **Parameters:** + - `start_time` (str, optional): Start of time range in ISO 8601 format. + Example: "2024-01-01T00:00:00Z" + Default: No time filter (all-time stats) + - `end_time` (str, optional): End of time range in ISO 8601 format. 
+ Example: "2024-01-31T23:59:59Z" + Default: No time filter (up to current time) + + **Return Structure:** + ```json + { + "time_range": { + "start_time": "2024-01-01T00:00:00Z", + "end_time": "2024-01-31T23:59:59Z" + }, + "summary": { + "total_events": 8745, + "successful_events": 8423, + "failed_events": 322, + "success_rate": 96.32, + "avg_processing_time_ms": 5834, + "median_processing_time_ms": 4521, + "p95_processing_time_ms": 14234, + "max_processing_time_ms": 52134, + "total_api_calls": 104940, + "avg_api_calls_per_event": 12.0, + "total_token_spend": 104940, + "total_events_trend": 15.3, + "success_rate_trend": 2.1, + "failed_events_trend": -8.5, + "avg_duration_trend": -12.4 + }, + "top_repositories": [ + { + "repository": "myakove/high-traffic-repo", + "total_events": 3456, + "success_rate": 98.5 + }, + { + "repository": "myakove/medium-traffic-repo", + "total_events": 2134, + "success_rate": 95.2 + }, + { + "repository": "myakove/low-traffic-repo", + "total_events": 856, + "success_rate": 97.8 + } + ], + "event_type_distribution": { + "pull_request": 4523, + "issue_comment": 2134, + "check_run": 1234, + "push": 854 + }, + "hourly_event_rate": 12.3, + "daily_event_rate": 295.4 + } + ``` + + **Metrics Explained:** + - `total_events`: Total webhook events processed in time range + - `successful_events`: Events that completed successfully + - `failed_events`: Events that failed or partially failed + - `success_rate`: Overall success percentage (0-100) + - `avg_processing_time_ms`: Average processing duration across all events + - `median_processing_time_ms`: Median processing duration (50th percentile) + - `p95_processing_time_ms`: 95th percentile processing time (SLA metric) + - `max_processing_time_ms`: Maximum processing time (worst case scenario) + - `total_api_calls`: Total GitHub API calls made across all events + - `avg_api_calls_per_event`: Average API calls per webhook event + - `total_token_spend`: Total rate limit tokens consumed + - 
`total_events_trend`: Percentage change in total events vs previous period (e.g., 15.3 = 15.3% increase) + - `success_rate_trend`: Percentage change in success rate vs previous period + - `failed_events_trend`: Percentage change in failed events vs previous period (negative = improvement) + - `avg_duration_trend`: Percentage change in avg processing time vs previous period (negative = faster) + - `top_repositories`: Top 10 repositories by event volume + - `event_type_distribution`: Event count breakdown by type + - `hourly_event_rate`: Average events per hour in time range + - `daily_event_rate`: Average events per day in time range + + **Trend Calculation:** + - Trends compare current period to previous period of equal duration + - Example: If querying last 24 hours, trends compare to 24 hours before that + - Trend = ((current - previous) / previous) * 100 + - Returns 0.0 if no previous data or both periods have no events + - Returns 100.0 if previous period had 0 but current period has data + - Negative trends for duration metrics indicate performance improvement + + **Common Analysis Scenarios:** + - Daily summary: `start_time=&end_time=` + - Weekly trends: `start_time=&end_time=` + - Monthly reporting: `start_time=2024-01-01&end_time=2024-01-31` + - System health check: No time filters (all-time stats) + + **Error Conditions:** + - 400: Invalid datetime format in start_time/end_time parameters + - 404: Metrics server disabled (ENABLE_METRICS_SERVER=false) + - 500: Database connection errors or query failures + + **AI Agent Usage Examples:** + - "Show overall metrics summary for last month for executive report" + - "Get webhook processing health metrics to check system status" + - "Analyze event type distribution to understand webhook traffic patterns" + - "Review top repositories by event volume to identify high-traffic sources" + + **Performance Notes:** + - Summary computed in real-time from webhooks table + - Optimized queries using indexed columns 
(created_at, repository, event_type) + - Large date ranges may increase query time + - Consider caching for frequently accessed time ranges + """ + + # Helper function to calculate percentage change trends + def calculate_trend(current: float, previous: float) -> float: + """Calculate percentage change from previous to current. + + Args: + current: Current period value + previous: Previous period value + + Returns: + Percentage change rounded to 1 decimal place + - Returns 0.0 if both values are 0 + - Returns 100.0 if previous is 0 but current is not + """ + if previous == 0: + return 0.0 if current == 0 else 100.0 + return round(((current - previous) / previous) * 100, 1) + + # Validate database manager is available + if db_manager is None: + LOGGER.error("Database manager not initialized - metrics server may not be properly configured") + raise HTTPException( + status_code=http_status.HTTP_500_INTERNAL_SERVER_ERROR, + detail="Metrics database not available", + ) + + # Parse datetime strings + start_datetime = parse_datetime_string(start_time, "start_time") + end_datetime = parse_datetime_string(end_time, "end_time") + + # Calculate previous period for trend comparison + prev_start_datetime = None + prev_end_datetime = None + if start_datetime and end_datetime: + # Previous period has same duration as current period + period_duration = end_datetime - start_datetime + prev_start_datetime = start_datetime - period_duration + prev_end_datetime = end_datetime - period_duration + + # Build query with time filters for current period + where_clause = "WHERE 1=1" + params: list[Any] = [] + param_idx = 1 + + if start_datetime: + where_clause += f" AND created_at >= ${param_idx}" + params.append(start_datetime) + param_idx += 1 + + if end_datetime: + where_clause += f" AND created_at <= ${param_idx}" + params.append(end_datetime) + param_idx += 1 + + # Build query with time filters for previous period + prev_where_clause = "WHERE 1=1" + prev_params: list[Any] = [] + 
prev_param_idx = 1 + + if prev_start_datetime: + prev_where_clause += f" AND created_at >= ${prev_param_idx}" + prev_params.append(prev_start_datetime) + prev_param_idx += 1 + + if prev_end_datetime: + prev_where_clause += f" AND created_at <= ${prev_param_idx}" + prev_params.append(prev_end_datetime) + prev_param_idx += 1 + + # Main summary query + summary_query = f""" + SELECT + COUNT(*) as total_events, + COUNT(*) FILTER (WHERE status = 'success') as successful_events, + COUNT(*) FILTER (WHERE status IN ('error', 'partial')) as failed_events, + ROUND( + (COUNT(*) FILTER (WHERE status = 'success')::numeric / NULLIF(COUNT(*), 0)::numeric * 100)::numeric, + 2 + ) as success_rate, + ROUND(AVG(duration_ms)) as avg_processing_time_ms, + PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY duration_ms) as median_processing_time_ms, + PERCENTILE_CONT(0.95) WITHIN GROUP (ORDER BY duration_ms) as p95_processing_time_ms, + MAX(duration_ms) as max_processing_time_ms, + SUM(api_calls_count) as total_api_calls, + ROUND(AVG(api_calls_count), 2) as avg_api_calls_per_event, + SUM(token_spend) as total_token_spend + FROM webhooks + {where_clause} + """ # noqa: S608 + + # Top repositories query + top_repos_query = f""" + WITH total AS ( + SELECT COUNT(*) as total_count + FROM webhooks + {where_clause} + ) + SELECT + repository, + COUNT(*) as total_events, + ROUND( + (COUNT(*) FILTER (WHERE status = 'success')::numeric / COUNT(*)::numeric * 100)::numeric, + 2 + ) as success_rate, + ROUND( + (COUNT(*)::numeric / (SELECT total_count FROM total) * 100)::numeric, + 2 + ) as percentage + FROM webhooks + {where_clause} + GROUP BY repository + ORDER BY total_events DESC + LIMIT 10 + """ # noqa: S608 + + # Event type distribution query + event_type_query = f""" + SELECT + event_type, + COUNT(*) as event_count + FROM webhooks + {where_clause} + GROUP BY event_type + ORDER BY event_count DESC + """ # noqa: S608 + + # Time range for rate calculations + time_range_query = f""" + SELECT + 
MIN(created_at) as first_event_time, + MAX(created_at) as last_event_time + FROM webhooks + {where_clause} + """ # noqa: S608 + + # Previous period summary query for trend calculation + prev_summary_query = f""" + SELECT + COUNT(*) as total_events, + COUNT(*) FILTER (WHERE status = 'success') as successful_events, + COUNT(*) FILTER (WHERE status IN ('error', 'partial')) as failed_events, + ROUND( + (COUNT(*) FILTER (WHERE status = 'success')::numeric / NULLIF(COUNT(*), 0)::numeric * 100)::numeric, + 2 + ) as success_rate, + ROUND(AVG(duration_ms)) as avg_processing_time_ms + FROM webhooks + {prev_where_clause} + """ # noqa: S608 + + try: + # Execute queries using DatabaseManager helpers + summary_row = await db_manager.fetchrow(summary_query, *params) + top_repos_rows = await db_manager.fetch(top_repos_query, *params) + event_type_rows = await db_manager.fetch(event_type_query, *params) + time_range_row = await db_manager.fetchrow(time_range_query, *params) + + # Execute previous period query if time range is specified + prev_summary_row = None + if prev_start_datetime and prev_end_datetime: + prev_summary_row = await db_manager.fetchrow(prev_summary_query, *prev_params) + + # Process summary metrics + total_events = summary_row["total_events"] or 0 + current_success_rate = float(summary_row["success_rate"]) if summary_row["success_rate"] is not None else 0.0 + current_failed_events = summary_row["failed_events"] or 0 + current_avg_duration = ( + int(summary_row["avg_processing_time_ms"]) if summary_row["avg_processing_time_ms"] is not None else 0 + ) + + summary = { + "total_events": total_events, + "successful_events": summary_row["successful_events"] or 0, + "failed_events": current_failed_events, + "success_rate": current_success_rate, + "avg_processing_time_ms": current_avg_duration, + "median_processing_time_ms": int(summary_row["median_processing_time_ms"]) + if summary_row["median_processing_time_ms"] is not None + else 0, + "p95_processing_time_ms": 
int(summary_row["p95_processing_time_ms"]) + if summary_row["p95_processing_time_ms"] is not None + else 0, + "max_processing_time_ms": summary_row["max_processing_time_ms"] or 0, + "total_api_calls": summary_row["total_api_calls"] or 0, + "avg_api_calls_per_event": float(summary_row["avg_api_calls_per_event"]) + if summary_row["avg_api_calls_per_event"] is not None + else 0.0, + "total_token_spend": summary_row["total_token_spend"] or 0, + } + + # Calculate and add trend fields if previous period data is available + if prev_summary_row: + prev_total_events = prev_summary_row["total_events"] or 0 + prev_success_rate = ( + float(prev_summary_row["success_rate"]) if prev_summary_row["success_rate"] is not None else 0.0 + ) + prev_failed_events = prev_summary_row["failed_events"] or 0 + prev_avg_duration = ( + int(prev_summary_row["avg_processing_time_ms"]) + if prev_summary_row["avg_processing_time_ms"] is not None + else 0 + ) + + summary["total_events_trend"] = calculate_trend(float(total_events), float(prev_total_events)) + summary["success_rate_trend"] = calculate_trend(current_success_rate, prev_success_rate) + summary["failed_events_trend"] = calculate_trend(float(current_failed_events), float(prev_failed_events)) + summary["avg_duration_trend"] = calculate_trend(float(current_avg_duration), float(prev_avg_duration)) + else: + # No previous period data - set trends to 0.0 + summary["total_events_trend"] = 0.0 + summary["success_rate_trend"] = 0.0 + summary["failed_events_trend"] = 0.0 + summary["avg_duration_trend"] = 0.0 + + # Process top repositories + top_repositories = [ + { + "repository": row["repository"], + "total_events": row["total_events"], + "percentage": float(row["percentage"]) if row["percentage"] is not None else 0.0, + "success_rate": float(row["success_rate"]) if row["success_rate"] is not None else 0.0, + } + for row in top_repos_rows + ] + + # Process event type distribution + event_type_distribution = {row["event_type"]: row["event_count"] 
for row in event_type_rows} + + # Calculate event rates + hourly_event_rate = 0.0 + daily_event_rate = 0.0 + if time_range_row and time_range_row["first_event_time"] and time_range_row["last_event_time"]: + time_diff = time_range_row["last_event_time"] - time_range_row["first_event_time"] + total_hours = max(time_diff.total_seconds() / 3600, 1) # Avoid division by zero + total_days = max(time_diff.total_seconds() / 86400, 1) # Avoid division by zero + hourly_event_rate = round(total_events / total_hours, 2) + daily_event_rate = round(total_events / total_days, 2) + + return { + "time_range": { + "start_time": start_datetime.isoformat() if start_datetime else None, + "end_time": end_datetime.isoformat() if end_datetime else None, + }, + "summary": summary, + "top_repositories": top_repositories, + "event_type_distribution": event_type_distribution, + "hourly_event_rate": hourly_event_rate, + "daily_event_rate": daily_event_rate, + } + except asyncio.CancelledError: + raise + except HTTPException: + raise + except Exception as ex: + LOGGER.exception("Failed to fetch metrics summary from database") + raise HTTPException( + status_code=http_status.HTTP_500_INTERNAL_SERVER_ERROR, + detail="Failed to fetch metrics summary", + ) from ex + + # MCP Integration - Only register if ENABLE_MCP_SERVER=true if MCP_SERVER_ENABLED: # Create MCP instance with the main app @@ -1151,7 +3216,9 @@ async def handle_mcp_streamable_http(request: Request) -> Response: # Session manager is initialized in lifespan if http_transport is None or http_transport._session_manager is None: LOGGER.error("MCP session manager not initialized") - raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail="MCP server not initialized") + raise HTTPException( + status_code=http_status.HTTP_500_INTERNAL_SERVER_ERROR, detail="MCP server not initialized" + ) return await http_transport.handle_fastapi_request(request) diff --git a/webhook_server/config/schema.yaml 
b/webhook_server/config/schema.yaml index c63156aa..18c8f001 100644 --- a/webhook_server/config/schema.yaml +++ b/webhook_server/config/schema.yaml @@ -18,6 +18,50 @@ properties: type: string description: File path for the Logs Server log file default: logs_server.log + metrics-server-log-file: + type: string + description: File path for the Metrics Server log file + default: metrics_server.log + metrics-database: + type: object + description: PostgreSQL database configuration for metrics storage + properties: + host: + type: string + description: PostgreSQL server hostname or IP address + port: + type: integer + description: PostgreSQL server port + default: 5432 + database: + type: string + description: Database name for metrics storage + username: + type: string + description: Database username + password: + type: string + format: password + description: Database password + pool-size: + type: integer + description: Connection pool size + default: 20 + minimum: 1 + maximum: 100 + required: + - host + - database + - username + - password + gemini-api-key: + type: string + format: password + description: Google Gemini API key for AI-powered natural language queries + ai-query-enabled: + type: boolean + description: Enable AI-powered natural language queries in metrics dashboard + default: false mask-sensitive-data: type: boolean description: Mask sensitive data in logs (tokens, passwords, secrets, etc.). Default is true for security. diff --git a/webhook_server/libs/database.py b/webhook_server/libs/database.py new file mode 100644 index 00000000..1b947812 --- /dev/null +++ b/webhook_server/libs/database.py @@ -0,0 +1,308 @@ +""" +Async database connection management for PostgreSQL. + +Provides connection pooling, health checks, and graceful error handling +for metrics storage infrastructure. 
+""" + +from __future__ import annotations + +import logging +from typing import Any + +import asyncpg + +from webhook_server.libs.config import Config +from webhook_server.utils.helpers import get_logger_with_params + + +class DatabaseManager: + """ + Async PostgreSQL connection manager using asyncpg. + + Provides connection pooling, query execution, and health monitoring + for metrics database operations. + + Architecture guarantees: + - config is ALWAYS provided (required parameter) - no defensive checks needed + - logger is ALWAYS provided (required parameter) - no defensive checks needed + - pool starts as None (lazy initialization) - defensive check acceptable + + Example: + async with DatabaseManager(config, logger) as db_manager: + result = await db_manager.fetch("SELECT * FROM metrics WHERE id = $1", metric_id) + """ + + def __init__(self, config: Config, logger: logging.Logger) -> None: + """ + Initialize PostgreSQL connection manager. + + Args: + config: Configuration object containing database settings + logger: Logger instance for connection lifecycle events + + Raises: + ValueError: If required database configuration is missing + """ + self.config = config + self.logger = logger + self.pool: asyncpg.Pool[asyncpg.Record] | None = None # Lazy initialization + + # Load database configuration - fail-fast if missing required fields + db_config = self.config.root_data.get("metrics-database") + if not db_config: + raise ValueError("Missing 'metrics-database' section in config.yaml") + + self.host: str = db_config.get("host", "localhost") + self.port: int = db_config.get("port", 5432) + self.database: str = db_config.get("database", "") + self.username: str = db_config.get("username", "") + self.password: str = db_config.get("password", "") + self.pool_size: int = db_config.get("pool-size", 20) + + # Validate required fields - fail-fast + if not self.database: + raise ValueError("Missing required field 'database' in metrics-database configuration") + if not 
self.username: + raise ValueError("Missing required field 'username' in metrics-database configuration") + if not self.password: + raise ValueError("Missing required field 'password' in metrics-database configuration") + + async def connect(self) -> None: + """ + Create connection pool to PostgreSQL database. + + Establishes connection pool with configured parameters and validates connectivity. + + Raises: + asyncpg.PostgresError: If connection fails + ValueError: If pool already exists + """ + if self.pool is not None: + raise ValueError("Database pool already exists. Call disconnect() first.") + + self.logger.info( + f"Connecting to PostgreSQL database: {self.username}@{self.host}:{self.port}/{self.database} " + f"(pool_size={self.pool_size})" + ) + + try: + self.pool = await asyncpg.create_pool( + host=self.host, + port=self.port, + database=self.database, + user=self.username, + password=self.password, + min_size=1, + max_size=self.pool_size, + command_timeout=60, # 60 seconds for query execution + ) + self.logger.info("PostgreSQL connection pool created successfully") + except Exception: + self.logger.exception("Failed to connect to PostgreSQL database") + raise + + async def disconnect(self) -> None: + """ + Close connection pool gracefully. + + Waits for active connections to finish and closes pool. + Safe to call multiple times (idempotent). + """ + if self.pool is not None: # Legitimate check - lazy initialization + self.logger.info("Closing PostgreSQL connection pool") + try: + await self.pool.close() + self.logger.info("PostgreSQL connection pool closed successfully") + except Exception: + self.logger.exception("Error closing PostgreSQL connection pool") + finally: + self.pool = None + + async def execute(self, query: str, *args: Any) -> str: + """ + Execute a SQL query that doesn't return data (INSERT, UPDATE, DELETE). + + Args: + query: SQL query with $1, $2, ... 
placeholders + *args: Query parameters + + Returns: + Result status string (e.g., "INSERT 0 1", "UPDATE 5", "DELETE 3") + + Raises: + ValueError: If connection pool not initialized + asyncpg.PostgresError: If query execution fails + + Example: + await db.execute("INSERT INTO metrics (name, value) VALUES ($1, $2)", "cpu", 85.5) + """ + if self.pool is None: # Legitimate check - lazy initialization + raise ValueError("Database pool not initialized. Call connect() first.") + + try: + async with self.pool.acquire() as connection: + result = await connection.execute(query, *args) + self.logger.debug(f"Query executed successfully: {result}") + return result + except Exception: + self.logger.exception(f"Failed to execute query: {query}") + raise + + async def fetch(self, query: str, *args: Any) -> list[asyncpg.Record]: + """ + Execute a SQL query and fetch all results (SELECT). + + Args: + query: SQL query with $1, $2, ... placeholders + *args: Query parameters + + Returns: + List of records (each record behaves like dict and tuple) + + Raises: + ValueError: If connection pool not initialized + asyncpg.PostgresError: If query execution fails + + Example: + rows = await db.fetch("SELECT * FROM metrics WHERE timestamp > $1", start_time) + for row in rows: + print(row["name"], row["value"]) + """ + if self.pool is None: # Legitimate check - lazy initialization + raise ValueError("Database pool not initialized. Call connect() first.") + + try: + async with self.pool.acquire() as connection: + results = await connection.fetch(query, *args) + self.logger.debug(f"Query returned {len(results)} rows") + return results + except Exception: + self.logger.exception(f"Failed to fetch query results: {query}") + raise + + async def fetchrow(self, query: str, *args: Any) -> asyncpg.Record | None: + """ + Execute a SQL query and fetch single result row (SELECT). + + Args: + query: SQL query with $1, $2, ... 
placeholders + *args: Query parameters + + Returns: + Single record or None if no results + + Raises: + ValueError: If connection pool not initialized + asyncpg.PostgresError: If query execution fails + + Example: + row = await db.fetchrow("SELECT * FROM metrics WHERE id = $1", metric_id) + if row: + print(row["name"], row["value"]) + """ + if self.pool is None: # Legitimate check - lazy initialization + raise ValueError("Database pool not initialized. Call connect() first.") + + try: + async with self.pool.acquire() as connection: + result = await connection.fetchrow(query, *args) + if result: + self.logger.debug("Query returned 1 row") + else: + self.logger.debug("Query returned no rows") + return result + except Exception: + self.logger.exception(f"Failed to fetch single row: {query}") + raise + + async def fetchval(self, query: str, *args: Any) -> Any: + """ + Execute a SQL query and fetch single scalar value (SELECT). + + Args: + query: SQL query with $1, $2, ... placeholders + *args: Query parameters + + Returns: + Single scalar value (e.g., int, str, bool) or None if no results + + Raises: + ValueError: If connection pool not initialized + asyncpg.PostgresError: If query execution fails + + Example: + count = await db.fetchval("SELECT COUNT(*) FROM metrics WHERE status = $1", "active") + print(f"Active metrics: {count}") + """ + if self.pool is None: # Legitimate check - lazy initialization + raise ValueError("Database pool not initialized. Call connect() first.") + + try: + async with self.pool.acquire() as connection: + result = await connection.fetchval(query, *args) + self.logger.debug(f"Query returned value: {result}") + return result + except Exception: + self.logger.exception(f"Failed to fetch scalar value: {query}") + raise + + async def health_check(self) -> bool: + """ + Check database connectivity and responsiveness. 
+ + Returns: + True if database is healthy, False otherwise + + Example: + if await db.health_check(): + print("Database is healthy") + """ + try: + if self.pool is None: # Legitimate check - lazy initialization + self.logger.warning("Database pool not initialized") + return False + + async with self.pool.acquire() as connection: + await connection.fetchval("SELECT 1") + self.logger.debug("Database health check: OK") + return True + except Exception: + self.logger.exception("Database health check failed") + return False + + async def __aenter__(self) -> DatabaseManager: + """Context manager entry - initialize connection pool.""" + await self.connect() + return self + + async def __aexit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None: + """Context manager exit - cleanup connection pool.""" + await self.disconnect() + + +def get_database_manager(repository_name: str = "") -> DatabaseManager: + """ + Factory function to create DatabaseManager with proper logging. + + Args: + repository_name: Repository name for logger context (optional) + + Returns: + Configured DatabaseManager instance + + Raises: + ValueError: If database configuration missing + + Note: + asyncpg import is checked at module load time (line 13), not function call time. + If asyncpg is not installed, module import will fail before this function can be called. 
+ + Example: + db_manager = get_database_manager() + async with db_manager as db: + results = await db.fetch("SELECT * FROM metrics") + """ + config = Config(repository=repository_name) + logger = get_logger_with_params(repository_name=repository_name) + return DatabaseManager(config=config, logger=logger) diff --git a/webhook_server/libs/github_api.py b/webhook_server/libs/github_api.py index 41c6493e..059a87e9 100644 --- a/webhook_server/libs/github_api.py +++ b/webhook_server/libs/github_api.py @@ -228,6 +228,43 @@ async def _get_token_metrics(self) -> str: self.logger.debug(f"{self.log_prefix} Failed to get token metrics: {ex}") return "" + def get_api_metrics(self) -> dict[str, int | bool]: + """Get API usage metrics for this webhook instance. + + Returns: + dict with keys: + - api_calls_count: Number of API calls made during webhook processing + - token_spend: Rate limit tokens consumed (same as api_calls_count) + - token_remaining: Estimated remaining rate limit tokens + - metrics_available: Boolean indicating if metrics are available + (False = no metrics tracking, True = metrics tracked) + + Note: + When metrics_available=False, all counts will be zero (metrics not tracked). + When metrics_available=True with zero counts, it indicates legitimate zero API calls. 
+ """ + if not self.requester_wrapper or self.initial_rate_limit_remaining is None: + return { + "api_calls_count": 0, + "token_spend": 0, + "token_remaining": 0, + "metrics_available": False, + } + + # Calculate API calls made during this webhook (thread-safe via CountingRequester) + api_calls_count = self.requester_wrapper.count - self.initial_wrapper_count + token_spend = api_calls_count # Same value per GitHub API rate limit semantics + + # Calculate remaining tokens (clamp to 0 if negative due to race conditions) + token_remaining = max(0, self.initial_rate_limit_remaining - token_spend) + + return { + "api_calls_count": api_calls_count, + "token_spend": token_spend, + "token_remaining": token_remaining, + "metrics_available": True, + } + async def _clone_repository( self, pull_request: PullRequest | None = None, diff --git a/webhook_server/libs/metrics_tracker.py b/webhook_server/libs/metrics_tracker.py new file mode 100644 index 00000000..388e4f0a --- /dev/null +++ b/webhook_server/libs/metrics_tracker.py @@ -0,0 +1,189 @@ +""" +Metrics tracking for GitHub webhook events and processing statistics. + +Provides comprehensive metrics collection including: +- Webhook event storage with full payload +- Processing time and performance metrics +- API usage tracking +- Error tracking and status monitoring + +Architecture: +- Async database operations using asyncpg connection pool +- No defensive checks on required parameters (fail-fast principle) +- Proper error handling with structured logging +- Integration with DatabaseManager +""" + +from __future__ import annotations + +import json +import logging +from typing import Any +from uuid import uuid4 + +from webhook_server.libs.database import DatabaseManager + + +class MetricsTracker: + """ + Tracks webhook events and processing metrics in PostgreSQL database. 
+ + Stores comprehensive metrics including: + - Webhook event metadata and payloads + - Processing duration and performance + - API usage and rate limit consumption + - Success/failure status with error details + + Architecture guarantees: + - db_manager is ALWAYS provided (required parameter) - no defensive checks + - logger is ALWAYS provided (required parameter) - no defensive checks + + Example: + tracker = MetricsTracker(db_manager, logger) + await tracker.track_webhook_event( + delivery_id="abc123", + repository="org/repo", + event_type="pull_request", + action="opened", + pr_number=42, + sender="user", + payload={"key": "value"}, + processing_time_ms=150, + status="success", + ) + """ + + def __init__( + self, + db_manager: DatabaseManager, + logger: logging.Logger, + ) -> None: + """ + Initialize metrics tracker. + + Args: + db_manager: Database connection manager for metrics storage + logger: Logger instance for metrics tracking events + + Note: + No defensive checks - all parameters are required and ALWAYS provided. + Architecture guarantees these are initialized before MetricsTracker. + """ + self.db_manager = db_manager + self.logger = logger + + async def track_webhook_event( + self, + delivery_id: str, + repository: str, + event_type: str, + action: str, + sender: str, + payload: dict[str, Any], + processing_time_ms: int, + status: str, + pr_number: int | None = None, + error_message: str | None = None, + api_calls_count: int = 0, + token_spend: int = 0, + token_remaining: int = 0, + metrics_available: bool = True, + ) -> None: + """ + Track webhook event with comprehensive metrics. 
+ + Stores webhook event in database with processing metrics including: + - Event metadata (delivery ID, repository, event type, action) + - Processing metrics (duration, API calls, token usage) + - Status tracking (success, error, partial) + - Full payload for debugging and analytics + + Uses DatabaseManager.execute() for centralized pool management and + precondition checking. All database operations go through DatabaseManager + to avoid duplicated connection handling logic. + + Args: + delivery_id: GitHub webhook delivery ID (X-GitHub-Delivery header) + repository: Repository in org/repo format + event_type: GitHub event type (pull_request, issue_comment, etc.) + action: Event action (opened, synchronize, closed, etc.) + sender: GitHub username who triggered the event + payload: Full webhook payload from GitHub + processing_time_ms: Processing duration in milliseconds + status: Processing status (success, error, partial) + pr_number: PR number if applicable (optional) + error_message: Error message if processing failed (optional) + api_calls_count: Number of GitHub API calls made (default: 0) + token_spend: GitHub API calls consumed (default: 0) + token_remaining: Rate limit remaining after processing (default: 0) + metrics_available: Whether API metrics are available (default: True) + + Raises: + asyncpg.PostgresError: If database insert fails + ValueError: If database pool not initialized + + Example: + await tracker.track_webhook_event( + delivery_id="abc123", + repository="myorg/myrepo", + event_type="pull_request", + action="opened", + pr_number=42, + sender="johndoe", + payload=webhook_payload, + processing_time_ms=150, + status="success", + api_calls_count=3, + token_spend=3, + token_remaining=4997, + metrics_available=True, + ) + """ + try: + # Serialize payload to JSON string for JSONB storage + # Use default=str for defensive handling of non-serializable types + # (datetime, UUID, etc.) 
to prevent TypeError + payload_json = json.dumps(payload, default=str) + + # Insert webhook event into database using DatabaseManager.execute() + # This centralizes pool management and precondition checks + # Note: processed_at is auto-populated by database via server_default=func.now() + await self.db_manager.execute( + """ + INSERT INTO webhooks ( + id, delivery_id, repository, event_type, action, + pr_number, sender, payload, duration_ms, + status, error_message, api_calls_count, token_spend, token_remaining, + metrics_available + ) + VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15) + """, + uuid4(), + delivery_id, + repository, + event_type, + action, + pr_number, + sender, + payload_json, + processing_time_ms, + status, + error_message, + api_calls_count, + token_spend, + token_remaining, + metrics_available, + ) + + self.logger.info( + f"Webhook event tracked successfully: delivery_id={delivery_id}, " + f"repository={repository}, event_type={event_type}, action={action}, " + f"status={status}, processing_time_ms={processing_time_ms}" + ) + + except Exception: + self.logger.exception( + f"Failed to track webhook event: delivery_id={delivery_id}, " + f"repository={repository}, event_type={event_type}" + ) + raise diff --git a/webhook_server/libs/models.py b/webhook_server/libs/models.py new file mode 100644 index 00000000..e5d345ea --- /dev/null +++ b/webhook_server/libs/models.py @@ -0,0 +1,772 @@ +""" +SQLAlchemy models for GitHub Webhook Server metrics database. + +Defines the complete database schema for tracking webhook events, pull requests, +reviews, labels, check runs, and API usage metrics. 
+ +Architecture: +- SQLAlchemy 2.0 declarative style with type hints +- PostgreSQL-specific types (UUID, JSONB) for optimal performance +- Comprehensive indexes on frequently queried columns +- Foreign key relationships with CASCADE delete for data integrity +- Server-side defaults for timestamps and UUIDs + +Tables: +- webhooks: Webhook event store with full payload and metrics +- pull_requests: PR master records with size metrics +- pr_events: PR timeline events for analytics +- pr_reviews: Review data for approval tracking +- pr_labels: Label history for workflow tracking +- check_runs: Check run results for CI/CD metrics +- api_usage: GitHub API usage tracking for rate limit monitoring + +Integration: +- Imported in webhook_server/migrations/env.py for Alembic autogenerate +- Used by DatabaseManager for query operations +- Enables comprehensive metrics and analytics collection +""" + +from __future__ import annotations + +from datetime import datetime +from typing import Any + +from sqlalchemy import ( + Boolean, + DateTime, + ForeignKey, + Index, + Integer, + String, + Text, + UniqueConstraint, +) +from sqlalchemy.dialects.postgresql import JSONB, UUID +from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, relationship +from sqlalchemy.sql import func, text + + +class Base(DeclarativeBase): + """ + Base class for all SQLAlchemy models. + + Provides type hints for SQLAlchemy 2.0 declarative style. + All models inherit from this class. + """ + + +class Webhook(Base): + """ + Webhook event store - tracks all incoming GitHub webhook events. 
+ + Stores complete webhook payload and processing metrics including: + - Event metadata (delivery ID, repository, event type, action) + - Processing metrics (duration, API calls, token usage) + - Status tracking (success, error, partial) + + Indexes: + - delivery_id (unique): Fast lookup by GitHub delivery ID + - repository: Filter events by repository + - event_type: Filter by event type (pull_request, issue_comment, etc.) + - pr_number: Fast PR event lookup + - created_at: Time-based queries for analytics + + Relationships: + - pr_events: Timeline events for this webhook + - check_runs: Check runs triggered by this webhook + - api_usage: API usage metrics for this webhook + """ + + __tablename__ = "webhooks" + __table_args__ = ( + Index("ix_webhooks_repository_created_at", "repository", "created_at"), + Index("ix_webhooks_repository_event_type", "repository", "event_type"), + ) + + id: Mapped[UUID] = mapped_column( + UUID(as_uuid=True), + primary_key=True, + server_default=text("gen_random_uuid()"), + comment="Primary key UUID", + ) + delivery_id: Mapped[str] = mapped_column( + String(255), + unique=True, + index=True, + nullable=False, + comment="X-GitHub-Delivery header - unique webhook ID", + ) + repository: Mapped[str] = mapped_column( + String(255), + index=True, + nullable=False, + comment="Repository in org/repo format", + ) + event_type: Mapped[str] = mapped_column( + String(50), + index=True, + nullable=False, + comment="GitHub event type: pull_request, issue_comment, check_run, etc.", + ) + action: Mapped[str | None] = mapped_column( + String(50), + nullable=True, + comment="Event action: opened, synchronize, closed, etc. 
(null for events without actions like push)", + ) + pr_number: Mapped[int | None] = mapped_column( + Integer, + index=True, + nullable=True, + comment="PR number if applicable to this event", + ) + sender: Mapped[str] = mapped_column( + String(255), + nullable=False, + comment="GitHub username who triggered the event", + ) + payload: Mapped[dict[str, Any]] = mapped_column( + JSONB, + nullable=False, + comment="Full webhook payload from GitHub", + ) + created_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), + index=True, + server_default=func.now(), + nullable=False, + comment="When webhook was received", + ) + processed_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), + server_default=func.now(), + nullable=False, + comment="When webhook processing completed", + ) + duration_ms: Mapped[int] = mapped_column( + Integer, + nullable=False, + comment="Processing duration in milliseconds", + ) + status: Mapped[str] = mapped_column( + String(20), + nullable=False, + comment="Processing status: success, error, partial", + ) + error_message: Mapped[str | None] = mapped_column( + Text, + nullable=True, + comment="Error message if processing failed", + ) + api_calls_count: Mapped[int] = mapped_column( + Integer, + nullable=False, + default=0, + comment="Number of GitHub API calls made during processing", + ) + token_spend: Mapped[int] = mapped_column( + Integer, + nullable=False, + default=0, + comment="GitHub API calls consumed (rate limit tokens spent)", + ) + token_remaining: Mapped[int] = mapped_column( + Integer, + nullable=False, + default=0, + comment="Rate limit remaining after processing", + ) + metrics_available: Mapped[bool] = mapped_column( + Boolean, + nullable=False, + default=True, + server_default=text("TRUE"), + comment="Whether API metrics are available (False = no tracking, True = metrics tracked)", + ) + + # Relationships + pr_events: Mapped[list[PREvent]] = relationship( + "PREvent", + back_populates="webhook", + 
cascade="all, delete-orphan", + ) + check_runs: Mapped[list[CheckRun]] = relationship( + "CheckRun", + back_populates="webhook", + cascade="all, delete-orphan", + ) + api_usage: Mapped[list[APIUsage]] = relationship( + "APIUsage", + back_populates="webhook", + cascade="all, delete-orphan", + ) + + def __repr__(self) -> str: + """String representation for debugging.""" + return ( + f"" + ) + + +class PullRequest(Base): + """ + Pull request master records - tracks PR lifecycle and metrics. + + Stores PR metadata, statistics, and state changes including: + - Basic info (title, author, timestamps) + - Code metrics (additions, deletions, changed files) + - Size classification (XS, S, M, L, XL, XXL) + - State tracking (open, merged, closed) + + Indexes: + - repository + pr_number: Fast PR lookup (composite unique) + - author: Filter PRs by author + - created_at: Time-based queries + - updated_at: Recent activity tracking + + Relationships: + - pr_events: Timeline events for this PR + - pr_reviews: Reviews for this PR + - pr_labels: Label history for this PR + - check_runs: Check runs for this PR + """ + + __tablename__ = "pull_requests" + __table_args__ = ( + UniqueConstraint("repository", "pr_number", name="uq_pull_requests_repository_pr_number"), + Index("ix_pull_requests_repository_state", "repository", "state"), + Index("ix_pull_requests_repository_created_at", "repository", "created_at"), + Index("ix_pull_requests_author_created_at", "author", "created_at"), + ) + + id: Mapped[UUID] = mapped_column( + UUID(as_uuid=True), + primary_key=True, + server_default=text("gen_random_uuid()"), + comment="Primary key UUID", + ) + repository: Mapped[str] = mapped_column( + String(255), + index=True, + nullable=False, + comment="Repository in org/repo format", + ) + pr_number: Mapped[int] = mapped_column( + Integer, + index=True, + nullable=False, + comment="PR number within repository", + ) + title: Mapped[str] = mapped_column( + String(500), + nullable=False, + comment="PR 
title", + ) + author: Mapped[str] = mapped_column( + String(255), + index=True, + nullable=False, + comment="GitHub username of PR author", + ) + created_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), + index=True, + nullable=False, + comment="When PR was created", + ) + updated_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), + index=True, + nullable=False, + comment="When PR was last updated", + ) + merged_at: Mapped[datetime | None] = mapped_column( + DateTime(timezone=True), + nullable=True, + comment="When PR was merged (null if not merged)", + ) + closed_at: Mapped[datetime | None] = mapped_column( + DateTime(timezone=True), + nullable=True, + comment="When PR was closed (null if still open)", + ) + state: Mapped[str] = mapped_column( + String(20), + nullable=False, + comment="PR state: open, merged, closed", + ) + draft: Mapped[bool] = mapped_column( + Boolean, + nullable=False, + default=False, + comment="Whether PR is in draft state", + ) + additions: Mapped[int] = mapped_column( + Integer, + nullable=False, + default=0, + comment="Lines of code added", + ) + deletions: Mapped[int] = mapped_column( + Integer, + nullable=False, + default=0, + comment="Lines of code deleted", + ) + changed_files: Mapped[int] = mapped_column( + Integer, + nullable=False, + default=0, + comment="Number of files changed", + ) + size_label: Mapped[str | None] = mapped_column( + String(10), + nullable=True, + comment="PR size classification: XS, S, M, L, XL, XXL", + ) + + # Relationships + pr_events: Mapped[list[PREvent]] = relationship( + "PREvent", + back_populates="pull_request", + cascade="all, delete-orphan", + ) + pr_reviews: Mapped[list[PRReview]] = relationship( + "PRReview", + back_populates="pull_request", + cascade="all, delete-orphan", + ) + pr_labels: Mapped[list[PRLabel]] = relationship( + "PRLabel", + back_populates="pull_request", + cascade="all, delete-orphan", + ) + check_runs: Mapped[list[CheckRun]] = relationship( + 
"CheckRun", + back_populates="pull_request", + cascade="all, delete-orphan", + ) + + def __repr__(self) -> str: + """String representation for debugging.""" + title_display = f"{self.title[:50]}..." if len(self.title) > 50 else self.title + return ( + f"" + ) + + +class PREvent(Base): + """ + PR timeline events - tracks all events in PR lifecycle. + + Records significant events in PR timeline including: + - Code updates (synchronize) + - State changes (opened, closed, merged) + - Reviews (approved, changes_requested) + - Check runs (CI/CD pipeline events) + + Indexes: + - pr_id: Fast event lookup by PR + - event_type: Filter by event type + - created_at: Time-based queries + + Relationships: + - pull_request: PR this event belongs to + - webhook: Webhook that triggered this event + """ + + __tablename__ = "pr_events" + + id: Mapped[UUID] = mapped_column( + UUID(as_uuid=True), + primary_key=True, + server_default=text("gen_random_uuid()"), + comment="Primary key UUID", + ) + pr_id: Mapped[UUID] = mapped_column( + UUID(as_uuid=True), + ForeignKey("pull_requests.id", ondelete="CASCADE"), + index=True, + nullable=False, + comment="Foreign key to pull_requests table", + ) + webhook_id: Mapped[UUID] = mapped_column( + UUID(as_uuid=True), + ForeignKey("webhooks.id", ondelete="CASCADE"), + nullable=False, + comment="Foreign key to webhooks table", + ) + event_type: Mapped[str] = mapped_column( + String(50), + index=True, + nullable=False, + comment="Event type: opened, synchronize, review, check_run, etc.", + ) + event_data: Mapped[dict[str, Any]] = mapped_column( + JSONB, + nullable=False, + comment="Event-specific data from webhook payload", + ) + created_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), + index=True, + server_default=func.now(), + nullable=False, + comment="When event occurred", + ) + + # Relationships + pull_request: Mapped[PullRequest] = relationship( + "PullRequest", + back_populates="pr_events", + ) + webhook: Mapped[Webhook] = 
relationship( + "Webhook", + back_populates="pr_events", + ) + + def __repr__(self) -> str: + """String representation for debugging.""" + return f"" + + +class PRReview(Base): + """ + PR review data - tracks review approvals and feedback. + + Records review submissions including: + - Reviewer identity + - Review type (approved, changes_requested, commented) + - Timing information + + Indexes: + - pr_id: Fast review lookup by PR + - reviewer: Filter reviews by reviewer + - created_at: Time-based queries + + Relationships: + - pull_request: PR this review belongs to + """ + + __tablename__ = "pr_reviews" + + id: Mapped[UUID] = mapped_column( + UUID(as_uuid=True), + primary_key=True, + server_default=text("gen_random_uuid()"), + comment="Primary key UUID", + ) + pr_id: Mapped[UUID] = mapped_column( + UUID(as_uuid=True), + ForeignKey("pull_requests.id", ondelete="CASCADE"), + index=True, + nullable=False, + comment="Foreign key to pull_requests table", + ) + reviewer: Mapped[str] = mapped_column( + String(255), + index=True, + nullable=False, + comment="GitHub username of reviewer", + ) + review_type: Mapped[str] = mapped_column( + String(30), + nullable=False, + comment="Review type: approved, changes_requested, commented", + ) + created_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), + index=True, + server_default=func.now(), + nullable=False, + comment="When review was submitted", + ) + + # Relationships + pull_request: Mapped[PullRequest] = relationship( + "PullRequest", + back_populates="pr_reviews", + ) + + def __repr__(self) -> str: + """String representation for debugging.""" + return f"" + + +class PRLabel(Base): + """ + PR label history - tracks label additions and removals. 
+ + Records label lifecycle including: + - Label name + - When label was added + - When label was removed (if applicable) + + Enables tracking of: + - Label-based workflows + - Size label history + - Review label progression + + Indexes: + - pr_id: Fast label lookup by PR + - label: Filter by specific label + - added_at: Time-based queries + + Relationships: + - pull_request: PR this label belongs to + """ + + __tablename__ = "pr_labels" + + id: Mapped[UUID] = mapped_column( + UUID(as_uuid=True), + primary_key=True, + server_default=text("gen_random_uuid()"), + comment="Primary key UUID", + ) + pr_id: Mapped[UUID] = mapped_column( + UUID(as_uuid=True), + ForeignKey("pull_requests.id", ondelete="CASCADE"), + index=True, + nullable=False, + comment="Foreign key to pull_requests table", + ) + label: Mapped[str] = mapped_column( + String(100), + index=True, + nullable=False, + comment="Label name", + ) + added_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), + index=True, + server_default=func.now(), + nullable=False, + comment="When label was added", + ) + removed_at: Mapped[datetime | None] = mapped_column( + DateTime(timezone=True), + nullable=True, + comment="When label was removed (null if still present)", + ) + + # Relationships + pull_request: Mapped[PullRequest] = relationship( + "PullRequest", + back_populates="pr_labels", + ) + + def __repr__(self) -> str: + """String representation for debugging.""" + removed_str = f", removed_at='{self.removed_at}'" if self.removed_at else "" + return f"" + + +class CheckRun(Base): + """ + Check run results - tracks CI/CD pipeline execution. + + Records check run lifecycle including: + - Check name (tox, pre-commit, container-build, etc.) 
+ - Status and conclusion + - Timing and duration metrics + - Output summary for failures + + Indexes: + - pr_id: Fast check run lookup by PR + - check_name: Filter by specific check + - started_at: Time-based queries + + Relationships: + - pull_request: PR this check run belongs to + - webhook: Webhook that triggered this check run + """ + + __tablename__ = "check_runs" + __table_args__ = ( + Index("ix_check_runs_pr_id_check_name", "pr_id", "check_name"), + Index("ix_check_runs_pr_id_started_at", "pr_id", "started_at"), + ) + + id: Mapped[UUID] = mapped_column( + UUID(as_uuid=True), + primary_key=True, + server_default=text("gen_random_uuid()"), + comment="Primary key UUID", + ) + pr_id: Mapped[UUID] = mapped_column( + UUID(as_uuid=True), + ForeignKey("pull_requests.id", ondelete="CASCADE"), + index=True, + nullable=False, + comment="Foreign key to pull_requests table", + ) + webhook_id: Mapped[UUID] = mapped_column( + UUID(as_uuid=True), + ForeignKey("webhooks.id", ondelete="CASCADE"), + nullable=False, + comment="Foreign key to webhooks table", + ) + check_name: Mapped[str] = mapped_column( + String(255), + index=True, + nullable=False, + comment="Check name: tox, pre-commit, container-build, etc.", + ) + status: Mapped[str] = mapped_column( + String(20), + nullable=False, + comment="Status: queued, in_progress, completed", + ) + conclusion: Mapped[str | None] = mapped_column( + String(20), + nullable=True, + comment="Conclusion: success, failure, cancelled, etc. 
(null if not completed)", + ) + started_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), + index=True, + nullable=False, + comment="When check run started", + ) + completed_at: Mapped[datetime | None] = mapped_column( + DateTime(timezone=True), + nullable=True, + comment="When check run completed (null if not completed)", + ) + duration_ms: Mapped[int | None] = mapped_column( + Integer, + nullable=True, + comment="Check run duration in milliseconds (null if not completed)", + ) + output_title: Mapped[str | None] = mapped_column( + String(500), + nullable=True, + comment="Check run output title", + ) + output_summary: Mapped[str | None] = mapped_column( + Text, + nullable=True, + comment="Check run output summary (especially for failures)", + ) + + # Relationships + pull_request: Mapped[PullRequest] = relationship( + "PullRequest", + back_populates="check_runs", + ) + webhook: Mapped[Webhook] = relationship( + "Webhook", + back_populates="check_runs", + ) + + def __repr__(self) -> str: + """String representation for debugging.""" + return ( + f"" + ) + + +class APIUsage(Base): + """ + GitHub API usage tracking - monitors rate limit consumption. 
+
+    Records API usage metrics per webhook including:
+    - Number of API calls made
+    - Rate limit before/after processing
+    - Token spend (calls consumed)
+
+    Enables:
+    - Rate limit monitoring and alerting
+    - API usage optimization
+    - Cost analysis by repository/event type
+
+    Indexes:
+    - webhook_id: Fast usage lookup by webhook
+    - repository: Filter by repository
+    - event_type: Analyze usage by event type
+    - created_at: Time-based queries
+
+    Relationships:
+    - webhook: Webhook this usage record belongs to
+    """
+
+    __tablename__ = "api_usage"
+
+    id: Mapped[UUID] = mapped_column(
+        UUID(as_uuid=True),
+        primary_key=True,
+        server_default=text("gen_random_uuid()"),
+        comment="Primary key UUID",
+    )
+    webhook_id: Mapped[UUID] = mapped_column(
+        UUID(as_uuid=True),
+        ForeignKey("webhooks.id", ondelete="CASCADE"),
+        index=True,
+        nullable=False,
+        comment="Foreign key to webhooks table",
+    )
+    repository: Mapped[str] = mapped_column(
+        String(255),
+        index=True,
+        nullable=False,
+        comment="Repository in org/repo format",
+    )
+    event_type: Mapped[str] = mapped_column(
+        String(50),
+        index=True,
+        nullable=False,
+        comment="Event type: pull_request, issue_comment, etc.",
+    )
+    api_calls_count: Mapped[int] = mapped_column(
+        Integer,
+        nullable=False,
+        default=0,
+        comment="Number of GitHub API calls made",
+    )
+    initial_rate_limit: Mapped[int] = mapped_column(
+        Integer,
+        nullable=False,
+        comment="Rate limit remaining before processing",
+    )
+    final_rate_limit: Mapped[int] = mapped_column(
+        Integer,
+        nullable=False,
+        comment="Rate limit remaining after processing",
+    )
+    token_spend: Mapped[int] = mapped_column(
+        Integer,
+        nullable=False,
+        default=0,
+        comment="GitHub API calls consumed (rate limit tokens spent)",
+    )
+    created_at: Mapped[datetime] = mapped_column(
+        DateTime(timezone=True),
+        index=True,
+        server_default=func.now(),
+        nullable=False,
+        comment="When API usage was recorded",
+    )
+
+    # Relationships
+    webhook: Mapped[Webhook] = relationship(
+        "Webhook",
+        back_populates="api_usage",
+    )
+
+    def __repr__(self) -> str:
+        """String representation for debugging."""
+        # NOTE(review): repr text reconstructed — original f-string content was lost
+        # to angle-bracket stripping during extraction; confirm field selection.
+        return (
+            f"<APIUsage(webhook_id={self.webhook_id}, repository='{self.repository}', "
+            f"api_calls_count={self.api_calls_count}, token_spend={self.token_spend})>"
+        )
diff --git a/webhook_server/migrations/README.md b/webhook_server/migrations/README.md
new file mode 100644
index 00000000..41f60b25
--- /dev/null
+++ b/webhook_server/migrations/README.md
@@ -0,0 +1,330 @@
+# Database Migrations
+
+This directory contains Alembic database migrations for the GitHub Webhook Server metrics feature.
+
+## Overview
+
+Alembic manages database schema changes through versioned migration scripts. Each migration represents a specific change to the database schema and can be applied (`upgrade`) or reverted (`downgrade`).
+
+## Prerequisites
+
+- PostgreSQL database configured in `config.yaml` (metrics-database section)
+- Metrics dependencies installed: `uv sync --extra metrics --extra ai` or `uv add asyncpg alembic sqlalchemy[asyncio]`
+- Database connection verified (see DatabaseManager health check)
+
+## Configuration
+
+Database configuration is loaded from `config.yaml`:
+
+```yaml
+metrics-database:
+  host: localhost
+  port: 5432
+  database: webhook_metrics
+  username: webhook_user
+  password: <password>
+  pool-size: 20
+```
+
+**IMPORTANT:** Alembic loads database configuration from `config.yaml` (NOT from `alembic.ini`). The database URL is constructed dynamically in `env.py`.
+ +## Migration Workflow + +### Creating Migrations + +#### Autogenerate Migration (Recommended) + +Automatically detect schema changes by comparing SQLAlchemy models to database: + +```bash +# Create migration with auto-detected changes +uv run alembic revision --autogenerate -m "add webhook_events table" + +# Review generated migration in webhook_server/migrations/versions/ +# Edit if needed to customize upgrade/downgrade logic +``` + +#### Manual Migration + +Create empty migration template for custom changes: + +```bash +# Create empty migration +uv run alembic revision -m "add custom indexes" + +# Edit the generated file in webhook_server/migrations/versions/ +# Add your upgrade() and downgrade() logic +``` + +### Applying Migrations + +```bash +# Upgrade to latest version (head) +uv run alembic upgrade head + +# Upgrade by 1 version +uv run alembic upgrade +1 + +# Upgrade to specific revision +uv run alembic upgrade abc123def456 +``` + +### Reverting Migrations + +```bash +# Downgrade by 1 version +uv run alembic downgrade -1 + +# Downgrade to specific revision +uv run alembic downgrade abc123def456 + +# Downgrade all migrations (WARNING: destructive!) 
+uv run alembic downgrade base
+```
+
+### Migration Information
+
+```bash
+# Show current database version
+uv run alembic current
+
+# Show migration history
+uv run alembic history
+
+# Show detailed migration history
+uv run alembic history --verbose
+
+# Show specific migration details
+uv run alembic show abc123def456
+```
+
+### Offline Migrations (SQL Scripts)
+
+Generate SQL scripts without database connection:
+
+```bash
+# Generate SQL for all pending migrations
+uv run alembic upgrade head --sql > migration.sql
+
+# Generate SQL for specific migration
+uv run alembic upgrade abc123def456 --sql > migration_abc123.sql
+
+# Review SQL and apply manually to database
+psql -h localhost -U webhook_user -d webhook_metrics -f migration.sql
+```
+
+## Migration File Naming
+
+Migration files use timestamp-based naming for better organization:
+
+```text
+Format: YYYYMMDD_HHMM_<rev>_<slug>.py
+Example: 20250123_1430_abc123def456_add_webhook_events_table.py
+```
+
+This format:
+- Sorts chronologically in directory listings
+- Shows creation time at a glance
+- Includes descriptive slug for quick identification
+
+## Best Practices
+
+### Writing Migrations
+
+1. **Review autogenerated migrations** - Alembic detection isn't perfect
+2. **Test upgrades AND downgrades** - Always verify both directions work
+3. **Use transactions** - Alembic wraps migrations in transactions by default
+4. **Add indexes carefully** - Create indexes `CONCURRENTLY` in production
+5. **Handle data migrations** - Separate schema and data changes when possible
+
+### Migration Content
+
+```python
+def upgrade() -> None:
+    """Apply migration changes to database schema."""
+    # Create table
+    op.create_table(
+        'webhook_events',
+        sa.Column('id', sa.Integer(), nullable=False),
+        sa.Column('event_type', sa.String(length=50), nullable=False),
+        sa.Column('timestamp', sa.DateTime(), nullable=False),
+        sa.PrimaryKeyConstraint('id')
+    )
+
+    # Create index (use CONCURRENTLY in production)
+    # op.create_index('ix_webhook_events_timestamp', 'webhook_events', ['timestamp'])
+
+
+def downgrade() -> None:
+    """Revert migration changes from database schema."""
+    # Drop in reverse order
+    # op.drop_index('ix_webhook_events_timestamp', table_name='webhook_events')
+    op.drop_table('webhook_events')
+```
+
+### Production Migrations
+
+1. **Backup database** before applying migrations
+2. **Test in staging** environment first
+3. **Review generated SQL** with `--sql` flag
+4. **Use transactions** - default behavior, but verify
+5. **Monitor performance** - large migrations can lock tables
+6. **Create indexes concurrently** - use `postgresql_concurrently=True`
+
+### Handling Failures
+
+If migration fails:
+
+```bash
+# Check current database version
+uv run alembic current
+
+# Check what went wrong in database
+psql -h localhost -U webhook_user -d webhook_metrics
+SELECT * FROM alembic_version;
+
+# If partially applied, manually fix database or revert
+# Then update alembic_version table to correct state
+```
+
+## Integration with Webhook Server
+
+### Startup Migrations (Optional)
+
+To automatically apply migrations on server startup, add to your startup script:
+
+```python
+import subprocess
+
+# Apply pending migrations
+result = subprocess.run(["uv", "run", "alembic", "upgrade", "head"], check=True)
+```
+
+**WARNING:** Automatic migrations are NOT recommended in production. Always apply migrations manually with proper monitoring and backup.
+ +### Health Checks + +Use `DatabaseManager.health_check()` to verify database connectivity: + +```python +from webhook_server.libs.database import get_database_manager + +async def check_database(): + db_manager = get_database_manager() + async with db_manager as db: + is_healthy = await db.health_check() + if not is_healthy: + raise RuntimeError("Database health check failed") +``` + +## Common Issues + +### Issue: "Target database is not up to date" + +**Cause:** Database schema doesn't match migrations + +```bash +# Check current version +uv run alembic current + +# Check pending migrations +uv run alembic history + +# Apply pending migrations +uv run alembic upgrade head +``` + +### Issue: "Can't locate revision abc123" + +**Cause:** Migration file missing or revision ID mismatch + +```bash +# Verify migration files exist +ls -la webhook_server/migrations/versions/ + +# Check migration history +uv run alembic history + +# If migration file deleted, recreate or revert to known good state +``` + +### Issue: "FAILED: Can't acquire lock" + +**Cause:** Database table locked by another process + +```bash +# Check for active connections +psql -h localhost -U webhook_user -d webhook_metrics +SELECT * FROM pg_stat_activity WHERE datname = 'webhook_metrics'; + +# Terminate blocking connections (if safe) +SELECT pg_terminate_backend(pid) FROM pg_stat_activity WHERE datname = 'webhook_metrics' AND pid != pg_backend_pid(); +``` + +### Issue: "Config file not found" + +**Cause:** `config.yaml` not found in `WEBHOOK_SERVER_DATA_DIR` + +```bash +# Check environment variable +echo $WEBHOOK_SERVER_DATA_DIR + +# Set environment variable if not set +export WEBHOOK_SERVER_DATA_DIR=/home/podman/data + +# Verify config file exists +ls -la $WEBHOOK_SERVER_DATA_DIR/config.yaml +``` + +## Architecture Notes + +### Async Support + +This migration setup uses **async PostgreSQL** via `asyncpg`: + +- Migrations run in async context (`run_async_migrations()`) +- Uses 
`async_engine_from_config()` for engine creation +- Connection handling via `connection.run_sync()` + +### Configuration Loading + +Database configuration and migration paths are loaded dynamically from `config.yaml` (NOT `alembic.ini`): + +1. `env.py` imports `webhook_server.libs.config.Config` +2. Reads `metrics-database` section from `config.yaml` +3. Constructs PostgreSQL URL: `postgresql+asyncpg://user:pass@host:port/db` # pragma: allowlist secret +4. Sets `sqlalchemy.url` in Alembic config dynamically +5. Sets `version_locations` based on `WEBHOOK_SERVER_DATA_DIR` environment variable + +**Migration Versions Path:** +- The path where Alembic stores migration version files is determined by `WEBHOOK_SERVER_DATA_DIR` +- Default path: `{WEBHOOK_SERVER_DATA_DIR}/migrations/versions` +- Container default: `/home/podman/data/migrations/versions` +- Non-container example: `/home/myakove/data/migrations/versions` (when `WEBHOOK_SERVER_DATA_DIR=/home/myakove/data`) +- This supports both container and non-container deployments without hardcoded paths + +### Model Discovery + +SQLAlchemy models are imported in `env.py` for autogenerate support: + +```python +from webhook_server.libs.models import Base +target_metadata = Base.metadata +``` + +This enables Alembic to auto-detect schema changes by comparing SQLAlchemy models to the database. + +## Next Steps + +1. **Create SQLAlchemy models** (task #5) - Define webhook_events, pull_request_metrics, etc. +2. **Generate initial migration** - `uv run alembic revision --autogenerate -m "initial schema"` +3. **Apply migration** - `uv run alembic upgrade head` +4. 
**Verify schema** - Check database tables created correctly + +## Resources + +- [Alembic Documentation](https://alembic.sqlalchemy.org/) +- [SQLAlchemy Async Documentation](https://docs.sqlalchemy.org/en/20/orm/extensions/asyncio.html) +- [asyncpg Documentation](https://magicstack.github.io/asyncpg/) +- Project: `webhook_server/libs/database.py` - DatabaseManager implementation +- Project: `examples/config.yaml` - Database configuration examples diff --git a/webhook_server/migrations/env.py b/webhook_server/migrations/env.py new file mode 100644 index 00000000..6863d027 --- /dev/null +++ b/webhook_server/migrations/env.py @@ -0,0 +1,242 @@ +""" +Alembic migration environment for GitHub Webhook Server metrics database. + +This module configures Alembic to: +- Use async PostgreSQL via asyncpg +- Load database configuration from webhook_server/libs/config.py +- Support both online (with database connection) and offline (SQL script) migrations +- Integrate with project logging infrastructure + +Key integration points: +- Database config loaded from config.yaml (metrics-database section) +- Uses DatabaseManager connection settings +- Async migration support for PostgreSQL + +Architecture guarantees: +- Config is loaded from environment or default path - fail-fast if missing +- All SQLAlchemy models are imported for autogenerate support +""" + +from __future__ import annotations + +import asyncio +import os +from logging.config import fileConfig +from urllib.parse import quote + +from alembic import context +from simple_logger.logger import get_logger +from sqlalchemy import pool +from sqlalchemy.engine import Connection +from sqlalchemy.ext.asyncio import async_engine_from_config + +from webhook_server.libs.config import Config +from webhook_server.libs.models import Base + +# Alembic Config object provides access to alembic.ini values +config = context.config + +# Interpret the config file for Python logging +# This line sets up loggers basically +if 
config.config_file_name is not None: + fileConfig(config.config_file_name) + +# Get simple logger for Alembic (avoid Config dependency for migration-only commands) +logger = get_logger(name="alembic.migrations", level="INFO") + + +def _configure_from_config() -> None: + """ + Load database configuration and set Alembic options. + + This helper extracts the "load config + build URL + set Alembic options" logic + for easier testing and better separation of concerns. + + Raises: + FileNotFoundError: Config file not found + KeyError: Missing required database configuration key + ValueError: Database configuration section missing + + Architecture guarantees: + - Config is loaded from environment or default path - fail-fast if missing + - Required keys: username, password, database + - Optional keys: host (default: localhost), port (default: 5432) + """ + webhook_config = Config() + db_config = webhook_config.root_data.get("metrics-database") + + if not db_config: + raise ValueError( + "Database configuration missing. Add 'metrics-database' section to config.yaml. " + "See examples/config.yaml for reference." + ) + + # Construct PostgreSQL asyncpg URL with URL-encoded credentials + # URL-encode ALL components to handle special characters safely: + # - username/password: may contain @, :, /, etc. 
+ # - database: may contain special characters + # Format: postgresql+asyncpg://user:password@host:port/database # pragma: allowlist secret + encoded_username = quote(db_config["username"], safe="") + encoded_password = quote(db_config["password"], safe="") + encoded_database = quote(db_config["database"], safe="") + host = db_config.get("host", "localhost") + port = db_config.get("port", 5432) + + db_url = f"postgresql+asyncpg://{encoded_username}:{encoded_password}@{host}:{port}/{encoded_database}" + + # Set database URL in Alembic config (overrides alembic.ini if set) + config.set_main_option("sqlalchemy.url", db_url) + + # Set version_locations dynamically based on data directory + # This replaces the hardcoded path in alembic.ini to support non-container deployments + # version_locations is where Alembic stores migration version files + versions_path = os.path.join(webhook_config.data_dir, "migrations", "versions") + config.set_main_option("version_locations", versions_path) + + logger.info( + f"Loaded database configuration: {db_config['username']}@" + f"{db_config.get('host', 'localhost')}:{db_config.get('port', 5432)}" + f"/{db_config['database']}" + ) + logger.info(f"Migration versions directory: {versions_path}") + + +# Load database configuration from config.yaml +try: + _configure_from_config() +except FileNotFoundError: + logger.exception("Config file not found. 
Ensure config.yaml exists in WEBHOOK_SERVER_DATA_DIR.") + raise +except KeyError: + # logger.exception automatically logs the traceback and exception details + # No need to interpolate the exception object + logger.exception("Missing required key in metrics-database config") + raise +except Exception: + logger.exception("Failed to load database configuration") + raise + +# Set target metadata for autogenerate - enables schema comparison +# All models in models.py are automatically registered with Base.metadata when Base is imported +target_metadata = Base.metadata + + +def run_migrations_offline() -> None: + """ + Run migrations in 'offline' mode. + + This configures the context with just a URL and not an Engine, + though an Engine is acceptable here as well. By skipping the Engine + creation we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + Useful for generating SQL scripts without database connectivity. + + Example: + alembic upgrade head --sql > migration.sql + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + compare_type=True, # Detect column type changes + compare_server_default=True, # Detect default value changes + ) + + logger.info("Running migrations in offline mode (SQL script generation)") + + with context.begin_transaction(): + context.run_migrations() + + +def do_run_migrations(connection: Connection) -> None: + """ + Execute migrations with given database connection. + + Args: + connection: SQLAlchemy connection to use for migrations + + This is called by run_migrations_online() and runs the actual + migration operations against the database. 
+ """ + context.configure( + connection=connection, + target_metadata=target_metadata, + compare_type=True, # Detect column type changes + compare_server_default=True, # Detect default value changes + ) + + with context.begin_transaction(): + context.run_migrations() + + +async def run_async_migrations() -> None: + """ + Run migrations using async engine. + + Creates async engine from configuration and runs migrations + in async context. This is required for asyncpg (async PostgreSQL driver). + + The async engine is created from alembic.ini config with + database URL loaded from config.yaml. + """ + # Create async engine configuration + configuration = config.get_section(config.config_ini_section, {}) + + # Override with our database URL from config.yaml + configuration["sqlalchemy.url"] = config.get_main_option("sqlalchemy.url") + + # Async engine configuration for asyncpg + connectable = async_engine_from_config( + configuration, + prefix="sqlalchemy.", + poolclass=pool.NullPool, # No connection pooling in migrations + ) + + logger.info("Running migrations in online mode (async PostgreSQL)") + + async with connectable.connect() as connection: + await connection.run_sync(do_run_migrations) + + await connectable.dispose() + + +def run_migrations_online() -> None: + """ + Run migrations in 'online' mode. + + In this scenario we create an async Engine and associate a connection + with the context. This is the normal mode for running migrations. + + Uses asyncpg for async PostgreSQL connectivity. + + Note on asyncio.run() usage: + This function is called by the Alembic CLI, which runs in a synchronous context. + Using asyncio.run() is safe here since no event loop is running. + + IMPORTANT: If run_migrations_online() is ever reused from an async context + (e.g., from within a running FastAPI application), you MUST use an alternate + entrypoint that directly awaits run_async_migrations() instead of wrapping + it in asyncio.run(). 
Calling asyncio.run() from within an already-running + event loop will raise RuntimeError. + + Example alternate async entrypoint: + async def run_migrations_online_async() -> None: + await run_async_migrations() + + Example: + alembic upgrade head + alembic downgrade -1 + """ + asyncio.run(run_async_migrations()) + + +# Determine migration mode and execute +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/webhook_server/migrations/script.py.mako b/webhook_server/migrations/script.py.mako new file mode 100644 index 00000000..fb69dbba --- /dev/null +++ b/webhook_server/migrations/script.py.mako @@ -0,0 +1,30 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from __future__ import annotations + +from typing import Sequence, Union + +import sqlalchemy as sa +from alembic import op +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision: str = ${repr(up_revision)} +down_revision: Union[str, None] = ${repr(down_revision)} +branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} +depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} + + +def upgrade() -> None: + """Apply migration changes to database schema.""" + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + """Revert migration changes from database schema.""" + ${downgrades if downgrades else "pass"} diff --git a/webhook_server/tests/manifests/config.yaml b/webhook_server/tests/manifests/config.yaml index 88b5fb4c..3ddd258d 100644 --- a/webhook_server/tests/manifests/config.yaml +++ b/webhook_server/tests/manifests/config.yaml @@ -25,6 +25,13 @@ auto-verified-and-merged-users: auto-verify-cherry-picked-prs: true +metrics-database: + host: localhost + port: 5432 + database: webhook_metrics + username: webhook_user + password: webhook_pass # pragma: allowlist secret + repositories: test-repo: name: my-org/test-repo diff 
--git a/webhook_server/tests/test_app.py b/webhook_server/tests/test_app.py index 4852675d..cc55eb76 100644 --- a/webhook_server/tests/test_app.py +++ b/webhook_server/tests/test_app.py @@ -17,8 +17,8 @@ FASTAPI_APP, HTTPException, get_log_viewer_controller, + http_status, require_log_server_enabled, - status, websocket_log_stream, ) from webhook_server.libs.exceptions import RepositoryNotFoundInConfigError @@ -667,7 +667,9 @@ async def test_websocket_log_stream_disabled(self) -> None: mock_ws = AsyncMock() with patch("webhook_server.app.LOG_SERVER_ENABLED", False): await websocket_log_stream(mock_ws) - mock_ws.close.assert_called_once_with(code=status.WS_1008_POLICY_VIOLATION, reason="Log server is disabled") + mock_ws.close.assert_called_once_with( + code=http_status.WS_1008_POLICY_VIOLATION, reason="Log server is disabled" + ) @pytest.mark.asyncio async def test_websocket_log_stream_enabled(self) -> None: diff --git a/webhook_server/tests/test_config.py b/webhook_server/tests/test_config.py index e310ea14..ca7ea25d 100644 --- a/webhook_server/tests/test_config.py +++ b/webhook_server/tests/test_config.py @@ -154,6 +154,59 @@ def test_root_data_corrupted_file(self, temp_config_dir: str, monkeypatch: pytes with pytest.raises(yaml.YAMLError): _ = config.root_data + def test_root_data_file_deleted_after_init(self, temp_config_dir: str, monkeypatch: pytest.MonkeyPatch) -> None: + """Test root_data property when file is deleted after initialization (race condition).""" + monkeypatch.setenv("WEBHOOK_SERVER_DATA_DIR", temp_config_dir) + + mock_logger = Mock() + config = Config(logger=mock_logger) + config_file = os.path.join(temp_config_dir, "config.yaml") + + # Delete the config file to simulate race condition + os.remove(config_file) + + # Should raise FileNotFoundError and log the exception + with pytest.raises(FileNotFoundError): + _ = config.root_data + + # Verify logger.exception was called + mock_logger.exception.assert_called_once() + assert "Config file not 
found" in mock_logger.exception.call_args.args[0] + + def test_root_data_permission_error(self, temp_config_dir: str, monkeypatch: pytest.MonkeyPatch) -> None: + """Test root_data property when permission is denied reading config file.""" + monkeypatch.setenv("WEBHOOK_SERVER_DATA_DIR", temp_config_dir) + + mock_logger = Mock() + config = Config(logger=mock_logger) + + # Mock open to raise PermissionError for better portability + with patch("builtins.open", side_effect=PermissionError("Permission denied")): + # Should raise PermissionError and log the exception + with pytest.raises(PermissionError, match="Permission denied"): + _ = config.root_data + + # Verify logger.exception was called + mock_logger.exception.assert_called_once() + assert "Permission denied" in mock_logger.exception.call_args.args[0] + + def test_root_data_generic_exception(self, temp_config_dir: str, monkeypatch: pytest.MonkeyPatch) -> None: + """Test root_data property with generic exception during file read.""" + monkeypatch.setenv("WEBHOOK_SERVER_DATA_DIR", temp_config_dir) + + mock_logger = Mock() + config = Config(logger=mock_logger) + + # Mock open to raise a generic exception + with patch("builtins.open", side_effect=RuntimeError("Unexpected error")): + # Should raise RuntimeError and log the exception + with pytest.raises(RuntimeError, match="Unexpected error"): + _ = config.root_data + + # Verify logger.exception was called + mock_logger.exception.assert_called_once() + assert "Failed to load config file" in mock_logger.exception.call_args.args[0] + def test_repository_data_with_repository(self, temp_config_dir: str, monkeypatch: pytest.MonkeyPatch) -> None: """Test repository_data property when repository is specified.""" monkeypatch.setenv("WEBHOOK_SERVER_DATA_DIR", temp_config_dir) @@ -221,41 +274,72 @@ def test_repository_local_data_list_result(self, temp_config_dir: str, monkeypat assert result == {"local-setting": "value"} - 
@patch("webhook_server.utils.helpers.get_github_repo_api") - def test_repository_local_data_file_not_found( - self, mock_get_repo_api: Mock, temp_config_dir: str, monkeypatch: pytest.MonkeyPatch - ) -> None: + def test_repository_local_data_file_not_found(self, temp_config_dir: str, monkeypatch: pytest.MonkeyPatch) -> None: """Test repository_local_data method when config file is not found.""" monkeypatch.setenv("WEBHOOK_SERVER_DATA_DIR", temp_config_dir) # Mock repository that raises UnknownObjectException mock_repo = Mock() mock_repo.get_contents.side_effect = UnknownObjectException(404, "Not found") - mock_get_repo_api.return_value = mock_repo - config = Config(repository="test-repo") + mock_logger = Mock() + config = Config(logger=mock_logger, repository="test-repo") mock_github_api = Mock() + mock_github_api.get_repo.return_value = mock_repo result = config.repository_local_data(mock_github_api, "org/test-repo") assert result == {} + # Verify debug was called (relax constraint - don't over-constrain call count) + assert mock_logger.debug.called + # Verify the debug log message is about getting GitHub API + debug_calls = [call.args[0] for call in mock_logger.debug.call_args_list] + assert any("Get GitHub API for repository" in msg and "org/test-repo" in msg for msg in debug_calls) + + def test_repository_local_data_yaml_error(self, temp_config_dir: str, monkeypatch: pytest.MonkeyPatch) -> None: + """Test repository_local_data method when repository config has invalid YAML.""" + monkeypatch.setenv("WEBHOOK_SERVER_DATA_DIR", temp_config_dir) + + # Mock repository and config file with invalid YAML + mock_repo = Mock() + mock_config_file = Mock() + mock_config_file.decoded_content = b"invalid: yaml: content: [" + mock_repo.get_contents.return_value = mock_config_file + + mock_logger = Mock() + config = Config(logger=mock_logger, repository="test-repo") + mock_github_api = Mock() + mock_github_api.get_repo.return_value = mock_repo + + # Should raise yaml.YAMLError 
and log the exception + with pytest.raises(yaml.YAMLError): + config.repository_local_data(mock_github_api, "org/test-repo") + + # Verify logger.exception was called + mock_logger.exception.assert_called_once() + assert "invalid YAML syntax" in mock_logger.exception.call_args.args[0] - @patch("webhook_server.utils.helpers.get_github_repo_api") def test_repository_local_data_exception_handling( - self, mock_get_repo_api: Mock, temp_config_dir: str, monkeypatch: pytest.MonkeyPatch + self, temp_config_dir: str, monkeypatch: pytest.MonkeyPatch ) -> None: - """Test repository_local_data method with exception handling.""" - monkeypatch.setenv("WEBHOOK_SERVER_DATA_DIR", temp_config_dir) + """Test repository_local_data method with generic exception handling. - # Mock repository that raises an exception - mock_get_repo_api.side_effect = Exception("API Error") + Note: Config.repository_local_data uses the passed-in github_api.get_repo + directly, not the global get_github_repo_api helper. We trigger the exception + via the github_api mock instead. 
+ """ + monkeypatch.setenv("WEBHOOK_SERVER_DATA_DIR", temp_config_dir) config = Config(repository="test-repo") + + # Mock github_api to raise a generic exception when get_repo is called mock_github_api = Mock() + mock_github_api.get_repo.side_effect = Exception("API Error") result = config.repository_local_data(mock_github_api, "org/test-repo") assert result == {} + mock_github_api.get_repo.assert_called_once_with("org/test-repo") def test_repository_local_data_no_repository(self, temp_config_dir: str, monkeypatch: pytest.MonkeyPatch) -> None: """Test repository_local_data method when repository is not specified.""" diff --git a/webhook_server/tests/test_database.py b/webhook_server/tests/test_database.py new file mode 100644 index 00000000..9e2e7cb0 --- /dev/null +++ b/webhook_server/tests/test_database.py @@ -0,0 +1,512 @@ +"""Tests for database connection managers.""" + +from contextlib import asynccontextmanager +from unittest.mock import AsyncMock, Mock, patch + +import pytest + + +def create_async_pool_mock(connection: AsyncMock) -> Mock: + """Create a properly mocked async pool with async context manager.""" + + @asynccontextmanager + async def mock_acquire(): + yield connection + + pool = Mock() + pool.acquire = mock_acquire + pool.close = AsyncMock() + return pool + + +class TestDatabaseManager: + """Test suite for DatabaseManager class.""" + + @pytest.fixture + def mock_config(self) -> Mock: + """Create a mock Config object.""" + mock = Mock() + mock.root_data = { + "metrics-database": { + "host": "localhost", + "port": 5432, + "database": "test_db", + "username": "test_user", + "password": "test_pass", # pragma: allowlist secret + "pool-size": 10, + } + } + return mock + + @pytest.fixture + def mock_logger(self) -> Mock: + """Create a mock logger.""" + return Mock() + + def test_database_manager_init( + self, + mock_config: Mock, + mock_logger: Mock, + ) -> None: + """Test DatabaseManager initialization.""" + from webhook_server.libs.database import 
DatabaseManager + + manager = DatabaseManager(mock_config, mock_logger) + + assert manager.host == "localhost" + assert manager.port == 5432 + assert manager.database == "test_db" + assert manager.username == "test_user" + assert manager.password == "test_pass" # noqa: S105 # pragma: allowlist secret + assert manager.pool_size == 10 + assert manager.pool is None + + def test_database_manager_init_missing_config( + self, + mock_logger: Mock, + ) -> None: + """Test DatabaseManager initialization with missing config.""" + from webhook_server.libs.database import DatabaseManager + + mock_config = Mock() + mock_config.root_data = {} + + with pytest.raises(ValueError, match="Missing 'metrics-database' section"): + DatabaseManager(mock_config, mock_logger) + + def test_database_manager_init_missing_database( + self, + mock_logger: Mock, + ) -> None: + """Test DatabaseManager initialization with missing database name.""" + from webhook_server.libs.database import DatabaseManager + + mock_config = Mock() + mock_config.root_data = { + "metrics-database": { + "host": "localhost", + "port": 5432, + "username": "test_user", + "password": "test_pass", # pragma: allowlist secret + } + } + + with pytest.raises(ValueError, match="Missing required field 'database'"): + DatabaseManager(mock_config, mock_logger) + + def test_database_manager_init_missing_username( + self, + mock_logger: Mock, + ) -> None: + """Test DatabaseManager initialization with missing username.""" + from webhook_server.libs.database import DatabaseManager + + mock_config = Mock() + mock_config.root_data = { + "metrics-database": { + "host": "localhost", + "port": 5432, + "database": "test_db", + "password": "test_pass", # pragma: allowlist secret + } + } + + with pytest.raises(ValueError, match="username"): + DatabaseManager(mock_config, mock_logger) + + def test_database_manager_init_missing_password( + self, + mock_logger: Mock, + ) -> None: + """Test DatabaseManager initialization with missing password.""" + 
from webhook_server.libs.database import DatabaseManager + + mock_config = Mock() + mock_config.root_data = { + "metrics-database": { + "host": "localhost", + "port": 5432, + "database": "test_db", + "username": "test_user", + } + } + + with pytest.raises(ValueError, match="password"): + DatabaseManager(mock_config, mock_logger) + + @pytest.mark.asyncio + async def test_database_manager_connect( + self, + mock_config: Mock, + mock_logger: Mock, + ) -> None: + """Test DatabaseManager connect.""" + from webhook_server.libs.database import DatabaseManager + + manager = DatabaseManager(mock_config, mock_logger) + + with patch("webhook_server.libs.database.asyncpg.create_pool", new=AsyncMock()) as mock_create_pool: + mock_pool = Mock() + mock_pool.close = AsyncMock() + mock_create_pool.return_value = mock_pool + + await manager.connect() + + assert manager.pool is mock_pool + mock_create_pool.assert_called_once() + + @pytest.mark.asyncio + async def test_database_manager_connect_already_connected( + self, + mock_config: Mock, + mock_logger: Mock, + ) -> None: + """Test DatabaseManager connect when already connected.""" + from webhook_server.libs.database import DatabaseManager + + manager = DatabaseManager(mock_config, mock_logger) + manager.pool = Mock() + + with pytest.raises(ValueError, match="Database pool already exists"): + await manager.connect() + + @pytest.mark.asyncio + async def test_database_manager_connect_failure( + self, + mock_config: Mock, + mock_logger: Mock, + ) -> None: + """Test DatabaseManager connect failure.""" + from webhook_server.libs.database import DatabaseManager + + manager = DatabaseManager(mock_config, mock_logger) + + with patch("webhook_server.libs.database.asyncpg.create_pool") as mock_create_pool: + mock_create_pool.side_effect = Exception("Connection failed") + + with pytest.raises(Exception, match="Connection failed"): + await manager.connect() + + @pytest.mark.asyncio + async def test_database_manager_disconnect( + self, + 
mock_config: Mock, + mock_logger: Mock, + ) -> None: + """Test DatabaseManager disconnect.""" + from webhook_server.libs.database import DatabaseManager + + manager = DatabaseManager(mock_config, mock_logger) + mock_pool = AsyncMock() + manager.pool = mock_pool + + await manager.disconnect() + + mock_pool.close.assert_called_once() + assert manager.pool is None + + @pytest.mark.asyncio + async def test_database_manager_disconnect_no_pool( + self, + mock_config: Mock, + mock_logger: Mock, + ) -> None: + """Test DatabaseManager disconnect when no pool exists.""" + from webhook_server.libs.database import DatabaseManager + + manager = DatabaseManager(mock_config, mock_logger) + + # Should not raise + await manager.disconnect() + assert manager.pool is None + + @pytest.mark.asyncio + async def test_database_manager_disconnect_error( + self, + mock_config: Mock, + mock_logger: Mock, + ) -> None: + """Test DatabaseManager disconnect with error.""" + from webhook_server.libs.database import DatabaseManager + + manager = DatabaseManager(mock_config, mock_logger) + mock_pool = AsyncMock() + mock_pool.close.side_effect = Exception("Close failed") + manager.pool = mock_pool + + # Should not raise, but log error + await manager.disconnect() + assert manager.pool is None + + @pytest.mark.asyncio + async def test_database_manager_execute( + self, + mock_config: Mock, + mock_logger: Mock, + ) -> None: + """Test DatabaseManager execute.""" + from webhook_server.libs.database import DatabaseManager + + manager = DatabaseManager(mock_config, mock_logger) + mock_connection = AsyncMock() + mock_connection.execute.return_value = "INSERT 0 1" + manager.pool = create_async_pool_mock(mock_connection) + + result = await manager.execute("INSERT INTO test VALUES ($1)", "value") + + assert result == "INSERT 0 1" + mock_connection.execute.assert_called_once_with("INSERT INTO test VALUES ($1)", "value") + + @pytest.mark.asyncio + async def test_database_manager_execute_no_pool( + self, + 
mock_config: Mock, + mock_logger: Mock, + ) -> None: + """Test DatabaseManager execute without pool.""" + from webhook_server.libs.database import DatabaseManager + + manager = DatabaseManager(mock_config, mock_logger) + + with pytest.raises(ValueError, match="Database pool not initialized"): + await manager.execute("INSERT INTO test VALUES ($1)", "value") + + @pytest.mark.asyncio + async def test_database_manager_execute_failure( + self, + mock_config: Mock, + mock_logger: Mock, + ) -> None: + """Test DatabaseManager execute failure.""" + from webhook_server.libs.database import DatabaseManager + + manager = DatabaseManager(mock_config, mock_logger) + mock_connection = AsyncMock() + mock_connection.execute.side_effect = Exception("Execute failed") + manager.pool = create_async_pool_mock(mock_connection) + + with pytest.raises(Exception, match="Execute failed"): + await manager.execute("INSERT INTO test VALUES ($1)", "value") + + @pytest.mark.asyncio + async def test_database_manager_fetch( + self, + mock_config: Mock, + mock_logger: Mock, + ) -> None: + """Test DatabaseManager fetch.""" + from webhook_server.libs.database import DatabaseManager + + manager = DatabaseManager(mock_config, mock_logger) + mock_connection = AsyncMock() + mock_records = [{"id": 1, "name": "test"}] + mock_connection.fetch.return_value = mock_records + manager.pool = create_async_pool_mock(mock_connection) + + result = await manager.fetch("SELECT * FROM test WHERE id = $1", 1) + + assert result == mock_records + mock_connection.fetch.assert_called_once_with("SELECT * FROM test WHERE id = $1", 1) + + @pytest.mark.asyncio + async def test_database_manager_fetch_no_pool( + self, + mock_config: Mock, + mock_logger: Mock, + ) -> None: + """Test DatabaseManager fetch without pool.""" + from webhook_server.libs.database import DatabaseManager + + manager = DatabaseManager(mock_config, mock_logger) + + with pytest.raises(ValueError, match="Database pool not initialized"): + await 
manager.fetch("SELECT * FROM test") + + @pytest.mark.asyncio + async def test_database_manager_fetch_failure( + self, + mock_config: Mock, + mock_logger: Mock, + ) -> None: + """Test DatabaseManager fetch failure.""" + from webhook_server.libs.database import DatabaseManager + + manager = DatabaseManager(mock_config, mock_logger) + mock_connection = AsyncMock() + mock_connection.fetch.side_effect = Exception("Fetch failed") + manager.pool = create_async_pool_mock(mock_connection) + + with pytest.raises(Exception, match="Fetch failed"): + await manager.fetch("SELECT * FROM test") + + @pytest.mark.asyncio + async def test_database_manager_fetchrow( + self, + mock_config: Mock, + mock_logger: Mock, + ) -> None: + """Test DatabaseManager fetchrow.""" + from webhook_server.libs.database import DatabaseManager + + manager = DatabaseManager(mock_config, mock_logger) + mock_connection = AsyncMock() + mock_record = {"id": 1, "name": "test"} + mock_connection.fetchrow.return_value = mock_record + manager.pool = create_async_pool_mock(mock_connection) + + result = await manager.fetchrow("SELECT * FROM test WHERE id = $1", 1) + + assert result == mock_record + mock_connection.fetchrow.assert_called_once_with("SELECT * FROM test WHERE id = $1", 1) + + @pytest.mark.asyncio + async def test_database_manager_fetchrow_no_result( + self, + mock_config: Mock, + mock_logger: Mock, + ) -> None: + """Test DatabaseManager fetchrow with no result.""" + from webhook_server.libs.database import DatabaseManager + + manager = DatabaseManager(mock_config, mock_logger) + mock_connection = AsyncMock() + mock_connection.fetchrow.return_value = None + manager.pool = create_async_pool_mock(mock_connection) + + result = await manager.fetchrow("SELECT * FROM test WHERE id = $1", 999) + + assert result is None + + @pytest.mark.asyncio + async def test_database_manager_fetchrow_no_pool( + self, + mock_config: Mock, + mock_logger: Mock, + ) -> None: + """Test DatabaseManager fetchrow without pool.""" + 
from webhook_server.libs.database import DatabaseManager + + manager = DatabaseManager(mock_config, mock_logger) + + with pytest.raises(ValueError, match="Database pool not initialized"): + await manager.fetchrow("SELECT * FROM test WHERE id = $1", 1) + + @pytest.mark.asyncio + async def test_database_manager_fetchrow_failure( + self, + mock_config: Mock, + mock_logger: Mock, + ) -> None: + """Test DatabaseManager fetchrow failure.""" + from webhook_server.libs.database import DatabaseManager + + manager = DatabaseManager(mock_config, mock_logger) + mock_connection = AsyncMock() + mock_connection.fetchrow.side_effect = Exception("Fetchrow failed") + manager.pool = create_async_pool_mock(mock_connection) + + with pytest.raises(Exception, match="Fetchrow failed"): + await manager.fetchrow("SELECT * FROM test WHERE id = $1", 1) + + @pytest.mark.asyncio + async def test_database_manager_health_check_success( + self, + mock_config: Mock, + mock_logger: Mock, + ) -> None: + """Test DatabaseManager health_check success.""" + from webhook_server.libs.database import DatabaseManager + + manager = DatabaseManager(mock_config, mock_logger) + mock_connection = AsyncMock() + mock_connection.fetchval.return_value = 1 + manager.pool = create_async_pool_mock(mock_connection) + + result = await manager.health_check() + + assert result is True + mock_connection.fetchval.assert_called_once_with("SELECT 1") + + @pytest.mark.asyncio + async def test_database_manager_health_check_no_pool( + self, + mock_config: Mock, + mock_logger: Mock, + ) -> None: + """Test DatabaseManager health_check without pool.""" + from webhook_server.libs.database import DatabaseManager + + manager = DatabaseManager(mock_config, mock_logger) + + result = await manager.health_check() + + assert result is False + + @pytest.mark.asyncio + async def test_database_manager_health_check_failure( + self, + mock_config: Mock, + mock_logger: Mock, + ) -> None: + """Test DatabaseManager health_check failure.""" + from 
webhook_server.libs.database import DatabaseManager + + manager = DatabaseManager(mock_config, mock_logger) + mock_connection = AsyncMock() + mock_connection.fetchval.side_effect = Exception("Health check failed") + manager.pool = create_async_pool_mock(mock_connection) + + result = await manager.health_check() + + assert result is False + + @pytest.mark.asyncio + async def test_database_manager_context_manager( + self, + mock_config: Mock, + mock_logger: Mock, + ) -> None: + """Test DatabaseManager as context manager.""" + from webhook_server.libs.database import DatabaseManager + + with patch("webhook_server.libs.database.asyncpg.create_pool", new=AsyncMock()) as mock_create_pool: + mock_pool = Mock() + mock_pool.close = AsyncMock() + mock_create_pool.return_value = mock_pool + + async with DatabaseManager(mock_config, mock_logger) as manager: + assert manager.pool is mock_pool + + # Pool should be closed after context exit + mock_pool.close.assert_called_once() + + +class TestFactoryFunctions: + """Test suite for factory functions.""" + + def test_get_database_manager(self) -> None: + """Test get_database_manager factory function.""" + from webhook_server.libs.database import get_database_manager + + with patch("webhook_server.libs.database.Config") as mock_config_class: + with patch("webhook_server.libs.database.get_logger_with_params") as mock_logger_func: + mock_config = Mock() + mock_config.root_data = { + "metrics-database": { + "host": "localhost", + "port": 5432, + "database": "test_db", + "username": "test_user", + "password": "test_pass", # pragma: allowlist secret + } + } + mock_config_class.return_value = mock_config + mock_logger = Mock() + mock_logger_func.return_value = mock_logger + + manager = get_database_manager("test/repo") + + mock_config_class.assert_called_once_with(repository="test/repo") + mock_logger_func.assert_called_once_with(repository_name="test/repo") + assert manager.config is mock_config + assert manager.logger is mock_logger 
diff --git a/webhook_server/tests/test_metrics_api.py b/webhook_server/tests/test_metrics_api.py new file mode 100644 index 00000000..8fd5d3a6 --- /dev/null +++ b/webhook_server/tests/test_metrics_api.py @@ -0,0 +1,1748 @@ +""" +Comprehensive tests for metrics API endpoints. + +Tests 7 metrics endpoints: +- GET /api/metrics/webhooks - List webhook events with filtering +- GET /api/metrics/webhooks/{delivery_id} - Get specific webhook details +- GET /api/metrics/repositories - Get repository statistics +- GET /api/metrics/summary - Get overall metrics summary +- GET /api/metrics/contributors - Get PR contributors statistics +- GET /api/metrics/user-prs - Get per-user PR metrics +- GET /api/metrics/trends - Get metrics trends over time +""" + +from datetime import UTC, datetime, timedelta +from unittest.mock import AsyncMock, Mock, patch +from urllib.parse import quote + +import pytest +from fastapi.testclient import TestClient + +import webhook_server.app +from webhook_server.app import FASTAPI_APP +from webhook_server.libs.database import DatabaseManager + + +@pytest.fixture(autouse=True) +def enable_metrics_server(monkeypatch: pytest.MonkeyPatch) -> None: + """Enable metrics server for all tests in this module.""" + monkeypatch.setattr(webhook_server.app, "METRICS_SERVER_ENABLED", True) + + +@pytest.fixture +def setup_db_manager(mock_db_manager: Mock, monkeypatch: pytest.MonkeyPatch) -> Mock: + """Set up global db_manager for metrics endpoints. + + This fixture prevents the app lifespan from constructing a real DatabaseManager + by monkeypatching the DatabaseManager class to return the mock, ensuring that + any DatabaseManager() instantiation during startup uses the mock and its + connect()/disconnect() are no-ops. 
+ """ + # Monkeypatch DatabaseManager class to return the mock when instantiated + # This prevents the app lifespan from creating a real DB connection at startup + monkeypatch.setattr(DatabaseManager, "__new__", lambda *_args, **_kwargs: mock_db_manager) + + # Also set the global db_manager for request handling + monkeypatch.setattr(webhook_server.app, "db_manager", mock_db_manager) + + # Mock connect/disconnect to prevent real DB operations during lifespan + mock_db_manager.connect = AsyncMock(return_value=None) + mock_db_manager.disconnect = AsyncMock(return_value=None) + + return mock_db_manager + + + class TestMetricsAPIEndpoints: + """Test metrics API endpoints for webhook analytics.""" + + @pytest.fixture + def client(self, setup_db_manager: Mock) -> TestClient: + """FastAPI test client. + + Depends on setup_db_manager to ensure DatabaseManager is mocked + before the app lifespan runs. + """ + _ = setup_db_manager # Reference to satisfy linter (ARG002) + return TestClient(FASTAPI_APP) + + @pytest.fixture + def mock_db_manager(self) -> Mock: + """Mock database manager with helper methods.""" + db_manager = Mock() + + # Mock the helper methods that DatabaseManager provides + db_manager.fetch = AsyncMock(return_value=[]) + db_manager.fetchrow = AsyncMock(return_value=None) + db_manager.fetchval = AsyncMock(return_value=0) + db_manager.execute = AsyncMock(return_value="INSERT 0 1") + + # Mock pool for tests that check pool existence + db_manager.pool = Mock() + + return db_manager + + + class TestRequireMetricsServerEnabled(TestMetricsAPIEndpoints): + """Test require_metrics_server_enabled dependency.""" + + def test_metrics_endpoint_requires_enabled_server( + self, client: TestClient, monkeypatch: pytest.MonkeyPatch + ) -> None: + """Test metrics endpoints return 404 when metrics server is disabled.""" + # Override the module-level fixture to disable metrics server + monkeypatch.setattr(webhook_server.app, "METRICS_SERVER_ENABLED", False) + + # Try all metrics endpoints
+ endpoints = [ + "/api/metrics/webhooks", + "/api/metrics/webhooks/test-delivery-123", + "/api/metrics/repositories", + "/api/metrics/summary", + ] + + for endpoint in endpoints: + response = client.get(endpoint) + assert response.status_code == 404 + assert "Metrics server is disabled" in response.json()["detail"] + + +class TestGetWebhookEventsEndpoint(TestMetricsAPIEndpoints): + """Test GET /api/metrics/webhooks endpoint.""" + + def test_get_webhook_events_success_no_filters( + self, + client: TestClient, + setup_db_manager: Mock, + ) -> None: + """Test getting webhook events without filters.""" + # Mock database query results + now = datetime.now(UTC) + + # Mock fetchval (count query) + setup_db_manager.fetchval.return_value = 2 + + # Mock fetch (main query) + setup_db_manager.fetch.return_value = [ + { + "delivery_id": "test-delivery-1", + "repository": "org/repo1", + "event_type": "pull_request", + "action": "opened", + "pr_number": 42, + "sender": "user1", + "status": "success", + "created_at": now, + "processed_at": now + timedelta(seconds=1), + "duration_ms": 1000, + "api_calls_count": 5, + "token_spend": 10, + "token_remaining": 4990, + "error_message": None, + }, + { + "delivery_id": "test-delivery-2", + "repository": "org/repo2", + "event_type": "issue_comment", + "action": "created", + "pr_number": None, + "sender": "user2", + "status": "error", + "created_at": now - timedelta(minutes=5), + "processed_at": now - timedelta(minutes=4, seconds=58), + "duration_ms": 2000, + "api_calls_count": 3, + "token_spend": 5, + "token_remaining": 4995, + "error_message": "Processing failed", + }, + ] + + response = client.get("/api/metrics/webhooks") + + assert response.status_code == 200 + data = response.json() + + assert len(data["data"]) == 2 + assert data["pagination"]["total"] == 2 + assert data["pagination"]["has_next"] is False + + # Verify first event + event1 = data["data"][0] + assert event1["delivery_id"] == "test-delivery-1" + assert 
event1["repository"] == "org/repo1" + assert event1["event_type"] == "pull_request" + assert event1["action"] == "opened" + assert event1["pr_number"] == 42 + assert event1["status"] == "success" + assert event1["duration_ms"] == 1000 + assert event1["error_message"] is None + + # Verify second event + event2 = data["data"][1] + assert event2["status"] == "error" + assert event2["error_message"] == "Processing failed" + + def test_get_webhook_events_with_repository_filter( + self, + client: TestClient, + setup_db_manager: Mock, + ) -> None: + """Test filtering webhook events by repository.""" + setup_db_manager.fetchval.return_value = 1 + now = datetime.now(UTC) + + setup_db_manager.fetch.return_value = [ + { + "delivery_id": "test-delivery-1", + "repository": "org/repo1", + "event_type": "pull_request", + "action": "opened", + "pr_number": 42, + "sender": "user1", + "status": "success", + "created_at": now, + "processed_at": now, + "duration_ms": 1000, + "api_calls_count": 5, + "token_spend": 10, + "token_remaining": 4990, + "error_message": None, + } + ] + + response = client.get("/api/metrics/webhooks?repository=org/repo1") + + assert response.status_code == 200 + data = response.json() + assert len(data["data"]) == 1 + assert data["data"][0]["repository"] == "org/repo1" + + def test_get_webhook_events_with_event_type_filter( + self, + client: TestClient, + setup_db_manager: Mock, + ) -> None: + """Test filtering webhook events by event type.""" + setup_db_manager.fetchval.return_value = 1 + now = datetime.now(UTC) + + setup_db_manager.fetch.return_value = [ + { + "delivery_id": "test-delivery-1", + "repository": "org/repo1", + "event_type": "check_run", + "action": "completed", + "pr_number": 42, + "sender": "github-actions", + "status": "success", + "created_at": now, + "processed_at": now, + "duration_ms": 500, + "api_calls_count": 2, + "token_spend": 2, + "token_remaining": 4998, + "error_message": None, + } + ] + + response = 
client.get("/api/metrics/webhooks?event_type=check_run") + + assert response.status_code == 200 + data = response.json() + assert data["data"][0]["event_type"] == "check_run" + + def test_get_webhook_events_with_status_filter( + self, + client: TestClient, + setup_db_manager: Mock, + ) -> None: + """Test filtering webhook events by status.""" + setup_db_manager.fetchval.return_value = 1 + now = datetime.now(UTC) + + setup_db_manager.fetch.return_value = [ + { + "delivery_id": "test-delivery-error", + "repository": "org/repo1", + "event_type": "pull_request", + "action": "opened", + "pr_number": 99, + "sender": "user1", + "status": "error", + "created_at": now, + "processed_at": now, + "duration_ms": 5000, + "api_calls_count": 10, + "token_spend": 10, + "token_remaining": 4990, + "error_message": "Connection timeout", + } + ] + + response = client.get("/api/metrics/webhooks?status=error") + + assert response.status_code == 200 + data = response.json() + assert data["data"][0]["status"] == "error" + assert data["data"][0]["error_message"] == "Connection timeout" + + # Verify DB queries were executed (fetchval for count, fetch for results) + setup_db_manager.fetchval.assert_called_once() + setup_db_manager.fetch.assert_called_once() + + def test_get_webhook_events_with_time_filters( + self, + client: TestClient, + setup_db_manager: Mock, + ) -> None: + """Test filtering webhook events by time range.""" + setup_db_manager.fetchval.return_value = 1 + now = datetime.now(UTC) + + setup_db_manager.fetch.return_value = [ + { + "delivery_id": "test-delivery-1", + "repository": "org/repo1", + "event_type": "pull_request", + "action": "opened", + "pr_number": 42, + "sender": "user1", + "status": "success", + "created_at": now, + "processed_at": now, + "duration_ms": 1000, + "api_calls_count": 5, + "token_spend": 10, + "token_remaining": 4990, + "error_message": None, + } + ] + + start_time = quote((now - timedelta(hours=1)).isoformat()) + end_time = quote((now + 
timedelta(hours=1)).isoformat()) + + response = client.get(f"/api/metrics/webhooks?start_time={start_time}&end_time={end_time}") + + assert response.status_code == 200 + + def test_get_webhook_events_pagination( + self, + client: TestClient, + setup_db_manager: Mock, + ) -> None: + """Test webhook events pagination.""" + setup_db_manager.fetchval.return_value = 150 # Total count + now = datetime.now(UTC) + + # Generate 50 mock events + mock_events = [ + { + "delivery_id": f"test-delivery-{i}", + "repository": "org/repo1", + "event_type": "pull_request", + "action": "opened", + "pr_number": i, + "sender": "user1", + "status": "success", + "created_at": now, + "processed_at": now, + "duration_ms": 1000, + "api_calls_count": 5, + "token_spend": 10, + "token_remaining": 4990, + "error_message": None, + } + for i in range(50) + ] + setup_db_manager.fetch.return_value = mock_events + + response = client.get("/api/metrics/webhooks?page=1&page_size=50") + + assert response.status_code == 200 + data = response.json() + assert len(data["data"]) == 50 + assert data["pagination"]["total"] == 150 + assert data["pagination"]["page_size"] == 50 + assert data["pagination"]["has_next"] is True + + def test_get_webhook_events_db_manager_none(self, client: TestClient) -> None: + """Test endpoint returns 500 when db_manager is None.""" + with patch("webhook_server.app.db_manager", None): + response = client.get("/api/metrics/webhooks") + + assert response.status_code == 500 + assert "Metrics database not available" in response.json()["detail"] + + def test_get_webhook_events_pool_none( + self, + client: TestClient, + setup_db_manager: Mock, + ) -> None: + """Test endpoint returns 500 when database pool is not initialized.""" + # Simulate pool not initialized - helper methods raise ValueError + setup_db_manager.pool = None + setup_db_manager.fetchval.side_effect = ValueError("Database pool not initialized. 
Call connect() first.") + + response = client.get("/api/metrics/webhooks") + + assert response.status_code == 500 + assert "Failed to fetch webhook events" in response.json()["detail"] + + def test_get_webhook_events_database_error( + self, + client: TestClient, + setup_db_manager: Mock, + ) -> None: + """Test endpoint handles database errors gracefully.""" + setup_db_manager.fetchval.side_effect = Exception("Database connection lost") + + response = client.get("/api/metrics/webhooks") + + assert response.status_code == 500 + assert "Failed to fetch webhook events" in response.json()["detail"] + + +class TestGetWebhookEventByIdEndpoint(TestMetricsAPIEndpoints): + """Test GET /api/metrics/webhooks/{delivery_id} endpoint.""" + + def test_get_webhook_event_by_id_success( + self, + client: TestClient, + setup_db_manager: Mock, + ) -> None: + """Test getting specific webhook event by delivery ID.""" + now = datetime.now(UTC) + + setup_db_manager.fetchrow.return_value = { + "delivery_id": "test-delivery-123", + "repository": "org/repo", + "event_type": "pull_request", + "action": "opened", + "pr_number": 42, + "sender": "user1", + "status": "success", + "created_at": now, + "processed_at": now + timedelta(seconds=1), + "duration_ms": 1000, + "api_calls_count": 5, + "token_spend": 10, + "token_remaining": 4990, + "error_message": None, + "payload": {"key": "value", "nested": {"data": "test"}}, + } + + response = client.get("/api/metrics/webhooks/test-delivery-123") + + assert response.status_code == 200 + data = response.json() + assert data["delivery_id"] == "test-delivery-123" + assert data["repository"] == "org/repo" + assert data["status"] == "success" + assert data["payload"] == {"key": "value", "nested": {"data": "test"}} + + def test_get_webhook_event_by_id_not_found( + self, + client: TestClient, + setup_db_manager: Mock, + ) -> None: + """Test getting non-existent webhook event returns 404.""" + setup_db_manager.fetchrow.return_value = None + + response = 
client.get("/api/metrics/webhooks/nonexistent-delivery-id") + + assert response.status_code == 404 + assert "Webhook event not found" in response.json()["detail"] + + def test_get_webhook_event_by_id_db_manager_none(self, client: TestClient) -> None: + """Test endpoint returns 500 when db_manager is None.""" + with patch("webhook_server.app.db_manager", None): + response = client.get("/api/metrics/webhooks/test-delivery-123") + + assert response.status_code == 500 + assert "Metrics database not available" in response.json()["detail"] + + def test_get_webhook_event_by_id_pool_none( + self, + client: TestClient, + setup_db_manager: Mock, + ) -> None: + """Test endpoint returns 500 when database pool is not initialized.""" + # Simulate pool not initialized - helper methods raise ValueError + setup_db_manager.pool = None + setup_db_manager.fetchrow.side_effect = ValueError("Database pool not initialized. Call connect() first.") + + response = client.get("/api/metrics/webhooks/test-delivery-123") + + assert response.status_code == 500 + assert "Failed to fetch webhook event" in response.json()["detail"] + + def test_get_webhook_event_by_id_database_error( + self, + client: TestClient, + setup_db_manager: Mock, + ) -> None: + """Test endpoint handles database errors gracefully.""" + setup_db_manager.fetchrow.side_effect = Exception("Database connection lost") + + response = client.get("/api/metrics/webhooks/test-delivery-123") + + assert response.status_code == 500 + assert "Failed to fetch webhook event" in response.json()["detail"] + + +class TestGetRepositoryStatisticsEndpoint(TestMetricsAPIEndpoints): + """Test GET /api/metrics/repositories endpoint.""" + + def test_get_repository_statistics_success( + self, + client: TestClient, + setup_db_manager: Mock, + ) -> None: + """Test getting repository statistics.""" + setup_db_manager.fetchval.return_value = 2 + setup_db_manager.fetch.return_value = [ + { + "repository": "org/repo1", + "total_events": 100, + 
"successful_events": 95, + "failed_events": 5, + "success_rate": 95.00, + "avg_processing_time_ms": 1500, + "median_processing_time_ms": 1200, + "p95_processing_time_ms": 3000, + "max_processing_time_ms": 5000, + "total_api_calls": 500, + "avg_api_calls_per_event": 5.00, + "total_token_spend": 1000, + "event_type_breakdown": {"pull_request": 80, "issue_comment": 20}, + }, + { + "repository": "org/repo2", + "total_events": 50, + "successful_events": 48, + "failed_events": 2, + "success_rate": 96.00, + "avg_processing_time_ms": 800, + "median_processing_time_ms": 750, + "p95_processing_time_ms": 1500, + "max_processing_time_ms": 2000, + "total_api_calls": 200, + "avg_api_calls_per_event": 4.00, + "total_token_spend": 400, + "event_type_breakdown": {"check_run": 30, "pull_request": 20}, + }, + ] + + response = client.get("/api/metrics/repositories") + + assert response.status_code == 200 + data = response.json() + assert data["pagination"]["total"] == 2 + assert len(data["repositories"]) == 2 + + # Verify first repository + repo1 = data["repositories"][0] + assert repo1["repository"] == "org/repo1" + assert repo1["total_events"] == 100 + assert repo1["success_rate"] == 95.00 + assert repo1["event_type_breakdown"] == {"pull_request": 80, "issue_comment": 20} + + # Verify second repository + repo2 = data["repositories"][1] + assert repo2["repository"] == "org/repo2" + assert repo2["total_events"] == 50 + + def test_get_repository_statistics_with_time_range( + self, + client: TestClient, + setup_db_manager: Mock, + ) -> None: + """Test getting repository statistics with time range filter.""" + now = datetime.now(UTC) + + setup_db_manager.fetch.return_value = [] + + start_time = quote((now - timedelta(days=7)).isoformat()) + end_time = quote(now.isoformat()) + + response = client.get(f"/api/metrics/repositories?start_time={start_time}&end_time={end_time}") + + assert response.status_code == 200 + data = response.json() + assert "time_range" in data + assert 
data["time_range"]["start_time"] is not None + assert data["time_range"]["end_time"] is not None + + def test_get_repository_statistics_empty( + self, + client: TestClient, + setup_db_manager: Mock, + ) -> None: + """Test getting repository statistics when no data exists.""" + setup_db_manager.fetchval.return_value = 0 + setup_db_manager.fetch.return_value = [] + + response = client.get("/api/metrics/repositories") + + assert response.status_code == 200 + data = response.json() + assert data["pagination"]["total"] == 0 + assert data["repositories"] == [] + + def test_get_repository_statistics_db_manager_none(self, client: TestClient) -> None: + """Test endpoint returns 500 when db_manager is None.""" + with patch("webhook_server.app.db_manager", None): + response = client.get("/api/metrics/repositories") + + assert response.status_code == 500 + assert "Metrics database not available" in response.json()["detail"] + + def test_get_repository_statistics_pool_none( + self, + client: TestClient, + setup_db_manager: Mock, + ) -> None: + """Test endpoint returns 500 when database pool is not initialized.""" + # Simulate pool not initialized - helper methods raise ValueError + setup_db_manager.pool = None + setup_db_manager.fetch.side_effect = ValueError("Database pool not initialized. 
Call connect() first.") + + response = client.get("/api/metrics/repositories") + + assert response.status_code == 500 + assert "Failed to fetch repository statistics" in response.json()["detail"] + + def test_get_repository_statistics_database_error( + self, + client: TestClient, + setup_db_manager: Mock, + ) -> None: + """Test endpoint handles database errors gracefully.""" + setup_db_manager.fetch.side_effect = Exception("Database connection lost") + + response = client.get("/api/metrics/repositories") + + assert response.status_code == 500 + assert "Failed to fetch repository statistics" in response.json()["detail"] + + +class TestGetMetricsSummaryEndpoint(TestMetricsAPIEndpoints): + """Test GET /api/metrics/summary endpoint.""" + + def test_get_metrics_summary_success( + self, + client: TestClient, + setup_db_manager: Mock, + ) -> None: + """Test getting overall metrics summary.""" + now = datetime.now(UTC) + + # Mock summary query + setup_db_manager.fetchrow.side_effect = [ + # Summary row + { + "total_events": 1000, + "successful_events": 950, + "failed_events": 50, + "success_rate": 95.00, + "avg_processing_time_ms": 1500, + "median_processing_time_ms": 1200, + "p95_processing_time_ms": 3000, + "max_processing_time_ms": 8000, + "total_api_calls": 5000, + "avg_api_calls_per_event": 5.00, + "total_token_spend": 10000, + }, + # Time range row + { + "first_event_time": now - timedelta(days=7), + "last_event_time": now, + }, + ] + + # Mock top repositories query + setup_db_manager.fetch.side_effect = [ + # Top repos + [ + {"repository": "org/repo1", "total_events": 600, "success_rate": 96.00, "percentage": 60.00}, + {"repository": "org/repo2", "total_events": 400, "success_rate": 94.00, "percentage": 40.00}, + ], + # Event type distribution + [ + {"event_type": "pull_request", "event_count": 700}, + {"event_type": "issue_comment", "event_count": 200}, + {"event_type": "check_run", "event_count": 100}, + ], + ] + + response = client.get("/api/metrics/summary") + + 
assert response.status_code == 200 + data = response.json() + + # Verify summary + assert data["summary"]["total_events"] == 1000 + assert data["summary"]["successful_events"] == 950 + assert data["summary"]["success_rate"] == 95.00 + + # Verify top repositories + assert len(data["top_repositories"]) == 2 + assert data["top_repositories"][0]["repository"] == "org/repo1" + + # Verify event type distribution + assert data["event_type_distribution"]["pull_request"] == 700 + assert data["event_type_distribution"]["issue_comment"] == 200 + + # Verify event rates + assert "hourly_event_rate" in data + assert "daily_event_rate" in data + + def test_get_metrics_summary_with_time_range( + self, + client: TestClient, + setup_db_manager: Mock, + ) -> None: + """Test getting metrics summary with time range filter.""" + now = datetime.now(UTC) + + setup_db_manager.fetchrow.side_effect = [ + # Summary row + { + "total_events": 100, + "successful_events": 95, + "failed_events": 5, + "success_rate": 95.00, + "avg_processing_time_ms": 1500, + "median_processing_time_ms": 1200, + "p95_processing_time_ms": 3000, + "max_processing_time_ms": 5000, + "total_api_calls": 500, + "avg_api_calls_per_event": 5.00, + "total_token_spend": 1000, + }, + # Time range row + { + "first_event_time": now - timedelta(hours=24), + "last_event_time": now, + }, + # Previous period summary row (for trend calculation) + { + "total_events": 90, + "successful_events": 85, + "failed_events": 5, + "success_rate": 94.44, + "avg_processing_time_ms": 1600, + }, + ] + + setup_db_manager.fetch.side_effect = [[], []] + + start_time = quote((now - timedelta(days=1)).isoformat()) + end_time = quote(now.isoformat()) + + response = client.get(f"/api/metrics/summary?start_time={start_time}&end_time={end_time}") + + assert response.status_code == 200 + data = response.json() + assert "time_range" in data + assert data["time_range"]["start_time"] is not None + + def test_get_metrics_summary_empty( + self, + client: 
TestClient, + setup_db_manager: Mock, + ) -> None: + """Test getting metrics summary when no data exists.""" + setup_db_manager.fetchrow.side_effect = [ + { + "total_events": 0, + "successful_events": 0, + "failed_events": 0, + "success_rate": None, + "avg_processing_time_ms": None, + "median_processing_time_ms": None, + "p95_processing_time_ms": None, + "max_processing_time_ms": None, + "total_api_calls": None, + "avg_api_calls_per_event": None, + "total_token_spend": None, + }, + None, + ] + + setup_db_manager.fetch.side_effect = [[], []] + + response = client.get("/api/metrics/summary") + + assert response.status_code == 200 + data = response.json() + assert data["summary"]["total_events"] == 0 + assert data["top_repositories"] == [] + assert data["event_type_distribution"] == {} + + def test_get_metrics_summary_db_manager_none(self, client: TestClient) -> None: + """Test endpoint returns 500 when db_manager is None.""" + with patch("webhook_server.app.db_manager", None): + response = client.get("/api/metrics/summary") + + assert response.status_code == 500 + assert "Metrics database not available" in response.json()["detail"] + + def test_get_metrics_summary_pool_none( + self, + client: TestClient, + setup_db_manager: Mock, + ) -> None: + """Test endpoint returns 500 when database pool is not initialized.""" + # Simulate pool not initialized - helper methods raise ValueError + setup_db_manager.pool = None + setup_db_manager.fetchrow.side_effect = ValueError("Database pool not initialized. 
Call connect() first.") + + response = client.get("/api/metrics/summary") + + assert response.status_code == 500 + assert "Failed to fetch metrics summary" in response.json()["detail"] + + def test_get_metrics_summary_database_error( + self, + client: TestClient, + setup_db_manager: Mock, + ) -> None: + """Test endpoint handles database errors gracefully.""" + setup_db_manager.fetchrow.side_effect = Exception("Database connection lost") + + response = client.get("/api/metrics/summary") + + assert response.status_code == 500 + assert "Failed to fetch metrics summary" in response.json()["detail"] + + +class TestUserPullRequestsEndpoint(TestMetricsAPIEndpoints): + """Test GET /api/metrics/user-prs endpoint.""" + + def test_get_user_prs_success(self, client: TestClient, setup_db_manager: Mock) -> None: + """Test successful retrieval of user's pull requests.""" + # Mock database responses + setup_db_manager.fetchrow.return_value = {"total": 2} + setup_db_manager.fetch.return_value = [ + { + "pr_number": 123, + "title": "Add feature X", + "repository": "org/repo1", + "state": "closed", + "merged": True, + "url": "https://github.com/org/repo1/pull/123", + "created_at": "2024-11-20T10:00:00Z", + "updated_at": "2024-11-21T15:30:00Z", + "commits_count": 5, + "head_sha": "abc123def456", # pragma: allowlist secret + }, + { + "pr_number": 124, + "title": "Fix bug Y", + "repository": "org/repo1", + "state": "open", + "merged": False, + "url": "https://github.com/org/repo1/pull/124", + "created_at": "2024-11-22T09:00:00Z", + "updated_at": "2024-11-22T09:00:00Z", + "commits_count": 2, + "head_sha": "def456abc789", # pragma: allowlist secret + }, + ] + + response = client.get("/api/metrics/user-prs?user=john-doe&page=1&page_size=10") + + assert response.status_code == 200 + data = response.json() + + # Check data structure + assert "data" in data + assert "pagination" in data + assert len(data["data"]) == 2 + + # Verify first PR + pr1 = data["data"][0] + assert pr1["number"] == 123 
+ assert pr1["title"] == "Add feature X" + assert pr1["repository"] == "org/repo1" + assert pr1["state"] == "closed" + assert pr1["merged"] is True + assert pr1["commits_count"] == 5 + + # Verify pagination + pagination = data["pagination"] + assert pagination["total"] == 2 + assert pagination["page"] == 1 + assert pagination["page_size"] == 10 + assert pagination["total_pages"] == 1 + assert pagination["has_next"] is False + assert pagination["has_prev"] is False + + def test_get_user_prs_with_repository_filter(self, client: TestClient, setup_db_manager: Mock) -> None: + """Test filtering by repository.""" + setup_db_manager.fetchrow.return_value = {"total": 1} + setup_db_manager.fetch.return_value = [ + { + "pr_number": 123, + "title": "Add feature X", + "repository": "org/repo1", + "state": "closed", + "merged": True, + "url": "https://github.com/org/repo1/pull/123", + "created_at": "2024-11-20T10:00:00Z", + "updated_at": "2024-11-21T15:30:00Z", + "commits_count": 5, + "head_sha": "abc123", + } + ] + + response = client.get("/api/metrics/user-prs?user=john-doe&repository=org/repo1") + + assert response.status_code == 200 + data = response.json() + assert len(data["data"]) == 1 + assert data["data"][0]["repository"] == "org/repo1" + + def test_get_user_prs_with_time_range(self, client: TestClient, setup_db_manager: Mock) -> None: + """Test filtering by time range.""" + setup_db_manager.fetchrow.return_value = {"total": 1} + setup_db_manager.fetch.return_value = [] + + start_time = "2024-11-01T00:00:00Z" + end_time = "2024-11-30T23:59:59Z" + + response = client.get(f"/api/metrics/user-prs?user=john-doe&start_time={start_time}&end_time={end_time}") + + assert response.status_code == 200 + + def test_get_user_prs_pagination(self, client: TestClient, setup_db_manager: Mock) -> None: + """Test pagination with multiple pages.""" + # Total of 25 PRs, page size 10 + setup_db_manager.fetchrow.return_value = {"total": 25} + setup_db_manager.fetch.return_value = [] + + # 
Test page 2 + response = client.get("/api/metrics/user-prs?user=john-doe&page=2&page_size=10") + + assert response.status_code == 200 + data = response.json() + + pagination = data["pagination"] + assert pagination["total"] == 25 + assert pagination["page"] == 2 + assert pagination["page_size"] == 10 + assert pagination["total_pages"] == 3 + assert pagination["has_next"] is True + assert pagination["has_prev"] is True + + def test_get_user_prs_empty_result(self, client: TestClient, setup_db_manager: Mock) -> None: + """Test endpoint with no matching PRs.""" + setup_db_manager.fetchrow.return_value = {"total": 0} + setup_db_manager.fetch.return_value = [] + + response = client.get("/api/metrics/user-prs?user=nonexistent-user") + + assert response.status_code == 200 + data = response.json() + assert len(data["data"]) == 0 + assert data["pagination"]["total"] == 0 + assert data["pagination"]["total_pages"] == 0 + + def test_get_user_prs_no_user_parameter(self, client: TestClient, setup_db_manager: Mock) -> None: + """Test endpoint works without user parameter (shows all PRs).""" + setup_db_manager.fetchrow.return_value = {"total": 2} + setup_db_manager.fetch.return_value = [ + { + "pr_number": 123, + "title": "Add feature X", + "repository": "org/repo1", + "state": "closed", + "merged": True, + "url": "https://github.com/org/repo1/pull/123", + "created_at": "2024-11-20T10:00:00Z", + "updated_at": "2024-11-21T15:30:00Z", + "commits_count": 5, + "head_sha": "abc123", + }, + { + "pr_number": 124, + "title": "Fix bug Y", + "repository": "org/repo2", + "state": "open", + "merged": False, + "url": "https://github.com/org/repo2/pull/124", + "created_at": "2024-11-22T09:00:00Z", + "updated_at": "2024-11-22T09:00:00Z", + "commits_count": 2, + "head_sha": "def456", + }, + ] + + response = client.get("/api/metrics/user-prs") + + assert response.status_code == 200 + data = response.json() + assert len(data["data"]) == 2 + assert data["pagination"]["total"] == 2 + + def 
test_get_user_prs_invalid_page_number(self, client: TestClient) -> None: + """Test endpoint fails with invalid page number.""" + response = client.get("/api/metrics/user-prs?user=john-doe&page=0") + + assert response.status_code == 422 # FastAPI validation error + + def test_get_user_prs_invalid_page_size(self, client: TestClient) -> None: + """Test endpoint fails with invalid page size.""" + # Too large + response = client.get("/api/metrics/user-prs?user=john-doe&page_size=101") + assert response.status_code == 422 + + # Too small + response = client.get("/api/metrics/user-prs?user=john-doe&page_size=0") + assert response.status_code == 422 + + def test_get_user_prs_database_error(self, client: TestClient, setup_db_manager: Mock) -> None: + """Test endpoint handles database errors gracefully.""" + setup_db_manager.fetchrow.side_effect = Exception("Database connection lost") + + response = client.get("/api/metrics/user-prs?user=john-doe") + + assert response.status_code == 500 + assert "Failed to fetch user pull requests" in response.json()["detail"] + + def test_get_user_prs_null_commits_count(self, client: TestClient, setup_db_manager: Mock) -> None: + """Test endpoint handles null commits_count gracefully.""" + setup_db_manager.fetchrow.return_value = {"total": 1} + setup_db_manager.fetch.return_value = [ + { + "pr_number": 123, + "title": "Add feature X", + "repository": "org/repo1", + "state": "open", + "merged": False, + "url": "https://github.com/org/repo1/pull/123", + "created_at": "2024-11-20T10:00:00Z", + "updated_at": "2024-11-21T15:30:00Z", + "commits_count": None, # NULL from database + "head_sha": "abc123", + } + ] + + response = client.get("/api/metrics/user-prs?user=john-doe") + + assert response.status_code == 200 + data = response.json() + assert data["data"][0]["commits_count"] == 0 # NULL converted to 0 + + def test_get_user_prs_metrics_server_disabled(self, client: TestClient, monkeypatch: pytest.MonkeyPatch) -> None: + """Test endpoint returns 
404 when metrics server is disabled.""" + monkeypatch.setattr(webhook_server.app, "METRICS_SERVER_ENABLED", False) + + response = client.get("/api/metrics/user-prs?user=john-doe") + + assert response.status_code == 404 + + def test_get_user_prs_combined_filters(self, client: TestClient, setup_db_manager: Mock) -> None: + """Test endpoint with all filters combined.""" + setup_db_manager.fetchrow.return_value = {"total": 1} + setup_db_manager.fetch.return_value = [] + + response = client.get( + "/api/metrics/user-prs" + "?user=john-doe" + "&repository=org/repo1" + "&start_time=2024-11-01T00:00:00Z" + "&end_time=2024-11-30T23:59:59Z" + "&page=1" + "&page_size=20" + ) + + assert response.status_code == 200 + + +class TestGetContributorsEndpoint(TestMetricsAPIEndpoints): + """Test GET /api/metrics/contributors endpoint.""" + + def test_get_contributors_success( + self, + client: TestClient, + setup_db_manager: Mock, + ) -> None: + """Test getting contributors statistics with all categories.""" + # Mock count queries (fetchval calls) - 4 categories + setup_db_manager.fetchval.side_effect = [ + 5, # pr_creators_total + 3, # pr_reviewers_total + 4, # pr_approvers_total + 2, # pr_lgtm_total + ] + + # Mock data queries (fetch calls) - 4 categories + setup_db_manager.fetch.side_effect = [ + # pr_creators + [ + { + "user": "john-doe", + "total_prs": 45, + "merged_prs": 42, + "closed_prs": 3, + "avg_commits": 3.5, + }, + { + "user": "jane-smith", + "total_prs": 30, + "merged_prs": 28, + "closed_prs": 2, + "avg_commits": 2.8, + }, + ], + # pr_reviewers + [ + { + "user": "bob-wilson", + "total_reviews": 78, + "prs_reviewed": 65, + }, + { + "user": "alice-jones", + "total_reviews": 56, + "prs_reviewed": 48, + }, + ], + # pr_approvers + [ + { + "user": "charlie-brown", + "total_approvals": 56, + "prs_approved": 54, + }, + { + "user": "diana-prince", + "total_approvals": 40, + "prs_approved": 38, + }, + ], + # pr_lgtm + [ + { + "user": "eve-adams", + "total_lgtm": 42, + "prs_lgtm": 
40, + }, + { + "user": "frank-miller", + "total_lgtm": 35, + "prs_lgtm": 33, + }, + ], + ] + + response = client.get("/api/metrics/contributors") + + assert response.status_code == 200 + data = response.json() + + # Verify structure + assert "time_range" in data + assert "pr_creators" in data + assert "pr_reviewers" in data + assert "pr_approvers" in data + assert "pr_lgtm" in data + + # Verify pr_creators + assert len(data["pr_creators"]["data"]) == 2 + creator1 = data["pr_creators"]["data"][0] + assert creator1["user"] == "john-doe" + assert creator1["total_prs"] == 45 + assert creator1["merged_prs"] == 42 + assert creator1["closed_prs"] == 3 + assert creator1["avg_commits_per_pr"] == 3.5 + + # Verify pr_creators pagination + assert data["pr_creators"]["pagination"]["total"] == 5 + assert data["pr_creators"]["pagination"]["page"] == 1 + assert data["pr_creators"]["pagination"]["page_size"] == 10 + assert data["pr_creators"]["pagination"]["has_next"] is False + assert data["pr_creators"]["pagination"]["has_prev"] is False + + # Verify pr_reviewers + assert len(data["pr_reviewers"]["data"]) == 2 + reviewer1 = data["pr_reviewers"]["data"][0] + assert reviewer1["user"] == "bob-wilson" + assert reviewer1["total_reviews"] == 78 + assert reviewer1["prs_reviewed"] == 65 + assert reviewer1["avg_reviews_per_pr"] == 1.2 + + # Verify pr_approvers + assert len(data["pr_approvers"]["data"]) == 2 + approver1 = data["pr_approvers"]["data"][0] + assert approver1["user"] == "charlie-brown" + assert approver1["total_approvals"] == 56 + assert approver1["prs_approved"] == 54 + + # Verify pr_lgtm + assert len(data["pr_lgtm"]["data"]) == 2 + lgtm1 = data["pr_lgtm"]["data"][0] + assert lgtm1["user"] == "eve-adams" + assert lgtm1["total_lgtm"] == 42 + assert lgtm1["prs_lgtm"] == 40 + + def test_get_contributors_with_user_filter( + self, + client: TestClient, + setup_db_manager: Mock, + ) -> None: + """Test filtering contributors by user.""" + # Mock count queries + 
setup_db_manager.fetchval.side_effect = [1, 1, 1, 1] + + # Mock data queries + setup_db_manager.fetch.side_effect = [ + # pr_creators for john-doe + [ + { + "user": "john-doe", + "total_prs": 45, + "merged_prs": 42, + "closed_prs": 3, + "avg_commits": 3.5, + } + ], + # pr_reviewers for john-doe + [ + { + "user": "john-doe", + "total_reviews": 20, + "prs_reviewed": 18, + } + ], + # pr_approvers for john-doe + [ + { + "user": "john-doe", + "total_approvals": 15, + "prs_approved": 14, + } + ], + # pr_lgtm for john-doe + [ + { + "user": "john-doe", + "total_lgtm": 10, + "prs_lgtm": 10, + } + ], + ] + + response = client.get("/api/metrics/contributors?user=john-doe") + + assert response.status_code == 200 + data = response.json() + + # Verify all categories filtered to john-doe + assert len(data["pr_creators"]["data"]) == 1 + assert data["pr_creators"]["data"][0]["user"] == "john-doe" + assert len(data["pr_reviewers"]["data"]) == 1 + assert data["pr_reviewers"]["data"][0]["user"] == "john-doe" + assert len(data["pr_approvers"]["data"]) == 1 + assert data["pr_approvers"]["data"][0]["user"] == "john-doe" + assert len(data["pr_lgtm"]["data"]) == 1 + assert data["pr_lgtm"]["data"][0]["user"] == "john-doe" + + def test_get_contributors_with_repository_filter( + self, + client: TestClient, + setup_db_manager: Mock, + ) -> None: + """Test filtering contributors by repository.""" + # Mock count queries + setup_db_manager.fetchval.side_effect = [2, 1, 1, 1] + + # Mock data queries + setup_db_manager.fetch.side_effect = [ + # pr_creators + [ + { + "user": "john-doe", + "total_prs": 10, + "merged_prs": 9, + "closed_prs": 1, + "avg_commits": 2.5, + } + ], + # pr_reviewers + [ + { + "user": "jane-smith", + "total_reviews": 15, + "prs_reviewed": 12, + } + ], + # pr_approvers + [], + # pr_lgtm + [], + ] + + response = client.get("/api/metrics/contributors?repository=org/repo1") + + assert response.status_code == 200 + data = response.json() + + # Verify data is filtered by repository 
+ assert len(data["pr_creators"]["data"]) == 1 + assert len(data["pr_reviewers"]["data"]) == 1 + assert len(data["pr_approvers"]["data"]) == 0 + assert len(data["pr_lgtm"]["data"]) == 0 + + def test_get_contributors_with_time_range( + self, + client: TestClient, + setup_db_manager: Mock, + ) -> None: + """Test filtering contributors by time range.""" + # Mock count queries + setup_db_manager.fetchval.side_effect = [1, 1, 0, 0] + + # Mock data queries + setup_db_manager.fetch.side_effect = [ + [ + { + "user": "john-doe", + "total_prs": 5, + "merged_prs": 5, + "closed_prs": 0, + "avg_commits": 2.0, + } + ], + [ + { + "user": "jane-smith", + "total_reviews": 8, + "prs_reviewed": 7, + } + ], + [], + [], + ] + + start_time = "2024-11-01T00:00:00Z" + end_time = "2024-11-30T23:59:59Z" + + response = client.get(f"/api/metrics/contributors?start_time={start_time}&end_time={end_time}") + + assert response.status_code == 200 + data = response.json() + + # Verify time range is included in response + assert data["time_range"]["start_time"] == "2024-11-01T00:00:00+00:00" + assert data["time_range"]["end_time"] == "2024-11-30T23:59:59+00:00" + + def test_get_contributors_pagination( + self, + client: TestClient, + setup_db_manager: Mock, + ) -> None: + """Test contributors pagination with multiple pages.""" + # Mock count queries - 25 total in each category + setup_db_manager.fetchval.side_effect = [25, 25, 25, 25] + + # Mock data queries - page 2 of size 10 + setup_db_manager.fetch.side_effect = [ + # pr_creators page 2 + [ + { + "user": f"user-{i}", + "total_prs": 10 - i, + "merged_prs": 9 - i, + "closed_prs": 1, + "avg_commits": 2.5, + } + for i in range(10, 20) + ], + # pr_reviewers page 2 + [ + { + "user": f"reviewer-{i}", + "total_reviews": 50 - i, + "prs_reviewed": 40 - i, + } + for i in range(10, 20) + ], + # pr_approvers page 2 + [], + # pr_lgtm page 2 + [], + ] + + response = client.get("/api/metrics/contributors?page=2&page_size=10") + + assert response.status_code == 
200 + data = response.json() + + # Verify pagination for pr_creators + pagination = data["pr_creators"]["pagination"] + assert pagination["total"] == 25 + assert pagination["page"] == 2 + assert pagination["page_size"] == 10 + assert pagination["total_pages"] == 3 + assert pagination["has_next"] is True + assert pagination["has_prev"] is True + + # Verify pagination for pr_reviewers + pagination = data["pr_reviewers"]["pagination"] + assert pagination["total"] == 25 + assert pagination["page"] == 2 + assert pagination["total_pages"] == 3 + assert pagination["has_next"] is True + assert pagination["has_prev"] is True + + def test_get_contributors_empty_results( + self, + client: TestClient, + setup_db_manager: Mock, + ) -> None: + """Test contributors endpoint with no data.""" + # Mock count queries - all zeros + setup_db_manager.fetchval.side_effect = [0, 0, 0, 0] + + # Mock data queries - all empty + setup_db_manager.fetch.side_effect = [[], [], [], []] + + response = client.get("/api/metrics/contributors") + + assert response.status_code == 200 + data = response.json() + + # Verify all categories are empty + assert len(data["pr_creators"]["data"]) == 0 + assert data["pr_creators"]["pagination"]["total"] == 0 + assert data["pr_creators"]["pagination"]["total_pages"] == 0 + + assert len(data["pr_reviewers"]["data"]) == 0 + assert data["pr_reviewers"]["pagination"]["total"] == 0 + + assert len(data["pr_approvers"]["data"]) == 0 + assert data["pr_approvers"]["pagination"]["total"] == 0 + + assert len(data["pr_lgtm"]["data"]) == 0 + assert data["pr_lgtm"]["pagination"]["total"] == 0 + + def test_get_contributors_combined_filters( + self, + client: TestClient, + setup_db_manager: Mock, + ) -> None: + """Test contributors endpoint with all filters combined.""" + # Mock count queries + setup_db_manager.fetchval.side_effect = [1, 1, 1, 0] + + # Mock data queries + setup_db_manager.fetch.side_effect = [ + [ + { + "user": "john-doe", + "total_prs": 5, + "merged_prs": 5, + 
"closed_prs": 0, + "avg_commits": 2.0, + } + ], + [ + { + "user": "john-doe", + "total_reviews": 3, + "prs_reviewed": 3, + } + ], + [ + { + "user": "john-doe", + "total_approvals": 2, + "prs_approved": 2, + } + ], + [], + ] + + response = client.get( + "/api/metrics/contributors" + "?user=john-doe" + "&repository=org/repo1" + "&start_time=2024-11-01T00:00:00Z" + "&end_time=2024-11-30T23:59:59Z" + "&page=1" + "&page_size=20" + ) + + assert response.status_code == 200 + data = response.json() + + # Verify time range + assert data["time_range"]["start_time"] == "2024-11-01T00:00:00+00:00" + assert data["time_range"]["end_time"] == "2024-11-30T23:59:59+00:00" + + # Verify pagination reflects custom page_size + assert data["pr_creators"]["pagination"]["page_size"] == 20 + + def test_get_contributors_null_values_handling( + self, + client: TestClient, + setup_db_manager: Mock, + ) -> None: + """Test contributors endpoint handles null values gracefully.""" + # Mock count queries + setup_db_manager.fetchval.side_effect = [1, 1, 1, 1] + + # Mock data queries with null values + setup_db_manager.fetch.side_effect = [ + [ + { + "user": "john-doe", + "total_prs": 10, + "merged_prs": None, # NULL from database + "closed_prs": None, # NULL from database + "avg_commits": None, # NULL from database + } + ], + [ + { + "user": "jane-smith", + "total_reviews": 5, + "prs_reviewed": 1, + } + ], + [], + [], + ] + + response = client.get("/api/metrics/contributors") + + assert response.status_code == 200 + data = response.json() + + # Verify null values are converted to 0 + creator = data["pr_creators"]["data"][0] + assert creator["merged_prs"] == 0 + assert creator["closed_prs"] == 0 + assert creator["avg_commits_per_pr"] == 0.0 + + # Verify avg_reviews_per_pr calculation handles division correctly + reviewer = data["pr_reviewers"]["data"][0] + assert reviewer["avg_reviews_per_pr"] == 5.0 # 5 reviews / 1 PR + + def test_get_contributors_invalid_page_number(self, client: TestClient) -> 
None: + """Test contributors endpoint with invalid page number.""" + response = client.get("/api/metrics/contributors?page=0") + + assert response.status_code == 422 # FastAPI validation error + + def test_get_contributors_invalid_page_size(self, client: TestClient) -> None: + """Test contributors endpoint with invalid page size.""" + # Too large + response = client.get("/api/metrics/contributors?page_size=101") + assert response.status_code == 422 + + # Too small + response = client.get("/api/metrics/contributors?page_size=0") + assert response.status_code == 422 + + def test_get_contributors_db_manager_none(self, client: TestClient) -> None: + """Test endpoint returns 500 when db_manager is None.""" + with patch("webhook_server.app.db_manager", None): + response = client.get("/api/metrics/contributors") + + assert response.status_code == 500 + assert "Metrics database not available" in response.json()["detail"] + + def test_get_contributors_pool_none( + self, + client: TestClient, + setup_db_manager: Mock, + ) -> None: + """Test endpoint returns 500 when database pool is not initialized.""" + # Simulate pool not initialized - helper methods raise ValueError + setup_db_manager.pool = None + setup_db_manager.fetchval.side_effect = ValueError("Database pool not initialized. 
Call connect() first.") + + response = client.get("/api/metrics/contributors") + + assert response.status_code == 500 + assert "Failed to fetch contributor metrics" in response.json()["detail"] + + def test_get_contributors_database_error( + self, + client: TestClient, + setup_db_manager: Mock, + ) -> None: + """Test endpoint handles database errors gracefully.""" + setup_db_manager.fetchval.side_effect = Exception("Database connection lost") + + response = client.get("/api/metrics/contributors") + + assert response.status_code == 500 + assert "Failed to fetch contributor metrics" in response.json()["detail"] + + def test_get_contributors_metrics_server_disabled( + self, client: TestClient, monkeypatch: pytest.MonkeyPatch + ) -> None: + """Test endpoint returns 404 when metrics server is disabled.""" + # Override the module-level fixture to disable metrics server + monkeypatch.setattr(webhook_server.app, "METRICS_SERVER_ENABLED", False) + + response = client.get("/api/metrics/contributors") + + assert response.status_code == 404 + assert "Metrics server is disabled" in response.json()["detail"] + + +class TestGetTrendsEndpoint(TestMetricsAPIEndpoints): + """Test GET /api/metrics/trends endpoint.""" + + def test_get_trends_success( + self, + client: TestClient, + setup_db_manager: Mock, + ) -> None: + """Test getting trends data.""" + now = datetime.now(UTC) + + setup_db_manager.fetch.return_value = [ + { + "bucket": now - timedelta(hours=2), + "total_events": 10, + "successful_events": 9, + "failed_events": 1, + }, + { + "bucket": now - timedelta(hours=1), + "total_events": 15, + "successful_events": 14, + "failed_events": 1, + }, + ] + + response = client.get("/api/metrics/trends?bucket=hour") + + assert response.status_code == 200 + data = response.json() + assert len(data["trends"]) == 2 + assert data["trends"][0]["total_events"] == 10 + assert data["trends"][1]["total_events"] == 15 + + def test_get_trends_invalid_bucket(self, client: TestClient) -> None: + 
"""Test trends endpoint with invalid bucket parameter.""" + response = client.get("/api/metrics/trends?bucket=invalid") + + assert response.status_code == 422 # Validation error + + def test_get_trends_day_bucket( + self, + client: TestClient, + setup_db_manager: Mock, + ) -> None: + """Test getting trends data with day bucket.""" + now = datetime.now(UTC) + + setup_db_manager.fetch.return_value = [ + { + "bucket": now.replace(hour=0, minute=0, second=0, microsecond=0), + "total_events": 100, + "successful_events": 95, + "failed_events": 5, + }, + { + "bucket": now.replace(hour=0, minute=0, second=0, microsecond=0) - timedelta(days=1), + "total_events": 80, + "successful_events": 78, + "failed_events": 2, + }, + ] + + response = client.get("/api/metrics/trends?bucket=day") + + assert response.status_code == 200 + data = response.json() + assert len(data["trends"]) == 2 + assert data["trends"][0]["total_events"] == 100 + assert data["trends"][1]["total_events"] == 80 + + def test_get_trends_with_time_range( + self, + client: TestClient, + setup_db_manager: Mock, + ) -> None: + """Test trends endpoint with time range filtering.""" + start_time = "2024-11-01T00:00:00Z" + end_time = "2024-11-30T23:59:59Z" + + setup_db_manager.fetch.return_value = [ + { + "bucket": datetime(2024, 11, 15, 12, 0, 0, tzinfo=UTC), + "total_events": 50, + "successful_events": 48, + "failed_events": 2, + }, + ] + + response = client.get(f"/api/metrics/trends?bucket=hour&start_time={start_time}&end_time={end_time}") + + assert response.status_code == 200 + data = response.json() + assert len(data["trends"]) == 1 + assert data["trends"][0]["total_events"] == 50 + # API returns ISO format with +00:00 instead of Z + assert data["time_range"]["start_time"] == "2024-11-01T00:00:00+00:00" + assert data["time_range"]["end_time"] == "2024-11-30T23:59:59+00:00" + + def test_get_trends_empty_results( + self, + client: TestClient, + setup_db_manager: Mock, + ) -> None: + """Test trends endpoint returns 
empty list when no data matches.""" + setup_db_manager.fetch.return_value = [] + + response = client.get("/api/metrics/trends?bucket=hour") + + assert response.status_code == 200 + data = response.json() + assert data["trends"] == [] + assert "time_range" in data + + def test_get_trends_database_error( + self, + client: TestClient, + setup_db_manager: Mock, + ) -> None: + """Test trends endpoint handles database errors gracefully.""" + setup_db_manager.fetch.side_effect = Exception("Database connection failed") + + response = client.get("/api/metrics/trends?bucket=hour") + + assert response.status_code == 500 + assert "Failed to fetch metrics trends" in response.json()["detail"] + + def test_get_trends_db_manager_none(self, client: TestClient) -> None: + """Test endpoint returns 500 when db_manager is None.""" + with patch("webhook_server.app.db_manager", None): + response = client.get("/api/metrics/trends?bucket=hour") + + assert response.status_code == 500 + assert "Metrics database not available" in response.json()["detail"] + + def test_get_trends_pool_none( + self, + client: TestClient, + setup_db_manager: Mock, + ) -> None: + """Test endpoint returns 500 when database pool is not initialized.""" + # Simulate pool not initialized - helper methods raise ValueError + setup_db_manager.pool = None + setup_db_manager.fetch.side_effect = ValueError("Database pool not initialized. 
Call connect() first.") + + response = client.get("/api/metrics/trends?bucket=hour") + + assert response.status_code == 500 + assert "Failed to fetch metrics trends" in response.json()["detail"] diff --git a/webhook_server/tests/test_metrics_dashboard.py b/webhook_server/tests/test_metrics_dashboard.py new file mode 100644 index 00000000..8cee0f8e --- /dev/null +++ b/webhook_server/tests/test_metrics_dashboard.py @@ -0,0 +1,910 @@ +"""Comprehensive tests for MetricsDashboardController to achieve 90%+ coverage.""" + +from __future__ import annotations + +from datetime import UTC, datetime +from unittest.mock import AsyncMock, Mock, mock_open, patch + +import pytest +from fastapi import HTTPException, WebSocket, WebSocketDisconnect +from fastapi.responses import HTMLResponse + +from webhook_server.web.metrics_dashboard import MetricsDashboardController + + +@pytest.fixture +def mock_db_manager() -> AsyncMock: + """Create a mock DatabaseManager.""" + db = AsyncMock() + db.fetch = AsyncMock(return_value=[]) + return db + + +@pytest.fixture +def mock_logger() -> Mock: + """Create a mock logger.""" + return Mock() + + +@pytest.fixture +def controller(mock_db_manager: AsyncMock, mock_logger: Mock) -> MetricsDashboardController: + """Create a MetricsDashboardController instance with mocked dependencies.""" + return MetricsDashboardController(mock_db_manager, mock_logger) + + +@pytest.fixture +def mock_websocket() -> AsyncMock: + """Create a mock WebSocket.""" + ws = AsyncMock(spec=WebSocket) + ws.accept = AsyncMock() + ws.send_json = AsyncMock() + ws.close = AsyncMock() + return ws + + +@pytest.fixture +def sample_event() -> dict: + """Create a sample webhook event dictionary.""" + return { + "delivery_id": "abc123", + "repository": "org/repo", + "event_type": "pull_request", + "action": "opened", + "pr_number": 42, + "sender": "testuser", + "created_at": datetime(2025, 11, 24, 12, 34, 56, tzinfo=UTC), + "processed_at": datetime(2025, 11, 24, 12, 35, 0, tzinfo=UTC), + 
"duration_ms": 4000, + "status": "success", + "error_message": None, + "api_calls_count": 5, + "token_spend": 100, + "token_remaining": 4900, + } + + +@pytest.fixture +def sample_error_event() -> dict: + """Create a sample webhook event with error status.""" + return { + "delivery_id": "def456", + "repository": "org/repo", + "event_type": "issue_comment", + "action": "created", + "pr_number": None, + "sender": "erroruser", + "created_at": datetime(2025, 11, 24, 13, 0, 0, tzinfo=UTC), + "processed_at": datetime(2025, 11, 24, 13, 0, 5, tzinfo=UTC), + "duration_ms": 5000, + "status": "error", + "error_message": "API rate limit exceeded", + "api_calls_count": 10, + "token_spend": 200, + "token_remaining": 4700, + } + + +@pytest.fixture +def sample_partial_event() -> dict: + """Create a sample webhook event with partial status.""" + return { + "delivery_id": "ghi789", + "repository": "org/repo", + "event_type": "check_run", + "action": "completed", + "pr_number": 55, + "sender": "partialuser", + "created_at": datetime(2025, 11, 24, 14, 0, 0, tzinfo=UTC), + "processed_at": datetime(2025, 11, 24, 14, 0, 3, tzinfo=UTC), + "duration_ms": 3000, + "status": "partial", + "error_message": "Some operations failed", + "api_calls_count": 3, + "token_spend": 50, + "token_remaining": 4950, + } + + +class TestMetricsDashboardControllerInit: + """Test MetricsDashboardController initialization.""" + + def test_init(self, controller: MetricsDashboardController, mock_db_manager: AsyncMock, mock_logger: Mock) -> None: + """Test controller initialization.""" + assert controller.db_manager is mock_db_manager + assert controller.logger is mock_logger + assert isinstance(controller._websocket_connections, set) + assert len(controller._websocket_connections) == 0 + + def test_poll_interval_constant(self) -> None: + """Test POLL_INTERVAL_SECONDS constant is defined.""" + assert hasattr(MetricsDashboardController, "POLL_INTERVAL_SECONDS") + assert MetricsDashboardController.POLL_INTERVAL_SECONDS 
== 2.0 + + +class TestMetricsDashboardControllerShutdown: + """Test MetricsDashboardController shutdown method.""" + + @pytest.mark.asyncio + async def test_shutdown_with_active_connections( + self, controller: MetricsDashboardController, mock_logger: Mock + ) -> None: + """Test shutdown with active WebSocket connections.""" + # Create mock WebSocket connections + ws1 = AsyncMock(spec=WebSocket) + ws2 = AsyncMock(spec=WebSocket) + ws3 = AsyncMock(spec=WebSocket) + + # Add connections + controller._websocket_connections.add(ws1) + controller._websocket_connections.add(ws2) + controller._websocket_connections.add(ws3) + + # Execute shutdown + await controller.shutdown() + + # Verify all connections were closed + ws1.close.assert_called_once_with(code=1001, reason="Server shutdown") + ws2.close.assert_called_once_with(code=1001, reason="Server shutdown") + ws3.close.assert_called_once_with(code=1001, reason="Server shutdown") + + # Verify connections set is cleared + assert len(controller._websocket_connections) == 0 + + # Verify logging + assert mock_logger.info.call_count == 2 + mock_logger.info.assert_any_call("Shutting down MetricsDashboardController with 3 active connections") + mock_logger.info.assert_any_call("MetricsDashboardController shutdown completed") + + @pytest.mark.asyncio + async def test_shutdown_with_no_connections( + self, controller: MetricsDashboardController, mock_logger: Mock + ) -> None: + """Test shutdown with no active connections.""" + # Execute shutdown with empty connections + await controller.shutdown() + + # Verify logging for zero connections + mock_logger.info.assert_any_call("Shutting down MetricsDashboardController with 0 active connections") + mock_logger.info.assert_any_call("MetricsDashboardController shutdown completed") + + # Verify connections set is still empty + assert len(controller._websocket_connections) == 0 + + @pytest.mark.asyncio + async def test_shutdown_handles_close_errors( + self, controller: 
MetricsDashboardController, mock_logger: Mock + ) -> None: + """Test shutdown handles errors during WebSocket close.""" + # Create mock WebSocket that raises error on close + ws_error = AsyncMock(spec=WebSocket) + ws_error.close.side_effect = RuntimeError("Close failed") + + ws_ok = AsyncMock(spec=WebSocket) + + # Add connections + controller._websocket_connections.add(ws_error) + controller._websocket_connections.add(ws_ok) + + # Execute shutdown + await controller.shutdown() + + # Verify both connections attempted to close + ws_error.close.assert_called_once_with(code=1001, reason="Server shutdown") + ws_ok.close.assert_called_once_with(code=1001, reason="Server shutdown") + + # Verify error was logged + mock_logger.exception.assert_called_once_with("Error closing WebSocket connection during shutdown") + + # Verify connections set is cleared even with errors + assert len(controller._websocket_connections) == 0 + + +class TestGetDashboardPage: + """Test get_dashboard_page method.""" + + def test_get_dashboard_page_success(self, controller: MetricsDashboardController) -> None: + """Test successful HTML page serving.""" + mock_html_content = "Metrics Dashboard" + + with patch.object(controller, "_get_dashboard_html", return_value=mock_html_content): + response = controller.get_dashboard_page() + + assert isinstance(response, HTMLResponse) + assert response.body.decode() == mock_html_content + + def test_get_dashboard_page_file_not_found_error( + self, controller: MetricsDashboardController, mock_logger: Mock + ) -> None: + """Test get_dashboard_page with FileNotFoundError.""" + with patch.object(controller, "_get_dashboard_html", side_effect=FileNotFoundError("Template not found")): + with pytest.raises(HTTPException) as exc_info: + controller.get_dashboard_page() + + assert exc_info.value.status_code == 500 + assert exc_info.value.detail == "Internal server error" + mock_logger.exception.assert_called_once_with("Error serving metrics dashboard page") + + def 
test_get_dashboard_page_os_error(self, controller: MetricsDashboardController, mock_logger: Mock) -> None: + """Test get_dashboard_page with OSError.""" + with patch.object(controller, "_get_dashboard_html", side_effect=OSError("Read failed")): + with pytest.raises(HTTPException) as exc_info: + controller.get_dashboard_page() + + assert exc_info.value.status_code == 500 + assert exc_info.value.detail == "Internal server error" + mock_logger.exception.assert_called_once_with("Error serving metrics dashboard page") + + +class TestHandleWebSocket: + """Test handle_websocket method.""" + + @pytest.mark.asyncio + async def test_websocket_connection_accept( + self, controller: MetricsDashboardController, mock_websocket: AsyncMock, mock_logger: Mock + ) -> None: + """Test WebSocket connection is accepted and added to connections set.""" + # Mock asyncio.sleep to exit immediately + with patch("asyncio.sleep", side_effect=WebSocketDisconnect): + try: + await controller.handle_websocket(mock_websocket) + except WebSocketDisconnect: + pass + + # Verify connection was accepted + mock_websocket.accept.assert_called_once() + + # Verify connection was removed from set after disconnect + assert mock_websocket not in controller._websocket_connections + + # Verify logging + mock_logger.info.assert_any_call( + "WebSocket connection established for metrics streaming (repository=None, event_type=None, status=None)" + ) + + @pytest.mark.asyncio + async def test_websocket_event_streaming( + self, + controller: MetricsDashboardController, + mock_websocket: AsyncMock, + mock_db_manager: AsyncMock, + sample_event: dict, + ) -> None: + """Test event streaming with new events.""" + # Mock database to return one event, then empty + mock_db_manager.fetch.side_effect = [ + [sample_event], # First poll returns one event + [], # Second poll returns nothing + ] + + # Mock asyncio.sleep to control loop execution + sleep_call_count = 0 + + async def mock_sleep(_duration: float) -> None: + nonlocal 
sleep_call_count + sleep_call_count += 1 + if sleep_call_count >= 2: + raise WebSocketDisconnect + + with patch("asyncio.sleep", side_effect=mock_sleep): + try: + await controller.handle_websocket(mock_websocket) + except WebSocketDisconnect: + pass + + # Verify event was sent + assert mock_websocket.send_json.call_count == 1 + sent_message = mock_websocket.send_json.call_args[0][0] + assert sent_message["type"] == "metric_update" + assert sent_message["data"]["event"]["delivery_id"] == "abc123" + assert sent_message["data"]["summary_delta"]["successful_events"] == 1 + + @pytest.mark.asyncio + async def test_websocket_with_filters( + self, controller: MetricsDashboardController, mock_websocket: AsyncMock, mock_logger: Mock + ) -> None: + """Test WebSocket connection with filters applied.""" + # Mock asyncio.sleep to exit immediately + with patch("asyncio.sleep", side_effect=WebSocketDisconnect): + try: + await controller.handle_websocket( + mock_websocket, repository="org/repo", event_type="pull_request", status="success" + ) + except WebSocketDisconnect: + pass + + # Verify logging includes filters + mock_logger.info.assert_any_call( + "WebSocket connection established for metrics streaming " + "(repository=org/repo, event_type=pull_request, status=success)" + ) + + @pytest.mark.asyncio + async def test_websocket_disconnect_handling( + self, controller: MetricsDashboardController, mock_websocket: AsyncMock, mock_logger: Mock + ) -> None: + """Test WebSocketDisconnect handling.""" + # Mock send_json to raise WebSocketDisconnect + mock_websocket.send_json.side_effect = WebSocketDisconnect + + # Mock database to return an event + with patch.object( + controller, "_fetch_new_events", new=AsyncMock(return_value=[{"created_at": datetime.now(UTC)}]) + ): + await controller.handle_websocket(mock_websocket) + + # Verify client disconnected message + mock_logger.info.assert_any_call("WebSocket client disconnected") + + # Verify connection was removed + assert mock_websocket 
not in controller._websocket_connections + + @pytest.mark.asyncio + async def test_websocket_runtime_error_during_send( + self, controller: MetricsDashboardController, mock_websocket: AsyncMock, mock_logger: Mock + ) -> None: + """Test RuntimeError handling during send_json.""" + # Mock send_json to raise RuntimeError + mock_websocket.send_json.side_effect = RuntimeError("Connection closed") + + # Mock database to return an event + with patch.object( + controller, "_fetch_new_events", new=AsyncMock(return_value=[{"created_at": datetime.now(UTC)}]) + ): + await controller.handle_websocket(mock_websocket) + + # Verify disconnect was logged (RuntimeError gets converted to WebSocketDisconnect) + mock_logger.debug.assert_any_call("WebSocket connection closed: RuntimeError") + + @pytest.mark.asyncio + async def test_websocket_exception_handling( + self, controller: MetricsDashboardController, mock_websocket: AsyncMock, mock_logger: Mock + ) -> None: + """Test general exception handling in WebSocket handler.""" + # Mock _fetch_new_events to raise an exception + with patch.object(controller, "_fetch_new_events", new=AsyncMock(side_effect=ValueError("Database error"))): + # Mock asyncio.sleep to limit retries + sleep_call_count = 0 + + async def mock_sleep(_duration: float) -> None: + nonlocal sleep_call_count + sleep_call_count += 1 + if sleep_call_count >= 2: + raise KeyboardInterrupt # Force exit + + with patch("asyncio.sleep", side_effect=mock_sleep): + try: + await controller.handle_websocket(mock_websocket) + except KeyboardInterrupt: + pass + + # Verify error was logged + mock_logger.exception.assert_any_call("Error during metrics monitoring iteration") + + @pytest.mark.asyncio + async def test_websocket_initial_timestamp_set_when_no_events( + self, + controller: MetricsDashboardController, + mock_websocket: AsyncMock, + mock_db_manager: AsyncMock, + ) -> None: + """Test last_seen_timestamp is set to now when no events found.""" + # Mock database to return empty list 
twice + mock_db_manager.fetch.return_value = [] + + # Mock asyncio.sleep to control loop execution + sleep_call_count = 0 + + async def mock_sleep(_duration: float) -> None: + nonlocal sleep_call_count + sleep_call_count += 1 + if sleep_call_count >= 2: + raise WebSocketDisconnect + + with patch("asyncio.sleep", side_effect=mock_sleep): + try: + await controller.handle_websocket(mock_websocket) + except WebSocketDisconnect: + pass + + # Verify fetch was called with timestamp after first empty poll + assert mock_db_manager.fetch.call_count == 2 + # Second call should have last_seen_timestamp set + second_call_args = mock_db_manager.fetch.call_args_list[1][0] + # First positional arg is the query, second is the timestamp (must be provided) + assert len(second_call_args) > 1, "Expected timestamp to be passed after first empty poll" + # Timestamp was passed and should be a datetime instance + assert isinstance(second_call_args[1], datetime) + + @pytest.mark.asyncio + async def test_websocket_cleanup_in_finally_block( + self, controller: MetricsDashboardController, mock_websocket: AsyncMock + ) -> None: + """Test connection cleanup in finally block when exception occurs in monitoring loop.""" + # Mock _fetch_new_events to raise an exception that's not caught + # This will trigger the general exception handler and finally block + with patch.object(controller, "_fetch_new_events", new=AsyncMock(side_effect=KeyError("Unexpected error"))): + # Mock asyncio.sleep to also raise so we don't retry + with patch("asyncio.sleep", side_effect=KeyError("Unexpected error")): + # Exception should be caught and handled + await controller.handle_websocket(mock_websocket) + + # Verify connection was removed even with exception + assert mock_websocket not in controller._websocket_connections + + @pytest.mark.asyncio + async def test_websocket_close_on_general_exception( + self, controller: MetricsDashboardController, mock_websocket: AsyncMock, mock_logger: Mock + ) -> None: + """Test 
WebSocket close on general exception.""" + # Mock _fetch_new_events to raise a non-retriable exception + with patch.object(controller, "_fetch_new_events", new=AsyncMock(side_effect=RuntimeError("Fatal error"))): + # Mock asyncio.sleep to avoid retries + with patch("asyncio.sleep", side_effect=RuntimeError("Fatal error")): + await controller.handle_websocket(mock_websocket) + + # Verify error was logged + mock_logger.exception.assert_any_call("Error in WebSocket handler") + + # Verify close was attempted with error code + mock_websocket.close.assert_called_once_with(code=1011, reason="Internal server error") + + @pytest.mark.asyncio + async def test_websocket_close_exception_suppressed( + self, controller: MetricsDashboardController, mock_websocket: AsyncMock + ) -> None: + """Test that exceptions during close are suppressed.""" + # Mock close to raise an exception + mock_websocket.close.side_effect = RuntimeError("Close failed") + + # Mock _fetch_new_events to raise an exception + with patch.object(controller, "_fetch_new_events", new=AsyncMock(side_effect=ValueError("Error"))): + with patch("asyncio.sleep", side_effect=ValueError("Error")): + # Should not raise, exception should be suppressed + await controller.handle_websocket(mock_websocket) + + +class TestFetchNewEvents: + """Test _fetch_new_events method.""" + + @pytest.mark.asyncio + async def test_fetch_new_events_no_filters( + self, controller: MetricsDashboardController, mock_db_manager: AsyncMock, sample_event: dict + ) -> None: + """Test fetching events with no filters.""" + mock_db_manager.fetch.return_value = [sample_event] + + events = await controller._fetch_new_events( + last_seen_timestamp=None, repository=None, event_type=None, status=None + ) + + assert len(events) == 1 + assert events[0]["delivery_id"] == "abc123" + + # Verify query has no WHERE clause + query = mock_db_manager.fetch.call_args[0][0] + assert "WHERE" not in query + assert "ORDER BY created_at DESC" in query + assert "LIMIT 100" 
in query + + @pytest.mark.asyncio + async def test_fetch_new_events_with_timestamp_filter( + self, controller: MetricsDashboardController, mock_db_manager: AsyncMock, sample_event: dict + ) -> None: + """Test fetching events with last_seen_timestamp filter.""" + timestamp = datetime(2025, 11, 24, 12, 0, 0, tzinfo=UTC) + mock_db_manager.fetch.return_value = [sample_event] + + events = await controller._fetch_new_events( + last_seen_timestamp=timestamp, repository=None, event_type=None, status=None + ) + + assert len(events) == 1 + + # Verify query has WHERE created_at > timestamp + query_args = mock_db_manager.fetch.call_args[0] + query = query_args[0] + assert "WHERE created_at > $1" in query + assert query_args[1] == timestamp + + @pytest.mark.asyncio + async def test_fetch_new_events_with_repository_filter( + self, controller: MetricsDashboardController, mock_db_manager: AsyncMock, sample_event: dict + ) -> None: + """Test fetching events with repository filter.""" + mock_db_manager.fetch.return_value = [sample_event] + + events = await controller._fetch_new_events( + last_seen_timestamp=None, repository="org/repo", event_type=None, status=None + ) + + assert len(events) == 1 + + # Verify query has WHERE repository = $1 + query_args = mock_db_manager.fetch.call_args[0] + query = query_args[0] + assert "WHERE repository = $1" in query + assert query_args[1] == "org/repo" + + @pytest.mark.asyncio + async def test_fetch_new_events_with_event_type_filter( + self, controller: MetricsDashboardController, mock_db_manager: AsyncMock, sample_event: dict + ) -> None: + """Test fetching events with event_type filter.""" + mock_db_manager.fetch.return_value = [sample_event] + + events = await controller._fetch_new_events( + last_seen_timestamp=None, repository=None, event_type="pull_request", status=None + ) + + assert len(events) == 1 + + # Verify query has WHERE event_type = $1 + query_args = mock_db_manager.fetch.call_args[0] + query = query_args[0] + assert "WHERE 
event_type = $1" in query + assert query_args[1] == "pull_request" + + @pytest.mark.asyncio + async def test_fetch_new_events_with_status_filter( + self, controller: MetricsDashboardController, mock_db_manager: AsyncMock, sample_event: dict + ) -> None: + """Test fetching events with status filter.""" + mock_db_manager.fetch.return_value = [sample_event] + + events = await controller._fetch_new_events( + last_seen_timestamp=None, repository=None, event_type=None, status="success" + ) + + assert len(events) == 1 + + # Verify query has WHERE status = $1 + query_args = mock_db_manager.fetch.call_args[0] + query = query_args[0] + assert "WHERE status = $1" in query + assert query_args[1] == "success" + + @pytest.mark.asyncio + async def test_fetch_new_events_with_all_filters( + self, controller: MetricsDashboardController, mock_db_manager: AsyncMock, sample_event: dict + ) -> None: + """Test fetching events with all filters combined.""" + timestamp = datetime(2025, 11, 24, 12, 0, 0, tzinfo=UTC) + mock_db_manager.fetch.return_value = [sample_event] + + events = await controller._fetch_new_events( + last_seen_timestamp=timestamp, repository="org/repo", event_type="pull_request", status="success" + ) + + assert len(events) == 1 + + # Verify query has all WHERE conditions + query_args = mock_db_manager.fetch.call_args[0] + query = query_args[0] + assert "created_at > $1" in query + assert "repository = $2" in query + assert "event_type = $3" in query + assert "status = $4" in query + + # Verify all parameters are passed + assert query_args[1] == timestamp + assert query_args[2] == "org/repo" + assert query_args[3] == "pull_request" + assert query_args[4] == "success" + + @pytest.mark.asyncio + async def test_fetch_new_events_database_error( + self, controller: MetricsDashboardController, mock_db_manager: AsyncMock, mock_logger: Mock + ) -> None: + """Test database error propagates from _fetch_new_events.""" + mock_db_manager.fetch.side_effect = Exception("Database 
connection failed") + + # Exception should propagate instead of returning empty list + with pytest.raises(Exception, match="Database connection failed"): + await controller._fetch_new_events(last_seen_timestamp=None, repository=None, event_type=None, status=None) + + # Error should NOT be logged at this level (handled by outer handler) + mock_logger.exception.assert_not_called() + + @pytest.mark.asyncio + async def test_fetch_new_events_converts_rows_to_dicts( + self, controller: MetricsDashboardController, mock_db_manager: AsyncMock + ) -> None: + """Test that database rows are converted to dictionaries. + + Note: asyncpg Record objects are dict-like and support dict(row) conversion. + We use a plain dict here to simulate this behavior since the conversion + in the actual code uses dict(row) on asyncpg Record objects. + """ + # Use plain dict to simulate asyncpg Record behavior (supports dict(row)) + mock_row = {"delivery_id": "test123", "status": "success"} + mock_db_manager.fetch.return_value = [mock_row] + + events = await controller._fetch_new_events( + last_seen_timestamp=None, repository=None, event_type=None, status=None + ) + + # Verify we got results with correct data + assert len(events) == 1 + assert events[0]["delivery_id"] == "test123" + assert events[0]["status"] == "success" + + +class TestBuildMetricUpdateMessage: + """Test _build_metric_update_message method.""" + + def test_build_message_for_success_status(self, controller: MetricsDashboardController, sample_event: dict) -> None: + """Test message format for success status events.""" + message = controller._build_metric_update_message(sample_event) + + assert message["type"] == "metric_update" + assert "timestamp" in message + assert "data" in message + + event_data = message["data"]["event"] + assert event_data["delivery_id"] == "abc123" + assert event_data["repository"] == "org/repo" + assert event_data["event_type"] == "pull_request" + assert event_data["status"] == "success" + assert 
event_data["duration_ms"] == 4000 + + summary = message["data"]["summary_delta"] + assert summary["total_events"] == 1 + assert summary["successful_events"] == 1 + assert summary["failed_events"] == 0 + assert summary["partial_events"] == 0 + + def test_build_message_for_error_status( + self, controller: MetricsDashboardController, sample_error_event: dict + ) -> None: + """Test message format for error status events.""" + message = controller._build_metric_update_message(sample_error_event) + + assert message["type"] == "metric_update" + + event_data = message["data"]["event"] + assert event_data["status"] == "error" + assert event_data["error_message"] == "API rate limit exceeded" + + summary = message["data"]["summary_delta"] + assert summary["total_events"] == 1 + assert summary["successful_events"] == 0 + assert summary["failed_events"] == 1 + assert summary["partial_events"] == 0 + + def test_build_message_for_partial_status( + self, controller: MetricsDashboardController, sample_partial_event: dict + ) -> None: + """Test message format for partial status events.""" + message = controller._build_metric_update_message(sample_partial_event) + + assert message["type"] == "metric_update" + + event_data = message["data"]["event"] + assert event_data["status"] == "partial" + + summary = message["data"]["summary_delta"] + assert summary["total_events"] == 1 + assert summary["successful_events"] == 0 + assert summary["failed_events"] == 0 + assert summary["partial_events"] == 1 + + def test_build_message_datetime_serialization( + self, controller: MetricsDashboardController, sample_event: dict + ) -> None: + """Test datetime serialization in message.""" + message = controller._build_metric_update_message(sample_event) + + event_data = message["data"]["event"] + assert event_data["created_at"] == "2025-11-24T12:34:56+00:00" + assert event_data["processed_at"] == "2025-11-24T12:35:00+00:00" + + def test_build_message_with_none_values(self, controller: 
MetricsDashboardController) -> None: + """Test message building with None values. + + Note: When event.get() is called with a default and the key exists with value None, + it returns None (not the default). This test reflects that behavior. + """ + event = { + "delivery_id": None, + "repository": None, + "event_type": None, + "action": None, + "pr_number": None, + "sender": None, + "status": None, + "duration_ms": None, + "created_at": None, + "processed_at": None, + "error_message": None, + "api_calls_count": None, + "token_spend": None, + "token_remaining": None, + } + + message = controller._build_metric_update_message(event) + + event_data = message["data"]["event"] + # When dict has key with None value, .get(key, default) returns None, not default + assert event_data["delivery_id"] is None + assert event_data["repository"] is None + assert event_data["event_type"] is None + assert event_data["sender"] is None + assert event_data["status"] is None + assert event_data["duration_ms"] is None + assert event_data["created_at"] is None + assert event_data["processed_at"] is None + assert event_data["api_calls_count"] is None + assert event_data["token_spend"] is None + assert event_data["token_remaining"] is None + + +class TestSerializeDatetime: + """Test _serialize_datetime method.""" + + def test_serialize_datetime_with_valid_datetime(self, controller: MetricsDashboardController) -> None: + """Test serialization with valid datetime object.""" + dt = datetime(2025, 11, 24, 12, 34, 56, tzinfo=UTC) + result = controller._serialize_datetime(dt) + + assert result == "2025-11-24T12:34:56+00:00" + + def test_serialize_datetime_with_none(self, controller: MetricsDashboardController) -> None: + """Test serialization with None input.""" + result = controller._serialize_datetime(None) + assert result is None + + +class TestGetDashboardHtml: + """Test _get_dashboard_html method.""" + + def test_get_dashboard_html_success(self, controller: MetricsDashboardController) -> None: 
+ """Test successful template loading.""" + mock_html = "Dashboard" + + # Mock the file open operation + m = mock_open(read_data=mock_html) + + with patch("builtins.open", m): + result = controller._get_dashboard_html() + + assert result == mock_html + + # Verify file was opened with correct path and encoding + m.assert_called_once() + call_args = m.call_args + assert "metrics_dashboard.html" in str(call_args[0][0]) + assert call_args[1]["encoding"] == "utf-8" + + def test_get_dashboard_html_file_not_found(self, controller: MetricsDashboardController, mock_logger: Mock) -> None: + """Test FileNotFoundError handling.""" + with patch("builtins.open", side_effect=FileNotFoundError("Template not found")): + result = controller._get_dashboard_html() + + # Should return fallback HTML + assert "Metrics Dashboard Template Error" in result + assert "" in result + + # Verify error was logged + mock_logger.exception.assert_called_once() + assert "Metrics dashboard template not found" in mock_logger.exception.call_args[0][0] + + def test_get_dashboard_html_os_error(self, controller: MetricsDashboardController, mock_logger: Mock) -> None: + """Test OSError handling.""" + with patch("builtins.open", side_effect=OSError("Permission denied")): + result = controller._get_dashboard_html() + + # Should return fallback HTML + assert "Metrics Dashboard Template Error" in result + assert "" in result + + # Verify error was logged + mock_logger.exception.assert_called_once() + assert "Failed to read metrics dashboard template" in mock_logger.exception.call_args[0][0] + + +class TestGetFallbackHtml: + """Test _get_fallback_html method.""" + + def test_get_fallback_html_returns_valid_html(self, controller: MetricsDashboardController) -> None: + """Test fallback HTML generation.""" + result = controller._get_fallback_html() + + # Verify it's valid HTML + assert result.startswith("") + assert "" in result + + # Verify error message content + assert "Metrics Dashboard Template Error" in 
result + assert "could not be loaded" in result + assert "Refresh Page" in result + + # Verify styling exists + assert "" in result + + # Verify error icon + assert "⚠️" in result + + +class TestIntegrationScenarios: + """Integration tests for complex scenarios.""" + + @pytest.mark.asyncio + async def test_full_websocket_lifecycle( + self, + controller: MetricsDashboardController, + mock_websocket: AsyncMock, + mock_db_manager: AsyncMock, + sample_event: dict, + ) -> None: + """Test complete WebSocket lifecycle from connect to disconnect.""" + # Setup: Return event on first poll, empty on second + mock_db_manager.fetch.side_effect = [[sample_event], []] + + # Control loop execution + sleep_count = 0 + + async def controlled_sleep(_duration: float) -> None: + nonlocal sleep_count + sleep_count += 1 + if sleep_count >= 2: + raise WebSocketDisconnect + + with patch("asyncio.sleep", side_effect=controlled_sleep): + await controller.handle_websocket(mock_websocket) + + # Verify full lifecycle + mock_websocket.accept.assert_called_once() + assert mock_websocket.send_json.call_count == 1 + assert mock_websocket not in controller._websocket_connections + + @pytest.mark.asyncio + async def test_multiple_websocket_connections(self, controller: MetricsDashboardController) -> None: + """Test handling multiple simultaneous WebSocket connections.""" + ws1 = AsyncMock(spec=WebSocket) + ws2 = AsyncMock(spec=WebSocket) + ws3 = AsyncMock(spec=WebSocket) + + # Add all connections + controller._websocket_connections.add(ws1) + controller._websocket_connections.add(ws2) + controller._websocket_connections.add(ws3) + + assert len(controller._websocket_connections) == 3 + + # Shutdown should close all + await controller.shutdown() + + assert len(controller._websocket_connections) == 0 + ws1.close.assert_called_once() + ws2.close.assert_called_once() + ws3.close.assert_called_once() + + @pytest.mark.asyncio + async def test_timestamp_tracking_across_multiple_events( + self, + controller: 
MetricsDashboardController, + mock_websocket: AsyncMock, + mock_db_manager: AsyncMock, + ) -> None: + """Test last_seen_timestamp is updated correctly across multiple events.""" + event1 = {"created_at": datetime(2025, 11, 24, 12, 0, 0, tzinfo=UTC), "status": "success"} + event2 = {"created_at": datetime(2025, 11, 24, 13, 0, 0, tzinfo=UTC), "status": "success"} + + # Return two events on first poll, then empty + mock_db_manager.fetch.side_effect = [[event1, event2], []] + + sleep_count = 0 + + async def controlled_sleep(_duration: float) -> None: + nonlocal sleep_count + sleep_count += 1 + if sleep_count >= 2: + raise WebSocketDisconnect + + with patch("asyncio.sleep", side_effect=controlled_sleep): + await controller.handle_websocket(mock_websocket) + + # Verify both events were sent + assert mock_websocket.send_json.call_count == 2 diff --git a/webhook_server/tests/test_metrics_tracker.py b/webhook_server/tests/test_metrics_tracker.py new file mode 100644 index 00000000..e3705ca0 --- /dev/null +++ b/webhook_server/tests/test_metrics_tracker.py @@ -0,0 +1,326 @@ +"""Tests for MetricsTracker webhook event tracking.""" + +from unittest.mock import AsyncMock, Mock + +import pytest + +from webhook_server.libs.metrics_tracker import MetricsTracker + + +class TestMetricsTracker: + """Test suite for MetricsTracker class.""" + + @pytest.fixture + def mock_db_manager(self) -> Mock: + """Create a mock database manager.""" + mock = Mock() + # Mock the execute method that MetricsTracker now uses + mock.execute = AsyncMock(return_value="INSERT 0 1") + return mock + + @pytest.fixture + def mock_logger(self) -> Mock: + """Create a mock logger.""" + return Mock() + + @pytest.fixture + def metrics_tracker( + self, + mock_db_manager: Mock, + mock_logger: Mock, + ) -> MetricsTracker: + """Create a MetricsTracker instance with mocked dependencies.""" + return MetricsTracker(mock_db_manager, mock_logger) + + def test_metrics_tracker_init( + self, + mock_db_manager: Mock, + 
mock_logger: Mock, + ) -> None: + """Test MetricsTracker initialization.""" + tracker = MetricsTracker(mock_db_manager, mock_logger) + + assert tracker.db_manager is mock_db_manager + assert tracker.logger is mock_logger + + @pytest.mark.asyncio + async def test_track_webhook_event_success( + self, + metrics_tracker: MetricsTracker, + mock_db_manager: Mock, + mock_logger: Mock, + ) -> None: + """Test tracking webhook event successfully.""" + await metrics_tracker.track_webhook_event( + delivery_id="test-delivery-id", + repository="org/repo", + event_type="pull_request", + action="opened", + sender="testuser", + payload={"test": "data"}, + processing_time_ms=150, + status="success", + pr_number=42, + ) + + # Verify execute was called via DatabaseManager + mock_db_manager.execute.assert_called_once() + + # Verify the execute call parameters + # Parameter order: SQL query, uuid4(), delivery_id, repository, event_type, action, + # pr_number, sender, payload_json, duration_ms, status, + # error_message, api_calls_count, token_spend, token_remaining, metrics_available + call_args = mock_db_manager.execute.call_args + assert "INSERT INTO webhooks" in call_args[0][0] + assert call_args[0][2] == "test-delivery-id" # delivery_id + assert call_args[0][3] == "org/repo" # repository + assert call_args[0][4] == "pull_request" # event_type + assert call_args[0][5] == "opened" # action + assert call_args[0][6] == 42 # pr_number + assert call_args[0][7] == "testuser" # sender + assert call_args[0][9] == 150 # duration_ms + assert call_args[0][10] == "success" # status + + # Verify log message + mock_logger.info.assert_called_once() + assert "test-delivery-id" in mock_logger.info.call_args[0][0] + assert "org/repo" in mock_logger.info.call_args[0][0] + assert "success" in mock_logger.info.call_args[0][0] + + @pytest.mark.asyncio + async def test_track_webhook_event_with_error( + self, + metrics_tracker: MetricsTracker, + mock_db_manager: Mock, + mock_logger: Mock, + ) -> None: + 
"""Test tracking webhook event with error status.""" + await metrics_tracker.track_webhook_event( + delivery_id="test-delivery-id", + repository="org/repo", + event_type="pull_request", + action="synchronize", + sender="testuser", + payload={"test": "data"}, + processing_time_ms=250, + status="error", + error_message="Test error message", + ) + + # Verify execute was called via DatabaseManager + mock_db_manager.execute.assert_called_once() + + # Verify execute was called with error message + call_args = mock_db_manager.execute.call_args + assert call_args[0][10] == "error" # status + assert call_args[0][11] == "Test error message" # error_message + + # Verify log message + mock_logger.info.assert_called_once() + + @pytest.mark.asyncio + async def test_track_webhook_event_with_api_metrics( + self, + metrics_tracker: MetricsTracker, + mock_db_manager: Mock, + mock_logger: Mock, # noqa: ARG002 + ) -> None: + """Test tracking webhook event with API usage metrics.""" + await metrics_tracker.track_webhook_event( + delivery_id="test-delivery-id", + repository="org/repo", + event_type="pull_request", + action="opened", + sender="testuser", + payload={"test": "data"}, + processing_time_ms=150, + status="success", + api_calls_count=5, + token_spend=10, + token_remaining=4990, + ) + + # Verify execute was called with API metrics + call_args = mock_db_manager.execute.call_args + assert call_args[0][12] == 5 # api_calls_count + assert call_args[0][13] == 10 # token_spend + assert call_args[0][14] == 4990 # token_remaining + + @pytest.mark.asyncio + async def test_track_webhook_event_database_error( + self, + metrics_tracker: MetricsTracker, + mock_db_manager: Mock, + mock_logger: Mock, + ) -> None: + """Test handling database errors during tracking.""" + # Make execute raise an exception + mock_db_manager.execute.side_effect = Exception("Database error") + + with pytest.raises(Exception, match="Database error"): + await metrics_tracker.track_webhook_event( + 
delivery_id="test-delivery-id", + repository="org/repo", + event_type="pull_request", + action="opened", + sender="testuser", + payload={"test": "data"}, + processing_time_ms=150, + status="success", + ) + + # Verify exception was logged + mock_logger.exception.assert_called_once() + assert "Failed to track webhook event" in mock_logger.exception.call_args[0][0] + assert "test-delivery-id" in mock_logger.exception.call_args[0][0] + assert "org/repo" in mock_logger.exception.call_args[0][0] + + @pytest.mark.asyncio + async def test_track_webhook_event_pool_not_initialized( + self, + mock_db_manager: Mock, + mock_logger: Mock, + ) -> None: + """Test error when database pool is not initialized.""" + # Make execute raise ValueError when pool is not initialized + mock_db_manager.execute.side_effect = ValueError("Database pool not initialized. Call connect() first.") + tracker = MetricsTracker(mock_db_manager, mock_logger) + + with pytest.raises(ValueError, match="Database pool not initialized"): + await tracker.track_webhook_event( + delivery_id="test-delivery-id", + repository="org/repo", + event_type="pull_request", + action="opened", + sender="testuser", + payload={"test": "data"}, + processing_time_ms=150, + status="success", + ) + + # Verify exception was logged + mock_logger.exception.assert_called_once() + + @pytest.mark.asyncio + async def test_track_webhook_event_complex_payload( + self, + metrics_tracker: MetricsTracker, + mock_db_manager: Mock, + mock_logger: Mock, # noqa: ARG002 + ) -> None: + """Test tracking webhook event with complex payload structure.""" + complex_payload = { + "action": "opened", + "pull_request": { + "id": 123, + "number": 42, + "title": "Test PR", + "user": {"login": "testuser"}, + "labels": [{"name": "bug"}, {"name": "urgent"}], + }, + "repository": { + "name": "repo", + "owner": {"login": "org"}, + }, + } + + await metrics_tracker.track_webhook_event( + delivery_id="test-delivery-id", + repository="org/repo", + 
event_type="pull_request", + action="opened", + sender="testuser", + payload=complex_payload, + processing_time_ms=150, + status="success", + pr_number=42, + ) + + # Verify payload was serialized to JSON + call_args = mock_db_manager.execute.call_args + payload_json = call_args[0][8] # payload_json parameter position + assert "pull_request" in payload_json + assert "repository" in payload_json + assert "labels" in payload_json + + @pytest.mark.asyncio + async def test_track_webhook_event_optional_pr_number( + self, + metrics_tracker: MetricsTracker, + mock_db_manager: Mock, + mock_logger: Mock, # noqa: ARG002 + ) -> None: + """Test tracking webhook event without PR number (e.g., issue_comment).""" + await metrics_tracker.track_webhook_event( + delivery_id="test-delivery-id", + repository="org/repo", + event_type="issue_comment", + action="created", + sender="testuser", + payload={"comment": {"body": "Great work!"}}, + processing_time_ms=100, + status="success", + pr_number=None, + ) + + # Verify pr_number is None in execute call + call_args = mock_db_manager.execute.call_args + assert call_args[0][6] is None # pr_number + + @pytest.mark.asyncio + async def test_track_webhook_event_all_optional_params( + self, + metrics_tracker: MetricsTracker, + mock_db_manager: Mock, + mock_logger: Mock, # noqa: ARG002 + ) -> None: + """Test tracking webhook event with all optional parameters set.""" + await metrics_tracker.track_webhook_event( + delivery_id="test-delivery-id", + repository="org/repo", + event_type="check_run", + action="completed", + sender="github-actions", + payload={"check_run": {"conclusion": "success"}}, + processing_time_ms=500, + status="success", + pr_number=42, + error_message=None, + api_calls_count=3, + token_spend=5, + token_remaining=4995, + ) + + # Verify all parameters were passed to execute + call_args = mock_db_manager.execute.call_args + assert len(call_args[0]) == 16 # SQL query + 15 parameters + assert call_args[0][6] == 42 # pr_number + 
assert call_args[0][11] is None # error_message + assert call_args[0][12] == 3 # api_calls_count + assert call_args[0][13] == 5 # token_spend + assert call_args[0][14] == 4995 # token_remaining + + @pytest.mark.asyncio + async def test_track_webhook_event_zero_api_calls( + self, + metrics_tracker: MetricsTracker, + mock_db_manager: Mock, + mock_logger: Mock, # noqa: ARG002 + ) -> None: + """Test tracking webhook event with zero API calls (default values).""" + await metrics_tracker.track_webhook_event( + delivery_id="test-delivery-id", + repository="org/repo", + event_type="pull_request", + action="opened", + sender="testuser", + payload={"test": "data"}, + processing_time_ms=150, + status="success", + ) + + # Verify default zero values for API metrics + call_args = mock_db_manager.execute.call_args + assert call_args[0][12] == 0 # api_calls_count default + assert call_args[0][13] == 0 # token_spend default + assert call_args[0][14] == 0 # token_remaining default diff --git a/webhook_server/tests/test_migrations_env.py b/webhook_server/tests/test_migrations_env.py new file mode 100644 index 00000000..a0ab430d --- /dev/null +++ b/webhook_server/tests/test_migrations_env.py @@ -0,0 +1,153 @@ +"""Tests for Alembic migrations environment configuration. + +This test module verifies that database credentials are properly URL-encoded +when constructing the connection string, preventing malformed URLs when +credentials contain special characters. 
+""" + +import ast +import pathlib +from urllib.parse import quote + +import pytest + + +class TestMigrationsEnvURLEncoding: + """Test suite for migrations env.py URL encoding.""" + + @pytest.mark.parametrize( + "username,password,expected_username,expected_password", + [ + # Special characters that MUST be URL-encoded + ("user@domain.com", "p@ss:w/rd", "user%40domain.com", "p%40ss%3Aw%2Frd"), + # More special characters + ("admin#1", "pass?word", "admin%231", "pass%3Fword"), + # Mix of safe and unsafe characters + ("user_name-123", "P@$$w0rd!", "user_name-123", "P%40%24%24w0rd%21"), + # Simple credentials (no encoding needed) + ("simple_user", "simple_pass", "simple_user", "simple_pass"), + ], + ) + def test_url_encoding_credentials( + self, + username: str, + password: str, + expected_username: str, + expected_password: str, + ) -> None: + """Test that credentials with special characters are properly URL-encoded. + + This test verifies the fix for URL-encoding database credentials in + webhook_server/migrations/env.py lines 57-63. 
+ + Args: + username: Test username (may contain special chars) + password: Test password (may contain special chars) + expected_username: Expected URL-encoded username + expected_password: Expected URL-encoded password + """ + # Verify our test expectations match urllib.parse.quote behavior + assert quote(username, safe="") == expected_username + assert quote(password, safe="") == expected_password + + # Verify URL encoding logic + # We can't directly execute env.py (it runs on import), so we test the logic + encoded_username = quote(username, safe="") + encoded_password = quote(password, safe="") + + db_url = f"postgresql+asyncpg://{encoded_username}:{encoded_password}@localhost:5432/test_db" + + # Verify URL contains encoded credentials + assert expected_username in db_url + assert expected_password in db_url + + # Verify URL is well-formed (no unencoded special chars after ://) + # Split by :// to get credentials part + credentials_part = db_url.split("://")[1].split("@")[0] + username_part, password_part = credentials_part.split(":") + + assert username_part == expected_username + assert password_part == expected_password + + def test_migrations_env_uses_quote_for_all_url_components(self) -> None: + """Verify that migrations env.py uses urllib.parse.quote for URL encoding. + + The quote() function properly URL-encodes special characters in credentials + and database names, preventing SQL injection and URL parsing issues. 
+ """ + env_py_path = pathlib.Path(__file__).parent.parent / "migrations" / "env.py" + env_py_content = env_py_path.read_text() + + # Verify quote is imported from urllib.parse + assert "from urllib.parse import quote" in env_py_content + + # Parse AST to verify quote function is called for username, password, and database + tree = ast.parse(env_py_content) + + # Check that quote function is called at least 3 times (username, password, database) + quote_calls = 0 + for node in ast.walk(tree): + if isinstance(node, ast.Call): + # Direct call: quote(...) + if isinstance(node.func, ast.Name) and node.func.id == "quote": + quote_calls += 1 + + assert quote_calls >= 3, "Expected at least 3 calls to quote() for username, password, and database encoding" + + def test_special_chars_requiring_encoding(self) -> None: + """Test that special characters are properly identified and encoded. + + Characters that MUST be encoded in URL credentials: + - @ (at sign) - separates userinfo from host + - : (colon) - separates username from password + - / (slash) - path separator + - ? 
(question mark) - query string separator + - # (hash) - fragment separator + - % (percent) - encoding prefix + - & (ampersand) - query parameter separator + - = (equals) - query parameter value separator + - + (plus) - space in query strings + """ + special_chars = { + "@": "%40", + ":": "%3A", + "/": "%2F", + "?": "%3F", + "#": "%23", + "%": "%25", + "&": "%26", + "=": "%3D", + "+": "%2B", + " ": "%20", + } + + for char, expected_encoding in special_chars.items(): + # Test encoding with safe="" to encode ALL special chars + encoded = quote(char, safe="") + assert encoded == expected_encoding, ( + f"Character '{char}' should encode to '{expected_encoding}', got '{encoded}'" + ) + + def test_real_world_example(self) -> None: + """Test a real-world example with email username and complex password.""" + # Real-world scenario: email as username, complex password + username = "webhook-server@example.com" + password = "C0mpl3x!P@$$w0rd#2024" # pragma: allowlist secret + + encoded_username = quote(username, safe="") + encoded_password = quote(password, safe="") + + # Construct URL as in env.py + db_url = f"postgresql+asyncpg://{encoded_username}:{encoded_password}@db.example.com:5432/webhooks_db" + + # Verify URL is well-formed + assert "webhook-server%40example.com" in db_url # @ encoded + assert "C0mpl3x%21P%40%24%24w0rd%232024" in db_url # Special chars encoded + assert "@db.example.com" in db_url # Host separator @ NOT encoded + + # Verify no unencoded special chars in credentials part + credentials_part = db_url.split("://")[1].split("@")[0] + # Should not contain unencoded @ or : or # except the : separator + assert credentials_part.count(":") == 1 # Only the username:password separator + assert "@" not in credentials_part # @ should be encoded + assert "#" not in credentials_part # # should be encoded diff --git a/webhook_server/tests/test_models.py b/webhook_server/tests/test_models.py new file mode 100644 index 00000000..a9116ede --- /dev/null +++ 
b/webhook_server/tests/test_models.py @@ -0,0 +1,1407 @@ +""" +Comprehensive tests for SQLAlchemy models. + +Tests all 7 models: +- Webhook: Webhook event store with full payload and metrics +- PullRequest: PR master records with size metrics +- PREvent: PR timeline events for analytics +- PRReview: Review data for approval tracking +- PRLabel: Label history for workflow tracking +- CheckRun: Check run results for CI/CD metrics +- APIUsage: GitHub API usage tracking for rate limit monitoring +""" + +from datetime import UTC, datetime +from typing import Any +from uuid import UUID, uuid4 + +import pytest + +from webhook_server.libs.models import ( + APIUsage, + Base, + CheckRun, + PREvent, + PRLabel, + PRReview, + PullRequest, + Webhook, +) + + +class TestBase: + """Test the Base declarative class.""" + + def test_base_is_declarative_base(self) -> None: + """Verify Base is a valid SQLAlchemy declarative base.""" + assert hasattr(Base, "metadata") + assert hasattr(Base, "registry") + + +class TestWebhookModel: + """Test Webhook model instantiation and fields.""" + + def test_webhook_model_creation(self) -> None: + """Test creating Webhook instance with required fields.""" + webhook = Webhook( + delivery_id="test-delivery-123", + repository="org/repo", + event_type="pull_request", + action="opened", + sender="test-user", + payload={"key": "value"}, + processed_at=datetime.now(UTC), + duration_ms=150, + status="success", + ) + + assert webhook.delivery_id == "test-delivery-123" + assert webhook.repository == "org/repo" + assert webhook.event_type == "pull_request" + assert webhook.action == "opened" + assert webhook.sender == "test-user" + assert webhook.payload == {"key": "value"} + assert webhook.status == "success" + assert webhook.duration_ms == 150 + + def test_webhook_model_with_optional_fields(self) -> None: + """Test Webhook with optional fields set.""" + webhook = Webhook( + delivery_id="test-delivery-456", + repository="org/repo", + event_type="pull_request", 
+ action="synchronize", + pr_number=42, + sender="test-user", + payload={"data": "test"}, + processed_at=datetime.now(UTC), + duration_ms=200, + status="failure", + error_message="Test error", + api_calls_count=5, + token_spend=10, + token_remaining=4990, + ) + + assert webhook.pr_number == 42 + assert webhook.error_message == "Test error" + assert webhook.api_calls_count == 5 + assert webhook.token_spend == 10 + assert webhook.token_remaining == 4990 + + def test_webhook_repr(self) -> None: + """Test Webhook __repr__ method.""" + webhook = Webhook( + delivery_id="test-123", + repository="org/repo", + event_type="push", + action="created", + sender="user", + payload={}, + processed_at=datetime.now(UTC), + duration_ms=100, + status="success", + ) + + repr_str = repr(webhook) + assert "Webhook" in repr_str + assert "test-123" in repr_str + assert "org/repo" in repr_str + assert "push" in repr_str + assert "success" in repr_str + + def test_webhook_relationships(self) -> None: + """Test Webhook relationships are defined.""" + webhook = Webhook( + delivery_id="test-rel", + repository="org/repo", + event_type="pull_request", + action="opened", + sender="user", + payload={}, + processed_at=datetime.now(UTC), + duration_ms=100, + status="success", + ) + + # Verify relationships exist (lazy loaded, empty by default) + assert hasattr(webhook, "pr_events") + assert hasattr(webhook, "check_runs") + assert hasattr(webhook, "api_usage") + assert webhook.pr_events == [] + assert webhook.check_runs == [] + assert webhook.api_usage == [] + + +class TestPullRequestModel: + """Test PullRequest model instantiation and fields.""" + + def test_pull_request_model_creation(self) -> None: + """Test creating PullRequest instance with required fields.""" + now = datetime.now(UTC) + pr = PullRequest( + repository="org/repo", + pr_number=123, + title="Test PR", + author="test-user", + created_at=now, + updated_at=now, + state="open", + ) + + assert pr.repository == "org/repo" + assert 
pr.pr_number == 123 + assert pr.title == "Test PR" + assert pr.author == "test-user" + assert pr.state == "open" + assert pr.created_at == now + assert pr.updated_at == now + + def test_pull_request_with_metrics(self) -> None: + """Test PullRequest with code metrics.""" + now = datetime.now(UTC) + pr = PullRequest( + repository="org/repo", + pr_number=456, + title="Feature PR", + author="dev", + created_at=now, + updated_at=now, + state="open", + draft=True, + additions=150, + deletions=50, + changed_files=5, + size_label="M", + ) + + assert pr.draft is True + assert pr.additions == 150 + assert pr.deletions == 50 + assert pr.changed_files == 5 + assert pr.size_label == "M" + + def test_pull_request_merged(self) -> None: + """Test PullRequest with merged state.""" + now = datetime.now(UTC) + pr = PullRequest( + repository="org/repo", + pr_number=789, + title="Merged PR", + author="dev", + created_at=now, + updated_at=now, + merged_at=now, + state="merged", + ) + + assert pr.state == "merged" + assert pr.merged_at == now + + def test_pull_request_closed(self) -> None: + """Test PullRequest with closed state.""" + now = datetime.now(UTC) + pr = PullRequest( + repository="org/repo", + pr_number=999, + title="Closed PR", + author="dev", + created_at=now, + updated_at=now, + closed_at=now, + state="closed", + ) + + assert pr.state == "closed" + assert pr.closed_at == now + + def test_pull_request_repr(self) -> None: + """Test PullRequest __repr__ method.""" + now = datetime.now(UTC) + pr = PullRequest( + repository="test-org/test-repo", + pr_number=42, + title="Very long PR title that should be truncated in the repr output for readability", + author="user", + created_at=now, + updated_at=now, + state="open", + ) + + repr_str = repr(pr) + assert "PullRequest" in repr_str + assert "test-org/test-repo" in repr_str + assert "42" in repr_str + assert "open" in repr_str + + def test_pull_request_relationships(self) -> None: + """Test PullRequest relationships are defined.""" 
+ now = datetime.now(UTC) + pr = PullRequest( + repository="org/repo", + pr_number=1, + title="Test", + author="user", + created_at=now, + updated_at=now, + state="open", + ) + + # Verify relationships exist + assert hasattr(pr, "pr_events") + assert hasattr(pr, "pr_reviews") + assert hasattr(pr, "pr_labels") + assert hasattr(pr, "check_runs") + assert pr.pr_events == [] + assert pr.pr_reviews == [] + assert pr.pr_labels == [] + assert pr.check_runs == [] + + +class TestPREventModel: + """Test PREvent model instantiation and fields.""" + + def test_pr_event_model_creation(self) -> None: + """Test creating PREvent instance with required fields.""" + pr_id = UUID("12345678-1234-5678-1234-567812345678") + webhook_id = UUID("87654321-4321-8765-4321-876543218765") + + event = PREvent( + pr_id=pr_id, + webhook_id=webhook_id, + event_type="synchronize", + event_data={"commits": 3}, + ) + + assert event.pr_id == pr_id + assert event.webhook_id == webhook_id + assert event.event_type == "synchronize" + assert event.event_data == {"commits": 3} + + def test_pr_event_repr(self) -> None: + """Test PREvent __repr__ method.""" + pr_id = UUID("12345678-1234-5678-1234-567812345678") + webhook_id = UUID("87654321-4321-8765-4321-876543218765") + + event = PREvent( + pr_id=pr_id, + webhook_id=webhook_id, + event_type="opened", + event_data={}, + ) + + repr_str = repr(event) + assert "PREvent" in repr_str + assert str(pr_id) in repr_str + assert "opened" in repr_str + + +class TestPRReviewModel: + """Test PRReview model instantiation and fields.""" + + def test_pr_review_model_creation(self) -> None: + """Test creating PRReview instance with required fields.""" + pr_id = UUID("12345678-1234-5678-1234-567812345678") + + review = PRReview( + pr_id=pr_id, + reviewer="test-reviewer", + review_type="approved", + ) + + assert review.pr_id == pr_id + assert review.reviewer == "test-reviewer" + assert review.review_type == "approved" + + def test_pr_review_changes_requested(self) -> None: + 
"""Test PRReview with changes_requested type.""" + pr_id = UUID("12345678-1234-5678-1234-567812345678") + + review = PRReview( + pr_id=pr_id, + reviewer="reviewer2", + review_type="changes_requested", + ) + + assert review.review_type == "changes_requested" + + def test_pr_review_repr(self) -> None: + """Test PRReview __repr__ method.""" + pr_id = UUID("12345678-1234-5678-1234-567812345678") + + review = PRReview( + pr_id=pr_id, + reviewer="john-doe", + review_type="commented", + ) + + repr_str = repr(review) + assert "PRReview" in repr_str + assert str(pr_id) in repr_str + assert "john-doe" in repr_str + assert "commented" in repr_str + + +class TestPRLabelModel: + """Test PRLabel model instantiation and fields.""" + + def test_pr_label_model_creation(self) -> None: + """Test creating PRLabel instance with required fields.""" + pr_id = UUID("12345678-1234-5678-1234-567812345678") + + label = PRLabel( + pr_id=pr_id, + label="verified", + ) + + assert label.pr_id == pr_id + assert label.label == "verified" + assert label.removed_at is None + + def test_pr_label_with_removal(self) -> None: + """Test PRLabel with removed_at timestamp.""" + pr_id = UUID("12345678-1234-5678-1234-567812345678") + removed_time = datetime.now(UTC) + + label = PRLabel( + pr_id=pr_id, + label="needs-work", + removed_at=removed_time, + ) + + assert label.label == "needs-work" + assert label.removed_at == removed_time + + def test_pr_label_repr_active(self) -> None: + """Test PRLabel __repr__ for active label.""" + pr_id = UUID("12345678-1234-5678-1234-567812345678") + + label = PRLabel( + pr_id=pr_id, + label="size/M", + ) + + repr_str = repr(label) + assert "PRLabel" in repr_str + assert str(pr_id) in repr_str + assert "size/M" in repr_str + + def test_pr_label_repr_removed(self) -> None: + """Test PRLabel __repr__ for removed label.""" + pr_id = UUID("12345678-1234-5678-1234-567812345678") + removed_time = datetime.now(UTC) + + label = PRLabel( + pr_id=pr_id, + label="wip", + 
removed_at=removed_time, + ) + + repr_str = repr(label) + assert "PRLabel" in repr_str + assert "wip" in repr_str + assert "removed_at" in repr_str + + +class TestCheckRunModel: + """Test CheckRun model instantiation and fields.""" + + def test_check_run_model_creation(self) -> None: + """Test creating CheckRun instance with required fields.""" + pr_id = UUID("12345678-1234-5678-1234-567812345678") + webhook_id = UUID("87654321-4321-8765-4321-876543218765") + started = datetime.now(UTC) + + check_run = CheckRun( + pr_id=pr_id, + webhook_id=webhook_id, + check_name="tox", + status="completed", + started_at=started, + ) + + assert check_run.pr_id == pr_id + assert check_run.webhook_id == webhook_id + assert check_run.check_name == "tox" + assert check_run.status == "completed" + assert check_run.started_at == started + + def test_check_run_with_success(self) -> None: + """Test CheckRun with successful conclusion.""" + pr_id = UUID("12345678-1234-5678-1234-567812345678") + webhook_id = UUID("87654321-4321-8765-4321-876543218765") + started = datetime.now(UTC) + completed = datetime.now(UTC) + + check_run = CheckRun( + pr_id=pr_id, + webhook_id=webhook_id, + check_name="pre-commit", + status="completed", + conclusion="success", + started_at=started, + completed_at=completed, + duration_ms=5000, + ) + + assert check_run.conclusion == "success" + assert check_run.completed_at == completed + assert check_run.duration_ms == 5000 + + def test_check_run_with_failure(self) -> None: + """Test CheckRun with failed conclusion and output.""" + pr_id = UUID("12345678-1234-5678-1234-567812345678") + webhook_id = UUID("87654321-4321-8765-4321-876543218765") + started = datetime.now(UTC) + + check_run = CheckRun( + pr_id=pr_id, + webhook_id=webhook_id, + check_name="container-build", + status="completed", + conclusion="failure", + started_at=started, + output_title="Build failed", + output_summary="Docker build failed on step 5", + ) + + assert check_run.conclusion == "failure" + 
assert check_run.output_title == "Build failed" + assert check_run.output_summary == "Docker build failed on step 5" + + def test_check_run_in_progress(self) -> None: + """Test CheckRun in progress state.""" + pr_id = UUID("12345678-1234-5678-1234-567812345678") + webhook_id = UUID("87654321-4321-8765-4321-876543218765") + + check_run = CheckRun( + pr_id=pr_id, + webhook_id=webhook_id, + check_name="tests", + status="in_progress", + started_at=datetime.now(UTC), + ) + + assert check_run.status == "in_progress" + assert check_run.conclusion is None + assert check_run.completed_at is None + assert check_run.duration_ms is None + + def test_check_run_repr(self) -> None: + """Test CheckRun __repr__ method.""" + pr_id = UUID("12345678-1234-5678-1234-567812345678") + webhook_id = UUID("87654321-4321-8765-4321-876543218765") + + check_run = CheckRun( + pr_id=pr_id, + webhook_id=webhook_id, + check_name="lint", + status="completed", + conclusion="success", + started_at=datetime.now(UTC), + ) + + repr_str = repr(check_run) + assert "CheckRun" in repr_str + assert str(pr_id) in repr_str + assert "lint" in repr_str + assert "completed" in repr_str + assert "success" in repr_str + + +class TestAPIUsageModel: + """Test APIUsage model instantiation and fields.""" + + def test_api_usage_model_creation(self) -> None: + """Test creating APIUsage instance with required fields.""" + webhook_id = UUID("87654321-4321-8765-4321-876543218765") + + api_usage = APIUsage( + webhook_id=webhook_id, + repository="org/repo", + event_type="pull_request", + api_calls_count=5, + initial_rate_limit=5000, + final_rate_limit=4995, + ) + + assert api_usage.webhook_id == webhook_id + assert api_usage.repository == "org/repo" + assert api_usage.event_type == "pull_request" + assert api_usage.api_calls_count == 5 + assert api_usage.initial_rate_limit == 5000 + assert api_usage.final_rate_limit == 4995 + + def test_api_usage_with_token_spend(self) -> None: + """Test APIUsage with token_spend 
calculated.""" + webhook_id = UUID("87654321-4321-8765-4321-876543218765") + + api_usage = APIUsage( + webhook_id=webhook_id, + repository="org/repo", + event_type="check_run", + api_calls_count=10, + initial_rate_limit=5000, + final_rate_limit=4990, + token_spend=10, + ) + + assert api_usage.token_spend == 10 + + def test_api_usage_repr(self) -> None: + """Test APIUsage __repr__ method.""" + webhook_id = UUID("87654321-4321-8765-4321-876543218765") + + api_usage = APIUsage( + webhook_id=webhook_id, + repository="test-org/test-repo", + event_type="issue_comment", + api_calls_count=3, + initial_rate_limit=5000, + final_rate_limit=4997, + token_spend=3, + ) + + repr_str = repr(api_usage) + assert "APIUsage" in repr_str + assert str(webhook_id) in repr_str + assert "test-org/test-repo" in repr_str + assert "3" in repr_str + + +class TestModelTableNames: + """Test that all models have correct table names.""" + + def test_webhook_table_name(self) -> None: + """Verify Webhook model has correct table name.""" + assert Webhook.__tablename__ == "webhooks" + + def test_pull_request_table_name(self) -> None: + """Verify PullRequest model has correct table name.""" + assert PullRequest.__tablename__ == "pull_requests" + + def test_pr_event_table_name(self) -> None: + """Verify PREvent model has correct table name.""" + assert PREvent.__tablename__ == "pr_events" + + def test_pr_review_table_name(self) -> None: + """Verify PRReview model has correct table name.""" + assert PRReview.__tablename__ == "pr_reviews" + + def test_pr_label_table_name(self) -> None: + """Verify PRLabel model has correct table name.""" + assert PRLabel.__tablename__ == "pr_labels" + + def test_check_run_table_name(self) -> None: + """Verify CheckRun model has correct table name.""" + assert CheckRun.__tablename__ == "check_runs" + + def test_api_usage_table_name(self) -> None: + """Verify APIUsage model has correct table name.""" + assert APIUsage.__tablename__ == "api_usage" + + +class 
TestCascadeDeleteBehavior: + """ + Test cascade delete behavior for all models. + + Verifies that deleting parent records correctly cascades to child records + as defined in relationship configurations (cascade="all, delete-orphan"). + """ + + @pytest.fixture + async def async_engine(self) -> Any: + """Create async SQLAlchemy engine for testing.""" + from sqlalchemy import JSON, Column, MetaData, String, Table + from sqlalchemy.ext.asyncio import create_async_engine + + # Create fresh metadata for testing to avoid modifying original + test_metadata = MetaData() + + # Recreate each table with SQLite-compatible types + for table_name, table in Base.metadata.tables.items(): + # Build columns with compatible types + new_columns = [] + for col in table.columns: + # Determine compatible column type + col_type = col.type + if col_type.__class__.__name__ == "JSONB": + col_type = JSON() + elif col_type.__class__.__name__ == "UUID": + col_type = String(36) + + # Create new column without server defaults + new_col = Column( + col.name, + col_type, + primary_key=col.primary_key, + nullable=col.nullable, + unique=col.unique if hasattr(col, "unique") else False, + index=col.index if hasattr(col, "index") else False, + ) + new_columns.append(new_col) + + # Create table (constraints will be handled by SQLAlchemy) + Table(table_name, test_metadata, *new_columns) + + # Use in-memory SQLite for testing + # Note: SQLite requires PRAGMA foreign_keys=ON to enable cascade deletes + from sqlalchemy import event + + engine = create_async_engine( + "sqlite+aiosqlite:///:memory:", + echo=False, + ) + + # Enable foreign keys for all connections (required for CASCADE DELETE in SQLite) + @event.listens_for(engine.sync_engine, "connect") + def set_sqlite_pragma(dbapi_conn, connection_record): # type: ignore + cursor = dbapi_conn.cursor() + cursor.execute("PRAGMA foreign_keys=ON") + cursor.close() + + # Create all tables + async with engine.begin() as conn: + await 
conn.run_sync(test_metadata.create_all) + + yield engine + + # Cleanup + await engine.dispose() + + @pytest.fixture + async def async_session(self, async_engine: Any) -> Any: + """Create async SQLAlchemy session for testing.""" + from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker + + async_session_maker = async_sessionmaker( + async_engine, + class_=AsyncSession, + expire_on_commit=False, + ) + + async with async_session_maker() as session: + yield session + + @pytest.mark.asyncio + async def test_webhook_cascade_delete_pr_events(self, async_session: Any) -> None: + """Test that deleting a Webhook cascades to PREvent records.""" + from sqlalchemy import select + + # Create webhook with related PREvent + now = datetime.now(UTC) + webhook_id = uuid4() + pr_id = uuid4() + pr_event_id = uuid4() + + webhook = Webhook( + id=webhook_id, + delivery_id="cascade-test-webhook-1", + repository="org/repo", + event_type="pull_request", + action="opened", + sender="test-user", + payload={"test": "data"}, + created_at=now, + processed_at=now, + duration_ms=100, + status="success", + ) + + pr = PullRequest( + id=pr_id, + repository="org/repo", + pr_number=1, + title="Test PR", + author="test-user", + created_at=now, + updated_at=now, + state="open", + ) + + async_session.add_all([webhook, pr]) + await async_session.commit() + + # Create PREvent linked to webhook and PR + pr_event = PREvent( + id=pr_event_id, + pr_id=pr_id, + webhook_id=webhook_id, + event_type="opened", + event_data={"action": "opened"}, + created_at=now, + ) + async_session.add(pr_event) + await async_session.commit() + + # Delete webhook + await async_session.delete(webhook) + await async_session.commit() + + # Verify PREvent was cascade deleted + result = await async_session.execute(select(PREvent).where(PREvent.id == pr_event_id)) + assert result.scalar_one_or_none() is None + + # Verify webhook was deleted + result = await async_session.execute(select(Webhook).where(Webhook.id == webhook_id)) 
+ assert result.scalar_one_or_none() is None + + @pytest.mark.asyncio + async def test_webhook_cascade_delete_check_runs(self, async_session: Any) -> None: + """Test that deleting a Webhook cascades to CheckRun records.""" + from sqlalchemy import select + + # Create webhook with related CheckRun + now = datetime.now(UTC) + webhook_id = uuid4() + pr_id = uuid4() + check_run_id = uuid4() + + webhook = Webhook( + id=webhook_id, + delivery_id="cascade-test-webhook-2", + repository="org/repo", + event_type="check_run", + action="completed", + sender="test-user", + payload={"test": "data"}, + created_at=now, + processed_at=now, + duration_ms=100, + status="success", + ) + + pr = PullRequest( + id=pr_id, + repository="org/repo", + pr_number=2, + title="Test PR", + author="test-user", + created_at=now, + updated_at=now, + state="open", + ) + + async_session.add_all([webhook, pr]) + await async_session.commit() + + # Create CheckRun linked to webhook and PR + check_run = CheckRun( + id=check_run_id, + pr_id=pr_id, + webhook_id=webhook_id, + check_name="tox", + status="completed", + conclusion="success", + started_at=now, + ) + async_session.add(check_run) + await async_session.commit() + + webhook_id = webhook.id + check_run_id = check_run.id + + # Delete webhook + await async_session.delete(webhook) + await async_session.commit() + + # Verify CheckRun was cascade deleted + result = await async_session.execute(select(CheckRun).where(CheckRun.id == check_run_id)) + assert result.scalar_one_or_none() is None + + # Verify webhook was deleted + result = await async_session.execute(select(Webhook).where(Webhook.id == webhook_id)) + assert result.scalar_one_or_none() is None + + @pytest.mark.asyncio + async def test_webhook_cascade_delete_api_usage(self, async_session: Any) -> None: + """Test that deleting a Webhook cascades to APIUsage records.""" + from sqlalchemy import select + + # Create webhook with related APIUsage + now = datetime.now(UTC) + webhook_id = uuid4() + 
api_usage_id = uuid4() + + webhook = Webhook( + id=webhook_id, + delivery_id="cascade-test-webhook-3", + repository="org/repo", + event_type="pull_request", + action="synchronize", + sender="test-user", + payload={"test": "data"}, + created_at=now, + processed_at=now, + duration_ms=100, + status="success", + ) + + async_session.add(webhook) + await async_session.commit() + + # Create APIUsage linked to webhook + api_usage = APIUsage( + id=api_usage_id, + webhook_id=webhook_id, + repository="org/repo", + event_type="pull_request", + api_calls_count=5, + initial_rate_limit=5000, + final_rate_limit=4995, + token_spend=5, + created_at=now, + ) + async_session.add(api_usage) + await async_session.commit() + + webhook_id = webhook.id + api_usage_id = api_usage.id + + # Delete webhook + await async_session.delete(webhook) + await async_session.commit() + + # Verify APIUsage was cascade deleted + result = await async_session.execute(select(APIUsage).where(APIUsage.id == api_usage_id)) + assert result.scalar_one_or_none() is None + + # Verify webhook was deleted + result = await async_session.execute(select(Webhook).where(Webhook.id == webhook_id)) + assert result.scalar_one_or_none() is None + + @pytest.mark.asyncio + async def test_webhook_cascade_delete_multiple_children(self, async_session: Any) -> None: + """Test that deleting a Webhook cascades to all related child records.""" + from sqlalchemy import select + + # Create webhook with multiple related records + now = datetime.now(UTC) + webhook_id = uuid4() + pr_id = uuid4() + pr_event_id = uuid4() + check_run_id = uuid4() + api_usage_id = uuid4() + + webhook = Webhook( + id=webhook_id, + delivery_id="cascade-test-webhook-multi", + repository="org/repo", + event_type="pull_request", + action="opened", + sender="test-user", + payload={"test": "data"}, + created_at=now, + processed_at=now, + duration_ms=100, + status="success", + ) + + pr = PullRequest( + id=pr_id, + repository="org/repo", + pr_number=10, + title="Multi 
Test PR", + author="test-user", + created_at=now, + updated_at=now, + state="open", + ) + + async_session.add_all([webhook, pr]) + await async_session.commit() + + # Create multiple child records + pr_event = PREvent( + id=pr_event_id, + pr_id=pr_id, + webhook_id=webhook_id, + event_type="opened", + event_data={"action": "opened"}, + created_at=now, + ) + check_run = CheckRun( + id=check_run_id, + pr_id=pr_id, + webhook_id=webhook_id, + check_name="pre-commit", + status="completed", + started_at=now, + ) + api_usage = APIUsage( + id=api_usage_id, + webhook_id=webhook_id, + repository="org/repo", + event_type="pull_request", + api_calls_count=3, + initial_rate_limit=5000, + final_rate_limit=4997, + created_at=now, + ) + + async_session.add_all([pr_event, check_run, api_usage]) + await async_session.commit() + + pr_event_id = pr_event.id + check_run_id = check_run.id + api_usage_id = api_usage.id + webhook_id = webhook.id + + # Delete webhook + await async_session.delete(webhook) + await async_session.commit() + + # Verify all child records were cascade deleted + result = await async_session.execute(select(PREvent).where(PREvent.id == pr_event_id)) + assert result.scalar_one_or_none() is None + + result = await async_session.execute(select(CheckRun).where(CheckRun.id == check_run_id)) + assert result.scalar_one_or_none() is None + + result = await async_session.execute(select(APIUsage).where(APIUsage.id == api_usage_id)) + assert result.scalar_one_or_none() is None + + # Verify webhook was deleted + result = await async_session.execute(select(Webhook).where(Webhook.id == webhook_id)) + assert result.scalar_one_or_none() is None + + @pytest.mark.asyncio + async def test_pull_request_cascade_delete_pr_events(self, async_session: Any) -> None: + """Test that deleting a PullRequest cascades to PREvent records.""" + from sqlalchemy import select + + # Create PR with related PREvent + now = datetime.now(UTC) + pr_id = uuid4() + webhook_id = uuid4() + pr_event_id = uuid4() 
+ + pr = PullRequest( + id=pr_id, + repository="org/repo", + pr_number=20, + title="Cascade Test PR", + author="test-user", + created_at=now, + updated_at=now, + state="open", + ) + + webhook = Webhook( + id=webhook_id, + delivery_id="pr-cascade-webhook-1", + repository="org/repo", + event_type="pull_request", + action="opened", + sender="test-user", + payload={"test": "data"}, + created_at=now, + processed_at=now, + duration_ms=100, + status="success", + ) + + async_session.add_all([pr, webhook]) + await async_session.commit() + + # Create PREvent linked to PR + pr_event = PREvent( + id=pr_event_id, + pr_id=pr_id, + webhook_id=webhook_id, + event_type="opened", + event_data={"action": "opened"}, + created_at=now, + ) + async_session.add(pr_event) + await async_session.commit() + + pr_id = pr.id + pr_event_id = pr_event.id + + # Delete PR + await async_session.delete(pr) + await async_session.commit() + + # Verify PREvent was cascade deleted + result = await async_session.execute(select(PREvent).where(PREvent.id == pr_event_id)) + assert result.scalar_one_or_none() is None + + # Verify PR was deleted + result = await async_session.execute(select(PullRequest).where(PullRequest.id == pr_id)) + assert result.scalar_one_or_none() is None + + @pytest.mark.asyncio + async def test_pull_request_cascade_delete_pr_reviews(self, async_session: Any) -> None: + """Test that deleting a PullRequest cascades to PRReview records.""" + from sqlalchemy import select + + # Create PR with related PRReview + now = datetime.now(UTC) + pr_id = uuid4() + pr_review_id = uuid4() + + pr = PullRequest( + id=pr_id, + repository="org/repo", + pr_number=21, + title="Review Cascade PR", + author="test-user", + created_at=now, + updated_at=now, + state="open", + ) + + async_session.add(pr) + await async_session.commit() + + # Create PRReview linked to PR + pr_review = PRReview( + id=pr_review_id, + pr_id=pr_id, + reviewer="reviewer-1", + review_type="approved", + created_at=now, + ) + 
async_session.add(pr_review) + await async_session.commit() + + pr_id = pr.id + pr_review_id = pr_review.id + + # Delete PR + await async_session.delete(pr) + await async_session.commit() + + # Verify PRReview was cascade deleted + result = await async_session.execute(select(PRReview).where(PRReview.id == pr_review_id)) + assert result.scalar_one_or_none() is None + + # Verify PR was deleted + result = await async_session.execute(select(PullRequest).where(PullRequest.id == pr_id)) + assert result.scalar_one_or_none() is None + + @pytest.mark.asyncio + async def test_pull_request_cascade_delete_pr_labels(self, async_session: Any) -> None: + """Test that deleting a PullRequest cascades to PRLabel records.""" + from sqlalchemy import select + + # Create PR with related PRLabel + now = datetime.now(UTC) + pr_id = uuid4() + pr_label_id = uuid4() + + pr = PullRequest( + id=pr_id, + repository="org/repo", + pr_number=22, + title="Label Cascade PR", + author="test-user", + created_at=now, + updated_at=now, + state="open", + ) + + async_session.add(pr) + await async_session.commit() + + # Create PRLabel linked to PR + pr_label = PRLabel( + id=pr_label_id, + pr_id=pr_id, + label="verified", + added_at=now, + ) + async_session.add(pr_label) + await async_session.commit() + + pr_id = pr.id + pr_label_id = pr_label.id + + # Delete PR + await async_session.delete(pr) + await async_session.commit() + + # Verify PRLabel was cascade deleted + result = await async_session.execute(select(PRLabel).where(PRLabel.id == pr_label_id)) + assert result.scalar_one_or_none() is None + + # Verify PR was deleted + result = await async_session.execute(select(PullRequest).where(PullRequest.id == pr_id)) + assert result.scalar_one_or_none() is None + + @pytest.mark.asyncio + async def test_pull_request_cascade_delete_check_runs(self, async_session: Any) -> None: + """Test that deleting a PullRequest cascades to CheckRun records.""" + from sqlalchemy import select + + # Create PR with related 
CheckRun + now = datetime.now(UTC) + pr_id = uuid4() + webhook_id = uuid4() + check_run_id = uuid4() + + pr = PullRequest( + id=pr_id, + repository="org/repo", + pr_number=23, + title="CheckRun Cascade PR", + author="test-user", + created_at=now, + updated_at=now, + state="open", + ) + + webhook = Webhook( + id=webhook_id, + delivery_id="pr-cascade-webhook-2", + repository="org/repo", + event_type="check_run", + action="completed", + sender="test-user", + payload={"test": "data"}, + created_at=now, + processed_at=now, + duration_ms=100, + status="success", + ) + + async_session.add_all([pr, webhook]) + await async_session.commit() + + # Create CheckRun linked to PR + check_run = CheckRun( + id=check_run_id, + pr_id=pr_id, + webhook_id=webhook_id, + check_name="lint", + status="completed", + started_at=now, + ) + async_session.add(check_run) + await async_session.commit() + + pr_id = pr.id + check_run_id = check_run.id + + # Delete PR + await async_session.delete(pr) + await async_session.commit() + + # Verify CheckRun was cascade deleted + result = await async_session.execute(select(CheckRun).where(CheckRun.id == check_run_id)) + assert result.scalar_one_or_none() is None + + # Verify PR was deleted + result = await async_session.execute(select(PullRequest).where(PullRequest.id == pr_id)) + assert result.scalar_one_or_none() is None + + @pytest.mark.asyncio + async def test_pull_request_cascade_delete_all_children(self, async_session: Any) -> None: + """Test that deleting a PullRequest cascades to all related child records.""" + from sqlalchemy import select + + # Create PR with multiple related records + now = datetime.now(UTC) + pr_id = uuid4() + webhook_id = uuid4() + pr_event_id = uuid4() + pr_review_id = uuid4() + pr_label_id = uuid4() + check_run_id = uuid4() + + pr = PullRequest( + id=pr_id, + repository="org/repo", + pr_number=30, + title="Multi Child Cascade PR", + author="test-user", + created_at=now, + updated_at=now, + state="open", + ) + + webhook = 
Webhook( + id=webhook_id, + delivery_id="pr-cascade-webhook-multi", + repository="org/repo", + event_type="pull_request", + action="opened", + sender="test-user", + payload={"test": "data"}, + created_at=now, + processed_at=now, + duration_ms=100, + status="success", + ) + + async_session.add_all([pr, webhook]) + await async_session.commit() + + # Create multiple child records + pr_event = PREvent( + id=pr_event_id, + pr_id=pr_id, + webhook_id=webhook_id, + event_type="opened", + event_data={"action": "opened"}, + created_at=now, + ) + pr_review = PRReview( + id=pr_review_id, + pr_id=pr_id, + reviewer="reviewer-1", + review_type="approved", + created_at=now, + ) + pr_label = PRLabel( + id=pr_label_id, + pr_id=pr_id, + label="size/L", + added_at=now, + ) + check_run = CheckRun( + id=check_run_id, + pr_id=pr_id, + webhook_id=webhook_id, + check_name="tests", + status="completed", + started_at=now, + ) + + async_session.add_all([pr_event, pr_review, pr_label, check_run]) + await async_session.commit() + + pr_event_id = pr_event.id + pr_review_id = pr_review.id + pr_label_id = pr_label.id + check_run_id = check_run.id + pr_id = pr.id + + # Delete PR + await async_session.delete(pr) + await async_session.commit() + + # Verify all child records were cascade deleted + result = await async_session.execute(select(PREvent).where(PREvent.id == pr_event_id)) + assert result.scalar_one_or_none() is None + + result = await async_session.execute(select(PRReview).where(PRReview.id == pr_review_id)) + assert result.scalar_one_or_none() is None + + result = await async_session.execute(select(PRLabel).where(PRLabel.id == pr_label_id)) + assert result.scalar_one_or_none() is None + + result = await async_session.execute(select(CheckRun).where(CheckRun.id == check_run_id)) + assert result.scalar_one_or_none() is None + + # Verify PR was deleted + result = await async_session.execute(select(PullRequest).where(PullRequest.id == pr_id)) + assert result.scalar_one_or_none() is None + + 
@pytest.mark.asyncio + async def test_cascade_delete_does_not_affect_unrelated_records(self, async_session: Any) -> None: + """Test that cascade delete only affects related records, not unrelated ones.""" + from sqlalchemy import select + + # Create two separate webhooks with their own children + now = datetime.now(UTC) + webhook1_id = uuid4() + webhook2_id = uuid4() + pr1_id = uuid4() + pr2_id = uuid4() + api_usage1_id = uuid4() + api_usage2_id = uuid4() + + webhook1 = Webhook( + id=webhook1_id, + delivery_id="cascade-isolation-webhook-1", + repository="org/repo", + event_type="pull_request", + action="opened", + sender="test-user", + payload={"test": "data"}, + created_at=now, + processed_at=now, + duration_ms=100, + status="success", + ) + + webhook2 = Webhook( + id=webhook2_id, + delivery_id="cascade-isolation-webhook-2", + repository="org/repo", + event_type="pull_request", + action="synchronize", + sender="test-user", + payload={"test": "data"}, + created_at=now, + processed_at=now, + duration_ms=100, + status="success", + ) + + pr1 = PullRequest( + id=pr1_id, + repository="org/repo", + pr_number=40, + title="Isolation Test PR 1", + author="test-user", + created_at=now, + updated_at=now, + state="open", + ) + + pr2 = PullRequest( + id=pr2_id, + repository="org/repo", + pr_number=41, + title="Isolation Test PR 2", + author="test-user", + created_at=now, + updated_at=now, + state="open", + ) + + async_session.add_all([webhook1, webhook2, pr1, pr2]) + await async_session.commit() + + # Create child records for each webhook + api_usage1 = APIUsage( + id=api_usage1_id, + webhook_id=webhook1_id, + repository="org/repo", + event_type="pull_request", + api_calls_count=3, + initial_rate_limit=5000, + final_rate_limit=4997, + created_at=now, + ) + api_usage2 = APIUsage( + id=api_usage2_id, + webhook_id=webhook2_id, + repository="org/repo", + event_type="pull_request", + api_calls_count=5, + initial_rate_limit=5000, + final_rate_limit=4995, + created_at=now, + ) + + 
async_session.add_all([api_usage1, api_usage2]) + await async_session.commit() + + api_usage1_id = api_usage1.id + api_usage2_id = api_usage2.id + + # Delete webhook1 + await async_session.delete(webhook1) + await async_session.commit() + + # Verify webhook1's child was deleted + result = await async_session.execute(select(APIUsage).where(APIUsage.id == api_usage1_id)) + assert result.scalar_one_or_none() is None + + # Verify webhook2's child was NOT deleted (unrelated) + result = await async_session.execute(select(APIUsage).where(APIUsage.id == api_usage2_id)) + assert result.scalar_one_or_none() is not None diff --git a/webhook_server/web/metrics_dashboard.py b/webhook_server/web/metrics_dashboard.py new file mode 100644 index 00000000..cfb47fef --- /dev/null +++ b/webhook_server/web/metrics_dashboard.py @@ -0,0 +1,450 @@ +"""Metrics dashboard controller for real-time webhook metrics streaming and visualization.""" + +from __future__ import annotations + +import asyncio +import logging +from datetime import UTC, datetime +from pathlib import Path +from typing import Any + +from fastapi import HTTPException, WebSocket, WebSocketDisconnect +from fastapi.responses import HTMLResponse + +from webhook_server.libs.database import DatabaseManager + + +class MetricsDashboardController: + """ + Controller for metrics dashboard functionality. + + Provides real-time streaming of webhook metrics from PostgreSQL database + via WebSocket connections. Follows the WebSocket pattern from LogViewerController + with periodic polling for database changes. 
+ + Architecture: + - WebSocket connection management with graceful shutdown + - Periodic polling (1-2 seconds) to detect new webhook events + - Filtering by repository, event_type, status + - Real-time metrics updates to connected clients + + WebSocket Message Format: + { + "type": "metric_update", + "timestamp": "2025-11-24T12:34:56.789Z", + "data": { + "event": { + "delivery_id": "...", + "repository": "org/repo", + "event_type": "pull_request", + "status": "success", + "duration_ms": 5234, + "created_at": "...", + }, + "summary_delta": { + "total_events": 1, + "successful_events": 1, + } + } + } + + Example: + controller = MetricsDashboardController(db_manager, logger) + await controller.handle_websocket(websocket, repository="org/repo") + """ + + # Polling interval for database changes (seconds) + POLL_INTERVAL_SECONDS = 2.0 + + def __init__(self, db_manager: DatabaseManager, logger: logging.Logger) -> None: + """ + Initialize the metrics dashboard controller. + + Args: + db_manager: DatabaseManager instance for query execution + logger: Logger instance for this controller + + Architecture guarantees: + - db_manager is ALWAYS provided (required parameter) - no defensive checks needed + - logger is ALWAYS provided (required parameter) - no defensive checks needed + - _websocket_connections starts empty - legitimate to check size + """ + self.db_manager = db_manager + self.logger = logger + self._websocket_connections: set[WebSocket] = set() + + async def shutdown(self) -> None: + """ + Close all active WebSocket connections during shutdown. + + This method should be called during application shutdown to properly + close all WebSocket connections and prevent resource leaks. + + Follows the same pattern as LogViewerController.shutdown(). 
+ """ + self.logger.info( + f"Shutting down MetricsDashboardController with {len(self._websocket_connections)} active connections" + ) + + # Create a copy of the connections set to avoid modification during iteration + connections_to_close = list(self._websocket_connections) + + for ws in connections_to_close: + try: + await ws.close(code=1001, reason="Server shutdown") + self.logger.debug("Successfully closed WebSocket connection during shutdown") + except Exception: + # Log the error but continue closing other connections + self.logger.exception("Error closing WebSocket connection during shutdown") + + # Clear the connections set + self._websocket_connections.clear() + self.logger.info("MetricsDashboardController shutdown completed") + + def get_dashboard_page(self) -> HTMLResponse: + """ + Serve the metrics dashboard HTML page. + + Returns: + HTML response with metrics dashboard interface + + Raises: + HTTPException: 500 for template loading errors + """ + try: + html_content = self._get_dashboard_html() + return HTMLResponse(content=html_content) + except Exception as e: + self.logger.exception("Error serving metrics dashboard page") + raise HTTPException(status_code=500, detail="Internal server error") from e + + async def handle_websocket( + self, + websocket: WebSocket, + repository: str | None = None, + event_type: str | None = None, + status: str | None = None, + ) -> None: + """ + Handle WebSocket connection for real-time metrics streaming. + + Accepts WebSocket connection, monitors database for new webhook events, + and streams updates to the client. Uses periodic polling (every 2 seconds) + to check for new events. 
+ + Args: + websocket: WebSocket connection + repository: Filter by repository (e.g., "org/repo") + event_type: Filter by event type (e.g., "pull_request", "issue_comment") + status: Filter by status (e.g., "success", "error", "partial") + + Architecture: + - Polling-based monitoring (LISTEN/NOTIFY can be added later) + - Tracks last_seen_timestamp to detect new events + - Applies filters server-side for efficiency + - Sends both individual events and summary deltas + """ + await websocket.accept() + self._websocket_connections.add(websocket) + + try: + self.logger.info( + f"WebSocket connection established for metrics streaming " + f"(repository={repository}, event_type={event_type}, status={status})" + ) + + # Track last seen timestamp to detect new events + last_seen_timestamp: datetime | None = None + + # Start monitoring for new metrics + while True: + try: + # Query for new webhook events since last_seen_timestamp + new_events = await self._fetch_new_events( + last_seen_timestamp=last_seen_timestamp, + repository=repository, + event_type=event_type, + status=status, + ) + + # Send updates for each new event + for event in new_events: + try: + message = self._build_metric_update_message(event) + await websocket.send_json(message) + + # Update last_seen_timestamp + event_timestamp = event.get("created_at") + if event_timestamp: + if last_seen_timestamp is None or event_timestamp > last_seen_timestamp: + last_seen_timestamp = event_timestamp + + except (WebSocketDisconnect, RuntimeError) as e: + # Connection closed - stop sending and exit monitoring loop + self.logger.debug(f"WebSocket connection closed: {type(e).__name__}") + raise WebSocketDisconnect() from e + + # Ensure we don't repeatedly fetch historical events if no events are found + if last_seen_timestamp is None: + last_seen_timestamp = datetime.now(UTC) + + # Wait before next poll + await asyncio.sleep(self.POLL_INTERVAL_SECONDS) + + except WebSocketDisconnect: + # Re-raise to exit outer try block + 
raise + except Exception: + self.logger.exception("Error during metrics monitoring iteration") + # Continue monitoring despite errors in individual iterations + await asyncio.sleep(self.POLL_INTERVAL_SECONDS) + + except WebSocketDisconnect: + self.logger.info("WebSocket client disconnected") + except Exception: + self.logger.exception("Error in WebSocket handler") + try: + await websocket.close(code=1011, reason="Internal server error") + except Exception: + self.logger.debug("Failed to close WebSocket after error") + finally: + self._websocket_connections.discard(websocket) + + async def _fetch_new_events( + self, + last_seen_timestamp: datetime | None, + repository: str | None, + event_type: str | None, + status: str | None, + ) -> list[dict[str, Any]]: + """ + Fetch new webhook events from database since last_seen_timestamp. + + Builds dynamic query based on filters and timestamp to retrieve only + new events efficiently. + + Args: + last_seen_timestamp: Timestamp of last seen event (None = get latest) + repository: Filter by repository + event_type: Filter by event type + status: Filter by status + + Returns: + List of webhook event dictionaries with normalized fields + + Architecture: + - Uses parameterized queries to prevent SQL injection + - Applies filters server-side for efficiency + - Returns newest events first (descending timestamp) + - Limits to 100 events per poll to prevent overwhelming clients + """ + # Build WHERE clause dynamically based on filters + where_conditions = [] + query_params: list[Any] = [] + param_counter = 1 + + if last_seen_timestamp is not None: + where_conditions.append(f"created_at > ${param_counter}") + query_params.append(last_seen_timestamp) + param_counter += 1 + + if repository is not None: + where_conditions.append(f"repository = ${param_counter}") + query_params.append(repository) + param_counter += 1 + + if event_type is not None: + where_conditions.append(f"event_type = ${param_counter}") + 
query_params.append(event_type) + param_counter += 1 + + if status is not None: + where_conditions.append(f"status = ${param_counter}") + query_params.append(status) + param_counter += 1 + + where_clause = "WHERE " + " AND ".join(where_conditions) if where_conditions else "" + + # Query for new events (newest first, limit to 100 per poll) + query = f""" + SELECT + delivery_id, + repository, + event_type, + action, + pr_number, + sender, + created_at, + processed_at, + duration_ms, + status, + error_message, + api_calls_count, + token_spend, + token_remaining + FROM webhooks + {where_clause} + ORDER BY created_at DESC + LIMIT 100 + """ # noqa: S608 - Safe: all user inputs passed as bind parameters + + rows = await self.db_manager.fetch(query, *query_params) + + # Convert rows to dictionaries and ensure datetime objects are serializable + events: list[dict[str, Any]] = [] + for row in rows: + event = dict(row) + events.append(event) + + self.logger.debug(f"Fetched {len(events)} new events (filters: {where_clause})") + return events + + def _build_metric_update_message(self, event: dict[str, Any]) -> dict[str, Any]: + """ + Build WebSocket message for metric update. + + Converts database row to WebSocket message format with: + - Event details (delivery_id, repository, event_type, etc.) 
+ - Summary delta (incremental counts for aggregation) + + Args: + event: Webhook event dictionary from database + + Returns: + WebSocket message dictionary matching specification + + Format: + { + "type": "metric_update", + "timestamp": "2025-11-24T12:34:56.789Z", + "data": { + "event": {...}, + "summary_delta": {...} + } + } + """ + # Extract event data + event_data = { + "delivery_id": event.get("delivery_id", ""), + "repository": event.get("repository", ""), + "event_type": event.get("event_type", ""), + "action": event.get("action"), + "pr_number": event.get("pr_number"), + "sender": event.get("sender", ""), + "status": event.get("status", ""), + "duration_ms": event.get("duration_ms", 0), + "created_at": self._serialize_datetime(event.get("created_at")), + "processed_at": self._serialize_datetime(event.get("processed_at")), + "error_message": event.get("error_message"), + "api_calls_count": event.get("api_calls_count", 0), + "token_spend": event.get("token_spend", 0), + "token_remaining": event.get("token_remaining", 0), + } + + # Calculate summary delta (incremental counts) + status = event.get("status", "") + summary_delta = { + "total_events": 1, + "successful_events": 1 if status == "success" else 0, + "failed_events": 1 if status == "error" else 0, + "partial_events": 1 if status == "partial" else 0, + } + + return { + "type": "metric_update", + "timestamp": datetime.now(UTC).isoformat(), + "data": { + "event": event_data, + "summary_delta": summary_delta, + }, + } + + def _serialize_datetime(self, dt: datetime | None) -> str | None: + """ + Serialize datetime to ISO format string for JSON. + + Args: + dt: datetime object to serialize + + Returns: + ISO format string or None if dt is None + """ + if dt is None: + return None + # Ensure timezone-aware datetime is serialized correctly + return dt.isoformat() + + def _get_dashboard_html(self) -> str: + """ + Load and return the metrics dashboard HTML template. 
+ + Returns: + HTML content for metrics dashboard interface + + Raises: + FileNotFoundError: If template file cannot be found + IOError: If template file cannot be read + """ + template_path = Path(__file__).parent / "templates" / "metrics_dashboard.html" + + try: + with open(template_path, encoding="utf-8") as f: + return f.read() + except FileNotFoundError: + self.logger.exception(f"Metrics dashboard template not found at {template_path}") + return self._get_fallback_html() + except OSError: + self.logger.exception("Failed to read metrics dashboard template") + return self._get_fallback_html() + + def _get_fallback_html(self) -> str: + """ + Provide a minimal fallback HTML when template loading fails. + + Returns: + Basic HTML page with error message + """ + return """ + + + + + GitHub Webhook Server - Metrics Dashboard (Error) + + + +
+
⚠️
+

Metrics Dashboard Template Error

+

The metrics dashboard template could not be loaded. Please check the server logs for details.

+ +
+ +""" diff --git a/webhook_server/web/static/css/metrics_dashboard.css b/webhook_server/web/static/css/metrics_dashboard.css new file mode 100644 index 00000000..1dff3e00 --- /dev/null +++ b/webhook_server/web/static/css/metrics_dashboard.css @@ -0,0 +1,946 @@ +:root { + /* Light theme variables - Matched to Log Viewer */ + --bg-color: #f5f5f5; + --container-bg: #ffffff; + --text-color: #333333; + --text-secondary: #666666; + --border-color: #dddddd; + --input-bg: #ffffff; + --input-border: #dddddd; + + /* Primary colors - Matched to Log Viewer */ + --primary-color: #007bff; + --primary-hover: #0056b3; + --success-color: #28a745; + --error-color: #dc3545; + --warning-color: #ffc107; + + /* Button colors */ + --button-bg: #007bff; + --button-hover: #0056b3; + + /* Status indicator colors - Matched to Log Viewer */ + --status-connected-bg: #d4edda; + --status-connected-text: #155724; + --status-connected-border: #c3e6cb; + --status-disconnected-bg: #f8d7da; + --status-disconnected-text: #721c24; + --status-disconnected-border: #f5c6cb; + + /* Chart colors */ + --chart-success: #28a745; + --chart-error: #dc3545; + --chart-primary: #007bff; + --chart-warning: #ffc107; + + /* Card shadows */ + --card-shadow: 0 2px 4px rgba(0, 0, 0, 0.1); + --card-shadow-hover: 0 4px 8px rgba(0, 0, 0, 0.15); +} + +[data-theme="dark"] { + /* Dark theme variables - Matched to Log Viewer */ + --bg-color: #1a1a1a; + --container-bg: #2d2d2d; + --text-color: #e0e0e0; + --text-secondary: #999999; + --border-color: #404040; + --input-bg: #3d3d3d; + --input-border: #555555; + + /* Primary colors */ + --primary-color: #0d6efd; + --primary-hover: #0b5ed7; + --success-color: #198754; + --error-color: #dc3545; + --warning-color: #ffc107; + + /* Button colors */ + --button-bg: #0d6efd; + --button-hover: #0b5ed7; + + /* Status indicator colors - Matched to Log Viewer */ + --status-connected-bg: #155724; + --status-connected-text: #d4edda; + --status-connected-border: #c3e6cb; + 
--status-disconnected-bg: #721c24; + --status-disconnected-text: #f8d7da; + --status-disconnected-border: #f5c6cb; + + /* Chart colors */ + --chart-success: #198754; + --chart-error: #dc3545; + --chart-primary: #0d6efd; + --chart-warning: #ffc107; + + /* Card shadows */ + --card-shadow: 0 2px 4px rgba(0, 0, 0, 0.3); + --card-shadow-hover: 0 4px 8px rgba(0, 0, 0, 0.4); +} + +/* Base styles */ +body { + font-family: Arial, sans-serif; /* Changed to Arial to match Log Viewer */ + margin: 0; + background-color: var(--bg-color); + color: var(--text-color); + font-size: 0.875rem; + line-height: 1.5; + transition: background-color 0.3s ease, color 0.3s ease; +} + +/* Typography */ +h1 { + font-size: 2rem; + font-weight: 700; + margin: 0 0 0.5rem 0; + color: var(--text-color); +} + +h2 { + font-size: 1.25rem; + font-weight: 600; + margin: 0 0 1rem 0; + color: var(--text-color); +} + +h3 { + font-size: 1rem; + font-weight: 600; + margin: 0; + color: var(--text-color); +} + +/* Container */ +.container { + max-width: 100%; + width: 100%; + margin: 0 auto; + background: var(--container-bg); /* Changed to container-bg to match Log Viewer container style */ + padding: 20px; /* Moved padding from body to container */ + border-radius: 8px; /* Added border radius */ + box-shadow: var(--card-shadow); /* Added shadow */ + transition: background-color 0.3s ease; + box-sizing: border-box; /* Ensures padding is included in width calculation */ +} + +/* Header */ +.header { + background: transparent; /* Changed to transparent as it's inside container now */ + border-radius: 0; + padding: 0 0 20px 0; /* Removed internal padding, added bottom padding */ + margin-bottom: 20px; + box-shadow: none; /* Removed shadow */ + display: flex; + justify-content: space-between; + align-items: center; + border: none; /* Removed border */ + border-bottom: 1px solid var(--border-color); /* Added bottom border only */ +} + +.header h1 { + font-size: 1.5rem; + margin-bottom: 0.25rem; +} + +.header p { + 
margin: 0; + color: var(--text-secondary); + font-size: 0.875rem; +} + +.theme-toggle { + background: var(--button-bg); + color: white; + border: none; + padding: 8px 16px; + border-radius: 4px; /* Changed to 4px to match Log Viewer */ + cursor: pointer; + font-size: 0.875rem; + font-weight: 500; + transition: background-color 0.3s ease; +} + +.theme-toggle:hover { + background: var(--button-hover); +} + +/* Status indicator */ +.status { + padding: 10px 16px; + margin-bottom: 15px; + border-radius: 4px; /* Changed to 4px */ + font-size: 0.875rem; + font-weight: 500; + display: flex; + align-items: center; + gap: 8px; + transition: all 0.3s ease; + position: relative; + overflow: hidden; +} + +.status.connected { + background-color: var(--status-connected-bg); + color: var(--status-connected-text); + border: 1px solid var(--status-connected-border); +} + +.status.connected::before { + content: ''; + display: inline-block; + width: 8px; + height: 8px; + border-radius: 50%; + background-color: var(--success-color); + margin-right: 8px; + animation: pulse 2s infinite; +} + +@keyframes pulse { + 0% { + transform: scale(0.95); + box-shadow: 0 0 0 0 rgba(40, 167, 69, 0.7); + } + 70% { + transform: scale(1); + box-shadow: 0 0 0 6px rgba(40, 167, 69, 0); + } + 100% { + transform: scale(0.95); + box-shadow: 0 0 0 0 rgba(40, 167, 69, 0); + } +} + +.status.disconnected { + background-color: var(--status-disconnected-bg); + color: var(--status-disconnected-text); + border: 1px solid var(--status-disconnected-border); +} + +/* Control Panel */ +.control-panel { + background: var(--container-bg); + border: 1px solid var(--border-color); + border-radius: 8px; + margin-bottom: 20px; + overflow: hidden; + transition: all 0.3s ease; + box-shadow: var(--card-shadow); +} + +.panel-header { + padding: 10px 20px; + background: var(--input-bg); /* Slightly different bg for header */ + border-bottom: 1px solid var(--border-color); + display: flex; + justify-content: space-between; + 
align-items: center; +} + +.btn-icon { + background: none; + border: none; + cursor: pointer; + font-size: 16px; + color: var(--text-secondary); + transition: transform 0.3s ease, color 0.2s ease; +} + +.btn-icon:hover { + color: var(--text-color); +} + +.collapse-btn { + transition: transform 0.3s ease; +} + +.panel-content { + padding: 20px; + max-height: 1000px; + overflow: hidden; + transition: max-height 0.3s ease-out, padding 0.3s ease-out, opacity 0.3s ease-out; + opacity: 1; +} + +.control-panel.collapsed .panel-content { + max-height: 0; + padding: 0 20px; + opacity: 0; +} + +.control-panel.collapsed .collapse-btn { + transform: rotate(180deg); +} + +.chart-content { + max-height: 1000px; + overflow: hidden; + transition: max-height 0.3s ease-out, opacity 0.3s ease-out; + opacity: 1; +} + +.chart-container.collapsed .chart-content { + max-height: 0; + opacity: 0; +} + +.chart-container.collapsed .collapse-btn { + transform: rotate(180deg); +} + +.filters { + display: grid; + grid-template-columns: repeat(auto-fill, minmax(200px, 1fr)); + gap: 15px; + margin-bottom: 20px; +} + +.filter-group { + display: flex; + flex-direction: column; +} + +.filter-group label { + margin-bottom: 5px; + font-weight: bold; /* Bold to match Log Viewer */ + font-size: 14px; /* Adjusted size */ + color: var(--text-color); +} + +.filter-group input, +.filter-group select { + padding: 8px 12px; + border: 1px solid var(--input-border); + background-color: var(--input-bg); + color: var(--text-color); + border-radius: 4px; /* Changed to 4px */ + width: 100%; + font-size: 0.875rem; + box-sizing: border-box; + transition: border-color 0.2s; +} + +.filter-group input:focus, +.filter-group select:focus { + outline: none; + border-color: var(--primary-color); +} + +.controls-row { + display: flex; + justify-content: space-between; + align-items: center; + border-top: 1px solid var(--border-color); + padding-top: 15px; +} + +.controls-actions { + display: flex; + gap: 10px; +} + +/* 
Toggle switch */ +.toggle-group { + display: flex; + align-items: center; + gap: 10px; +} + +.toggle-switch { + position: relative; + display: inline-block; + width: 40px; + height: 20px; +} + +.toggle-switch input { + opacity: 0; + width: 0; + height: 0; +} + +.slider { + position: absolute; + cursor: pointer; + top: 0; + left: 0; + right: 0; + bottom: 0; + background-color: var(--input-border); + transition: .4s; + border-radius: 20px; +} + +.slider:before { + position: absolute; + content: ""; + height: 16px; + width: 16px; + left: 2px; + bottom: 2px; + background-color: white; + transition: .4s; + border-radius: 50%; +} + +.toggle-switch input:checked + .slider { + background-color: var(--button-bg); +} + +.toggle-switch input:checked + .slider:before { + transform: translateX(20px); +} + +/* KPI Grid */ +.kpi-grid { + display: grid; + grid-template-columns: repeat(4, 1fr); + gap: 15px; + margin-bottom: 20px; +} + +/* KPI Card */ +.kpi-card { + background: var(--container-bg); + border: 1px solid var(--border-color); + border-radius: 8px; + padding: 15px; + box-shadow: var(--card-shadow); + transition: all 0.3s ease; +} + +.kpi-card:hover { + box-shadow: var(--card-shadow-hover); + transform: translateY(-2px); +} + +.kpi-label { + font-size: 0.75rem; + font-weight: 600; + text-transform: uppercase; + color: var(--text-secondary); + letter-spacing: 0.5px; + margin-bottom: 5px; +} + +.kpi-value { + font-size: 1.5rem; + font-weight: 700; + color: var(--text-color); + margin-bottom: 5px; + font-family: 'Monaco', 'Courier New', monospace; +} + +.kpi-trend { + display: flex; + align-items: center; + gap: 6px; + font-size: 0.75rem; + font-weight: 500; +} + +.kpi-trend.positive { color: var(--success-color); } +.kpi-trend.negative { color: var(--error-color); } +.kpi-trend.neutral { color: var(--text-secondary); } + +/* Dashboard Grid */ +.dashboard-grid { + display: grid; + grid-template-columns: repeat(2, 1fr); + gap: 15px; + margin-bottom: 20px; +} + +/* Chart 
container */ +.chart-container { + background: var(--container-bg); + border: 1px solid var(--border-color); + border-radius: 8px; + padding: 15px; + box-shadow: var(--card-shadow); + transition: background-color 0.3s ease; + display: flex; + flex-direction: column; +} + +.chart-container.full-width { + grid-column: 1 / -1; +} + +.chart-header { + display: flex; + justify-content: space-between; + align-items: center; + margin-bottom: 10px; + padding-bottom: 5px; + border-bottom: 1px solid var(--border-color); +} + +.header-actions { + display: flex; + gap: 8px; + align-items: center; +} + +.chart-wrapper { + position: relative; + height: 250px; /* Standardized height */ + width: 100%; + flex: 1; +} + +/* Tables inside charts */ +.table-wrapper { + overflow-y: auto; + height: 250px; +} + +table { + width: 100%; + border-collapse: collapse; + font-size: 0.75rem; +} + +thead th { + padding: 8px; + text-align: left; + font-weight: 600; + color: var(--text-secondary); + border-bottom: 1px solid var(--border-color); + background: var(--input-bg); + position: sticky; + top: 0; +} + +tbody td { + padding: 8px; + color: var(--text-color); + border-bottom: 1px solid var(--border-color); +} + +/* Buttons */ +.btn { + padding: 8px 16px; + background-color: var(--button-bg); + color: white; + border: none; + border-radius: 4px; /* Changed to 4px */ + cursor: pointer; + font-size: 0.875rem; + font-weight: 500; + transition: background-color 0.3s ease; +} + +.btn:hover { + background-color: var(--button-hover); +} + +.btn-primary { background-color: var(--button-bg); } +.btn-primary:hover { background-color: var(--button-hover); } + +.btn-success { background-color: var(--success-color); } +.btn-danger { background-color: var(--error-color); } + +/* Responsive breakpoints */ +@media (max-width: 1024px) { + .kpi-grid { + grid-template-columns: repeat(2, 1fr); + } + .dashboard-grid { + grid-template-columns: 1fr; /* Stack vertically on smaller screens */ + } +} + +@media 
(max-width: 640px) { + .header { + flex-direction: column; + align-items: flex-start; + gap: 10px; + } + .kpi-grid { + grid-template-columns: 1fr; + } + .controls-row { + flex-direction: column; + align-items: stretch; + gap: 15px; + } + .controls-actions { + flex-direction: column; + } +} + +/* PR Contributors Grid */ +.contributors-grid { + display: grid; + grid-template-columns: repeat(3, 1fr); + gap: 15px; + margin-top: 10px; +} + +.contributor-section { + background: var(--input-bg); + border: 1px solid var(--border-color); + border-radius: 4px; + padding: 10px; +} + +.contributor-section h3 { + font-size: 0.9rem; + font-weight: 600; + margin: 0 0 10px 0; + color: var(--text-color); + text-align: center; + border-bottom: 1px solid var(--border-color); + padding-bottom: 8px; +} + +.contributor-section .table-wrapper { + height: 280px; + overflow-y: auto; +} + +@media (max-width: 1024px) { + .contributors-grid { + grid-template-columns: 1fr; + } +} + +/* Loading Spinner Overlay + * + * Accessibility Requirements: + * - MUST include role="status" for screen reader announcement + * - MUST include aria-live="polite" to announce loading state changes + * - MUST include aria-busy="true" when visible, "false" when hidden + * - Consider aria-hidden="true" on decorative spinner element + * + * Example HTML: + *
 + * <div class="loading-spinner" role="status" aria-live="polite" aria-busy="true">
+ *   <div class="spinner" aria-hidden="true"></div>
+ *   <p>Loading data...</p>
+ * </div>
+ */ +.loading-spinner { + position: fixed; + top: 0; + left: 0; + width: 100%; + height: 100%; + background: rgba(0, 0, 0, 0.5); + display: flex; + flex-direction: column; + justify-content: center; + align-items: center; + z-index: 9999; +} + +.loading-spinner .spinner { + border: 4px solid rgba(255, 255, 255, 0.3); + border-top: 4px solid #007bff; + border-radius: 50%; + width: 50px; + height: 50px; + animation: spin 1s linear infinite; +} + +.loading-spinner p { + margin-top: 20px; + color: white; + font-size: 16px; + font-weight: 500; +} + +@keyframes spin { + 0% { transform: rotate(0deg); } + 100% { transform: rotate(360deg); } +} + +/* Modal Styles */ +.modal { + display: none; + position: fixed; + z-index: 10000; + left: 0; + top: 0; + width: 100%; + height: 100%; + background-color: rgba(0, 0, 0, 0.5); + animation: fadeIn 0.3s ease; +} + +.modal.show { + display: flex; + align-items: center; + justify-content: center; +} + +@keyframes fadeIn { + from { opacity: 0; } + to { opacity: 1; } +} + +.modal-content { + background-color: var(--container-bg); + border-radius: 8px; + padding: 0; + max-width: 500px; + width: 90%; + box-shadow: 0 4px 20px rgba(0, 0, 0, 0.3); + animation: slideUp 0.3s ease; +} + +@keyframes slideUp { + from { + transform: translateY(50px); + opacity: 0; + } + to { + transform: translateY(0); + opacity: 1; + } +} + +.modal-header { + display: flex; + justify-content: space-between; + align-items: center; + padding: 20px; + border-bottom: 1px solid var(--border-color); +} + +.modal-header h3 { + margin: 0; + color: var(--text-color); +} + +.close-modal { + background: none; + border: none; + font-size: 28px; + cursor: pointer; + color: var(--text-secondary); + transition: color 0.2s ease; + padding: 0; + width: 32px; + height: 32px; + display: flex; + align-items: center; + justify-content: center; +} + +.close-modal:hover { + color: var(--text-color); +} + +.modal-body { + padding: 20px; + max-height: 70vh; + overflow-y: auto; +} + 
+.setting-group { + margin-bottom: 20px; +} + +.setting-group:last-child { + margin-bottom: 0; +} + +.setting-group h4 { + margin: 0 0 10px 0; + color: var(--text-color); + font-size: 0.9rem; + font-weight: 600; +} + +.setting-group label { + display: block; + margin-bottom: 8px; + color: var(--text-color); + cursor: pointer; + user-select: none; +} + +.setting-group input[type="checkbox"], +.setting-group input[type="radio"] { + margin-right: 8px; + cursor: pointer; +} + +.setting-group select { + width: 100%; + padding: 8px 12px; + border-radius: 4px; + border: 1px solid var(--border-color); + background-color: var(--input-bg); + color: var(--text-color); + font-size: 0.875rem; + cursor: pointer; +} + +.setting-group select:focus { + outline: none; + border-color: var(--primary-color); +} + +.setting-group .btn { + margin-right: 8px; + margin-bottom: 8px; +} + +.btn-secondary { + background-color: var(--text-secondary); + color: white; +} + +.btn-secondary:hover { + background-color: var(--text-color); +} + +/* Pagination Controls */ +.pagination-controls { + display: flex; + justify-content: space-between; + align-items: center; + padding: 10px 15px; + margin-top: 10px; + border-top: 1px solid var(--border-color); + gap: 15px; + flex-wrap: wrap; +} + +.pagination-size { + display: flex; + align-items: center; + gap: 8px; +} + +.pagination-size label { + font-size: 0.875rem; + color: var(--text-color); + margin: 0; + font-weight: normal; +} + +.page-size-select { + padding: 4px 8px; + border: 1px solid var(--input-border); + background-color: var(--input-bg); + color: var(--text-color); + border-radius: 4px; + font-size: 0.875rem; + cursor: pointer; +} + +.page-size-select:focus { + outline: none; + border-color: var(--primary-color); +} + +.pagination-nav { + display: flex; + align-items: center; + gap: 10px; +} + +.pagination-info { + font-size: 0.875rem; + color: var(--text-secondary); + min-width: 100px; + text-align: center; +} + +.btn-pagination { + 
padding: 6px 12px; + background-color: var(--button-bg); + color: white; + border: none; + border-radius: 4px; + cursor: pointer; + font-size: 0.875rem; + transition: background-color 0.3s ease; +} + +.btn-pagination:hover:not(:disabled) { + background-color: var(--button-hover); +} + +.btn-pagination:disabled { + background-color: var(--text-secondary); + opacity: 0.5; + cursor: not-allowed; +} + +.pagination-total { + font-size: 0.875rem; + color: var(--text-secondary); +} + +@media (max-width: 768px) { + .pagination-controls { + flex-direction: column; + align-items: stretch; + } + + .pagination-size, + .pagination-nav, + .pagination-total { + justify-content: center; + } +} + +/* Clickable usernames */ +.clickable-username { + color: var(--primary-color); + cursor: pointer; + text-decoration: underline; + transition: color 0.2s ease; +} + +.clickable-username:hover { + color: var(--primary-hover); + text-decoration: none; +} + +/* Dark theme - improved clickable username contrast (match PR links) */ +[data-theme="dark"] .clickable-username { + color: #6eb3ff; /* Same as link color */ +} + +[data-theme="dark"] .clickable-username:hover { + color: #8dc5ff; /* Same as link hover color */ +} + +/* User PRs badges */ +.badge-merged { + display: inline-block; + padding: 2px 6px; + background-color: var(--success-color); + color: white; + border-radius: 3px; + font-size: 0.75rem; + font-weight: 600; + margin-left: 5px; +} + +/* Links - General styling with improved dark theme contrast */ +a { + color: var(--primary-color); + text-decoration: none; + transition: color 0.2s ease; +} + +a:hover { + color: var(--primary-hover); + text-decoration: underline; +} + +/* Dark theme - improved link contrast */ +[data-theme="dark"] a { + color: #6eb3ff; /* Lighter blue for better contrast on dark background */ +} + +[data-theme="dark"] a:hover { + color: #8dc5ff; /* Even lighter blue on hover */ +} diff --git a/webhook_server/web/static/js/metrics/api-client.js 
b/webhook_server/web/static/js/metrics/api-client.js new file mode 100644 index 00000000..cef76256 --- /dev/null +++ b/webhook_server/web/static/js/metrics/api-client.js @@ -0,0 +1,541 @@ +/** + * Metrics API Client - REST API Wrapper for GitHub Webhook Metrics + * + * This module provides a centralized, production-ready client for all metrics API endpoints + * with comprehensive error handling and timeout management. + * + * Features: + * - Automatic timeout handling with AbortController + * - Consistent error response format + * - URL parameter building with proper encoding + * - Singleton pattern for global access + * + * API Endpoints: + * - GET /api/metrics/summary - Overall metrics summary + * - GET /api/metrics/webhooks - Recent webhook events (with pagination) + * - GET /api/metrics/repositories - Repository statistics + * - GET /api/metrics/webhooks/{delivery_id} - Specific webhook event details + * + * Usage: + * import { apiClient } from './api-client.js'; + * + * // Fetch summary + * const summary = await apiClient.fetchSummary(); + * + * // Fetch webhooks with filters + * const webhooks = await apiClient.fetchWebhooks({ + * repository: 'org/repo', + * status: 'error', + * limit: 50 + * }); + * + * Error Handling: + * All methods return standardized error objects: + * { + * error: 'Error type', + * detail: 'Detailed error message', + * status: 404 // HTTP status code (if applicable) + * } + */ + +class MetricsAPIClient { + /** + * Create a new Metrics API client. + * + * @param {string} baseURL - Base URL for API endpoints (default: '/api/metrics') + * @param {number} timeout - Request timeout in milliseconds (default: 10000) + */ + constructor(baseURL = '/api/metrics', timeout = 10000) { + this.baseURL = baseURL; + this.timeout = timeout; + } + + /** + * Fetch overall metrics summary. 
+ * + * Returns aggregated metrics including: + * - Total events, success/error/partial counts + * - Top repositories by event volume + * - Event type distribution + * - Average processing time + * + * @param {string|null} startTime - ISO 8601 start time filter (optional) + * @param {string|null} endTime - ISO 8601 end time filter (optional) + * @returns {Promise} Summary data or error object + * + * Response format (success): + * { + * summary: { + * total_events: 1234, + * successful_events: 1180, + * failed_events: 45, + * partial_events: 9, + * avg_processing_time_ms: 523.4 + * }, + * top_repositories: [ + * { repository: 'org/repo1', total_events: 450, ... }, + * ... + * ], + * event_type_distribution: { + * pull_request: 567, + * issue_comment: 345, + * ... + * } + * } + * + * Response format (error): + * { + * error: 'Network error', + * detail: 'Failed to connect to server', + * status: null + * } + */ + async fetchSummary(startTime = null, endTime = null) { + const params = {}; + if (startTime) params.start_time = startTime; + if (endTime) params.end_time = endTime; + + return await this._fetch('/summary', params); + } + + /** + * Fetch webhook events with filtering and pagination. + * + * Supports comprehensive filtering by repository, event type, status, time range, + * and pagination for efficient data loading. 
+ * + * @param {Object} options - Filter and pagination options + * @param {string} options.repository - Filter by repository (e.g., 'org/repo') + * @param {string} options.event_type - Filter by event type (e.g., 'pull_request', 'issue_comment') + * @param {string} options.status - Filter by status ('success', 'error', 'partial') + * @param {string} options.start_time - ISO 8601 start time filter + * @param {string} options.end_time - ISO 8601 end time filter + * @param {number} options.page - Page number (1-indexed, default: 1) + * @param {number} options.page_size - Items per page (default: 10) + * @returns {Promise} Webhook events data or error object + * + * Response format (success): + * { + * data: [ + * { + * delivery_id: 'abc123...', + * repository: 'org/repo', + * event_type: 'pull_request', + * action: 'opened', + * pr_number: 42, + * sender: 'username', + * created_at: '2025-11-24T12:34:56.789Z', + * processed_at: '2025-11-24T12:35:01.234Z', + * duration_ms: 4445, + * status: 'success', + * error_message: null, + * api_calls_count: 12, + * token_spend: 150, + * token_remaining: 4850 + * }, + * ... 
+ * ], + * pagination: { + * total: 1234, + * page: 1, + * page_size: 100, + * total_pages: 13, + * has_next: true, + * has_prev: false + * } + * } + * + * Response format (error): + * { + * error: 'HTTP error', + * detail: 'Failed to fetch webhook events', + * status: 500 + * } + */ + async fetchWebhooks(options = {}) { + const params = {}; + + // Add filters if provided + if (options.repository) params.repository = options.repository; + if (options.event_type) params.event_type = options.event_type; + if (options.status) params.status = options.status; + if (options.start_time) params.start_time = options.start_time; + if (options.end_time) params.end_time = options.end_time; + + // Add pagination parameters + if (options.page !== undefined) params.page = options.page; + if (options.page_size !== undefined) params.page_size = options.page_size; + + return await this._fetch('/webhooks', params); + } + + /** + * Fetch repository statistics. + * + * Returns per-repository metrics including event counts, success rates, + * and processing times. + * + * @param {string|null} startTime - ISO 8601 start time filter (optional) + * @param {string|null} endTime - ISO 8601 end time filter (optional) + * @param {Object} extraParams - Additional parameters (page, page_size, repository, user) + * @returns {Promise} Repository statistics or error object + * + * Response format (success): + * { + * time_range: { + * start_time: '2025-11-01T00:00:00Z', + * end_time: '2025-11-25T23:59:59Z' + * }, + * data: [ + * { + * repository: 'org/repo1', + * total_events: 450, + * successful_events: 440, + * failed_events: 8, + * partial_events: 2, + * avg_processing_time_ms: 523.4, + * last_event_at: '2025-11-24T12:34:56.789Z' + * }, + * ... 
+ * ], + * pagination: { + * total: 50, + * page: 1, + * page_size: 10, + * total_pages: 5, + * has_next: true, + * has_prev: false + * } + * } + * + * Response format (error): + * { + * error: 'Request timeout', + * detail: 'Request exceeded 10000ms timeout', + * status: null + * } + */ + async fetchRepositories(startTime = null, endTime = null, extraParams = {}) { + const params = { ...extraParams }; + if (startTime) params.start_time = startTime; + if (endTime) params.end_time = endTime; + + const response = await this._fetch('/repositories', params); + if (response.error) return response; + + // Normalize response: extract data array while preserving pagination + return { + repositories: response.data || [], + data: response.data || [], + pagination: response.pagination, + time_range: response.time_range + }; + } + + /** + * Fetch event trends (time series data). + * + * Returns aggregated event counts over time buckets. + * + * @param {string|null} startTime - ISO 8601 start time filter + * @param {string|null} endTime - ISO 8601 end time filter + * @param {string} bucket - Time bucket ('hour', 'day') + * @returns {Promise} Trends data or error object + */ + async fetchTrends(startTime = null, endTime = null, bucket = 'hour') { + const params = { bucket }; + if (startTime) params.start_time = startTime; + if (endTime) params.end_time = endTime; + + return await this._fetch('/trends', params); + } + + /** + * Fetch PR contributors statistics. + * + * Returns PR creators, reviewers, and approvers with activity metrics. 
+ * + * @param {string|null} startTime - ISO 8601 start time filter (optional) + * @param {string|null} endTime - ISO 8601 end time filter (optional) + * @param {number} limit - Maximum contributors per category (default: 10) + * @param {Object} extraParams - Additional parameters (repository, user, page, page_size) + * @returns {Promise} Contributors data or error object + */ + async fetchContributors(startTime = null, endTime = null, limit = 10, extraParams = {}) { + const params = { limit, ...extraParams }; + if (startTime) params.start_time = startTime; + if (endTime) params.end_time = endTime; + + return await this._fetch('/contributors', params); + } + + /** + * Fetch user pull requests. + * + * Returns pull requests for a specific user or all users. + * + * @param {string|null} startTime - ISO 8601 start time filter (optional) + * @param {string|null} endTime - ISO 8601 end time filter (optional) + * @param {Object} params - Additional parameters (user, repository, page, page_size) + * @returns {Promise} User PRs data with pagination or error object + */ + async fetchUserPRs(startTime = null, endTime = null, params = {}) { + const queryParams = { ...params }; + if (startTime) queryParams.start_time = startTime; + if (endTime) queryParams.end_time = endTime; + + return await this._fetch('/user-prs', queryParams); + } + + /** + * Fetch specific webhook event by delivery ID. + * + * Returns complete details for a single webhook event including full payload. 
+ * + * @param {string} deliveryId - GitHub webhook delivery ID + * @returns {Promise} Webhook event details or error object + * + * Response format (success): + * { + * delivery_id: 'abc123...', + * repository: 'org/repo', + * event_type: 'pull_request', + * action: 'opened', + * pr_number: 42, + * sender: 'username', + * created_at: '2025-11-24T12:34:56.789Z', + * processed_at: '2025-11-24T12:35:01.234Z', + * duration_ms: 4445, + * status: 'success', + * error_message: null, + * api_calls_count: 12, + * token_spend: 150, + * token_remaining: 4850, + * payload: { ... } // Full GitHub webhook payload + * } + * + * Response format (error - not found): + * { + * error: 'Not found', + * detail: 'Webhook event not found', + * status: 404 + * } + */ + async fetchWebhookById(deliveryId) { + if (!deliveryId) { + return { + error: 'Invalid parameter', + detail: 'deliveryId is required', + status: null + }; + } + + return await this._fetch(`/webhooks/${encodeURIComponent(deliveryId)}`); + } + + /** + * Internal fetch wrapper with timeout and error handling. 
+ * + * @private + * @param {string} endpoint - API endpoint path (e.g., '/summary', '/webhooks') + * @param {Object} params - Query parameters as key-value pairs + * @returns {Promise} Response data or standardized error object + */ + async _fetch(endpoint, params = {}) { + const controller = new AbortController(); + + // Set up timeout + const timeoutId = setTimeout(() => { + controller.abort(); + console.warn(`[API Client] Request timeout for ${endpoint}`); + }, this.timeout); + + try { + // Build URL with query parameters + const url = this._buildURL(endpoint, params); + console.log(`[API Client] Fetching: ${url}`); + + // Execute fetch with timeout signal + const response = await fetch(url, { + method: 'GET', + headers: { + 'Accept': 'application/json', + }, + signal: controller.signal + }); + + // Clear timeout on successful response + clearTimeout(timeoutId); + + // Handle HTTP errors + if (!response.ok) { + return await this._handleHTTPError(response); + } + + // Parse JSON response + try { + const data = await response.json(); + console.log(`[API Client] Success: ${endpoint}`, data); + return data; + } catch (parseError) { + console.error(`[API Client] JSON parse error for ${endpoint}:`, parseError); + return { + error: 'Invalid response format', + detail: 'Server returned invalid JSON response', + status: response.status + }; + } + + } catch (error) { + // Clear timeout + clearTimeout(timeoutId); + + // Handle different error types + if (error.name === 'AbortError') { + console.warn(`[API Client] Request aborted: ${endpoint}`); + return { + error: 'Request timeout', + detail: `Request exceeded ${this.timeout}ms timeout`, + status: null + }; + } + + // Network errors (no connection, DNS failure, etc.) + if (error instanceof TypeError) { + console.error(`[API Client] Network error for ${endpoint}:`, error); + return { + error: 'Network error', + detail: 'Failed to connect to server. 
Please check your network connection.', + status: null + }; + } + + // Generic error fallback + console.error(`[API Client] Unexpected error for ${endpoint}:`, error); + return { + error: 'Unknown error', + detail: error.message || 'An unexpected error occurred', + status: null + }; + } + } + + /** + * Handle HTTP error responses with detailed error extraction. + * + * @private + * @param {Response} response - Fetch API Response object + * @returns {Promise} Standardized error object + */ + async _handleHTTPError(response) { + console.error(`[API Client] HTTP ${response.status} error: ${response.url}`); + + // Try to extract error detail from response body + let detail = `HTTP ${response.status} error`; + try { + const errorData = await response.json(); + if (errorData.detail) { + detail = errorData.detail; + } else if (errorData.message) { + detail = errorData.message; + } + } catch (error) { + // Failed to parse error response - use default detail + detail = response.statusText || detail; + } + + // Return standardized error object + return { + error: 'HTTP error', + detail: detail, + status: response.status + }; + } + + /** + * Build complete URL with query parameters. + * + * @private + * @param {string} endpoint - API endpoint path + * @param {Object} params - Query parameters as key-value pairs + * @returns {string} Complete URL with encoded query string + */ + _buildURL(endpoint, params = {}) { + const url = new URL(this.baseURL + endpoint, window.location.origin); + + // Add query parameters + for (const [key, value] of Object.entries(params)) { + if (value !== null && value !== undefined) { + url.searchParams.append(key, value); + } + } + + return url.toString(); + } + + /** + * Check if API is available by fetching summary endpoint. + * + * Useful for health checks and determining if metrics server is enabled. + * Distinguishes between "metrics disabled" and "temporary failures". 
+ * + * @returns {Promise} Object with availability status and reason + * @returns {boolean} available - True if API is available + * @returns {string} reason - Reason for unavailability ('disabled', 'network_error', 'server_error', etc.) + * @returns {number|null} status - HTTP status code if available + * + * @example + * const { available, reason, status } = await apiClient.isAvailable(); + * if (!available) { + * if (reason === 'disabled') { + * console.log('Metrics feature is disabled'); + * } else { + * console.log('Temporary failure:', reason); + * } + * } + */ + async isAvailable() { + const result = await this.fetchSummary(); + + if (!result.error) { + return { available: true, reason: 'ok', status: 200 }; + } + + // Distinguish between metrics disabled vs temporary failure + const status = result.status; + let reason = 'unknown'; + + if (status === 404) { + reason = 'disabled'; // Endpoint not found - metrics feature disabled + } else if (status === 503) { + reason = 'service_unavailable'; // Service temporarily unavailable + } else if (status >= 500) { + reason = 'server_error'; // Server-side error + } else if (status >= 400 && status < 500) { + reason = 'client_error'; // Client-side error (auth, bad request, etc.) 
+ } else if (!status) { + reason = 'network_error'; // Network failure (no response) + } + + return { + available: false, + reason: reason, + status: status, + detail: result.detail || result.error + }; + } +} + +// Export singleton instance for global access +export const apiClient = new MetricsAPIClient(); + +// Also export class for testing or multiple instances +export { MetricsAPIClient }; + +// Browser globals for non-module usage +if (typeof window !== 'undefined') { + window.MetricsAPI = { + apiClient: apiClient, + MetricsAPIClient: MetricsAPIClient + }; +} diff --git a/webhook_server/web/static/js/metrics/charts.js b/webhook_server/web/static/js/metrics/charts.js new file mode 100644 index 00000000..49361e75 --- /dev/null +++ b/webhook_server/web/static/js/metrics/charts.js @@ -0,0 +1,650 @@ +/** + * Chart.js Configuration for GitHub Webhook Server Metrics Dashboard + * + * Provides chart creation, update, and theme management functions for all + * visualizations in the metrics dashboard. 
+ * + * Chart Types: + * - Event Trends Chart (line) - Shows success/error/total events over time + * - Event Distribution Chart (pie) - Shows breakdown of event types + * - API Usage Chart (bar) - Shows API calls per day + * + * @module charts + */ + +// ============================================================================ +// Color Schemes +// ============================================================================ + +const COLORS = { + success: { + solid: 'rgba(16, 185, 129, 1)', // Green + alpha50: 'rgba(16, 185, 129, 0.5)', + alpha20: 'rgba(16, 185, 129, 0.2)', + }, + error: { + solid: 'rgba(239, 68, 68, 1)', // Red + alpha50: 'rgba(239, 68, 68, 0.5)', + alpha20: 'rgba(239, 68, 68, 0.2)', + }, + total: { + solid: 'rgba(37, 99, 235, 1)', // Blue + alpha50: 'rgba(37, 99, 235, 0.5)', + alpha20: 'rgba(37, 99, 235, 0.2)', + }, + primary: { + solid: 'rgba(37, 99, 235, 1)', // Primary blue + alpha50: 'rgba(37, 99, 235, 0.5)', + alpha20: 'rgba(37, 99, 235, 0.2)', + }, + // Pie chart color palette + pie: [ + 'rgba(37, 99, 235, 0.8)', // Blue + 'rgba(16, 185, 129, 0.8)', // Green + 'rgba(251, 191, 36, 0.8)', // Yellow + 'rgba(239, 68, 68, 0.8)', // Red + 'rgba(168, 85, 247, 0.8)', // Purple + 'rgba(236, 72, 153, 0.8)', // Pink + 'rgba(14, 165, 233, 0.8)', // Sky + 'rgba(34, 197, 94, 0.8)', // Emerald + 'rgba(249, 115, 22, 0.8)', // Orange + 'rgba(139, 92, 246, 0.8)', // Violet + ], +}; + +// Theme-specific colors +const THEME_COLORS = { + light: { + gridColor: 'rgba(0, 0, 0, 0.1)', + textColor: '#374151', + borderColor: '#e5e7eb', + }, + dark: { + gridColor: 'rgba(255, 255, 255, 0.1)', + textColor: '#d1d5db', + borderColor: '#374151', + }, +}; + +// ============================================================================ +// Chart Creation Functions +// ============================================================================ + +/** + * Create Event Trends Chart (Line Chart) + * + * Displays three lines: + * - Success events (green) + * - Error events 
(red) + * - Total events (blue) + * + * @param {string} canvasId - Canvas element ID + * @returns {Chart} Chart.js instance + */ +function createEventTrendsChart(canvasId) { + const ctx = document.getElementById(canvasId); + if (!ctx) { + console.error(`Canvas element with ID '${canvasId}' not found`); + return null; + } + + const isDark = document.documentElement.getAttribute('data-theme') === 'dark'; + const theme = isDark ? THEME_COLORS.dark : THEME_COLORS.light; + + return new Chart(ctx, { + type: 'line', + data: { + labels: [], + datasets: [ + { + label: 'Success Events', + data: [], + borderColor: COLORS.success.solid, + backgroundColor: COLORS.success.alpha20, + borderWidth: 2, + tension: 0.4, + fill: true, + pointRadius: 4, + pointHoverRadius: 6, + pointBackgroundColor: COLORS.success.solid, + pointBorderColor: '#fff', + pointBorderWidth: 2, + }, + { + label: 'Error Events', + data: [], + borderColor: COLORS.error.solid, + backgroundColor: COLORS.error.alpha20, + borderWidth: 2, + tension: 0.4, + fill: true, + pointRadius: 4, + pointHoverRadius: 6, + pointBackgroundColor: COLORS.error.solid, + pointBorderColor: '#fff', + pointBorderWidth: 2, + }, + { + label: 'Total Events', + data: [], + borderColor: COLORS.total.solid, + backgroundColor: COLORS.total.alpha20, + borderWidth: 2, + tension: 0.4, + fill: true, + pointRadius: 4, + pointHoverRadius: 6, + pointBackgroundColor: COLORS.total.solid, + pointBorderColor: '#fff', + pointBorderWidth: 2, + }, + ], + }, + options: { + responsive: true, + maintainAspectRatio: false, + interaction: { + mode: 'index', + intersect: false, + }, + plugins: { + legend: { + display: true, + position: 'top', + labels: { + color: theme.textColor, // Use theme-specific text color + padding: 15, + font: { + size: 13, + weight: '600', + }, + usePointStyle: true, + pointStyle: 'circle', + boxWidth: 12, + boxHeight: 12, + }, + }, + tooltip: { + mode: 'index', + intersect: false, + backgroundColor: isDark ? 
'rgba(31, 41, 55, 0.95)' : 'rgba(255, 255, 255, 0.95)', + titleColor: theme.textColor, + bodyColor: theme.textColor, + borderColor: theme.borderColor, + borderWidth: 1, + padding: 12, + displayColors: true, + callbacks: { + title: (tooltipItems) => { + return tooltipItems[0].label; + }, + label: (context) => { + const label = context.dataset.label || ''; + const value = context.parsed.y; + return `${label}: ${value}`; + }, + }, + }, + }, + scales: { + x: { + grid: { + display: false, + }, + ticks: { + color: theme.textColor, + maxRotation: 0, + autoSkip: true, + maxTicksLimit: 8, + }, + border: { + color: theme.borderColor, + }, + }, + y: { + beginAtZero: true, + grid: { + color: theme.gridColor, + drawBorder: false, + }, + ticks: { + color: theme.textColor, + precision: 0, + }, + border: { + display: false, + }, + }, + }, + }, + }); +} + +/** + * Create Event Distribution Chart (Pie Chart) + * + * Displays event types as pie segments with percentage labels. + * + * @param {string} canvasId - Canvas element ID + * @returns {Chart} Chart.js instance + */ +function createEventDistributionChart(canvasId) { + const ctx = document.getElementById(canvasId); + if (!ctx) { + console.error(`Canvas element with ID '${canvasId}' not found`); + return null; + } + + const isDark = document.documentElement.getAttribute('data-theme') === 'dark'; + const theme = isDark ? THEME_COLORS.dark : THEME_COLORS.light; + + return new Chart(ctx, { + type: 'pie', + data: { + labels: [], + datasets: [ + { + data: [], + backgroundColor: COLORS.pie, + borderColor: isDark ? 
'#1f2937' : '#ffffff', + borderWidth: 2, + hoverBorderWidth: 3, + hoverOffset: 8, + }, + ], + }, + options: { + responsive: true, + maintainAspectRatio: false, + plugins: { + legend: { + display: true, + position: 'bottom', + labels: { + color: theme.textColor, // Use theme-specific text color + padding: 12, + font: { + size: 12, + weight: '600', + }, + generateLabels: (chart) => { + const data = chart.data; + if (data.labels.length && data.datasets.length) { + const dataset = data.datasets[0]; + const total = dataset.data.reduce((acc, val) => acc + val, 0); + + return data.labels.map((label, i) => { + const value = dataset.data[i]; + const percentage = total > 0 ? ((value / total) * 100).toFixed(1) : 0; + + return { + text: `${label} (${percentage}%)`, + fillStyle: dataset.backgroundColor[i], + strokeStyle: isDark ? '#1f2937' : '#ffffff', + lineWidth: 2, + hidden: false, + index: i, + fontColor: theme.textColor, // Add theme-specific text color + }; + }); + } + return []; + }, + }, + }, + tooltip: { + backgroundColor: isDark ? 'rgba(31, 41, 55, 0.95)' : 'rgba(255, 255, 255, 0.95)', + titleColor: theme.textColor, + bodyColor: theme.textColor, + borderColor: theme.borderColor, + borderWidth: 1, + padding: 12, + displayColors: true, + callbacks: { + label: (context) => { + const label = context.label || ''; + const value = context.parsed; + const dataset = context.dataset; + const total = dataset.data.reduce((acc, val) => acc + val, 0); + const percentage = total > 0 ? ((value / total) * 100).toFixed(1) : 0; + + return `${label}: ${value} (${percentage}%)`; + }, + }, + }, + }, + }, + }); +} + +/** + * Create API Usage Chart (Bar Chart) + * + * Displays API calls per day as vertical bars. 
+ * + * @param {string} canvasId - Canvas element ID + * @returns {Chart} Chart.js instance + */ +function createAPIUsageChart(canvasId) { + const ctx = document.getElementById(canvasId); + if (!ctx) { + console.error(`Canvas element with ID '${canvasId}' not found`); + return null; + } + + const isDark = document.documentElement.getAttribute('data-theme') === 'dark'; + const theme = isDark ? THEME_COLORS.dark : THEME_COLORS.light; + + return new Chart(ctx, { + type: 'bar', + data: { + labels: [], + datasets: [ + { + label: 'API Calls', + data: [], + backgroundColor: COLORS.primary.alpha50, + borderColor: COLORS.primary.solid, + borderWidth: 2, + borderRadius: 6, + hoverBackgroundColor: COLORS.primary.solid, + }, + ], + }, + options: { + responsive: true, + maintainAspectRatio: false, + interaction: { + mode: 'index', + intersect: false, + }, + plugins: { + legend: { + display: true, + position: 'top', + labels: { + color: theme.textColor, // Use theme-specific text color + padding: 15, + font: { + size: 13, + weight: '600', + }, + usePointStyle: true, + pointStyle: 'rectRounded', + boxWidth: 12, + boxHeight: 12, + }, + }, + tooltip: { + backgroundColor: isDark ? 
'rgba(31, 41, 55, 0.95)' : 'rgba(255, 255, 255, 0.95)', + titleColor: theme.textColor, + bodyColor: theme.textColor, + borderColor: theme.borderColor, + borderWidth: 1, + padding: 12, + displayColors: true, + callbacks: { + title: (tooltipItems) => { + return tooltipItems[0].label; + }, + label: (context) => { + const label = context.dataset.label || ''; + const value = context.parsed.y; + return `${label}: ${value}`; + }, + }, + }, + }, + scales: { + x: { + grid: { + display: false, + }, + ticks: { + color: theme.textColor, + maxRotation: 0, + autoSkip: true, + maxTicksLimit: 6, + }, + border: { + color: theme.borderColor, + }, + }, + y: { + beginAtZero: true, + grid: { + color: theme.gridColor, + drawBorder: false, + }, + ticks: { + color: theme.textColor, + precision: 0, + }, + border: { + display: false, + }, + }, + }, + }, + }); +} + +// ============================================================================ +// Chart Update Functions +// ============================================================================ + +/** + * Update Event Trends Chart with new data + * + * @param {Chart} chart - Chart.js instance + * @param {Object} data - Chart data + * @param {Array} data.labels - Time labels + * @param {Array} data.success - Success event counts + * @param {Array} data.errors - Error event counts + * @param {Array} data.total - Total event counts + */ +function updateEventTrendsChart(chart, data) { + if (!chart || !data) { + console.error('Invalid chart or data provided to updateEventTrendsChart'); + return; + } + + // Update labels + chart.data.labels = data.labels || []; + + // Update datasets + if (chart.data.datasets[0]) { + chart.data.datasets[0].data = data.success || []; + } + if (chart.data.datasets[1]) { + chart.data.datasets[1].data = data.errors || []; + } + if (chart.data.datasets[2]) { + chart.data.datasets[2].data = data.total || []; + } + + // Trigger chart update + chart.update('active'); +} + +/** + * Update Event Distribution Chart 
with new data + * + * @param {Chart} chart - Chart.js instance + * @param {Object} data - Chart data + * @param {Array} data.labels - Event type labels + * @param {Array} data.values - Event counts + */ +function updateEventDistributionChart(chart, data) { + if (!chart || !data) { + console.error('Invalid chart or data provided to updateEventDistributionChart'); + return; + } + + // Update labels + chart.data.labels = data.labels || []; + + // Update dataset + if (chart.data.datasets[0]) { + chart.data.datasets[0].data = data.values || []; + + // Ensure we have enough colors + const colorCount = COLORS.pie.length; + const dataCount = data.values ? data.values.length : 0; + if (dataCount > colorCount) { + // Generate additional colors if needed + const additionalColors = []; + for (let i = 0; i < dataCount - colorCount; i++) { + const hue = (i * 137.5) % 360; // Golden angle for distribution + additionalColors.push(`hsla(${hue}, 70%, 60%, 0.8)`); + } + chart.data.datasets[0].backgroundColor = [...COLORS.pie, ...additionalColors]; + } + } + + // Trigger chart update + chart.update('active'); +} + +/** + * Update API Usage Chart with new data + * + * @param {Chart} chart - Chart.js instance + * @param {Object} data - Chart data + * @param {Array} data.labels - Date labels + * @param {Array} data.values - API call counts + */ +function updateAPIUsageChart(chart, data) { + if (!chart || !data) { + console.error('Invalid chart or data provided to updateAPIUsageChart'); + return; + } + + // Update labels + chart.data.labels = data.labels || []; + + // Update dataset + if (chart.data.datasets[0]) { + chart.data.datasets[0].data = data.values || []; + } + + // Trigger chart update + chart.update('active'); +} + +// ============================================================================ +// Theme Management +// ============================================================================ + +/** + * Update chart theme (dark/light mode) + * + * @param {Chart} chart - 
Chart.js instance + * @param {boolean} isDark - True for dark theme, false for light theme + */ +function updateChartTheme(chart, isDark) { + if (!chart) { + console.error('Invalid chart provided to updateChartTheme'); + return; + } + + const theme = isDark ? THEME_COLORS.dark : THEME_COLORS.light; + + // Update legend colors + if (chart.options.plugins?.legend?.labels) { + chart.options.plugins.legend.labels.color = theme.textColor; + } + + // Update tooltip colors + if (chart.options.plugins?.tooltip) { + chart.options.plugins.tooltip.backgroundColor = isDark + ? 'rgba(31, 41, 55, 0.95)' + : 'rgba(255, 255, 255, 0.95)'; + chart.options.plugins.tooltip.titleColor = theme.textColor; + chart.options.plugins.tooltip.bodyColor = theme.textColor; + chart.options.plugins.tooltip.borderColor = theme.borderColor; + } + + // Update scale colors + if (chart.options.scales?.x) { + if (chart.options.scales.x.ticks) { + chart.options.scales.x.ticks.color = theme.textColor; + } + if (chart.options.scales.x.border) { + chart.options.scales.x.border.color = theme.borderColor; + } + } + + if (chart.options.scales?.y) { + if (chart.options.scales.y.grid) { + chart.options.scales.y.grid.color = theme.gridColor; + } + if (chart.options.scales.y.ticks) { + chart.options.scales.y.ticks.color = theme.textColor; + } + } + + // Update pie chart border colors + if (chart.config.type === 'pie' && chart.data.datasets[0]) { + chart.data.datasets[0].borderColor = isDark ? 
'#1f2937' : '#ffffff'; + } + + // Trigger chart update + chart.update('active'); +} + +/** + * Update all charts theme + * + * @param {Object} charts - Object containing all chart instances + * @param {boolean} isDark - True for dark theme, false for light theme + */ +function updateAllChartsTheme(charts, isDark) { + if (!charts || typeof charts !== 'object') { + console.error('Invalid charts object provided to updateAllChartsTheme'); + return; + } + + Object.values(charts).forEach(chart => { + if (chart) { + updateChartTheme(chart, isDark); + } + }); +} + +// ============================================================================ +// Exports +// ============================================================================ + +// Export functions for use in dashboard.js +if (typeof module !== 'undefined' && module.exports) { + // Node.js/CommonJS + module.exports = { + createEventTrendsChart, + createEventDistributionChart, + createAPIUsageChart, + updateEventTrendsChart, + updateEventDistributionChart, + updateAPIUsageChart, + updateChartTheme, + updateAllChartsTheme, + COLORS, + THEME_COLORS, + }; +} + +// Browser globals +if (typeof window !== 'undefined') { + window.MetricsCharts = { + createEventTrendsChart, + createEventDistributionChart, + createAPIUsageChart, + updateEventTrendsChart, + updateEventDistributionChart, + updateAPIUsageChart, + updateChartTheme, + updateAllChartsTheme, + COLORS, + THEME_COLORS, + }; +} diff --git a/webhook_server/web/static/js/metrics/dashboard.js b/webhook_server/web/static/js/metrics/dashboard.js new file mode 100644 index 00000000..4c77b4d8 --- /dev/null +++ b/webhook_server/web/static/js/metrics/dashboard.js @@ -0,0 +1,2014 @@ +/** + * Metrics Dashboard - Main JavaScript Controller + * + * This module handles: + * - Initial data loading via REST API + * - KPI card updates + * - Chart updates via charts.js + * - Theme management (dark/light mode) + * - Time range filtering + * - Manual refresh + */ + +// Dashboard 
Controller +class MetricsDashboard { + constructor() { + this.apiClient = null; // Will be initialized in initialize() + this.charts = {}; // Will hold Chart.js instances + this.currentData = { + summary: null, + webhooks: null, + repositories: null + }; + this.timeRange = '24h'; // Default time range + this.repositoryFilter = ''; // Repository filter lowercase for local comparisons (empty = show all) + this.repositoryFilterRaw = ''; // Repository filter original case for API calls + this.userFilter = ''; // User filter (empty = show all) + + // Pagination state for each section + this.pagination = { + topRepositories: { page: 1, pageSize: 10, total: 0, totalPages: 0 }, + recentEvents: { page: 1, pageSize: 10, total: 0, totalPages: 0 }, + prCreators: { page: 1, pageSize: 10, total: 0, totalPages: 0 }, + prReviewers: { page: 1, pageSize: 10, total: 0, totalPages: 0 }, + prApprovers: { page: 1, pageSize: 10, total: 0, totalPages: 0 }, + userPrs: { page: 1, pageSize: 10, total: 0, totalPages: 0 } + }; + + // Load saved page sizes from localStorage + Object.keys(this.pagination).forEach(section => { + const saved = localStorage.getItem(`pageSize_${section}`); + if (saved) { + this.pagination[section].pageSize = parseInt(saved); + } + }); + + this.initialize(); + } + + /** + * Initialize dashboard - load theme, data, and charts. + */ + async initialize() { + console.log('[Dashboard] Initializing metrics dashboard'); + + // 1. Initialize API client (from api-client.js loaded globally) + this.apiClient = window.MetricsAPI?.apiClient; + if (!this.apiClient) { + console.error('[Dashboard] MetricsAPI client not found - ensure api-client.js is loaded'); + this.showError('Metrics API client not available. Please refresh the page.'); + return; + } + + // 2. Set ready status + this.updateConnectionStatus(true); + + // 3. Initialize theme + this.initializeTheme(); + + // 4. Set up event listeners + this.setupEventListeners(); + + // 5. 
Populate date inputs with default 24h range logic so they are not empty + const { startTime, endTime } = this.getTimeRangeDates(this.timeRange); + const startInput = document.getElementById('startTime'); + const endInput = document.getElementById('endTime'); + if (startInput && endInput) { + startInput.value = this.formatDateForInput(startTime); + endInput.value = this.formatDateForInput(endTime); + } + + // 6. Show loading state + this.showLoading(true); + + try { + // 7. Load initial data via REST API + await this.loadInitialData(); + + // 8. Initialize charts (calls functions from charts.js) + this.initializeCharts(); + + console.log('[Dashboard] Dashboard initialization complete'); + } catch (error) { + console.error('[Dashboard] Initialization error:', error); + this.showError('Failed to load dashboard data. Please refresh the page.'); + } finally { + this.showLoading(false); + } + } + + /** + * Load initial data from REST API endpoints. + */ + async loadInitialData() { + console.log('[Dashboard] Loading initial data...'); + + try { + const { startTime, endTime } = this.getTimeRangeDates(this.timeRange); + console.log(`[Dashboard] Time range: ${this.timeRange} (${startTime} to ${endTime})`); + + // Fetch all data in parallel using apiClient + // Use bucket='hour' for ranges <= 24h, 'day' for others + const bucket = (this.timeRange === '1h' || this.timeRange === '24h') ? 
'hour' : 'day'; + + const [summaryData, webhooksData, reposData, trendsData, contributorsData, userPrsData] = await Promise.all([ + this.apiClient.fetchSummary(startTime, endTime), + this.apiClient.fetchWebhooks({ page: 1, page_size: 10, start_time: startTime, end_time: endTime }), + this.apiClient.fetchRepositories(startTime, endTime, { page: 1, page_size: 10 }), + this.apiClient.fetchTrends(startTime, endTime, bucket).catch(err => { + console.warn('[Dashboard] Trends endpoint not available:', err); + return { trends: [] }; // Return empty trends if endpoint doesn't exist + }), + this.apiClient.fetchContributors(startTime, endTime, 10, { page: 1, page_size: 10 }), + this.apiClient.fetchUserPRs(startTime, endTime, { page: 1, page_size: 10 }).catch(err => { + console.warn('[Dashboard] User PRs endpoint error:', err); + return { data: [], pagination: { total: 0, page: 1, page_size: 10, total_pages: 0 } }; + }) + ]); + + // Check for errors in responses + if (summaryData.error) { + console.error('[Dashboard] Summary fetch error:', summaryData); + throw new Error(summaryData.detail || 'Failed to fetch summary data'); + } + if (webhooksData.error) { + console.error('[Dashboard] Webhooks fetch error:', webhooksData); + throw new Error(webhooksData.detail || 'Failed to fetch webhooks data'); + } + if (reposData.error) { + console.error('[Dashboard] Repositories fetch error:', reposData); + throw new Error(reposData.detail || 'Failed to fetch repositories data'); + } + if (trendsData.error) { + console.error('[Dashboard] Trends fetch error:', trendsData); + // Don't fail completely if trends fail, just log it + } + + // Store data (preserve full paginated responses for tables) + this.currentData = { + summary: summaryData.summary || summaryData, + topRepositories: summaryData.top_repositories || [], // Store top-level top_repositories + webhooks: webhooksData, // Store full response with pagination + repositories: reposData, // Store full response with pagination + trends: 
trendsData.trends || [], + contributors: contributorsData, // Store full response with pagination + eventTypeDistribution: summaryData.event_type_distribution || {} // Store top-level event_type_distribution + }; + + console.log('[Dashboard] Initial data loaded:', this.currentData); + + // Update UI with loaded data + this.updateKPICards(summaryData.summary || summaryData); + this.updateCharts(this.currentData); + + // Update User PRs table + console.log('[Dashboard] Updating User PRs table with data:', userPrsData); + this.updateUserPRsTable(userPrsData); + + // Populate user filter dropdown + this.populateUserFilter(); + + } catch (error) { + console.error('[Dashboard] Error loading initial data:', error); + throw error; + } + } + + /** + * Calculate start and end dates based on selected time range. + * @param {string} range - Time range identifier + * @returns {Object} { startTime, endTime } in ISO format + */ + getTimeRangeDates(range) { + const now = new Date(); + let start = new Date(); + + switch (range) { + case '1h': + start.setHours(now.getHours() - 1); + break; + case '24h': + start.setHours(now.getHours() - 24); + break; + case '7d': + start.setDate(now.getDate() - 7); + break; + case '30d': + start.setDate(now.getDate() - 30); + break; + case 'custom': { + // Handle custom range inputs + const startInput = document.getElementById('startTime'); + const endInput = document.getElementById('endTime'); + if (startInput && endInput && startInput.value && endInput.value) { + return { + startTime: new Date(startInput.value).toISOString(), + endTime: new Date(endInput.value).toISOString() + }; + } + // Fallback to 24h if inputs invalid + start.setHours(now.getHours() - 24); + break; + } + default: + // Default to 24h if unknown + start.setHours(now.getHours() - 24); + } + + return { + startTime: start.toISOString(), + endTime: now.toISOString() + }; + } + + /** + * Format ISO date string for datetime-local input. 
+ * Converts ISO string to local timezone and formats for HTML5 datetime-local input. + * + * @param {string} isoString - ISO date string + * @returns {string} Formatted string (YYYY-MM-DDThh:mm) + */ + formatDateForInput(isoString) { + const date = new Date(isoString); + // Adjust for local timezone for display + const localDate = new Date(date.getTime() - (date.getTimezoneOffset() * 60000)); + return localDate.toISOString().slice(0, 16); + } + + /** + * Update KPI cards with new data. + * + * @param {Object} summary - Summary data + */ + updateKPICards(summary) { + if (!summary) { + console.warn('[Dashboard] No summary data to update KPI cards'); + return; + } + + // Total Events - use 0 as fallback, not undefined + this.updateKPICard('total-events', { + value: summary.total_events ?? 0, + trend: summary.total_events_trend ?? 0 + }); + + // Success Rate - calculate from available data + const successRate = summary.success_rate ?? + (summary.total_events > 0 ? (summary.successful_events / summary.total_events * 100) : 0); + this.updateKPICard('success-rate', { + value: `${successRate.toFixed(2)}%`, + trend: summary.success_rate_trend ?? 0 + }); + + // Failed Events + this.updateKPICard('failed-events', { + value: summary.failed_events ?? 0, + trend: summary.failed_events_trend ?? 0 + }); + + // Average Duration + const avgDuration = summary.avg_duration_ms ?? summary.avg_processing_time_ms ?? 0; + this.updateKPICard('avg-duration', { + value: window.MetricsUtils.formatDuration(avgDuration), + trend: summary.avg_duration_trend ?? 0 + }); + + console.log('[Dashboard] KPI cards updated'); + } + + /** + * Update individual KPI card. 
+ * + * @param {string} cardId - KPI card element ID + * @param {Object} data - Card data + */ + updateKPICard(cardId, data) { + const cardElement = document.getElementById(cardId); + if (!cardElement) { + console.warn(`[Dashboard] KPI card not found: ${cardId}`); + return; + } + + // Update value + const valueElement = cardElement.querySelector('.kpi-value'); + if (valueElement) { + valueElement.textContent = data.value; + } + + // Update trend + const trendElement = cardElement.querySelector('.kpi-trend'); + if (trendElement) { + const trend = data.trend || 0; + const trendClass = trend > 0 ? 'positive' : trend < 0 ? 'negative' : 'neutral'; + const trendIcon = trend > 0 ? '↑' : trend < 0 ? '↓' : '→'; + + trendElement.className = `kpi-trend ${trendClass}`; + trendElement.innerHTML = ` + ${trendIcon} + ${Math.abs(trend).toFixed(1)}% + vs last period + `; + } + } + + /** + * Initialize all charts (calls functions from charts.js). + */ + initializeCharts() { + console.log('[Dashboard] Initializing charts...'); + + if (!window.MetricsCharts) { + console.error('[Dashboard] MetricsCharts library not loaded'); + return; + } + + if (!this.currentData.summary || !this.currentData.webhooks || !this.currentData.repositories) { + console.warn('[Dashboard] Missing data for chart initialization'); + return; + } + + try { + // Event Trends Chart (line chart) + this.charts.eventTrends = window.MetricsCharts.createEventTrendsChart('eventTrendsChart'); + + // Event Distribution Pie Chart + this.charts.eventDistribution = window.MetricsCharts.createEventDistributionChart('eventDistributionChart'); + + // API Usage Chart (bar chart) + this.charts.apiUsage = window.MetricsCharts.createAPIUsageChart('apiUsageChart'); + + // Initial chart update with data + this.updateCharts(this.currentData); + + console.log('[Dashboard] Charts initialized:', Object.keys(this.charts)); + } catch (error) { + console.error('[Dashboard] Error initializing charts:', error); + } + } + + /** + * Normalize 
repositories data from paginated response to array. + * Handles both paginated response objects and plain arrays. + * Supports both current ({ data: [...] }) and legacy ({ repositories: [...] }) shapes. + * + * @param {Object|Array} repositories - Repositories data (paginated response or array) + * @returns {Array} Normalized array of repositories + */ + normalizeRepositories(repositories) { + if (!repositories) { + return []; + } + // If already an array, return as-is + if (Array.isArray(repositories)) { + return repositories; + } + // Handle paginated response format: { data: [...] } or legacy { repositories: [...] } + return repositories.data || repositories.repositories || []; + } + + /** + * Update all charts with new data. + * + * @param {Object} data - Complete dashboard data + */ + updateCharts(data) { + if (!data || !window.MetricsCharts) { + console.warn('[Dashboard] No data or MetricsCharts library not available'); + return; + } + + // Create working copy to avoid mutating original data + // This allows filter to be cleared and original data restored + // Extract arrays from paginated responses for filtering + const workingData = { + summary: { ...data.summary }, + webhooks: data.webhooks?.data || data.webhooks || [], + repositories: this.normalizeRepositories(data.repositories), + trends: data.trends, + contributors: data.contributors ? 
{ + pr_creators: data.contributors.pr_creators?.data || data.contributors.pr_creators || [], + pr_reviewers: data.contributors.pr_reviewers?.data || data.contributors.pr_reviewers || [], + pr_approvers: data.contributors.pr_approvers?.data || data.contributors.pr_approvers || [] + } : null, + eventTypeDistribution: data.eventTypeDistribution + }; + + const summary = workingData.summary; + let webhooks = workingData.webhooks; + let repositories = workingData.repositories; + const trends = workingData.trends; + + // Apply repository filter + let filteredWebhooks = webhooks; + let filteredRepositories = repositories; + let filteredContributors = workingData.contributors; + let filteredSummary = summary; + + if (this.repositoryFilter) { + // Filter webhooks and repositories + filteredWebhooks = this.filterDataByRepository(webhooks); + filteredRepositories = this.filterDataByRepository(repositories); + + // Recalculate event type distribution from filtered webhooks + const eventTypeCount = {}; + filteredWebhooks.forEach(event => { + const eventType = event.event_type || 'unknown'; + eventTypeCount[eventType] = (eventTypeCount[eventType] || 0) + 1; + }); + workingData.eventTypeDistribution = eventTypeCount; + + // Filter contributors by repository + // Extract repository from webhook events to find users active in this repo + if (workingData.contributors) { + const usersInRepo = new Set(); + filteredWebhooks.forEach(event => { + const user = event.sender || event.user || (event.payload && (event.payload.sender || event.payload.user)); + if (user) { + usersInRepo.add(user); + } + }); + + filteredContributors = { + pr_creators: (workingData.contributors.pr_creators || []).filter(c => usersInRepo.has(c.user)), + pr_reviewers: (workingData.contributors.pr_reviewers || []).filter(c => usersInRepo.has(c.user)), + pr_approvers: (workingData.contributors.pr_approvers || []).filter(c => usersInRepo.has(c.user)) + }; + } + + // Recalculate summary for filtered data + 
filteredSummary = { + ...summary, // Keep original fields + total_events: filteredWebhooks.length, + successful_events: filteredWebhooks.filter(e => e.status === 'success').length, + failed_events: filteredWebhooks.filter(e => e.status === 'error').length, + }; + filteredSummary.success_rate = filteredSummary.total_events > 0 + ? (filteredSummary.successful_events / filteredSummary.total_events * 100) + : 0; + + console.log(`[Dashboard] Filtered by repository: ${filteredWebhooks.length} events, ${filteredRepositories.length} repos`); + } + + // Apply user filter second (on already-filtered data) + if (this.userFilter && filteredContributors) { + filteredContributors = { + pr_creators: this.filterDataByUser(filteredContributors.pr_creators || []), + pr_reviewers: this.filterDataByUser(filteredContributors.pr_reviewers || []), + pr_approvers: this.filterDataByUser(filteredContributors.pr_approvers || []) + }; + + console.log(`[Dashboard] Filtered by user: ${filteredContributors.pr_creators.length} creators, ${filteredContributors.pr_reviewers.length} reviewers, ${filteredContributors.pr_approvers.length} approvers`); + } + + // ALWAYS update KPI cards (whether filtered or not) + this.updateKPICards(filteredSummary); + + // Use filtered data for chart updates + webhooks = filteredWebhooks; + repositories = filteredRepositories; + if (filteredContributors) { + workingData.contributors = filteredContributors; + } + + try { + // Update Event Trends Chart (line chart) + if (this.charts.eventTrends) { + let trendsData; + + // When filtering by repository, always use filtered webhooks + if (this.repositoryFilter) { + // Use filtered webhooks to calculate trends + trendsData = this.prepareEventTrendsData(webhooks); + console.log('[Dashboard] Event Trends using filtered webhooks data:', { + totalEvents: webhooks.length, + errors: trendsData.errors.reduce((a, b) => a + b, 0), + success: trendsData.success.reduce((a, b) => a + b, 0) + }); + } else if (trends && trends.length > 
0) { + // Use aggregated trends data from API + trendsData = this.processTrendsData(trends); + console.log('[Dashboard] Event Trends using API trends data:', { + buckets: trends.length, + totalFailed: trends.reduce((sum, t) => sum + t.failed_events, 0), + totalSuccess: trends.reduce((sum, t) => sum + t.successful_events, 0) + }); + } else if (webhooks) { + // Fallback to calculating from webhooks list (less accurate) + trendsData = this.prepareEventTrendsData(webhooks); + console.log('[Dashboard] Event Trends using fallback webhooks data:', { + totalEvents: webhooks.length, + errors: trendsData.errors.reduce((a, b) => a + b, 0), + success: trendsData.success.reduce((a, b) => a + b, 0) + }); + } + + if (trendsData) { + window.MetricsCharts.updateEventTrendsChart(this.charts.eventTrends, trendsData); + console.log('[Dashboard] Event Trends chart data:', { + totalErrors: trendsData.errors.reduce((a, b) => a + b, 0), + totalSuccess: trendsData.success.reduce((a, b) => a + b, 0), + totalTotal: trendsData.total.reduce((a, b) => a + b, 0) + }); + } + } + + // Update Event Distribution Chart (pie chart) + if (this.charts.eventDistribution && summary) { + const eventDist = workingData.eventTypeDistribution || summary.event_type_distribution || {}; + + if (eventDist && Object.keys(eventDist).length > 0) { + const distData = { + labels: Object.keys(eventDist), + values: Object.values(eventDist) + }; + window.MetricsCharts.updateEventDistributionChart(this.charts.eventDistribution, distData); + console.log('[Dashboard] Event distribution chart updated'); + } else { + console.warn('[Dashboard] No event type distribution data available'); + } + } + + // Update API Usage Chart (bar chart) + if (this.charts.apiUsage && repositories) { + const apiData = this.prepareAPIUsageData(repositories); + window.MetricsCharts.updateAPIUsageChart(this.charts.apiUsage, apiData); + } + + // Update Repository Table with top repositories from summary (has percentage field) + if 
(data.topRepositories && data.topRepositories.length > 0) { + // Top repositories from summary endpoint (has percentage field) + // Apply repository filter if active + const topRepos = this.repositoryFilter + ? data.topRepositories.filter(repo => + repo.repository && repo.repository.toLowerCase().includes(this.repositoryFilter)) + : data.topRepositories; + this.updateRepositoryTable(topRepos); + } + + // Update Recent Events Table with filtered data + if (data.webhooks) { + // Preserve pagination shape if original had it, otherwise pass filtered array + const webhooksForTable = data.webhooks.data + ? { ...data.webhooks, data: filteredWebhooks } + : filteredWebhooks; + this.updateRecentEventsTable(webhooksForTable); + } + + // Update Contributors Tables with filtered data + if (data.contributors) { + // Preserve pagination shapes for each contributor type + const contributorsForTable = { + pr_creators: data.contributors.pr_creators?.data + ? { ...data.contributors.pr_creators, data: filteredContributors.pr_creators } + : filteredContributors.pr_creators, + pr_reviewers: data.contributors.pr_reviewers?.data + ? { ...data.contributors.pr_reviewers, data: filteredContributors.pr_reviewers } + : filteredContributors.pr_reviewers, + pr_approvers: data.contributors.pr_approvers?.data + ? { ...data.contributors.pr_approvers, data: filteredContributors.pr_approvers } + : filteredContributors.pr_approvers + }; + this.updateContributorsTables(contributorsForTable); + } + + console.log('[Dashboard] Charts updated'); + } catch (error) { + console.error('[Dashboard] Error updating charts:', error); + } + } + + /** + * Process trends data from API for chart. 
+ * @param {Array} trends - Trends data from API + * @returns {Object} Chart data + */ + processTrendsData(trends) { + // Sort by bucket time + const sortedTrends = [...trends].sort((a, b) => new Date(a.bucket) - new Date(b.bucket)); + + // Format labels based on bucket granularity + const labels = sortedTrends.map(t => { + const date = new Date(t.bucket); + // Simple heuristic: if buckets are < 24h apart, show time, else date + // For now just use local time string + return date.toLocaleTimeString([], { hour: '2-digit', minute: '2-digit' }) + + (this.timeRange !== '1h' && this.timeRange !== '24h' ? ` ${date.getMonth() + 1}/${date.getDate()}` : ''); + }); + + return { + labels: labels, + success: sortedTrends.map(t => t.successful_events), + errors: sortedTrends.map(t => t.failed_events), + total: sortedTrends.map(t => t.total_events) + }; + } + + /** + * Update repository table with new data. + * + * @param {Object|Array} reposData - Repository data with pagination ({data: [...], pagination: {...}}) or plain array + */ + updateRepositoryTable(reposData) { + const tableBody = document.getElementById('repository-table-body'); + if (!tableBody) { + console.warn('[Dashboard] Repository table body not found'); + return; + } + + // Handle both paginated response and plain array formats + const repositories = Array.isArray(reposData) ? reposData : (reposData.data || reposData.repositories || []); + const pagination = Array.isArray(reposData) ? 
null : reposData.pagination; + + // Update pagination state if available + if (pagination) { + this.pagination.topRepositories = { + page: pagination.page, + pageSize: pagination.page_size, + total: pagination.total, + totalPages: pagination.total_pages + }; + } + + if (!repositories || !Array.isArray(repositories) || repositories.length === 0) { + tableBody.innerHTML = 'No repository data available'; + return; + } + + // Generate table rows - show percentage of total events + const rows = repositories.map(repo => { + const percentage = repo.percentage || 0; // Percentage of total events + return ` + + ${this.escapeHtml(repo.repository || 'Unknown')} + ${repo.total_events || 0} + ${percentage.toFixed(1)}% + + `; + }).join(''); + + tableBody.innerHTML = rows; + + // Add pagination controls + const container = document.querySelector('[data-section="top-repositories"] .chart-content'); + const existingControls = container?.querySelector('.pagination-controls'); + if (existingControls) { + existingControls.remove(); + } + + if (container && pagination) { + container.insertAdjacentHTML('beforeend', this.createPaginationControls('top-repositories')); + } + } + + /** + * Update recent events table with new data. + * + * @param {Object|Array} eventsData - Recent webhook events (can be array or {data: [...], pagination: {...}}) + */ + updateRecentEventsTable(eventsData) { + const tableBody = document.querySelector('#recentEventsTable tbody'); + if (!tableBody) { + console.warn('[Dashboard] Recent events table body not found'); + return; + } + + // Handle both array format and paginated response format + const events = Array.isArray(eventsData) ? eventsData : (eventsData.data || eventsData.events || []); + const pagination = Array.isArray(eventsData) ? 
null : eventsData.pagination; + + // Update pagination state if available + if (pagination) { + this.pagination.recentEvents = { + page: pagination.page, + pageSize: pagination.page_size, + total: pagination.total, + totalPages: pagination.total_pages + }; + } + + if (!events || !Array.isArray(events) || events.length === 0) { + tableBody.innerHTML = 'No recent events'; + return; + } + + // Generate table rows + const rows = events.map(event => { + const time = new Date(event.created_at).toLocaleTimeString([], { hour: '2-digit', minute: '2-digit' }); + const status = event.status || 'unknown'; + const statusClass = status === 'success' ? 'status-success' : status === 'error' ? 'status-error' : 'status-partial'; + + return ` + + ${time} + ${this.escapeHtml(event.repository || 'Unknown')} + ${this.escapeHtml(event.event_type || 'unknown')} + ${status} + + `; + }).join(''); + + tableBody.innerHTML = rows; + + // Add pagination controls + const container = document.querySelector('[data-section="recent-events"] .chart-content'); + const existingControls = container?.querySelector('.pagination-controls'); + if (existingControls) { + existingControls.remove(); + } + + if (container && pagination) { + container.insertAdjacentHTML('beforeend', this.createPaginationControls('recent-events')); + } + } + + /** + * Update PR contributors tables with new data. 
+ * + * @param {Object} contributors - Contributors data with pagination + */ + updateContributorsTables(contributors) { + if (!contributors) { + console.warn('[Dashboard] No contributors data available'); + return; + } + + // Extract data and pagination for PR Creators + const prCreatorsData = contributors.pr_creators?.data || contributors.pr_creators || []; + const prCreatorsPagination = contributors.pr_creators?.pagination; + + if (prCreatorsPagination) { + this.pagination.prCreators = { + page: prCreatorsPagination.page, + pageSize: prCreatorsPagination.page_size, + total: prCreatorsPagination.total, + totalPages: prCreatorsPagination.total_pages + }; + } + + // Update PR Creators table + this.updateContributorsTable( + 'pr-creators-table-body', + prCreatorsData, + (creator) => ` + + ${this.escapeHtml(creator.user)} + ${creator.total_prs} + ${creator.merged_prs} + ${creator.closed_prs} + ${creator.avg_commits_per_pr || 0} + + ` + ); + + // Add pagination controls for PR Creators + const creatorsContainer = document.querySelector('[data-section="pr-creators"]'); + const creatorsExistingControls = creatorsContainer?.querySelector('.pagination-controls'); + if (creatorsExistingControls) { + creatorsExistingControls.remove(); + } + if (creatorsContainer && prCreatorsPagination) { + creatorsContainer.insertAdjacentHTML('beforeend', this.createPaginationControls('prCreators')); + } + + // Extract data and pagination for PR Reviewers + const prReviewersData = contributors.pr_reviewers?.data || contributors.pr_reviewers || []; + const prReviewersPagination = contributors.pr_reviewers?.pagination; + + if (prReviewersPagination) { + this.pagination.prReviewers = { + page: prReviewersPagination.page, + pageSize: prReviewersPagination.page_size, + total: prReviewersPagination.total, + totalPages: prReviewersPagination.total_pages + }; + } + + // Update PR Reviewers table + this.updateContributorsTable( + 'pr-reviewers-table-body', + prReviewersData, + (reviewer) => ` + + 
${this.escapeHtml(reviewer.user)} + ${reviewer.total_reviews} + ${reviewer.prs_reviewed} + ${reviewer.avg_reviews_per_pr} + + ` + ); + + // Add pagination controls for PR Reviewers + const reviewersContainer = document.querySelector('[data-section="pr-reviewers"]'); + const reviewersExistingControls = reviewersContainer?.querySelector('.pagination-controls'); + if (reviewersExistingControls) { + reviewersExistingControls.remove(); + } + if (reviewersContainer && prReviewersPagination) { + reviewersContainer.insertAdjacentHTML('beforeend', this.createPaginationControls('prReviewers')); + } + + // Extract data and pagination for PR Approvers + const prApproversData = contributors.pr_approvers?.data || contributors.pr_approvers || []; + const prApproversPagination = contributors.pr_approvers?.pagination; + + if (prApproversPagination) { + this.pagination.prApprovers = { + page: prApproversPagination.page, + pageSize: prApproversPagination.page_size, + total: prApproversPagination.total, + totalPages: prApproversPagination.total_pages + }; + } + + // Update PR Approvers table + this.updateContributorsTable( + 'pr-approvers-table-body', + prApproversData, + (approver) => ` + + ${this.escapeHtml(approver.user)} + ${approver.total_approvals} + ${approver.prs_approved} + + ` + ); + + // Add pagination controls for PR Approvers + const approversContainer = document.querySelector('[data-section="pr-approvers"]'); + const approversExistingControls = approversContainer?.querySelector('.pagination-controls'); + if (approversExistingControls) { + approversExistingControls.remove(); + } + if (approversContainer && prApproversPagination) { + approversContainer.insertAdjacentHTML('beforeend', this.createPaginationControls('prApprovers')); + } + } + + /** + * Generic contributor table updater. 
+ * + * @param {string} tableBodyId - Table body element ID + * @param {Array} data - Contributors data array + * @param {Function} rowGenerator - Function to generate table row HTML + */ + updateContributorsTable(tableBodyId, data, rowGenerator) { + const tableBody = document.getElementById(tableBodyId); + if (!tableBody) { + console.warn(`[Dashboard] Table body not found: ${tableBodyId}`); + return; + } + + if (!data || data.length === 0) { + tableBody.innerHTML = 'No data available'; + return; + } + + const rows = data.map(rowGenerator).join(''); + tableBody.innerHTML = rows; + } + + /** + * Set up event listeners for UI controls. + */ + setupEventListeners() { + // Theme toggle button + const themeToggle = document.getElementById('theme-toggle'); + if (themeToggle) { + themeToggle.addEventListener('click', () => this.toggleTheme()); + } + + // Time range selector + const timeRangeSelect = document.getElementById('time-range-select'); + if (timeRangeSelect) { + timeRangeSelect.addEventListener('change', (e) => this.changeTimeRange(e.target.value)); + } + + // Custom date inputs + const startTimeInput = document.getElementById('startTime'); + const endTimeInput = document.getElementById('endTime'); + + if (startTimeInput && endTimeInput) { + const handleCustomDateChange = () => { + // Switch dropdown to custom if not already + if (timeRangeSelect && timeRangeSelect.value !== 'custom') { + timeRangeSelect.value = 'custom'; + this.timeRange = 'custom'; + } + // Only reload if both dates are valid + if (startTimeInput.value && endTimeInput.value) { + this.changeTimeRange('custom'); + } + }; + + startTimeInput.addEventListener('change', handleCustomDateChange); + endTimeInput.addEventListener('change', handleCustomDateChange); + } + + // Manual refresh button + const refreshButton = document.getElementById('refresh-button'); + if (refreshButton) { + refreshButton.addEventListener('click', () => this.manualRefresh()); + } + + // Repository filter + const 
repositoryFilterInput = document.getElementById('repositoryFilter'); + if (repositoryFilterInput) { + repositoryFilterInput.addEventListener('input', (e) => this.filterByRepository(e.target.value)); + } + + // User filter + const userFilterSelect = document.getElementById('userFilter'); + if (userFilterSelect) { + userFilterSelect.addEventListener('change', (e) => this.filterByUser(e.target.value)); + } + + // Clickable usernames + document.addEventListener('click', (e) => { + if (e.target.classList.contains('clickable-username')) { + const username = e.target.dataset.user; + const userFilterSelect = document.getElementById('userFilter'); + if (userFilterSelect) { + userFilterSelect.value = username; + this.filterByUser(username); + } + } + }); + + // Pagination listeners + this.setupPaginationListeners(); + + // Collapse buttons + this.setupCollapseButtons(); + + // Chart settings buttons + const eventTrendsSettings = document.getElementById('eventTrendsSettings'); + if (eventTrendsSettings) { + eventTrendsSettings.addEventListener('click', () => this.openModal('eventTrendsModal')); + } + + const apiUsageSettings = document.getElementById('apiUsageSettings'); + if (apiUsageSettings) { + apiUsageSettings.addEventListener('click', () => this.openModal('apiUsageModal')); + } + + // Close modal buttons + document.querySelectorAll('.close-modal').forEach(btn => { + btn.addEventListener('click', (e) => { + const modal = e.target.closest('.modal'); + if (modal) this.closeModal(modal.id); + }); + }); + + // Click outside modal to close + document.querySelectorAll('.modal').forEach(modal => { + modal.addEventListener('click', (e) => { + if (e.target === modal) this.closeModal(modal.id); + }); + }); + + // Event Trends settings + document.getElementById('showSuccess')?.addEventListener('change', () => this.updateTrendsVisibility()); + document.getElementById('showErrors')?.addEventListener('change', () => this.updateTrendsVisibility()); + 
document.getElementById('showTotal')?.addEventListener('change', () => this.updateTrendsVisibility()); + document.querySelectorAll('input[name="trendChartType"]').forEach(radio => { + radio.addEventListener('change', (e) => this.changeTrendsChartType(e.target.value)); + }); + document.getElementById('exportTrendsCsv')?.addEventListener('click', () => this.exportTrendsData('csv')); + document.getElementById('exportTrendsJson')?.addEventListener('click', () => this.exportTrendsData('json')); + document.getElementById('downloadTrendsChart')?.addEventListener('click', () => this.downloadChart('eventTrendsChart')); + + // API Usage settings + document.getElementById('apiTopN')?.addEventListener('change', (e) => this.updateApiTopN(parseInt(e.target.value))); + document.querySelectorAll('input[name="apiSortOrder"]').forEach(radio => { + radio.addEventListener('change', (e) => this.updateApiSortOrder(e.target.value)); + }); + document.querySelectorAll('input[name="apiChartType"]').forEach(radio => { + radio.addEventListener('change', (e) => this.changeApiChartType(e.target.value)); + }); + document.getElementById('exportApiCsv')?.addEventListener('click', () => this.exportApiData('csv')); + document.getElementById('exportApiJson')?.addEventListener('click', () => this.exportApiData('json')); + document.getElementById('downloadApiChart')?.addEventListener('click', () => this.downloadChart('apiUsageChart')); + + console.log('[Dashboard] Event listeners set up'); + } + + /** + * Set up collapse button listeners and restore collapsed state. + */ + setupCollapseButtons() { + const collapseButtons = document.querySelectorAll('.collapse-btn'); + collapseButtons.forEach(btn => { + btn.addEventListener('click', (e) => { + const sectionId = e.currentTarget.dataset.section; + this.toggleSection(sectionId); + }); + }); + + // Restore collapsed state from localStorage + this.restoreCollapsedSections(); + } + + /** + * Toggle a section's collapsed state. 
+ * @param {string} sectionId - Section identifier + */ + toggleSection(sectionId) { + const section = document.querySelector(`[data-section="${sectionId}"]`); + if (!section) { + console.warn(`[Dashboard] Section not found: ${sectionId}`); + return; + } + + section.classList.toggle('collapsed'); + + // Update button icon + const btn = section.querySelector(`.collapse-btn[data-section="${sectionId}"]`); + if (btn) { + btn.textContent = section.classList.contains('collapsed') ? '▲' : '▼'; + btn.title = section.classList.contains('collapsed') ? 'Expand' : 'Collapse'; + } + + // Save state + this.saveCollapsedState(sectionId, section.classList.contains('collapsed')); + + console.log(`[Dashboard] Section ${sectionId} ${section.classList.contains('collapsed') ? 'collapsed' : 'expanded'}`); + } + + /** + * Save collapsed state to localStorage. + * @param {string} sectionId - Section identifier + * @param {boolean} isCollapsed - Whether section is collapsed + */ + saveCollapsedState(sectionId, isCollapsed) { + const state = JSON.parse(localStorage.getItem('collapsedSections') || '{}'); + state[sectionId] = isCollapsed; + localStorage.setItem('collapsedSections', JSON.stringify(state)); + } + + /** + * Restore collapsed sections from localStorage. + */ + restoreCollapsedSections() { + const state = JSON.parse(localStorage.getItem('collapsedSections') || '{}'); + Object.keys(state).forEach(sectionId => { + if (state[sectionId]) { + const section = document.querySelector(`[data-section="${sectionId}"]`); + if (section) { + section.classList.add('collapsed'); + const btn = section.querySelector(`.collapse-btn[data-section="${sectionId}"]`); + if (btn) { + btn.textContent = '▲'; + btn.title = 'Expand'; + } + } + } + }); + console.log('[Dashboard] Collapsed sections restored from localStorage'); + } + + /** + * Initialize theme from localStorage and apply it. 
+ */ + initializeTheme() { + const savedTheme = localStorage.getItem('theme') || 'light'; + document.documentElement.setAttribute('data-theme', savedTheme); + console.log(`[Dashboard] Theme initialized: ${savedTheme}`); + } + + /** + * Toggle between dark and light theme. + */ + toggleTheme() { + const currentTheme = document.documentElement.getAttribute('data-theme') || 'light'; + const newTheme = currentTheme === 'light' ? 'dark' : 'light'; + + document.documentElement.setAttribute('data-theme', newTheme); + localStorage.setItem('theme', newTheme); + + console.log(`[Dashboard] Theme changed to: ${newTheme}`); + + // Recreate charts with new theme colors + if (this.currentData && this.currentData.summary) { + // Destroy existing charts + Object.values(this.charts).forEach(chart => { + if (chart && typeof chart.destroy === 'function') { + chart.destroy(); + } + }); + + // Clear charts object + this.charts = {}; + + // Recreate charts with new theme + this.initializeCharts(); + } + } + + /** + * Change time range and reload data. + * + * @param {string} timeRange - New time range ('24h', '7d', '30d', etc.) 
+ */ + async changeTimeRange(timeRange) { + console.log(`[Dashboard] Changing time range to: ${timeRange}`); + this.timeRange = timeRange; + + // If preset selected, populate inputs + if (timeRange !== 'custom') { + const { startTime, endTime } = this.getTimeRangeDates(timeRange); + const startInput = document.getElementById('startTime'); + const endInput = document.getElementById('endTime'); + + if (startInput && endInput) { + startInput.value = this.formatDateForInput(startTime); + endInput.value = this.formatDateForInput(endTime); + } + } + + // For a custom range, only proceed once both start and end dates are set + if (timeRange === 'custom') { + const startInput = document.getElementById('startTime'); + const endInput = document.getElementById('endTime'); + if (!startInput?.value || !endInput?.value) { + return; + } + } + + this.showLoading(true); + try { + await this.loadInitialData(); + } catch (error) { + console.error('[Dashboard] Error changing time range:', error); + this.showError('Failed to load data for selected time range'); + } finally { + this.showLoading(false); + } + } + + /** + * Manually refresh all data. + */ + async manualRefresh() { + console.log('[Dashboard] Manual refresh triggered'); + + this.showLoading(true); + try { + await this.loadInitialData(); + this.updateCharts(this.currentData); + this.showSuccessNotification('Dashboard refreshed successfully'); + } catch (error) { + console.error('[Dashboard] Error during manual refresh:', error); + this.showError('Failed to refresh dashboard'); + } finally { + this.showLoading(false); + } + } + + /** + * Filter dashboard data by repository name. 
+ * + * @param {string} filterValue - Repository name or partial name to filter by + */ + filterByRepository(filterValue) { + // Keep original input for API call (backend may be case-sensitive) + const trimmedFilter = filterValue.trim(); + + // Check if filter actually changed (case-insensitive comparison) + if (trimmedFilter.toLowerCase() === this.repositoryFilter) { + return; // No change, skip update + } + + // Store BOTH: original case for API calls, lowercase for local filtering + this.repositoryFilterRaw = trimmedFilter; + this.repositoryFilter = trimmedFilter.toLowerCase(); + console.log(`[Dashboard] Filtering by repository: "${this.repositoryFilter || '(showing all)'}"`); + + // ALWAYS re-render charts and tables (even when filter is cleared) + if (this.currentData) { + this.updateCharts(this.currentData); + } + } + + /** + * Filter data array by repository name. + * + * @param {Array} data - Array of data objects with 'repository' field + * @returns {Array} Filtered data + */ + filterDataByRepository(data) { + if (!this.repositoryFilter || !Array.isArray(data)) { + return data; // No filter or invalid data, return as-is + } + + // Use lowercase for local includes() check + return data.filter(item => { + const repo = (item.repository || '').toLowerCase(); + return repo.includes(this.repositoryFilter); + }); + } + + /** + * Filter dashboard data by user. + * + * @param {string} filterValue - User to filter by + */ + filterByUser(filterValue) { + const newFilter = filterValue.trim(); + + // Check if filter actually changed + if (newFilter === this.userFilter) { + return; // No change, skip update + } + + this.userFilter = newFilter; + console.log(`[Dashboard] Filtering by user: "${this.userFilter || '(showing all users)'}"`); + + // Re-render charts and tables + if (this.currentData) { + this.updateCharts(this.currentData); + } + } + + /** + * Filter data array by user. 
+ * + * @param {Array} data - Array of contributor data + * @returns {Array} Filtered data + */ + filterDataByUser(data) { + if (!this.userFilter || !Array.isArray(data)) { + return data; // No filter or invalid data, return as-is + } + + return data.filter(item => { + const user = (item.user || '').toLowerCase(); + return user === this.userFilter.toLowerCase(); + }); + } + + /** + * Populate user filter dropdown from contributors data. + */ + populateUserFilter() { + const userFilterSelect = document.getElementById('userFilter'); + if (!userFilterSelect) { + console.warn('[Dashboard] User filter dropdown not found'); + return; + } + + // Collect all unique users from contributors data + const users = new Set(); + + if (this.currentData.contributors) { + const { pr_creators, pr_reviewers, pr_approvers } = this.currentData.contributors; + + // Extract data arrays from paginated responses + const creatorsData = pr_creators?.data || pr_creators || []; + const reviewersData = pr_reviewers?.data || pr_reviewers || []; + const approversData = pr_approvers?.data || pr_approvers || []; + + // Add users from all contributor types + [...creatorsData, ...reviewersData, ...approversData] + .forEach(contributor => { + if (contributor.user) { + users.add(contributor.user); + } + }); + } + + // Clear existing options except "All Users" + userFilterSelect.innerHTML = ''; + + // Add user options sorted alphabetically + Array.from(users).sort().forEach(user => { + const option = document.createElement('option'); + option.value = user; + option.textContent = user; + userFilterSelect.appendChild(option); + }); + + console.log(`[Dashboard] User filter populated with ${users.size} users`); + } + + /** + * Update connection status indicator. 
+ * + * @param {boolean} ready - Dashboard ready status + */ + updateConnectionStatus(ready) { + const statusElement = document.getElementById('connection-status'); + const statusText = document.getElementById('statusText'); + + if (!statusElement || !statusText) { + return; + } + + if (ready) { + statusElement.className = 'status connected'; + statusText.textContent = 'Ready'; + } else { + statusElement.className = 'status disconnected'; + statusText.textContent = 'Initializing...'; + } + + console.log(`[Dashboard] Status: ${ready ? 'Ready' : 'Initializing'}`); + } + + /** + * Show loading spinner. + * + * @param {boolean} show - Whether to show or hide loading spinner + */ + showLoading(show) { + const spinner = document.getElementById('loading-spinner'); + if (spinner) { + spinner.style.display = show ? 'flex' : 'none'; + spinner.setAttribute('aria-busy', show ? 'true' : 'false'); + } + } + + /** + * Show error message. + * + * @param {string} message - Error message to display + */ + showError(message) { + console.error(`[Dashboard] Error: ${message}`); + // Could implement toast notification here + alert(message); + } + + /** + * Show success notification. + * + * @param {string} message - Success message + */ + showSuccessNotification(message) { + console.log(`[Dashboard] Success: ${message}`); + // Could implement toast notification here + } + + /** + * Prepare event trends data for line chart. + * Groups events by time buckets based on the selected time range. 
+ * + * @param {Array} events - Array of webhook events + * @returns {Object} Chart data with labels, success, errors, and total arrays + */ + prepareEventTrendsData(events) { + if (!events || !Array.isArray(events)) { + return { labels: [], success: [], errors: [], total: [] }; + } + + // Use selected range end time as anchor instead of "now" + const { startTime, endTime } = this.getTimeRangeDates(this.timeRange); + const anchor = new Date(endTime); + const labels = []; + const successCounts = []; + const errorCounts = []; + const totalCounts = []; + + // Determine bucket configuration based on time range + let bucketCount; + let bucketSize; // in milliseconds + let labelFormatter; + + switch (this.timeRange) { + case '1h': + // 12 buckets of 5 minutes each + bucketCount = 12; + bucketSize = 5 * 60 * 1000; // 5 minutes + labelFormatter = (date) => date.toLocaleTimeString('en-US', { hour: '2-digit', minute: '2-digit' }); + break; + case '24h': + // 24 hourly buckets + bucketCount = 24; + bucketSize = 60 * 60 * 1000; // 1 hour + labelFormatter = (date) => date.toLocaleTimeString('en-US', { hour: '2-digit', minute: '2-digit' }); + break; + case '7d': + // 7 daily buckets + bucketCount = 7; + bucketSize = 24 * 60 * 60 * 1000; // 1 day + labelFormatter = (date) => date.toLocaleDateString('en-US', { month: 'short', day: 'numeric' }); + break; + case '30d': + // 30 daily buckets + bucketCount = 30; + bucketSize = 24 * 60 * 60 * 1000; // 1 day + labelFormatter = (date) => date.toLocaleDateString('en-US', { month: 'short', day: 'numeric' }); + break; + case 'custom': { + // For custom ranges, derive buckets from date range + const { startTime, endTime } = this.getTimeRangeDates('custom'); + const start = new Date(startTime); + const end = new Date(endTime); + const rangeDuration = end - start; + + // Choose bucket size based on range duration + if (rangeDuration <= 2 * 60 * 60 * 1000) { // <= 2 hours + bucketCount = 12; + bucketSize = Math.ceil(rangeDuration / 12); + 
labelFormatter = (date) => date.toLocaleTimeString('en-US', { hour: '2-digit', minute: '2-digit' }); + } else if (rangeDuration <= 48 * 60 * 60 * 1000) { // <= 48 hours + bucketCount = Math.ceil(rangeDuration / (60 * 60 * 1000)); // hourly buckets + bucketSize = 60 * 60 * 1000; + labelFormatter = (date) => date.toLocaleTimeString('en-US', { hour: '2-digit', minute: '2-digit' }); + } else { + bucketCount = Math.min(30, Math.ceil(rangeDuration / (24 * 60 * 60 * 1000))); // daily buckets, max 30 + bucketSize = 24 * 60 * 60 * 1000; + labelFormatter = (date) => date.toLocaleDateString('en-US', { month: 'short', day: 'numeric' }); + } + break; + } + default: + // Fallback to 24 hourly buckets + bucketCount = 24; + bucketSize = 60 * 60 * 1000; + labelFormatter = (date) => date.toLocaleTimeString('en-US', { hour: '2-digit', minute: '2-digit' }); + } + + // Create time buckets + for (let i = bucketCount - 1; i >= 0; i--) { + const bucketTime = new Date(anchor.getTime() - i * bucketSize); + labels.push(labelFormatter(bucketTime)); + successCounts.push(0); + errorCounts.push(0); + totalCounts.push(0); + } + + // Count events in each bucket + events.forEach(event => { + const eventTime = new Date(event.created_at); + const timeDiff = anchor - eventTime; + const bucketIndex = Math.floor(timeDiff / bucketSize); + + if (bucketIndex >= 0 && bucketIndex < bucketCount) { + const index = bucketCount - 1 - bucketIndex; + totalCounts[index]++; + if (event.status === 'success') { + successCounts[index]++; + } else if (event.status === 'error') { + errorCounts[index]++; + } + } + }); + + return { + labels, + success: successCounts, + errors: errorCounts, + total: totalCounts + }; + } + + /** + * Prepare API usage data for bar chart. + * Shows top N repositories by API usage. 
+ * + * @param {Array} repositories - Array of repository statistics + * @param {number} topN - Number of top repositories to show (default: 7) + * @param {string} sortOrder - Sort order ('asc' or 'desc', default: 'desc') + * @returns {Object} Chart data with labels and values arrays + */ + prepareAPIUsageData(repositories, topN = 7, sortOrder = 'desc') { + if (!repositories || !Array.isArray(repositories)) { + return { labels: [], values: [] }; + } + + // Filter and sort by total_api_calls + let sorted = repositories.filter(r => r.total_api_calls > 0); + + if (sortOrder === 'asc') { + sorted.sort((a, b) => a.total_api_calls - b.total_api_calls); + } else { + sorted.sort((a, b) => b.total_api_calls - a.total_api_calls); + } + + // Take top N + sorted = sorted.slice(0, topN); + + return { + labels: sorted.map(r => r.repository?.split('/')[1] || r.repository || 'Unknown'), + values: sorted.map(r => r.total_api_calls || 0) + }; + } + + /** + * Open a modal dialog. + * @param {string} modalId - The ID of the modal to open + */ + openModal(modalId) { + const modal = document.getElementById(modalId); + if (modal) { + modal.classList.add('show'); + console.log(`[Dashboard] Opened modal: ${modalId}`); + } + } + + /** + * Close a modal dialog. + * @param {string} modalId - The ID of the modal to close + */ + closeModal(modalId) { + const modal = document.getElementById(modalId); + if (modal) { + modal.classList.remove('show'); + console.log(`[Dashboard] Closed modal: ${modalId}`); + } + } + + /** + * Update Event Trends chart dataset visibility. 
+ */ + updateTrendsVisibility() { + const showSuccess = document.getElementById('showSuccess')?.checked; + const showErrors = document.getElementById('showErrors')?.checked; + const showTotal = document.getElementById('showTotal')?.checked; + + const chart = this.charts.eventTrends; + if (chart && chart.data.datasets) { + // Datasets: [0] Success, [1] Errors, [2] Total + chart.data.datasets[0].hidden = !showSuccess; + chart.data.datasets[1].hidden = !showErrors; + chart.data.datasets[2].hidden = !showTotal; + chart.update(); + console.log('[Dashboard] Updated Event Trends visibility'); + } + } + + /** + * Change Event Trends chart type. + * @param {string} type - Chart type ('line', 'area', 'bar') + */ + changeTrendsChartType(type) { + const chart = this.charts.eventTrends; + if (chart && chart.data.datasets) { + chart.data.datasets.forEach(dataset => { + if (type === 'area') { + dataset.fill = true; + dataset.type = 'line'; + } else if (type === 'bar') { + dataset.fill = false; + dataset.type = 'bar'; + } else { + dataset.fill = false; + dataset.type = 'line'; + } + }); + chart.update(); + console.log(`[Dashboard] Changed Event Trends chart type to: ${type}`); + } + } + + /** + * Update API Usage chart top N repositories. + * @param {number} n - Number of top repositories to show + */ + updateApiTopN(n) { + if (this.currentData && this.currentData.repositories) { + const repositories = this.normalizeRepositories(this.currentData.repositories); + const apiData = this.prepareAPIUsageData(repositories, n); + if (this.charts.apiUsage) { + window.MetricsCharts.updateAPIUsageChart(this.charts.apiUsage, apiData); + console.log(`[Dashboard] Updated API Usage to show top ${n} repositories`); + } + } + } + + /** + * Update API Usage chart sort order. 
+ * @param {string} order - Sort order ('asc' or 'desc') + */ + updateApiSortOrder(order) { + console.log(`[Dashboard] API sort order changed to: ${order}`); + // Re-render with new sort order + if (this.currentData && this.currentData.repositories) { + const repositories = this.normalizeRepositories(this.currentData.repositories); + const apiData = this.prepareAPIUsageData(repositories, undefined, order); + if (this.charts.apiUsage) { + window.MetricsCharts.updateAPIUsageChart(this.charts.apiUsage, apiData); + } + } + } + + /** + * Change API Usage chart type. + * @param {string} type - Chart type ('bar', 'horizontalBar', 'line') + */ + changeApiChartType(type) { + const chart = this.charts.apiUsage; + if (chart) { + if (type === 'horizontalBar') { + chart.config.options.indexAxis = 'y'; + chart.config.type = 'bar'; + } else if (type === 'line') { + chart.config.options.indexAxis = 'x'; + chart.config.type = 'line'; + } else { + chart.config.options.indexAxis = 'x'; + chart.config.type = 'bar'; + } + chart.update(); + console.log(`[Dashboard] Changed API Usage chart type to: ${type}`); + } + } + + /** + * Export Event Trends data. + * @param {string} format - Export format ('csv' or 'json') + */ + exportTrendsData(format) { + const data = this.currentData.trends || []; + if (data.length === 0) { + console.warn('[Dashboard] No trends data to export'); + return; + } + this.downloadData(data, `event-trends.${format}`, format); + console.log(`[Dashboard] Exported Event Trends data as ${format}`); + } + + /** + * Export API Usage data. 
+ * @param {string} format - Export format ('csv' or 'json') + */ + exportApiData(format) { + // Guard: ensure repositories data exists + if (!this.currentData.repositories) { + console.warn('[Dashboard] No repositories data available to export'); + return; + } + + const repositories = this.normalizeRepositories(this.currentData.repositories); + if (repositories.length === 0) { + console.warn('[Dashboard] No API usage data to export'); + return; + } + this.downloadData(repositories, `api-usage.${format}`, format); + console.log(`[Dashboard] Exported API Usage data as ${format}`); + } + + /** + * Escape a CSV value by wrapping in quotes if needed and escaping internal quotes. + * @param {*} value - Value to escape + * @returns {string} Escaped CSV value + */ + escapeCsvValue(value) { + // Convert to string + const stringValue = String(value ?? ''); + + // Check if value needs escaping (contains comma, quote, or newline) + const needsEscaping = /[",\n\r]/.test(stringValue); + + if (needsEscaping) { + // Escape quotes by doubling them + const escapedValue = stringValue.replace(/"/g, '""'); + // Wrap in quotes + return `"${escapedValue}"`; + } + + return stringValue; + } + + /** + * Download data as CSV or JSON file. 
+ * @param {Array} data - Data array to download + * @param {string} filename - Output filename + * @param {string} format - Format ('csv' or 'json') + */ + downloadData(data, filename, format) { + let content, mimeType; + + if (format === 'csv') { + // Convert to CSV + if (!data.length) return; + const headers = Object.keys(data[0]).map(h => this.escapeCsvValue(h)).join(','); + const rows = data.map(row => + Object.values(row).map(v => this.escapeCsvValue(v)).join(',') + ); + content = [headers, ...rows].join('\n'); + mimeType = 'text/csv'; + } else { + // JSON format + content = JSON.stringify(data, null, 2); + mimeType = 'application/json'; + } + + const blob = new Blob([content], { type: mimeType }); + const url = URL.createObjectURL(blob); + const a = document.createElement('a'); + a.href = url; + a.download = filename; + a.click(); + URL.revokeObjectURL(url); + } + + /** + * Download chart as PNG image. + * @param {string} chartId - Canvas element ID + */ + downloadChart(chartId) { + const canvas = document.getElementById(chartId); + if (!canvas) { + console.warn(`[Dashboard] Canvas not found: ${chartId}`); + return; + } + + const url = canvas.toDataURL('image/png'); + const a = document.createElement('a'); + a.href = url; + a.download = `${chartId}.png`; + a.click(); + console.log(`[Dashboard] Downloaded chart: ${chartId}`); + } + + /** + * Convert kebab-case to camelCase for pagination state keys + * @param {string} kebabCase - kebab-case identifier + * @returns {string} camelCase identifier + */ + toCamelCase(kebabCase) { + return kebabCase.replace(/-([a-z])/g, (g) => g[1].toUpperCase()); + } + + /** + * Create pagination controls HTML + * @param {string} section - Section identifier (kebab-case from HTML) + * @returns {string} Pagination HTML + */ + createPaginationControls(section) { + // Convert kebab-case to camelCase for pagination state lookup + const stateKey = this.toCamelCase(section); + const state = this.pagination[stateKey]; + if (!state) { + 
console.warn(`[Dashboard] No pagination state for section: ${section} (${stateKey})`); + return ''; + } + const { page, pageSize, total, totalPages } = state; + + const hasNext = page < totalPages; + const hasPrev = page > 1; + + return ` +
+
+ + + +
+
+ + Page ${page} of ${totalPages || 1} + +
+
+ Total: ${total} items +
+
+ `; + } + + /** + * Handle page size change + * @param {string} section - Section identifier + * @param {number} newSize - New page size + */ + async changePageSize(section, newSize) { + this.pagination[section].pageSize = newSize; + this.pagination[section].page = 1; // Reset to page 1 + localStorage.setItem(`pageSize_${section}`, newSize); + + await this.loadSectionData(section); + } + + /** + * Handle page navigation + * @param {string} section - Section identifier + * @param {string} action - 'next' or 'prev' + */ + async navigatePage(section, action) { + const state = this.pagination[section]; + + if (action === 'next' && state.page < state.totalPages) { + state.page++; + } else if (action === 'prev' && state.page > 1) { + state.page--; + } + + await this.loadSectionData(section); + } + + /** + * Set up pagination event listeners + */ + setupPaginationListeners() { + // Page size selectors + document.addEventListener('change', (e) => { + if (e.target.classList.contains('page-size-select')) { + const section = e.target.dataset.section; // kebab-case from HTML + const stateKey = this.toCamelCase(section); // Convert to camelCase + const newSize = parseInt(e.target.value); + this.changePageSize(stateKey, newSize); + } + }); + + // Navigation buttons + document.addEventListener('click', (e) => { + if (e.target.classList.contains('btn-pagination')) { + const section = e.target.dataset.section; // kebab-case from HTML + const stateKey = this.toCamelCase(section); // Convert to camelCase + const action = e.target.dataset.action; + if (!e.target.disabled) { + this.navigatePage(stateKey, action); + } + } + }); + } + + /** + * Load data for a specific section with pagination + * @param {string} section - Section identifier + */ + async loadSectionData(section) { + const state = this.pagination[section]; + const { startTime, endTime } = this.getTimeRangeDates(this.timeRange); + + this.showLoading(true); + + try { + let data; + const params = { + page: state.page, + 
page_size: state.pageSize + }; + + // Add filters + if (this.repositoryFilterRaw) { + params.repository = this.repositoryFilterRaw; + } + if (this.userFilter) { + params.user = this.userFilter; + } + + switch (section) { + case 'topRepositories': + data = await this.apiClient.fetchRepositories(startTime, endTime, params); + this.updateRepositoryTable(data); + break; + case 'recentEvents': + params.start_time = startTime; + params.end_time = endTime; + data = await this.apiClient.fetchWebhooks(params); + this.updateRecentEventsTable(data); + break; + case 'prCreators': + case 'prReviewers': + case 'prApprovers': + data = await this.apiClient.fetchContributors(startTime, endTime, state.pageSize, params); + this.updateContributorsTables(data); + break; + case 'userPrs': + data = await this.apiClient.fetchUserPRs(startTime, endTime, params); + this.updateUserPRsTable(data); + break; + } + } catch (error) { + console.error(`[Dashboard] Error loading ${section} data:`, error); + } finally { + this.showLoading(false); + } + } + + /** + * Update User PRs table with new data. + * @param {Object} prsData - User PRs data with pagination + */ + updateUserPRsTable(prsData) { + const tableBody = document.getElementById('user-prs-table-body'); + if (!tableBody) return; + + const prs = prsData.data || []; + const pagination = prsData.pagination; + + if (pagination) { + this.pagination.userPrs = { + page: pagination.page, + pageSize: pagination.page_size, + total: pagination.total, + totalPages: pagination.total_pages + }; + } + + if (!prs || prs.length === 0) { + tableBody.innerHTML = 'No pull requests found'; + } else { + const rows = prs.map(pr => { + // Soft fallbacks for missing/invalid date fields + const created = pr.created_at ? this.formatDateSafe(pr.created_at) : '-'; + const updated = pr.updated_at ? this.formatDateSafe(pr.updated_at) : '-'; + const stateClass = pr.state === 'open' ? 'status-success' : 'status-error'; + const mergedBadge = pr.merged ? 
'Merged' : ''; + + // Soft fallbacks for missing fields + const prNumber = pr.number || 'N/A'; + const title = pr.title || 'Untitled'; + const repository = pr.repository || 'Unknown'; + const state = pr.state || 'unknown'; + const commitsCount = pr.commits_count || 0; + + return ` + + #${prNumber} + ${this.escapeHtml(title)} + ${this.escapeHtml(repository)} + ${this.escapeHtml(state)} ${mergedBadge} + ${created} + ${updated} + ${commitsCount} + + `; + }).join(''); + tableBody.innerHTML = rows; + } + + // Add pagination controls + const container = document.querySelector('[data-section="user-prs"] .chart-content'); + const existingControls = container?.querySelector('.pagination-controls'); + if (existingControls) { + existingControls.remove(); + } + + if (container && pagination) { + container.insertAdjacentHTML('beforeend', this.createPaginationControls('user-prs')); + } + } + + /** + * Clean up resources on page unload. + */ + destroy() { + console.log('[Dashboard] Destroying dashboard...'); + + // Destroy charts + Object.values(this.charts).forEach(chart => { + if (chart && typeof chart.destroy === 'function') { + chart.destroy(); + } + }); + + console.log('[Dashboard] Dashboard destroyed'); + } + + /** + * Escape HTML to prevent XSS. + * + * @param {string} text - Text to escape + * @returns {string} Escaped text + */ + escapeHtml(text) { + const div = document.createElement('div'); + div.textContent = text; + return div.innerHTML; + } + + /** + * Safely format a date string, handling invalid dates. 
+ * + * @param {string} dateString - ISO date string + * @returns {string} Formatted date or fallback + */ + formatDateSafe(dateString) { + try { + const date = new Date(dateString); + if (isNaN(date.getTime())) { + return '-'; + } + return date.toLocaleDateString(); + } catch (_error) { + return '-'; + } + } +} + + +// Initialize dashboard on DOMContentLoaded +document.addEventListener('DOMContentLoaded', () => { + console.log('[Dashboard] DOM loaded, initializing dashboard...'); + + // Create global dashboard instance + window.metricsDashboard = new MetricsDashboard(); + + // Clean up on page unload + window.addEventListener('beforeunload', () => { + if (window.metricsDashboard) { + window.metricsDashboard.destroy(); + } + }); +}); diff --git a/webhook_server/web/static/js/metrics/utils.js b/webhook_server/web/static/js/metrics/utils.js new file mode 100644 index 00000000..6e937155 --- /dev/null +++ b/webhook_server/web/static/js/metrics/utils.js @@ -0,0 +1,601 @@ +/** + * Utility Functions for GitHub Webhook Metrics Dashboard + * + * Common helper functions for time formatting, number formatting, + * data processing, DOM manipulation, and validation. + * + * No external dependencies - vanilla JavaScript only. 
+ */ + +// ============================================================================ +// Time and Duration Formatting +// ============================================================================ + +/** + * Format milliseconds to human-readable duration + * @param {number} ms - Duration in milliseconds + * @returns {string} Formatted duration (e.g., "5.8s", "1m 30s", "2h 15m") + */ +function formatDuration(ms) { + if (ms == null || isNaN(ms)) { + return '-'; + } + + const absMs = Math.abs(ms); + + // Less than 1 second - show milliseconds + if (absMs < 1000) { + return `${Math.round(absMs)}ms`; + } + + // Less than 1 minute - show seconds with 1 decimal + if (absMs < 60000) { + return `${(absMs / 1000).toFixed(1)}s`; + } + + // Less than 1 hour - show minutes and seconds + if (absMs < 3600000) { + const mins = Math.floor(absMs / 60000); + const secs = Math.floor((absMs % 60000) / 1000); + return secs > 0 ? `${mins}m ${secs}s` : `${mins}m`; + } + + // Hours and minutes + const hours = Math.floor(absMs / 3600000); + const mins = Math.floor((absMs % 3600000) / 60000); + return mins > 0 ? 
`${hours}h ${mins}m` : `${hours}h`; +} + +/** + * Format ISO timestamp to local time + * @param {string} isoString - ISO 8601 timestamp + * @param {boolean} includeSeconds - Whether to include seconds in output + * @returns {string} Formatted local time (e.g., "2024-11-24 14:35:22") + */ +function formatTimestamp(isoString, includeSeconds = true) { + if (!isoString) { + return '-'; + } + + try { + const date = new Date(isoString); + if (isNaN(date.getTime())) { + return '-'; + } + + const year = date.getFullYear(); + const month = String(date.getMonth() + 1).padStart(2, '0'); + const day = String(date.getDate()).padStart(2, '0'); + const hours = String(date.getHours()).padStart(2, '0'); + const minutes = String(date.getMinutes()).padStart(2, '0'); + const seconds = String(date.getSeconds()).padStart(2, '0'); + + if (includeSeconds) { + return `${year}-${month}-${day} ${hours}:${minutes}:${seconds}`; + } + return `${year}-${month}-${day} ${hours}:${minutes}`; + } catch (error) { + console.error('Error formatting timestamp:', error); + return '-'; + } +} + +/** + * Format ISO timestamp to relative time + * @param {string} isoString - ISO 8601 timestamp + * @returns {string} Relative time (e.g., "2 minutes ago", "5 hours ago") + */ +function formatRelativeTime(isoString) { + if (!isoString) { + return '-'; + } + + try { + const date = new Date(isoString); + if (isNaN(date.getTime())) { + return '-'; + } + + const now = new Date(); + const diffMs = now - date; + const diffSec = Math.floor(diffMs / 1000); + + // Future time + if (diffSec < 0) { + return 'in the future'; + } + + // Just now (< 10 seconds) + if (diffSec < 10) { + return 'just now'; + } + + // Seconds ago (< 1 minute) + if (diffSec < 60) { + return `${diffSec} seconds ago`; + } + + // Minutes ago (< 1 hour) + const diffMin = Math.floor(diffSec / 60); + if (diffMin < 60) { + return diffMin === 1 ? 
'1 minute ago' : `${diffMin} minutes ago`; + } + + // Hours ago (< 1 day) + const diffHours = Math.floor(diffMin / 60); + if (diffHours < 24) { + return diffHours === 1 ? '1 hour ago' : `${diffHours} hours ago`; + } + + // Days ago (< 30 days) + const diffDays = Math.floor(diffHours / 24); + if (diffDays < 30) { + return diffDays === 1 ? '1 day ago' : `${diffDays} days ago`; + } + + // Months ago (< 12 months) + const diffMonths = Math.floor(diffDays / 30); + if (diffMonths < 12) { + return diffMonths === 1 ? '1 month ago' : `${diffMonths} months ago`; + } + + // Years ago + const diffYears = Math.floor(diffMonths / 12); + return diffYears === 1 ? '1 year ago' : `${diffYears} years ago`; + } catch (error) { + console.error('Error formatting relative time:', error); + return '-'; + } +} + +// ============================================================================ +// Number Formatting +// ============================================================================ + +/** + * Format number with thousand separators + * @param {number} num - Number to format + * @returns {string} Formatted number (e.g., "8,745") + */ +function formatNumber(num) { + if (num == null || isNaN(num)) { + return '-'; + } + + return num.toLocaleString('en-US'); +} + +/** + * Format number as percentage + * @param {number} num - Number in percentage form (0-100, not 0-1) + * @param {number} decimals - Number of decimal places + * @returns {string} Formatted percentage (e.g., "96.32%") + */ +function formatPercentage(num, decimals = 2) { + if (num == null) { + return '-'; + } + + const value = Number(num); + if (!Number.isFinite(value)) { + return '-'; + } + + return `${value.toFixed(decimals)}%`; +} + +/** + * Format bytes to human-readable size + * @param {number} bytes - Number of bytes + * @param {number} decimals - Number of decimal places + * @returns {string} Formatted size (e.g., "1.5 MB") + */ +function formatBytes(bytes, decimals = 2) { + if (bytes == null || isNaN(bytes)) { + 
return '-'; + } + + // Handle zero bytes + if (bytes === 0) { + return '0 B'; + } + + // Handle negative values + if (bytes < 0) { + return 'Invalid'; + } + + // Handle sub-byte values (0 < bytes < 1) + if (bytes < 1) { + return '< 1 B'; + } + + const k = 1024; + const sizes = ['B', 'KB', 'MB', 'GB', 'TB', 'PB']; + const i = Math.floor(Math.log(bytes) / Math.log(k)); + const safeIndex = Math.min(i, sizes.length - 1); + const size = bytes / Math.pow(k, safeIndex); + + return `${size.toFixed(decimals)} ${sizes[safeIndex]}`; +} + +// ============================================================================ +// Data Processing +// ============================================================================ + +/** + * Calculate trend between current and previous values + * @param {number} current - Current value + * @param {number} previous - Previous value + * @returns {Object} Trend object with direction, value, and icon + */ +function calculateTrend(current, previous) { + if (current == null || isNaN(current)) { + return { direction: 'neutral', value: '-', icon: '→' }; + } + + if (previous == null || isNaN(previous) || previous === 0) { + return { direction: 'neutral', value: '-', icon: '→' }; + } + + const change = ((current - previous) / previous) * 100; + + // No significant change (< 0.1%) + if (Math.abs(change) < 0.1) { + return { direction: 'neutral', value: '0%', icon: '→' }; + } + + return { + direction: change > 0 ? 'up' : 'down', + value: `${Math.abs(change).toFixed(1)}%`, + icon: change > 0 ? 
'↑' : '↓' + }; +} + +/** + * Aggregate events by time range + * @param {Array} events - Array of event objects with timestamp property + * @param {string} range - Time range: 'hour', 'day', 'week' + * @returns {Object} Object with time buckets as keys + */ +function aggregateByTimeRange(events, range = 'hour') { + if (!Array.isArray(events) || events.length === 0) { + return {}; + } + + const buckets = {}; + + events.forEach(event => { + if (!event || !event.timestamp) { + return; + } + + try { + const date = new Date(event.timestamp); + if (isNaN(date.getTime())) { + return; + } + + let bucketKey; + + switch (range) { + case 'hour': + // Bucket by hour: "2024-11-24T14" + bucketKey = `${date.getFullYear()}-${String(date.getMonth() + 1).padStart(2, '0')}-${String(date.getDate()).padStart(2, '0')}T${String(date.getHours()).padStart(2, '0')}`; + break; + + case 'day': + // Bucket by day: "2024-11-24" + bucketKey = `${date.getFullYear()}-${String(date.getMonth() + 1).padStart(2, '0')}-${String(date.getDate()).padStart(2, '0')}`; + break; + + case 'week': { + // Bucket by week: "2024-W47" + const weekNumber = getWeekNumber(date); + bucketKey = `${date.getFullYear()}-W${String(weekNumber).padStart(2, '0')}`; + break; + } + + default: + bucketKey = date.toISOString(); + } + + if (!buckets[bucketKey]) { + buckets[bucketKey] = []; + } + buckets[bucketKey].push(event); + } catch (error) { + console.error('Error aggregating event:', error); + } + }); + + return buckets; +} + +/** + * Get ISO week number for a date + * @param {Date} date - Date object + * @returns {number} ISO week number (1-53) + */ +function getWeekNumber(date) { + const d = new Date(Date.UTC(date.getFullYear(), date.getMonth(), date.getDate())); + const dayNum = d.getUTCDay() || 7; + d.setUTCDate(d.getUTCDate() + 4 - dayNum); + const yearStart = new Date(Date.UTC(d.getUTCFullYear(), 0, 1)); + return Math.ceil((((d - yearStart) / 86400000) + 1) / 7); +} + +/** + * Calculate success rate percentage + * @param 
{number} successful - Number of successful events + * @param {number} total - Total number of events + * @returns {number} Success rate percentage (0-100) + */ +function calculateSuccessRate(successful, total) { + if (total == null || isNaN(total) || total === 0) { + return 0; + } + + if (successful == null || isNaN(successful)) { + return 0; + } + + return (successful / total) * 100; +} + +// ============================================================================ +// DOM Helpers +// ============================================================================ + +/** + * Escape HTML to prevent XSS attacks + * @param {string} str - String to escape + * @returns {string} Escaped string safe for HTML insertion + */ +function escapeHTML(str) { + if (str == null) { + return ''; + } + + // Guard for non-browser environments (Node.js/test context without jsdom) + if (typeof document === 'undefined') { + // Fallback: basic HTML escaping without DOM + return String(str) + .replace(/&/g, '&amp;') + .replace(/</g, '&lt;') + .replace(/>/g, '&gt;') + .replace(/"/g, '&quot;') + .replace(/'/g, '&#39;'); + } + + const div = document.createElement('div'); + div.textContent = str; + return div.innerHTML; +} + +/** + * Debounce function calls + * @param {Function} func - Function to debounce + * @param {number} delay - Delay in milliseconds + * @returns {Function} Debounced function + */ +function debounce(func, delay = 300) { + let timeoutId; + + return function debounced(...args) { + clearTimeout(timeoutId); + timeoutId = setTimeout(() => { + func.apply(this, args); + }, delay); + }; +} + +/** + * Throttle function calls + * @param {Function} func - Function to throttle + * @param {number} limit - Minimum time between calls in milliseconds + * @returns {Function} Throttled function + */ +function throttle(func, limit = 300) { + let inThrottle; + let lastFunc; + let lastRan; + + return function throttled(...args) { + if (!inThrottle) { + func.apply(this, args); + lastRan = Date.now(); + inThrottle = true; + } else { + 
clearTimeout(lastFunc); + lastFunc = setTimeout(() => { + if ((Date.now() - lastRan) >= limit) { + func.apply(this, args); + lastRan = Date.now(); + } + inThrottle = false; + }, limit - (Date.now() - lastRan)); + } + }; +} + +// ============================================================================ +// Storage Helpers +// ============================================================================ + +/** + * Get value from localStorage with fallback + * @param {string} key - Storage key + * @param {*} defaultValue - Default value if key not found + * @returns {*} Stored value or default value + */ +function getLocalStorage(key, defaultValue = null) { + try { + const item = localStorage.getItem(key); + if (item === null) { + return defaultValue; + } + + // Try to parse as JSON + try { + return JSON.parse(item); + } catch { + // Return as string if not valid JSON + return item; + } + } catch (error) { + console.error('Error reading from localStorage:', error); + return defaultValue; + } +} + +/** + * Set value to localStorage safely + * @param {string} key - Storage key + * @param {*} value - Value to store + * @returns {boolean} True if successful, false otherwise + */ +function setLocalStorage(key, value) { + try { + const serialized = typeof value === 'string' ? 
value : JSON.stringify(value); + localStorage.setItem(key, serialized); + return true; + } catch (error) { + console.error('Error writing to localStorage:', error); + return false; + } +} + +// ============================================================================ +// Validation +// ============================================================================ + +/** + * Validate time range + * @param {string|Date} startTime - Start time + * @param {string|Date} endTime - End time + * @returns {boolean} True if valid time range + */ +function isValidTimeRange(startTime, endTime) { + if (!startTime || !endTime) { + return false; + } + + try { + const start = new Date(startTime); + const end = new Date(endTime); + + if (isNaN(start.getTime()) || isNaN(end.getTime())) { + return false; + } + + // End time must be after start time + return end > start; + } catch (error) { + console.error('Error validating time range:', error); + return false; + } +} + +/** + * Validate repository format (org/repo) + * @param {string} repo - Repository string to validate + * @returns {boolean} True if valid repository format + */ +function isValidRepository(repo) { + if (!repo || typeof repo !== 'string') { + return false; + } + + // Repository format: org/repo + // - org: alphanumeric, hyphens (1-39 chars) + // - repo: alphanumeric, hyphens, underscores, dots (1-100 chars) + const repoPattern = /^[a-zA-Z0-9-]{1,39}\/[a-zA-Z0-9._-]{1,100}$/; + return repoPattern.test(repo); +} + +// ============================================================================ +// Export Functions (for module usage) +// ============================================================================ + +// Export all functions for potential module usage +if (typeof module !== 'undefined' && module.exports) { + module.exports = { + // Time and Duration + formatDuration, + formatTimestamp, + formatRelativeTime, + // Number Formatting + formatNumber, + formatPercentage, + formatBytes, + // Data Processing + 
calculateTrend, + aggregateByTimeRange, + calculateSuccessRate, + // DOM Helpers + escapeHTML, + debounce, + throttle, + // Storage Helpers + getLocalStorage, + setLocalStorage, + // Validation + isValidTimeRange, + isValidRepository + }; +} + +// Browser globals for non-module usage +if (typeof window !== 'undefined') { + window.MetricsUtils = { + // Time and Duration + formatDuration, + formatTimestamp, + formatRelativeTime, + // Number Formatting + formatNumber, + formatPercentage, + formatBytes, + // Data Processing + calculateTrend, + aggregateByTimeRange, + calculateSuccessRate, + // DOM Helpers + escapeHTML, + debounce, + throttle, + // Storage Helpers + getLocalStorage, + setLocalStorage, + // Validation + isValidTimeRange, + isValidRepository + }; +} + +// ESM exports (modern module syntax) +export { + // Time and Duration + formatDuration, + formatTimestamp, + formatRelativeTime, + // Number Formatting + formatNumber, + formatPercentage, + formatBytes, + // Data Processing + calculateTrend, + aggregateByTimeRange, + calculateSuccessRate, + // DOM Helpers + escapeHTML, + debounce, + throttle, + // Storage Helpers + getLocalStorage, + setLocalStorage, + // Validation + isValidTimeRange, + isValidRepository +}; diff --git a/webhook_server/web/templates/log_viewer.html b/webhook_server/web/templates/log_viewer.html index 50b0eff5..b74dfb80 100644 --- a/webhook_server/web/templates/log_viewer.html +++ b/webhook_server/web/templates/log_viewer.html @@ -4,6 +4,7 @@ GitHub Webhook Server - Log Viewer + diff --git a/webhook_server/web/templates/metrics_dashboard.html b/webhook_server/web/templates/metrics_dashboard.html new file mode 100644 index 00000000..0574fbdc --- /dev/null +++ b/webhook_server/web/templates/metrics_dashboard.html @@ -0,0 +1,399 @@ + + + + + + GitHub Webhook Server - Metrics Dashboard + + + + + +
+
+
+

GitHub Webhook Server - Metrics Dashboard

+

Real-time monitoring of webhook processing metrics

+
+ +
+ +
+ Connecting... +
+ + + + +
+
+

Filters & Controls

+ +
+ +
+
+
+ + +
+
+ + +
+
+ + +
+
+ + +
+
+ + +
+
+ +
+
+ +
+
+
+
+ +
+
+
Total Events
+
-
+
+ - + - +
+
+
+
Success Rate
+
-
+
+ - + - +
+
+
+
Failed Events
+
-
+
+ - + - +
+
+
+
Avg Duration
+
-
+
+ - + - +
+
+
+ +
+
+
+

Event Trends

+
+ + +
+
+
+
+ +
+
+
+ +
+
+

API Usage Trends

+
+ + +
+
+
+
+ +
+
+
+ +
+
+

Event Distribution

+ +
+
+
+ +
+
+
+ +
+
+

Top Repositories

+ +
+
+
+ + + + + + + + + + + + + +
RepositoryEvents%
Loading...
+
+
+
+ +
+
+

Recent Events

+ +
+
+
+ + + + + + + + + + + + + + +
TimeRepositoryEventStatus
Loading...
+
+
+
+ +
+
+

PR Contributors

+ +
+
+
+ +
+

PR Creators

+
+ + + + + + + + + + + + + + + +
UserTotal PRsMergedClosedAvg Commits
Loading...
+
+
+ + +
+

PR Reviewers

+
+ + + + + + + + + + + + + + +
UserTotal ReviewsPRs ReviewedAvg/PR
Loading...
+
+
+ + +
+

PR Approvers

+
+ + + + + + + + + + + + + +
UserTotal ApprovalsPRs Approved
Loading...
+
+
+
+
+
+ +
+
+

User Pull Requests

+ +
+
+
+ + + + + + + + + + + + + + + + + +
PRTitleRepositoryStateCreatedUpdatedCommits
Loading...
+
+
+
+
+
+ + + + + + + + + + + + + +