diff --git a/pytest.ini b/pytest.ini index c62fb38a..ba28bf30 100644 --- a/pytest.ini +++ b/pytest.ini @@ -4,3 +4,6 @@ addopts = --pdbcls=IPython.terminal.debugger:TerminalPdb --cov-config=pyproject.toml --cov-report=html --cov-report=term --cov=webhook_server --log-cli-level=DEBUG + -m 'not e2e' +markers = + e2e: "End-to-end tests that require real GitHub interactions (deselect with '-m \"not e2e\"')" diff --git a/webhook_server/tests/e2e/README.md b/webhook_server/tests/e2e/README.md new file mode 100644 index 00000000..2615a22a --- /dev/null +++ b/webhook_server/tests/e2e/README.md @@ -0,0 +1,817 @@ +# End-to-End (E2E) Testing Guide + +## Purpose and Audience + +This guide helps developers set up and run end-to-end tests for the GitHub webhook server. E2E tests verify the complete workflow: GitHub sends webhooks → smee.io proxies them → server processes them → changes appear in GitHub. + +**Target audience:** Developers contributing to the webhook server who need to validate full integration behavior. + +## Overview + +The E2E testing infrastructure provides: + +- **Automated Infrastructure Setup**: Fixtures automatically start smee client and Docker container +- **Real GitHub Integration**: Tests interact with actual GitHub repositories using `gh` CLI +- **Health Monitoring**: Waits for server to be healthy before running tests +- **Automatic Cleanup**: Ensures resources are properly cleaned up after tests complete or fail +- **Isolation**: Uses pytest markers to prevent accidental execution + +## Prerequisites + +Before running E2E tests, ensure you have these tools installed: + +### 1. Docker and Docker Compose + +```bash +# Verify Docker is installed +docker info + +# Verify Docker Compose is installed +docker compose version +``` + +### 2. Node.js and smee-client + +```bash +# Install smee-client globally +npm install -g smee-client + +# Verify installation +which smee +smee --version +``` + +### 3. 
GitHub CLI + +**CRITICAL:** All GitHub operations use `gh` CLI (NOT PyGithub, NOT direct HTTP calls). + +```bash +# Install gh CLI (if not already installed) +# macOS: brew install gh +# Linux: See https://github.com/cli/cli/blob/trunk/docs/install_linux.md + +# Verify installation +gh --version + +# Authenticate with GitHub +gh auth login + +# Verify authentication +gh auth status +``` + +### 4. Python Dependencies + +```bash +# Install all dependencies including test group +uv sync +``` + +## Configuration + +### Required: .dev/.env File + +**CRITICAL:** E2E tests require a `.dev/.env` file with configuration. Tests will fail if this file is missing. + +Create `.dev/.env` in the project root: + +```bash +# .dev/.env +SERVER_PORT=5000 +SMEE_URL=https://smee.io/YOUR_UNIQUE_CHANNEL +TEST_REPO=owner/repo-name +DOCKER_COMPOSE_FILE=.dev/docker-compose.yaml +TZ=America/New_York +``` + +**How to get a Smee URL:** + +1. Visit https://smee.io/ +2. Click "Start a new channel" +3. Copy the generated URL (e.g., `https://smee.io/abc123def456`) +4. 
Add the copied URL to `.dev/.env` as `SMEE_URL=https://smee.io/abc123def456` (use your own unique channel URL, not a shared one)
**Fixture Setup** (once per session): + - Loads `.dev/.env` configuration + - Validates `SMEE_URL` and `SERVER_PORT` exist + - Starts smee client to proxy webhooks from smee.io to local server port + - Starts Docker container with webhook server + - Waits for container health check (via Docker healthcheck, max 60 seconds) + +2. **Test Execution**: + - Tests use `gh` CLI to interact with GitHub (create PRs, add comments, etc.) + - GitHub sends webhooks to smee.io + - Smee client proxies webhooks to local server + - Server processes webhooks and performs actions + - Tests verify results in GitHub using `gh` CLI + +3. **Cleanup** (automatic): + - Stops smee client gracefully (5-second timeout, then kill) + - Stops Docker Compose container + - Cleanup happens even if tests fail or are interrupted (Ctrl+C) + +## Fixtures + +### `server_envs` (session-scoped) + +Loads and validates environment variables from `.dev/.env`. + +**Returns:** `dict` with: +- `server_port`: Local server port (e.g., `"5000"`) +- `smee_url`: Smee.io webhook proxy URL (e.g., `https://smee.io/abc123`) +- `test_repo`: Test repository name (e.g., `"owner/repo-name"`) +- `project_root`: Absolute path to project root +- `docker_compose_file`: Absolute path to `docker-compose.yaml` + +**Raises:** +- `E2EInfrastructureError` if `.dev/.env` file does not exist +- `E2EInfrastructureError` if `SERVER_PORT` or `SMEE_URL` are missing + +**Example usage:** Typically not used directly (consumed by `e2e_server` fixture). + +### `e2e_server` (session-scoped) + +Main fixture that manages complete E2E infrastructure lifecycle. + +**Returns:** `None` (tests interact with GitHub via `gh` CLI, not server directly) + +**Lifecycle:** +1. Starts smee client (automatic) +2. Starts Docker Compose container (automatic) +3. Waits for container health via Docker healthcheck (automatic) +4. Yields to tests +5. 
Cleanup: stops smee + Docker Compose (automatic) + +**Example usage:** + +```python +import pytest + +@pytest.mark.e2e +def test_webhook_processing(e2e_server): + """Infrastructure is running, ready to test.""" + # Use gh CLI to interact with GitHub + # Server processes webhooks automatically + # Verify results in GitHub + pass +``` + +## Writing E2E Tests + +### Test Structure Requirements + +**MANDATORY:** All E2E tests must: +1. Have `@pytest.mark.e2e` decorator +2. Accept `e2e_server` fixture parameter +3. Use `gh` CLI for GitHub operations (NOT PyGithub, NOT HTTP calls) +4. Clean up created resources (PRs, branches) + +### Basic Test Template + +```python +import pytest +import subprocess + +@pytest.mark.e2e +def test_webhook_flow(e2e_server): + """Test description explaining what is being validated.""" + # Infrastructure is running (smee + docker-compose) + + # Step 1: Perform GitHub action using gh CLI + result = subprocess.run( + ["gh", "pr", "create", "--title", "Test PR", "--body", "Test body"], + capture_output=True, + text=True, + check=True, + ) + pr_url = result.stdout.strip() + pr_number = pr_url.split("/")[-1] + + # Step 2: Wait for webhook processing (if needed) + import time + time.sleep(5) # Give server time to process webhook + + # Step 3: Verify results using gh CLI + result = subprocess.run( + ["gh", "pr", "view", pr_number, "--json", "labels"], + capture_output=True, + text=True, + check=True, + ) + + # Step 4: Clean up + subprocess.run( + ["gh", "pr", "close", pr_number, "--delete-branch"], + check=True, + ) +``` + +### GitHub CLI Usage Patterns + +**Creating Pull Requests:** + +```python +# Create PR with title and body +result = subprocess.run( + ["gh", "pr", "create", "--title", "Test PR", "--body", "Test description"], + capture_output=True, + text=True, + check=True, +) +pr_url = result.stdout.strip() +``` + +**Viewing PR Data:** + +```python +# Get PR data as JSON +result = subprocess.run( + ["gh", "pr", "view", pr_number, "--json", 
"labels,state,comments"], + capture_output=True, + text=True, + check=True, +) +pr_data = json.loads(result.stdout) +``` + +**Adding Comments:** + +```python +# Add comment to PR +subprocess.run( + ["gh", "pr", "comment", pr_number, "--body", "/verify-owners"], + check=True, +) +``` + +**Closing PRs:** + +```python +# Close PR and delete branch +subprocess.run( + ["gh", "pr", "close", pr_number, "--delete-branch"], + check=True, +) +``` + +**API Calls:** + +```python +# Use GitHub API directly +result = subprocess.run( + ["gh", "api", f"/repos/owner/repo/pulls/{pr_number}/reviews"], + capture_output=True, + text=True, + check=True, +) +reviews = json.loads(result.stdout) +``` + +### Test Repository + +E2E tests use the `myk-org/for-testing-only` repository: + +- **Purpose:** Dedicated test repository with controlled configuration +- **OWNERS Files:** Contains nested OWNERS files for ownership testing +- **Validation Toggle:** Tests can enable/disable validation via `tests/config.py` +- **Pre-commit Hooks:** Can be configured to pass/fail for testing check runs +- **Tox Tests:** Can be configured to pass/fail for testing status checks + +**Repository structure:** + +``` +for-testing-only/ +├── src/ +│ └── OWNERS # Nested OWNERS file +├── tests/ +│ ├── config.py # VALIDATION_ENABLED toggle +│ └── OWNERS # Nested OWNERS file +├── OWNERS # Root OWNERS file +├── .pre-commit-config.yaml # Pre-commit configuration +└── tox.ini # Tox configuration +``` + +## Troubleshooting + +### Test Discovery: No Tests Run + +**Symptom:** `collected 0 items` or tests are skipped + +**Cause:** Missing `-m e2e` marker flag + +**Solution:** + +```bash +# WRONG - tests won't run +uv run --group tests pytest webhook_server/tests/e2e/ -v + +# CORRECT - tests will run +uv run --group tests pytest webhook_server/tests/e2e/ -v -m e2e +``` + +### Configuration Error: .dev/.env Not Found + +**Symptom:** `E2EInfrastructureError: Required .dev/.env file not found` + +**Cause:** Missing `.dev/.env` 
configuration file + +**Solution:** + +```bash +# Create .dev/.env file +cat > .dev/.env << 'EOF' +SERVER_PORT=5000 +SMEE_URL=https://smee.io/webhook_server +TEST_REPO=owner/repo-name +DOCKER_COMPOSE_FILE=.dev/docker-compose.yaml +TZ=America/New_York +EOF + +# Get your Smee URL from https://smee.io/ +``` + +### Configuration Error: Missing SMEE_URL or SERVER_PORT + +**Symptom:** `E2EInfrastructureError: SMEE_URL environment variable is required` + +**Cause:** `.dev/.env` exists but missing required variables + +**Solution:** Edit `.dev/.env` and ensure all required variables are set: + +```bash +SERVER_PORT=5000 +SMEE_URL=https://smee.io/abc123def456 +TEST_REPO=owner/repo-name +DOCKER_COMPOSE_FILE=.dev/docker-compose.yaml +``` + +### Infrastructure Error: smee-client Not Found + +**Symptom:** `E2EInfrastructureError: smee client not found` + +**Cause:** smee-client not installed or not in PATH + +**Solution:** + +```bash +# Install smee-client globally +npm install -g smee-client + +# Verify installation +which smee +smee --version +``` + +### Infrastructure Error: Container Health Check Failed + +**Symptom:** `E2EInfrastructureError: Webhook server container health check failed` + +**Cause:** Docker container did not become healthy within 60 seconds + +**Solutions:** + +1. Check Docker is running: + ```bash + docker info + ``` + +2. Check container status: + ```bash + docker compose --file .dev/docker-compose.yaml ps + ``` + +3. Check container logs for errors: + ```bash + docker compose --file .dev/docker-compose.yaml logs + ``` + +4. Verify configuration file exists: + ```bash + ls -la .dev/data/config.yaml + ``` + +5. Verify port is not in use: + ```bash + lsof -i :5000 + ``` + +6. 
Rebuild container: + ```bash + docker compose --file .dev/docker-compose.yaml down + docker compose --file .dev/docker-compose.yaml build + docker compose --file .dev/docker-compose.yaml up -d + ``` + +### GitHub CLI Error: Not Authenticated + +**Symptom:** `gh: authentication required` or `gh: could not determine authenticated user` + +**Cause:** GitHub CLI not authenticated + +**Solution:** + +```bash +# Authenticate with GitHub +gh auth login + +# Follow interactive prompts to authenticate + +# Verify authentication +gh auth status +``` + +### Port Conflict Error + +**Symptom:** `port is already allocated` or `address already in use` + +**Cause:** Port 5000 is already in use by another process + +**Solutions:** + +1. Stop existing containers: + ```bash + docker compose --file .dev/docker-compose.yaml down + ``` + +2. Find process using port: + ```bash + lsof -i :5000 + ``` + +3. Kill process or change port in `docker-compose.yaml` + +### Smee Client Issues + +**Symptom:** Webhooks not reaching local server + +**Cause:** Smee client not running or incorrect URL configuration + +**Solutions:** + +1. Check smee process is running: + ```bash + ps aux | grep smee + ``` + +2. Verify SMEE_URL is correct in `.dev/.env` + +3. Test smee URL in browser - should show "Ready" message at https://smee.io/webhook_server + +4. Check smee client logs (printed during test setup) + +### Docker Compose Fails to Start + +**Symptom:** `E2EInfrastructureError: Failed to start docker-compose` + +**Solutions:** + +1. Verify Docker is running: + ```bash + docker info + ``` + +2. Validate docker-compose file: + ```bash + docker compose --file .dev/docker-compose.yaml config + ``` + +3. Check for syntax errors in docker-compose.yaml + +4. Ensure data directory exists: + ```bash + mkdir -p .dev/data + ``` + +5. 
Check Docker permissions: + ```bash + # Linux: add user to docker group + sudo usermod -aG docker $USER + # Logout and login for changes to take effect + ``` + +## Architecture + +### Infrastructure Components + +1. **Smee Client** (smee-client via npm) + - Proxies webhooks from smee.io to local server + - Automatically started by fixture + - Runs as subprocess with PID tracking + - Graceful shutdown with 5-second timeout + +2. **Docker Compose** (docker-compose.yaml) + - Runs webhook server in container + - Mounts `.dev/data` for configuration + - Exposes port 5000 (maps to container port 5000) + - Has health check endpoint for monitoring + +3. **GitHub CLI** (gh) + - All GitHub operations use `gh` CLI + - Authenticated with user's GitHub account + - Provides JSON output for parsing + - More reliable than direct API calls + +### Startup Sequence + +1. `server_envs` fixture: + - Loads `.dev/.env` file + - Validates `SMEE_URL` and `SERVER_PORT` + - Returns environment configuration + +2. `e2e_server` fixture: + - Starts smee client (subprocess) + - Starts Docker Compose container + - Waits for container health check (max 60 seconds) + - Yields control to tests + +3. Test execution: + - Tests use `gh` CLI to interact with GitHub + - GitHub sends webhooks to smee.io + - Smee proxies webhooks to local server + - Server processes webhooks + - Tests verify results in GitHub + +4. Cleanup (automatic): + - Stops smee client (terminate → wait 5s → kill if needed) + - Stops Docker Compose container + - Cleanup runs even if tests fail + +### Cleanup Guarantees + +Cleanup is guaranteed to run even if: +- Tests fail with assertions +- Tests raise exceptions +- Tests are interrupted (Ctrl+C) +- Setup fails partway through + +This is achieved using pytest's fixture yield mechanism with proper error handling. 
+ +## Performance Considerations + +### Session-Scoped Fixtures + +Infrastructure starts ONCE per test session, not per test: + +- Faster test execution (no repeated setup/teardown) +- Consistent environment across all tests +- Shared Docker container reduces resource usage + +### Parallel Execution + +**NOT RECOMMENDED:** Running E2E tests in parallel is not supported: + +- Only one server instance can bind to port 5000 +- Smee client can only proxy to one server +- GitHub webhooks are serialized by nature +- Tests may interfere with each other + +**Run tests sequentially:** + +```bash +# CORRECT - sequential execution +uv run --group tests pytest webhook_server/tests/e2e/ -v -m e2e + +# WRONG - parallel execution will fail +uv run --group tests pytest webhook_server/tests/e2e/ -v -m e2e -n auto +``` + +### Test Isolation + +While infrastructure is shared, tests should be isolated: + +- Create unique PRs/branches per test +- Clean up created resources after test +- Use unique identifiers in test data +- Don't rely on specific GitHub state + +## Security Considerations + +### Smee.io Webhook Visibility + +**WARNING:** Webhooks proxied through smee.io are publicly visible. + +- Anyone with your smee URL can view webhook payloads +- Webhook payloads contain repository data, user information +- Each test run should use a unique smee channel + +**For sensitive testing:** + +1. Use a fresh smee channel per session (https://smee.io/) +2. Don't use smee.io - set up private webhook relay +3. Use test repositories with no sensitive data + +### GitHub Authentication + +- Tests use your personal GitHub authentication (`gh auth`) +- Tests run with your GitHub permissions +- Tests interact with real GitHub repositories +- Be careful with destructive operations + +### Log Viewer Access + +**CRITICAL:** The webhook server log viewer (`/logs/*`) has NO authentication. 
+ +- Deploy only on trusted networks (VPN, localhost) +- Never expose to public internet +- Logs contain sensitive data (tokens, webhook payloads) +- Use reverse proxy with authentication for external access + +## File Structure + +``` +webhook_server/tests/e2e/ +├── __init__.py # Package marker +├── conftest.py # Session fixtures (server_envs, e2e_server) +├── server_utils.py # Utility functions (smee, docker-compose, health) +├── test_pull_request_flow.py # PR workflow tests +├── test_issue_comment_commands.py # Comment command tests (/verify-owners, etc.) +├── test_check_runs.py # Check run and status tests +├── test_owners_discovery.py # OWNERS file discovery tests +├── test_reviews.py # PR review and approval tests +└── README.md # This file +``` + +## CI/CD Integration + +### GitHub Actions Example + +```yaml +name: E2E Tests + +on: + push: + branches: [main] + pull_request: + branches: [main] + +jobs: + e2e-tests: + runs-on: ubuntu-latest + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set up Docker + uses: docker/setup-buildx-action@v3 + + - name: Install Node.js + uses: actions/setup-node@v4 + with: + node-version: '20' + + - name: Install smee-client + run: npm install -g smee-client + + - name: Install GitHub CLI + run: | + # Ubuntu includes gh in apt repositories + sudo apt-get update + sudo apt-get install -y gh + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.12' + + - name: Install uv + run: pip install uv + + - name: Install dependencies + run: uv sync + + - name: Create .dev/.env + run: | + mkdir -p .dev + cat > .dev/.env << EOF + SERVER_PORT=5000 + SMEE_URL=${{ secrets.SMEE_URL }} + TEST_REPO=${{ secrets.TEST_REPO }} + DOCKER_COMPOSE_FILE=.dev/docker-compose.yaml + TZ=UTC + EOF + + - name: Authenticate GitHub CLI + run: | + echo "${{ secrets.GH_TOKEN }}" | gh auth login --with-token + + - name: Run E2E tests + run: uv run --group tests pytest webhook_server/tests/e2e/ -v -m e2e +``` 
+ +**Required secrets:** +- `SMEE_URL`: Your smee.io channel URL +- `GH_TOKEN`: GitHub personal access token for `gh` CLI + +## Related Documentation + +- [Main test suite documentation](../README.md) +- [Docker Compose configuration](../../../.dev/docker-compose.yaml) +- [Smee.io documentation](https://smee.io/) +- [GitHub CLI documentation](https://cli.github.com/manual/) +- [pytest fixtures documentation](https://docs.pytest.org/en/stable/fixture.html) +- [pytest markers documentation](https://docs.pytest.org/en/stable/example/markers.html) + +## Quick Reference + +### Commands Cheat Sheet + +```bash +# Run all E2E tests +uv run --group tests pytest webhook_server/tests/e2e/ -v -m e2e + +# Run specific test file +uv run --group tests pytest webhook_server/tests/e2e/test_pull_request_flow.py -v -m e2e + +# Run specific test +uv run --group tests pytest webhook_server/tests/e2e/test_file.py::test_name -v -m e2e + +# Check Docker container status +docker compose --file .dev/docker-compose.yaml ps + +# View Docker container logs +docker compose --file .dev/docker-compose.yaml logs + +# Restart Docker container +docker compose --file .dev/docker-compose.yaml restart + +# Stop infrastructure manually +docker compose --file .dev/docker-compose.yaml down + +# Verify gh authentication +gh auth status + +# Get new smee channel +# Visit https://smee.io/ and click "Start a new channel" +``` + +### Required .dev/.env Template + +```bash +# .dev/.env +SERVER_PORT=5000 +SMEE_URL=https://smee.io/webhook_server +TEST_REPO=owner/repo-name +DOCKER_COMPOSE_FILE=.dev/docker-compose.yaml +TZ=America/New_York +``` + +### Pytest Marker Requirement + +```python +# REQUIRED - all E2E tests must have this decorator +@pytest.mark.e2e +def test_something(e2e_server): + pass +``` diff --git a/webhook_server/tests/e2e/__init__.py b/webhook_server/tests/e2e/__init__.py new file mode 100644 index 00000000..a55e9745 --- /dev/null +++ b/webhook_server/tests/e2e/__init__.py @@ -0,0 +1 @@ 
+"""End-to-end tests for GitHub webhook server.""" diff --git a/webhook_server/tests/e2e/conftest.py b/webhook_server/tests/e2e/conftest.py new file mode 100644 index 00000000..c897e056 --- /dev/null +++ b/webhook_server/tests/e2e/conftest.py @@ -0,0 +1,388 @@ +"""Pytest configuration for E2E tests. + +This module provides session-scoped fixtures for end-to-end testing of the GitHub webhook server. +The fixture manages the testing infrastructure including: +- Environment configuration from .dev/.env +- Smee webhook proxy lifecycle +- Docker Compose container lifecycle +- Server health monitoring +- Proper cleanup on test completion/failure +""" + +import os +import subprocess +from collections.abc import Generator +from pathlib import Path + +import pytest +import shortuuid +from dotenv import load_dotenv +from simple_logger.logger import get_logger + +from webhook_server.tests.e2e.helpers import cleanup_pr, create_pr, delete_branch +from webhook_server.tests.e2e.server_utils import ( + E2EInfrastructureError, + start_docker_compose, + start_smee_client, + stop_docker_compose, + stop_smee_client, + wait_for_container_health, +) + +# Configure logging for E2E tests +logger = get_logger(name="e2e-tests") + + +@pytest.fixture(scope="session") +def server_envs() -> dict[str, str]: + """Load and validate environment variables for E2E tests. + + This fixture: + 1. Checks if .dev/.env file exists (fails if missing) + 2. Loads environment variables from .dev/.env + 3. Validates required variables (SERVER_PORT, SMEE_URL, TEST_REPO, DOCKER_COMPOSE_FILE) + 4. 
Returns validated environment variables + + Returns: + dict with keys: + - server_port: Local server port (e.g., "19876") + - smee_url: Smee.io webhook proxy URL + - test_repo: Test repository name (owner/repo-name) + - project_root: Project root directory path + - docker_compose_file: Path to docker-compose.yaml + + Raises: + E2EInfrastructureError: If .dev/.env file does not exist or required variables are missing + """ + project_root = Path(__file__).parent.parent.parent.parent + env_file = project_root / ".dev" / ".env" + + if not env_file.exists(): + raise E2EInfrastructureError( + f"Required .dev/.env file not found at {env_file}\n" + "Create .dev/.env with:\n" + " SERVER_PORT=19876\n" + " SMEE_URL=https://smee.io/YOUR_CHANNEL\n" + " TEST_REPO=owner/repo-name\n" + " DOCKER_COMPOSE_FILE=.dev/docker-compose.yaml" + ) + + load_dotenv(dotenv_path=env_file) + logger.info(f"Loaded environment variables from {env_file}") + + server_port = os.environ.get("SERVER_PORT") + smee_url = os.environ.get("SMEE_URL") + test_repo = os.environ.get("TEST_REPO") + docker_compose_file = os.environ.get("DOCKER_COMPOSE_FILE") + + if not server_port: + raise E2EInfrastructureError( + "SERVER_PORT environment variable is required. Add to .dev/.env: SERVER_PORT=19876" + ) + + if not smee_url: + raise E2EInfrastructureError( + "SMEE_URL environment variable is required. Add to .dev/.env: SMEE_URL=https://smee.io/YOUR_CHANNEL" + ) + + if not test_repo: + raise E2EInfrastructureError( + "TEST_REPO environment variable is required. Add to .dev/.env: TEST_REPO=owner/repo-name" + ) + + if not docker_compose_file: + raise E2EInfrastructureError( + "DOCKER_COMPOSE_FILE environment variable is required. 
" + "Add to .dev/.env: DOCKER_COMPOSE_FILE=.dev/docker-compose.yaml" + ) + + # Resolve docker-compose file path relative to project root if not absolute + compose_path = Path(docker_compose_file) + if not compose_path.is_absolute(): + compose_path = project_root / docker_compose_file + + if not compose_path.exists(): + raise E2EInfrastructureError( + f"Docker compose file not found at {compose_path}. Check DOCKER_COMPOSE_FILE in .dev/.env" + ) + + return { + "server_port": server_port, + "smee_url": smee_url, + "test_repo": test_repo, + "project_root": str(project_root), + "docker_compose_file": str(compose_path), + } + + +@pytest.fixture(scope="session") +def github_webhook_cleanup(server_envs: dict[str, str]) -> Generator[None, None, None]: + """Manages GitHub webhook lifecycle (session-scoped). + + This fixture ensures the GitHub webhook is properly cleaned up after all tests complete. + + Args: + server_envs: Validated environment variables + + Yields: + None + + Cleanup: + Removes webhook from GitHub repository after all tests complete + """ + # Setup: Nothing needed - webhook is configured in repository settings manually + yield + + # Cleanup: Remove webhook after all tests + test_repo = server_envs["test_repo"] + smee_url = server_envs["smee_url"] + + logger.info(f"Cleaning up GitHub webhook for {test_repo}") + + # Get webhook ID for the smee URL + result = subprocess.run( + ["gh", "api", f"repos/{test_repo}/hooks", "--jq", f'.[] | select(.config.url == "{smee_url}") | .id'], + capture_output=True, + text=True, + check=True, + ) + + webhook_id = result.stdout.strip() + if webhook_id: + logger.info(f"Removing webhook {webhook_id} ({smee_url})") + subprocess.run( + ["gh", "api", "-X", "DELETE", f"repos/{test_repo}/hooks/{webhook_id}"], + capture_output=True, + text=True, + check=True, + ) + logger.info("GitHub webhook removed successfully") + else: + logger.info(f"No webhook found with URL {smee_url} - nothing to clean up") + + 
+@pytest.fixture(scope="session") +def e2e_server(server_envs: dict[str, str], github_webhook_cleanup: None) -> Generator[None, None, None]: + """Session-scoped fixture that manages E2E testing infrastructure. + + This fixture manages the complete E2E testing infrastructure: + 1. Starts smee client to proxy webhooks + 2. Starts docker-compose container for the webhook server + 3. Waits for server to be healthy + 4. Yields control to tests + 5. Performs cleanup (stops docker-compose + smee) + + Args: + server_envs: Fixture that provides validated environment variables + + Yields: + None - tests interact with GitHub directly, not the server + + Raises: + E2EInfrastructureError: If setup or teardown fails + """ + # Get environment variables from fixture + server_port = server_envs["server_port"] + smee_url = server_envs["smee_url"] + project_root = server_envs["project_root"] + docker_compose_file = server_envs["docker_compose_file"] + + logger.info(f"Starting E2E infrastructure on port {server_port}") + + # Step 1: Start smee client (BEFORE docker-compose) + smee_process = start_smee_client(server_port=server_port, smee_url=smee_url) + + # Step 2: Start docker-compose container + start_docker_compose(docker_compose_file=docker_compose_file, project_root=project_root) + + # Step 3: Wait for docker container health check + wait_for_container_health( + docker_compose_file=docker_compose_file, + project_root=project_root, + container_name="github-webhook-server-e2e", + timeout=60, + ) + + # Step 4: Yield control to tests + logger.info("E2E infrastructure ready for testing") + yield + + # Step 5: Cleanup (pytest handles this automatically after yield) + logger.info("Cleaning up E2E infrastructure...") + + # Stop smee client + stop_smee_client(smee_process) + + # Stop docker-compose + stop_docker_compose(docker_compose_file=docker_compose_file, project_root=project_root) + + logger.info("E2E infrastructure cleanup complete") + + +@pytest.fixture(scope="session") +def 
test_repository_name(server_envs: dict[str, str]) -> str: + """Provides the test repository name from environment. + + Args: + server_envs: Validated environment variables + + Returns: + str: Test repository in format owner/repo-name + """ + return server_envs["test_repo"] + + +@pytest.fixture(scope="session") +def cloned_test_repo( + tmp_path_factory: pytest.TempPathFactory, test_repository_name: str +) -> Generator[Path, None, None]: + """Clone test repository to temporary directory (session-scoped). + + Args: + tmp_path_factory: Pytest temporary path factory for session-scoped temp dirs + test_repository_name: Test repository name from environment + + Yields: + Path: Path to cloned repository + + This fixture clones the test repository once per session and provides the path. + """ + repo_dir = tmp_path_factory.mktemp("e2e-repos") / "test-repo" + logger.info(f"Cloning {test_repository_name} to {repo_dir}") + + # Use SSH URL to avoid authentication prompts + ssh_url = f"git@github.com:{test_repository_name}.git" + subprocess.run( + ["git", "clone", ssh_url, str(repo_dir)], + capture_output=True, + text=True, + check=True, + ) + + logger.info(f"Repository cloned to {repo_dir}") + yield repo_dir + + +@pytest.fixture(scope="class") +def branch_for_tests(cloned_test_repo: Path, test_repository_name: str) -> Generator[str, None, None]: + """Provides a test branch for the current test. + + Args: + cloned_test_repo: Path to cloned repository + test_repository_name: Test repository name from environment + + Yields: + str: Branch name + + This fixture creates a branch and cleans it up after the test. 
+ """ + # Reset repository to clean state before creating branch + logger.info("Resetting test repository to clean state before creating branch") + subprocess.run( + ["git", "reset", "--hard", "HEAD"], + cwd=cloned_test_repo, + capture_output=True, + text=True, + check=True, + ) + subprocess.run( + ["git", "clean", "-fd"], + cwd=cloned_test_repo, + capture_output=True, + text=True, + check=True, + ) + subprocess.run( + ["git", "checkout", "main"], + cwd=cloned_test_repo, + capture_output=True, + text=True, + check=True, + ) + subprocess.run( + ["git", "pull", "origin", "main"], + cwd=cloned_test_repo, + capture_output=True, + text=True, + check=True, + ) + + branch_name = f"test-pr-{shortuuid.uuid()}" + logger.info(f"Setting up test branch: {branch_name}") + + # Create and checkout new branch from main + subprocess.run( + ["git", "checkout", "-b", branch_name, "main"], + cwd=cloned_test_repo, + capture_output=True, + text=True, + check=True, + ) + + yield branch_name + + logger.info(f"Cleaning up test branch: {branch_name}") + delete_branch(branch_name, test_repository_name) + + +@pytest.fixture(scope="class") +def pr_for_tests( + cloned_test_repo: Path, branch_for_tests: str, test_repository_name: str +) -> Generator[str, None, None]: + """Provides a test PR for the current test. + + Args: + cloned_test_repo: Path to cloned repository + branch_for_tests: Branch name from branch_for_tests fixture + test_repository_name: Test repository name from environment + + Yields: + str: PR number + + This fixture creates a PR and cleans it up after the test. 
+ """ + # Add commit to branch using local git + logger.info(f"Adding commit to branch: {branch_for_tests}") + test_file = cloned_test_repo / "README.md" + test_file.write_text("# Test PR\n\nThis is an automated E2E test PR.\n") + + subprocess.run( + ["git", "add", "README.md"], + cwd=cloned_test_repo, + capture_output=True, + text=True, + check=True, + ) + + subprocess.run( + ["git", "commit", "-m", "test: automated E2E test commit"], + cwd=cloned_test_repo, + capture_output=True, + text=True, + check=True, + ) + + # Push branch to remote + logger.info(f"Pushing branch '{branch_for_tests}' to remote") + subprocess.run( + ["git", "push", "-u", "origin", branch_for_tests], + cwd=cloned_test_repo, + capture_output=True, + text=True, + check=True, + ) + + # Create PR with conventional-commit title + logger.info(f"Creating PR from branch: {branch_for_tests}") + pr_number = create_pr( + title="test: automated E2E test PR", + branch=branch_for_tests, + body="This PR was created by an automated E2E test.", + test_repo=test_repository_name, + ) + logger.info(f"Created PR #{pr_number}") + + yield pr_number + + logger.info(f"Cleaning up PR #{pr_number}") + cleanup_pr(pr_number, branch_for_tests, test_repository_name) diff --git a/webhook_server/tests/e2e/helpers.py b/webhook_server/tests/e2e/helpers.py new file mode 100644 index 00000000..28cb193f --- /dev/null +++ b/webhook_server/tests/e2e/helpers.py @@ -0,0 +1,351 @@ +"""Helper functions for E2E testing with myk-org/for-testing-only repository. + +This module provides helper functions for E2E tests: +- Branch management (delete) +- Pull request lifecycle (create, comment, close) +- Check run monitoring and waiting +- Label monitoring and waiting +- Configuration toggling in test repository + +All operations use `gh` CLI for GitHub API calls and local git for repository operations. 
+""" + +import json +import re +import subprocess +from typing import Any + +from simple_logger.logger import get_logger +from timeout_sampler import TimeoutSampler + +# Check runs that should PASS +PASSING_CHECK_RUNS = [ + "build-container", + "pre-commit", + "python-module-install", + "tox", +] + +# Check runs that should FAIL (not mergeable without approval) +FAILING_CHECK_RUNS = [ + "can-be-merged", +] + +# Check runs that should be QUEUED (waiting) +QUEUED_CHECK_RUNS = [ + "verified", +] + +# Configure logging for E2E test helpers +logger = get_logger(name="e2e-test-helpers") + + +def delete_branch(branch_name: str, test_repo: str) -> None: + """Delete a branch using GitHub API. + + This is a cleanup function - logs errors but doesn't raise exceptions. + + Args: + branch_name: Name of the branch to delete + test_repo: Test repository in format owner/repo-name + """ + logger.info(f"Deleting branch '{branch_name}' from {test_repo}") + + try: + subprocess.run( + ["gh", "api", "-X", "DELETE", f"repos/{test_repo}/git/refs/heads/{branch_name}"], + capture_output=True, + text=True, + check=True, + ) + logger.info(f"Branch '{branch_name}' deleted successfully") + except subprocess.CalledProcessError as ex: + logger.error(f"Failed to delete branch '{branch_name}': {ex.stderr}") + + +def create_pr(title: str, branch: str, body: str = "", base: str = "main", test_repo: str = "") -> str: + """Create a pull request from branch to base. 
+ + Args: + title: PR title + branch: Head branch for the PR + body: PR body/description (default: "") + base: Base branch to merge into (default: "main") + test_repo: Test repository in format owner/repo-name + + Returns: + str: PR number extracted from the created PR URL + + Raises: + subprocess.CalledProcessError: If PR creation fails + """ + logger.info(f"Creating PR: '{title}' ({branch} -> {base}) in {test_repo}") + + result = subprocess.run( + [ + "gh", + "pr", + "create", + "--repo", + test_repo, + "--title", + title, + "--body", + body, + "--head", + branch, + "--base", + base, + ], + capture_output=True, + text=True, + check=True, + ) + + # Extract PR number from URL (e.g., https://github.com/owner/repo/pull/123) + pr_url = result.stdout.strip() + match = re.search(r"/pull/(\d+)$", pr_url) + if not match: + raise ValueError(f"Failed to extract PR number from URL: {pr_url}") + + pr_number = match.group(1) + logger.info(f"PR #{pr_number} created successfully: {pr_url}") + return pr_number + + +def get_check_runs(pr_number: str, test_repo: str) -> list[dict[str, Any]]: + """Get all check runs for a pull request. 
+ + Args: + pr_number: PR number to get check runs for + test_repo: Test repository in format owner/repo-name + + Returns: + list[dict[str, Any]]: List of check run dictionaries from GitHub API + + Raises: + subprocess.CalledProcessError: If check run retrieval fails + """ + logger.debug(f"Getting check runs for PR #{pr_number}") + + # Get PR head SHA + result = subprocess.run( + ["gh", "pr", "view", pr_number, "--repo", test_repo, "--json", "headRefOid", "--jq", ".headRefOid"], + capture_output=True, + text=True, + check=True, + ) + head_sha = result.stdout.strip() + logger.debug(f"PR #{pr_number} head SHA: {head_sha}") + + # Get check runs for the commit + result = subprocess.run( + ["gh", "api", f"repos/{test_repo}/commits/{head_sha}/check-runs", "--jq", ".check_runs"], + capture_output=True, + text=True, + check=True, + ) + + check_runs: list[dict[str, Any]] = json.loads(result.stdout) + logger.debug(f"Found {len(check_runs)} check runs for PR #{pr_number}") + return check_runs + + +def wait_for_check_runs( + pr_number: str, + test_repo: str, + expected_checks: list[str], + timeout: int = 180, +) -> None: + """Wait for check runs to complete using TimeoutSampler. 
+ + Args: + pr_number: PR number to monitor + test_repo: Test repository in format owner/repo-name + expected_checks: List of expected check run names + timeout: Timeout in seconds (default: 180) + + Raises: + TimeoutExpiredError: If check runs don't complete within timeout + """ + checks_to_wait = expected_checks + logger.info(f"Waiting for check runs on PR #{pr_number}: {checks_to_wait}") + + def _check_completed() -> bool: + """Check if all expected check runs are completed.""" + check_runs = get_check_runs(pr_number, test_repo) + + # Build map of check run name -> status + check_status: dict[str, str] = {} + for check_run in check_runs: + name = check_run.get("name", "") + status = check_run.get("status", "") + check_status[name] = status + + # Check if all expected checks are completed + for check_name in checks_to_wait: + if check_name not in check_status: + logger.debug(f"Check run '{check_name}' not found yet") + return False + if check_status[check_name] != "completed": + logger.debug(f"Check run '{check_name}' status: {check_status[check_name]}") + return False + + logger.info(f"All expected check runs completed for PR #{pr_number}") + return True + + # Use TimeoutSampler to wait for completion + for sample in TimeoutSampler( + wait_timeout=timeout, + sleep=5, + func=_check_completed, + ): + if sample: + break + + +def get_pr_labels(pr_number: str, test_repo: str) -> list[str]: + """Get all labels for a pull request. 
+ + Args: + pr_number: PR number to get labels for + test_repo: Test repository in format owner/repo-name + + Returns: + list[str]: List of label names + + Raises: + subprocess.CalledProcessError: If label retrieval fails + """ + logger.debug(f"Getting labels for PR #{pr_number}") + + result = subprocess.run( + ["gh", "pr", "view", pr_number, "--repo", test_repo, "--json", "labels", "--jq", ".labels[].name"], + capture_output=True, + text=True, + check=True, + ) + + labels = result.stdout.strip().split("\n") if result.stdout.strip() else [] + logger.debug(f"Found {len(labels)} labels for PR #{pr_number}: {labels}") + return labels + + +def wait_for_labels( + pr_number: str, + test_repo: str, + expected_labels: list[str], + timeout: int = 180, +) -> None: + """Wait for expected labels to be added to PR using TimeoutSampler. + + Args: + pr_number: PR number to monitor + test_repo: Test repository in format owner/repo-name + expected_labels: List of expected label names + timeout: Timeout in seconds (default: 180) + + Raises: + TimeoutExpiredError: If labels are not added within timeout + """ + logger.info(f"Waiting for labels on PR #{pr_number}: {expected_labels}") + + def _check_labels_present() -> bool: + """Check if all expected labels are present.""" + current_labels = get_pr_labels(pr_number, test_repo) + + # Check if all expected labels exist + for label in expected_labels: + if label not in current_labels: + logger.debug(f"Label '{label}' not found yet") + return False + + logger.info(f"All expected labels present on PR #{pr_number}") + return True + + # Use TimeoutSampler to wait for labels + for sample in TimeoutSampler( + wait_timeout=timeout, + sleep=5, + func=_check_labels_present, + ): + if sample: + break + + +def close_pr(pr_number: str, test_repo: str) -> None: + """Close a pull request without merging. + + This is a cleanup function - logs errors but doesn't raise exceptions. 
+
+    Args:
+        pr_number: PR number to close
+        test_repo: Test repository in format owner/repo-name
+    """
+    logger.info(f"Closing PR #{pr_number}")
+
+    try:
+        subprocess.run(
+            ["gh", "pr", "close", pr_number, "--repo", test_repo],
+            capture_output=True,
+            text=True,
+            check=True,
+        )
+        logger.info(f"PR #{pr_number} closed successfully")
+    except subprocess.CalledProcessError as ex:
+        logger.exception(f"Failed to close PR #{pr_number}: {ex.stderr}")
+
+
+def cleanup_pr(pr_number: str, branch: str, test_repo: str) -> None:
+    """Cleanup helper: close PR and delete branch.
+
+    This is a cleanup function - never raises exceptions.
+
+    Args:
+        pr_number: PR number to close
+        branch: Branch to delete
+        test_repo: Test repository in format owner/repo-name
+    """
+    logger.info(f"Cleaning up PR #{pr_number} and branch '{branch}'")
+    close_pr(pr_number, test_repo)
+    delete_branch(branch, test_repo)
+    logger.info(f"Cleanup complete for PR #{pr_number}")
+
+
+def get_repo_issues(test_repo: str) -> list[dict[str, Any]]:
+    """Get all issues in the test repository.
+
+    Args:
+        test_repo: Test repository in format owner/repo-name
+
+    Returns:
+        list[dict[str, Any]]: List of issue dictionaries as returned by the GitHub API
+
+    Raises:
+        subprocess.CalledProcessError: If API call fails
+
+    Note:
+        The GitHub issues API lists open issues by default, and its responses
+        also include pull requests; callers should filter if they need
+        issues only or other states.
+    """
+    logger.debug(f"Getting {test_repo} issues")
+
+    # Get all issues in the repository
+    result = subprocess.run(
+        ["gh", "api", f"repos/{test_repo}/issues", "--jq", ".[]"],
+        capture_output=True,
+        text=True,
+        check=True,
+    )
+
+    if not result.stdout.strip():
+        return []
+
+    # --jq ".[]" emits one JSON object per line (NDJSON), so each line is
+    # parsed independently; malformed lines are skipped rather than fatal.
+    all_issues = []
+    for line in result.stdout.strip().split("\n"):
+        if line.strip():
+            try:
+                issue = json.loads(line)
+                all_issues.append(issue)
+            except json.JSONDecodeError:
+                continue
+
+    logger.debug(f"Found {len(all_issues)} issues for {test_repo}")
+    return all_issues
diff --git a/webhook_server/tests/e2e/server_utils.py b/webhook_server/tests/e2e/server_utils.py
new file mode 100644
index 00000000..8b1432cc
--- /dev/null
+++ b/webhook_server/tests/e2e/server_utils.py
@@ -0,0 +1,215 @@
+"""Utility functions for E2E testing infrastructure.
+
+This module provides utility functions for managing the E2E testing infrastructure:
+- Smee client lifecycle management
+- Docker Compose container lifecycle management
+- Container health monitoring
+"""
+
+import json
+import subprocess
+
+from simple_logger.logger import get_logger
+from timeout_sampler import TimeoutSampler
+
+# Configure logging for E2E server utilities
+logger = get_logger(name="e2e-server-utils")
+
+
+class E2EInfrastructureError(Exception):
+    """Raised when E2E infrastructure setup or teardown fails."""
+
+
+def start_smee_client(server_port: str, smee_url: str) -> subprocess.Popen:
+    """Start smee client to proxy webhooks from smee.io to local server.
+
+    Args:
+        server_port: Local server port to forward webhooks to (e.g., "5000")
+        smee_url: Smee.io webhook proxy URL to listen to
+
+    Returns:
+        subprocess.Popen: The running smee client process
+
+    Raises:
+        E2EInfrastructureError: If smee client is not found or fails to start
+    """
+    logger.info(f"Starting smee client: {smee_url} -> localhost:{server_port}")
+    try:
+        smee_process = subprocess.Popen(
+            ["smee", "-u", smee_url, "-p", server_port, "-P", "/webhook_server"],
+            # DEVNULL instead of PIPE: nothing ever reads these streams, and an
+            # unread PIPE can fill its OS buffer and block the long-running
+            # smee process mid-session.
+            stdout=subprocess.DEVNULL,
+            stderr=subprocess.DEVNULL,
+            text=True,
+        )
+        logger.info(f"Smee client started (PID: {smee_process.pid})")
+        return smee_process
+    except FileNotFoundError:
+        raise E2EInfrastructureError("smee client not found. Install with: npm install -g smee-client") from None
+
+
+def stop_smee_client(smee_process: subprocess.Popen | None) -> None:
+    """Stop smee client gracefully with timeout and fallback to kill.
+
+    Args:
+        smee_process: The running smee client process, or None if not started
+    """
+    if not smee_process:
+        return
+
+    logger.info(f"Stopping smee client (PID: {smee_process.pid})...")
+    try:
+        smee_process.terminate()
+        try:
+            smee_process.wait(timeout=5)
+            logger.info("Smee client stopped successfully")
+        except subprocess.TimeoutExpired:
+            logger.warning("Smee client did not terminate, killing process...")
+            smee_process.kill()
+            smee_process.wait()
+            logger.info("Smee client killed")
+    except Exception:
+        logger.exception("Error stopping smee client")
+
+
+def start_docker_compose(docker_compose_file: str, project_root: str) -> None:
+    """Start docker-compose container for the webhook server.
+
+    Args:
+        docker_compose_file: Path to docker-compose.yaml file
+        project_root: Project root directory path
+
+    Raises:
+        E2EInfrastructureError: If docker-compose fails to start
+    """
+    logger.info("Building and starting docker-compose container...")
+    # Rebuild the image before starting so tests always run the current code
+    # (the --build flag was previously passed twice by mistake).
+    result = subprocess.run(
+        ["docker", "compose", "--file", docker_compose_file, "up", "-d", "--build"],
+        cwd=project_root,
+        capture_output=True,
+        text=True,
+        check=False,
+    )
+
+    if result.returncode != 0:
+        raise E2EInfrastructureError(
+            f"Failed to start docker-compose:\nSTDOUT: {result.stdout}\nSTDERR: {result.stderr}"
+        )
+
+    logger.info("Docker-compose container started successfully")
+
+
+def stop_docker_compose(docker_compose_file: str, project_root: str) -> None:
+    """Stop docker-compose container gracefully.
+
+    This function logs errors but does not raise exceptions to ensure cleanup completes.
+
+    Args:
+        docker_compose_file: Path to docker-compose.yaml file
+        project_root: Project root directory path
+    """
+    logger.info("Stopping docker-compose container...")
+    try:
+        result = subprocess.run(
+            ["docker", "compose", "--file", docker_compose_file, "down"],
+            cwd=project_root,
+            capture_output=True,
+            text=True,
+            check=False,
+            timeout=30,
+        )
+
+        if result.returncode != 0:
+            logger.error(f"Failed to stop docker-compose:\nSTDOUT: {result.stdout}\nSTDERR: {result.stderr}")
+        else:
+            logger.info("Docker-compose container stopped successfully")
+    except subprocess.TimeoutExpired:
+        logger.error("Docker-compose down command timed out")
+    except Exception:
+        logger.exception("Error stopping docker-compose")
+
+
+def check_container_health(
+    docker_compose_file: str,
+    project_root: str,
+    container_name: str = "github-webhook-server-e2e",
+) -> bool:
+    """Check if webhook server container is healthy.
+
+    Args:
+        docker_compose_file: Path to docker-compose.yaml file
+        project_root: Project root directory path
+        container_name: Name of the container to check (default: "github-webhook-server-e2e")
+
+    Returns:
+        bool: True if container is healthy, False otherwise
+
+    Raises:
+        E2EInfrastructureError: If docker command fails
+    """
+    result = subprocess.run(
+        ["docker", "compose", "--file", docker_compose_file, "ps", "--format", "json"],
+        cwd=project_root,
+        capture_output=True,
+        text=True,
+        check=False,
+    )
+
+    if result.returncode != 0:
+        raise E2EInfrastructureError(
+            f"Failed to check container status:\nSTDOUT: {result.stdout}\nSTDERR: {result.stderr}"
+        )
+
+    # Docker Compose v2 (>= 2.21) emits NDJSON for `ps --format json` -- one
+    # JSON object per line -- while older releases emit a single JSON array.
+    # Parse line-by-line so both formats work; a whole-output json.loads()
+    # would raise JSONDecodeError on NDJSON with multiple containers.
+    containers = []
+    for line in result.stdout.strip().splitlines():
+        if not line.strip():
+            continue
+        parsed = json.loads(line)
+        if isinstance(parsed, list):
+            containers.extend(parsed)
+        else:
+            containers.append(parsed)
+
+    # Find the webhook server container specifically
+    for container in containers:
+        # Check by service name or container name
+        if container.get("Service") == container_name or container.get("Name") == container_name:
+            health = container.get("Health", "")
+            logger.debug(f"Webhook server container health: {health or 'unknown'}")
+            return health == "healthy"
+
+    # Container not found yet
+    logger.debug("Webhook server container not found yet")
+    return False
+
+
+def wait_for_container_health(
+    docker_compose_file: str,
+    project_root: str,
+    container_name: str = "github-webhook-server-e2e",
+    timeout: int = 60,
+) -> None:
+    """Wait for webhook server container to be healthy.
+ + Args: + docker_compose_file: Path to docker-compose.yaml file + project_root: Project root directory path + container_name: Name of the container to check (default: "github-webhook-server-e2e") + timeout: Maximum time to wait in seconds (default: 60) + + Raises: + E2EInfrastructureError: If container does not become healthy within timeout + """ + logger.info("Waiting for container to be healthy (via docker healthcheck)...") + + def _check_health() -> bool: + """Wrapper for check_container_health to use with TimeoutSampler.""" + return check_container_health(docker_compose_file, project_root, container_name) + + try: + for sample in TimeoutSampler( + wait_timeout=timeout, + sleep=2, + func=_check_health, + exceptions_dict={json.JSONDecodeError: []}, + ): + if sample: + logger.info("Webhook server container is healthy") + break + except Exception as ex: + raise E2EInfrastructureError( + f"Webhook server container health check failed: {ex}. Check docker-compose logs for details." + ) from ex diff --git a/webhook_server/tests/e2e/test_pull_request_flow.py b/webhook_server/tests/e2e/test_pull_request_flow.py new file mode 100644 index 00000000..6f4a067a --- /dev/null +++ b/webhook_server/tests/e2e/test_pull_request_flow.py @@ -0,0 +1,191 @@ +"""End-to-end tests for pull request creation workflow. + +This module contains E2E tests that verify the webhook server correctly processes PRs. +Tests ONLY verify webhook server behavior - setup/teardown handled by fixtures. 
+""" + +import subprocess + +import pytest +from simple_logger.logger import get_logger + +from webhook_server.tests.e2e.helpers import ( + FAILING_CHECK_RUNS, + PASSING_CHECK_RUNS, + QUEUED_CHECK_RUNS, + get_check_runs, + get_pr_labels, + get_repo_issues, + wait_for_check_runs, + wait_for_labels, +) + +# Configure logging for E2E PR flow tests +logger = get_logger(name="e2e-test-pr-flow") + +# Expected labels for a basic PR (size/M for minimal test file, branch-main for target branch) +EXPECTED_LABELS = ["size/M", "branch-main"] + + +@pytest.mark.e2e +class TestPullRequestFlow: + """Test class for pull request workflow verification. + + All tests in this class verify different aspects of webhook server behavior + when a PR is created. Each test is independent but uses the same PR fixture. + """ + + def test_labels_added(self, e2e_server: None, pr_for_tests: str, test_repository_name: str) -> None: + """Verify webhook server adds expected labels to PR. + + Expected labels: + - size/M: Based on PR changes size + - branch-main: Target branch label + + Args: + e2e_server: E2E server infrastructure + pr_for_tests: PR number from fixture + test_repository_name: Test repository name from environment + """ + logger.info(f"Waiting for webhook server to add labels to PR #{pr_for_tests}...") + wait_for_labels(pr_for_tests, test_repository_name, expected_labels=EXPECTED_LABELS, timeout=120) + labels = get_pr_labels(pr_for_tests, test_repository_name) + + for expected_label in EXPECTED_LABELS: + assert expected_label in labels, ( + f"Expected label '{expected_label}' not found on PR #{pr_for_tests}. Found labels: {labels}" + ) + + logger.info(f"Labels verified: {EXPECTED_LABELS} found on PR") + + def test_passing_check_runs(self, e2e_server: None, pr_for_tests: str, test_repository_name: str) -> None: + """Verify webhook server creates passing check runs. 
+ + Expected passing check runs: + - build-container: Container build check + - pre-commit: Pre-commit hooks check + - python-module-install: Python module installation check + - tox: Tox tests check + + Args: + e2e_server: E2E server infrastructure + pr_for_tests: PR number from fixture + test_repository_name: Test repository name from environment + """ + logger.info(f"Waiting for passing check runs on PR #{pr_for_tests}...") + wait_for_check_runs(pr_for_tests, test_repository_name, expected_checks=PASSING_CHECK_RUNS, timeout=120) + + check_runs = get_check_runs(pr_for_tests, test_repository_name) + check_run_map = {run["name"]: run for run in check_runs} + + for check_name in PASSING_CHECK_RUNS: + assert check_name in check_run_map, f"Expected check run '{check_name}' not found on PR #{pr_for_tests}" + check_run = check_run_map[check_name] + status = check_run.get("status", "") + conclusion = check_run.get("conclusion", "") + + assert status == "completed", f"Check run '{check_name}' has status '{status}', expected 'completed'" + assert conclusion == "success", ( + f"Check run '{check_name}' completed with conclusion '{conclusion}', expected 'success'" + ) + + logger.info(f"All {len(PASSING_CHECK_RUNS)} passing check runs completed successfully") + + def test_failing_check_runs(self, e2e_server: None, pr_for_tests: str, test_repository_name: str) -> None: + """Verify webhook server creates failing check runs. 
+ + Expected failing check runs: + - can-be-merged: Should fail because PR is not approved yet + + Args: + e2e_server: E2E server infrastructure + pr_for_tests: PR number from fixture + test_repository_name: Test repository name from environment + """ + logger.info(f"Waiting for failing check runs on PR #{pr_for_tests}...") + wait_for_check_runs(pr_for_tests, test_repository_name, expected_checks=FAILING_CHECK_RUNS, timeout=120) + + logger.info(f"Verifying failing check runs on PR #{pr_for_tests}...") + check_runs = get_check_runs(pr_for_tests, test_repository_name) + check_run_map = {run["name"]: run for run in check_runs} + + for check_name in FAILING_CHECK_RUNS: + assert check_name in check_run_map, f"Expected check run '{check_name}' not found on PR #{pr_for_tests}" + check_run = check_run_map[check_name] + status = check_run.get("status", "") + conclusion = check_run.get("conclusion", "") + + assert status == "completed", f"Check run '{check_name}' has status '{status}', expected 'completed'" + assert conclusion == "failure", ( + f"Check run '{check_name}' completed with conclusion '{conclusion}', expected 'failure'" + ) + + logger.info(f"All {len(FAILING_CHECK_RUNS)} check runs failed as expected (not approved/mergeable)") + + def test_queued_check_runs(self, e2e_server: None, pr_for_tests: str, test_repository_name: str) -> None: + """Verify webhook server creates queued check runs. 
+ + Expected queued check runs: + - verified: Should be queued/waiting + + Args: + e2e_server: E2E server infrastructure + pr_for_tests: PR number from fixture + test_repository_name: Test repository name from environment + """ + logger.info(f"Verifying queued check runs on PR #{pr_for_tests}...") + check_runs = get_check_runs(pr_for_tests, test_repository_name) + check_run_map = {run["name"]: run for run in check_runs} + + for check_name in QUEUED_CHECK_RUNS: + assert check_name in check_run_map, f"Expected check run '{check_name}' not found on PR #{pr_for_tests}" + check_run = check_run_map[check_name] + status = check_run.get("status", "") + + assert status == "queued", f"Check run '{check_name}' has status '{status}', expected 'queued'" + + logger.info(f"All {len(QUEUED_CHECK_RUNS)} check runs are queued as expected") + + def test_issue_created(self, e2e_server: None, pr_for_tests: str, test_repository_name: str) -> None: + """Verify webhook server creates an issue for the PR. + + Args: + e2e_server: E2E server infrastructure + pr_for_tests: PR number from fixture + test_repository_name: Test repository name from environment + """ + logger.info(f"Verifying issue was created for PR #{pr_for_tests}...") + for issue in get_repo_issues(test_repository_name): + if issue["body"] == f"[Auto generated]\nNumber: [#{pr_for_tests}]": + logger.info(f"Issue created for PR {pr_for_tests} found") + return + + pytest.fail(f"Expected at least one issue to be created for PR #{pr_for_tests}, but found none") + + def test_welcome_message(self, e2e_server: None, pr_for_tests: str, test_repository_name: str) -> None: + """Verify webhook server posts a welcome message to the PR. + + The welcome message contains a unique identifier string that is used to detect + if the message already exists. This test checks for that identifier. 
+ + Args: + e2e_server: E2E server infrastructure + pr_for_tests: PR number from fixture + test_repository_name: Test repository name from environment + """ + welcome_msg_identifier = "Report bugs in [Issues](https://github.com/myakove/github-webhook-server/issues)" + + logger.info(f"Verifying welcome message on PR #{pr_for_tests}...") + result = subprocess.run( + ["gh", "api", f"repos/{test_repository_name}/issues/{pr_for_tests}/comments", "--jq", ".[].body"], + capture_output=True, + text=True, + check=True, + ) + + assert welcome_msg_identifier in result.stdout, ( + f"Expected welcome message with identifier '{welcome_msg_identifier}' in PR #{pr_for_tests} comments, " + f"but not found. Comments: {result.stdout[:500]}" + ) + + logger.info("Welcome message verified")