diff --git a/webhook_server/libs/github_api.py b/webhook_server/libs/github_api.py index d9381e8b..d4494c44 100644 --- a/webhook_server/libs/github_api.py +++ b/webhook_server/libs/github_api.py @@ -38,11 +38,13 @@ get_repository_github_app_api, ) from webhook_server.utils.helpers import ( + _redact_secrets, format_task_fields, get_api_with_highest_rate_limit, get_apis_and_tokes_from_config, get_github_repo_api, prepare_log_prefix, + run_command, ) @@ -123,6 +125,7 @@ def __init__(self, hook_data: dict[Any, Any], headers: Headers, logger: logging. # Format: /tmp/tmp{random}/github-webhook-{repo_name} # This prevents predictable paths and ensures isolation between concurrent webhook handlers self.clone_repo_dir: str = tempfile.mkdtemp(prefix=f"github-webhook-{self.repository_name}-") + self._repo_cloned: bool = False # Track if repository has been cloned # Initialize auto-verified users from API users self.add_api_users_to_auto_verified_and_merged_users() @@ -166,6 +169,143 @@ async def _get_token_metrics(self) -> str: self.logger.debug(f"{self.log_prefix} Failed to get token metrics: {ex}") return "" + async def _clone_repository( + self, + pull_request: PullRequest | None = None, + checkout_ref: str | None = None, + ) -> None: + """Clone repository for webhook processing with worktrees. + + Clones the repository to self.clone_repo_dir. + Handlers create isolated worktrees from this single clone for their operations. 
+ + Args: + pull_request: PullRequest object (for PR events) + checkout_ref: Git ref to checkout (for push events, e.g., "refs/tags/v11.0.104") + + Raises: + RuntimeError: If clone fails (aborts webhook processing) + """ + # Log start FIRST - even before early returns + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('webhook_processing', 'repo_clone', 'started')} " + "Cloning repository for handler worktrees" + ) + + if self._repo_cloned: + self.logger.debug(f"{self.log_prefix} Repository already cloned") + return + + # Validate that at least one argument is provided + if pull_request is None and not checkout_ref: + self.logger.error( + f"{self.log_prefix} {format_task_fields('webhook_processing', 'repo_clone', 'failed')} " + "Invalid arguments: either pull_request or checkout_ref must be provided" + ) + raise ValueError( + f"{self.log_prefix} _clone_repository() requires either pull_request or checkout_ref to be provided" + ) + + try: + github_token = self.token + clone_url_with_token = self.repository.clone_url.replace("https://", f"https://{github_token}@") + + rc, _, err = await run_command( + command=f"git clone {clone_url_with_token} {self.clone_repo_dir}", + log_prefix=self.log_prefix, + redact_secrets=[github_token], + mask_sensitive=self.mask_sensitive, + ) + + def redact_output(value: str) -> str: + return _redact_secrets(value or "", [github_token], mask_sensitive=self.mask_sensitive) + + if not rc: + redacted_err = redact_output(err) + self.logger.error( + f"{self.log_prefix} {format_task_fields('webhook_processing', 'repo_clone', 'failed')} " + f"Failed to clone repository: {redacted_err}" + ) + raise RuntimeError(f"Failed to clone repository: {redacted_err}") + + # Configure git user + git_cmd = f"git -C {self.clone_repo_dir}" + rc, _, _ = await run_command( + command=f"{git_cmd} config user.name '{self.repository.owner.login}'", + log_prefix=self.log_prefix, + mask_sensitive=self.mask_sensitive, + ) + if not 
rc: + self.logger.warning(f"{self.log_prefix} Failed to configure git user.name") + + rc, _, _ = await run_command( + command=f"{git_cmd} config user.email '{self.repository.owner.login}@users.noreply.github.com'", + log_prefix=self.log_prefix, + mask_sensitive=self.mask_sensitive, + ) + if not rc: + self.logger.warning(f"{self.log_prefix} Failed to configure git user.email") + + # Configure PR fetch to enable origin/pr/* checkouts + rc, _, _ = await run_command( + command=( + f"{git_cmd} config --local --add remote.origin.fetch +refs/pull/*/head:refs/remotes/origin/pr/*" + ), + log_prefix=self.log_prefix, + mask_sensitive=self.mask_sensitive, + ) + if not rc: + self.logger.warning(f"{self.log_prefix} Failed to configure PR fetch refs") + + # Fetch all refs including PRs + rc, _, _ = await run_command( + command=f"{git_cmd} remote update", + log_prefix=self.log_prefix, + mask_sensitive=self.mask_sensitive, + ) + if not rc: + self.logger.warning(f"{self.log_prefix} Failed to fetch remote refs") + + # Determine checkout target + if pull_request: + checkout_target = await asyncio.to_thread(lambda: pull_request.base.ref) + else: + # For push events: "refs/tags/v11.0.104" → "v11.0.104" + # "refs/heads/main" → "main" + # checkout_ref guaranteed to be non-None by validation at function start + assert checkout_ref is not None # mypy type narrowing + checkout_target = checkout_ref.replace("refs/tags/", "").replace("refs/heads/", "") + + # Checkout target branch/tag + rc, _, err = await run_command( + command=f"{git_cmd} checkout {checkout_target}", + log_prefix=self.log_prefix, + mask_sensitive=self.mask_sensitive, + ) + if not rc: + redacted_err = redact_output(err) + self.logger.error( + f"{self.log_prefix} {format_task_fields('webhook_processing', 'repo_clone', 'failed')} " + f"Failed to checkout {checkout_target}: {redacted_err}" + ) + raise RuntimeError(f"Failed to checkout {checkout_target}: {redacted_err}") + + self._repo_cloned = True + self.logger.success( # type: 
ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('webhook_processing', 'repo_clone', 'completed')} " + f"Repository cloned to {self.clone_repo_dir} (ref: {checkout_target})" + ) + + except RuntimeError: + # Re-raise RuntimeError unchanged to avoid double-wrapping + raise + except Exception as ex: + self.logger.exception( + f"{self.log_prefix} {format_task_fields('webhook_processing', 'repo_clone', 'failed')} " + f"Exception during repository clone: {ex}" + ) + raise RuntimeError(f"Repository clone failed: {ex}") from ex + async def process(self) -> Any: event_log: str = f"Event type: {self.github_event}. event ID: {self.x_github_delivery}" self.logger.step( # type: ignore[attr-defined] @@ -192,6 +332,10 @@ async def process(self) -> Any: f"Processing push event", ) self.logger.debug(f"{self.log_prefix} {event_log}") + + # Clone repository for push operations (PyPI uploads, container builds) + await self._clone_repository(checkout_ref=self.hook_data["ref"]) + await PushHandler(github_webhook=self).process_push_webhook_data() token_metrics = await self._get_token_metrics() self.logger.success( # type: ignore[attr-defined] @@ -242,6 +386,9 @@ async def process(self) -> Any: self.parent_committer = pull_request.user.login self.last_committer = getattr(self.last_commit.committer, "login", self.parent_committer) + # Clone repository for local file processing (OWNERS, changed files) + await self._clone_repository(pull_request=pull_request) + if self.github_event == "issue_comment": self.logger.step( # type: ignore[attr-defined] f"{self.log_prefix} {format_task_fields('webhook_processing', 'webhook_routing', 'processing')} " @@ -444,6 +591,8 @@ def _repo_data_from_config(self, repository_config: dict[str, Any]) -> None: value="create-issue-for-new-pr", return_on_none=global_create_issue_for_new_pr, extra_dict=repository_config ) + self.mask_sensitive = self.config.get_value("mask-sensitive-data", return_on_none=True) + async def get_pull_request(self, number: 
int | None = None) -> PullRequest | None: if number: self.logger.debug(f"{self.log_prefix} Attempting to get PR by number: {number}") @@ -552,12 +701,12 @@ def _current_pull_request_supported_retest(self) -> list[str]: return current_pull_request_supported_retest def __del__(self) -> None: - """Cleanup temporary clone directory on object destruction. + """Remove the shared clone directory when the webhook object is destroyed. - This ensures the base temp directory created by tempfile.mkdtemp() is removed - when the webhook handler is destroyed, preventing temp directory leaks. - The subdirectories (created with -uuid4() suffix) are cleaned up by - _prepare_cloned_repo_dir context manager in handlers. + GithubWebhook now creates a single clone via tempfile.mkdtemp() and individual + handlers operate on worktrees created by git_worktree_checkout, which already + clean up their own directories. Only the base clone directory must be removed + here to prevent accumulating stale repositories on disk. 
""" if hasattr(self, "clone_repo_dir") and os.path.exists(self.clone_repo_dir): try: diff --git a/webhook_server/libs/handlers/check_run_handler.py b/webhook_server/libs/handlers/check_run_handler.py index a95b1559..aae3a517 100644 --- a/webhook_server/libs/handlers/check_run_handler.py +++ b/webhook_server/libs/handlers/check_run_handler.py @@ -36,6 +36,8 @@ def __init__(self, github_webhook: "GithubWebhook", owners_file_handler: OwnersF self.logger = self.github_webhook.logger self.log_prefix: str = self.github_webhook.log_prefix self.repository: Repository = self.github_webhook.repository + self._repository_private: bool | None = None + self._branch_required_status_checks: list[str] | None = None if isinstance(self.owners_file_handler, OwnersFileHandler): self.labels_handler = LabelsHandler( github_webhook=self.github_webhook, owners_file_handler=self.owners_file_handler @@ -393,17 +395,26 @@ async def all_required_status_checks(self, pull_request: PullRequest) -> list[st return _all_required_status_checks async def get_branch_required_status_checks(self, pull_request: PullRequest) -> list[str]: - if self.repository.private: + # Check if private repo first (cache to avoid repeated API calls) + if self._repository_private is None: + self._repository_private = self.repository.private + + if self._repository_private: self.logger.info( f"{self.log_prefix} Repository is private, skipping getting branch protection required status checks" ) return [] + # Cache branch protection settings in instance variable to avoid repeated API calls + if self._branch_required_status_checks is not None: + return self._branch_required_status_checks + pull_request_branch = await asyncio.to_thread(self.repository.get_branch, pull_request.base.ref) branch_protection = await asyncio.to_thread(pull_request_branch.get_protection) branch_required_status_checks = branch_protection.required_status_checks.contexts self.logger.debug(f"{self.log_prefix} branch_required_status_checks: 
{branch_required_status_checks}") - return branch_required_status_checks + self._branch_required_status_checks = branch_required_status_checks + return self._branch_required_status_checks async def required_check_in_progress( self, pull_request: PullRequest, last_commit_check_runs: list[CheckRun] diff --git a/webhook_server/libs/handlers/owners_files_handler.py b/webhook_server/libs/handlers/owners_files_handler.py index 79a37026..5cf3c4e1 100644 --- a/webhook_server/libs/handlers/owners_files_handler.py +++ b/webhook_server/libs/handlers/owners_files_handler.py @@ -1,11 +1,11 @@ import asyncio +import shlex from collections.abc import Coroutine from pathlib import Path from typing import TYPE_CHECKING, Any import yaml from asyncstdlib import functools -from github.ContentFile import ContentFile from github.GithubException import GithubException from github.NamedUser import NamedUser from github.PaginatedList import PaginatedList @@ -14,7 +14,7 @@ from github.Repository import Repository from webhook_server.utils.constants import COMMAND_ADD_ALLOWED_USER_STR, ROOT_APPROVERS_KEY -from webhook_server.utils.helpers import format_task_fields +from webhook_server.utils.helpers import format_task_fields, run_command if TYPE_CHECKING: from webhook_server.libs.github_api import GithubWebhook @@ -33,10 +33,11 @@ async def initialize(self, pull_request: PullRequest) -> "OwnersFileHandler": Phase 1: Fetch independent data in parallel (changed files + OWNERS data) Phase 2: Process derived data in parallel (approvers + reviewers) """ + # Phase 1: Parallel data fetching - independent GitHub API operations self.changed_files, self.all_repository_approvers_and_reviewers = await asyncio.gather( self.list_changed_files(pull_request=pull_request), - self.get_all_repository_approvers_and_reviewers(pull_request=pull_request), + self.get_all_repository_approvers_and_reviewers(), ) # Phase 2: Parallel data processing - all depend on phase 1 but independent of each other @@ -83,9 +84,52 
@@ def allowed_users(self) -> list[str]: return _allowed_users async def list_changed_files(self, pull_request: PullRequest) -> list[str]: - changed_files = [_file.filename for _file in await asyncio.to_thread(pull_request.get_files)] - self.logger.debug(f"{self.log_prefix} Changed files: {changed_files}") - return changed_files + """List changed files in the PR using git diff on cloned repository. + + Uses local git diff command instead of GitHub API to reduce API calls. + The repository is already cloned to self.github_webhook.clone_repo_dir. + + Args: + pull_request: PyGithub PullRequest object + + Returns: + List of changed file paths relative to repository root + """ + try: + # Get base and head SHAs (wrap property accesses in asyncio.to_thread) + base_sha, head_sha = await asyncio.gather( + asyncio.to_thread(lambda: pull_request.base.sha), + asyncio.to_thread(lambda: pull_request.head.sha), + ) + + # Run git diff command on cloned repository + # Quote clone_repo_dir to handle paths with spaces or special characters + git_diff_command = ( + f"git -C {shlex.quote(self.github_webhook.clone_repo_dir)} diff --name-only {base_sha}...{head_sha}" + ) + + success, out, _ = await run_command( + command=git_diff_command, + log_prefix=self.log_prefix, + verify_stderr=False, + mask_sensitive=self.github_webhook.mask_sensitive, + ) + + # Check success flag - return empty list if git diff failed + if not success: + self.logger.error(f"{self.log_prefix} git diff command failed") + return [] + + # Parse output: split by newlines and filter empty lines + changed_files = [line.strip() for line in out.splitlines() if line.strip()] + + self.logger.debug(f"{self.log_prefix} Changed files: {changed_files}") + return changed_files + + except Exception: + # Log error and return empty list if git diff fails + self.logger.exception(f"{self.log_prefix} Failed to get changed files via git diff") + return [] def _validate_owners_content(self, content: Any, path: str) -> bool: """Validate 
OWNERS file content structure.""" @@ -107,53 +151,115 @@ def _validate_owners_content(self, content: Any, path: str) -> bool: self.logger.error(f"{self.log_prefix} Invalid OWNERS file {path}: {e}") return False - async def _get_file_content(self, content_path: str, pull_request: PullRequest) -> tuple[ContentFile, str]: - self.logger.debug(f"{self.log_prefix} Get OWNERS file from {content_path}") + async def _get_file_content_from_local( + self, content_path: Path, base_path: Path | None = None + ) -> tuple[str, str] | None: + """Read OWNERS file from local cloned repository. + + Args: + content_path: Path object pointing to OWNERS file in clone_repo_dir + base_path: Base path to compute relative path from (defaults to clone_repo_dir) - _path = await asyncio.to_thread(self.repository.get_contents, content_path, pull_request.base.ref) + Returns: + Tuple of (file_content, relative_path_str) or None if file is unreadable + """ + _base_path = base_path if base_path else Path(self.github_webhook.clone_repo_dir) + relative_path = content_path.relative_to(_base_path) + self.logger.debug(f"{self.log_prefix} Reading OWNERS file from local clone: {relative_path}") - if isinstance(_path, list): - _path = _path[0] + try: + # Read file content from local filesystem (wrap in thread pool for I/O) + file_content = await asyncio.to_thread(content_path.read_text, encoding="utf-8") + return file_content, str(relative_path) + + except OSError as ex: + # File may have been deleted or become unreadable between rglob and read_text + self.logger.warning( + f"{self.log_prefix} Failed to read OWNERS file {relative_path}: {ex}. Skipping this file." + ) + return None + + except UnicodeDecodeError as ex: + # File has invalid encoding - log and skip to allow processing to continue + self.logger.warning( + f"{self.log_prefix} OWNERS file {relative_path} has invalid encoding: {ex}. Skipping this file." 
+ ) + return None - return _path, content_path + async def get_all_repository_approvers_and_reviewers(self) -> dict[str, dict[str, Any]]: + """Get all repository approvers and reviewers from OWNERS files. - async def get_all_repository_approvers_and_reviewers(self, pull_request: PullRequest) -> dict[str, dict[str, Any]]: + Reads OWNERS files from local cloned repository. + The clone is already checked out to the base branch by _clone_repository. + + Returns: + Dictionary mapping OWNERS file paths to their approvers and reviewers + """ # Dictionary mapping OWNERS file paths to their approvers and reviewers _owners: dict[str, dict[str, Any]] = {} tasks: list[Coroutine[Any, Any, Any]] = [] - max_owners_files = 1000 # Configurable limit + max_owners_files = 1000 # Intentionally hardcoded limit to prevent runaway processing owners_count = 0 - self.logger.debug(f"{self.log_prefix} Get git tree") - tree = await asyncio.to_thread(self.repository.get_git_tree, pull_request.base.ref, recursive=True) - - for element in tree.tree: - if element.type == "blob" and element.path.endswith("OWNERS"): - owners_count += 1 - if owners_count > max_owners_files: - self.logger.error(f"{self.log_prefix} Too many OWNERS files (>{max_owners_files})") - break - - content_path = element.path - self.logger.debug(f"{self.log_prefix} Found OWNERS file: {content_path}") - tasks.append(self._get_file_content(content_path, pull_request)) + # Clone is already checked out to base branch by _clone_repository + + clone_path = Path(self.github_webhook.clone_repo_dir) + + # Find all OWNERS files via filesystem walk + self.logger.debug(f"{self.log_prefix} Finding OWNERS files in local clone") + + # Use rglob to recursively find all OWNERS files + def find_owners_files() -> list[Path]: + return [ + p + for p in clone_path.rglob("OWNERS") + if not any(part.startswith(".") for part in p.relative_to(clone_path).parts) + ] + + owners_files = await asyncio.to_thread(find_owners_files) + + for owners_file_path in 
owners_files: + owners_count += 1 + if owners_count > max_owners_files: + self.logger.error(f"{self.log_prefix} Too many OWNERS files (>{max_owners_files})") + break + + relative_path = owners_file_path.relative_to(clone_path) + self.logger.debug(f"{self.log_prefix} Found OWNERS file: {relative_path}") + tasks.append(self._get_file_content_from_local(owners_file_path)) + + results = await asyncio.gather(*tasks, return_exceptions=True) + + for idx, result in enumerate(results): + # Handle unexpected exceptions from _get_file_content_from_local + if isinstance(result, BaseException): + # Get the relative path from the original owners_files list for logging + relative_path_str = ( + str(owners_files[idx].relative_to(clone_path)) if idx < len(owners_files) else "unknown" + ) + self.logger.exception( + f"{self.log_prefix} Unexpected exception reading OWNERS file {relative_path_str}: {result}" + ) + continue - results = await asyncio.gather(*tasks) + # Skip files that couldn't be read (deleted or unreadable) + if result is None: + continue - for result in results: - _path, _content_path = result + # At this point, result must be a tuple (file_content, relative_path_str) + file_content, relative_path_str = result try: - content = yaml.safe_load(_path.decoded_content) - if self._validate_owners_content(content, _content_path): - parent_path = str(Path(_content_path).parent) - if not parent_path: + content = yaml.safe_load(file_content) + if self._validate_owners_content(content, relative_path_str): + parent_path = str(Path(relative_path_str).parent) + if not parent_path or parent_path == ".": parent_path = "." 
_owners[parent_path] = content - except yaml.YAMLError as exp: - self.logger.error(f"{self.log_prefix} Invalid OWNERS file {_content_path}: {exp}") + except yaml.YAMLError: + self.logger.exception(f"{self.log_prefix} Invalid OWNERS file {relative_path_str}") continue return _owners diff --git a/webhook_server/libs/handlers/pull_request_handler.py b/webhook_server/libs/handlers/pull_request_handler.py index d2dc9421..6d08302f 100644 --- a/webhook_server/libs/handlers/pull_request_handler.py +++ b/webhook_server/libs/handlers/pull_request_handler.py @@ -616,17 +616,31 @@ async def _delete_registry_tag_via_regctl( f"-p {self.github_webhook.container_repository_password}" ) - rc, out, err = await self.runner_handler.run_podman_command(command=reg_login_cmd) + redact_values = [ + self.github_webhook.container_repository_username, + self.github_webhook.container_repository_password, + ] + + rc, out, err = await self.runner_handler.run_podman_command( + command=reg_login_cmd, + redact_secrets=redact_values, + ) if rc: try: tag_ls_cmd = f"regctl tag ls {self.github_webhook.container_repository} --include {pr_tag}" - rc, out, err = await self.runner_handler.run_podman_command(command=tag_ls_cmd) + rc, out, err = await self.runner_handler.run_podman_command( + command=tag_ls_cmd, + redact_secrets=redact_values, + ) if rc and out: tag_del_cmd = f"regctl tag delete {repository_full_tag}" - rc, del_out, del_err = await self.runner_handler.run_podman_command(command=tag_del_cmd) + rc, del_out, del_err = await self.runner_handler.run_podman_command( + command=tag_del_cmd, + redact_secrets=redact_values, + ) if rc: await asyncio.to_thread( pull_request.create_issue_comment, f"Successfully removed PR tag: {repository_full_tag}." @@ -673,16 +687,27 @@ async def _delete_registry_tag_via_regctl( self.logger.error(f"{self.log_prefix} Failed to delete tag: {repository_full_tag}. OUT:{out}. 
ERR:{err}") async def close_issue_for_merged_or_closed_pr(self, pull_request: PullRequest, hook_action: str) -> None: - for issue in await asyncio.to_thread(self.repository.get_issues): - if issue.body == self._generate_issue_body(pull_request=pull_request): - self.logger.info(f"{self.log_prefix} Closing issue {issue.title} for PR: {pull_request.title}") - await asyncio.to_thread( - issue.create_comment, - f"{self.log_prefix} Closing issue for PR: {pull_request.title}.\nPR was {hook_action}.", - ) - await asyncio.to_thread(issue.edit, state="closed") + issue_body = self._generate_issue_body(pull_request=pull_request) - break + def _find_matching_issue() -> Any | None: + for existing_issue in self.repository.get_issues(): + if existing_issue.body == issue_body: + return existing_issue + return None + + matching_issue = await asyncio.to_thread(_find_matching_issue) + if not matching_issue: + return + + pr_title = await asyncio.to_thread(lambda: pull_request.title) + issue_title = await asyncio.to_thread(lambda: matching_issue.title) + + self.logger.info(f"{self.log_prefix} Closing issue {issue_title} for PR: {pr_title}") + await asyncio.to_thread( + matching_issue.create_comment, + f"{self.log_prefix} Closing issue for PR: {pr_title}.\nPR was {hook_action}.", + ) + await asyncio.to_thread(matching_issue.edit, state="closed") async def process_opened_or_synchronize_pull_request(self, pull_request: PullRequest) -> None: self.logger.step( # type: ignore[attr-defined] diff --git a/webhook_server/libs/handlers/push_handler.py b/webhook_server/libs/handlers/push_handler.py index ae307193..8ff09873 100644 --- a/webhook_server/libs/handlers/push_handler.py +++ b/webhook_server/libs/handlers/push_handler.py @@ -1,7 +1,6 @@ import asyncio import re from typing import TYPE_CHECKING -from uuid import uuid4 from github.Repository import Repository @@ -94,21 +93,19 @@ async def _issue_on_error(_error: str) -> None: f"{self.log_prefix} {format_task_fields('push_processing', 
'webhook_event', 'started')} " f"Starting PyPI upload process for tag: {tag_name}", ) - clone_repo_dir = f"{self.github_webhook.clone_repo_dir}-{uuid4()}" - uv_cmd_dir = f"--directory {clone_repo_dir}" self.logger.info(f"{self.log_prefix} Start uploading to pypi") - self.logger.debug(f"{self.log_prefix} Clone repo dir: {clone_repo_dir}") - _dist_dir: str = f"{clone_repo_dir}/pypi-dist" - async with self.runner_handler._prepare_cloned_repo_dir( - checkout=tag_name, clone_repo_dir=clone_repo_dir - ) as _res: - if not _res[0]: + async with self.runner_handler._checkout_worktree(checkout=tag_name) as (success, worktree_path, out, err): + uv_cmd_dir = f"--directory {worktree_path}" + _dist_dir: str = f"{worktree_path}/pypi-dist" + self.logger.debug(f"{self.log_prefix} Worktree path: {worktree_path}") + + if not success: self.logger.step( # type: ignore[attr-defined] f"{self.log_prefix} {format_task_fields('push_processing', 'webhook_event', 'failed')} " f"PyPI upload failed: repository preparation failed", ) - _error = self.check_run_handler.get_check_run_text(out=_res[1], err=_res[2]) + _error = self.check_run_handler.get_check_run_text(out=out, err=err) await _issue_on_error(_error=_error) return diff --git a/webhook_server/libs/handlers/runner_handler.py b/webhook_server/libs/handlers/runner_handler.py index c1d93fe8..af5af197 100644 --- a/webhook_server/libs/handlers/runner_handler.py +++ b/webhook_server/libs/handlers/runner_handler.py @@ -4,7 +4,6 @@ import shutil from collections.abc import AsyncGenerator from typing import TYPE_CHECKING, Any -from uuid import uuid4 import shortuuid from github.Branch import Branch @@ -13,6 +12,7 @@ from webhook_server.libs.handlers.check_run_handler import CheckRunHandler from webhook_server.libs.handlers.owners_files_handler import OwnersFileHandler +from webhook_server.utils import helpers as helpers_module from webhook_server.utils.constants import ( BUILD_CONTAINER_STR, CHERRY_PICKED_LABEL_PREFIX, @@ -42,153 +42,78 @@ def 
__init__(self, github_webhook: "GithubWebhook", owners_file_handler: OwnersF github_webhook=self.github_webhook, owners_file_handler=self.owners_file_handler ) - @property - def mask_sensitive(self) -> bool: - """Get mask_sensitive configuration value.""" - return self.github_webhook.config.get_value("mask-sensitive-data", return_on_none=True) - @contextlib.asynccontextmanager - async def _prepare_cloned_repo_dir( + async def _checkout_worktree( self, - clone_repo_dir: str, pull_request: PullRequest | None = None, is_merged: bool = False, checkout: str = "", tag_name: str = "", - ) -> AsyncGenerator[tuple[bool, Any, Any], None]: - git_cmd = f"git --work-tree={clone_repo_dir} --git-dir={clone_repo_dir}/.git" - self.logger.debug(f"{self.log_prefix} Preparing cloned repo dir {clone_repo_dir} with git cmd: {git_cmd}") - result: tuple[bool, str, str] = (True, "", "") - success = True - - try: - # Clone the repository - github_token = self.github_webhook.token - clone_url_with_token = self.repository.clone_url.replace("https://", f"https://{github_token}@") - rc, out, err = await run_command( - command=(f"git clone {clone_url_with_token} {clone_repo_dir}"), - log_prefix=self.log_prefix, - redact_secrets=[github_token], - mask_sensitive=self.mask_sensitive, + ) -> AsyncGenerator[tuple[bool, str, str, str], None]: + """Create worktree from existing clone for handler operations. + + Uses centralized clone from github_webhook.clone_repo_dir and creates + a worktree for isolated checkout operations. No cloning happens here. 
+ + Args: + pull_request: Pull request object + is_merged: Whether PR is merged + checkout: Specific branch/commit to checkout + tag_name: Tag name to checkout + + Yields: + tuple: (success: bool, worktree_path: str, stdout: str, stderr: str) + """ + pr_number: int | None = None + base_ref: str | None = None + if pull_request: + pr_number = await asyncio.to_thread(lambda: pull_request.number) + base_ref = await asyncio.to_thread(lambda: pull_request.base.ref) + + # Determine what to checkout + checkout_target = "" + if checkout: + checkout_target = checkout + elif tag_name: + checkout_target = tag_name + elif is_merged and pull_request and base_ref is not None: + checkout_target = base_ref + elif pull_request and pr_number is not None: + checkout_target = f"origin/pr/{pr_number}" + else: + raise RuntimeError( + f"{self.log_prefix} Unable to determine checkout target: " + "no checkout/tag_name provided and pull_request is missing." ) - if not rc: - result = (rc, out, err) - success = False - - if success: - rc, out, err = await run_command( - command=f"{git_cmd} config user.name '{self.repository.owner.login}'", - log_prefix=self.log_prefix, - mask_sensitive=self.mask_sensitive, - ) - if not rc: - result = (rc, out, err) - success = False - - if success: - rc, out, err = await run_command( - command=f"{git_cmd} config user.email '{self.repository.owner.email}'", - log_prefix=self.log_prefix, - mask_sensitive=self.mask_sensitive, - ) - if not rc: - result = (rc, out, err) - success = False - if success: + # Use centralized clone + repo_dir = self.github_webhook.clone_repo_dir + self.logger.debug(f"{self.log_prefix} Creating worktree from {repo_dir} with checkout: {checkout_target}") + + # Create worktree for this operation + async with helpers_module.git_worktree_checkout( + repo_dir=repo_dir, + checkout=checkout_target, + log_prefix=self.log_prefix, + mask_sensitive=self.github_webhook.mask_sensitive, + ) as (success, worktree_path, out, err): + result: tuple[bool, 
str, str, str] = (success, worktree_path, out, err) + + # Merge base branch if needed (for PR testing) + if success and pull_request and not is_merged and not tag_name: + merge_ref = base_ref + if merge_ref is None: + merge_ref = await asyncio.to_thread(lambda: pull_request.base.ref) + git_cmd = f"git -C {worktree_path}" rc, out, err = await run_command( - command=( - f"{git_cmd} config --local --add remote.origin.fetch +refs/pull/*/head:refs/remotes/origin/pr/*" - ), + command=f"{git_cmd} merge origin/{merge_ref} -m 'Merge {merge_ref}'", log_prefix=self.log_prefix, - mask_sensitive=self.mask_sensitive, + mask_sensitive=self.github_webhook.mask_sensitive, ) if not rc: - result = (rc, out, err) - success = False - - if success: - rc, out, err = await run_command( - command=f"{git_cmd} remote update", - log_prefix=self.log_prefix, - mask_sensitive=self.mask_sensitive, - ) - if not rc: - result = (rc, out, err) - success = False - - # Checkout to requested branch/tag - if checkout and success: - rc, out, err = await run_command( - command=f"{git_cmd} checkout {checkout}", - log_prefix=self.log_prefix, - mask_sensitive=self.mask_sensitive, - ) - if not rc: - result = (rc, out, err) - success = False - - if success and pull_request: - rc, out, err = await run_command( - command=f"{git_cmd} merge origin/{pull_request.base.ref} -m 'Merge {pull_request.base.ref}'", - log_prefix=self.log_prefix, - mask_sensitive=self.mask_sensitive, - ) - if not rc: - result = (rc, out, err) - success = False - - # Checkout the branch if pull request is merged or for release - else: - if success: - if is_merged and pull_request: - rc, out, err = await run_command( - command=f"{git_cmd} checkout {pull_request.base.ref}", - log_prefix=self.log_prefix, - mask_sensitive=self.mask_sensitive, - ) - if not rc: - result = (rc, out, err) - success = False + result = (False, worktree_path, out, err) - elif tag_name: - rc, out, err = await run_command( - command=f"{git_cmd} checkout {tag_name}", - 
log_prefix=self.log_prefix, - mask_sensitive=self.mask_sensitive, - ) - if not rc: - result = (rc, out, err) - success = False - - # Checkout the pull request - else: - if _pull_request := await self.github_webhook.get_pull_request(): - rc, out, err = await run_command( - command=f"{git_cmd} checkout origin/pr/{_pull_request.number}", - log_prefix=self.log_prefix, - mask_sensitive=self.mask_sensitive, - ) - if not rc: - result = (rc, out, err) - success = False - - if pull_request and success: - rc, out, err = await run_command( - command=( - f"{git_cmd} merge origin/{pull_request.base.ref} " - f"-m 'Merge {pull_request.base.ref}'" - ), - log_prefix=self.log_prefix, - mask_sensitive=self.mask_sensitive, - ) - if not rc: - result = (rc, out, err) - - finally: yield result - self.logger.debug(f"{self.log_prefix} Deleting {clone_repo_dir}") - shutil.rmtree(clone_repo_dir, ignore_errors=True) def is_podman_bug(self, err: str) -> bool: _err = "Error: current system boot ID differs from cached boot ID; an unhandled reboot has occurred" @@ -199,11 +124,12 @@ def fix_podman_bug(self) -> None: shutil.rmtree("/tmp/storage-run-1000/containers", ignore_errors=True) shutil.rmtree("/tmp/storage-run-1000/libpod/tmp", ignore_errors=True) - async def run_podman_command( - self, command: str, redact_secrets: list[str] | None = None, mask_sensitive: bool = True - ) -> tuple[bool, str, str]: + async def run_podman_command(self, command: str, redact_secrets: list[str] | None = None) -> tuple[bool, str, str]: rc, out, err = await run_command( - command=command, log_prefix=self.log_prefix, redact_secrets=redact_secrets, mask_sensitive=mask_sensitive + command=command, + log_prefix=self.log_prefix, + redact_secrets=redact_secrets, + mask_sensitive=self.github_webhook.mask_sensitive, ) if rc: @@ -215,7 +141,7 @@ async def run_podman_command( command=command, log_prefix=self.log_prefix, redact_secrets=redact_secrets, - mask_sensitive=mask_sensitive, + 
mask_sensitive=self.github_webhook.mask_sensitive, ) return rc, out, err @@ -232,48 +158,50 @@ async def run_tox(self, pull_request: PullRequest) -> None: if await self.check_run_handler.is_check_run_in_progress(check_run=TOX_STR): self.logger.debug(f"{self.log_prefix} Check run is in progress, re-running {TOX_STR}.") - clone_repo_dir = f"{self.github_webhook.clone_repo_dir}-{uuid4()}" python_ver = ( f"--python={self.github_webhook.tox_python_version}" if self.github_webhook.tox_python_version else "" ) - cmd = f"uvx {python_ver} {TOX_STR} --workdir {clone_repo_dir} --root {clone_repo_dir} -c {clone_repo_dir}" _tox_tests = self.github_webhook.tox.get(pull_request.base.ref, "") - if _tox_tests and _tox_tests != "all": - tests = _tox_tests.replace(" ", "") - cmd += f" -e {tests}" - self.logger.step( # type: ignore[attr-defined] f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} " f"Setting tox check status to in-progress", ) await self.check_run_handler.set_run_tox_check_in_progress() - self.logger.debug(f"{self.log_prefix} Tox command to run: {cmd}") self.logger.step( # type: ignore[attr-defined] f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} " - f"Preparing repository clone for tox execution", + f"Preparing repository checkout for tox execution", ) - async with self._prepare_cloned_repo_dir(clone_repo_dir=clone_repo_dir, pull_request=pull_request) as _res: + async with self._checkout_worktree(pull_request=pull_request) as (success, worktree_path, out, err): + # Build tox command with worktree path + cmd = f"uvx {python_ver} {TOX_STR} --workdir {worktree_path} --root {worktree_path} -c {worktree_path}" + if _tox_tests and _tox_tests != "all": + tests = _tox_tests.replace(" ", "") + cmd += f" -e {tests}" + self.logger.debug(f"{self.log_prefix} Tox command to run: {cmd}") + output: dict[str, Any] = { "title": "Tox", "summary": "", "text": None, } - if not _res[0]: + if not success: self.logger.step( # type: 
ignore[attr-defined] f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'failed')} " f"Repository preparation failed for tox", ) self.logger.error(f"{self.log_prefix} Repository preparation failed for tox") - output["text"] = self.check_run_handler.get_check_run_text(out=_res[1], err=_res[2]) + output["text"] = self.check_run_handler.get_check_run_text(out=out, err=err) return await self.check_run_handler.set_run_tox_check_failure(output=output) self.logger.step( # type: ignore[attr-defined] f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} Executing tox command" ) rc, out, err = await run_command( - command=cmd, log_prefix=self.log_prefix, mask_sensitive=self.mask_sensitive + command=cmd, + log_prefix=self.log_prefix, + mask_sensitive=self.github_webhook.mask_sensitive, ) output["text"] = self.check_run_handler.get_check_run_text(err=err, out=out) @@ -303,9 +231,6 @@ async def run_pre_commit(self, pull_request: PullRequest) -> None: if await self.check_run_handler.is_check_run_in_progress(check_run=PRE_COMMIT_STR): self.logger.debug(f"{self.log_prefix} Check run is in progress, re-running {PRE_COMMIT_STR}.") - clone_repo_dir = f"{self.github_webhook.clone_repo_dir}-{uuid4()}" - cmd = f" uvx --directory {clone_repo_dir} {PREK_STR} run --all-files" - self.logger.step( # type: ignore[attr-defined] f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} " f"Setting pre-commit check status to in-progress", @@ -314,21 +239,23 @@ async def run_pre_commit(self, pull_request: PullRequest) -> None: self.logger.step( # type: ignore[attr-defined] f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} " - f"Preparing repository clone for pre-commit execution", + f"Preparing repository checkout for pre-commit execution", ) - async with self._prepare_cloned_repo_dir(pull_request=pull_request, clone_repo_dir=clone_repo_dir) as _res: + async with self._checkout_worktree(pull_request=pull_request) as 
(success, worktree_path, out, err): + cmd = f" uvx --directory {worktree_path} {PREK_STR} run --all-files" + output: dict[str, Any] = { "title": "Pre-Commit", "summary": "", "text": None, } - if not _res[0]: + if not success: self.logger.step( # type: ignore[attr-defined] f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'failed')} " f"Repository preparation failed for pre-commit", ) self.logger.error(f"{self.log_prefix} Repository preparation failed for pre-commit") - output["text"] = self.check_run_handler.get_check_run_text(out=_res[1], err=_res[2]) + output["text"] = self.check_run_handler.get_check_run_text(out=out, err=err) return await self.check_run_handler.set_run_pre_commit_check_failure(output=output) self.logger.step( # type: ignore[attr-defined] @@ -336,7 +263,9 @@ async def run_pre_commit(self, pull_request: PullRequest) -> None: f"Executing pre-commit command", ) rc, out, err = await run_command( - command=cmd, log_prefix=self.log_prefix, mask_sensitive=self.mask_sensitive + command=cmd, + log_prefix=self.log_prefix, + mask_sensitive=self.github_webhook.mask_sensitive, ) output["text"] = self.check_run_handler.get_check_run_text(err=err, out=out) @@ -380,8 +309,6 @@ async def run_build_container( ): return - clone_repo_dir = f"{self.github_webhook.clone_repo_dir}-{uuid4()}" - if pull_request and set_check: if await self.check_run_handler.is_check_run_in_progress(check_run=BUILD_CONTAINER_STR) and not is_merged: self.logger.info(f"{self.log_prefix} Check run is in progress, re-running {BUILD_CONTAINER_STR}.") @@ -390,51 +317,54 @@ async def run_build_container( f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} " f"Setting container build check status to in-progress", ) - await self.check_run_handler.set_container_build_in_progress() + if set_check: + await self.check_run_handler.set_container_build_in_progress() _container_repository_and_tag = self.github_webhook.container_repository_and_tag( 
pull_request=pull_request, is_merged=is_merged, tag=tag ) no_cache: str = " --no-cache" if is_merged else "" - build_cmd: str = ( - f"--network=host {no_cache} -f " - f"{clone_repo_dir}/{self.github_webhook.dockerfile} " - f"{clone_repo_dir} -t {_container_repository_and_tag}" - ) - if self.github_webhook.container_build_args: - build_args = " ".join(f"--build-arg {arg}" for arg in self.github_webhook.container_build_args) - build_cmd = f"{build_args} {build_cmd}" - - if self.github_webhook.container_command_args: - build_cmd = f"{' '.join(self.github_webhook.container_command_args)} {build_cmd}" - - if command_args: - build_cmd = f"{command_args} {build_cmd}" - - podman_build_cmd: str = f"podman build {build_cmd}" - self.logger.debug(f"{self.log_prefix} Podman build command to run: {podman_build_cmd}") self.logger.step( # type: ignore[attr-defined] f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} " - f"Preparing repository clone for container build", + f"Preparing repository checkout for container build", ) - async with self._prepare_cloned_repo_dir( + async with self._checkout_worktree( pull_request=pull_request, is_merged=is_merged, tag_name=tag, - clone_repo_dir=clone_repo_dir, - ) as _res: + ) as (success, worktree_path, out, err): + # Build container build command with worktree path + build_cmd: str = ( + f"--network=host {no_cache} -f " + f"{worktree_path}/{self.github_webhook.dockerfile} " + f"{worktree_path} -t {_container_repository_and_tag}" + ) + + if self.github_webhook.container_build_args: + build_args = " ".join(f"--build-arg {arg}" for arg in self.github_webhook.container_build_args) + build_cmd = f"{build_args} {build_cmd}" + + if self.github_webhook.container_command_args: + build_cmd = f"{' '.join(self.github_webhook.container_command_args)} {build_cmd}" + + if command_args: + build_cmd = f"{command_args} {build_cmd}" + + podman_build_cmd: str = f"podman build {build_cmd}" + self.logger.debug(f"{self.log_prefix} 
Podman build command to run: {podman_build_cmd}") + output: dict[str, Any] = { "title": "Build container", "summary": "", "text": None, } - if not _res[0]: + if not success: self.logger.step( # type: ignore[attr-defined] f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'failed')} " f"Repository preparation failed for container build", ) - output["text"] = self.check_run_handler.get_check_run_text(out=_res[1], err=_res[2]) + output["text"] = self.check_run_handler.get_check_run_text(out=out, err=err) if pull_request and set_check: return await self.check_run_handler.set_container_build_failure(output=output) @@ -443,7 +373,7 @@ async def run_build_container( f"Executing container build command", ) build_rc, build_out, build_err = await self.run_podman_command( - command=podman_build_cmd, mask_sensitive=self.mask_sensitive + command=podman_build_cmd, ) output["text"] = self.check_run_handler.get_check_run_text(err=build_err, out=build_out) @@ -480,7 +410,6 @@ async def run_build_container( self.github_webhook.container_repository_username, self.github_webhook.container_repository_password, ], - mask_sensitive=self.mask_sensitive, ) if push_rc: self.logger.step( # type: ignore[attr-defined] @@ -539,7 +468,6 @@ async def run_install_python_module(self, pull_request: PullRequest) -> None: if await self.check_run_handler.is_check_run_in_progress(check_run=PYTHON_MODULE_INSTALL_STR): self.logger.info(f"{self.log_prefix} Check run is in progress, re-running {PYTHON_MODULE_INSTALL_STR}.") - clone_repo_dir = f"{self.github_webhook.clone_repo_dir}-{uuid4()}" self.logger.info(f"{self.log_prefix} Installing python module") self.logger.step( # type: ignore[attr-defined] f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} " @@ -548,23 +476,20 @@ async def run_install_python_module(self, pull_request: PullRequest) -> None: await self.check_run_handler.set_python_module_install_in_progress() self.logger.step( # type: ignore[attr-defined] 
f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} " - f"Preparing repository clone for Python module installation", + f"Preparing repository checkout for Python module installation", ) - async with self._prepare_cloned_repo_dir( - pull_request=pull_request, - clone_repo_dir=clone_repo_dir, - ) as _res: + async with self._checkout_worktree(pull_request=pull_request) as (success, worktree_path, out, err): output: dict[str, Any] = { "title": "Python module installation", "summary": "", "text": None, } - if not _res[0]: + if not success: self.logger.step( # type: ignore[attr-defined] f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'failed')} " f"Repository preparation failed for Python module installation", ) - output["text"] = self.check_run_handler.get_check_run_text(out=_res[1], err=_res[2]) + output["text"] = self.check_run_handler.get_check_run_text(out=out, err=err) return await self.check_run_handler.set_python_module_install_failure(output=output) self.logger.step( # type: ignore[attr-defined] @@ -572,9 +497,9 @@ async def run_install_python_module(self, pull_request: PullRequest) -> None: f"Executing Python module installation command", ) rc, out, err = await run_command( - command=f"uvx pip wheel --no-cache-dir -w {clone_repo_dir}/dist {clone_repo_dir}", + command=f"uvx pip wheel --no-cache-dir -w {worktree_path}/dist {worktree_path}", log_prefix=self.log_prefix, - mask_sensitive=self.mask_sensitive, + mask_sensitive=self.github_webhook.mask_sensitive, ) output["text"] = self.check_run_handler.get_check_run_text(err=err, out=out) @@ -705,36 +630,35 @@ async def cherry_pick(self, pull_request: PullRequest, target_branch: str, revie commit_hash = pull_request.merge_commit_sha commit_msg_striped = pull_request.title.replace("'", "") pull_request_url = pull_request.html_url - clone_repo_dir = f"{self.github_webhook.clone_repo_dir}-{uuid4()}" - git_cmd = f"git --work-tree={clone_repo_dir} --git-dir={clone_repo_dir}/.git" 
github_token = self.github_webhook.token - hub_cmd = f"GITHUB_TOKEN={github_token} hub --work-tree={clone_repo_dir} --git-dir={clone_repo_dir}/.git" - commands: list[str] = [ - f"{git_cmd} checkout {target_branch}", - f"{git_cmd} pull origin {target_branch}", - f"{git_cmd} checkout -b {new_branch_name} origin/{target_branch}", - f"{git_cmd} cherry-pick {commit_hash}", - f"{git_cmd} push origin {new_branch_name}", - f'bash -c "{hub_cmd} pull-request -b {target_branch} ' - f"-h {new_branch_name} -l {CHERRY_PICKED_LABEL_PREFIX} " - f"-m '{CHERRY_PICKED_LABEL_PREFIX}: [{target_branch}] " - f"{commit_msg_striped}' -m 'cherry-pick {pull_request_url} " - f"into {target_branch}' -m 'requested-by {requested_by}'\"", - ] - - rc, out, err = None, "", "" - async with self._prepare_cloned_repo_dir(pull_request=pull_request, clone_repo_dir=clone_repo_dir) as _res: + + async with self._checkout_worktree(pull_request=pull_request) as (success, worktree_path, out, err): + git_cmd = f"git --work-tree={worktree_path} --git-dir={worktree_path}/.git" + hub_cmd = f"GITHUB_TOKEN={github_token} hub --work-tree={worktree_path} --git-dir={worktree_path}/.git" + commands: list[str] = [ + f"{git_cmd} checkout {target_branch}", + f"{git_cmd} pull origin {target_branch}", + f"{git_cmd} checkout -b {new_branch_name} origin/{target_branch}", + f"{git_cmd} cherry-pick {commit_hash}", + f"{git_cmd} push origin {new_branch_name}", + f'bash -c "{hub_cmd} pull-request -b {target_branch} ' + f"-h {new_branch_name} -l {CHERRY_PICKED_LABEL_PREFIX} " + f"-m '{CHERRY_PICKED_LABEL_PREFIX}: [{target_branch}] " + f"{commit_msg_striped}' -m 'cherry-pick {pull_request_url} " + f"into {target_branch}' -m 'requested-by {requested_by}'\"", + ] + output = { "title": "Cherry-pick details", "summary": "", "text": None, } - if not _res[0]: + if not success: self.logger.step( # type: ignore[attr-defined] f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'failed')} " f"Repository preparation failed for 
cherry-pick", ) - output["text"] = self.check_run_handler.get_check_run_text(out=_res[1], err=_res[2]) + output["text"] = self.check_run_handler.get_check_run_text(out=out, err=err) await self.check_run_handler.set_cherry_pick_failure(output=output) self.logger.step( # type: ignore[attr-defined] @@ -747,7 +671,7 @@ async def cherry_pick(self, pull_request: PullRequest, target_branch: str, revie command=cmd, log_prefix=self.log_prefix, redact_secrets=[github_token], - mask_sensitive=self.mask_sensitive, + mask_sensitive=self.github_webhook.mask_sensitive, ) if not rc: self.logger.step( # type: ignore[attr-defined] @@ -757,8 +681,16 @@ async def cherry_pick(self, pull_request: PullRequest, target_branch: str, revie ) output["text"] = self.check_run_handler.get_check_run_text(err=err, out=out) await self.check_run_handler.set_cherry_pick_failure(output=output) - redacted_out = _redact_secrets(out, [github_token], mask_sensitive=self.mask_sensitive) - redacted_err = _redact_secrets(err, [github_token], mask_sensitive=self.mask_sensitive) + redacted_out = _redact_secrets( + out, + [github_token], + mask_sensitive=self.github_webhook.mask_sensitive, + ) + redacted_err = _redact_secrets( + err, + [github_token], + mask_sensitive=self.github_webhook.mask_sensitive, + ) self.logger.error(f"{self.log_prefix} Cherry pick failed: {redacted_out} --- {redacted_err}") local_branch_name = f"{pull_request.head.ref}-{target_branch}" await asyncio.to_thread( diff --git a/webhook_server/tests/conftest.py b/webhook_server/tests/conftest.py index f2d30dc4..3665b48d 100644 --- a/webhook_server/tests/conftest.py +++ b/webhook_server/tests/conftest.py @@ -14,6 +14,29 @@ os.environ["ENABLE_LOG_SERVER"] = "true" from webhook_server.libs.github_api import GithubWebhook +# OWNERS test data - single source of truth for all test fixtures +# This constant is used by both Repository.get_contents() and owners_files_test_data fixture +OWNERS_TEST_DATA: dict[str, dict[str, list[str] | bool]] = { + 
"OWNERS": { + "approvers": ["root_approver1", "root_approver2"], + "reviewers": ["root_reviewer1", "root_reviewer2"], + }, + "folder1/OWNERS": { + "approvers": ["folder1_approver1", "folder1_approver2"], + "reviewers": ["folder1_reviewer1", "folder1_reviewer2"], + }, + "folder2/OWNERS": {}, + "folder/folder4/OWNERS": { + "approvers": ["folder4_approver1", "folder4_approver2"], + "reviewers": ["folder4_reviewer1", "folder4_reviewer2"], + }, + "folder5/OWNERS": { + "root-approvers": False, + "approvers": ["folder5_approver1", "folder5_approver2"], + "reviewers": ["folder5_reviewer1", "folder5_reviewer2"], + }, +} + class Tree: def __init__(self, path: str): @@ -52,44 +75,12 @@ def get_git_tree(self, sha: str, recursive: bool): return Tree("") def get_contents(self, path: str, ref: str): - owners_data = yaml.dump({ - "approvers": ["root_approver1", "root_approver2"], - "reviewers": ["root_reviewer1", "root_reviewer2"], - }) - - folder1_owners_data = yaml.dump({ - "approvers": ["folder1_approver1", "folder1_approver2"], - "reviewers": ["folder1_reviewer1", "folder1_reviewer2"], - }) - - folder4_owners_data = yaml.dump({ - "approvers": ["folder4_approver1", "folder4_approver2"], - "reviewers": ["folder4_reviewer1", "folder4_reviewer2"], - }) - - folder5_owners_data = yaml.dump({ - "root-approvers": False, - "approvers": ["folder5_approver1", "folder5_approver2"], - "reviewers": ["folder5_reviewer1", "folder5_reviewer2"], - }) - if path == "OWNERS": - return ContentFile(owners_data) - - elif path == "folder1/OWNERS": - return ContentFile(folder1_owners_data) - - elif path == "folder2/OWNERS": - return ContentFile(yaml.dump({})) - - elif path == "folder/folder4/OWNERS": - return ContentFile(folder4_owners_data) - + # Use centralized OWNERS_TEST_DATA constant + if path in OWNERS_TEST_DATA: + return ContentFile(yaml.dump(OWNERS_TEST_DATA[path])) elif path == "folder": return ContentFile(yaml.dump({})) - elif path == "folder5/OWNERS": - return 
ContentFile(folder5_owners_data) - @dataclass class Label: @@ -193,3 +184,16 @@ def optimize_test_environment(): # Restore original timeout os.environ["PYTEST_TIMEOUT"] = original_timeout + + +@pytest.fixture +def owners_files_test_data(): + """Shared OWNERS test data structure used across multiple test files. + + Returns a dict mapping file paths to YAML-serialized OWNERS content. + This fixture eliminates duplication between test_pull_request_owners.py + and test_owners_files_handler.py. + + Uses centralized OWNERS_TEST_DATA constant to ensure consistency. + """ + return {path: yaml.dump(data) for path, data in OWNERS_TEST_DATA.items()} diff --git a/webhook_server/tests/test_github_api.py b/webhook_server/tests/test_github_api.py index fc0c993b..1ca97725 100644 --- a/webhook_server/tests/test_github_api.py +++ b/webhook_server/tests/test_github_api.py @@ -1,6 +1,7 @@ import asyncio import os import tempfile +from pathlib import Path from typing import Any from unittest.mock import AsyncMock, Mock, patch @@ -74,6 +75,34 @@ def minimal_headers(self) -> dict[str, str]: def logger(self): return get_logger(name="test") + @pytest.fixture + def to_thread_sync(self) -> Any: + """Async helper to make asyncio.to_thread awaitable while executing inline.""" + + async def _to_thread_sync(fn: Any, *args: Any, **kwargs: Any) -> Any: + return fn(*args, **kwargs) + + return _to_thread_sync + + @pytest.fixture + def get_value_side_effect(self) -> Any: + """Side effect function for Config.get_value mock in clone tests.""" + + def _get_value_side_effect(value: str, *_args: Any, **_kwargs: Any) -> Any: + if value == "mask-sensitive-data": + return True + if value == "container": + return {} + if value == "pypi": + return {} + if value == "tox": + return {} + if value == "verified-job": + return True + return None + + return _get_value_side_effect + @patch("webhook_server.libs.github_api.Config") @patch("webhook_server.libs.github_api.get_api_with_highest_rate_limit") 
@patch("webhook_server.libs.github_api.get_github_repo_api") @@ -261,11 +290,13 @@ async def test_process_pull_request_event( "get_contents", return_value=Mock(decoded_content=b"approvers:\n - user1\nreviewers:\n - user2"), ), + patch.object(webhook, "_clone_repository", new=AsyncMock(return_value=None)), ): await webhook.process() mock_process_pr.assert_called_once() @patch.dict(os.environ, {"WEBHOOK_SERVER_DATA_DIR": "webhook_server/tests/manifests"}) + @patch("webhook_server.libs.github_api.get_github_repo_api") @patch("webhook_server.libs.github_api.get_repository_github_app_api") @patch("webhook_server.libs.github_api.get_api_with_highest_rate_limit") @patch("webhook_server.libs.handlers.push_handler.PushHandler.process_push_webhook_data") @@ -280,6 +311,7 @@ async def test_process_push_event( mock_process_push: Mock, mock_api_rate_limit: Mock, mock_repo_api: Mock, + mock_get_repo: Mock, push_payload: dict[str, Any], ) -> None: """Test processing push event.""" @@ -290,8 +322,13 @@ async def test_process_push_event( mock_user.login = "test-user" mock_api.get_user.return_value = mock_user + # Mock repository with proper clone_url + mock_repository = Mock() + mock_repository.clone_url = "https://github.com/test/repo.git" + mock_api_rate_limit.return_value = (mock_api, "TOKEN", "USER") mock_repo_api.return_value = Mock() + mock_get_repo.return_value = mock_repository mock_get_apis.return_value = [] # Return empty list to skip the problematic property code mock_repo_local_data.return_value = {} mock_process_push.return_value = None @@ -299,7 +336,8 @@ async def test_process_push_event( headers = Headers({"X-GitHub-Event": "push"}) webhook = GithubWebhook(hook_data=push_payload, headers=headers, logger=Mock()) - await webhook.process() + with patch.object(webhook, "_clone_repository", new=AsyncMock(return_value=None)): + await webhook.process() mock_process_push.assert_called_once() @patch.dict(os.environ, {"WEBHOOK_SERVER_DATA_DIR": 
"webhook_server/tests/manifests"}) @@ -362,6 +400,7 @@ async def test_process_issue_comment_event( "get_contents", return_value=Mock(decoded_content=b"approvers:\n - user1\nreviewers:\n - user2"), ), + patch.object(webhook, "_clone_repository", new=AsyncMock(return_value=None)), ): await webhook.process() mock_process_comment.assert_called_once() @@ -746,7 +785,8 @@ async def test_process_check_run_event(self, minimal_hook_data: dict, minimal_he mock_pr_handler.return_value.check_if_can_be_merged = AsyncMock(return_value=None) webhook = GithubWebhook(check_run_data, headers, logger) - await webhook.process() + with patch.object(webhook, "_clone_repository", new=AsyncMock(return_value=None)): + await webhook.process() mock_check_handler.return_value.process_pull_request_check_run_webhook_data.assert_awaited_once() mock_pr_handler.return_value.check_if_can_be_merged.assert_awaited_once() @@ -1020,3 +1060,362 @@ async def test_get_last_commit(self, minimal_hook_data: dict, minimal_headers: d result = await gh._get_last_commit(mock_pr) assert result == mock_commits[-1] + + @pytest.mark.asyncio + async def test_clone_repository_success( + self, + minimal_hook_data: dict, + minimal_headers: dict, + logger: Mock, + get_value_side_effect: Any, + to_thread_sync: Any, + ) -> None: + """Test successful repository clone for PR.""" + with patch("webhook_server.libs.github_api.Config") as mock_config: + mock_config.return_value.repository = True + mock_config.return_value.repository_local_data.return_value = {} + mock_config.return_value.get_value.side_effect = get_value_side_effect + + with patch("webhook_server.libs.github_api.get_api_with_highest_rate_limit") as mock_get_api: + mock_get_api.return_value = (Mock(), "test-token", "apiuser") + + with patch("webhook_server.libs.github_api.get_github_repo_api") as mock_get_repo_api: + mock_repo = Mock() + mock_repo.clone_url = "https://github.com/org/test-repo.git" + mock_owner = Mock() + mock_owner.login = "test-owner" + 
mock_repo.owner = mock_owner + mock_get_repo_api.return_value = mock_repo + + with patch("webhook_server.libs.github_api.get_repository_github_app_api") as mock_get_app_api: + mock_get_app_api.return_value = Mock() + + with patch("webhook_server.utils.helpers.get_repository_color_for_log_prefix") as mock_color: + mock_color.return_value = "test-repo" + + gh = GithubWebhook(minimal_hook_data, minimal_headers, logger) + + # Mock pull request + mock_pr = Mock() + mock_base = Mock() + mock_base.ref = "main" + mock_pr.base = mock_base + + # Mock run_command to succeed for all git operations + async def mock_run_command(*_args: Any, **_kwargs: Any) -> tuple[bool, str, str]: + return (True, "", "") + + with ( + patch("webhook_server.libs.github_api.run_command", side_effect=mock_run_command), + patch("asyncio.to_thread", side_effect=to_thread_sync), + ): + await gh._clone_repository(pull_request=mock_pr) + + # Verify clone succeeded + assert gh._repo_cloned is True + + @pytest.mark.asyncio + async def test_clone_repository_already_cloned( + self, minimal_hook_data: dict, minimal_headers: dict, logger: Mock + ) -> None: + """Test early return when repository already cloned.""" + with patch("webhook_server.libs.github_api.Config") as mock_config: + mock_config.return_value.repository = True + mock_config.return_value.repository_local_data.return_value = {} + + with patch("webhook_server.libs.github_api.get_api_with_highest_rate_limit") as mock_get_api: + mock_get_api.return_value = (Mock(), "test-token", "apiuser") + + with patch("webhook_server.libs.github_api.get_github_repo_api") as mock_get_repo_api: + mock_get_repo_api.return_value = Mock() + + with patch("webhook_server.libs.github_api.get_repository_github_app_api") as mock_get_app_api: + mock_get_app_api.return_value = Mock() + + with patch("webhook_server.utils.helpers.get_repository_color_for_log_prefix") as mock_color: + mock_color.return_value = "test-repo" + + gh = GithubWebhook(minimal_hook_data, 
minimal_headers, logger) + gh._repo_cloned = True # Mark as already cloned + + mock_pr = Mock() + + with patch("webhook_server.libs.github_api.run_command") as mock_run_cmd: + await gh._clone_repository(pull_request=mock_pr) + + # Verify run_command was never called + mock_run_cmd.assert_not_called() + + @pytest.mark.asyncio + async def test_clone_repository_clone_failure( + self, + minimal_hook_data: dict, + minimal_headers: dict, + logger: Mock, + get_value_side_effect: Any, + ) -> None: + """Test RuntimeError raised when git clone fails.""" + with patch("webhook_server.libs.github_api.Config") as mock_config: + mock_config.return_value.repository = True + mock_config.return_value.repository_local_data.return_value = {} + mock_config.return_value.get_value.side_effect = get_value_side_effect + + with patch("webhook_server.libs.github_api.get_api_with_highest_rate_limit") as mock_get_api: + mock_get_api.return_value = (Mock(), "test-token", "apiuser") + + with patch("webhook_server.libs.github_api.get_github_repo_api") as mock_get_repo_api: + mock_repo = Mock() + mock_repo.clone_url = "https://github.com/org/test-repo.git" + mock_get_repo_api.return_value = mock_repo + + with patch("webhook_server.libs.github_api.get_repository_github_app_api") as mock_get_app_api: + mock_get_app_api.return_value = Mock() + + with patch("webhook_server.utils.helpers.get_repository_color_for_log_prefix") as mock_color: + mock_color.return_value = "test-repo" + + gh = GithubWebhook(minimal_hook_data, minimal_headers, logger) + + mock_pr = Mock() + + # Mock run_command to fail on clone + async def mock_run_command(command: str, **_kwargs: Any) -> tuple[bool, str, str]: + if "git clone" in command: + return (False, "", "Permission denied") + return (True, "", "") + + with ( + patch("webhook_server.libs.github_api.run_command", side_effect=mock_run_command), + pytest.raises(RuntimeError, match="Failed to clone repository"), + ): + await gh._clone_repository(pull_request=mock_pr) + + 
@pytest.mark.asyncio + async def test_clone_repository_checkout_failure( + self, + minimal_hook_data: dict, + minimal_headers: dict, + logger: Mock, + get_value_side_effect: Any, + to_thread_sync: Any, + ) -> None: + """Test RuntimeError raised when git checkout fails.""" + with patch("webhook_server.libs.github_api.Config") as mock_config: + mock_config.return_value.repository = True + mock_config.return_value.repository_local_data.return_value = {} + mock_config.return_value.get_value.side_effect = get_value_side_effect + + with patch("webhook_server.libs.github_api.get_api_with_highest_rate_limit") as mock_get_api: + mock_get_api.return_value = (Mock(), "test-token", "apiuser") + + with patch("webhook_server.libs.github_api.get_github_repo_api") as mock_get_repo_api: + mock_repo = Mock() + mock_repo.clone_url = "https://github.com/org/test-repo.git" + mock_owner = Mock() + mock_owner.login = "test-owner" + mock_repo.owner = mock_owner + mock_get_repo_api.return_value = mock_repo + + with patch("webhook_server.libs.github_api.get_repository_github_app_api") as mock_get_app_api: + mock_get_app_api.return_value = Mock() + + with patch("webhook_server.utils.helpers.get_repository_color_for_log_prefix") as mock_color: + mock_color.return_value = "test-repo" + + gh = GithubWebhook(minimal_hook_data, minimal_headers, logger) + + # Mock pull request + mock_pr = Mock() + mock_base = Mock() + mock_base.ref = "main" + mock_pr.base = mock_base + + # Mock run_command: succeed for clone/config, fail for checkout + async def mock_run_command(**kwargs: Any) -> tuple[bool, str, str]: + command = kwargs.get("command", "") + if "checkout main" in command: + return (False, "", "Branch not found") + return (True, "", "") + + with ( + patch("webhook_server.libs.github_api.run_command", side_effect=mock_run_command), + patch("asyncio.to_thread", side_effect=to_thread_sync), + pytest.raises(RuntimeError, match="Failed to checkout"), + ): + await 
gh._clone_repository(pull_request=mock_pr) + + @pytest.mark.asyncio + async def test_clone_repository_git_config_warnings( + self, + minimal_hook_data: dict, + minimal_headers: dict, + logger: Mock, + get_value_side_effect: Any, + to_thread_sync: Any, + ) -> None: + """Test that git config failures log warnings but don't raise exceptions.""" + with patch("webhook_server.libs.github_api.Config") as mock_config: + mock_config.return_value.repository = True + mock_config.return_value.repository_local_data.return_value = {} + mock_config.return_value.get_value.side_effect = get_value_side_effect + + with patch("webhook_server.libs.github_api.get_api_with_highest_rate_limit") as mock_get_api: + mock_get_api.return_value = (Mock(), "test-token", "apiuser") + + with patch("webhook_server.libs.github_api.get_github_repo_api") as mock_get_repo_api: + mock_repo = Mock() + mock_repo.clone_url = "https://github.com/org/test-repo.git" + mock_owner = Mock() + mock_owner.login = "test-owner" + mock_repo.owner = mock_owner + mock_get_repo_api.return_value = mock_repo + + with patch("webhook_server.libs.github_api.get_repository_github_app_api") as mock_get_app_api: + mock_get_app_api.return_value = Mock() + + with patch("webhook_server.utils.helpers.get_repository_color_for_log_prefix") as mock_color: + mock_color.return_value = "test-repo" + + gh = GithubWebhook(minimal_hook_data, minimal_headers, logger) + + # Mock pull request + mock_pr = Mock() + mock_base = Mock() + mock_base.ref = "main" + mock_pr.base = mock_base + + # Mock run_command: succeed for clone/checkout, fail for config commands + async def mock_run_command(**kwargs: Any) -> tuple[bool, str, str]: + command = kwargs.get("command", "") + if "config user.name" in command or "config user.email" in command: + return (False, "", "Config failed") + if "config --local --add" in command or "remote update" in command: + return (False, "", "Config failed") + return (True, "", "") + + mock_logger = Mock() + + with ( + 
patch("webhook_server.libs.github_api.run_command", side_effect=mock_run_command), + patch("asyncio.to_thread", side_effect=to_thread_sync), + patch.object(gh, "logger", mock_logger), + ): + await gh._clone_repository(pull_request=mock_pr) + + # Verify clone succeeded despite config failures + assert gh._repo_cloned is True + + # Verify warnings were logged for each config failure + warning_calls = [call for call in mock_logger.warning.call_args_list] + assert len(warning_calls) == 4 # user.name, user.email, fetch config, remote update + + @pytest.mark.asyncio + async def test_clone_repository_general_exception( + self, + minimal_hook_data: dict, + minimal_headers: dict, + logger: Mock, + get_value_side_effect: Any, + ) -> None: + """Test exception handling during clone operation.""" + with patch("webhook_server.libs.github_api.Config") as mock_config: + mock_config.return_value.repository = True + mock_config.return_value.repository_local_data.return_value = {} + mock_config.return_value.get_value.side_effect = get_value_side_effect + + with patch("webhook_server.libs.github_api.get_api_with_highest_rate_limit") as mock_get_api: + mock_get_api.return_value = (Mock(), "test-token", "apiuser") + + with patch("webhook_server.libs.github_api.get_github_repo_api") as mock_get_repo_api: + mock_repo = Mock() + mock_repo.clone_url = "https://github.com/org/test-repo.git" + mock_get_repo_api.return_value = mock_repo + + with patch("webhook_server.libs.github_api.get_repository_github_app_api") as mock_get_app_api: + mock_get_app_api.return_value = Mock() + + with patch("webhook_server.utils.helpers.get_repository_color_for_log_prefix") as mock_color: + mock_color.return_value = "test-repo" + + gh = GithubWebhook(minimal_hook_data, minimal_headers, logger) + + mock_pr = Mock() + + # Mock run_command to raise an exception + async def mock_run_command(*_args: Any, **_kwargs: Any) -> tuple[bool, str, str]: + raise ValueError("Unexpected error during git operation") + + with ( 
+ patch("webhook_server.libs.github_api.run_command", side_effect=mock_run_command), + pytest.raises(RuntimeError, match="Repository clone failed"), + ): + await gh._clone_repository(pull_request=mock_pr) + + @pytest.mark.asyncio + async def test_clone_repository_no_arguments( + self, minimal_hook_data: dict, minimal_headers: dict, logger: Mock + ) -> None: + """Test _clone_repository raises ValueError when no arguments provided.""" + with patch("webhook_server.libs.github_api.Config") as mock_config: + mock_config.return_value.repository = True + mock_config.return_value.repository_local_data.return_value = {} + + with patch("webhook_server.libs.github_api.get_api_with_highest_rate_limit") as mock_get_api: + mock_get_api.return_value = (Mock(), "test-token", "apiuser") + + with patch("webhook_server.libs.github_api.get_github_repo_api") as mock_get_repo_api: + mock_get_repo_api.return_value = Mock() + + with patch("webhook_server.libs.github_api.get_repository_github_app_api") as mock_get_app_api: + mock_get_app_api.return_value = Mock() + + with patch("webhook_server.utils.helpers.get_repository_color_for_log_prefix") as mock_color: + mock_color.return_value = "test-repo" + + gh = GithubWebhook(minimal_hook_data, minimal_headers, logger) + + # Test that calling _clone_repository with no arguments raises ValueError + with pytest.raises(ValueError, match="requires either pull_request or checkout_ref"): + await gh._clone_repository() + + @pytest.mark.asyncio + async def test_clone_repository_empty_checkout_ref( + self, + minimal_hook_data: dict, + minimal_headers: dict, + logger: Mock, + tmp_path: Path, + ) -> None: + """Test _clone_repository raises ValueError when checkout_ref is empty string.""" + with ( + patch("webhook_server.libs.github_api.Config") as mock_config_cls, + patch("webhook_server.libs.github_api.get_api_with_highest_rate_limit") as mock_get_api, + patch("webhook_server.libs.github_api.get_github_repo_api") as mock_get_repo, + 
patch("webhook_server.libs.github_api.get_repository_github_app_api") as mock_get_github_app_api, + ): + # Setup mocks + mock_config = Mock() + mock_config.repository_data = {"enabled": True} + mock_config.get_value.return_value = None + mock_config.data_dir = str(tmp_path) + mock_config_cls.return_value = mock_config + + mock_api = Mock() + mock_api.get_rate_limit.return_value = Mock(rate=Mock(remaining=5000, limit=5000)) + mock_get_api.return_value = (mock_api, "test-token", "test-user") + + mock_repository = Mock() + mock_repository.clone_url = "https://github.com/test/repo.git" + mock_get_repo.return_value = mock_repository + mock_get_github_app_api.return_value = mock_api + + # Create webhook + webhook = GithubWebhook( + hook_data=minimal_hook_data, + headers=Headers(minimal_headers), + logger=logger, + ) + + # Test that calling _clone_repository with empty string raises ValueError + with pytest.raises(ValueError, match="requires either pull_request or checkout_ref"): + await webhook._clone_repository(checkout_ref="") diff --git a/webhook_server/tests/test_owners_files_handler.py b/webhook_server/tests/test_owners_files_handler.py index 2b0a53ef..87a6be15 100644 --- a/webhook_server/tests/test_owners_files_handler.py +++ b/webhook_server/tests/test_owners_files_handler.py @@ -1,3 +1,4 @@ +from pathlib import Path from unittest.mock import AsyncMock, Mock, call, patch import pytest @@ -5,7 +6,6 @@ from github.GithubException import GithubException from webhook_server.libs.handlers.owners_files_handler import OwnersFileHandler -from webhook_server.tests.conftest import ContentFile class TestOwnersFileHandler: @@ -33,52 +33,6 @@ def owners_file_handler(self, mock_github_webhook: Mock) -> OwnersFileHandler: """Create an OwnersFileHandler instance.""" return OwnersFileHandler(mock_github_webhook) - @pytest.fixture - def mock_tree(self) -> Mock: - """Create a mock git tree with OWNERS files.""" - tree = Mock() - tree.tree = [ - Mock(type="blob", path="OWNERS"), - 
Mock(type="blob", path="folder1/OWNERS"), - Mock(type="blob", path="folder2/OWNERS"), - Mock(type="blob", path="folder/folder4/OWNERS"), - Mock(type="blob", path="folder5/OWNERS"), - Mock(type="blob", path="README.md"), # Non-OWNERS file - ] - return tree - - @pytest.fixture - def mock_content_files(self) -> dict[str, ContentFile]: - """Create mock content files for different OWNERS files.""" - return { - "OWNERS": ContentFile( - yaml.dump({ - "approvers": ["root_approver1", "root_approver2"], - "reviewers": ["root_reviewer1", "root_reviewer2"], - }) - ), - "folder1/OWNERS": ContentFile( - yaml.dump({ - "approvers": ["folder1_approver1", "folder1_approver2"], - "reviewers": ["folder1_reviewer1", "folder1_reviewer2"], - }) - ), - "folder2/OWNERS": ContentFile(yaml.dump({})), - "folder/folder4/OWNERS": ContentFile( - yaml.dump({ - "approvers": ["folder4_approver1", "folder4_approver2"], - "reviewers": ["folder4_reviewer1", "folder4_reviewer2"], - }) - ), - "folder5/OWNERS": ContentFile( - yaml.dump({ - "root-approvers": False, - "approvers": ["folder5_approver1", "folder5_approver2"], - "reviewers": ["folder5_reviewer1", "folder5_reviewer2"], - }) - ), - } - @pytest.mark.asyncio async def test_initialize(self, owners_file_handler: OwnersFileHandler, mock_pull_request: Mock) -> None: """Test the initialize method.""" @@ -132,18 +86,36 @@ async def test_ensure_initialized_initialized(self, owners_file_handler: OwnersF owners_file_handler._ensure_initialized() # Should not raise @pytest.mark.asyncio - async def test_list_changed_files(self, owners_file_handler: OwnersFileHandler, mock_pull_request: Mock) -> None: - """Test list_changed_files method.""" - mock_file1 = Mock() - mock_file1.filename = "file1.py" - mock_file2 = Mock() - mock_file2.filename = "file2.py" - mock_pull_request.get_files.return_value = [mock_file1, mock_file2] - - result = await owners_file_handler.list_changed_files(mock_pull_request) - - assert result == ["file1.py", "file2.py"] - 
mock_pull_request.get_files.assert_called_once() + async def test_list_changed_files( + self, owners_file_handler: OwnersFileHandler, mock_pull_request: Mock, tmp_path: Path + ) -> None: + """Test list_changed_files method using git diff.""" + # Set up mock PR SHAs + mock_pull_request.base.sha = "base123abc" + mock_pull_request.head.sha = "head456def" + + # Set up handler properties + owners_file_handler.github_webhook.clone_repo_dir = str(tmp_path) + owners_file_handler.github_webhook.mask_sensitive = True + + # Mock run_command to return git diff output + with patch( + "webhook_server.libs.handlers.owners_files_handler.run_command", new=AsyncMock() + ) as mock_run_command: + mock_run_command.return_value = (True, "file1.py\nfile2.py\n", "") + + result = await owners_file_handler.list_changed_files(mock_pull_request) + + # Verify result + assert result == ["file1.py", "file2.py"] + + # Verify run_command was called with correct git command + mock_run_command.assert_called_once_with( + command=f"git -C {tmp_path} diff --name-only base123abc...head456def", + log_prefix="[TEST]", + verify_stderr=False, + mask_sensitive=True, + ) def test_validate_owners_content_valid(self, owners_file_handler: OwnersFileHandler) -> None: """Test _validate_owners_content with valid content.""" @@ -176,43 +148,62 @@ def test_validate_owners_content_reviewers_not_strings(self, owners_file_handler assert owners_file_handler._validate_owners_content(invalid_content, "test/path") is False @pytest.mark.asyncio - async def test_get_file_content(self, owners_file_handler: OwnersFileHandler, mock_pull_request: Mock) -> None: - """Test _get_file_content method.""" - mock_content = ContentFile("test content") - owners_file_handler.repository.get_contents = Mock(return_value=mock_content) + async def test_get_file_content_from_local(self, owners_file_handler: OwnersFileHandler, tmp_path: Path) -> None: + """Test _get_file_content_from_local method.""" + # Create a temporary OWNERS file + 
owners_file = tmp_path / "test" / "OWNERS" + owners_file.parent.mkdir(parents=True, exist_ok=True) + owners_file.write_text("test content") + + # Set clone_repo_dir to tmp_path + owners_file_handler.github_webhook.clone_repo_dir = str(tmp_path) - result = await owners_file_handler._get_file_content("test/path", mock_pull_request) + result = await owners_file_handler._get_file_content_from_local(owners_file) - assert result == (mock_content, "test/path") - owners_file_handler.repository.get_contents.assert_called_once_with("test/path", "main") + assert result == ("test content", "test/OWNERS") @pytest.mark.asyncio - async def test_get_file_content_list_result( - self, owners_file_handler: OwnersFileHandler, mock_pull_request: Mock + async def test_get_file_content_from_local_file_not_found( + self, owners_file_handler: OwnersFileHandler, tmp_path: Path ) -> None: - """Test _get_file_content when repository returns a list.""" - mock_content = ContentFile("test content") - owners_file_handler.repository.get_contents = Mock(return_value=[mock_content]) + """Test _get_file_content_from_local method with missing file.""" + # Create path to non-existent file + missing_file = tmp_path / "test" / "OWNERS" + + # Set clone_repo_dir to tmp_path + owners_file_handler.github_webhook.clone_repo_dir = str(tmp_path) + owners_file_handler.logger.warning = Mock() - result = await owners_file_handler._get_file_content("test/path", mock_pull_request) + result = await owners_file_handler._get_file_content_from_local(missing_file) - assert result == (mock_content, "test/path") + assert result is None + owners_file_handler.logger.warning.assert_called_once() + # Verify the warning message contains expected information + warning_call = owners_file_handler.logger.warning.call_args[0][0] + assert "Failed to read OWNERS file" in warning_call + assert "test/OWNERS" in warning_call + assert "Skipping this file" in warning_call @pytest.mark.asyncio async def 
test_get_all_repository_approvers_and_reviewers( self, owners_file_handler: OwnersFileHandler, - mock_pull_request: Mock, - mock_tree: Mock, - mock_content_files: dict[str, ContentFile], + owners_files_test_data: dict[str, str], + tmp_path: Path, ) -> None: - owners_file_handler.repository.get_git_tree = Mock(return_value=mock_tree) + """Test reading OWNERS files from local cloned repository.""" + # Create a temporary directory structure with OWNERS files + for file_path, content in owners_files_test_data.items(): + full_path = tmp_path / file_path + full_path.parent.mkdir(parents=True, exist_ok=True) + full_path.write_text(content) + + # Set clone_repo_dir to tmp_path (simulating already cloned repo) + owners_file_handler.github_webhook.clone_repo_dir = str(tmp_path) - def mock_get_contents(path: str, ref: str) -> ContentFile: - return mock_content_files.get(path, ContentFile("")) + # No worktree needed - read directly from clone + result = await owners_file_handler.get_all_repository_approvers_and_reviewers() - owners_file_handler.repository.get_contents = Mock(side_effect=mock_get_contents) - result = await owners_file_handler.get_all_repository_approvers_and_reviewers(mock_pull_request) expected = { ".": {"approvers": ["root_approver1", "root_approver2"], "reviewers": ["root_reviewer1", "root_reviewer2"]}, "folder1": { @@ -234,44 +225,60 @@ def mock_get_contents(path: str, ref: str) -> ContentFile: @pytest.mark.asyncio async def test_get_all_repository_approvers_and_reviewers_too_many_files( - self, owners_file_handler: OwnersFileHandler, mock_pull_request: Mock + self, owners_file_handler: OwnersFileHandler, tmp_path: Path ) -> None: - mock_tree = Mock() - mock_tree.tree = [Mock(type="blob", path=f"file{i}/OWNERS") for i in range(1001)] - owners_file_handler.repository.get_git_tree = Mock(return_value=mock_tree) + """Test that too many OWNERS files are handled correctly.""" + # Create 1001 OWNERS files + for i in range(1001): + owners_file = tmp_path / 
f"file{i}" / "OWNERS" + owners_file.parent.mkdir(parents=True, exist_ok=True) + owners_file.write_text(yaml.dump({"approvers": [], "reviewers": []})) + + # Set clone_repo_dir to tmp_path + owners_file_handler.github_webhook.clone_repo_dir = str(tmp_path) owners_file_handler.logger.error = Mock() - owners_file_handler.repository.get_contents = Mock( - return_value=ContentFile(yaml.dump({"approvers": [], "reviewers": []})) - ) - result = await owners_file_handler.get_all_repository_approvers_and_reviewers(mock_pull_request) + + # No worktree needed - read directly from clone + result = await owners_file_handler.get_all_repository_approvers_and_reviewers() + assert len(result) == 1000 owners_file_handler.logger.error.assert_called_once() @pytest.mark.asyncio async def test_get_all_repository_approvers_and_reviewers_invalid_yaml( - self, owners_file_handler: OwnersFileHandler, mock_pull_request: Mock + self, owners_file_handler: OwnersFileHandler, tmp_path: Path ) -> None: - mock_tree = Mock() - mock_tree.tree = [Mock(type="blob", path="OWNERS")] - owners_file_handler.repository.get_git_tree = Mock(return_value=mock_tree) - mock_content = ContentFile("invalid: yaml: content: [") - owners_file_handler.repository.get_contents = Mock(return_value=mock_content) - owners_file_handler.logger.error = Mock() - result = await owners_file_handler.get_all_repository_approvers_and_reviewers(mock_pull_request) + """Test handling of invalid YAML in OWNERS files.""" + # Create OWNERS file with invalid YAML + owners_file = tmp_path / "OWNERS" + owners_file.write_text("invalid: yaml: content: [") + + # Set clone_repo_dir to tmp_path + owners_file_handler.github_webhook.clone_repo_dir = str(tmp_path) + owners_file_handler.logger.exception = Mock() + + # No worktree needed - read directly from clone + result = await owners_file_handler.get_all_repository_approvers_and_reviewers() + assert result == {} - owners_file_handler.logger.error.assert_called_once() + 
owners_file_handler.logger.exception.assert_called_once() @pytest.mark.asyncio async def test_get_all_repository_approvers_and_reviewers_invalid_content( - self, owners_file_handler: OwnersFileHandler, mock_pull_request: Mock + self, owners_file_handler: OwnersFileHandler, tmp_path: Path ) -> None: - mock_tree = Mock() - mock_tree.tree = [Mock(type="blob", path="OWNERS")] - owners_file_handler.repository.get_git_tree = Mock(return_value=mock_tree) - mock_content = ContentFile(yaml.dump({"approvers": "not_a_list"})) - owners_file_handler.repository.get_contents = Mock(return_value=mock_content) + """Test handling of invalid content structure in OWNERS files.""" + # Create OWNERS file with invalid structure + owners_file = tmp_path / "OWNERS" + owners_file.write_text(yaml.dump({"approvers": "not_a_list"})) + + # Set clone_repo_dir to tmp_path + owners_file_handler.github_webhook.clone_repo_dir = str(tmp_path) owners_file_handler.logger.error = Mock() - result = await owners_file_handler.get_all_repository_approvers_and_reviewers(mock_pull_request) + + # No worktree needed - read directly from clone + result = await owners_file_handler.get_all_repository_approvers_and_reviewers() + assert result == {} owners_file_handler.logger.error.assert_called_once() diff --git a/webhook_server/tests/test_pull_request_handler.py b/webhook_server/tests/test_pull_request_handler.py index fabb7b5d..7872e8a2 100644 --- a/webhook_server/tests/test_pull_request_handler.py +++ b/webhook_server/tests/test_pull_request_handler.py @@ -1,3 +1,4 @@ +import asyncio from unittest.mock import AsyncMock, Mock, patch import pytest @@ -21,9 +22,19 @@ ) -# Helper async function for mocking async cached property -async def _mock_owners_data_for_changed_files() -> dict: - return {} +class _AwaitableValue: + def __init__(self, return_value: dict | None = None) -> None: + self._value = return_value or {} + + def __await__(self): + async def _inner() -> dict: + return self._value + + return 
_inner().__await__() + + +def _owners_data_coroutine(return_value: dict | None = None) -> _AwaitableValue: + return _AwaitableValue(return_value) class TestPullRequestHandler: @@ -613,7 +624,7 @@ async def test_check_if_can_be_merged_approved( patch.object( pull_request_handler.owners_file_handler, "owners_data_for_changed_files", - _mock_owners_data_for_changed_files(), + _owners_data_coroutine(), ), patch.object(pull_request_handler.github_webhook, "minimum_lgtm", 0), patch.object(pull_request_handler.check_run_handler, "set_merge_check_in_progress", new=AsyncMock()), @@ -644,7 +655,7 @@ async def test_check_if_pr_approved_no_labels(self, pull_request_handler: PullRe patch.object( pull_request_handler.owners_file_handler, "owners_data_for_changed_files", - _mock_owners_data_for_changed_files(), + _owners_data_coroutine(), ), patch.object(pull_request_handler.github_webhook, "minimum_lgtm", 0), patch.object(pull_request_handler.owners_file_handler, "all_pull_request_approvers", []), @@ -661,7 +672,7 @@ async def test_check_if_pr_approved_approved_label(self, pull_request_handler: P patch.object( pull_request_handler.owners_file_handler, "owners_data_for_changed_files", - _mock_owners_data_for_changed_files(), + _owners_data_coroutine(), ), patch.object(pull_request_handler.github_webhook, "minimum_lgtm", 0), patch.object(pull_request_handler.owners_file_handler, "all_pull_request_approvers", []), @@ -678,7 +689,7 @@ async def test_check_if_pr_approved_lgtm_label(self, pull_request_handler: PullR patch.object( pull_request_handler.owners_file_handler, "owners_data_for_changed_files", - _mock_owners_data_for_changed_files(), + _owners_data_coroutine(), ), patch.object(pull_request_handler.github_webhook, "minimum_lgtm", 0), patch.object(pull_request_handler.owners_file_handler, "all_pull_request_approvers", []), @@ -695,7 +706,7 @@ async def test_check_if_pr_approved_changes_requested(self, pull_request_handler patch.object( pull_request_handler.owners_file_handler, 
"owners_data_for_changed_files", - _mock_owners_data_for_changed_files(), + _owners_data_coroutine(), ), patch.object(pull_request_handler.github_webhook, "minimum_lgtm", 0), patch.object(pull_request_handler.owners_file_handler, "all_pull_request_approvers", []), @@ -714,7 +725,7 @@ async def test_check_if_pr_approved_commented(self, pull_request_handler: PullRe patch.object( pull_request_handler.owners_file_handler, "owners_data_for_changed_files", - _mock_owners_data_for_changed_files(), + _owners_data_coroutine(), ), patch.object(pull_request_handler.github_webhook, "minimum_lgtm", 0), patch.object(pull_request_handler.owners_file_handler, "all_pull_request_approvers", []), @@ -1326,6 +1337,15 @@ async def test_process_new_or_reprocess_pull_request_parallel_execution( self, pull_request_handler: PullRequestHandler, mock_pull_request: Mock ) -> None: """Test process_new_or_reprocess_pull_request executes tasks in parallel.""" + + # Track that asyncio.gather was used while still executing the real gather + real_gather = asyncio.gather + gather_calls: dict[str, int] = {"count": 0} + + async def tracking_gather(*args, **kwargs): # type: ignore[unused-argument] + gather_calls["count"] += 1 + return await real_gather(*args, **kwargs) + # Mock nothing exists - full workflow with ( patch.object(pull_request_handler, "_welcome_comment_exists", new=AsyncMock(return_value=False)), @@ -1335,38 +1355,63 @@ async def test_process_new_or_reprocess_pull_request_parallel_execution( patch.object(pull_request_handler, "set_wip_label_based_on_title", new=AsyncMock()), patch.object(pull_request_handler, "process_opened_or_synchronize_pull_request", new=AsyncMock()), patch.object(pull_request_handler, "set_pull_request_automerge", new=AsyncMock()), - patch("asyncio.gather", new=AsyncMock(return_value=[])) as mock_gather, + patch("asyncio.gather", new=tracking_gather), ): await pull_request_handler.process_new_or_reprocess_pull_request(pull_request=mock_pull_request) # Verify 
asyncio.gather was called (parallel execution) - mock_gather.assert_awaited_once() + assert gather_calls["count"] >= 1 @pytest.mark.asyncio async def test_process_new_or_reprocess_pull_request_exception_handling( self, pull_request_handler: PullRequestHandler, mock_pull_request: Mock ) -> None: """Test process_new_or_reprocess_pull_request handles exceptions gracefully.""" - # Mock one task fails + + async def failing_create_issue(*args, **kwargs): # type: ignore[unused-argument] + raise Exception("Test error") + + async def always_false(*args, **kwargs) -> bool: # type: ignore[unused-argument] + return False + + def mock_create_issue_comment(*args, **kwargs): # type: ignore[unused-argument] + return None + + calls: dict[str, int] = { + "set_wip": 0, + "process_opened": 0, + "set_automerge": 0, + } + + async def set_wip_stub(*args, **kwargs): # type: ignore[unused-argument] + calls["set_wip"] += 1 + + async def process_opened_stub(*args, **kwargs): # type: ignore[unused-argument] + calls["process_opened"] += 1 + + async def set_automerge_stub(*args, **kwargs): # type: ignore[unused-argument] + calls["set_automerge"] += 1 + + # Mock one task failing while others still execute with ( - patch.object(pull_request_handler, "_welcome_comment_exists", new=AsyncMock(return_value=False)), - patch.object(pull_request_handler, "_tracking_issue_exists", new=AsyncMock(return_value=False)), - patch.object(mock_pull_request, "create_issue_comment", new=Mock()), + patch.object(pull_request_handler, "_welcome_comment_exists", new=always_false), + patch.object(pull_request_handler, "_tracking_issue_exists", new=always_false), + patch.object(mock_pull_request, "create_issue_comment", new=mock_create_issue_comment), patch.object( pull_request_handler, "create_issue_for_new_pull_request", - new=AsyncMock(side_effect=Exception("Test error")), + new=failing_create_issue, ), - patch.object(pull_request_handler, "set_wip_label_based_on_title", new=AsyncMock()), - 
patch.object(pull_request_handler, "process_opened_or_synchronize_pull_request", new=AsyncMock()), - patch.object(pull_request_handler, "set_pull_request_automerge", new=AsyncMock()) as mock_automerge, - patch.object(pull_request_handler.logger, "error") as mock_logger_error, + patch.object(pull_request_handler, "set_wip_label_based_on_title", new=set_wip_stub), + patch.object( + pull_request_handler, + "process_opened_or_synchronize_pull_request", + new=process_opened_stub, + ), + patch.object(pull_request_handler, "set_pull_request_automerge", new=set_automerge_stub), ): # Should not raise exception - errors are caught and logged await pull_request_handler.process_new_or_reprocess_pull_request(pull_request=mock_pull_request) - # Verify error was logged - mock_logger_error.assert_called() - - # Verify automerge still executed (after errors) - mock_automerge.assert_awaited_once() + # Verify automerge and other tasks were called despite error in one task + assert calls["set_automerge"] == 1 diff --git a/webhook_server/tests/test_pull_request_owners.py b/webhook_server/tests/test_pull_request_owners.py index f045e075..960f6824 100644 --- a/webhook_server/tests/test_pull_request_owners.py +++ b/webhook_server/tests/test_pull_request_owners.py @@ -1,8 +1,8 @@ +from unittest.mock import AsyncMock, patch + import pytest -import yaml from webhook_server.libs.handlers.pull_request_handler import PullRequestHandler -from webhook_server.tests.conftest import ContentFile, Tree from webhook_server.utils.constants import APPROVED_BY_LABEL_PREFIX ALL_CHANGED_FILES = [ @@ -16,53 +16,6 @@ ] -class Repository: - def __init__(self): - self.name = "test-repo" - - def get_git_tree(self, sha: str, recursive: bool): - return Tree("") - - def get_contents(self, path: str, ref: str): - owners_data = yaml.dump({ - "approvers": ["root_approver1", "root_approver2"], - "reviewers": ["root_reviewer1", "root_reviewer2"], - }) - - folder1_owners_data = yaml.dump({ - "approvers": 
["folder1_approver1", "folder1_approver2"], - "reviewers": ["folder1_reviewer1", "folder1_reviewer2"], - }) - - folder4_owners_data = yaml.dump({ - "approvers": ["folder4_approver1", "folder4_approver2"], - "reviewers": ["folder4_reviewer1", "folder4_reviewer2"], - }) - - folder5_owners_data = yaml.dump({ - "root-approvers": False, - "approvers": ["folder5_approver1", "folder5_approver2"], - "reviewers": ["folder5_reviewer1", "folder5_reviewer2"], - }) - if path == "OWNERS": - return ContentFile(owners_data) - - elif path == "folder1/OWNERS": - return ContentFile(folder1_owners_data) - - elif path == "folder2/OWNERS": - return ContentFile(yaml.dump({})) - - elif path == "folder/folder4/OWNERS": - return ContentFile(folder4_owners_data) - - elif path == "folder": - return ContentFile(yaml.dump({})) - - elif path == "folder5/OWNERS": - return ContentFile(folder5_owners_data) - - @pytest.fixture(scope="function") def changed_files(request, owners_file_handler): if hasattr(request, "param") and request.param: @@ -124,10 +77,28 @@ def all_approvers_reviewers(owners_file_handler): @pytest.mark.asyncio async def test_get_all_repository_approvers_and_reviewers( - changed_files, process_github_webhook, owners_file_handler, pull_request, all_repository_approvers_and_reviewers + changed_files, # noqa: ARG001 + process_github_webhook, + owners_file_handler, + all_repository_approvers_and_reviewers, # noqa: ARG001 + tmp_path, + owners_files_test_data, ): - process_github_webhook.repository = Repository() - read_owners_result = await owners_file_handler.get_all_repository_approvers_and_reviewers(pull_request=pull_request) + """Test reading OWNERS files from local cloned repository.""" + # Create a temporary directory structure with OWNERS files + for file_path, content in owners_files_test_data.items(): + full_path = tmp_path / file_path + full_path.parent.mkdir(parents=True, exist_ok=True) + full_path.write_text(content) + + # Set clone_repo_dir to tmp_path (simulating already 
cloned repo) + process_github_webhook.clone_repo_dir = str(tmp_path) + + # Mock _clone_repository to do nothing (already "cloned" to tmp_path) + with patch.object(process_github_webhook, "_clone_repository", new=AsyncMock()): + # No worktree needed - read directly from clone + read_owners_result = await owners_file_handler.get_all_repository_approvers_and_reviewers() + assert read_owners_result == owners_file_handler.all_repository_approvers_and_reviewers diff --git a/webhook_server/tests/test_push_handler.py b/webhook_server/tests/test_push_handler.py index b1cdc200..0fac3555 100644 --- a/webhook_server/tests/test_push_handler.py +++ b/webhook_server/tests/test_push_handler.py @@ -1,5 +1,6 @@ """Tests for webhook_server.libs.handlers.push_handler module.""" +from contextlib import asynccontextmanager from unittest.mock import AsyncMock, Mock, patch import pytest @@ -7,6 +8,21 @@ from webhook_server.libs.handlers.push_handler import PushHandler +def _build_checkout_context(result: tuple[bool, str, str, str]): + """Create an async context manager that yields the provided result.""" + + @asynccontextmanager + async def _cm(*_args, **_kwargs): + yield result + + return _cm() + + +def _set_checkout_result(mock_checkout: Mock, result: tuple[bool, str, str, str]) -> None: + """Configure the checkout mock to return an async context manager.""" + mock_checkout.side_effect = lambda *_a, **_kw: _build_checkout_context(result) + + class TestPushHandler: """Test suite for PushHandler class.""" @@ -107,43 +123,48 @@ async def test_process_push_webhook_data_tag_with_slash(self, push_handler: Push @pytest.mark.asyncio async def test_upload_to_pypi_success(self, push_handler: PushHandler) -> None: """Test successful upload to pypi.""" - with patch.object(push_handler.runner_handler, "_prepare_cloned_repo_dir") as mock_prepare: + with patch.object(push_handler.runner_handler, "_checkout_worktree") as mock_checkout: with patch( 
"webhook_server.libs.handlers.push_handler.run_command", new_callable=AsyncMock ) as mock_run_command: - with patch("webhook_server.libs.handlers.push_handler.uuid4") as mock_uuid: - with patch("webhook_server.libs.handlers.push_handler.send_slack_message") as mock_slack: - # Mock successful clone - mock_prepare.return_value.__aenter__.return_value = (True, "", "") + with patch("webhook_server.libs.handlers.push_handler.send_slack_message") as mock_slack: + # Mock successful checkout + _set_checkout_result(mock_checkout, (True, "/tmp/worktree-path", "", "")) - # Mock successful build - mock_run_command.side_effect = [ - (True, "", ""), # uv build - (True, "package-1.0.0.tar.gz", ""), # ls command - (True, "", ""), # twine check - (True, "", ""), # twine upload - ] - - mock_uuid.return_value = "test-uuid" + # Mock successful build + mock_run_command.side_effect = [ + (True, "", ""), # uv build + (True, "package-1.0.0.tar.gz", ""), # ls command + (True, "", ""), # twine check + (True, "", ""), # twine upload + ] - await push_handler.upload_to_pypi(tag_name="v1.0.0") + await push_handler.upload_to_pypi(tag_name="v1.0.0") - # Verify clone was called - mock_prepare.assert_called_once() + # Verify checkout was called + mock_checkout.assert_called_once() - # Verify build command was called - assert mock_run_command.call_count == 4 + # Verify build command was called + assert mock_run_command.call_count == 4 - # Verify slack message was sent - mock_slack.assert_called_once() + # Verify slack message was sent + mock_slack.assert_called_once() @pytest.mark.asyncio async def test_upload_to_pypi_clone_failure(self, push_handler: PushHandler) -> None: """Test upload to pypi when clone fails.""" - with patch.object(push_handler.runner_handler, "_prepare_cloned_repo_dir") as mock_prepare: + with patch.object(push_handler.runner_handler, "_checkout_worktree") as mock_checkout: with patch.object(push_handler.repository, "create_issue") as mock_create_issue: - # Mock failed clone - 
mock_prepare.return_value.__aenter__.return_value = (False, "Clone failed", "Error") + # Mock failed checkout + _set_checkout_result( + mock_checkout, + ( + False, + "/tmp/worktree-path", + "Clone failed", + "Error", + ), + ) await push_handler.upload_to_pypi(tag_name="v1.0.0") @@ -155,13 +176,13 @@ async def test_upload_to_pypi_clone_failure(self, push_handler: PushHandler) -> @pytest.mark.asyncio async def test_upload_to_pypi_build_failure(self, push_handler: PushHandler) -> None: """Test upload to pypi when build fails.""" - with patch.object(push_handler.runner_handler, "_prepare_cloned_repo_dir") as mock_prepare: + with patch.object(push_handler.runner_handler, "_checkout_worktree") as mock_checkout: with patch( "webhook_server.libs.handlers.push_handler.run_command", new_callable=AsyncMock ) as mock_run_command: with patch.object(push_handler.repository, "create_issue") as mock_create_issue: - # Mock successful clone - mock_prepare.return_value.__aenter__.return_value = (True, "", "") + # Mock successful checkout + _set_checkout_result(mock_checkout, (True, "/tmp/worktree-path", "", "")) # Mock failed build mock_run_command.return_value = (False, "Build failed", "Error") @@ -176,13 +197,13 @@ async def test_upload_to_pypi_build_failure(self, push_handler: PushHandler) -> @pytest.mark.asyncio async def test_upload_to_pypi_ls_failure(self, push_handler: PushHandler) -> None: """Test upload to pypi when ls command fails.""" - with patch.object(push_handler.runner_handler, "_prepare_cloned_repo_dir") as mock_prepare: + with patch.object(push_handler.runner_handler, "_checkout_worktree") as mock_checkout: with patch( "webhook_server.libs.handlers.push_handler.run_command", new_callable=AsyncMock ) as mock_run_command: with patch.object(push_handler.repository, "create_issue") as mock_create_issue: - # Mock successful clone - mock_prepare.return_value.__aenter__.return_value = (True, "", "") + # Mock successful checkout + _set_checkout_result(mock_checkout, (True, 
"/tmp/worktree-path", "", "")) # Mock successful build, failed ls mock_run_command.side_effect = [ @@ -200,13 +221,13 @@ async def test_upload_to_pypi_ls_failure(self, push_handler: PushHandler) -> Non @pytest.mark.asyncio async def test_upload_to_pypi_twine_check_failure(self, push_handler: PushHandler) -> None: """Test upload to pypi when twine check fails.""" - with patch.object(push_handler.runner_handler, "_prepare_cloned_repo_dir") as mock_prepare: + with patch.object(push_handler.runner_handler, "_checkout_worktree") as mock_checkout: with patch( "webhook_server.libs.handlers.push_handler.run_command", new_callable=AsyncMock ) as mock_run_command: with patch.object(push_handler.repository, "create_issue") as mock_create_issue: - # Mock successful clone - mock_prepare.return_value.__aenter__.return_value = (True, "", "") + # Mock successful checkout + _set_checkout_result(mock_checkout, (True, "/tmp/worktree-path", "", "")) # Mock successful build and ls, failed twine check mock_run_command.side_effect = [ @@ -225,13 +246,13 @@ async def test_upload_to_pypi_twine_check_failure(self, push_handler: PushHandle @pytest.mark.asyncio async def test_upload_to_pypi_twine_upload_failure(self, push_handler: PushHandler) -> None: """Test upload to pypi when twine upload fails.""" - with patch.object(push_handler.runner_handler, "_prepare_cloned_repo_dir") as mock_prepare: + with patch.object(push_handler.runner_handler, "_checkout_worktree") as mock_checkout: with patch( "webhook_server.libs.handlers.push_handler.run_command", new_callable=AsyncMock ) as mock_run_command: with patch.object(push_handler.repository, "create_issue") as mock_create_issue: - # Mock successful clone - mock_prepare.return_value.__aenter__.return_value = (True, "", "") + # Mock successful checkout + _set_checkout_result(mock_checkout, (True, "/tmp/worktree-path", "", "")) # Mock successful build, ls, and twine check, failed twine upload mock_run_command.side_effect = [ @@ -253,100 +274,98 @@ 
async def test_upload_to_pypi_success_no_slack(self, push_handler: PushHandler) """Test successful upload to pypi without slack webhook.""" push_handler.github_webhook.slack_webhook_url = "" # Empty string instead of None - with patch.object(push_handler.runner_handler, "_prepare_cloned_repo_dir") as mock_prepare: + with patch.object(push_handler.runner_handler, "_checkout_worktree") as mock_checkout: with patch( "webhook_server.libs.handlers.push_handler.run_command", new_callable=AsyncMock ) as mock_run_command: - with patch("webhook_server.libs.handlers.push_handler.uuid4") as mock_uuid: - # Mock successful clone - mock_prepare.return_value.__aenter__.return_value = (True, "", "") - - # Mock successful build - mock_run_command.side_effect = [ - (True, "", ""), # uv build - (True, "package-1.0.0.tar.gz", ""), # ls command - (True, "", ""), # twine check - (True, "", ""), # twine upload - ] - - mock_uuid.return_value = "test-uuid" - - with patch("webhook_server.libs.handlers.push_handler.send_slack_message") as mock_slack: - await push_handler.upload_to_pypi(tag_name="v1.0.0") + # Mock successful checkout + _set_checkout_result(mock_checkout, (True, "/tmp/worktree-path", "", "")) + + # Mock successful build + mock_run_command.side_effect = [ + (True, "", ""), # uv build + (True, "package-1.0.0.tar.gz", ""), # ls command + (True, "", ""), # twine check + (True, "", ""), # twine upload + ] + + with patch("webhook_server.libs.handlers.push_handler.send_slack_message") as mock_slack: + await push_handler.upload_to_pypi(tag_name="v1.0.0") - # Verify slack message was not sent - mock_slack.assert_not_called() + # Verify slack message was not sent + mock_slack.assert_not_called() @pytest.mark.asyncio async def test_upload_to_pypi_commands_execution_order(self, push_handler: PushHandler) -> None: """Test that commands are executed in the correct order.""" - with patch.object(push_handler.runner_handler, "_prepare_cloned_repo_dir") as mock_prepare: + with 
patch.object(push_handler.runner_handler, "_checkout_worktree") as mock_checkout: with patch( "webhook_server.libs.handlers.push_handler.run_command", new_callable=AsyncMock ) as mock_run_command: - with patch("webhook_server.libs.handlers.push_handler.uuid4") as mock_uuid: - # Mock successful clone - mock_prepare.return_value.__aenter__.return_value = (True, "", "") + # Mock successful checkout + _set_checkout_result(mock_checkout, (True, "/tmp/worktree-path", "", "")) - # Mock successful all commands - mock_run_command.side_effect = [ - (True, "", ""), # uv build - (True, "package-1.0.0.tar.gz", ""), # ls command - (True, "", ""), # twine check - (True, "", ""), # twine upload - ] - - mock_uuid.return_value = "test-uuid" + # Mock successful all commands + mock_run_command.side_effect = [ + (True, "", ""), # uv build + (True, "package-1.0.0.tar.gz", ""), # ls command + (True, "", ""), # twine check + (True, "", ""), # twine upload + ] - await push_handler.upload_to_pypi(tag_name="v1.0.0") + await push_handler.upload_to_pypi(tag_name="v1.0.0") - # Verify commands were called in correct order - calls = mock_run_command.call_args_list - # Each call is call(command=..., log_prefix=...) - # The command string is in the 'command' kwarg - assert "uv" in calls[0].kwargs["command"] - assert "build" in calls[0].kwargs["command"] - assert "ls" in calls[1].kwargs["command"] - assert "twine check" in calls[2].kwargs["command"] - assert "twine upload" in calls[3].kwargs["command"] - assert "package-1.0.0.tar.gz" in calls[3].kwargs["command"] + # Verify commands were called in correct order + calls = mock_run_command.call_args_list + # Each call is call(command=..., log_prefix=...) 
+ # The command string is in the 'command' kwarg + assert "uv" in calls[0].kwargs["command"] + assert "build" in calls[0].kwargs["command"] + assert "ls" in calls[1].kwargs["command"] + assert "twine check" in calls[2].kwargs["command"] + assert "twine upload" in calls[3].kwargs["command"] + assert "package-1.0.0.tar.gz" in calls[3].kwargs["command"] @pytest.mark.asyncio - async def test_upload_to_pypi_unique_clone_directory(self, push_handler: PushHandler) -> None: - """Test that each upload uses a unique clone directory.""" - with patch.object(push_handler.runner_handler, "_prepare_cloned_repo_dir") as mock_prepare: + async def test_upload_to_pypi_checkout_with_tag(self, push_handler: PushHandler) -> None: + """Test that checkout is called with the correct tag.""" + with patch.object(push_handler.runner_handler, "_checkout_worktree") as mock_checkout: with patch( "webhook_server.libs.handlers.push_handler.run_command", new_callable=AsyncMock ) as mock_run_command: - with patch("webhook_server.libs.handlers.push_handler.uuid4") as mock_uuid: - # Mock successful clone - mock_prepare.return_value.__aenter__.return_value = (True, "", "") - - # Mock successful build - mock_run_command.side_effect = [ - (True, "", ""), # uv build - (True, "package-1.0.0.tar.gz", ""), # ls command - (True, "", ""), # twine check - (True, "", ""), # twine upload - ] + # Mock successful checkout + _set_checkout_result(mock_checkout, (True, "/tmp/worktree-path", "", "")) - mock_uuid.return_value = "test-uuid" + # Mock successful build + mock_run_command.side_effect = [ + (True, "", ""), # uv build + (True, "package-1.0.0.tar.gz", ""), # ls command + (True, "", ""), # twine check + (True, "", ""), # twine upload + ] - await push_handler.upload_to_pypi(tag_name="v1.0.0") + await push_handler.upload_to_pypi(tag_name="v1.0.0") - # Verify clone directory includes UUID - mock_prepare.assert_called_once() - call_args = mock_prepare.call_args - assert "test-uuid" in call_args[1]["clone_repo_dir"] 
- assert call_args[1]["clone_repo_dir"] == "/tmp/test-repo-test-uuid" + # Verify checkout was called with correct tag + mock_checkout.assert_called_once() + call_args = mock_checkout.call_args + assert call_args[1]["checkout"] == "v1.0.0" @pytest.mark.asyncio async def test_upload_to_pypi_issue_creation_format(self, push_handler: PushHandler) -> None: """Test that issues are created with proper format.""" - with patch.object(push_handler.runner_handler, "_prepare_cloned_repo_dir") as mock_prepare: + with patch.object(push_handler.runner_handler, "_checkout_worktree") as mock_checkout: with patch.object(push_handler.repository, "create_issue") as mock_create_issue: - # Mock failed clone - mock_prepare.return_value.__aenter__.return_value = (False, "Clone failed", "Error details") + # Mock failed checkout + _set_checkout_result( + mock_checkout, + ( + False, + "/tmp/worktree-path", + "Clone failed", + "Error details", + ), + ) await push_handler.upload_to_pypi(tag_name="v1.0.0") @@ -363,34 +382,31 @@ async def test_upload_to_pypi_issue_creation_format(self, push_handler: PushHand @pytest.mark.asyncio async def test_upload_to_pypi_slack_message_format(self, push_handler: PushHandler) -> None: """Test that slack messages are sent with proper format.""" - with patch.object(push_handler.runner_handler, "_prepare_cloned_repo_dir") as mock_prepare: + with patch.object(push_handler.runner_handler, "_checkout_worktree") as mock_checkout: with patch( "webhook_server.libs.handlers.push_handler.run_command", new_callable=AsyncMock ) as mock_run_command: - with patch("webhook_server.libs.handlers.push_handler.uuid4") as mock_uuid: - with patch("webhook_server.libs.handlers.push_handler.send_slack_message") as mock_slack: - # Mock successful clone - mock_prepare.return_value.__aenter__.return_value = (True, "", "") - - # Mock successful build - mock_run_command.side_effect = [ - (True, "", ""), # uv build - (True, "package-1.0.0.tar.gz", ""), # ls command - (True, "", ""), # 
twine check - (True, "", ""), # twine upload - ] - - mock_uuid.return_value = "test-uuid" - - await push_handler.upload_to_pypi(tag_name="v1.0.0") - - # Verify slack message format - mock_slack.assert_called_once() - call_args = mock_slack.call_args - - assert call_args[1]["webhook_url"] == "https://hooks.slack.com/test" - assert "test-repo" in call_args[1]["message"] - assert "v1.0.0" in call_args[1]["message"] - assert "published to PYPI" in call_args[1]["message"] - assert call_args[1]["logger"] == push_handler.logger - assert call_args[1]["log_prefix"] == push_handler.log_prefix + with patch("webhook_server.libs.handlers.push_handler.send_slack_message") as mock_slack: + # Mock successful checkout + _set_checkout_result(mock_checkout, (True, "/tmp/worktree-path", "", "")) + + # Mock successful build + mock_run_command.side_effect = [ + (True, "", ""), # uv build + (True, "package-1.0.0.tar.gz", ""), # ls command + (True, "", ""), # twine check + (True, "", ""), # twine upload + ] + + await push_handler.upload_to_pypi(tag_name="v1.0.0") + + # Verify slack message format + mock_slack.assert_called_once() + call_args = mock_slack.call_args + + assert call_args[1]["webhook_url"] == "https://hooks.slack.com/test" + assert "test-repo" in call_args[1]["message"] + assert "v1.0.0" in call_args[1]["message"] + assert "published to PYPI" in call_args[1]["message"] + assert call_args[1]["logger"] == push_handler.logger + assert call_args[1]["log_prefix"] == push_handler.log_prefix diff --git a/webhook_server/tests/test_runner_handler.py b/webhook_server/tests/test_runner_handler.py index 84b27234..4919dba7 100644 --- a/webhook_server/tests/test_runner_handler.py +++ b/webhook_server/tests/test_runner_handler.py @@ -138,11 +138,11 @@ async def test_run_tox_check_in_progress(self, runner_handler: RunnerHandler, mo runner_handler.check_run_handler, "is_check_run_in_progress", new=AsyncMock(return_value=True) ): with patch.object(runner_handler.check_run_handler, 
"set_run_tox_check_in_progress") as mock_set_progress: - with patch.object(runner_handler, "_prepare_cloned_repo_dir") as mock_prepare: + with patch.object(runner_handler, "_checkout_worktree") as mock_checkout: # Simple mock that returns the expected tuple - mock_prepare.return_value = AsyncMock() - mock_prepare.return_value.__aenter__ = AsyncMock(return_value=(True, "", "")) - mock_prepare.return_value.__aexit__ = AsyncMock(return_value=None) + mock_checkout.return_value = AsyncMock() + mock_checkout.return_value.__aenter__ = AsyncMock(return_value=(True, "/tmp/worktree-path", "", "")) + mock_checkout.return_value.__aexit__ = AsyncMock(return_value=None) with patch( "webhook_server.utils.helpers.run_command", new=AsyncMock(return_value=(True, "success", "")) ): @@ -159,10 +159,12 @@ async def test_run_tox_prepare_failure(self, runner_handler: RunnerHandler, mock ): with patch.object(runner_handler.check_run_handler, "set_run_tox_check_in_progress") as mock_set_progress: with patch.object(runner_handler.check_run_handler, "set_run_tox_check_failure") as mock_set_failure: - with patch.object(runner_handler, "_prepare_cloned_repo_dir") as mock_prepare: - mock_prepare.return_value = AsyncMock() - mock_prepare.return_value.__aenter__ = AsyncMock(return_value=(False, "out", "err")) - mock_prepare.return_value.__aexit__ = AsyncMock(return_value=None) + with patch.object(runner_handler, "_checkout_worktree") as mock_checkout: + mock_checkout.return_value = AsyncMock() + mock_checkout.return_value.__aenter__ = AsyncMock( + return_value=(False, "/tmp/worktree-path", "out", "err") + ) + mock_checkout.return_value.__aexit__ = AsyncMock(return_value=None) await runner_handler.run_tox(mock_pull_request) mock_set_progress.assert_called_once() mock_set_failure.assert_called_once() @@ -180,10 +182,12 @@ async def test_run_tox_success(self, runner_handler: RunnerHandler, mock_pull_re with patch.object( runner_handler.check_run_handler, "set_run_tox_check_success", 
new_callable=AsyncMock ) as mock_set_success: - with patch.object(runner_handler, "_prepare_cloned_repo_dir") as mock_prepare: - mock_prepare.return_value = AsyncMock() - mock_prepare.return_value.__aenter__ = AsyncMock(return_value=(True, "", "")) - mock_prepare.return_value.__aexit__ = AsyncMock(return_value=None) + with patch.object(runner_handler, "_checkout_worktree") as mock_checkout: + mock_checkout.return_value = AsyncMock() + mock_checkout.return_value.__aenter__ = AsyncMock( + return_value=(True, "/tmp/worktree-path", "", "") + ) + mock_checkout.return_value.__aexit__ = AsyncMock(return_value=None) with patch( "webhook_server.libs.handlers.runner_handler.run_command", new=AsyncMock(return_value=(True, "success", "")), @@ -201,10 +205,12 @@ async def test_run_tox_failure(self, runner_handler: RunnerHandler, mock_pull_re ): with patch.object(runner_handler.check_run_handler, "set_run_tox_check_in_progress") as mock_set_progress: with patch.object(runner_handler.check_run_handler, "set_run_tox_check_failure") as mock_set_failure: - with patch.object(runner_handler, "_prepare_cloned_repo_dir") as mock_prepare: - mock_prepare.return_value = AsyncMock() - mock_prepare.return_value.__aenter__ = AsyncMock(return_value=(True, "", "")) - mock_prepare.return_value.__aexit__ = AsyncMock(return_value=None) + with patch.object(runner_handler, "_checkout_worktree") as mock_checkout: + mock_checkout.return_value = AsyncMock() + mock_checkout.return_value.__aenter__ = AsyncMock( + return_value=(True, "/tmp/worktree-path", "", "") + ) + mock_checkout.return_value.__aexit__ = AsyncMock(return_value=None) with patch( "webhook_server.utils.helpers.run_command", new=AsyncMock(return_value=(False, "output", "error")), @@ -233,10 +239,12 @@ async def test_run_pre_commit_success(self, runner_handler: RunnerHandler, mock_ with patch.object( runner_handler.check_run_handler, "set_run_pre_commit_check_success" ) as mock_set_success: - with patch.object(runner_handler, 
"_prepare_cloned_repo_dir") as mock_prepare: - mock_prepare.return_value = AsyncMock() - mock_prepare.return_value.__aenter__ = AsyncMock(return_value=(True, "", "")) - mock_prepare.return_value.__aexit__ = AsyncMock(return_value=None) + with patch.object(runner_handler, "_checkout_worktree") as mock_checkout: + mock_checkout.return_value = AsyncMock() + mock_checkout.return_value.__aenter__ = AsyncMock( + return_value=(True, "/tmp/worktree-path", "", "") + ) + mock_checkout.return_value.__aexit__ = AsyncMock(return_value=None) with patch( "webhook_server.libs.handlers.runner_handler.run_command", new=AsyncMock(return_value=(True, "success", "")), @@ -274,21 +282,23 @@ async def test_run_build_container_success(self, runner_handler: RunnerHandler, runner_handler.check_run_handler, "is_check_run_in_progress", new=AsyncMock(return_value=False) ): with patch.object( - runner_handler.check_run_handler, "set_container_build_in_progress" + runner_handler.check_run_handler, "set_container_build_in_progress", new=AsyncMock() ) as mock_set_progress: with patch.object( - runner_handler.check_run_handler, "set_container_build_success" + runner_handler.check_run_handler, "set_container_build_success", new=AsyncMock() ) as mock_set_success: - with patch.object(runner_handler, "_prepare_cloned_repo_dir") as mock_prepare: - mock_prepare.return_value = AsyncMock() - mock_prepare.return_value.__aenter__ = AsyncMock(return_value=(True, "", "")) - mock_prepare.return_value.__aexit__ = AsyncMock(return_value=None) + with patch.object(runner_handler, "_checkout_worktree") as mock_checkout: + mock_checkout.return_value = AsyncMock() + mock_checkout.return_value.__aenter__ = AsyncMock( + return_value=(True, "/tmp/worktree-path", "", "") + ) + mock_checkout.return_value.__aexit__ = AsyncMock(return_value=None) with patch.object( runner_handler, "run_podman_command", new=AsyncMock(return_value=(True, "success", "")) ): await 
runner_handler.run_build_container(pull_request=mock_pull_request) - mock_set_progress.assert_called_once() - mock_set_success.assert_called_once() + mock_set_progress.assert_awaited_once() + mock_set_success.assert_awaited_once() @pytest.mark.asyncio async def test_run_build_container_with_push_success( @@ -303,22 +313,23 @@ async def test_run_build_container_with_push_success( runner_handler.check_run_handler, "is_check_run_in_progress", new=AsyncMock(return_value=False) ): with patch.object( - runner_handler.check_run_handler, "set_container_build_in_progress" + runner_handler.check_run_handler, "set_container_build_in_progress", new=AsyncMock() ) as mock_set_progress: with patch.object( - runner_handler.check_run_handler, "set_container_build_success" + runner_handler.check_run_handler, "set_container_build_success", new=AsyncMock() ) as mock_set_success: - with patch.object(runner_handler, "_prepare_cloned_repo_dir") as mock_prepare: - mock_prepare.return_value = AsyncMock() - mock_prepare.return_value.__aenter__ = AsyncMock(return_value=(True, "", "")) - mock_prepare.return_value.__aexit__ = AsyncMock(return_value=None) + with patch.object(runner_handler, "_checkout_worktree") as mock_checkout: + mock_checkout.return_value = AsyncMock() + mock_checkout.return_value.__aenter__ = AsyncMock( + return_value=(True, "/tmp/worktree-path", "", "") + ) + mock_checkout.return_value.__aexit__ = AsyncMock(return_value=None) with patch.object( runner_handler, "run_podman_command", new=AsyncMock(return_value=(True, "success", "")) ): - with patch("asyncio.to_thread"): - await runner_handler.run_build_container(pull_request=mock_pull_request, push=True) - mock_set_progress.assert_called_once() - mock_set_success.assert_called_once() + await runner_handler.run_build_container(pull_request=mock_pull_request, push=True) + mock_set_progress.assert_awaited_once() + mock_set_success.assert_awaited_once() @pytest.mark.asyncio async def test_run_install_python_module_disabled( @@ 
-349,10 +360,12 @@ async def test_run_install_python_module_success( with patch.object( runner_handler.check_run_handler, "set_python_module_install_success" ) as mock_set_success: - with patch.object(runner_handler, "_prepare_cloned_repo_dir") as mock_prepare: - mock_prepare.return_value = AsyncMock() - mock_prepare.return_value.__aenter__ = AsyncMock(return_value=(True, "", "")) - mock_prepare.return_value.__aexit__ = AsyncMock(return_value=None) + with patch.object(runner_handler, "_checkout_worktree") as mock_checkout: + mock_checkout.return_value = AsyncMock() + mock_checkout.return_value.__aenter__ = AsyncMock( + return_value=(True, "/tmp/worktree-path", "", "") + ) + mock_checkout.return_value.__aexit__ = AsyncMock(return_value=None) with patch( "webhook_server.libs.handlers.runner_handler.run_command", new=AsyncMock(return_value=(True, "success", "")), @@ -376,10 +389,12 @@ async def test_run_install_python_module_failure( with patch.object( runner_handler.check_run_handler, "set_python_module_install_failure" ) as mock_set_failure: - with patch.object(runner_handler, "_prepare_cloned_repo_dir") as mock_prepare: - mock_prepare.return_value = AsyncMock() - mock_prepare.return_value.__aenter__ = AsyncMock(return_value=(True, "", "")) - mock_prepare.return_value.__aexit__ = AsyncMock(return_value=None) + with patch.object(runner_handler, "_checkout_worktree") as mock_checkout: + mock_checkout.return_value = AsyncMock() + mock_checkout.return_value.__aenter__ = AsyncMock( + return_value=(True, "/tmp/worktree-path", "", "") + ) + mock_checkout.return_value.__aexit__ = AsyncMock(return_value=None) with patch( "webhook_server.utils.helpers.run_command", new=AsyncMock(return_value=(False, "output", "error")), @@ -496,32 +511,28 @@ async def test_conventional_title_validation( runner_handler.check_run_handler, "is_check_run_in_progress", new=AsyncMock(return_value=False) ): with patch.object( - runner_handler.check_run_handler, "set_conventional_title_in_progress" + 
runner_handler.check_run_handler, "set_conventional_title_in_progress", new=AsyncMock() ) as mock_set_progress: with patch.object( - runner_handler.check_run_handler, "set_conventional_title_success" + runner_handler.check_run_handler, "set_conventional_title_success", new=AsyncMock() ) as mock_set_success: with patch.object( - runner_handler.check_run_handler, "set_conventional_title_failure" + runner_handler.check_run_handler, "set_conventional_title_failure", new=AsyncMock() ) as mock_set_failure: await runner_handler.run_conventional_title_check(mock_pull_request) - mock_set_progress.assert_called_once() + mock_set_progress.assert_awaited_once() if should_pass: - assert mock_set_success.call_count == 1, ( + assert mock_set_success.await_count == 1, ( f"Expected '{title}' to pass validation ({reason}), but it failed" ) - assert mock_set_failure.call_count == 0, ( - f"Expected '{title}' to pass validation ({reason}), but failure was called" - ) + mock_set_failure.assert_not_awaited() else: - assert mock_set_failure.call_count == 1, ( + assert mock_set_failure.await_count == 1, ( f"Expected '{title}' to fail validation ({reason}), but it passed" ) - assert mock_set_success.call_count == 0, ( - f"Expected '{title}' to fail validation ({reason}), but success was called" - ) + mock_set_success.assert_not_awaited() @pytest.mark.asyncio async def test_run_conventional_title_check_disabled( @@ -530,17 +541,21 @@ async def test_run_conventional_title_check_disabled( """Test run_conventional_title_check when conventional_title is not configured.""" runner_handler.github_webhook.conventional_title = "" - with patch.object(runner_handler.check_run_handler, "set_conventional_title_in_progress") as mock_set_progress: - with patch.object(runner_handler.check_run_handler, "set_conventional_title_success") as mock_set_success: + with patch.object( + runner_handler.check_run_handler, "set_conventional_title_in_progress", new=AsyncMock() + ) as mock_set_progress: + with 
patch.object( + runner_handler.check_run_handler, "set_conventional_title_success", new=AsyncMock() + ) as mock_set_success: with patch.object( - runner_handler.check_run_handler, "set_conventional_title_failure" + runner_handler.check_run_handler, "set_conventional_title_failure", new=AsyncMock() ) as mock_set_failure: await runner_handler.run_conventional_title_check(mock_pull_request) # Should return early without doing anything - mock_set_progress.assert_not_called() - mock_set_success.assert_not_called() - mock_set_failure.assert_not_called() + mock_set_progress.assert_not_awaited() + mock_set_success.assert_not_awaited() + mock_set_failure.assert_not_awaited() @pytest.mark.asyncio async def test_run_conventional_title_check_custom_types( @@ -565,19 +580,19 @@ async def test_run_conventional_title_check_custom_types( runner_handler.check_run_handler, "is_check_run_in_progress", new=AsyncMock(return_value=False) ): with patch.object( - runner_handler.check_run_handler, "set_conventional_title_in_progress" + runner_handler.check_run_handler, "set_conventional_title_in_progress", new=AsyncMock() ) as mock_set_progress: with patch.object( - runner_handler.check_run_handler, "set_conventional_title_success" + runner_handler.check_run_handler, "set_conventional_title_success", new=AsyncMock() ) as mock_set_success: with patch.object( - runner_handler.check_run_handler, "set_conventional_title_failure" + runner_handler.check_run_handler, "set_conventional_title_failure", new=AsyncMock() ) as mock_set_failure: await runner_handler.run_conventional_title_check(mock_pull_request) - mock_set_progress.assert_called_once() - mock_set_success.assert_called_once() - mock_set_failure.assert_not_called() + mock_set_progress.assert_awaited_once() + mock_set_success.assert_awaited_once() + mock_set_failure.assert_not_awaited() @pytest.mark.asyncio async def test_run_conventional_title_check_in_progress( @@ -590,16 +605,16 @@ async def 
test_run_conventional_title_check_in_progress( runner_handler.check_run_handler, "is_check_run_in_progress", new=AsyncMock(return_value=True) ): with patch.object( - runner_handler.check_run_handler, "set_conventional_title_in_progress" + runner_handler.check_run_handler, "set_conventional_title_in_progress", new=AsyncMock() ) as mock_set_progress: with patch.object( - runner_handler.check_run_handler, "set_conventional_title_success" + runner_handler.check_run_handler, "set_conventional_title_success", new=AsyncMock() ) as mock_set_success: await runner_handler.run_conventional_title_check(mock_pull_request) # Should still proceed with the check - mock_set_progress.assert_called_once() - mock_set_success.assert_called_once() + mock_set_progress.assert_awaited_once() + mock_set_success.assert_awaited_once() @pytest.mark.asyncio async def test_is_branch_exists(self, runner_handler: RunnerHandler) -> None: @@ -613,9 +628,9 @@ async def test_is_branch_exists(self, runner_handler: RunnerHandler) -> None: async def test_cherry_pick_branch_not_exists(self, runner_handler: RunnerHandler, mock_pull_request: Mock) -> None: """Test cherry_pick when target branch doesn't exist.""" with patch.object(runner_handler, "is_branch_exists", new=AsyncMock(return_value=None)): - with patch("asyncio.to_thread") as mock_to_thread: + with patch.object(mock_pull_request, "create_issue_comment", new=Mock()) as mock_comment: await runner_handler.cherry_pick(mock_pull_request, "non-existent-branch") - mock_to_thread.assert_called_once() + mock_comment.assert_called_once() @pytest.mark.asyncio async def test_cherry_pick_prepare_failure(self, runner_handler: RunnerHandler, mock_pull_request: Mock) -> None: @@ -624,10 +639,12 @@ async def test_cherry_pick_prepare_failure(self, runner_handler: RunnerHandler, with patch.object(runner_handler, "is_branch_exists", new=AsyncMock(return_value=Mock())): with patch.object(runner_handler.check_run_handler, "set_cherry_pick_in_progress") as 
mock_set_progress: with patch.object(runner_handler.check_run_handler, "set_cherry_pick_failure") as mock_set_failure: - with patch.object(runner_handler, "_prepare_cloned_repo_dir") as mock_prepare: - mock_prepare.return_value = AsyncMock() - mock_prepare.return_value.__aenter__ = AsyncMock(return_value=(False, "out", "err")) - mock_prepare.return_value.__aexit__ = AsyncMock(return_value=None) + with patch.object(runner_handler, "_checkout_worktree") as mock_checkout: + mock_checkout.return_value = AsyncMock() + mock_checkout.return_value.__aenter__ = AsyncMock( + return_value=(False, "/tmp/worktree-path", "out", "err") + ) + mock_checkout.return_value.__aexit__ = AsyncMock(return_value=None) await runner_handler.cherry_pick(mock_pull_request, "main") mock_set_progress.assert_called_once() assert mock_set_failure.call_count >= 1 @@ -639,10 +656,12 @@ async def test_cherry_pick_command_failure(self, runner_handler: RunnerHandler, with patch.object(runner_handler, "is_branch_exists", new=AsyncMock(return_value=Mock())): with patch.object(runner_handler.check_run_handler, "set_cherry_pick_in_progress") as mock_set_progress: with patch.object(runner_handler.check_run_handler, "set_cherry_pick_failure") as mock_set_failure: - with patch.object(runner_handler, "_prepare_cloned_repo_dir") as mock_prepare: - mock_prepare.return_value = AsyncMock() - mock_prepare.return_value.__aenter__ = AsyncMock(return_value=(True, "", "")) - mock_prepare.return_value.__aexit__ = AsyncMock(return_value=None) + with patch.object(runner_handler, "_checkout_worktree") as mock_checkout: + mock_checkout.return_value = AsyncMock() + mock_checkout.return_value.__aenter__ = AsyncMock( + return_value=(True, "/tmp/worktree-path", "", "") + ) + mock_checkout.return_value.__aexit__ = AsyncMock(return_value=None) with patch( "webhook_server.utils.helpers.run_command", new=AsyncMock(return_value=(False, "output", "error")), @@ -658,179 +677,114 @@ async def test_cherry_pick_success(self, 
runner_handler: RunnerHandler, mock_pul with patch.object(runner_handler, "is_branch_exists", new=AsyncMock(return_value=Mock())): with patch.object(runner_handler.check_run_handler, "set_cherry_pick_in_progress") as mock_set_progress: with patch.object(runner_handler.check_run_handler, "set_cherry_pick_success") as mock_set_success: - with patch.object(runner_handler, "_prepare_cloned_repo_dir") as mock_prepare: - mock_prepare.return_value = AsyncMock() - mock_prepare.return_value.__aenter__ = AsyncMock(return_value=(True, "", "")) - mock_prepare.return_value.__aexit__ = AsyncMock(return_value=None) + with patch.object(runner_handler, "_checkout_worktree") as mock_checkout: + mock_checkout.return_value = AsyncMock() + mock_checkout.return_value.__aenter__ = AsyncMock( + return_value=(True, "/tmp/worktree-path", "", "") + ) + mock_checkout.return_value.__aexit__ = AsyncMock(return_value=None) with patch( "webhook_server.libs.handlers.runner_handler.run_command", new=AsyncMock(return_value=(True, "success", "")), ): - with patch("asyncio.to_thread"): + with patch.object(mock_pull_request, "create_issue_comment", new=Mock()) as mock_comment: await runner_handler.cherry_pick(mock_pull_request, "main") mock_set_progress.assert_called_once() mock_set_success.assert_called_once() - - @pytest.mark.asyncio - async def test_prepare_cloned_repo_dir_success( - self, runner_handler: RunnerHandler, mock_pull_request: Mock - ) -> None: - """Test _prepare_cloned_repo_dir with successful preparation.""" - with patch( - "webhook_server.libs.handlers.runner_handler.run_command", new=AsyncMock(return_value=(True, "success", "")) - ): - with patch.object( - runner_handler.github_webhook, "get_pull_request", new=AsyncMock(return_value=mock_pull_request) + mock_comment.assert_called_once() + + @pytest.mark.asyncio + async def test_checkout_worktree_success(self, runner_handler: RunnerHandler, mock_pull_request: Mock) -> None: + """Test _checkout_worktree with successful preparation.""" 
+ with patch("webhook_server.utils.helpers.git_worktree_checkout") as mock_git_worktree: + mock_git_worktree.return_value.__aenter__ = AsyncMock( + return_value=(True, "/tmp/worktree-path", "success", "") + ) + mock_git_worktree.return_value.__aexit__ = AsyncMock(return_value=None) + with patch( + "webhook_server.libs.handlers.runner_handler.run_command", + new=AsyncMock(return_value=(True, "success", "")), ): - async with runner_handler._prepare_cloned_repo_dir( - "/tmp/test-repo-unique", mock_pull_request - ) as result: - success, _, _ = result + async with runner_handler._checkout_worktree(pull_request=mock_pull_request) as result: + success, worktree_path, _, _ = result assert success is True + assert worktree_path == "/tmp/worktree-path" @pytest.mark.asyncio - async def test_prepare_cloned_repo_dir_clone_failure(self, runner_handler: RunnerHandler) -> None: - """Test _prepare_cloned_repo_dir when clone fails.""" - with patch( - "webhook_server.libs.handlers.runner_handler.run_command", - new=AsyncMock(return_value=(False, "output", "error")), - ): - async with runner_handler._prepare_cloned_repo_dir("/tmp/test-repo-unique2") as result: - success, out, _ = result + async def test_checkout_worktree_failure(self, runner_handler: RunnerHandler, mock_pull_request: Mock) -> None: + """Test _checkout_worktree when checkout fails.""" + with patch("webhook_server.utils.helpers.git_worktree_checkout") as mock_git_worktree: + mock_git_worktree.return_value.__aenter__ = AsyncMock(return_value=(False, "", "output", "error")) + mock_git_worktree.return_value.__aexit__ = AsyncMock(return_value=None) + async with runner_handler._checkout_worktree(pull_request=mock_pull_request) as result: + success, _, out, _ = result assert success is False assert out == "output" @pytest.mark.asyncio - async def test_prepare_cloned_repo_dir_with_checkout( + async def test_checkout_worktree_with_checkout( self, runner_handler: RunnerHandler, mock_pull_request: Mock ) -> None: - """Test 
_prepare_cloned_repo_dir with checkout parameter.""" - with patch( - "webhook_server.libs.handlers.runner_handler.run_command", new=AsyncMock(return_value=(True, "success", "")) - ): - async with runner_handler._prepare_cloned_repo_dir( - "/tmp/test-repo-unique3", mock_pull_request, checkout="feature-branch" - ) as result: - success, _, _ = result - assert success is True + """Test _checkout_worktree with checkout parameter.""" + with patch("webhook_server.utils.helpers.git_worktree_checkout") as mock_git_worktree: + mock_git_worktree.return_value.__aenter__ = AsyncMock( + return_value=(True, "/tmp/worktree-path", "success", "") + ) + mock_git_worktree.return_value.__aexit__ = AsyncMock(return_value=None) + with patch( + "webhook_server.libs.handlers.runner_handler.run_command", + new=AsyncMock(return_value=(True, "success", "")), + ): + async with runner_handler._checkout_worktree( + pull_request=mock_pull_request, checkout="feature-branch" + ) as result: + success, _, _, _ = result + assert success is True @pytest.mark.asyncio - async def test_prepare_cloned_repo_dir_with_tag( - self, runner_handler: RunnerHandler, mock_pull_request: Mock - ) -> None: - """Test _prepare_cloned_repo_dir with tag_name parameter.""" - with patch( - "webhook_server.libs.handlers.runner_handler.run_command", new=AsyncMock(return_value=(True, "success", "")) - ): - async with runner_handler._prepare_cloned_repo_dir( - "/tmp/test-repo-unique4", mock_pull_request, tag_name="v1.0.0" - ) as result: - success, _, _ = result - assert success is True + async def test_checkout_worktree_with_tag(self, runner_handler: RunnerHandler, mock_pull_request: Mock) -> None: + """Test _checkout_worktree with tag_name parameter.""" + with patch("webhook_server.utils.helpers.git_worktree_checkout") as mock_git_worktree: + mock_git_worktree.return_value.__aenter__ = AsyncMock( + return_value=(True, "/tmp/worktree-path", "success", "") + ) + mock_git_worktree.return_value.__aexit__ = 
AsyncMock(return_value=None) + with patch( + "webhook_server.libs.handlers.runner_handler.run_command", + new=AsyncMock(return_value=(True, "success", "")), + ): + async with runner_handler._checkout_worktree( + pull_request=mock_pull_request, tag_name="v1.0.0" + ) as result: + success, _, _, _ = result + assert success is True @pytest.mark.asyncio - async def test_prepare_cloned_repo_dir_merged_pr( - self, runner_handler: RunnerHandler, mock_pull_request: Mock - ) -> None: - """Test _prepare_cloned_repo_dir with merged pull request.""" - with patch( - "webhook_server.libs.handlers.runner_handler.run_command", new=AsyncMock(return_value=(True, "success", "")) - ): - async with runner_handler._prepare_cloned_repo_dir( - "/tmp/test-repo-unique5", mock_pull_request, is_merged=True - ) as result: - success, _, _ = result + async def test_checkout_worktree_merged_pr(self, runner_handler: RunnerHandler, mock_pull_request: Mock) -> None: + """Test _checkout_worktree with merged pull request.""" + with patch("webhook_server.utils.helpers.git_worktree_checkout") as mock_git_worktree: + mock_git_worktree.return_value.__aenter__ = AsyncMock( + return_value=(True, "/tmp/worktree-path", "success", "") + ) + mock_git_worktree.return_value.__aexit__ = AsyncMock(return_value=None) + async with runner_handler._checkout_worktree(pull_request=mock_pull_request, is_merged=True) as result: + success, _, _, _ = result assert success is True @pytest.mark.asyncio - async def test_prepare_cloned_repo_dir_git_config_user_name_failure(self, runner_handler, mock_pull_request): - # Simulate failure at git config user.name - async def run_command_side_effect(*args, **kwargs): - cmd = kwargs.get("command", args[0] if args else "") - if "clone" in cmd: - return (True, "ok", "") - if "config user.name" in cmd: - return (False, "fail", "fail") - return (True, "ok", "") - - with patch( - "webhook_server.libs.handlers.runner_handler.run_command", - new=AsyncMock(side_effect=run_command_side_effect), 
- ): - async with runner_handler._prepare_cloned_repo_dir("/tmp/test-repo-x", mock_pull_request) as result: - success, out, _ = result - assert not success - assert out == "fail" - - @pytest.mark.asyncio - async def test_prepare_cloned_repo_dir_git_config_user_email_failure(self, runner_handler, mock_pull_request): - # Simulate failure at git config user.email - async def run_command_side_effect(*args, **kwargs): - cmd = kwargs.get("command", args[0] if args else "") - if "clone" in cmd: - return (True, "ok", "") - if "config user.name" in cmd: - return (True, "ok", "") - if "config user.email" in cmd: - return (False, "fail", "fail") - return (True, "ok", "") - - with patch( - "webhook_server.libs.handlers.runner_handler.run_command", - new=AsyncMock(side_effect=run_command_side_effect), - ): - async with runner_handler._prepare_cloned_repo_dir("/tmp/test-repo-x", mock_pull_request) as result: - success, out, _ = result - assert not success - assert out == "fail" - - @pytest.mark.asyncio - async def test_prepare_cloned_repo_dir_git_config_fetch_failure(self, runner_handler, mock_pull_request): - # Simulate failure at git config --local --add remote.origin.fetch - async def run_command_side_effect(*args, **kwargs): - cmd = kwargs.get("command", args[0] if args else "") - if "clone" in cmd: - return (True, "ok", "") - if "config user.name" in cmd or "config user.email" in cmd: - return (True, "ok", "") - if "config --local --add remote.origin.fetch" in cmd: - return (False, "fail", "fail") - return (True, "ok", "") - - with patch( - "webhook_server.libs.handlers.runner_handler.run_command", - new=AsyncMock(side_effect=run_command_side_effect), - ): - async with runner_handler._prepare_cloned_repo_dir("/tmp/test-repo-x", mock_pull_request) as result: - success, out, _ = result - assert not success - assert out == "fail" - - @pytest.mark.asyncio - async def test_prepare_cloned_repo_dir_git_remote_update_failure(self, runner_handler, mock_pull_request): - # Simulate 
failure at git remote update - async def run_command_side_effect(*args, **kwargs): - cmd = kwargs.get("command", args[0] if args else "") - if "clone" in cmd: - return (True, "ok", "") - if ( - "config user.name" in cmd - or "config user.email" in cmd - or "config --local --add remote.origin.fetch" in cmd + async def test_checkout_worktree_merge_failure(self, runner_handler, mock_pull_request): + """Test _checkout_worktree when merge fails.""" + with patch("webhook_server.utils.helpers.git_worktree_checkout") as mock_git_worktree: + mock_git_worktree.return_value.__aenter__ = AsyncMock(return_value=(True, "/tmp/worktree-path", "ok", "")) + mock_git_worktree.return_value.__aexit__ = AsyncMock(return_value=None) + with patch( + "webhook_server.libs.handlers.runner_handler.run_command", + new=AsyncMock(return_value=(False, "fail", "merge conflict")), ): - return (True, "ok", "") - if "remote update" in cmd: - return (False, "fail", "fail") - return (True, "ok", "") - - with patch( - "webhook_server.libs.handlers.runner_handler.run_command", - new=AsyncMock(side_effect=run_command_side_effect), - ): - async with runner_handler._prepare_cloned_repo_dir("/tmp/test-repo-x", mock_pull_request) as result: - success, out, _ = result - assert not success - assert out == "fail" + async with runner_handler._checkout_worktree(pull_request=mock_pull_request) as result: + success, _, out, _ = result + assert not success + assert out == "fail" @pytest.mark.asyncio async def test_run_build_container_push_failure(self, runner_handler, mock_pull_request): @@ -846,18 +800,20 @@ async def test_run_build_container_push_failure(self, runner_handler, mock_pull_ runner_handler.check_run_handler, "is_check_run_in_progress", new=AsyncMock(return_value=False) ): with patch.object( - runner_handler.check_run_handler, "set_container_build_in_progress" + runner_handler.check_run_handler, "set_container_build_in_progress", new=AsyncMock() ) as mock_set_progress: with patch.object( - 
runner_handler.check_run_handler, "set_container_build_success" + runner_handler.check_run_handler, "set_container_build_success", new=AsyncMock() ) as mock_set_success: with patch.object( - runner_handler.check_run_handler, "set_container_build_failure" + runner_handler.check_run_handler, "set_container_build_failure", new=AsyncMock() ) as mock_set_failure: - with patch.object(runner_handler, "_prepare_cloned_repo_dir") as mock_prepare: - mock_prepare.return_value = AsyncMock() - mock_prepare.return_value.__aenter__ = AsyncMock(return_value=(True, "", "")) - mock_prepare.return_value.__aexit__ = AsyncMock(return_value=None) + with patch.object(runner_handler, "_checkout_worktree") as mock_checkout: + mock_checkout.return_value = AsyncMock() + mock_checkout.return_value.__aenter__ = AsyncMock( + return_value=(True, "/tmp/worktree-path", "", "") + ) + mock_checkout.return_value.__aexit__ = AsyncMock(return_value=None) with patch.object(runner_handler, "run_podman_command") as mock_run_podman: # First call (build) succeeds, second call (push) fails mock_run_podman.side_effect = [ @@ -870,27 +826,28 @@ async def test_run_build_container_push_failure(self, runner_handler, mock_pull_ with patch( "webhook_server.libs.handlers.runner_handler.send_slack_message" ) as mock_slack: - with patch("asyncio.to_thread") as mock_to_thread: + with patch.object( + mock_pull_request, + "create_issue_comment", + new=Mock(), + ) as mock_comment: # Set set_check=False to avoid early return after build success await runner_handler.run_build_container( pull_request=mock_pull_request, push=True, set_check=False ) - mock_set_progress.assert_called_once() + # Should not call set_progress because set_check=False + mock_set_progress.assert_not_awaited() # Should not call set_success because set_check=False - mock_set_success.assert_not_called() + mock_set_success.assert_not_awaited() # Slack message should be sent when push fails mock_slack.assert_called_once() # Should be called twice: build 
and push assert mock_run_podman.call_count == 2, ( f"Expected 2 calls, got {mock_run_podman.call_count}" ) - # to_thread should be called to create issue comment on push failure - assert mock_to_thread.called, ( - f"to_thread was not called, calls: {mock_to_thread.call_args_list}" - ) - called_args = mock_to_thread.call_args[0] - assert called_args[0] == mock_pull_request.create_issue_comment - mock_set_failure.assert_not_called() + # PR comment should be created on push failure + mock_comment.assert_called_once() + mock_set_failure.assert_not_awaited() @pytest.mark.asyncio async def test_run_build_container_with_command_args(self, runner_handler, mock_pull_request): @@ -902,21 +859,23 @@ async def test_run_build_container_with_command_args(self, runner_handler, mock_ runner_handler.check_run_handler, "is_check_run_in_progress", new=AsyncMock(return_value=False) ): with patch.object( - runner_handler.check_run_handler, "set_container_build_in_progress" + runner_handler.check_run_handler, "set_container_build_in_progress", new=AsyncMock() ) as mock_set_progress: with patch.object( - runner_handler.check_run_handler, "set_container_build_success" + runner_handler.check_run_handler, "set_container_build_success", new=AsyncMock() ) as mock_set_success: - with patch.object(runner_handler, "_prepare_cloned_repo_dir") as mock_prepare: - mock_prepare.return_value = AsyncMock() - mock_prepare.return_value.__aenter__ = AsyncMock(return_value=(True, "", "")) - mock_prepare.return_value.__aexit__ = AsyncMock(return_value=None) + with patch.object(runner_handler, "_checkout_worktree") as mock_checkout: + mock_checkout.return_value = AsyncMock() + mock_checkout.return_value.__aenter__ = AsyncMock( + return_value=(True, "/tmp/worktree-path", "", "") + ) + mock_checkout.return_value.__aexit__ = AsyncMock(return_value=None) with patch.object(runner_handler, "run_podman_command", return_value=(True, "success", "")): await runner_handler.run_build_container( 
pull_request=mock_pull_request, command_args="--extra-arg" ) - mock_set_progress.assert_called_once() - mock_set_success.assert_called_once() + mock_set_progress.assert_awaited_once() + mock_set_success.assert_awaited_once() @pytest.mark.asyncio async def test_cherry_pick_manual_needed(self, runner_handler, mock_pull_request): @@ -924,14 +883,16 @@ async def test_cherry_pick_manual_needed(self, runner_handler, mock_pull_request with patch.object(runner_handler, "is_branch_exists", new=AsyncMock(return_value=Mock())): with patch.object(runner_handler.check_run_handler, "set_cherry_pick_in_progress") as mock_set_progress: with patch.object(runner_handler.check_run_handler, "set_cherry_pick_failure") as mock_set_failure: - with patch.object(runner_handler, "_prepare_cloned_repo_dir") as mock_prepare: - mock_prepare.return_value = AsyncMock() - mock_prepare.return_value.__aenter__ = AsyncMock(return_value=(True, "", "")) - mock_prepare.return_value.__aexit__ = AsyncMock(return_value=None) + with patch.object(runner_handler, "_checkout_worktree") as mock_checkout: + mock_checkout.return_value = AsyncMock() + mock_checkout.return_value.__aenter__ = AsyncMock( + return_value=(True, "/tmp/worktree-path", "", "") + ) + mock_checkout.return_value.__aexit__ = AsyncMock(return_value=None) # First command fails, triggers manual cherry-pick with patch("webhook_server.utils.helpers.run_command", side_effect=[(False, "fail", "err")]): - with patch("asyncio.to_thread") as mock_to_thread: + with patch.object(mock_pull_request, "create_issue_comment", new=Mock()) as mock_comment: await runner_handler.cherry_pick(mock_pull_request, "main") mock_set_progress.assert_called_once() mock_set_failure.assert_called_once() - mock_to_thread.assert_called() + mock_comment.assert_called_once() diff --git a/webhook_server/utils/helpers.py b/webhook_server/utils/helpers.py index e2f8a62b..fad6d2e9 100644 --- a/webhook_server/utils/helpers.py +++ b/webhook_server/utils/helpers.py @@ -1,16 +1,20 @@ 
from __future__ import annotations import asyncio +import contextlib import datetime import json import os import random import re import shlex +import shutil import subprocess +from collections.abc import AsyncGenerator from concurrent.futures import Future, as_completed from logging import Logger from typing import Any +from uuid import uuid4 import github from colorama import Fore @@ -586,3 +590,63 @@ def prepare_log_prefix( prefix += f"[PR {pr_number}]" return prefix + ":" + + +@contextlib.asynccontextmanager +async def git_worktree_checkout( + repo_dir: str, + checkout: str, + log_prefix: str, + mask_sensitive: bool = True, +) -> AsyncGenerator[tuple[bool, str, str, str], None]: + """Create git worktree for isolated checkout operations. + + Creates a temporary worktree from existing cloned repository, allowing + multiple handlers to work with different checkouts simultaneously. + + Args: + repo_dir: Path to cloned git repository + checkout: Branch, tag, or commit to checkout + log_prefix: Logging prefix + mask_sensitive: Whether to mask sensitive data in logs + + Yields: + tuple: (success: bool, worktree_path: str, stdout: str, stderr: str) + + Example: + async with git_worktree_checkout(repo_dir, "origin/pr/123", log_prefix) as (success, path, out, err): + if success: + # Use path for operations + await run_command(f"pytest {path}/tests") + """ + worktree_path = f"{repo_dir}-worktree-{uuid4()}" + result: tuple[bool, str, str, str] = (False, "", "", "") + + try: + # Create worktree + rc, out, err = await run_command( + command=f"git -C {repo_dir} worktree add {worktree_path} {checkout}", + log_prefix=log_prefix, + mask_sensitive=mask_sensitive, + ) + + if rc: + result = (True, worktree_path, out, err) + else: + result = (False, worktree_path, out, err) + + yield result + + finally: + # Cleanup: Remove worktree + if os.path.exists(worktree_path): + try: + # Remove worktree from git + await run_command( + command=f"git -C {repo_dir} worktree remove 
{worktree_path} --force",
+                    log_prefix=log_prefix,
+                    mask_sensitive=mask_sensitive,
+                )
+            except Exception:
+                pass  # Best-effort: tolerate git errors; run_command signals failure via rc, not exceptions
+            shutil.rmtree(worktree_path, ignore_errors=True)