From 0607a017fc886b400981389d16e552a5e2d997f6 Mon Sep 17 00:00:00 2001 From: rnetser Date: Wed, 3 Jul 2024 08:40:54 +0300 Subject: [PATCH 01/31] Add typing --- webhook_server_container/libs/config.py | 14 +- webhook_server_container/libs/github_api.py | 136 +++++++++--------- webhook_server_container/libs/jira_api.py | 12 +- webhook_server_container/utils/constants.py | 87 +++++------ .../utils/dockerhub_rate_limit.py | 26 ++-- .../utils/github_repository_settings.py | 85 ++++++----- 6 files changed, 196 insertions(+), 164 deletions(-) diff --git a/webhook_server_container/libs/config.py b/webhook_server_container/libs/config.py index ae7a0846..af04a718 100644 --- a/webhook_server_container/libs/config.py +++ b/webhook_server_container/libs/config.py @@ -1,25 +1,23 @@ import os +from typing import Any, Dict, Optional import yaml class Config: def __init__(self): - self.data_dir = os.environ.get("WEBHOOK_SERVER_DATA_DIR", "/webhook_server") - self.config_path = os.path.join(self.data_dir, "config.yaml") + self.data_dir: str = os.environ.get("WEBHOOK_SERVER_DATA_DIR", "/webhook_server") + self.config_path: str = os.path.join(self.data_dir, "config.yaml") self.exists() - def exists(self): + def exists(self) -> None: if not os.path.isfile(self.config_path): raise FileNotFoundError(f"Config file {self.config_path} not found") @property - def data(self): - return self.get_data_from_config() - - def get_data_from_config(self): + def data(self) -> Dict[str, Any]: with open(self.config_path) as fd: return yaml.safe_load(fd) - def get_repository(self, repository_name): + def get_repository(self, repository_name: str) -> Optional[Dict[str, Any]]: return self.data["repositories"].get(repository_name) diff --git a/webhook_server_container/libs/github_api.py b/webhook_server_container/libs/github_api.py index a84600a1..d633b5a7 100644 --- a/webhook_server_container/libs/github_api.py +++ b/webhook_server_container/libs/github_api.py @@ -7,14 +7,14 @@ import time from concurrent.futures import ThreadPoolExecutor, as_completed from pathlib import Path -from typing import Any, Dict, List, Tuple +from typing import Any, Dict, List, Optional, Tuple from fastapi import FastAPI from jira import JIRA import requests import shortuuid import yaml -from github import GithubException +from github import Commit, GithubException from github.PullRequest import PullRequest from github.GithubException import UnknownObjectException from simple_logger.logger import get_logger @@ -94,22 +94,22 @@ def __init__(self, hook_data): self.all_required_status_checks: List[str] = [] # filled by self._repo_data_from_config() - self.dockerhub_username = None - self.dockerhub_password = None - self.container_repository_username = None - self.container_repository_password = None - self.container_repository = None - self.dockerfile = None - self.container_tag = None - self.container_build_args = None - self.container_command_args = None - self.repository_full_name = None - self.github_app_id = None - self.container_release = None - self.can_be_merged_required_labels = [] - self.jira = None - self.jira_tracking = False - self.jira_enabled_repository = False + self.dockerhub_username: str + self.dockerhub_password: str + self.container_repository_username: str + self.container_repository_password: str + self.container_repository: str + self.dockerfile: str + self.container_tag: str + self.container_build_args: List + self.container_command_args: List + self.repository_full_name: str + self.github_app_id: str + self.container_release: bool 
+ self.can_be_merged_required_labels: Any + self.jira: Dict[str, Any] + self.jira_tracking: bool = False + self.jira_enabled_repository: bool = False # End of filled by self._repo_data_from_config() self.config = Config() @@ -161,8 +161,11 @@ def __init__(self, hook_data): f"Committer {self.parent_committer} is not in configures in jira-user-mapping" ) else: - self.jira_track_pr = True - self.issue_title = f"[AUTO:FROM:GITHUB] [{self.repository_name}] PR [{self.pull_request.number}]: {self.pull_request.title}" + self.jira_track_pr: bool = True + self.issue_title: str = ( + f"[AUTO:FROM:GITHUB] [{self.repository_name}] " + f"PR [{self.pull_request.number}]: {self.pull_request.title}" + ) LOGGER.info(f"{self.log_prefix} Jira tracking is enabled for the current pull request.") else: LOGGER.info( @@ -170,8 +173,8 @@ def __init__(self, hook_data): f"Committer {self.parent_committer} is not in {reviewers_and_approvers}" ) - self.supported_user_labels_str = "".join([f" * {label}\n" for label in USER_LABELS_DICT.keys()]) - self.welcome_msg = f""" + self.supported_user_labels_str: str = "".join([f" * {label}\n" for label in USER_LABELS_DICT.keys()]) + self.welcome_msg: str = f""" Report bugs in [Issues](https://github.com/myakove/github-webhook-server/issues) The following are automatically added: @@ -207,8 +210,8 @@ def __init__(self, hook_data): """ @property - def prepare_retest_wellcome_msg(self): - retest_msg = "" + def prepare_retest_wellcome_msg(self) -> str: + retest_msg: str = "" if self.tox_enabled: retest_msg += f" * `/retest {TOX_STR}`: Retest tox\n" if self.build_and_push_container: @@ -218,22 +221,22 @@ def prepare_retest_wellcome_msg(self): return " * This repository does not support retest actions" if not retest_msg else retest_msg - def add_api_users_to_auto_verified_and_merged_users(self): + def add_api_users_to_auto_verified_and_merged_users(self) -> None: apis_and_tokens = get_apis_and_tokes_from_config(config=self.config, repository_name=self.repository_name) self.auto_verified_and_merged_users.extend([_api[0].get_user().login for _api in apis_and_tokens]) - def _set_log_prefix_color(self): - repo_str = "\033[1;{color}m{name}\033[1;0m" - color_file = "/tmp/color.json" + def _set_log_prefix_color(self) -> None: + repo_str: str = "\033[1;{color}m{name}\033[1;0m" + color_file: str = "/tmp/color.json" try: with open(color_file) as fd: color_json = json.load(fd) except Exception: - color_json = {} + color_json: Dict[str, int] = {} color = color_json.get(self.repository_name) if not color: - color = random.choice(range(31, 39)) + color: int = random.choice(range(31, 39)) color_json[self.repository_name] = color self.log_prefix_with_color = repo_str.format(color=color, name=self.repository_name) @@ -242,26 +245,26 @@ def _set_log_prefix_color(self): json.dump(color_json, fd) @property - def log_prefix(self): + def log_prefix(self) -> str: return ( f"{self.log_prefix_with_color}({self.log_uuid})[PR {self.pull_request.number}]:" if self.pull_request else f"{self.log_prefix_with_color}:({self.log_uuid})" ) - def hash_token(self, message): + def hash_token(self, message: str) -> str: hashed_message = message.replace(self.token, "*****") return hashed_message - def app_logger_info(self, message): + def app_logger_info(self, message: str) -> None: hashed_message = self.hash_token(message=message) LOGGER.info(hashed_message) - def app_logger_error(self, message): + def app_logger_error(self, message: str) -> None: hashed_message = self.hash_token(message=message) LOGGER.error(hashed_message) 
- def process_hook(self, data, event_log): + def process_hook(self, data: str, event_log: str) -> None: LOGGER.info(f"{self.log_prefix} {event_log}") if data == "ping": return @@ -281,9 +284,9 @@ def process_hook(self, data, event_log): elif data == "check_run": self.process_pull_request_check_run_webhook_data() - def process_pull_request_check_run_webhook_data(self): - _check_run = self.hook_data["check_run"] - check_run_name = _check_run["name"] + def process_pull_request_check_run_webhook_data(self) -> None: + _check_run: Dict[str, Any] = self.hook_data["check_run"] + check_run_name: str = _check_run["name"] if check_run_name == CAN_BE_MERGED_STR: LOGGER.info(f"{self.log_prefix} check_run '{check_run_name}' skipped") return @@ -303,7 +306,7 @@ def process_pull_request_check_run_webhook_data(self): self.check_if_can_be_merged() break - def _repo_data_from_config(self): + def _repo_data_from_config(self) -> None: config_data = self.config.data # Global repositories configuration repo_data = self.config.get_repository( repository_name=self.repository_name @@ -315,7 +318,7 @@ def _repo_data_from_config(self): self.github_app_id = get_value_from_dicts( primary_dict=repo_data, secondary_dict=config_data, key="github-app-id" ) - self.repository_full_name = repo_data["name"] + self.repository_full_name: str = repo_data["name"] self.pypi = get_value_from_dicts(primary_dict=repo_data, secondary_dict=config_data, key="pypi") self.verified_job = get_value_from_dicts( primary_dict=repo_data, @@ -333,43 +336,44 @@ def _repo_data_from_config(self): self.slack_webhook_url = get_value_from_dicts( primary_dict=repo_data, secondary_dict=config_data, key="slack_webhook_url" ) - self.build_and_push_container = repo_data.get("container") + self.build_and_push_container: Optional[Dict[str, Any]] = repo_data.get("container") self.dockerhub = get_value_from_dicts(primary_dict=repo_data, secondary_dict=config_data, key="docker") self.pre_commit = get_value_from_dicts(primary_dict=repo_data, secondary_dict=config_data, key="pre-commit") self.jira = get_value_from_dicts(primary_dict=repo_data, secondary_dict=config_data, key="jira") if self.jira: - self.jira_server = self.jira.get("server") - self.jira_project = self.jira.get("project") - self.jira_token = self.jira.get("token") - self.jira_epic = self.jira.get("epic") - self.jira_user_mapping = self.jira.get("user-mapping", {}) + self.jira_server: Optional[str] = self.jira.get("server") + self.jira_project: Optional[str] = self.jira.get("project") + self.jira_token: Optional[str] = self.jira.get("token") + self.jira_epic: Optional[str] = self.jira.get("epic") + self.jira_user_mapping: Dict[str, str] = self.jira.get("user-mapping", {}) # Check if repository is enabled for jira self.jira_tracking = get_value_from_dicts( primary_dict=repo_data, secondary_dict=config_data, key="jira-tracking" ) if self.jira_tracking: - self.jira_enabled_repository = all([self.jira_server, self.jira_project, self.jira_token]) + self.jira_enabled_repository: bool = all([self.jira_server, self.jira_project, self.jira_token]) if not self.jira_enabled_repository: # if not (self.jira_enabled_repository := all([self.jira_server, self.jira_project, self.jira_token])): LOGGER.error( - f"{self.log_prefix} Jira configuration is not valid. Server: {self.jira_server}, Project: {self.jira_project}, Token: {self.jira_token}" + f"{self.log_prefix} Jira configuration is not valid. 
Server: {self.jira_server}, " + f"Project: {self.jira_project}, Token: {self.jira_token}" ) if self.dockerhub: - self.dockerhub_username = self.dockerhub["username"] - self.dockerhub_password = self.dockerhub["password"] + self.dockerhub_username: str = self.dockerhub["username"] + self.dockerhub_password: str = self.dockerhub["password"] if self.build_and_push_container: - self.container_repository_username = self.build_and_push_container["username"] - self.container_repository_password = self.build_and_push_container["password"] - self.container_repository = self.build_and_push_container["repository"] - self.dockerfile = self.build_and_push_container.get("dockerfile", "Dockerfile") - self.container_tag = self.build_and_push_container.get("tag", "latest") - self.container_build_args = self.build_and_push_container.get("build-args") - self.container_command_args = self.build_and_push_container.get("args") - self.container_release = self.build_and_push_container.get("release") + self.container_repository_username: str = self.build_and_push_container["username"] + self.container_repository_password: str = self.build_and_push_container["password"] + self.container_repository: str = self.build_and_push_container["repository"] + self.dockerfile: str = self.build_and_push_container.get("dockerfile", "Dockerfile") + self.container_tag: str = self.build_and_push_container.get("tag", "latest") + self.container_build_args: Optional[str] = self.build_and_push_container.get("build-args") + self.container_command_args: Optional[str] = self.build_and_push_container.get("args") + self.container_release: bool = self.build_and_push_container.get("release") self.auto_verified_and_merged_users = get_value_from_dicts( primary_dict=repo_data, @@ -384,7 +388,7 @@ def _repo_data_from_config(self): return_on_none=[], ) - def _get_pull_request(self, number=None): + def _get_pull_request(self, number: Optional[int] = None) -> Optional[PullRequest]: if number: return self.repository.get_pull(number) @@ -396,28 +400,30 @@ def _get_pull_request(self, number=None): commit = self.hook_data.get("commit") if commit: - commit_obj = self.repository.get_commit(commit["sha"]) + commit_obj: Commit = self.repository.get_commit(commit["sha"]) with contextlib.suppress(Exception): return commit_obj.get_pulls()[0] LOGGER.info(f"{self.log_prefix} No issue or pull_request found in hook data") - def _get_last_commit(self): + def _get_last_commit(self) -> Commit: return list(self.pull_request.get_commits())[-1] - def label_exists_in_pull_request(self, label): + def label_exists_in_pull_request(self, label: str) -> bool: return any(lb for lb in self.pull_request_labels_names() if lb == label) - def pull_request_labels_names(self): + def pull_request_labels_names(self) -> List[str]: return [lb.name for lb in self._get_pull_request(number=self.pull_request.number).labels] - def skip_if_pull_request_already_merged(self): + def skip_if_pull_request_already_merged(self) -> bool: if self.pull_request.is_merged(): LOGGER.info(f"{self.log_prefix}: PR is merged, not processing") return True + return False + @ignore_exceptions(logger=LOGGER) - def _remove_label(self, label): + def _remove_label(self, label: str): if self.label_exists_in_pull_request(label=label): LOGGER.info(f"{self.log_prefix} Removing label {label}") self.pull_request.remove_from_labels(label) diff --git a/webhook_server_container/libs/jira_api.py b/webhook_server_container/libs/jira_api.py index 96cb72fc..efc46bb8 100644 --- a/webhook_server_container/libs/jira_api.py +++ 
b/webhook_server_container/libs/jira_api.py @@ -1,6 +1,6 @@ import os -from typing import Any, Dict -from jira import JIRA +from typing import Any, Dict, List +from jira import Issue, JIRA from pyhelper_utils.general import ignore_exceptions from simple_logger.logger import get_logger @@ -14,7 +14,7 @@ def __init__(self, server: str, project: str, token: str): self.project = project self.token = token - self.conn = JIRA( + self.conn: JIRA = JIRA( server=self.server, token_auth=self.token, ) @@ -33,7 +33,7 @@ def create_story(self, title: str, body: str, epic_key: str, assignee: str) -> s if epic_custom_field := self.get_epic_custom_field(): self.fields.update({epic_custom_field: epic_key}) - _issue = self.conn.create_issue(fields=self.fields) + _issue: Issue = self.conn.create_issue(fields=self.fields) return _issue.key @ignore_exceptions(logger=LOGGER) @@ -45,7 +45,7 @@ def create_closed_subtask(self, title: str, body: str, parent_key: str, assignee "issuetype": {"name": "Sub-task"}, "assignee": {"name": assignee}, }) - _issue = self.conn.create_issue(fields=self.fields) + _issue: Issue = self.conn.create_issue(fields=self.fields) self.close_issue(key=_issue.key) @ignore_exceptions(logger=LOGGER) @@ -57,5 +57,5 @@ def close_issue(self, key: str, comment: str = "") -> None: ) def get_epic_custom_field(self) -> str: - _epic_field_id = [cf["id"] for cf in self.conn.fields() if "Epic Link" in cf["name"]] + _epic_field_id: List[str] = [cf["id"] for cf in self.conn.fields() if "Epic Link" in cf["name"]] return _epic_field_id[0] if _epic_field_id else "" diff --git a/webhook_server_container/utils/constants.py b/webhook_server_container/utils/constants.py index 4ae46638..67f1f3a3 100644 --- a/webhook_server_container/utils/constants.py +++ b/webhook_server_container/utils/constants.py @@ -1,39 +1,46 @@ +from typing import Dict + from fastapi import FastAPI -FASTAPI_APP = FastAPI(title="webhook-server") -OTHER_MAIN_BRANCH = "master" -TOX_STR = "tox" -PRE_COMMIT_STR = "pre-commit" -BUILD_AND_PUSH_CONTAINER_STR = "build-and-push-container" -SUCCESS_STR = "success" -FAILURE_STR = "failure" -IN_PROGRESS_STR = "in_progress" -QUEUED_STR = "queued" -ADD_STR = "add" -DELETE_STR = "delete" -CAN_BE_MERGED_STR = "can-be-merged" -BUILD_CONTAINER_STR = "build-container" -PYTHON_MODULE_INSTALL_STR = "python-module-install" -WIP_STR = "wip" -CHERRY_PICK_LABEL_PREFIX = "cherry-pick-" -CHERRY_PICKED_LABEL_PREFIX = "CherryPicked" -APPROVED_BY_LABEL_PREFIX = "approved-" -CHANGED_REQUESTED_BY_LABEL_PREFIX = "changes-requested-" -COMMENTED_BY_LABEL_PREFIX = "commented-" -BRANCH_LABEL_PREFIX = "branch-" -VERIFIED_LABEL_STR = "verified" -LGTM_STR = "lgtm" -NEEDS_REBASE_LABEL_STR = "needs-rebase" -HAS_CONFLICTS_LABEL_STR = "has-conflicts" -HOLD_LABEL_STR = "hold" -SIZE_LABEL_PREFIX = "size/" -JIRA_STR = "JIRA" +FASTAPI_APP: FastAPI = FastAPI(title="webhook-server") +OTHER_MAIN_BRANCH: str = "master" +TOX_STR: str = "tox" +PRE_COMMIT_STR: str = "pre-commit" +BUILD_AND_PUSH_CONTAINER_STR: str = "build-and-push-container" +SUCCESS_STR: str = "success" +FAILURE_STR: str = "failure" +IN_PROGRESS_STR: str = "in_progress" +QUEUED_STR: str = "queued" +ADD_STR: str = "add" +DELETE_STR: str = "delete" +CAN_BE_MERGED_STR: str = "can-be-merged" +BUILD_CONTAINER_STR: str = "build-container" +PYTHON_MODULE_INSTALL_STR: str = "python-module-install" +WIP_STR: str = "wip" +CHERRY_PICK_LABEL_PREFIX: str = "cherry-pick-" +CHERRY_PICKED_LABEL_PREFIX: str = "CherryPicked" +APPROVED_BY_LABEL_PREFIX: str = "approved-" 
+CHANGED_REQUESTED_BY_LABEL_PREFIX: str = "changes-requested-" +COMMENTED_BY_LABEL_PREFIX: str = "commented-" +BRANCH_LABEL_PREFIX: str = "branch-" +VERIFIED_LABEL_STR: str = "verified" +LGTM_STR: str = "lgtm" +NEEDS_REBASE_LABEL_STR: str = "needs-rebase" +HAS_CONFLICTS_LABEL_STR: str = "has-conflicts" +HOLD_LABEL_STR: str = "hold" +SIZE_LABEL_PREFIX: str = "size/" +JIRA_STR: str = "JIRA" # Gitlab colors require a '#' prefix; e.g: # -USER_LABELS_DICT = {HOLD_LABEL_STR: "B60205", VERIFIED_LABEL_STR: "0E8A16", WIP_STR: "B60205", LGTM_STR: "0E8A16"} +USER_LABELS_DICT: Dict[str, str] = { + HOLD_LABEL_STR: "B60205", + VERIFIED_LABEL_STR: "0E8A16", + WIP_STR: "B60205", + LGTM_STR: "0E8A16", +} -STATIC_LABELS_DICT = { +STATIC_LABELS_DICT: Dict[str, str] = { **USER_LABELS_DICT, CHERRY_PICKED_LABEL_PREFIX: "1D76DB", f"{SIZE_LABEL_PREFIX}L": "F5621C", @@ -47,7 +54,7 @@ HAS_CONFLICTS_LABEL_STR: "B60205", } -DYNAMIC_LABELS_DICT = { +DYNAMIC_LABELS_DICT: Dict[str, str] = { APPROVED_BY_LABEL_PREFIX: "0E8A16", COMMENTED_BY_LABEL_PREFIX: "D93F0B", CHANGED_REQUESTED_BY_LABEL_PREFIX: "F5621C", @@ -56,15 +63,15 @@ JIRA_STR: "1D76DB", } -ALL_LABELS_DICT = {**STATIC_LABELS_DICT, **DYNAMIC_LABELS_DICT} +ALL_LABELS_DICT: Dict[str, str] = {**STATIC_LABELS_DICT, **DYNAMIC_LABELS_DICT} class REACTIONS: - ok = "+1" - notok = "-1" - laugh = "laugh" - confused = "confused" - heart = "heart" - hooray = "hooray" - rocket = "rocket" - eyes = "eyes" + ok: str = "+1" + notok: str = "-1" + laugh: str = "laugh" + confused: str = "confused" + heart: str = "heart" + hooray: str = "hooray" + rocket: str = "rocket" + eyes: str = "eyes" diff --git a/webhook_server_container/utils/dockerhub_rate_limit.py b/webhook_server_container/utils/dockerhub_rate_limit.py index c7e63f30..b1f248d7 100644 --- a/webhook_server_container/utils/dockerhub_rate_limit.py +++ b/webhook_server_container/utils/dockerhub_rate_limit.py @@ -1,22 +1,24 @@ # From https://docs.docker.com/docker-hub/download-rate-limit/#how-can-i-check-my-current-rate +from typing import Any, Dict import requests +from requests import Response class DockerHub: - def __init__(self, username, password): - self.repository = "ratelimitpreview/test" - self.token_url = ( + def __init__(self, username: str, password: str): + self.repository: str = "ratelimitpreview/test" + self.token_url: str = ( f"https://auth.docker.io/token?service=registry.docker.io&scope=repository:{self.repository}:pull" ) - self.registry_url = f"https://registry-1.docker.io/v2/{self.repository}/manifests/latest" + self.registry_url: str = f"https://registry-1.docker.io/v2/{self.repository}/manifests/latest" self.username = username self.password = password @staticmethod - def limit_extractor(str_raw): + def limit_extractor(str_raw: str) -> str: if not str_raw: - return 0 + return "" if ";" in str_raw: split_arr = str_raw.split(";") # TODO: return other values too? @@ -25,22 +27,22 @@ def limit_extractor(str_raw): else: return str_raw - def get_token(self): - _kwargs = {"url": self.token_url} + def get_token(self) -> str: + _kwargs: Dict[str, Any] = {"url": self.token_url} if self.username and self.password: _kwargs["auth"] = (self.username, self.password) - r_token = requests.get(**_kwargs) + r_token: Response = requests.get(**_kwargs) r_token.raise_for_status() - resp_token = r_token.json() - token = resp_token.get("token") + resp_token: Dict[Any, Any] = r_token.json() + token: str = resp_token.get("token") if not token: raise ValueError("Cannot obtain token from Docker Hub. 
Please try again!") return token - def get_registry_limits(self): + def get_registry_limits(self) -> Dict[str, str]: r_registry = requests.head(self.registry_url, headers={"Authorization": f"Bearer {self.get_token()}"}) r_registry.raise_for_status() resp_headers = r_registry.headers diff --git a/webhook_server_container/utils/github_repository_settings.py b/webhook_server_container/utils/github_repository_settings.py index ac890d48..a45fef98 100644 --- a/webhook_server_container/utils/github_repository_settings.py +++ b/webhook_server_container/utils/github_repository_settings.py @@ -2,9 +2,10 @@ import os from concurrent.futures import Future, ThreadPoolExecutor, as_completed from copy import deepcopy -from typing import List +from typing import Any, Dict, List, Optional, Tuple -from github import GithubIntegration, Auth +from github import Branch, Commit, Github, GithubIntegration, Auth, Installation, Label, Repository +from github.Auth import AppAuth from github.GithubException import UnknownObjectException from simple_logger.logger import get_logger @@ -33,12 +34,17 @@ @ignore_exceptions(logger=LOGGER) -def get_branch_sampler(repo, branch_name): +def get_branch_sampler(repo: Repository, branch_name: str) -> Branch: return repo.get_branch(branch=branch_name) @ignore_exceptions(logger=LOGGER) -def set_branch_protection(branch, repository, required_status_checks, github_api): +def set_branch_protection( + branch: Branch, + repository: Repository, + required_status_checks: List[str], + github_api: Github, +) -> None: api_user = github_api.get_user().login LOGGER.info(f"Set repository {repository.name} {branch} settings. enabled checks: {required_status_checks}") branch.edit_protection( @@ -56,7 +62,7 @@ def set_branch_protection(branch, repository, required_status_checks, github_api @ignore_exceptions(logger=LOGGER) -def set_repository_settings(repository): +def set_repository_settings(repository: Repository) -> None: LOGGER.info(f"Set repository {repository.name} settings") repository.edit(delete_branch_on_merge=True, allow_auto_merge=True, allow_update_branch=True) @@ -84,7 +90,12 @@ def set_repository_settings(repository): ) -def get_required_status_checks(repo, data, default_status_checks, exclude_status_checks): +def get_required_status_checks( + repo: Repository, + data: Dict[str, Any], + default_status_checks: List[str], + exclude_status_checks: List[str], +) -> List[str]: if data.get("tox"): default_status_checks.append("tox") @@ -108,9 +119,9 @@ def get_required_status_checks(repo, data, default_status_checks, exclude_status return default_status_checks -def get_user_configures_status_checks(status_checks): - include_status_checks = [] - exclude_status_checks = [] +def get_user_configures_status_checks(status_checks: Dict[str, Any]) -> Tuple: + include_status_checks: List[List[str]] = [] + exclude_status_checks: List[List[str]] = [] if status_checks: include_status_checks = status_checks.get("include-runs", []) exclude_status_checks = status_checks.get("exclude-runs", []) @@ -118,16 +129,19 @@ def get_user_configures_status_checks(status_checks): return include_status_checks, exclude_status_checks -def set_repository_labels(repository): +def set_repository_labels(repository: Repository) -> str: LOGGER.info(f"Set repository {repository.name} labels") - repository_labels = {} + repository_labels: Dict[str, Dict[str, Any]] = {} for label in repository.get_labels(): - repository_labels[label.name.lower()] = {"object": label, "color": label.color} + 
repository_labels[label.name.lower()]: Dict[str, Any] = { + "object": label, + "color": label.color, + } for label, color in STATIC_LABELS_DICT.items(): - label_lower = label.lower() + label_lower: str = label.lower() if label_lower in repository_labels: - repo_label = repository_labels[label_lower]["object"] + repo_label: Label = repository_labels[label_lower]["object"] if repository_labels[label_lower]["color"] == color: continue else: @@ -140,15 +154,15 @@ def set_repository_labels(repository): return f"{repository}: Setting repository labels is done" -def set_repositories_settings(config, github_api): +def set_repositories_settings(config_: Config, github_api: Github) -> None: LOGGER.info("Processing repositories") - config_data = config.data - default_status_checks = config_data.get("default-status-checks", []) - docker = config_data.get("docker") + config_data = config_.data + default_status_checks: List[str] = config_data.get("default-status-checks", []) + docker: Optional[Dict[str, str]] = config_data.get("docker") if docker: LOGGER.info("Login in to docker.io") - docker_username = docker["username"] - docker_password = docker["password"] + docker_username: str = docker["username"] + docker_password: str = docker["password"] os.system(f"podman login -u {docker_username} -p {docker_password} docker.io") futures = [] @@ -171,10 +185,10 @@ def set_repositories_settings(config, github_api): LOGGER.info(result.result()) -def set_repository(data, github_api, default_status_checks): - repository = data["name"] +def set_repository(data: Dict[str, Any], github_api: Github, default_status_checks: List[str]) -> Optional[str]: + repository: str = data["name"] LOGGER.info(f"Processing repository {repository}") - protected_branches = data.get("protected-branches", {}) + protected_branches: Dict[str, Any] = data.get("protected-branches", {}) repo = get_github_repo_api(github_api=github_api, repository=repository) if not repo: LOGGER.error(f"{repository}: Failed to get repository") @@ -234,7 +248,7 @@ def set_repository(data, github_api, default_status_checks): return f"{repository}: Setting repository settings is done" -def set_all_in_progress_check_runs_to_queued(config_, github_api): +def set_all_in_progress_check_runs_to_queued(config_: Config, github_api: Github) -> None: check_runs = ( PYTHON_MODULE_INSTALL_STR, CAN_BE_MERGED_STR, @@ -242,7 +256,8 @@ def set_all_in_progress_check_runs_to_queued(config_, github_api): BUILD_CONTAINER_STR, PRE_COMMIT_STR, ) - futures = [] + futures: List[Future] = [] + with ThreadPoolExecutor() as executor: for _, data in config_.data["repositories"].items(): futures.append( @@ -263,8 +278,10 @@ def set_all_in_progress_check_runs_to_queued(config_, github_api): LOGGER.info(result.result()) -def set_repository_check_runs_to_queued(config_, data, github_api, check_runs): - repository = data["name"] +def set_repository_check_runs_to_queued( + config_: Config, data: Dict[str, Any], github_api: Github, check_runs: Tuple +) -> Optional[str]: + repository: str = data["name"] repository_app_api = get_repository_github_app_api(config_=config_, repository=repository) if not repository_app_api: return @@ -273,7 +290,7 @@ def set_repository_check_runs_to_queued(config_, data, github_api, check_runs): repo = get_github_repo_api(github_api=github_api, repository=repository) LOGGER.info(f"{repository}: Set all {IN_PROGRESS_STR} check runs to {QUEUED_STR}") for pull_request in repo.get_pulls(state="open"): - last_commit = list(pull_request.get_commits())[-1] + last_commit: 
Commit = list(pull_request.get_commits())[-1] for check_run in last_commit.get_check_runs(): if check_run.name in check_runs and check_run.status == IN_PROGRESS_STR: LOGGER.info( @@ -286,14 +303,16 @@ def set_repository_check_runs_to_queued(config_, data, github_api, check_runs): @ignore_exceptions(logger=LOGGER) -def get_repository_github_app_api(config_, repository): +def get_repository_github_app_api(config_: Config, repository: Repository) -> Optional[Installation]: LOGGER.info("Getting repositories GitHub app API") with open(os.path.join(config_.data_dir, "webhook-server.private-key.pem")) as fd: private_key = fd.read() - github_app_id = config_.data["github-app-id"] - auth = Auth.AppAuth(app_id=github_app_id, private_key=private_key) - app_instance = GithubIntegration(auth=auth) + github_app_id: int = config_.data["github-app-id"] + auth: AppAuth = Auth.AppAuth(app_id=github_app_id, private_key=private_key) + app_instance: GithubIntegration = GithubIntegration(auth=auth) + owner: str + repo: str owner, repo = repository.split("/") try: return app_instance.get_repo_installation(owner=owner, repo=repo).get_github_for_installation() @@ -307,7 +326,7 @@ def get_repository_github_app_api(config_, repository): if __name__ == "__main__": config = Config() api, _ = get_api_with_highest_rate_limit(config=config) - set_repositories_settings(config=config, github_api=api) + set_repositories_settings(config_=config, github_api=api) set_all_in_progress_check_runs_to_queued( config_=config, github_api=api, From 1ce805d4592de2f9f45f4c16c34b411340132fd8 Mon Sep 17 00:00:00 2001 From: rnetser Date: Wed, 3 Jul 2024 08:42:53 +0300 Subject: [PATCH 02/31] Add typing --- webhook_server_container/utils/dockerhub_rate_limit.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/webhook_server_container/utils/dockerhub_rate_limit.py b/webhook_server_container/utils/dockerhub_rate_limit.py index b1f248d7..18cd28e4 100644 --- a/webhook_server_container/utils/dockerhub_rate_limit.py +++ b/webhook_server_container/utils/dockerhub_rate_limit.py @@ -16,16 +16,16 @@ def __init__(self, username: str, password: str): self.password = password @staticmethod - def limit_extractor(str_raw: str) -> str: + def limit_extractor(str_raw: str) -> int: if not str_raw: - return "" + return 0 if ";" in str_raw: split_arr = str_raw.split(";") # TODO: return other values too? 
if len(split_arr) > 0: - return split_arr[0] + return int(split_arr[0]) else: - return str_raw + return int(str_raw) def get_token(self) -> str: _kwargs: Dict[str, Any] = {"url": self.token_url} @@ -42,7 +42,7 @@ def get_token(self) -> str: return token - def get_registry_limits(self) -> Dict[str, str]: + def get_registry_limits(self) -> Dict[str, int]: r_registry = requests.head(self.registry_url, headers={"Authorization": f"Bearer {self.get_token()}"}) r_registry.raise_for_status() resp_headers = r_registry.headers From 90d74bc65264243e25fbc8e3212aff8e58b005e3 Mon Sep 17 00:00:00 2001 From: rnetser Date: Wed, 3 Jul 2024 11:59:04 +0300 Subject: [PATCH 03/31] fix type --- webhook_server_container/libs/github_api.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/webhook_server_container/libs/github_api.py b/webhook_server_container/libs/github_api.py index d633b5a7..0a42877a 100644 --- a/webhook_server_container/libs/github_api.py +++ b/webhook_server_container/libs/github_api.py @@ -101,12 +101,12 @@ def __init__(self, hook_data): self.container_repository: str self.dockerfile: str self.container_tag: str - self.container_build_args: List - self.container_command_args: List + self.container_build_args: List[str] + self.container_command_args: List[str] self.repository_full_name: str self.github_app_id: str self.container_release: bool - self.can_be_merged_required_labels: Any + self.can_be_merged_required_labels: List[str] self.jira: Dict[str, Any] self.jira_tracking: bool = False self.jira_enabled_repository: bool = False From d31ce6086ea1649b0a13799071dcb7251699d3b9 Mon Sep 17 00:00:00 2001 From: rnetser Date: Wed, 3 Jul 2024 12:20:03 +0300 Subject: [PATCH 04/31] remove DockerHub --- webhook_server_container/libs/github_api.py | 10 ---- .../utils/dockerhub_rate_limit.py | 54 ------------------- 2 files changed, 64 deletions(-) delete mode 100644 webhook_server_container/utils/dockerhub_rate_limit.py diff --git a/webhook_server_container/libs/github_api.py b/webhook_server_container/libs/github_api.py index 0a42877a..bf82ed24 100644 --- a/webhook_server_container/libs/github_api.py +++ b/webhook_server_container/libs/github_api.py @@ -57,7 +57,6 @@ OTHER_MAIN_BRANCH, ) from pyhelper_utils.general import ignore_exceptions -from webhook_server_container.utils.dockerhub_rate_limit import DockerHub from webhook_server_container.utils.github_repository_settings import ( get_repository_github_app_api, ) @@ -94,8 +93,6 @@ def __init__(self, hook_data): self.all_required_status_checks: List[str] = [] # filled by self._repo_data_from_config() - self.dockerhub_username: str - self.dockerhub_password: str self.container_repository_username: str self.container_repository_password: str self.container_repository: str @@ -139,7 +136,6 @@ def __init__(self, hook_data): self.add_api_users_to_auto_verified_and_merged_users() self.clone_repository_path = os.path.join("/", self.repository.name) - self.dockerhub = DockerHub(username=self.dockerhub_username, password=self.dockerhub_password) self.pull_request = self._get_pull_request() self.owners_content = self.get_owners_content() @@ -337,7 +333,6 @@ def _repo_data_from_config(self) -> None: primary_dict=repo_data, secondary_dict=config_data, key="slack_webhook_url" ) self.build_and_push_container: Optional[Dict[str, Any]] = repo_data.get("container") - self.dockerhub = get_value_from_dicts(primary_dict=repo_data, secondary_dict=config_data, key="docker") self.pre_commit = get_value_from_dicts(primary_dict=repo_data, 
secondary_dict=config_data, key="pre-commit") self.jira = get_value_from_dicts(primary_dict=repo_data, secondary_dict=config_data, key="jira") @@ -361,10 +356,6 @@ def _repo_data_from_config(self) -> None: f"Project: {self.jira_project}, Token: {self.jira_token}" ) - if self.dockerhub: - self.dockerhub_username: str = self.dockerhub["username"] - self.dockerhub_password: str = self.dockerhub["password"] - if self.build_and_push_container: self.container_repository_username: str = self.build_and_push_container["username"] self.container_repository_password: str = self.build_and_push_container["password"] @@ -1791,7 +1782,6 @@ def log_repository_features(self): verified-job: {self.verified_job} tox-enabled: {self.tox_enabled} tox-python-version: {self.tox_python_version} - docker: {self.dockerhub} pre-commit: {self.pre_commit} slack-webhook-url: {self.slack_webhook_url} container: {self.build_and_push_container} diff --git a/webhook_server_container/utils/dockerhub_rate_limit.py b/webhook_server_container/utils/dockerhub_rate_limit.py deleted file mode 100644 index 18cd28e4..00000000 --- a/webhook_server_container/utils/dockerhub_rate_limit.py +++ /dev/null @@ -1,54 +0,0 @@ -# From https://docs.docker.com/docker-hub/download-rate-limit/#how-can-i-check-my-current-rate -from typing import Any, Dict - -import requests -from requests import Response - - -class DockerHub: - def __init__(self, username: str, password: str): - self.repository: str = "ratelimitpreview/test" - self.token_url: str = ( - f"https://auth.docker.io/token?service=registry.docker.io&scope=repository:{self.repository}:pull" - ) - self.registry_url: str = f"https://registry-1.docker.io/v2/{self.repository}/manifests/latest" - self.username = username - self.password = password - - @staticmethod - def limit_extractor(str_raw: str) -> int: - if not str_raw: - return 0 - - if ";" in str_raw: - split_arr = str_raw.split(";") # TODO: return other values too? - if len(split_arr) > 0: - return int(split_arr[0]) - else: - return int(str_raw) - - def get_token(self) -> str: - _kwargs: Dict[str, Any] = {"url": self.token_url} - if self.username and self.password: - _kwargs["auth"] = (self.username, self.password) - - r_token: Response = requests.get(**_kwargs) - r_token.raise_for_status() - resp_token: Dict[Any, Any] = r_token.json() - token: str = resp_token.get("token") - - if not token: - raise ValueError("Cannot obtain token from Docker Hub. 
Please try again!") - - return token - - def get_registry_limits(self) -> Dict[str, int]: - r_registry = requests.head(self.registry_url, headers={"Authorization": f"Bearer {self.get_token()}"}) - r_registry.raise_for_status() - resp_headers = r_registry.headers - - return { - "limit": self.limit_extractor(resp_headers.get("RateLimit-Limit")), - "remaining": self.limit_extractor(resp_headers.get("RateLimit-Remaining")), - "reset": self.limit_extractor(resp_headers.get("RateLimit-Reset")), - } From bd1c4c674dd24cc1623c72b6e3b2c901da07807b Mon Sep 17 00:00:00 2001 From: rnetser Date: Wed, 3 Jul 2024 12:21:47 +0300 Subject: [PATCH 05/31] ad mypy --- .pre-commit-config.yaml | 7 +++++++ pyproject.toml | 9 +++++++++ 2 files changed, 16 insertions(+) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 0e0007ab..04783193 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -49,3 +49,10 @@ repos: rev: v3.0.1 hooks: - id: docker-compose-check + + - repo: https://github.com/pre-commit/mirrors-mypy + rev: v1.10.0 + hooks: + - id: mypy + exclude: ^(openshift_day2_configurator/tests/) + additional_dependencies: [types-all] diff --git a/pyproject.toml b/pyproject.toml index 84f465ca..a30114c7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -7,6 +7,15 @@ output-format = "grouped" [tool.ruff.format] exclude = [".git", ".venv", ".mypy_cache", ".tox", "__pycache__"] +[tool.mypy] +check_untyped_defs = true +disallow_any_generics = true +disallow_incomplete_defs = true +disallow_untyped_defs = true +no_implicit_optional = true +show_error_codes = true +warn_unused_ignores = true + [tool.poetry] name = "github-webhook-server" version = "0.0.0" From 95a7aa84a82f6c35abefb60e9eaaa1e7030306ae Mon Sep 17 00:00:00 2001 From: rnetser Date: Wed, 3 Jul 2024 13:57:56 +0300 Subject: [PATCH 06/31] update return type --- .pre-commit-config.yaml | 1 - pyproject.toml | 4 +- webhook_server_container/app.py | 10 +-- webhook_server_container/libs/config.py | 8 +-- .../utils/github_repository_settings.py | 51 +++++++-------- webhook_server_container/utils/helpers.py | 62 ++++++++++++++----- webhook_server_container/utils/webhook.py | 35 +++++------ 7 files changed, 95 insertions(+), 76 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 04783193..476fc073 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -54,5 +54,4 @@ repos: rev: v1.10.0 hooks: - id: mypy - exclude: ^(openshift_day2_configurator/tests/) additional_dependencies: [types-all] diff --git a/pyproject.toml b/pyproject.toml index a30114c7..22dedcb0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,9 +9,9 @@ exclude = [".git", ".venv", ".mypy_cache", ".tox", "__pycache__"] [tool.mypy] check_untyped_defs = true -disallow_any_generics = true +disallow_any_generics = false disallow_incomplete_defs = true -disallow_untyped_defs = true +# disallow_untyped_defs = true no_implicit_optional = true show_error_codes = true warn_unused_ignores = true diff --git a/webhook_server_container/app.py b/webhook_server_container/app.py index ca449134..19a2b10d 100644 --- a/webhook_server_container/app.py +++ b/webhook_server_container/app.py @@ -1,4 +1,6 @@ import os +from typing import Any, Dict + from fastapi import Request import requests import urllib3 @@ -8,21 +10,19 @@ from webhook_server_container.libs.github_api import GitHubApi from webhook_server_container.utils.constants import FASTAPI_APP -APP_ROOT_PATH = "/webhook_server" -REPOSITORIES_APP_API = {} -MISSING_APP_REPOSITORIES = [] 
+APP_ROOT_PATH: str = "/webhook_server" urllib3.disable_warnings() LOGGER = get_logger(name="app", filename=os.environ.get("WEBHOOK_SERVER_LOG_FILE")) @FASTAPI_APP.get(f"{APP_ROOT_PATH}/healthcheck") -def healthcheck(): +def healthcheck() -> Dict[str, Any]: return {"status": requests.status_codes.codes.ok, "message": "Alive"} @FASTAPI_APP.post(APP_ROOT_PATH) -async def process_webhook(request: Request): +async def process_webhook(request: Request) -> Dict[str, Any]: process_failed_msg = {"status": requests.status_codes.codes.server_error, "Message": "Process failed"} try: hook_data = await request.json() diff --git a/webhook_server_container/libs/config.py b/webhook_server_container/libs/config.py index af04a718..39e7d393 100644 --- a/webhook_server_container/libs/config.py +++ b/webhook_server_container/libs/config.py @@ -1,11 +1,11 @@ import os -from typing import Any, Dict, Optional +from typing import Any, Dict import yaml class Config: - def __init__(self): + def __init__(self) -> None: self.data_dir: str = os.environ.get("WEBHOOK_SERVER_DATA_DIR", "/webhook_server") self.config_path: str = os.path.join(self.data_dir, "config.yaml") self.exists() @@ -19,5 +19,5 @@ def data(self) -> Dict[str, Any]: with open(self.config_path) as fd: return yaml.safe_load(fd) - def get_repository(self, repository_name: str) -> Optional[Dict[str, Any]]: - return self.data["repositories"].get(repository_name) + def get_repository(self, repository_name: str) -> Dict[str, Any]: + return self.data["repositories"].get(repository_name, {}) diff --git a/webhook_server_container/utils/github_repository_settings.py b/webhook_server_container/utils/github_repository_settings.py index a45fef98..704b4a25 100644 --- a/webhook_server_container/utils/github_repository_settings.py +++ b/webhook_server_container/utils/github_repository_settings.py @@ -23,6 +23,7 @@ from pyhelper_utils.general import ignore_exceptions from webhook_server_container.utils.helpers import ( get_api_with_highest_rate_limit, + get_future_results, get_github_repo_api, ) @@ -44,7 +45,7 @@ def set_branch_protection( repository: Repository, required_status_checks: List[str], github_api: Github, -) -> None: +) -> bool: api_user = github_api.get_user().login LOGGER.info(f"Set repository {repository.name} {branch} settings. 
enabled checks: {required_status_checks}") branch.edit_protection( @@ -60,6 +61,8 @@ def set_branch_protection( apps_bypass_pull_request_allowances=[api_user], ) + return True + @ignore_exceptions(logger=LOGGER) def set_repository_settings(repository: Repository) -> None: @@ -119,9 +122,9 @@ def get_required_status_checks( return default_status_checks -def get_user_configures_status_checks(status_checks: Dict[str, Any]) -> Tuple: - include_status_checks: List[List[str]] = [] - exclude_status_checks: List[List[str]] = [] +def get_user_configures_status_checks(status_checks: Dict[str, Any]) -> Tuple[List[str], List[str]]: + include_status_checks: List[str] = [] + exclude_status_checks: List[str] = [] if status_checks: include_status_checks = status_checks.get("include-runs", []) exclude_status_checks = status_checks.get("exclude-runs", []) @@ -133,7 +136,7 @@ def set_repository_labels(repository: Repository) -> str: LOGGER.info(f"Set repository {repository.name} labels") repository_labels: Dict[str, Dict[str, Any]] = {} for label in repository.get_labels(): - repository_labels[label.name.lower()]: Dict[str, Any] = { + repository_labels[label.name.lower()] = { "object": label, "color": label.color, } @@ -179,30 +182,25 @@ def set_repositories_settings(config_: Config, github_api: Github) -> None: ) ) - for result in as_completed(futures): - if result.exception(): - LOGGER.error(result.exception()) - LOGGER.info(result.result()) + get_future_results(futures=futures) -def set_repository(data: Dict[str, Any], github_api: Github, default_status_checks: List[str]) -> Optional[str]: +def set_repository(data: Dict[str, Any], github_api: Github, default_status_checks: List[str]) -> Tuple[bool, str]: repository: str = data["name"] LOGGER.info(f"Processing repository {repository}") protected_branches: Dict[str, Any] = data.get("protected-branches", {}) repo = get_github_repo_api(github_api=github_api, repository=repository) if not repo: - LOGGER.error(f"{repository}: Failed to get repository") - return + return False, f"{repository}: Failed to get repository" try: set_repository_labels(repository=repo) set_repository_settings(repository=repo) if repo.private: - LOGGER.warning(f"{repository}: Repository is private, skipping setting branch settings") - return + return False, f"{repository}: Repository is private, skipping setting branch settings" - futures: List[Future] = [] + futures: List["Future"] = [] with ThreadPoolExecutor() as executor: for branch_name, status_checks in protected_branches.items(): @@ -240,12 +238,11 @@ def set_repository(data: Dict[str, Any], github_api: Github, default_status_chec for result in as_completed(futures): if result.exception(): LOGGER.error(result.exception()) - LOGGER.info(result.result()) - except UnknownObjectException: - LOGGER.error(f"{repository}: Failed to get repository settings") + except UnknownObjectException as ex: + return False, f"{repository}: Failed to get repository settings, ex: {ex}" - return f"{repository}: Setting repository settings is done" + return True, f"{repository}: Setting repository settings is done" def set_all_in_progress_check_runs_to_queued(config_: Config, github_api: Github) -> None: @@ -256,7 +253,7 @@ def set_all_in_progress_check_runs_to_queued(config_: Config, github_api: Github BUILD_CONTAINER_STR, PRE_COMMIT_STR, ) - futures: List[Future] = [] + futures: List["Future"] = [] with ThreadPoolExecutor() as executor: for _, data in config_.data["repositories"].items(): @@ -272,19 +269,16 @@ def 
set_all_in_progress_check_runs_to_queued(config_: Config, github_api: Github ) ) - for result in as_completed(futures): - if result.exception(): - LOGGER.error(result.exception()) - LOGGER.info(result.result()) + get_future_results(futures=futures) def set_repository_check_runs_to_queued( - config_: Config, data: Dict[str, Any], github_api: Github, check_runs: Tuple -) -> Optional[str]: + config_: Config, data: Dict[str, Any], github_api: Github, check_runs: Tuple[str] +) -> Tuple[bool, str]: repository: str = data["name"] repository_app_api = get_repository_github_app_api(config_=config_, repository=repository) if not repository_app_api: - return + return False, "Failed to get repositories GitHub app API" app_api = get_github_repo_api(github_api=repository_app_api, repository=repository) repo = get_github_repo_api(github_api=github_api, repository=repository) @@ -299,7 +293,7 @@ def set_repository_check_runs_to_queued( ) app_api.create_check_run(name=check_run.name, head_sha=last_commit.sha, status=QUEUED_STR) - return f"{repository}: Set check run status to {QUEUED_STR} is done" + return True, f"{repository}: Set check run status to {QUEUED_STR} is done" @ignore_exceptions(logger=LOGGER) @@ -321,6 +315,7 @@ def get_repository_github_app_api(config_: Config, repository: Repository) -> Op f"Repository {repository} not found by manage-repositories-app, " f"make sure the app installed (https://github.com/apps/manage-repositories-app)" ) + return None if __name__ == "__main__": diff --git a/webhook_server_container/utils/helpers.py b/webhook_server_container/utils/helpers.py index 17a72256..affc3fe9 100644 --- a/webhook_server_container/utils/helpers.py +++ b/webhook_server_container/utils/helpers.py @@ -1,18 +1,21 @@ +from __future__ import annotations import datetime import os import shlex import subprocess -from typing import Any, Dict, Optional, Tuple +from concurrent.futures import Future, as_completed +from typing import Any, Dict, List, Optional, Tuple from pyhelper_utils.general import ignore_exceptions from colorama import Fore -from github import Github +from github import Github, RateLimit, Repository from simple_logger.logger import get_logger +from webhook_server_container.libs.config import Config LOGGER = get_logger(name="helpers", filename=os.environ.get("WEBHOOK_SERVER_LOG_FILE")) -def extract_key_from_dict(key, _dict): +def extract_key_from_dict(key: Any, _dict: Dict[Any, Any]) -> Any: if isinstance(_dict, dict): for _key, _val in _dict.items(): if _key == key: @@ -27,7 +30,7 @@ def extract_key_from_dict(key, _dict): @ignore_exceptions(logger=LOGGER) -def get_github_repo_api(github_api, repository): +def get_github_repo_api(github_api: Github, repository: int | str) -> Repository: return github_api.get_repo(repository) @@ -93,15 +96,15 @@ def run_command( return False, out_decoded, err_decoded -def get_apis_and_tokes_from_config(config, repository_name=None): - apis_and_tokens = [] - tokens = None - if repository_name: - repo_data = config.get_repository(repository_name=repository_name) - tokens = repo_data.get("github-tokens") +def get_apis_and_tokes_from_config(config: Config, repository_name: str = "") -> List[Tuple[Github, str]]: + apis_and_tokens: List[Tuple[Github, str]] = [] - if not tokens: - tokens = config.data["github-tokens"] + tokens = get_value_from_dicts( + primary_dict=config.get_repository(repository_name=repository_name), + secondary_dict=config.data, + key="github-tokens", + return_on_none=[], + ) for _token in tokens: 
apis_and_tokens.append((Github(login_or_token=_token), _token)) @@ -110,7 +113,7 @@ def get_apis_and_tokes_from_config(config, repository_name=None): @ignore_exceptions(logger=LOGGER) -def get_api_with_highest_rate_limit(config, repository_name=None): +def get_api_with_highest_rate_limit(config: Config, repository_name: str = "") -> Tuple[Github | None, str | None]: """ Get API with the highest rate limit @@ -121,7 +124,11 @@ def get_api_with_highest_rate_limit(config, repository_name=None): Returns: tuple: API, token """ - api, token, _api_user, rate_limit = None, None, None, None + api: Optional[Github] = None + token: Optional[str] = None + _api_user: str = "" + rate_limit: Optional[RateLimit] = None + remaining = 0 apis_and_tokens = get_apis_and_tokes_from_config(config=config, repository_name=repository_name) @@ -138,14 +145,19 @@ def get_api_with_highest_rate_limit(config, repository_name=None): return api, token -def log_rate_limit(rate_limit, api_user): - time_for_limit_reset = (rate_limit.core.reset - datetime.datetime.now(tz=datetime.timezone.utc)).seconds +def log_rate_limit(rate_limit: RateLimit, api_user: str) -> None: + rate_limit_str: str + time_for_limit_reset: int = (rate_limit.core.reset - datetime.datetime.now(tz=datetime.timezone.utc)).seconds + if rate_limit.core.remaining < 700: rate_limit_str = f"{Fore.RED}{rate_limit.core.remaining}{Fore.RESET}" + elif rate_limit.core.remaining < 2000: rate_limit_str = f"{Fore.YELLOW}{rate_limit.core.remaining}{Fore.RESET}" + else: rate_limit_str = f"{Fore.GREEN}{rate_limit.core.remaining}{Fore.RESET}" + LOGGER.info( f"{Fore.CYAN}[{api_user}] API rate limit:{Fore.RESET} Current {rate_limit_str} of {rate_limit.core.limit}. " f"Reset in {rate_limit.core.reset} [{datetime.timedelta(seconds=time_for_limit_reset)}] " @@ -154,7 +166,10 @@ def log_rate_limit(rate_limit, api_user): def get_value_from_dicts( - primary_dict: Dict[Any, Any], secondary_dict: Dict[Any, Any], key: str, return_on_none: Optional[Any] = None + primary_dict: Dict[Any, Any], + secondary_dict: Dict[Any, Any], + key: str, + return_on_none: Optional[Any] = None, ) -> Any: """ Get value from two dictionaries. @@ -162,3 +177,16 @@ def get_value_from_dicts( If value is not found in primary_dict, try to get it from secondary_dict, otherwise return return_on_none. 
""" return primary_dict.get(key, secondary_dict.get(key, return_on_none)) + + +def get_future_results(futures: List["Future"]) -> None: + for result in as_completed(futures): + if result.exception(): + LOGGER.error(result.exception()) + + _res = result.result() + if _res[0]: + LOGGER.info(_res[1]) + + else: + LOGGER.error(_res[1]) diff --git a/webhook_server_container/utils/webhook.py b/webhook_server_container/utils/webhook.py index f611aadb..c95acc11 100644 --- a/webhook_server_container/utils/webhook.py +++ b/webhook_server_container/utils/webhook.py @@ -1,12 +1,14 @@ -from concurrent.futures import ThreadPoolExecutor, as_completed +from concurrent.futures import ThreadPoolExecutor import os +from typing import Any, Dict, List, Tuple -from github import Github +from github import Github, HookDescription from simple_logger.logger import get_logger from webhook_server_container.libs.config import Config from webhook_server_container.utils.helpers import ( get_api_with_highest_rate_limit, + get_future_results, get_github_repo_api, ) from pyhelper_utils.general import ignore_exceptions @@ -16,29 +18,27 @@ @ignore_exceptions(logger=LOGGER) -def process_github_webhook(data, github_api, webhook_ip): - repository = data["name"] +def process_github_webhook(data: Dict[str, Any], github_api: Github, webhook_ip: str) -> Tuple[bool, str]: + repository: str = data["name"] repo = get_github_repo_api(github_api=github_api, repository=repository) if not repo: - LOGGER.error(f"Could not find repository {repository}") - return + return False, f"Could not find repository {repository}" - config = {"url": f"{webhook_ip}/webhook_server", "content_type": "json"} - events = data.get("events", ["*"]) + config_: Dict[str, str] = {"url": f"{webhook_ip}/webhook_server", "content_type": "json"} + events: List[str] = data.get("events", ["*"]) try: - hooks = list(repo.get_hooks()) + hooks: List[HookDescription] = list(repo.get_hooks()) except Exception as ex: - LOGGER.error(f"Could not list webhook for {repository}, check token permissions: {ex}") - return + return False, f"Could not list webhook for {repository}, check token permissions: {ex}" for _hook in hooks: if webhook_ip in _hook.config["url"]: - return f"{repository}: Hook already exists - {_hook.config['url']}" + return True, f"{repository}: Hook already exists - {_hook.config['url']}" - LOGGER.info(f"Creating webhook: {config['url']} for {repository} with events: {events}") - repo.create_hook(name="web", config=config, events=events, active=True) - return f"{repository}: Create webhook is done" + LOGGER.info(f"Creating webhook: {config_['url']} for {repository} with events: {events}") + repo.create_hook(name="web", config=config_, events=events, active=True) + return True, f"{repository}: Create webhook is done" def create_webhook(config_: Config, github_api: Github) -> None: @@ -55,10 +55,7 @@ def create_webhook(config_: Config, github_api: Github) -> None: ) ) - for result in as_completed(futures): - if result.exception(): - LOGGER.error(result.exception()) - LOGGER.info(result.result()) + get_future_results(futures=futures) if __name__ == "__main__": From 77b1f9fbd45b46f9401b2d29da3146cadf22c118 Mon Sep 17 00:00:00 2001 From: rnetser Date: Wed, 3 Jul 2024 14:09:02 +0300 Subject: [PATCH 07/31] update return type --- webhook_server_container/app.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/webhook_server_container/app.py b/webhook_server_container/app.py index 19a2b10d..fb15b64e 100644 --- a/webhook_server_container/app.py +++ 
b/webhook_server_container/app.py @@ -36,7 +36,7 @@ async def process_webhook(request: Request) -> Dict[str, Any]: LOGGER.error(f"Failed to initialized GitHubApi instance: {ex}") return process_failed_msg - github_event = request.headers.get("X-GitHub-Event") + github_event: str = request.headers["X-GitHub-Event"] event_log = f"Event type: {github_event}. event ID: {request.headers.get('X-GitHub-Delivery')}" try: api.process_hook(data=github_event, event_log=event_log) From 0d76e00189162acea9ca930621e4e4c0d216bede Mon Sep 17 00:00:00 2001 From: Meni Yakove Date: Wed, 3 Jul 2024 14:32:27 +0300 Subject: [PATCH 08/31] More typing --- webhook_server_container/libs/github_api.py | 84 ++++++++----------- .../utils/github_repository_settings.py | 14 ++-- webhook_server_container/utils/helpers.py | 8 +- 3 files changed, 49 insertions(+), 57 deletions(-) diff --git a/webhook_server_container/libs/github_api.py b/webhook_server_container/libs/github_api.py index bf82ed24..92016cfe 100644 --- a/webhook_server_container/libs/github_api.py +++ b/webhook_server_container/libs/github_api.py @@ -82,39 +82,22 @@ def __init__(self, hook_data): self.app: FastAPI = FASTAPI_APP self.hook_data: Dict[Any, Any] = hook_data self.repository_name: str = hook_data["repository"]["name"] - self.log_prefix_with_color: str | None = None - self.pull_request: PullRequest | None = None - self.parent_committer: str | None = None + self.log_prefix_with_color: str = "" + self.pull_request: Optional[PullRequest] = None + self.parent_committer: str = "" self.log_uuid: str = shortuuid.uuid()[:5] self.container_repo_dir: str = "/tmp/repository" - self.jira_conn: JIRA | None = None + self.jira_conn: Optional[JIRA] = None self.jira_track_pr: bool = False - self.issue_title: str | None = None + self.issue_title: str = "" self.all_required_status_checks: List[str] = [] - - # filled by self._repo_data_from_config() - self.container_repository_username: str - self.container_repository_password: str - self.container_repository: str - self.dockerfile: str - self.container_tag: str - self.container_build_args: List[str] - self.container_command_args: List[str] - self.repository_full_name: str - self.github_app_id: str - self.container_release: bool - self.can_be_merged_required_labels: List[str] - self.jira: Dict[str, Any] - self.jira_tracking: bool = False - self.jira_enabled_repository: bool = False - # End of filled by self._repo_data_from_config() - self.config = Config() self._repo_data_from_config() self._set_log_prefix_color() - # self.log_repository_features() - self.github_app_api = get_repository_github_app_api(config_=self.config, repository=self.repository_full_name) + self.github_app_api = get_repository_github_app_api( + config_=self.config, repository_name=self.repository_full_name + ) if not self.github_app_api: LOGGER.error( f"Repository {self.repository_full_name} not found by manage-repositories-app, " @@ -311,61 +294,62 @@ def _repo_data_from_config(self) -> None: if not repo_data: raise RepositoryNotFoundError(f"Repository {self.repository_name} not found in config file") - self.github_app_id = get_value_from_dicts( + self.github_app_id: str = get_value_from_dicts( primary_dict=repo_data, secondary_dict=config_data, key="github-app-id" ) self.repository_full_name: str = repo_data["name"] - self.pypi = get_value_from_dicts(primary_dict=repo_data, secondary_dict=config_data, key="pypi") - self.verified_job = get_value_from_dicts( + self.pypi: Dict[str, str] = get_value_from_dicts(primary_dict=repo_data, 
secondary_dict=config_data, key="pypi") + self.verified_job: bool = get_value_from_dicts( primary_dict=repo_data, secondary_dict=config_data, key="verified-job", return_on_none=True, ) - self.tox_enabled = get_value_from_dicts(primary_dict=repo_data, secondary_dict=config_data, key="tox") - self.tox_python_version = get_value_from_dicts( + self.tox_enabled: bool = get_value_from_dicts(primary_dict=repo_data, secondary_dict=config_data, key="tox") + self.tox_python_version: str = get_value_from_dicts( primary_dict=repo_data, secondary_dict=config_data, key="tox-python-version", return_on_none="python", ) - self.slack_webhook_url = get_value_from_dicts( + self.slack_webhook_url: str = get_value_from_dicts( primary_dict=repo_data, secondary_dict=config_data, key="slack_webhook_url" ) - self.build_and_push_container: Optional[Dict[str, Any]] = repo_data.get("container") - self.pre_commit = get_value_from_dicts(primary_dict=repo_data, secondary_dict=config_data, key="pre-commit") - self.jira = get_value_from_dicts(primary_dict=repo_data, secondary_dict=config_data, key="jira") + self.build_and_push_container: Dict[str, Any] = repo_data.get("container", {}) + if self.build_and_push_container: + self.container_repository_username: str = self.build_and_push_container["username"] + self.container_repository_password: str = self.build_and_push_container["password"] + self.container_repository: str = self.build_and_push_container["repository"] + self.dockerfile: str = self.build_and_push_container.get("dockerfile", "Dockerfile") + self.container_tag: str = self.build_and_push_container.get("tag", "latest") + self.container_build_args: str = self.build_and_push_container.get("build-args", "") + self.container_command_args: str = self.build_and_push_container.get("args", "") + self.container_release: bool = self.build_and_push_container.get("release", False) + + self.pre_commit: bool = get_value_from_dicts( + primary_dict=repo_data, secondary_dict=config_data, key="pre-commit", return_on_none=False + ) + self.jira: Dict[str, Any] = get_value_from_dicts(primary_dict=repo_data, secondary_dict=config_data, key="jira") if self.jira: - self.jira_server: Optional[str] = self.jira.get("server") - self.jira_project: Optional[str] = self.jira.get("project") - self.jira_token: Optional[str] = self.jira.get("token") - self.jira_epic: Optional[str] = self.jira.get("epic") + self.jira_server: str = self.jira["server"] + self.jira_project: str = self.jira["project"] + self.jira_token: str = self.jira["token"] + self.jira_epic: Optional[str] = self.jira.get("epic", "") self.jira_user_mapping: Dict[str, str] = self.jira.get("user-mapping", {}) # Check if repository is enabled for jira - self.jira_tracking = get_value_from_dicts( + self.jira_tracking: bool = get_value_from_dicts( primary_dict=repo_data, secondary_dict=config_data, key="jira-tracking" ) if self.jira_tracking: self.jira_enabled_repository: bool = all([self.jira_server, self.jira_project, self.jira_token]) if not self.jira_enabled_repository: - # if not (self.jira_enabled_repository := all([self.jira_server, self.jira_project, self.jira_token])): LOGGER.error( f"{self.log_prefix} Jira configuration is not valid. 
Server: {self.jira_server}, " f"Project: {self.jira_project}, Token: {self.jira_token}" ) - if self.build_and_push_container: - self.container_repository_username: str = self.build_and_push_container["username"] - self.container_repository_password: str = self.build_and_push_container["password"] - self.container_repository: str = self.build_and_push_container["repository"] - self.dockerfile: str = self.build_and_push_container.get("dockerfile", "Dockerfile") - self.container_tag: str = self.build_and_push_container.get("tag", "latest") - self.container_build_args: Optional[str] = self.build_and_push_container.get("build-args") - self.container_command_args: Optional[str] = self.build_and_push_container.get("args") - self.container_release: bool = self.build_and_push_container.get("release") - self.auto_verified_and_merged_users = get_value_from_dicts( primary_dict=repo_data, secondary_dict=config_data, diff --git a/webhook_server_container/utils/github_repository_settings.py b/webhook_server_container/utils/github_repository_settings.py index 704b4a25..4a8d96e9 100644 --- a/webhook_server_container/utils/github_repository_settings.py +++ b/webhook_server_container/utils/github_repository_settings.py @@ -4,7 +4,11 @@ from copy import deepcopy from typing import Any, Dict, List, Optional, Tuple -from github import Branch, Commit, Github, GithubIntegration, Auth, Installation, Label, Repository +from github import Github, GithubIntegration, Auth +from github.Repository import Repository +from github.Branch import Branch +from github.Label import Label +from github.Commit import Commit from github.Auth import AppAuth from github.GithubException import UnknownObjectException from simple_logger.logger import get_logger @@ -276,7 +280,7 @@ def set_repository_check_runs_to_queued( config_: Config, data: Dict[str, Any], github_api: Github, check_runs: Tuple[str] ) -> Tuple[bool, str]: repository: str = data["name"] - repository_app_api = get_repository_github_app_api(config_=config_, repository=repository) + repository_app_api = get_repository_github_app_api(config_=config_, repository_name=repository) if not repository_app_api: return False, "Failed to get repositories GitHub app API" @@ -297,7 +301,7 @@ def set_repository_check_runs_to_queued( @ignore_exceptions(logger=LOGGER) -def get_repository_github_app_api(config_: Config, repository: Repository) -> Optional[Installation]: +def get_repository_github_app_api(config_: Config, repository_name: str) -> Optional[Github]: LOGGER.info("Getting repositories GitHub app API") with open(os.path.join(config_.data_dir, "webhook-server.private-key.pem")) as fd: private_key = fd.read() @@ -307,12 +311,12 @@ def get_repository_github_app_api(config_: Config, repository: Repository) -> Op app_instance: GithubIntegration = GithubIntegration(auth=auth) owner: str repo: str - owner, repo = repository.split("/") + owner, repo = repository_name.split("/") try: return app_instance.get_repo_installation(owner=owner, repo=repo).get_github_for_installation() except UnknownObjectException: LOGGER.error( - f"Repository {repository} not found by manage-repositories-app, " + f"Repository {repository_name} not found by manage-repositories-app, " f"make sure the app installed (https://github.com/apps/manage-repositories-app)" ) return None diff --git a/webhook_server_container/utils/helpers.py b/webhook_server_container/utils/helpers.py index affc3fe9..caaf628e 100644 --- a/webhook_server_container/utils/helpers.py +++ b/webhook_server_container/utils/helpers.py @@ -7,7 
+7,9 @@ from typing import Any, Dict, List, Optional, Tuple from pyhelper_utils.general import ignore_exceptions from colorama import Fore -from github import Github, RateLimit, Repository +from github import Github +from github.RateLimit import RateLimit +from github.Repository import Repository from simple_logger.logger import get_logger from webhook_server_container.libs.config import Config @@ -140,7 +142,9 @@ def get_api_with_highest_rate_limit(config: Config, repository_name: str = "") - LOGGER.info(f"API user {_api_user} remaining rate limit: {remaining}") api, token = _api, _token - log_rate_limit(rate_limit=rate_limit, api_user=_api_user) + if rate_limit: + log_rate_limit(rate_limit=rate_limit, api_user=_api_user) + LOGGER.info(f"API user {_api_user} selected with highest rate limit: {remaining}") return api, token From d5378ae675a792900a6f933d1b77e345146b1278 Mon Sep 17 00:00:00 2001 From: Meni Yakove Date: Wed, 3 Jul 2024 14:39:38 +0300 Subject: [PATCH 09/31] More typing --- webhook_server_container/libs/github_api.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/webhook_server_container/libs/github_api.py b/webhook_server_container/libs/github_api.py index 92016cfe..d5d2b68e 100644 --- a/webhook_server_container/libs/github_api.py +++ b/webhook_server_container/libs/github_api.py @@ -14,7 +14,8 @@ import requests import shortuuid import yaml -from github import Commit, GithubException +from github import GithubException +from github.Commit import Commit from github.PullRequest import PullRequest from github.GithubException import UnknownObjectException from simple_logger.logger import get_logger @@ -331,6 +332,7 @@ def _repo_data_from_config(self) -> None: ) self.jira: Dict[str, Any] = get_value_from_dicts(primary_dict=repo_data, secondary_dict=config_data, key="jira") + self.jira_enabled_repository: bool = False if self.jira: self.jira_server: str = self.jira["server"] self.jira_project: str = self.jira["project"] @@ -343,7 +345,7 @@ def _repo_data_from_config(self) -> None: primary_dict=repo_data, secondary_dict=config_data, key="jira-tracking" ) if self.jira_tracking: - self.jira_enabled_repository: bool = all([self.jira_server, self.jira_project, self.jira_token]) + self.jira_enabled_repository = all([self.jira_server, self.jira_project, self.jira_token]) if not self.jira_enabled_repository: LOGGER.error( f"{self.log_prefix} Jira configuration is not valid. 
Server: {self.jira_server}, " From 9d4ec1f28155b766fe60039b1c51b1097fa82a88 Mon Sep 17 00:00:00 2001 From: Meni Yakove Date: Wed, 3 Jul 2024 14:47:07 +0300 Subject: [PATCH 10/31] More typing --- webhook_server_container/libs/github_api.py | 25 +++++++++------------ 1 file changed, 11 insertions(+), 14 deletions(-) diff --git a/webhook_server_container/libs/github_api.py b/webhook_server_container/libs/github_api.py index d5d2b68e..b77b8301 100644 --- a/webhook_server_container/libs/github_api.py +++ b/webhook_server_container/libs/github_api.py @@ -331,26 +331,23 @@ def _repo_data_from_config(self) -> None: primary_dict=repo_data, secondary_dict=config_data, key="pre-commit", return_on_none=False ) - self.jira: Dict[str, Any] = get_value_from_dicts(primary_dict=repo_data, secondary_dict=config_data, key="jira") self.jira_enabled_repository: bool = False - if self.jira: + self.jira_tracking: bool = get_value_from_dicts( + primary_dict=repo_data, secondary_dict=config_data, key="jira-tracking" + ) + self.jira: Dict[str, Any] = get_value_from_dicts(primary_dict=repo_data, secondary_dict=config_data, key="jira") + if self.jira_tracking and self.jira: self.jira_server: str = self.jira["server"] self.jira_project: str = self.jira["project"] self.jira_token: str = self.jira["token"] self.jira_epic: Optional[str] = self.jira.get("epic", "") self.jira_user_mapping: Dict[str, str] = self.jira.get("user-mapping", {}) - - # Check if repository is enabled for jira - self.jira_tracking: bool = get_value_from_dicts( - primary_dict=repo_data, secondary_dict=config_data, key="jira-tracking" - ) - if self.jira_tracking: - self.jira_enabled_repository = all([self.jira_server, self.jira_project, self.jira_token]) - if not self.jira_enabled_repository: - LOGGER.error( - f"{self.log_prefix} Jira configuration is not valid. Server: {self.jira_server}, " - f"Project: {self.jira_project}, Token: {self.jira_token}" - ) + self.jira_enabled_repository = all([self.jira_server, self.jira_project, self.jira_token]) + if not self.jira_enabled_repository: + LOGGER.error( + f"{self.log_prefix} Jira configuration is not valid. 
Server: {self.jira_server}, " + f"Project: {self.jira_project}, Token: {self.jira_token}" + ) self.auto_verified_and_merged_users = get_value_from_dicts( primary_dict=repo_data, From 5ea2f1e1284973c2aa56397cec3fd3ecade8551f Mon Sep 17 00:00:00 2001 From: Meni Yakove Date: Wed, 3 Jul 2024 15:22:15 +0300 Subject: [PATCH 11/31] More typing --- webhook_server_container/libs/github_api.py | 89 ++++++++++----------- 1 file changed, 43 insertions(+), 46 deletions(-) diff --git a/webhook_server_container/libs/github_api.py b/webhook_server_container/libs/github_api.py index b77b8301..c5269736 100644 --- a/webhook_server_container/libs/github_api.py +++ b/webhook_server_container/libs/github_api.py @@ -10,7 +10,7 @@ from typing import Any, Dict, List, Optional, Tuple from fastapi import FastAPI -from jira import JIRA +from github.ContentFile import ContentFile import requests import shortuuid import yaml @@ -88,7 +88,7 @@ def __init__(self, hook_data): self.parent_committer: str = "" self.log_uuid: str = shortuuid.uuid()[:5] self.container_repo_dir: str = "/tmp/repository" - self.jira_conn: Optional[JIRA] = None + jira_conn: Optional[JiraApi] = None self.jira_track_pr: bool = False self.issue_title: str = "" self.all_required_status_checks: List[str] = [] @@ -119,7 +119,7 @@ def __init__(self, hook_data): return self.add_api_users_to_auto_verified_and_merged_users() - self.clone_repository_path = os.path.join("/", self.repository.name) + self.clone_repository_path: str = os.path.join("/", self.repository.name) self.pull_request = self._get_pull_request() self.owners_content = self.get_owners_content() @@ -208,18 +208,19 @@ def add_api_users_to_auto_verified_and_merged_users(self) -> None: def _set_log_prefix_color(self) -> None: repo_str: str = "\033[1;{color}m{name}\033[1;0m" color_file: str = "/tmp/color.json" + color_json: Dict[str, int] try: with open(color_file) as fd: color_json = json.load(fd) except Exception: - color_json: Dict[str, int] = {} + color_json = {} - color = color_json.get(self.repository_name) + color: int = color_json.get(self.repository_name, 0) if not color: color: int = random.choice(range(31, 39)) color_json[self.repository_name] = color - self.log_prefix_with_color = repo_str.format(color=color, name=self.repository_name) + self.log_prefix_with_color: str = repo_str.format(color=color, name=self.repository_name) with open(color_file, "w") as fd: json.dump(color_json, fd) @@ -372,9 +373,9 @@ def _get_pull_request(self, number: Optional[int] = None) -> Optional[PullReques except GithubException: continue - commit = self.hook_data.get("commit") + commit: Dict[str, Any] = self.hook_data.get("commit", {}) if commit: - commit_obj: Commit = self.repository.get_commit(commit["sha"]) + commit_obj = self.repository.get_commit(commit["sha"]) with contextlib.suppress(Exception): return commit_obj.get_pulls()[0] @@ -495,10 +496,10 @@ def upload_to_pypi(self, tag_name): """, ) - def get_owners_content(self): + def get_owners_content(self) -> Dict[str, Any]: try: - owners_content = self.repository.get_contents("OWNERS") - _content = yaml.safe_load(owners_content.decoded_content) + owners_content: ContentFile = self.repository.get_contents("OWNERS") + _content: Dict[str, Any] = yaml.safe_load(owners_content.decoded_content) LOGGER.info(f"{self.log_prefix} OWNERS file content: {_content}") return _content except UnknownObjectException: @@ -506,10 +507,10 @@ def get_owners_content(self): return {} @property - def reviewers(self): - bc_reviewers = self.owners_content.get("reviewers", []) + 
def reviewers(self) -> List[str]: + bc_reviewers: List[str] = self.owners_content.get("reviewers", []) if isinstance(bc_reviewers, dict): - _reviewers = self.owners_content.get("reviewers", {}).get("any", []) + _reviewers: List[str] = self.owners_content.get("reviewers", {}).get("any", []) else: _reviewers = bc_reviewers @@ -517,27 +518,23 @@ def reviewers(self): return _reviewers @property - def files_reviewers(self): - _reviewers = self.owners_content.get("reviewers") - if isinstance(_reviewers, dict): - return _reviewers.get("files", {}) - return {} + def files_reviewers(self) -> Dict[str, str]: + _reviewers: Dict[str, Any] = self.owners_content.get("reviewers", {}) + return _reviewers.get("files", {}) @property - def folders_reviewers(self): - _reviewers = self.owners_content.get("reviewers") - if isinstance(_reviewers, dict): - return _reviewers.get("folders", {}) - return {} + def folders_reviewers(self) -> Dict[str, str]: + _reviewers: Dict[str, Any] = self.owners_content.get("reviewers", {}) + return _reviewers.get("folders", {}) @property - def approvers(self): + def approvers(self) -> List[str]: return self.owners_content.get("approvers", []) - def list_changed_commit_files(self): + def list_changed_commit_files(self) -> list[str]: return [fd["filename"] for fd in self.last_commit.raw_data["files"]] - def assign_reviewers(self): + def assign_reviewers(self) -> None: LOGGER.info(f"{self.log_prefix} Assign reviewers") changed_files = self.list_changed_commit_files() reviewers_to_add = self.reviewers @@ -549,7 +546,7 @@ def assign_reviewers(self): if any(cf for cf in changed_files if _folder in str(Path(cf).parent)): reviewers_to_add.extend(_reviewers) - _to_add = list(set(reviewers_to_add)) + _to_add: List[str] = list(set(reviewers_to_add)) LOGGER.info(f"{self.log_prefix} Reviewers to add: {_to_add}") for reviewer in _to_add: if reviewer != self.pull_request.user.login: @@ -732,7 +729,7 @@ def set_cherry_pick_failure(self, output): return self.set_check_run_status(check_run=CHERRY_PICKED_LABEL_PREFIX, conclusion=FAILURE_STR, output=output) @ignore_exceptions(logger=LOGGER) - def create_issue_for_new_pull_request(self): + def create_issue_for_new_pull_request(self) -> None: if self.parent_committer in self.auto_verified_and_merged_users: LOGGER.info( f"{self.log_prefix} Committer {self.parent_committer} is part of " @@ -839,14 +836,14 @@ def process_comment_webhook_data(self): ) def process_pull_request_webhook_data(self): - hook_action = self.hook_data["action"] + hook_action: str = self.hook_data["action"] LOGGER.info(f"{self.log_prefix} hook_action is: {hook_action}") if not self.pull_request: return - pull_request_data = self.hook_data["pull_request"] + pull_request_data: Dict[str, Any] = self.hook_data["pull_request"] self.parent_committer = pull_request_data["user"]["login"] - self.pull_request_branch = pull_request_data["base"]["ref"] + self.pull_request_branch: str = pull_request_data["base"]["ref"] if hook_action == "opened": LOGGER.info(f"{self.log_prefix} Creating welcome comment") @@ -854,13 +851,13 @@ def process_pull_request_webhook_data(self): self.create_issue_for_new_pull_request() if self.jira_track_pr: - self.get_jira_conn() - if not self.jira_conn: + jira_conn = jira_conn = self.get_jira_conn() + if not jira_conn: LOGGER.error(f"{self.log_prefix} Jira connection not found") return LOGGER.info(f"{self.log_prefix} Creating Jira story") - jira_story_key = self.jira_conn.create_story( + jira_story_key = jira_conn.create_story( title=self.issue_title, 
body=self.pull_request.html_url, epic_key=self.jira_epic, @@ -883,7 +880,7 @@ def process_pull_request_webhook_data(self): if self.jira_track_pr: if _story_key := self.get_story_key_with_jira_connection(): LOGGER.info(f"{self.log_prefix} Creating sub-task for Jira story {_story_key}") - self.jira_conn.create_closed_subtask( + jira_conn.create_closed_subtask( title=f"{self.issue_title}: New commit from {self.last_committer}", parent_key=_story_key, assignee=self.jira_assignee, @@ -900,7 +897,7 @@ def process_pull_request_webhook_data(self): if self.jira_track_pr: if _story_key := self.get_story_key_with_jira_connection(): LOGGER.info(f"{self.log_prefix} Closing Jira story") - self.jira_conn.close_issue( + jira_conn.close_issue( key=_story_key, comment=f"PR: {self.pull_request.title} is closed. Megred: {is_merged}", ) @@ -981,13 +978,13 @@ def process_pull_request_review_webhook_data(self): return _story_key = _story_label[0].name.split(":")[-1] - self.get_jira_conn() - if not self.jira_conn: + jira_conn = self.get_jira_conn() + if not jira_conn: LOGGER.error(f"{self.log_prefix} Jira connection not found") return LOGGER.info(f"{self.log_prefix} Creating sub-task for Jira story {_story_key}") - self.jira_conn.create_closed_subtask( + jira_conn.create_closed_subtask( title=f"{self.issue_title}: reviewed by: {reviewed_user} - {review_state}", parent_key=_story_key, assignee=self.jira_user_mapping.get(reviewed_user, self.parent_committer), @@ -1751,7 +1748,7 @@ def get_check_run_text(err, out): @ignore_exceptions(logger=LOGGER) def get_jira_conn(self): - self.jira_conn = JiraApi( + return JiraApi( server=self.jira_server, project=self.jira_project, token=self.jira_token, @@ -1783,14 +1780,14 @@ def get_story_key_with_jira_connection(self): return None if _story_key := _story_label[0].name.split(":")[-1]: - self.get_jira_conn() - if not self.jira_conn: + jira_conn = self.get_jira_conn() + if not jira_conn: LOGGER.error(f"{self.log_prefix} Jira connection not found") return None return _story_key @ignore_exceptions(logger=LOGGER, return_on_error=[]) - def get_branch_required_status_checks(self): + def get_branch_required_status_checks(self) -> List[str]: if self.repository.private: LOGGER.info( f"{self.log_prefix} Repository is private, skipping getting branch protection required status checks" @@ -1801,8 +1798,8 @@ def get_branch_required_status_checks(self): branch_protection = pull_request_branch.get_protection() return branch_protection.required_status_checks.contexts - def get_all_required_status_checks(self): - all_required_status_checks = [] + def get_all_required_status_checks(self) -> List[str]: + all_required_status_checks: List[str] = [] branch_required_status_checks = self.get_branch_required_status_checks() if self.tox_enabled: all_required_status_checks.append(TOX_STR) From a2f9e22d833ae2d3fa0b996bf3c9ef27a18c347d Mon Sep 17 00:00:00 2001 From: Meni Yakove Date: Wed, 3 Jul 2024 15:49:56 +0300 Subject: [PATCH 12/31] More typing --- webhook_server_container/libs/github_api.py | 127 ++++++++++---------- 1 file changed, 65 insertions(+), 62 deletions(-) diff --git a/webhook_server_container/libs/github_api.py b/webhook_server_container/libs/github_api.py index c5269736..1a529ea1 100644 --- a/webhook_server_container/libs/github_api.py +++ b/webhook_server_container/libs/github_api.py @@ -5,7 +5,7 @@ import random import re import time -from concurrent.futures import ThreadPoolExecutor, as_completed +from concurrent.futures import Future, ThreadPoolExecutor, as_completed from pathlib 
import Path from typing import Any, Dict, List, Optional, Tuple @@ -88,7 +88,6 @@ def __init__(self, hook_data): self.parent_committer: str = "" self.log_uuid: str = shortuuid.uuid()[:5] self.container_repo_dir: str = "/tmp/repository" - jira_conn: Optional[JiraApi] = None self.jira_track_pr: bool = False self.issue_title: str = "" self.all_required_status_checks: List[str] = [] @@ -141,8 +140,8 @@ def __init__(self, hook_data): f"Committer {self.parent_committer} is not in configures in jira-user-mapping" ) else: - self.jira_track_pr: bool = True - self.issue_title: str = ( + self.jira_track_pr = True + self.issue_title = ( f"[AUTO:FROM:GITHUB] [{self.repository_name}] " f"PR [{self.pull_request.number}]: {self.pull_request.title}" ) @@ -217,10 +216,10 @@ def _set_log_prefix_color(self) -> None: color: int = color_json.get(self.repository_name, 0) if not color: - color: int = random.choice(range(31, 39)) + color = random.choice(range(31, 39)) color_json[self.repository_name] = color - self.log_prefix_with_color: str = repo_str.format(color=color, name=self.repository_name) + self.log_prefix_with_color = repo_str.format(color=color, name=self.repository_name) with open(color_file, "w") as fd: json.dump(color_json, fd) @@ -843,7 +842,7 @@ def process_pull_request_webhook_data(self): pull_request_data: Dict[str, Any] = self.hook_data["pull_request"] self.parent_committer = pull_request_data["user"]["login"] - self.pull_request_branch: str = pull_request_data["base"]["ref"] + self.pull_request_branch = pull_request_data["base"]["ref"] if hook_action == "opened": LOGGER.info(f"{self.log_prefix} Creating welcome comment") @@ -1471,7 +1470,7 @@ def _comment_with_details(title, body): """ - def _container_repository_and_tag(self, is_merged=None, tag=None): + def _container_repository_and_tag(self, is_merged: bool = False, tag: str = "") -> str: if not tag: if is_merged: tag = ( @@ -1488,43 +1487,44 @@ def _container_repository_and_tag(self, is_merged=None, tag=None): return f"{self.container_repository}:{tag}" LOGGER.error(f"{self.log_prefix} container tag not found") + return f"{self.container_repository}:webhook-server-tag-not-found" @ignore_exceptions(logger=LOGGER) def _run_build_container( self, - set_check=True, - push=False, - is_merged=False, - tag=None, - ): + set_check: bool = True, + push: bool = False, + is_merged: bool = False, + tag: str = "", + ) -> None: if not self.build_and_push_container: - return False + return if set_check: if self.is_check_run_in_progress(check_run=BUILD_CONTAINER_STR) and not is_merged: LOGGER.info(f"{self.log_prefix} Check run is in progress, not running {BUILD_CONTAINER_STR}.") - return False + return self.set_container_build_in_progress() _container_repository_and_tag = self._container_repository_and_tag(is_merged=is_merged, tag=tag) - no_cache = " --no-cache" if is_merged else "" - build_cmd = f"--network=host {no_cache} -f {self.container_repo_dir}/{self.dockerfile} . -t {_container_repository_and_tag}" + no_cache: str = " --no-cache" if is_merged else "" + build_cmd: str = f"--network=host {no_cache} -f {self.container_repo_dir}/{self.dockerfile} . 
-t {_container_repository_and_tag}" if self.container_build_args: - build_args = [f"--build-arg {b_arg}" for b_arg in self.container_build_args][0] + build_args: str = [f"--build-arg {b_arg}" for b_arg in self.container_build_args][0] build_cmd = f"{build_args} {build_cmd}" if self.container_command_args: build_cmd = f"{' '.join(self.container_command_args)} {build_cmd}" if push: - repository_creds = f"{self.container_repository_username}:{self.container_repository_password}" + repository_creds: str = f"{self.container_repository_username}:{self.container_repository_password}" build_cmd += f" && podman push --creds {repository_creds} {_container_repository_and_tag}" - podman_build_cmd = f"podman build {build_cmd}" + podman_build_cmd: str = f"podman build {build_cmd}" rc, out, err = self._run_in_container(command=podman_build_cmd, is_merged=is_merged, tag_name=tag) - output = { + output: Dict[str, str] = { "title": "Build container", "summary": "", "text": self.get_check_run_text(err=err, out=out), @@ -1533,10 +1533,12 @@ def _run_build_container( LOGGER.info(f"{self.log_prefix} Done building {_container_repository_and_tag}") if self.pull_request and set_check: return self.set_container_build_success(output=output) + if push: - push_msg = f"New container for {_container_repository_and_tag} published" + push_msg: str = f"New container for {_container_repository_and_tag} published" if self.pull_request: self.pull_request.create_issue_comment(push_msg) + if self.slack_webhook_url: message = f""" ``` @@ -1548,7 +1550,7 @@ def _run_build_container( LOGGER.info(f"{self.log_prefix} Done push {_container_repository_and_tag}") else: if push: - err_msg = f"Failed to create and push {_container_repository_and_tag}" + err_msg: str = f"Failed to create and push {_container_repository_and_tag}" if self.pull_request: self.pull_request.create_issue_comment(err_msg) if self.slack_webhook_url: @@ -1558,22 +1560,23 @@ def _run_build_container( ``` """ self.send_slack_message(message=message, webhook_url=self.slack_webhook_url) + if self.pull_request and set_check: return self.set_container_build_failure(output=output) - def _run_install_python_module(self): + def _run_install_python_module(self) -> None: if not self.pypi: - return False + return if self.is_check_run_in_progress(check_run=PYTHON_MODULE_INSTALL_STR): LOGGER.info(f"{self.log_prefix} Check run is in progress, not running {PYTHON_MODULE_INSTALL_STR}.") - return False + return LOGGER.info(f"{self.log_prefix} Installing python module") f"{PYTHON_MODULE_INSTALL_STR}-{shortuuid.uuid()}" self.set_python_module_install_in_progress() rc, out, err = self._run_in_container(command="pip install .") - output = { + output: Dict[str, str] = { "title": "Python module installation", "summary": "", "text": self.get_check_run_text(err=err, out=out), @@ -1583,10 +1586,10 @@ def _run_install_python_module(self): return self.set_python_module_install_failure(output=output) - def send_slack_message(self, message, webhook_url): - slack_data = {"text": message} + def send_slack_message(self, message: str, webhook_url: str) -> None: + slack_data: Dict[str, str] = {"text": message} LOGGER.info(f"{self.log_prefix} Sending message to slack: {message}") - response = requests.post( + response: requests.Response = requests.post( webhook_url, data=json.dumps(slack_data), headers={"Content-Type": "application/json"}, @@ -1597,7 +1600,7 @@ def send_slack_message(self, message, webhook_url): f"{response.text}" ) - def _process_verified(self): + def _process_verified(self) -> None: 
if not self.verified_job: return @@ -1612,11 +1615,11 @@ def _process_verified(self): self.reset_verify_label() self.set_verify_check_queued() - def create_comment_reaction(self, issue_comment_id, reaction): + def create_comment_reaction(self, issue_comment_id: str, reaction: str) -> None: _comment = self.pull_request.get_issue_comment(issue_comment_id) _comment.create_reaction(reaction) - def process_opened_or_synchronize_pull_request(self): + def process_opened_or_synchronize_pull_request(self) -> None: self.set_merge_check_queued() self.set_run_tox_check_queued() self.set_run_pre_commit_check_queued() @@ -1636,7 +1639,7 @@ def process_opened_or_synchronize_pull_request(self): self.assign_reviewers() self.label_pull_request_by_merge_state() - futures = [] + futures: List[Future] = [] with ThreadPoolExecutor() as executor: futures.append(executor.submit(self._run_tox)) futures.append(executor.submit(self._run_pre_commit)) @@ -1648,7 +1651,7 @@ def process_opened_or_synchronize_pull_request(self): LOGGER.error(f"{self.log_prefix} {result.exception()}") LOGGER.info(f"{self.log_prefix} {result.result()}") - def is_check_run_in_progress(self, check_run): + def is_check_run_in_progress(self, check_run: str) -> bool: for run in self.last_commit.get_check_runs(): if run.name == check_run and run.status == IN_PROGRESS_STR: return True @@ -1660,7 +1663,7 @@ def set_check_run_status( status: str = "", conclusion: str = "", output: str = "", - ): + ) -> None: kwargs: Dict[str, str] = {"name": check_run, "head_sha": self.last_commit.sha} if status: @@ -1739,51 +1742,51 @@ def _run_in_container( return run_command(command=podman_base_cmd, log_prefix=self.log_prefix) @staticmethod - def get_check_run_text(err, out): - total_len = len(err) + len(out) + def get_check_run_text(err: str, out: str) -> str: + total_len: int = len(err) + len(out) if total_len > 65534: # GitHub limit is 65535 characters return f"```\n{err}\n\n{out}\n```"[:65534] else: return f"```\n{err}\n\n{out}\n```" @ignore_exceptions(logger=LOGGER) - def get_jira_conn(self): + def get_jira_conn(self) -> JiraApi: return JiraApi( server=self.jira_server, project=self.jira_project, token=self.jira_token, ) - def log_repository_features(self): - repository_features = f""" - auto-verified-and-merged-users: {self.auto_verified_and_merged_users} - can-be-merged-required-labels: {self.can_be_merged_required_labels} - pypi: {self.pypi} - verified-job: {self.verified_job} - tox-enabled: {self.tox_enabled} - tox-python-version: {self.tox_python_version} - pre-commit: {self.pre_commit} - slack-webhook-url: {self.slack_webhook_url} - container: {self.build_and_push_container} - jira-tracking: {self.jira_tracking} - jira-server: {self.jira_server} - jira-project: {self.jira_project} - jira-token: {self.jira_token} - jira-enabled-repository: {self.jira_enabled_repository} - jira-user-mapping: {self.jira_user_mapping} -""" - LOGGER.info(f"{self.log_prefix} Repository features: {repository_features}") - - def get_story_key_with_jira_connection(self): + # def log_repository_features(self): + # repository_features = f""" + # auto-verified-and-merged-users: {self.auto_verified_and_merged_users} + # can-be-merged-required-labels: {self.can_be_merged_required_labels} + # pypi: {self.pypi} + # verified-job: {self.verified_job} + # tox-enabled: {self.tox_enabled} + # tox-python-version: {self.tox_python_version} + # pre-commit: {self.pre_commit} + # slack-webhook-url: {self.slack_webhook_url} + # container: {self.build_and_push_container} + # jira-tracking: 
{self.jira_tracking} + # jira-server: {self.jira_server} + # jira-project: {self.jira_project} + # jira-token: {self.jira_token} + # jira-enabled-repository: {self.jira_enabled_repository} + # jira-user-mapping: {self.jira_user_mapping} + # """ + # LOGGER.info(f"{self.log_prefix} Repository features: {repository_features}") + + def get_story_key_with_jira_connection(self) -> str: _story_label = [_label for _label in self.pull_request.labels if _label.name.startswith(JIRA_STR)] if not _story_label: - return None + return "" if _story_key := _story_label[0].name.split(":")[-1]: jira_conn = self.get_jira_conn() if not jira_conn: LOGGER.error(f"{self.log_prefix} Jira connection not found") - return None + return "" return _story_key @ignore_exceptions(logger=LOGGER, return_on_error=[]) From 2dfc3e04ba20c2b2737a5f09ee1b5a3945d6ddcf Mon Sep 17 00:00:00 2001 From: rnetser Date: Wed, 3 Jul 2024 17:11:55 +0300 Subject: [PATCH 13/31] update return type --- webhook_server_container/libs/github_api.py | 116 ++++++++++++-------- 1 file changed, 70 insertions(+), 46 deletions(-) diff --git a/webhook_server_container/libs/github_api.py b/webhook_server_container/libs/github_api.py index 1a529ea1..17b91de5 100644 --- a/webhook_server_container/libs/github_api.py +++ b/webhook_server_container/libs/github_api.py @@ -10,6 +10,7 @@ from typing import Any, Dict, List, Optional, Tuple from fastapi import FastAPI +from github.Branch import Branch from github.ContentFile import ContentFile import requests import shortuuid @@ -306,7 +307,7 @@ def _repo_data_from_config(self) -> None: key="verified-job", return_on_none=True, ) - self.tox_enabled: bool = get_value_from_dicts(primary_dict=repo_data, secondary_dict=config_data, key="tox") + self.tox_enabled: str = get_value_from_dicts(primary_dict=repo_data, secondary_dict=config_data, key="tox") self.tox_python_version: str = get_value_from_dicts( primary_dict=repo_data, secondary_dict=config_data, @@ -328,7 +329,10 @@ def _repo_data_from_config(self) -> None: self.container_release: bool = self.build_and_push_container.get("release", False) self.pre_commit: bool = get_value_from_dicts( - primary_dict=repo_data, secondary_dict=config_data, key="pre-commit", return_on_none=False + primary_dict=repo_data, + secondary_dict=config_data, + key="pre-commit", + return_on_none=False, ) self.jira_enabled_repository: bool = False @@ -379,6 +383,7 @@ def _get_pull_request(self, number: Optional[int] = None) -> Optional[PullReques return commit_obj.get_pulls()[0] LOGGER.info(f"{self.log_prefix} No issue or pull_request found in hook data") + return None def _get_last_commit(self) -> Commit: return list(self.pull_request.get_commits())[-1] @@ -387,10 +392,14 @@ def label_exists_in_pull_request(self, label: str) -> bool: return any(lb for lb in self.pull_request_labels_names() if lb == label) def pull_request_labels_names(self) -> List[str]: - return [lb.name for lb in self._get_pull_request(number=self.pull_request.number).labels] + return ( + [lb.name for lb in self._get_pull_request(number=self.pull_request.number).labels] + if self.pull_request + else [] + ) def skip_if_pull_request_already_merged(self) -> bool: - if self.pull_request.is_merged(): + if self.pull_request and self.pull_request.is_merged(): LOGGER.info(f"{self.log_prefix}: PR is merged, not processing") return True @@ -437,7 +446,7 @@ def _add_label(self, label): self.pull_request.add_to_labels(label) return self.wait_for_label(label=label, exists=True) - def wait_for_label(self, label, exists): + def 
wait_for_label(self, label: str, exists: bool) -> bool: try: for sample in TimeoutSampler( wait_timeout=30, @@ -449,24 +458,25 @@ def wait_for_label(self, label, exists): return True except TimeoutExpiredError: LOGGER.warning(f"{self.log_prefix} Label {label} {'not found' if exists else 'found'}") + return False - def _generate_issue_title(self): + def _generate_issue_title(self) -> str: return f"{self.pull_request.title} - {self.pull_request.number}" - def _generate_issue_body(self): + def _generate_issue_body(self) -> str: return f"[Auto generated]\nNumber: [#{self.pull_request.number}]" @ignore_exceptions(logger=LOGGER) - def is_branch_exists(self, branch): + def is_branch_exists(self, branch: str) -> Branch: return self.repository.get_branch(branch) - def upload_to_pypi(self, tag_name): - out, err = "", "" - token = self.pypi["token"] - env = f"-e TWINE_USERNAME=__token__ -e TWINE_PASSWORD={token} " + def upload_to_pypi(self, tag_name: str) -> None: + out: str = "" + token: str = self.pypi["token"] + env: str = f"-e TWINE_USERNAME=__token__ -e TWINE_PASSWORD={token} " LOGGER.info(f"{self.log_prefix} Start uploading to pypi") - _dist_dir = "/tmp/dist" - cmd = ( + _dist_dir: str = "/tmp/dist" + cmd: str = ( f" python3 -m build --sdist --outdir {_dist_dir} ." f" && twine check {_dist_dir}/$(echo *.tar.gz)" f" && twine upload {_dist_dir}/$(echo *.tar.gz) --skip-existing" @@ -476,7 +486,7 @@ def upload_to_pypi(self, tag_name): if rc: LOGGER.info(f"{self.log_prefix} Publish to pypi finished") if self.slack_webhook_url: - message = f""" + message: str = f""" ``` {self.repository_name} Version {tag_name} published to PYPI. ``` @@ -484,7 +494,7 @@ def upload_to_pypi(self, tag_name): self.send_slack_message(message=message, webhook_url=self.slack_webhook_url) except Exception as exp: - err = f"Publish to pypi failed: {exp}" + err: str = f"Publish to pypi failed: {exp}" LOGGER.error(f"{self.log_prefix} {err}") self.repository.create_issue( title=err, @@ -795,7 +805,8 @@ def delete_remote_tag_for_merged_or_closed_pr(self): LOGGER.error(f"{self.log_prefix} Failed to delete tag: {repository_full_tag}. OUT:{out}. ERR:{err}") else: LOGGER.warning( - f"{self.log_prefix} {pr_tag} tag not found in registry {self.container_repository}. OUT:{out}. ERR:{err}" + f"{self.log_prefix} {pr_tag} tag not found in registry {self.container_repository}. " + f"OUT:{out}. 
ERR:{err}" ) else: self.pull_request.create_issue_comment( @@ -849,22 +860,22 @@ def process_pull_request_webhook_data(self): self.pull_request.create_issue_comment(self.welcome_msg) self.create_issue_for_new_pull_request() + self.process_opened_or_synchronize_pull_request() + if self.jira_track_pr: - jira_conn = jira_conn = self.get_jira_conn() + jira_conn = self.get_jira_conn() if not jira_conn: LOGGER.error(f"{self.log_prefix} Jira connection not found") - return - - LOGGER.info(f"{self.log_prefix} Creating Jira story") - jira_story_key = jira_conn.create_story( - title=self.issue_title, - body=self.pull_request.html_url, - epic_key=self.jira_epic, - assignee=self.jira_assignee, - ) - self._add_label(label=f"{JIRA_STR}:{jira_story_key}") - self.process_opened_or_synchronize_pull_request() + else: + LOGGER.info(f"{self.log_prefix} Creating Jira story") + jira_story_key = jira_conn.create_story( + title=self.issue_title, + body=self.pull_request.html_url, + epic_key=self.jira_epic, + assignee=self.jira_assignee, + ) + self._add_label(label=f"{JIRA_STR}:{jira_story_key}") if hook_action == "synchronize": for _label in self.pull_request.labels: @@ -876,31 +887,28 @@ def process_pull_request_webhook_data(self): ): self._remove_label(label=_label_name) + self.process_opened_or_synchronize_pull_request() + if self.jira_track_pr: - if _story_key := self.get_story_key_with_jira_connection(): - LOGGER.info(f"{self.log_prefix} Creating sub-task for Jira story {_story_key}") - jira_conn.create_closed_subtask( - title=f"{self.issue_title}: New commit from {self.last_committer}", - parent_key=_story_key, - assignee=self.jira_assignee, - body=f"PR: {self.pull_request.title}, new commit pushed by {self.last_committer}", - ) + jira_conn = self.get_jira_conn() + if not jira_conn: + LOGGER.error(f"{self.log_prefix} Jira connection not found") - self.process_opened_or_synchronize_pull_request() + else: + if _story_key := self.get_story_key_with_jira_connection(): + LOGGER.info(f"{self.log_prefix} Creating sub-task for Jira story {_story_key}") + jira_conn.create_closed_subtask( + title=f"{self.issue_title}: New commit from {self.last_committer}", + parent_key=_story_key, + assignee=self.jira_assignee, + body=f"PR: {self.pull_request.title}, new commit pushed by {self.last_committer}", + ) if hook_action == "closed": self.close_issue_for_merged_or_closed_pr(hook_action=hook_action) self.delete_remote_tag_for_merged_or_closed_pr() is_merged = pull_request_data.get("merged") - if self.jira_track_pr: - if _story_key := self.get_story_key_with_jira_connection(): - LOGGER.info(f"{self.log_prefix} Closing Jira story") - jira_conn.close_issue( - key=_story_key, - comment=f"PR: {self.pull_request.title} is closed. Megred: {is_merged}", - ) - if is_merged: LOGGER.info(f"{self.log_prefix} PR is merged") @@ -920,6 +928,19 @@ def process_pull_request_webhook_data(self): self.label_by_pull_requests_merge_state_after_merged() self.pull_request = original_pull_request + if self.jira_track_pr: + jira_conn = self.get_jira_conn() + if not jira_conn: + LOGGER.error(f"{self.log_prefix} Jira connection not found") + + else: + if _story_key := self.get_story_key_with_jira_connection(): + LOGGER.info(f"{self.log_prefix} Closing Jira story") + jira_conn.close_issue( + key=_story_key, + comment=f"PR: {self.pull_request.title} is closed. 
Merged: {is_merged}", + ) + if hook_action in ("labeled", "unlabeled"): action_labeled = hook_action == "labeled" labeled = self.hook_data["label"]["name"].lower() @@ -1778,6 +1799,9 @@ def get_jira_conn(self) -> JiraApi: # LOGGER.info(f"{self.log_prefix} Repository features: {repository_features}") def get_story_key_with_jira_connection(self) -> str: + if not self.pull_request: + return "" + _story_label = [_label for _label in self.pull_request.labels if _label.name.startswith(JIRA_STR)] if not _story_label: return "" From 456ef89bc3598f0784c4078ea79e2b95f8aaa88f Mon Sep 17 00:00:00 2001 From: rnetser Date: Wed, 3 Jul 2024 17:13:02 +0300 Subject: [PATCH 14/31] update return type --- webhook_server_container/libs/github_api.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/webhook_server_container/libs/github_api.py b/webhook_server_container/libs/github_api.py index 17b91de5..69ac2153 100644 --- a/webhook_server_container/libs/github_api.py +++ b/webhook_server_container/libs/github_api.py @@ -406,13 +406,14 @@ def skip_if_pull_request_already_merged(self) -> bool: return False @ignore_exceptions(logger=LOGGER) - def _remove_label(self, label: str): + def _remove_label(self, label: str) -> bool: if self.label_exists_in_pull_request(label=label): LOGGER.info(f"{self.log_prefix} Removing label {label}") self.pull_request.remove_from_labels(label) return self.wait_for_label(label=label, exists=False) LOGGER.warning(f"{self.log_prefix} Label {label} not found and cannot be removed") + return False @ignore_exceptions(logger=LOGGER) def _add_label(self, label): From bafe8307265490bb9cd6d0f788db86956cfb9c6d Mon Sep 17 00:00:00 2001 From: Meni Yakove Date: Sun, 7 Jul 2024 15:10:43 +0300 Subject: [PATCH 15/31] More typing --- pyproject.toml | 15 +- webhook_server_container/app.py | 10 +- webhook_server_container/libs/github_api.py | 340 ++++++++++---------- webhook_server_container/utils/webhook.py | 5 +- 4 files changed, 181 insertions(+), 189 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 22dedcb0..9e61dbcd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -11,7 +11,7 @@ exclude = [".git", ".venv", ".mypy_cache", ".tox", "__pycache__"] check_untyped_defs = true disallow_any_generics = false disallow_incomplete_defs = true -# disallow_untyped_defs = true +disallow_untyped_defs = true no_implicit_optional = true show_error_codes = true warn_unused_ignores = true @@ -20,15 +20,18 @@ warn_unused_ignores = true name = "github-webhook-server" version = "0.0.0" description = "A webhook server to manage Github reposotories and pull requests." 
-authors = ["Meni Yakove ", "Ruth Netser "] +authors = [ + "Meni Yakove ", + "Ruth Netser ", +] readme = "README.md" license = "Apache-2.0" homepage = "https://github.com/myakove/github-webhook-server" repository = "https://github.com/myakove/github-webhook-server" -packages = [{include = "webhook_server_container"}] +packages = [{ include = "webhook_server_container" }] classifiers = [ - "Programming Language :: Python :: 3", - "Operating System :: OS Independent", + "Programming Language :: Python :: 3", + "Operating System :: OS Independent", ] @@ -38,7 +41,7 @@ Download = "https://quay.io/repository/myakove/github-webhook-server" [tool.poetry.dependencies] python = "^3.8" -poetry-dynamic-versioning = {extras = ["plugin"], version = "^1.0.0"} +poetry-dynamic-versioning = { extras = ["plugin"], version = "^1.0.0" } pygithub = "^2.0.0" pyyaml = "^6.0" build = "^1.0.0" diff --git a/webhook_server_container/app.py b/webhook_server_container/app.py index fb15b64e..c445b6df 100644 --- a/webhook_server_container/app.py +++ b/webhook_server_container/app.py @@ -31,15 +31,7 @@ async def process_webhook(request: Request) -> Dict[str, Any]: return process_failed_msg try: - api = GitHubApi(hook_data=hook_data) - except Exception as ex: - LOGGER.error(f"Failed to initialized GitHubApi instance: {ex}") - return process_failed_msg - - github_event: str = request.headers["X-GitHub-Event"] - event_log = f"Event type: {github_event}. event ID: {request.headers.get('X-GitHub-Delivery')}" - try: - api.process_hook(data=github_event, event_log=event_log) + GitHubApi(hook_data=hook_data, headers=request.headers) return {"status": requests.status_codes.codes.ok, "Message": "process success"} except Exception as ex: diff --git a/webhook_server_container/libs/github_api.py b/webhook_server_container/libs/github_api.py index 69ac2153..a8b78eb8 100644 --- a/webhook_server_container/libs/github_api.py +++ b/webhook_server_container/libs/github_api.py @@ -7,13 +7,14 @@ import time from concurrent.futures import Future, ThreadPoolExecutor, as_completed from pathlib import Path -from typing import Any, Dict, List, Optional, Tuple +from typing import Any, Dict, List, Optional, Set, Tuple from fastapi import FastAPI from github.Branch import Branch from github.ContentFile import ContentFile import requests import shortuuid +from starlette.datastructures import Headers import yaml from github import GithubException from github.Commit import Commit @@ -75,17 +76,21 @@ LOGGER = get_logger(name="GitHubApi", filename=os.environ.get("WEBHOOK_SERVER_LOG_FILE")) +class NoPullRequestError(Exception): + pass + + class RepositoryNotFoundError(Exception): pass class GitHubApi: - def __init__(self, hook_data): + def __init__(self, hook_data: Dict[Any, Any], headers: Headers): self.app: FastAPI = FASTAPI_APP - self.hook_data: Dict[Any, Any] = hook_data + self.hook_data = hook_data + self.headers = headers self.repository_name: str = hook_data["repository"]["name"] self.log_prefix_with_color: str = "" - self.pull_request: Optional[PullRequest] = None self.parent_committer: str = "" self.log_uuid: str = shortuuid.uuid()[:5] self.container_repo_dir: str = "/tmp/repository" @@ -94,7 +99,9 @@ def __init__(self, hook_data): self.all_required_status_checks: List[str] = [] self.config = Config() self._repo_data_from_config() - self._set_log_prefix_color() + + github_event: str = self.headers["X-GitHub-Event"] + event_log: str = f"Event type: {github_event}. 
event ID: {self.headers.get('X-GitHub-Delivery')}" self.github_app_api = get_repository_github_app_api( config_=self.config, repository_name=self.repository_full_name @@ -114,45 +121,15 @@ def __init__(self, hook_data): self.repository_by_github_app = get_github_repo_api( github_api=self.github_app_api, repository=self.repository_full_name ) + if not (self.repository or self.repository_by_github_app): - LOGGER.error(f"{self.log_prefix} Failed to get repository.") + LOGGER.error(f"{self.repository_full_name} Failed to get repository.") return self.add_api_users_to_auto_verified_and_merged_users() self.clone_repository_path: str = os.path.join("/", self.repository.name) - self.pull_request = self._get_pull_request() self.owners_content = self.get_owners_content() - - if self.pull_request: - self.last_commit = self._get_last_commit() - self.parent_committer = self.pull_request.user.login - self.last_committer = self.last_commit.committer.login - self.pull_request_branch = self.pull_request.base.ref - self.all_required_status_checks = self.get_all_required_status_checks() - - if self.jira_enabled_repository: - reviewers_and_approvers = self.reviewers + self.approvers - if self.parent_committer in reviewers_and_approvers: - self.jira_assignee = self.jira_user_mapping.get(self.parent_committer) - if not self.jira_assignee: - LOGGER.info( - f"{self.log_prefix} Jira tracking is disabled for the current pull request. " - f"Committer {self.parent_committer} is not in configures in jira-user-mapping" - ) - else: - self.jira_track_pr = True - self.issue_title = ( - f"[AUTO:FROM:GITHUB] [{self.repository_name}] " - f"PR [{self.pull_request.number}]: {self.pull_request.title}" - ) - LOGGER.info(f"{self.log_prefix} Jira tracking is enabled for the current pull request.") - else: - LOGGER.info( - f"{self.log_prefix} Jira tracking is disabled for the current pull request. " - f"Committer {self.parent_committer} is not in {reviewers_and_approvers}" - ) - self.supported_user_labels_str: str = "".join([f" * {label}\n" for label in USER_LABELS_DICT.keys()]) self.welcome_msg: str = f""" Report bugs in [Issues](https://github.com/myakove/github-webhook-server/issues) @@ -188,6 +165,60 @@ def __init__(self, hook_data): {self.supported_user_labels_str} """ + if github_event == "ping": + return + + try: + self.pull_request = self._get_pull_request() + self.log_prefix = self.prepare_log_prefix(pull_request=self.pull_request) + LOGGER.info(f"{self.log_prefix} {event_log}") + + self.last_commit = self._get_last_commit() + self.parent_committer = self.pull_request.user.login + self.last_committer = self.last_commit.committer.login + self.pull_request_branch = self.pull_request.base.ref + self.all_required_status_checks = self.get_all_required_status_checks() + + if self.jira_enabled_repository: + reviewers_and_approvers = self.reviewers + self.approvers + if self.parent_committer in reviewers_and_approvers: + self.jira_assignee = self.jira_user_mapping.get(self.parent_committer) + if not self.jira_assignee: + LOGGER.info( + f"{self.log_prefix} Jira tracking is disabled for the current pull request. 
" + f"Committer {self.parent_committer} is not in configures in jira-user-mapping" + ) + else: + self.jira_track_pr = True + self.issue_title = ( + f"[AUTO:FROM:GITHUB] [{self.repository_name}] " + f"PR [{self.pull_request.number}]: {self.pull_request.title}" + ) + LOGGER.info(f"{self.log_prefix} Jira tracking is enabled for the current pull request.") + else: + LOGGER.info( + f"{self.log_prefix} Jira tracking is disabled for the current pull request. " + f"Committer {self.parent_committer} is not in {reviewers_and_approvers}" + ) + + if github_event == "issue_comment": + self.process_comment_webhook_data() + + elif github_event == "pull_request": + self.process_pull_request_webhook_data() + + elif github_event == "pull_request_review": + self.process_pull_request_review_webhook_data() + + except NoPullRequestError: + self.log_prefix = self.prepare_log_prefix() + LOGGER.info(f"{self.log_prefix} {event_log}") + + if github_event == "push": + self.process_push_webhook_data() + + elif github_event == "check_run": + self.process_pull_request_check_run_webhook_data() @property def prepare_retest_wellcome_msg(self) -> str: @@ -225,11 +256,11 @@ def _set_log_prefix_color(self) -> None: with open(color_file, "w") as fd: json.dump(color_json, fd) - @property - def log_prefix(self) -> str: + def prepare_log_prefix(self, pull_request: Optional[PullRequest] = None) -> str: + self._set_log_prefix_color() return ( - f"{self.log_prefix_with_color}({self.log_uuid})[PR {self.pull_request.number}]:" - if self.pull_request + f"{self.log_prefix_with_color}({self.log_uuid})[PR {pull_request.number}]:" + if pull_request else f"{self.log_prefix_with_color}:({self.log_uuid})" ) @@ -245,26 +276,6 @@ def app_logger_error(self, message: str) -> None: hashed_message = self.hash_token(message=message) LOGGER.error(hashed_message) - def process_hook(self, data: str, event_log: str) -> None: - LOGGER.info(f"{self.log_prefix} {event_log}") - if data == "ping": - return - - if data == "issue_comment": - self.process_comment_webhook_data() - - elif data == "pull_request": - self.process_pull_request_webhook_data() - - elif data == "push": - self.process_push_webhook_data() - - elif data == "pull_request_review": - self.process_pull_request_review_webhook_data() - - elif data == "check_run": - self.process_pull_request_check_run_webhook_data() - def process_pull_request_check_run_webhook_data(self) -> None: _check_run: Dict[str, Any] = self.hook_data["check_run"] check_run_name: str = _check_run["name"] @@ -366,7 +377,7 @@ def _repo_data_from_config(self) -> None: return_on_none=[], ) - def _get_pull_request(self, number: Optional[int] = None) -> Optional[PullRequest]: + def _get_pull_request(self, number: Optional[int] = None) -> PullRequest: if number: return self.repository.get_pull(number) @@ -382,8 +393,7 @@ def _get_pull_request(self, number: Optional[int] = None) -> Optional[PullReques with contextlib.suppress(Exception): return commit_obj.get_pulls()[0] - LOGGER.info(f"{self.log_prefix} No issue or pull_request found in hook data") - return None + raise NoPullRequestError(f"{self.log_prefix} No issue or pull_request found in hook data") def _get_last_commit(self) -> Commit: return list(self.pull_request.get_commits())[-1] @@ -392,11 +402,7 @@ def label_exists_in_pull_request(self, label: str) -> bool: return any(lb for lb in self.pull_request_labels_names() if lb == label) def pull_request_labels_names(self) -> List[str]: - return ( - [lb.name for lb in self._get_pull_request(number=self.pull_request.number).labels] 
- if self.pull_request - else [] - ) + return [lb.name for lb in self.pull_request.labels] if self.pull_request else [] def skip_if_pull_request_already_merged(self) -> bool: if self.pull_request and self.pull_request.is_merged(): @@ -416,7 +422,7 @@ def _remove_label(self, label: str) -> bool: return False @ignore_exceptions(logger=LOGGER) - def _add_label(self, label): + def _add_label(self, label: str) -> None: label = label.strip() if len(label) > 49: LOGGER.warning(f"{label} is to long, not adding.") @@ -428,7 +434,8 @@ def _add_label(self, label): if label in STATIC_LABELS_DICT: LOGGER.info(f"{self.log_prefix} Adding pull request label {label} to {self.pull_request.number}") - return self.pull_request.add_to_labels(label) + self.pull_request.add_to_labels(label) + return _color = [DYNAMIC_LABELS_DICT[_label] for _label in DYNAMIC_LABELS_DICT if _label in label] LOGGER.info(f"{self.log_prefix} Label {label} was {'found' if _color else 'not found'} in labels dict") @@ -445,7 +452,7 @@ def _add_label(self, label): LOGGER.info(f"{self.log_prefix} Adding pull request label {label} to {self.pull_request.number}") self.pull_request.add_to_labels(label) - return self.wait_for_label(label=label, exists=True) + self.wait_for_label(label=label, exists=True) def wait_for_label(self, label: str, exists: bool) -> bool: try: @@ -457,9 +464,11 @@ def wait_for_label(self, label: str, exists: bool) -> bool: ): if sample == exists: return True + except TimeoutExpiredError: LOGGER.warning(f"{self.log_prefix} Label {label} {'not found' if exists else 'found'}") - return False + + return False def _generate_issue_title(self) -> str: return f"{self.pull_request.title} - {self.pull_request.number}" @@ -495,7 +504,7 @@ def upload_to_pypi(self, tag_name: str) -> None: self.send_slack_message(message=message, webhook_url=self.slack_webhook_url) except Exception as exp: - err: str = f"Publish to pypi failed: {exp}" + err = f"Publish to pypi failed: {exp}" LOGGER.error(f"{self.log_prefix} {err}") self.repository.create_issue( title=err, @@ -566,8 +575,8 @@ def assign_reviewers(self) -> None: except GithubException as ex: LOGGER.error(f"{self.log_prefix} Failed to add reviewer {reviewer}. 
{ex}") - def add_size_label(self): - size = self.pull_request.additions + self.pull_request.deletions + def add_size_label(self) -> None: + size: int = self.pull_request.additions + self.pull_request.deletions if size < 20: _label = "XS" @@ -598,7 +607,7 @@ def add_size_label(self): self._add_label(label=size_label) - def label_by_user_comment(self, user_request, remove, reviewed_user, issue_comment_id): + def label_by_user_comment(self, user_request: str, remove: bool, reviewed_user: str, issue_comment_id: int) -> None: if not any(user_request.startswith(label_name) for label_name in USER_LABELS_DICT): LOGGER.info(f"{self.log_prefix} Label {user_request} is not a predefined one, will not be added / removed.") @@ -621,121 +630,116 @@ def label_by_user_comment(self, user_request, remove, reviewed_user, issue_comme label_func = self._remove_label if remove else self._add_label label_func(label=user_request) - def reset_verify_label(self): - LOGGER.info(f"{self.log_prefix} Processing reset {VERIFIED_LABEL_STR} label on new commit push") - # Remove verified label - self._remove_label(label=VERIFIED_LABEL_STR) - @ignore_exceptions(logger=LOGGER) - def set_verify_check_queued(self): + def set_verify_check_queued(self) -> None: return self.set_check_run_status(check_run=VERIFIED_LABEL_STR, status=QUEUED_STR) @ignore_exceptions(logger=LOGGER) - def set_verify_check_success(self): + def set_verify_check_success(self) -> None: return self.set_check_run_status(check_run=VERIFIED_LABEL_STR, conclusion=SUCCESS_STR) @ignore_exceptions(logger=LOGGER) - def set_run_tox_check_queued(self): + def set_run_tox_check_queued(self) -> None: if not self.tox_enabled: - return False + return return self.set_check_run_status(check_run=TOX_STR, status=QUEUED_STR) @ignore_exceptions(logger=LOGGER) - def set_run_tox_check_in_progress(self): + def set_run_tox_check_in_progress(self) -> None: return self.set_check_run_status(check_run=TOX_STR, status=IN_PROGRESS_STR) @ignore_exceptions(logger=LOGGER) - def set_run_tox_check_failure(self, output): + def set_run_tox_check_failure(self, output: str) -> None: return self.set_check_run_status(check_run=TOX_STR, conclusion=FAILURE_STR, output=output) @ignore_exceptions(logger=LOGGER) - def set_run_tox_check_success(self, output): + def set_run_tox_check_success(self, output: str) -> None: return self.set_check_run_status(check_run=TOX_STR, conclusion=SUCCESS_STR, output=output) @ignore_exceptions(logger=LOGGER) - def set_run_pre_commit_check_queued(self): + def set_run_pre_commit_check_queued(self) -> None: if not self.pre_commit: - return False + return return self.set_check_run_status(check_run=PRE_COMMIT_STR, status=QUEUED_STR) @ignore_exceptions(logger=LOGGER) - def set_run_pre_commit_check_in_progress(self): + def set_run_pre_commit_check_in_progress(self) -> None: return self.set_check_run_status(check_run=PRE_COMMIT_STR, status=IN_PROGRESS_STR) @ignore_exceptions(logger=LOGGER) - def set_run_pre_commit_check_failure(self, output): + def set_run_pre_commit_check_failure(self, output: str = "") -> None: return self.set_check_run_status(check_run=PRE_COMMIT_STR, conclusion=FAILURE_STR, output=output) @ignore_exceptions(logger=LOGGER) - def set_run_pre_commit_check_success(self, output): + def set_run_pre_commit_check_success(self, output: str = "") -> None: return self.set_check_run_status(check_run=PRE_COMMIT_STR, conclusion=SUCCESS_STR, output=output) @ignore_exceptions(logger=LOGGER) - def set_merge_check_queued(self, output=None): + def set_merge_check_queued(self, 
output: str = "") -> None: return self.set_check_run_status(check_run=CAN_BE_MERGED_STR, status=QUEUED_STR, output=output) @ignore_exceptions(logger=LOGGER) - def set_merge_check_in_progress(self): + def set_merge_check_in_progress(self) -> None: return self.set_check_run_status(check_run=CAN_BE_MERGED_STR, status=IN_PROGRESS_STR) @ignore_exceptions(logger=LOGGER) - def set_merge_check_success(self): + def set_merge_check_success(self) -> None: return self.set_check_run_status(check_run=CAN_BE_MERGED_STR, conclusion=SUCCESS_STR) @ignore_exceptions(logger=LOGGER) - def set_merge_check_failure(self, output): + def set_merge_check_failure(self, output: str) -> None: return self.set_check_run_status(check_run=CAN_BE_MERGED_STR, conclusion=FAILURE_STR, output=output) @ignore_exceptions(logger=LOGGER) - def set_container_build_queued(self): + def set_container_build_queued(self) -> None: if not self.build_and_push_container: return return self.set_check_run_status(check_run=BUILD_CONTAINER_STR, status=QUEUED_STR) @ignore_exceptions(logger=LOGGER) - def set_container_build_in_progress(self): + def set_container_build_in_progress(self) -> None: return self.set_check_run_status(check_run=BUILD_CONTAINER_STR, status=IN_PROGRESS_STR) @ignore_exceptions(logger=LOGGER) - def set_container_build_success(self, output): + def set_container_build_success(self, output: str) -> None: return self.set_check_run_status(check_run=BUILD_CONTAINER_STR, conclusion=SUCCESS_STR, output=output) @ignore_exceptions(logger=LOGGER) - def set_container_build_failure(self, output): + def set_container_build_failure(self, output: str) -> None: return self.set_check_run_status(check_run=BUILD_CONTAINER_STR, conclusion=FAILURE_STR, output=output) @ignore_exceptions(logger=LOGGER) - def set_python_module_install_queued(self): + def set_python_module_install_queued(self) -> None: if not self.pypi: - return False + return return self.set_check_run_status(check_run=PYTHON_MODULE_INSTALL_STR, status=QUEUED_STR) @ignore_exceptions(logger=LOGGER) - def set_python_module_install_in_progress(self): + def set_python_module_install_in_progress(self) -> None: return self.set_check_run_status(check_run=PYTHON_MODULE_INSTALL_STR, status=IN_PROGRESS_STR) @ignore_exceptions(logger=LOGGER) - def set_python_module_install_success(self, output): + def set_python_module_install_success(self, output: str) -> None: return self.set_check_run_status(check_run=PYTHON_MODULE_INSTALL_STR, conclusion=SUCCESS_STR, output=output) @ignore_exceptions(logger=LOGGER) - def set_python_module_install_failure(self, output): + def set_python_module_install_failure(self, output: str) -> None: return self.set_check_run_status(check_run=PYTHON_MODULE_INSTALL_STR, conclusion=FAILURE_STR, output=output) @ignore_exceptions(logger=LOGGER) - def set_cherry_pick_in_progress(self): + def set_cherry_pick_in_progress(self) -> None: return self.set_check_run_status(check_run=CHERRY_PICKED_LABEL_PREFIX, status=IN_PROGRESS_STR) @ignore_exceptions(logger=LOGGER) - def set_cherry_pick_success(self, output): + def set_cherry_pick_success(self, output: str) -> None: return self.set_check_run_status(check_run=CHERRY_PICKED_LABEL_PREFIX, conclusion=SUCCESS_STR, output=output) @ignore_exceptions(logger=LOGGER) - def set_cherry_pick_failure(self, output): + def set_cherry_pick_failure(self, output: str) -> None: return self.set_check_run_status(check_run=CHERRY_PICKED_LABEL_PREFIX, conclusion=FAILURE_STR, output=output) @ignore_exceptions(logger=LOGGER) @@ -755,7 +759,7 @@ def 
create_issue_for_new_pull_request(self) -> None: ) @ignore_exceptions(logger=LOGGER) - def close_issue_for_merged_or_closed_pr(self, hook_action): + def close_issue_for_merged_or_closed_pr(self, hook_action: str) -> None: for issue in self.repository.get_issues(): if issue.body == self._generate_issue_body(): LOGGER.info(f"{self.log_prefix} Closing issue {issue.title} for PR: {self.pull_request.title}") @@ -766,11 +770,7 @@ def close_issue_for_merged_or_closed_pr(self, hook_action): break @ignore_exceptions(logger=LOGGER) - def delete_remote_tag_for_merged_or_closed_pr(self): - if not self.pull_request: - LOGGER.warning(f"{self.log_prefix} [Delete remote container TAG] - No pull request found") - return - + def delete_remote_tag_for_merged_or_closed_pr(self) -> None: if not self.container_repository: LOGGER.info(f"{self.log_prefix} repository do not have container configured") return @@ -815,30 +815,27 @@ def delete_remote_tag_for_merged_or_closed_pr(self): ) LOGGER.error(f"{self.log_prefix} Failed to delete tag: {repository_full_tag}. OUT:{out}. ERR:{err}") - def process_comment_webhook_data(self): + def process_comment_webhook_data(self) -> None: if self.hook_data["action"] in ("action", "deleted"): return - issue_number = self.hook_data["issue"]["number"] + issue_number: str = self.hook_data["issue"]["number"] LOGGER.info(f"{self.log_prefix} Processing issue {issue_number}") - if not self.pull_request: - return - - body = self.hook_data["comment"]["body"] + body: str = self.hook_data["comment"]["body"] if body == self.welcome_msg: LOGGER.info(f"{self.log_prefix} Welcome message found in issue {self.pull_request.title}. Not processing") return - striped_body = body.strip() - _user_commands = list( + striped_body: str = body.strip() + _user_commands: List[str] = list( filter( lambda x: x, striped_body.split("/") if striped_body.startswith("/") else [], ) ) - user_login = self.hook_data["sender"]["login"] + user_login: str = self.hook_data["sender"]["login"] for user_command in _user_commands: self.user_commands( command=user_command, @@ -846,11 +843,9 @@ def process_comment_webhook_data(self): issue_comment_id=self.hook_data["comment"]["id"], ) - def process_pull_request_webhook_data(self): + def process_pull_request_webhook_data(self) -> None: hook_action: str = self.hook_data["action"] LOGGER.info(f"{self.log_prefix} hook_action is: {hook_action}") - if not self.pull_request: - return pull_request_data: Dict[str, Any] = self.hook_data["pull_request"] self.parent_committer = pull_request_data["user"]["login"] @@ -957,7 +952,7 @@ def process_pull_request_webhook_data(self): return self.check_if_can_be_merged() - def process_push_webhook_data(self): + def process_push_webhook_data(self) -> None: tag = re.search(r"refs/tags/?(.*)", self.hook_data["ref"]) if tag: tag_name = tag.group(1) @@ -970,10 +965,7 @@ def process_push_webhook_data(self): LOGGER.info(f"{self.log_prefix} Processing build and push container for tag: {tag_name}") self._run_build_container(push=True, set_check=False, tag=tag_name) - def process_pull_request_review_webhook_data(self): - if not self.pull_request: - return - + def process_pull_request_review_webhook_data(self) -> None: if self.hook_data["action"] == "submitted": """ commented @@ -1012,7 +1004,7 @@ def process_pull_request_review_webhook_data(self): body=f"PR: {self.pull_request.title}, reviewed by: {reviewed_user}", ) - def manage_reviewed_by_label(self, review_state, action, reviewed_user): + def manage_reviewed_by_label(self, review_state: str, action: str, 
reviewed_user: str) -> None: LOGGER.info( f"{self.log_prefix} " f"Processing label for review from {reviewed_user}. " @@ -1064,13 +1056,13 @@ def manage_reviewed_by_label(self, review_state, action, reviewed_user): f"{self.log_prefix} PR {self.pull_request.number} got unsupported review state: {review_state}" ) - def _run_tox(self): + def _run_tox(self) -> None: if not self.tox_enabled: - return False + return if self.is_check_run_in_progress(check_run=TOX_STR): LOGGER.info(f"{self.log_prefix} Check run is in progress, not running {TOX_STR}.") - return False + return cmd = f"{self.tox_python_version} -m {TOX_STR}" if self.tox_enabled != "all": @@ -1090,13 +1082,13 @@ def _run_tox(self): else: return self.set_run_tox_check_failure(output=output) - def _run_pre_commit(self): + def _run_pre_commit(self) -> None: if not self.pre_commit: - return False + return if self.is_check_run_in_progress(check_run=PRE_COMMIT_STR): LOGGER.info(f"{self.log_prefix} Check run is in progress, not running {PRE_COMMIT_STR}.") - return False + return cmd = f"{PRE_COMMIT_STR} run --all-files" self.set_run_pre_commit_check_in_progress() @@ -1112,9 +1104,9 @@ def _run_pre_commit(self): else: return self.set_run_pre_commit_check_failure(output=output) - def user_commands(self, command, reviewed_user, issue_comment_id): - remove = False - available_commands = [ + def user_commands(self, command: str, reviewed_user: str, issue_comment_id: int) -> None: + remove: bool = False + available_commands: List[str] = [ "retest", "cherry-pick", "assign-reviewers", @@ -1125,18 +1117,18 @@ def user_commands(self, command, reviewed_user, issue_comment_id): return LOGGER.info(f"{self.log_prefix} Processing label/user command {command} by user {reviewed_user}") - command_and_args = command.split(" ", 1) + command_and_args: List[str] = command.split(" ", 1) _command = command_and_args[0] - not_running_msg = f"Pull request already merged, not running {_command}" - _args = command_and_args[1] if len(command_and_args) > 1 else "" + not_running_msg: str = f"Pull request already merged, not running {_command}" + _args: str = command_and_args[1] if len(command_and_args) > 1 else "" if len(command_and_args) > 1 and _args == "cancel": LOGGER.info(f"{self.log_prefix} User requested 'cancel' for command {_command}") remove = True if _command in available_commands: if not _args and _command not in ("assign-reviewers", "check-can-merge"): - issue_msg = f"{_command} requires an argument" - error_msg = f"{self.log_prefix} {issue_msg}" + issue_msg: str = f"{_command} requires an argument" + error_msg: str = f"{self.log_prefix} {issue_msg}" LOGGER.info(error_msg) self.pull_request.create_issue_comment(issue_msg) return @@ -1151,9 +1143,9 @@ def user_commands(self, command, reviewed_user, issue_comment_id): if _command == "cherry-pick": self.create_comment_reaction(issue_comment_id=issue_comment_id, reaction=REACTIONS.ok) - _target_branches = _args.split() - _exits_target_branches = set() - _non_exits_target_branches_msg = "" + _target_branches: List[str] = _args.split() + _exits_target_branches: Set[str] = set() + _non_exits_target_branches_msg: str = "" for _target_branch in _target_branches: try: @@ -1169,10 +1161,10 @@ def user_commands(self, command, reviewed_user, issue_comment_id): if _exits_target_branches: if not self.pull_request.is_merged(): - cp_labels = [ + cp_labels: List[str] = [ f"{CHERRY_PICK_LABEL_PREFIX}{_target_branch}" for _target_branch in _exits_target_branches ] - info_msg = f""" + info_msg: str = f""" Cherry-pick requested for 
PR: `{self.pull_request.title}` by user `{reviewed_user}` Adding label/s `{" ".join([_cp_label for _cp_label in cp_labels])}` for automatic cheery-pick once the PR is merged """ @@ -1191,11 +1183,11 @@ def user_commands(self, command, reviewed_user, issue_comment_id): if self.skip_if_pull_request_already_merged(): return self.pull_request.create_issue_comment(not_running_msg) - _target_tests = _args.split() + _target_tests: List[str] = _args.split() for _test in _target_tests: if _test == TOX_STR: if not self.tox_enabled: - msg = f"No {TOX_STR} configured for this repository" + msg: str = f"No {TOX_STR} configured for this repository" error_msg = f"{self.log_prefix} {msg}." LOGGER.info(error_msg) self.pull_request.create_issue_comment(msg) @@ -1243,7 +1235,7 @@ def user_commands(self, command, reviewed_user, issue_comment_id): return self.pull_request.create_issue_comment(not_running_msg) self.create_comment_reaction(issue_comment_id=issue_comment_id, reaction=REACTIONS.ok) - wip_for_title = f"{WIP_STR.upper()}:" + wip_for_title: str = f"{WIP_STR.upper()}:" if remove: self._remove_label(label=WIP_STR) self.pull_request.edit(title=self.pull_request.title.replace(wip_for_title, "")) @@ -1253,7 +1245,8 @@ def user_commands(self, command, reviewed_user, issue_comment_id): else: if self.skip_if_pull_request_already_merged(): - return self.pull_request.create_issue_comment(not_running_msg) + self.pull_request.create_issue_comment(not_running_msg) + return self.label_by_user_comment( user_request=_command, @@ -1263,7 +1256,7 @@ def user_commands(self, command, reviewed_user, issue_comment_id): ) @ignore_exceptions(logger=LOGGER) - def cherry_pick(self, target_branch, reviewed_user=None): + def cherry_pick(self, target_branch: str, reviewed_user: str = "") -> None: requested_by = reviewed_user or "by target-branch label" LOGGER.info(f"{self.log_prefix} Cherry-pick requested by user: {requested_by}") @@ -1323,7 +1316,7 @@ def cherry_pick(self, target_branch, reviewed_user=None): ) @ignore_exceptions(logger=LOGGER) - def label_by_pull_requests_merge_state_after_merged(self): + def label_by_pull_requests_merge_state_after_merged(self) -> None: """ Labels pull requests based on their mergeable state. @@ -1339,7 +1332,7 @@ def label_by_pull_requests_merge_state_after_merged(self): LOGGER.info(f"{self.log_prefix} check label pull request after merge") self.label_pull_request_by_merge_state(_sleep=time_sleep) - def label_pull_request_by_merge_state(self, _sleep=0): + def label_pull_request_by_merge_state(self, _sleep: int = 0) -> None: if _sleep: LOGGER.info(f"{self.log_prefix} Sleep for {_sleep} seconds before checking merge state") time.sleep(_sleep) @@ -1359,7 +1352,7 @@ def label_pull_request_by_merge_state(self, _sleep=0): else: self._remove_label(label=HAS_CONFLICTS_LABEL_STR) - def check_if_can_be_merged(self): + def check_if_can_be_merged(self) -> None: """ Check if PR can be merged and set the job for it @@ -1371,7 +1364,7 @@ def check_if_can_be_merged(self): PR has no changed requests from approvers. """ if self.skip_if_pull_request_already_merged(): - return False + return output = { "title": "Check if can be merged", @@ -1468,7 +1461,7 @@ def check_if_can_be_merged(self): f"is part of:\n`{self.auto_verified_and_merged_users}`\n" "Pull request is merged automatically." 
) - return self.pull_request.merge(merge_method="squash") + self.pull_request.merge(merge_method="squash") if not pr_approved: failure_output += f"Missing lgtm/approved from approvers {self.approvers}\n" @@ -1481,10 +1474,11 @@ def check_if_can_be_merged(self): except Exception as ex: LOGGER.error(f"{self.log_prefix} Failed to check if can be merged, set check run to {FAILURE_STR} {ex}") output["text"] = "Failed to check if can be merged, check logs" - return self.set_merge_check_failure(output=output) + self.set_merge_check_failure(output=output) + return @staticmethod - def _comment_with_details(title, body): + def _comment_with_details(title: str, body: str) -> str: return f"""
{title} @@ -1634,10 +1628,12 @@ def _process_verified(self) -> None: self._add_label(label=VERIFIED_LABEL_STR) self.set_verify_check_success() else: - self.reset_verify_label() + LOGGER.info(f"{self.log_prefix} Processing reset {VERIFIED_LABEL_STR} label on new commit push") + # Remove verified label + self._remove_label(label=VERIFIED_LABEL_STR) self.set_verify_check_queued() - def create_comment_reaction(self, issue_comment_id: str, reaction: str) -> None: + def create_comment_reaction(self, issue_comment_id: int, reaction: str) -> None: _comment = self.pull_request.get_issue_comment(issue_comment_id) _comment.create_reaction(reaction) @@ -1754,10 +1750,13 @@ def _run_in_container( # Checkout the pull request else: - if not self.pull_request: + try: + pull_request = self._get_pull_request() + except NoPullRequestError: LOGGER.error(f"{self.log_prefix} [func:_run_in_container] No pull request found") return False, "", "" - clone_base_cmd += f" && git checkout origin/pr/{self.pull_request.number}" + + clone_base_cmd += f" && git checkout origin/pr/{pull_request.number}" # final podman command podman_base_cmd += f" '{clone_base_cmd} && {command}'" @@ -1800,9 +1799,6 @@ def get_jira_conn(self) -> JiraApi: # LOGGER.info(f"{self.log_prefix} Repository features: {repository_features}") def get_story_key_with_jira_connection(self) -> str: - if not self.pull_request: - return "" - _story_label = [_label for _label in self.pull_request.labels if _label.name.startswith(JIRA_STR)] if not _story_label: return "" diff --git a/webhook_server_container/utils/webhook.py b/webhook_server_container/utils/webhook.py index c95acc11..1ff6e6e6 100644 --- a/webhook_server_container/utils/webhook.py +++ b/webhook_server_container/utils/webhook.py @@ -2,7 +2,8 @@ import os from typing import Any, Dict, List, Tuple -from github import Github, HookDescription +from github.Hook import Hook +from github import Github from simple_logger.logger import get_logger from webhook_server_container.libs.config import Config @@ -28,7 +29,7 @@ def process_github_webhook(data: Dict[str, Any], github_api: Github, webhook_ip: events: List[str] = data.get("events", ["*"]) try: - hooks: List[HookDescription] = list(repo.get_hooks()) + hooks: List[Hook] = list(repo.get_hooks()) except Exception as ex: return False, f"Could not list webhook for {repository}, check token permissions: {ex}" From b9932fbe90c40b456f32976c81496acb0ccf4c4b Mon Sep 17 00:00:00 2001 From: Meni Yakove Date: Sun, 7 Jul 2024 15:12:47 +0300 Subject: [PATCH 16/31] More typing --- webhook_server_container/libs/github_api.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/webhook_server_container/libs/github_api.py b/webhook_server_container/libs/github_api.py index a8b78eb8..d0bd12b7 100644 --- a/webhook_server_container/libs/github_api.py +++ b/webhook_server_container/libs/github_api.py @@ -360,7 +360,7 @@ def _repo_data_from_config(self) -> None: self.jira_enabled_repository = all([self.jira_server, self.jira_project, self.jira_token]) if not self.jira_enabled_repository: LOGGER.error( - f"{self.log_prefix} Jira configuration is not valid. Server: {self.jira_server}, " + f"{self.repository_full_name} Jira configuration is not valid. 
Server: {self.jira_server}, " f"Project: {self.jira_project}, Token: {self.jira_token}" ) From 832b6f401a20dcbaab130214661831b156779c8e Mon Sep 17 00:00:00 2001 From: Meni Yakove Date: Sun, 7 Jul 2024 15:19:29 +0300 Subject: [PATCH 17/31] More typing --- webhook_server_container/app.py | 4 ++-- webhook_server_container/libs/github_api.py | 11 ++++++----- 2 files changed, 8 insertions(+), 7 deletions(-) diff --git a/webhook_server_container/app.py b/webhook_server_container/app.py index c445b6df..7c3b39a1 100644 --- a/webhook_server_container/app.py +++ b/webhook_server_container/app.py @@ -7,7 +7,7 @@ from simple_logger.logger import get_logger -from webhook_server_container.libs.github_api import GitHubApi +from webhook_server_container.libs.github_api import ProcessGithubWehook from webhook_server_container.utils.constants import FASTAPI_APP APP_ROOT_PATH: str = "/webhook_server" @@ -31,7 +31,7 @@ async def process_webhook(request: Request) -> Dict[str, Any]: return process_failed_msg try: - GitHubApi(hook_data=hook_data, headers=request.headers) + ProcessGithubWehook(hook_data=hook_data, headers=request.headers) return {"status": requests.status_codes.codes.ok, "Message": "process success"} except Exception as ex: diff --git a/webhook_server_container/libs/github_api.py b/webhook_server_container/libs/github_api.py index d0bd12b7..d277f72d 100644 --- a/webhook_server_container/libs/github_api.py +++ b/webhook_server_container/libs/github_api.py @@ -84,7 +84,7 @@ class RepositoryNotFoundError(Exception): pass -class GitHubApi: +class ProcessGithubWehook: def __init__(self, hook_data: Dict[Any, Any], headers: Headers): self.app: FastAPI = FASTAPI_APP self.hook_data = hook_data @@ -98,6 +98,7 @@ def __init__(self, hook_data: Dict[Any, Any], headers: Headers): self.issue_title: str = "" self.all_required_status_checks: List[str] = [] self.config = Config() + self.log_prefix = self.prepare_log_prefix() self._repo_data_from_config() github_event: str = self.headers["X-GitHub-Event"] @@ -108,7 +109,7 @@ def __init__(self, hook_data: Dict[Any, Any], headers: Headers): ) if not self.github_app_api: LOGGER.error( - f"Repository {self.repository_full_name} not found by manage-repositories-app, " + f"{self.log_prefix} not found by manage-repositories-app, " f"make sure the app installed (https://github.com/apps/manage-repositories-app)" ) return @@ -165,13 +166,14 @@ def __init__(self, hook_data: Dict[Any, Any], headers: Headers): {self.supported_user_labels_str}
""" + LOGGER.info(f"{self.log_prefix} {event_log}") + if github_event == "ping": return try: self.pull_request = self._get_pull_request() self.log_prefix = self.prepare_log_prefix(pull_request=self.pull_request) - LOGGER.info(f"{self.log_prefix} {event_log}") self.last_commit = self._get_last_commit() self.parent_committer = self.pull_request.user.login @@ -211,7 +213,6 @@ def __init__(self, hook_data: Dict[Any, Any], headers: Headers): self.process_pull_request_review_webhook_data() except NoPullRequestError: - self.log_prefix = self.prepare_log_prefix() LOGGER.info(f"{self.log_prefix} {event_log}") if github_event == "push": @@ -360,7 +361,7 @@ def _repo_data_from_config(self) -> None: self.jira_enabled_repository = all([self.jira_server, self.jira_project, self.jira_token]) if not self.jira_enabled_repository: LOGGER.error( - f"{self.repository_full_name} Jira configuration is not valid. Server: {self.jira_server}, " + f"{self.log_prefix} Jira configuration is not valid. Server: {self.jira_server}, " f"Project: {self.jira_project}, Token: {self.jira_token}" ) From 28cf44183a4e960564864bd3806227f2310808cd Mon Sep 17 00:00:00 2001 From: Meni Yakove Date: Sun, 7 Jul 2024 15:32:04 +0300 Subject: [PATCH 18/31] Remove can be merged lable when check failed --- webhook_server_container/libs/github_api.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/webhook_server_container/libs/github_api.py b/webhook_server_container/libs/github_api.py index d277f72d..3a28aaeb 100644 --- a/webhook_server_container/libs/github_api.py +++ b/webhook_server_container/libs/github_api.py @@ -1470,11 +1470,13 @@ def check_if_can_be_merged(self) -> None: if failure_output: LOGGER.info(f"{self.log_prefix} cannot be merged: {failure_output}") output["text"] = failure_output + self._remove_label(label=CAN_BE_MERGED_STR) self.set_merge_check_failure(output=output) except Exception as ex: LOGGER.error(f"{self.log_prefix} Failed to check if can be merged, set check run to {FAILURE_STR} {ex}") output["text"] = "Failed to check if can be merged, check logs" + self._remove_label(label=CAN_BE_MERGED_STR) self.set_merge_check_failure(output=output) return From 26aab0c0b7668a4239102c7527cc9452cb40bec3 Mon Sep 17 00:00:00 2001 From: Meni Yakove Date: Sun, 7 Jul 2024 15:44:47 +0300 Subject: [PATCH 19/31] Fix call check_run process --- webhook_server_container/libs/github_api.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/webhook_server_container/libs/github_api.py b/webhook_server_container/libs/github_api.py index 3a28aaeb..7ee7814c 100644 --- a/webhook_server_container/libs/github_api.py +++ b/webhook_server_container/libs/github_api.py @@ -166,7 +166,6 @@ def __init__(self, hook_data: Dict[Any, Any], headers: Headers): {self.supported_user_labels_str} """ - LOGGER.info(f"{self.log_prefix} {event_log}") if github_event == "ping": return @@ -174,6 +173,7 @@ def __init__(self, hook_data: Dict[Any, Any], headers: Headers): try: self.pull_request = self._get_pull_request() self.log_prefix = self.prepare_log_prefix(pull_request=self.pull_request) + LOGGER.info(f"{self.log_prefix} {event_log}") self.last_commit = self._get_last_commit() self.parent_committer = self.pull_request.user.login @@ -212,15 +212,14 @@ def __init__(self, hook_data: Dict[Any, Any], headers: Headers): elif github_event == "pull_request_review": self.process_pull_request_review_webhook_data() + elif github_event == "check_run": + self.process_pull_request_check_run_webhook_data() + except NoPullRequestError: 
LOGGER.info(f"{self.log_prefix} {event_log}") - if github_event == "push": self.process_push_webhook_data() - elif github_event == "check_run": - self.process_pull_request_check_run_webhook_data() - @property def prepare_retest_wellcome_msg(self) -> str: retest_msg: str = "" From 88bf8d15fdc8e0fe09f405dfca252fdc5109b5a2 Mon Sep 17 00:00:00 2001 From: Meni Yakove Date: Sun, 7 Jul 2024 15:59:32 +0300 Subject: [PATCH 20/31] improve check if can be merged --- webhook_server_container/libs/github_api.py | 36 +++++++++------------ 1 file changed, 15 insertions(+), 21 deletions(-) diff --git a/webhook_server_container/libs/github_api.py b/webhook_server_container/libs/github_api.py index 7ee7814c..ea572f69 100644 --- a/webhook_server_container/libs/github_api.py +++ b/webhook_server_container/libs/github_api.py @@ -1395,7 +1395,6 @@ def check_if_can_be_merged(self) -> None: is_hold = HOLD_LABEL_STR in _labels is_wip = WIP_STR in _labels if is_hold or is_wip: - self._remove_label(label=CAN_BE_MERGED_STR) if is_hold: failure_output += "Hold label exists.\n" @@ -1403,7 +1402,6 @@ def check_if_can_be_merged(self) -> None: failure_output += "WIP label exists.\n" if not self.pull_request.mergeable: - self._remove_label(label=CAN_BE_MERGED_STR) failure_output += "PR is not mergeable: {self.pull_request.mergeable_state}\n" failed_check_runs = [] @@ -1419,7 +1417,6 @@ def check_if_can_be_merged(self) -> None: failed_check_runs.append(check_run.name) if failed_check_runs: - self._remove_label(label=CAN_BE_MERGED_STR) failure_output += f"Some check runs failed: {failed_check_runs}\n" LOGGER.info(f"{self.log_prefix} check if can be merged. PR labels are: {_labels}") @@ -1428,26 +1425,24 @@ def check_if_can_be_merged(self) -> None: if CHANGED_REQUESTED_BY_LABEL_PREFIX.lower() in _label.lower(): change_request_user = _label.split("-")[-1] if change_request_user in self.approvers: - self._remove_label(label=CAN_BE_MERGED_STR) failure_output += "PR has changed requests from approvers\n" - pr_approved = False - for _label in _labels: - if APPROVED_BY_LABEL_PREFIX.lower() in _label.lower(): - approved_user = _label.split("-")[-1] - if approved_user in self.approvers: - pr_approved = True - break - missing_required_labels = [] for _req_label in self.can_be_merged_required_labels: if _req_label not in _labels: missing_required_labels.append(_req_label) if missing_required_labels: - self._remove_label(label=CAN_BE_MERGED_STR) failure_output += f"Missing required labels: {missing_required_labels}\n" + pr_approved = False + for _label in _labels: + if APPROVED_BY_LABEL_PREFIX.lower() in _label.lower(): + approved_user = _label.split("-")[-1] + if approved_user in self.approvers: + pr_approved = True + break + if pr_approved and not failure_output: self._add_label(label=CAN_BE_MERGED_STR) self.set_merge_check_success() @@ -1463,21 +1458,20 @@ def check_if_can_be_merged(self) -> None: ) self.pull_request.merge(merge_method="squash") - if not pr_approved: - failure_output += f"Missing lgtm/approved from approvers {self.approvers}\n" + return - if failure_output: - LOGGER.info(f"{self.log_prefix} cannot be merged: {failure_output}") - output["text"] = failure_output - self._remove_label(label=CAN_BE_MERGED_STR) - self.set_merge_check_failure(output=output) + failure_output += f"Missing lgtm/approved from approvers {self.approvers}\n" + + LOGGER.info(f"{self.log_prefix} cannot be merged: {failure_output}") + output["text"] = failure_output + self._remove_label(label=CAN_BE_MERGED_STR) + 
self.set_merge_check_failure(output=output) except Exception as ex: LOGGER.error(f"{self.log_prefix} Failed to check if can be merged, set check run to {FAILURE_STR} {ex}") output["text"] = "Failed to check if can be merged, check logs" self._remove_label(label=CAN_BE_MERGED_STR) self.set_merge_check_failure(output=output) - return @staticmethod def _comment_with_details(title: str, body: str) -> str: From d374717c081671dca1d9815a5736a9d7a14c0a71 Mon Sep 17 00:00:00 2001 From: Meni Yakove Date: Sun, 7 Jul 2024 16:15:23 +0300 Subject: [PATCH 21/31] improve process pull request speed --- webhook_server_container/libs/github_api.py | 45 ++++++++++++--------- 1 file changed, 26 insertions(+), 19 deletions(-) diff --git a/webhook_server_container/libs/github_api.py b/webhook_server_container/libs/github_api.py index ea572f69..b497963d 100644 --- a/webhook_server_container/libs/github_api.py +++ b/webhook_server_container/libs/github_api.py @@ -1634,13 +1634,32 @@ def create_comment_reaction(self, issue_comment_id: int, reaction: str) -> None: _comment.create_reaction(reaction) def process_opened_or_synchronize_pull_request(self) -> None: - self.set_merge_check_queued() - self.set_run_tox_check_queued() - self.set_run_pre_commit_check_queued() - self.set_python_module_install_queued() - self.set_container_build_queued() - self._process_verified() - self.add_size_label() + prepare_pull_futures: List[Future] = [] + with ThreadPoolExecutor() as executor: + prepare_pull_futures.append(executor.submit(self.set_merge_check_queued)) + prepare_pull_futures.append(executor.submit(self.set_run_tox_check_queued)) + prepare_pull_futures.append(executor.submit(self.set_run_pre_commit_check_queued)) + prepare_pull_futures.append(executor.submit(self.set_python_module_install_queued)) + prepare_pull_futures.append(executor.submit(self.set_container_build_queued)) + prepare_pull_futures.append(executor.submit(self._process_verified)) + prepare_pull_futures.append(executor.submit(self.add_size_label)) + + for result in as_completed(prepare_pull_futures): + if result.exception(): + LOGGER.error(f"{self.log_prefix} {result.exception()}") + + run_check_runs_futures: List[Future] = [] + with ThreadPoolExecutor() as executor: + run_check_runs_futures.append(executor.submit(self._run_tox)) + run_check_runs_futures.append(executor.submit(self._run_pre_commit)) + run_check_runs_futures.append(executor.submit(self._run_install_python_module)) + run_check_runs_futures.append(executor.submit(self._run_build_container)) + + for result in as_completed(run_check_runs_futures): + if result.exception(): + LOGGER.error(f"{self.log_prefix} {result.exception()}") + LOGGER.info(f"{self.log_prefix} {result.result()}") + self._add_label(label=f"{BRANCH_LABEL_PREFIX}{self.pull_request_branch}") LOGGER.info(f"{self.log_prefix} Adding PR owner as assignee") @@ -1653,18 +1672,6 @@ def process_opened_or_synchronize_pull_request(self) -> None: self.assign_reviewers() self.label_pull_request_by_merge_state() - futures: List[Future] = [] - with ThreadPoolExecutor() as executor: - futures.append(executor.submit(self._run_tox)) - futures.append(executor.submit(self._run_pre_commit)) - futures.append(executor.submit(self._run_install_python_module)) - futures.append(executor.submit(self._run_build_container)) - - for result in as_completed(futures): - if result.exception(): - LOGGER.error(f"{self.log_prefix} {result.exception()}") - LOGGER.info(f"{self.log_prefix} {result.result()}") - def is_check_run_in_progress(self, check_run: str) -> bool: 
for run in self.last_commit.get_check_runs(): if run.name == check_run and run.status == IN_PROGRESS_STR: From 987cea3ab0c61f8b8c79da3a1599baa4cb9ec761 Mon Sep 17 00:00:00 2001 From: Meni Yakove Date: Sun, 7 Jul 2024 19:16:09 +0300 Subject: [PATCH 22/31] add pr number to log when check run reset to queued on startup --- webhook_server_container/utils/github_repository_settings.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/webhook_server_container/utils/github_repository_settings.py b/webhook_server_container/utils/github_repository_settings.py index 4a8d96e9..b3f7b89b 100644 --- a/webhook_server_container/utils/github_repository_settings.py +++ b/webhook_server_container/utils/github_repository_settings.py @@ -292,7 +292,7 @@ def set_repository_check_runs_to_queued( for check_run in last_commit.get_check_runs(): if check_run.name in check_runs and check_run.status == IN_PROGRESS_STR: LOGGER.info( - f"{repository}: {check_run.name} status is {IN_PROGRESS_STR}, " + f"{repository}: [PR:{pull_request.number}] {check_run.name} status is {IN_PROGRESS_STR}, " f"Setting check run {check_run.name} to {QUEUED_STR}" ) app_api.create_check_run(name=check_run.name, head_sha=last_commit.sha, status=QUEUED_STR) From e764372badc25e6035046641c7e3043f3cbae010 Mon Sep 17 00:00:00 2001 From: Meni Yakove Date: Sun, 7 Jul 2024 19:37:15 +0300 Subject: [PATCH 23/31] pass log to get_future_results --- .../utils/github_repository_settings.py | 20 ++++++++++--------- webhook_server_container/utils/helpers.py | 12 +++++++---- webhook_server_container/utils/webhook.py | 12 +++++------ 3 files changed, 25 insertions(+), 19 deletions(-) diff --git a/webhook_server_container/utils/github_repository_settings.py b/webhook_server_container/utils/github_repository_settings.py index b3f7b89b..b401e2ea 100644 --- a/webhook_server_container/utils/github_repository_settings.py +++ b/webhook_server_container/utils/github_repository_settings.py @@ -2,7 +2,7 @@ import os from concurrent.futures import Future, ThreadPoolExecutor, as_completed from copy import deepcopy -from typing import Any, Dict, List, Optional, Tuple +from typing import Any, Callable, Dict, List, Optional, Tuple from github import Github, GithubIntegration, Auth from github.Repository import Repository @@ -189,20 +189,22 @@ def set_repositories_settings(config_: Config, github_api: Github) -> None: get_future_results(futures=futures) -def set_repository(data: Dict[str, Any], github_api: Github, default_status_checks: List[str]) -> Tuple[bool, str]: +def set_repository( + data: Dict[str, Any], github_api: Github, default_status_checks: List[str] +) -> Tuple[bool, str, Callable]: repository: str = data["name"] LOGGER.info(f"Processing repository {repository}") protected_branches: Dict[str, Any] = data.get("protected-branches", {}) repo = get_github_repo_api(github_api=github_api, repository=repository) if not repo: - return False, f"{repository}: Failed to get repository" + return False, f"{repository}: Failed to get repository", LOGGER.error try: set_repository_labels(repository=repo) set_repository_settings(repository=repo) if repo.private: - return False, f"{repository}: Repository is private, skipping setting branch settings" + return False, f"{repository}: Repository is private, skipping setting branch settings", LOGGER.warning futures: List["Future"] = [] @@ -244,9 +246,9 @@ def set_repository(data: Dict[str, Any], github_api: Github, default_status_chec LOGGER.error(result.exception()) except UnknownObjectException as ex: - return 
False, f"{repository}: Failed to get repository settings, ex: {ex}" + return False, f"{repository}: Failed to get repository settings, ex: {ex}", LOGGER.error - return True, f"{repository}: Setting repository settings is done" + return True, f"{repository}: Setting repository settings is done", LOGGER.info def set_all_in_progress_check_runs_to_queued(config_: Config, github_api: Github) -> None: @@ -278,11 +280,11 @@ def set_all_in_progress_check_runs_to_queued(config_: Config, github_api: Github def set_repository_check_runs_to_queued( config_: Config, data: Dict[str, Any], github_api: Github, check_runs: Tuple[str] -) -> Tuple[bool, str]: +) -> Tuple[bool, str, Callable]: repository: str = data["name"] repository_app_api = get_repository_github_app_api(config_=config_, repository_name=repository) if not repository_app_api: - return False, "Failed to get repositories GitHub app API" + return False, "Failed to get repositories GitHub app API", LOGGER.error app_api = get_github_repo_api(github_api=repository_app_api, repository=repository) repo = get_github_repo_api(github_api=github_api, repository=repository) @@ -297,7 +299,7 @@ def set_repository_check_runs_to_queued( ) app_api.create_check_run(name=check_run.name, head_sha=last_commit.sha, status=QUEUED_STR) - return True, f"{repository}: Set check run status to {QUEUED_STR} is done" + return True, f"{repository}: Set check run status to {QUEUED_STR} is done", LOGGER.info @ignore_exceptions(logger=LOGGER) diff --git a/webhook_server_container/utils/helpers.py b/webhook_server_container/utils/helpers.py index caaf628e..12c17fed 100644 --- a/webhook_server_container/utils/helpers.py +++ b/webhook_server_container/utils/helpers.py @@ -184,13 +184,17 @@ def get_value_from_dicts( def get_future_results(futures: List["Future"]) -> None: + """ + result must return Tuple[bool, str, Callable] when the Callable is Logger function (LOGGER.info, LOGGER.error, etc) + """ for result in as_completed(futures): + _res = result.result() + _log = _res[2] if result.exception(): - LOGGER.error(result.exception()) + _log(result.exception()) - _res = result.result() if _res[0]: - LOGGER.info(_res[1]) + _log(_res[1]) else: - LOGGER.error(_res[1]) + _log(_res[1]) diff --git a/webhook_server_container/utils/webhook.py b/webhook_server_container/utils/webhook.py index 1ff6e6e6..34dd816a 100644 --- a/webhook_server_container/utils/webhook.py +++ b/webhook_server_container/utils/webhook.py @@ -1,6 +1,6 @@ from concurrent.futures import ThreadPoolExecutor import os -from typing import Any, Dict, List, Tuple +from typing import Any, Callable, Dict, List, Tuple from github.Hook import Hook from github import Github @@ -19,11 +19,11 @@ @ignore_exceptions(logger=LOGGER) -def process_github_webhook(data: Dict[str, Any], github_api: Github, webhook_ip: str) -> Tuple[bool, str]: +def process_github_webhook(data: Dict[str, Any], github_api: Github, webhook_ip: str) -> Tuple[bool, str, Callable]: repository: str = data["name"] repo = get_github_repo_api(github_api=github_api, repository=repository) if not repo: - return False, f"Could not find repository {repository}" + return False, f"Could not find repository {repository}", LOGGER.error config_: Dict[str, str] = {"url": f"{webhook_ip}/webhook_server", "content_type": "json"} events: List[str] = data.get("events", ["*"]) @@ -31,15 +31,15 @@ def process_github_webhook(data: Dict[str, Any], github_api: Github, webhook_ip: try: hooks: List[Hook] = list(repo.get_hooks()) except Exception as ex: - return False, f"Could not list 
webhook for {repository}, check token permissions: {ex}" + return False, f"Could not list webhook for {repository}, check token permissions: {ex}", LOGGER.error for _hook in hooks: if webhook_ip in _hook.config["url"]: - return True, f"{repository}: Hook already exists - {_hook.config['url']}" + return True, f"{repository}: Hook already exists - {_hook.config['url']}", LOGGER.info LOGGER.info(f"Creating webhook: {config_['url']} for {repository} with events: {events}") repo.create_hook(name="web", config=config_, events=events, active=True) - return True, f"{repository}: Create webhook is done" + return True, f"{repository}: Create webhook is done", LOGGER.info def create_webhook(config_: Config, github_api: Github) -> None: From cccad2cd5dad5a041cf23acd2a7e98fca4e858c9 Mon Sep 17 00:00:00 2001 From: Meni Yakove Date: Sun, 7 Jul 2024 20:05:29 +0300 Subject: [PATCH 24/31] Fix delete tag for merged PR --- webhook_server_container/libs/github_api.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/webhook_server_container/libs/github_api.py b/webhook_server_container/libs/github_api.py index b497963d..50781192 100644 --- a/webhook_server_container/libs/github_api.py +++ b/webhook_server_container/libs/github_api.py @@ -771,7 +771,7 @@ def close_issue_for_merged_or_closed_pr(self, hook_action: str) -> None: @ignore_exceptions(logger=LOGGER) def delete_remote_tag_for_merged_or_closed_pr(self) -> None: - if not self.container_repository: + if not self.build_and_push_container: LOGGER.info(f"{self.log_prefix} repository do not have container configured") return From 12f0c93afe22f1f714cc0e143bef378702175ec8 Mon Sep 17 00:00:00 2001 From: Meni Yakove Date: Sun, 7 Jul 2024 20:08:34 +0300 Subject: [PATCH 25/31] Set log warn when set check run to queued --- webhook_server_container/utils/github_repository_settings.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/webhook_server_container/utils/github_repository_settings.py b/webhook_server_container/utils/github_repository_settings.py index b401e2ea..96fbd59b 100644 --- a/webhook_server_container/utils/github_repository_settings.py +++ b/webhook_server_container/utils/github_repository_settings.py @@ -293,7 +293,7 @@ def set_repository_check_runs_to_queued( last_commit: Commit = list(pull_request.get_commits())[-1] for check_run in last_commit.get_check_runs(): if check_run.name in check_runs and check_run.status == IN_PROGRESS_STR: - LOGGER.info( + LOGGER.warning( f"{repository}: [PR:{pull_request.number}] {check_run.name} status is {IN_PROGRESS_STR}, " f"Setting check run {check_run.name} to {QUEUED_STR}" ) From 04a38aeeab4bb720703c660b0fb94a6b0f80008b Mon Sep 17 00:00:00 2001 From: Meni Yakove Date: Sun, 7 Jul 2024 20:10:39 +0300 Subject: [PATCH 26/31] update poetry --- poetry.lock | 679 +++++++++++++++++++++++++++------------------------- 1 file changed, 354 insertions(+), 325 deletions(-) diff --git a/poetry.lock b/poetry.lock index 11bc4fd9..78dc9ba2 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. [[package]] name = "annotated-types" @@ -164,13 +164,13 @@ redis = ["redis (>=2.10.5)"] [[package]] name = "certifi" -version = "2024.6.2" +version = "2024.7.4" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.6.2-py3-none-any.whl", hash = "sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56"}, - {file = "certifi-2024.6.2.tar.gz", hash = "sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516"}, + {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, + {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, ] [[package]] @@ -617,13 +617,13 @@ pgp = ["gpg"] [[package]] name = "dunamai" -version = "1.21.1" +version = "1.21.2" description = "Dynamic version generation" optional = false python-versions = ">=3.5" files = [ - {file = "dunamai-1.21.1-py3-none-any.whl", hash = "sha256:fe303541463648b8197c495decf62cd8f15234fb6d891a5f295015e452f656c8"}, - {file = "dunamai-1.21.1.tar.gz", hash = "sha256:d7fea28ad2faf20a6ca5ec121e5c68e55eec6b8ada23d9c387e4e7a574cc559f"}, + {file = "dunamai-1.21.2-py3-none-any.whl", hash = "sha256:87db76405bf9366f9b4925ff5bb1db191a9a1bd9f9693f81c4d3abb8298be6f0"}, + {file = "dunamai-1.21.2.tar.gz", hash = "sha256:05827fb5f032f5596bfc944b23f613c147e676de118681f3bb1559533d8a65c4"}, ] [package.dependencies] @@ -718,13 +718,13 @@ standard = ["fastapi", "uvicorn[standard] (>=0.15.0)"] [[package]] name = "fastjsonschema" -version = "2.19.1" +version = "2.20.0" description = "Fastest Python implementation of JSON schema" optional = false python-versions = "*" files = [ - {file = "fastjsonschema-2.19.1-py3-none-any.whl", hash = "sha256:3672b47bc94178c9f23dbb654bf47440155d4db9df5f7bc47643315f9c405cd0"}, - {file = "fastjsonschema-2.19.1.tar.gz", hash = "sha256:e3126a94bdc4623d3de4485f8d468a12f02a67921315ddc87836d6e456dc789d"}, + {file = "fastjsonschema-2.20.0-py3-none-any.whl", hash = "sha256:5875f0b0fa7a0043a91e93a9b8f793bcbbba9691e7fd83dca95c28ba26d21f0a"}, + {file = "fastjsonschema-2.20.0.tar.gz", hash = "sha256:3d48fc5300ee96f5d116f10fe6f28d938e6008f59a6a025c2649475b87f76a23"}, ] [package.extras] @@ -732,18 +732,18 @@ devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benc [[package]] name = "filelock" -version = "3.15.1" +version = "3.15.4" description = "A platform independent file lock." 
optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.15.1-py3-none-any.whl", hash = "sha256:71b3102950e91dfc1bb4209b64be4dc8854f40e5f534428d8684f953ac847fac"}, - {file = "filelock-3.15.1.tar.gz", hash = "sha256:58a2549afdf9e02e10720eaa4d4470f56386d7a6f72edd7d0596337af8ed7ad8"}, + {file = "filelock-3.15.4-py3-none-any.whl", hash = "sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7"}, + {file = "filelock-3.15.4.tar.gz", hash = "sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb"}, ] [package.extras] docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)", "virtualenv (>=20.26.2)"] typing = ["typing-extensions (>=4.8)"] [[package]] @@ -863,22 +863,22 @@ files = [ [[package]] name = "importlib-metadata" -version = "7.1.0" +version = "8.0.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-7.1.0-py3-none-any.whl", hash = "sha256:30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570"}, - {file = "importlib_metadata-7.1.0.tar.gz", hash = "sha256:b78938b926ee8d5f020fc4772d487045805a55ddbad2ecf21c6d60938dc7fcd2"}, + {file = "importlib_metadata-8.0.0-py3-none-any.whl", hash = "sha256:15584cf2b1bf449d98ff8a6ff1abef57bf20f3ac6454f431736cd3e660921b2f"}, + {file = "importlib_metadata-8.0.0.tar.gz", hash = "sha256:188bd24e4c346d3f0a933f275c2fec67050326a856b9a359881d7c2a697e8812"}, ] [package.dependencies] zipp = ">=0.5" [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] +test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] [[package]] name = "importlib-resources" @@ -1310,57 +1310,62 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] [[package]] name = "orjson" -version = "3.10.5" +version = "3.10.6" description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" optional = false python-versions = ">=3.8" files = [ - {file = "orjson-3.10.5-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:545d493c1f560d5ccfc134803ceb8955a14c3fcb47bbb4b2fee0232646d0b932"}, - {file = "orjson-3.10.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4324929c2dd917598212bfd554757feca3e5e0fa60da08be11b4aa8b90013c1"}, - {file = "orjson-3.10.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:8c13ca5e2ddded0ce6a927ea5a9f27cae77eee4c75547b4297252cb20c4d30e6"}, - {file = "orjson-3.10.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b6c8e30adfa52c025f042a87f450a6b9ea29649d828e0fec4858ed5e6caecf63"}, - {file = "orjson-3.10.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:338fd4f071b242f26e9ca802f443edc588fa4ab60bfa81f38beaedf42eda226c"}, - {file = "orjson-3.10.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6970ed7a3126cfed873c5d21ece1cd5d6f83ca6c9afb71bbae21a0b034588d96"}, - {file = "orjson-3.10.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:235dadefb793ad12f7fa11e98a480db1f7c6469ff9e3da5e73c7809c700d746b"}, - {file = "orjson-3.10.5-cp310-none-win32.whl", hash = "sha256:be79e2393679eda6a590638abda16d167754393f5d0850dcbca2d0c3735cebe2"}, - {file = "orjson-3.10.5-cp310-none-win_amd64.whl", hash = "sha256:c4a65310ccb5c9910c47b078ba78e2787cb3878cdded1702ac3d0da71ddc5228"}, - {file = "orjson-3.10.5-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:cdf7365063e80899ae3a697def1277c17a7df7ccfc979990a403dfe77bb54d40"}, - {file = "orjson-3.10.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b68742c469745d0e6ca5724506858f75e2f1e5b59a4315861f9e2b1df77775a"}, - {file = "orjson-3.10.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7d10cc1b594951522e35a3463da19e899abe6ca95f3c84c69e9e901e0bd93d38"}, - {file = "orjson-3.10.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dcbe82b35d1ac43b0d84072408330fd3295c2896973112d495e7234f7e3da2e1"}, - {file = "orjson-3.10.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c0eb7e0c75e1e486c7563fe231b40fdd658a035ae125c6ba651ca3b07936f5"}, - {file = "orjson-3.10.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:53ed1c879b10de56f35daf06dbc4a0d9a5db98f6ee853c2dbd3ee9d13e6f302f"}, - {file = "orjson-3.10.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:099e81a5975237fda3100f918839af95f42f981447ba8f47adb7b6a3cdb078fa"}, - {file = "orjson-3.10.5-cp311-none-win32.whl", hash = "sha256:1146bf85ea37ac421594107195db8bc77104f74bc83e8ee21a2e58596bfb2f04"}, - {file = "orjson-3.10.5-cp311-none-win_amd64.whl", hash = "sha256:36a10f43c5f3a55c2f680efe07aa93ef4a342d2960dd2b1b7ea2dd764fe4a37c"}, - {file = "orjson-3.10.5-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:68f85ecae7af14a585a563ac741b0547a3f291de81cd1e20903e79f25170458f"}, - {file = "orjson-3.10.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28afa96f496474ce60d3340fe8d9a263aa93ea01201cd2bad844c45cd21f5268"}, - {file = "orjson-3.10.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9cd684927af3e11b6e754df80b9ffafd9fb6adcaa9d3e8fdd5891be5a5cad51e"}, - {file = "orjson-3.10.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d21b9983da032505f7050795e98b5d9eee0df903258951566ecc358f6696969"}, - {file = "orjson-3.10.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ad1de7fef79736dde8c3554e75361ec351158a906d747bd901a52a5c9c8d24b"}, - {file = "orjson-3.10.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2d97531cdfe9bdd76d492e69800afd97e5930cb0da6a825646667b2c6c6c0211"}, - {file = "orjson-3.10.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:d69858c32f09c3e1ce44b617b3ebba1aba030e777000ebdf72b0d8e365d0b2b3"}, - {file = "orjson-3.10.5-cp312-none-win32.whl", hash = "sha256:64c9cc089f127e5875901ac05e5c25aa13cfa5dbbbd9602bda51e5c611d6e3e2"}, - {file = "orjson-3.10.5-cp312-none-win_amd64.whl", hash = "sha256:b2efbd67feff8c1f7728937c0d7f6ca8c25ec81373dc8db4ef394c1d93d13dc5"}, - {file = "orjson-3.10.5-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:03b565c3b93f5d6e001db48b747d31ea3819b89abf041ee10ac6988886d18e01"}, - {file = "orjson-3.10.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:584c902ec19ab7928fd5add1783c909094cc53f31ac7acfada817b0847975f26"}, - {file = "orjson-3.10.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5a35455cc0b0b3a1eaf67224035f5388591ec72b9b6136d66b49a553ce9eb1e6"}, - {file = "orjson-3.10.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1670fe88b116c2745a3a30b0f099b699a02bb3482c2591514baf5433819e4f4d"}, - {file = "orjson-3.10.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:185c394ef45b18b9a7d8e8f333606e2e8194a50c6e3c664215aae8cf42c5385e"}, - {file = "orjson-3.10.5-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ca0b3a94ac8d3886c9581b9f9de3ce858263865fdaa383fbc31c310b9eac07c9"}, - {file = "orjson-3.10.5-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:dfc91d4720d48e2a709e9c368d5125b4b5899dced34b5400c3837dadc7d6271b"}, - {file = "orjson-3.10.5-cp38-none-win32.whl", hash = "sha256:c05f16701ab2a4ca146d0bca950af254cb7c02f3c01fca8efbbad82d23b3d9d4"}, - {file = "orjson-3.10.5-cp38-none-win_amd64.whl", hash = "sha256:8a11d459338f96a9aa7f232ba95679fc0c7cedbd1b990d736467894210205c09"}, - {file = "orjson-3.10.5-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:85c89131d7b3218db1b24c4abecea92fd6c7f9fab87441cfc342d3acc725d807"}, - {file = "orjson-3.10.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb66215277a230c456f9038d5e2d84778141643207f85336ef8d2a9da26bd7ca"}, - {file = "orjson-3.10.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:51bbcdea96cdefa4a9b4461e690c75ad4e33796530d182bdd5c38980202c134a"}, - {file = "orjson-3.10.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbead71dbe65f959b7bd8cf91e0e11d5338033eba34c114f69078d59827ee139"}, - {file = "orjson-3.10.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5df58d206e78c40da118a8c14fc189207fffdcb1f21b3b4c9c0c18e839b5a214"}, - {file = "orjson-3.10.5-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c4057c3b511bb8aef605616bd3f1f002a697c7e4da6adf095ca5b84c0fd43595"}, - {file = "orjson-3.10.5-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b39e006b00c57125ab974362e740c14a0c6a66ff695bff44615dcf4a70ce2b86"}, - {file = "orjson-3.10.5-cp39-none-win32.whl", hash = "sha256:eded5138cc565a9d618e111c6d5c2547bbdd951114eb822f7f6309e04db0fb47"}, - {file = "orjson-3.10.5-cp39-none-win_amd64.whl", hash = "sha256:cc28e90a7cae7fcba2493953cff61da5a52950e78dc2dacfe931a317ee3d8de7"}, - {file = "orjson-3.10.5.tar.gz", hash = "sha256:7a5baef8a4284405d96c90c7c62b755e9ef1ada84c2406c24a9ebec86b89f46d"}, + {file = "orjson-3.10.6-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:fb0ee33124db6eaa517d00890fc1a55c3bfe1cf78ba4a8899d71a06f2d6ff5c7"}, + {file = 
"orjson-3.10.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c1c4b53b24a4c06547ce43e5fee6ec4e0d8fe2d597f4647fc033fd205707365"}, + {file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eadc8fd310edb4bdbd333374f2c8fec6794bbbae99b592f448d8214a5e4050c0"}, + {file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:61272a5aec2b2661f4fa2b37c907ce9701e821b2c1285d5c3ab0207ebd358d38"}, + {file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57985ee7e91d6214c837936dc1608f40f330a6b88bb13f5a57ce5257807da143"}, + {file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:633a3b31d9d7c9f02d49c4ab4d0a86065c4a6f6adc297d63d272e043472acab5"}, + {file = "orjson-3.10.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:1c680b269d33ec444afe2bdc647c9eb73166fa47a16d9a75ee56a374f4a45f43"}, + {file = "orjson-3.10.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f759503a97a6ace19e55461395ab0d618b5a117e8d0fbb20e70cfd68a47327f2"}, + {file = "orjson-3.10.6-cp310-none-win32.whl", hash = "sha256:95a0cce17f969fb5391762e5719575217bd10ac5a189d1979442ee54456393f3"}, + {file = "orjson-3.10.6-cp310-none-win_amd64.whl", hash = "sha256:df25d9271270ba2133cc88ee83c318372bdc0f2cd6f32e7a450809a111efc45c"}, + {file = "orjson-3.10.6-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:b1ec490e10d2a77c345def52599311849fc063ae0e67cf4f84528073152bb2ba"}, + {file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55d43d3feb8f19d07e9f01e5b9be4f28801cf7c60d0fa0d279951b18fae1932b"}, + {file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac3045267e98fe749408eee1593a142e02357c5c99be0802185ef2170086a863"}, + {file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c27bc6a28ae95923350ab382c57113abd38f3928af3c80be6f2ba7eb8d8db0b0"}, + {file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d27456491ca79532d11e507cadca37fb8c9324a3976294f68fb1eff2dc6ced5a"}, + {file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05ac3d3916023745aa3b3b388e91b9166be1ca02b7c7e41045da6d12985685f0"}, + {file = "orjson-3.10.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1335d4ef59ab85cab66fe73fd7a4e881c298ee7f63ede918b7faa1b27cbe5212"}, + {file = "orjson-3.10.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4bbc6d0af24c1575edc79994c20e1b29e6fb3c6a570371306db0993ecf144dc5"}, + {file = "orjson-3.10.6-cp311-none-win32.whl", hash = "sha256:450e39ab1f7694465060a0550b3f6d328d20297bf2e06aa947b97c21e5241fbd"}, + {file = "orjson-3.10.6-cp311-none-win_amd64.whl", hash = "sha256:227df19441372610b20e05bdb906e1742ec2ad7a66ac8350dcfd29a63014a83b"}, + {file = "orjson-3.10.6-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:ea2977b21f8d5d9b758bb3f344a75e55ca78e3ff85595d248eee813ae23ecdfb"}, + {file = "orjson-3.10.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b6f3d167d13a16ed263b52dbfedff52c962bfd3d270b46b7518365bcc2121eed"}, + {file = "orjson-3.10.6-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f710f346e4c44a4e8bdf23daa974faede58f83334289df80bc9cd12fe82573c7"}, + {file = 
"orjson-3.10.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7275664f84e027dcb1ad5200b8b18373e9c669b2a9ec33d410c40f5ccf4b257e"}, + {file = "orjson-3.10.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0943e4c701196b23c240b3d10ed8ecd674f03089198cf503105b474a4f77f21f"}, + {file = "orjson-3.10.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:446dee5a491b5bc7d8f825d80d9637e7af43f86a331207b9c9610e2f93fee22a"}, + {file = "orjson-3.10.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:64c81456d2a050d380786413786b057983892db105516639cb5d3ee3c7fd5148"}, + {file = "orjson-3.10.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:960db0e31c4e52fa0fc3ecbaea5b2d3b58f379e32a95ae6b0ebeaa25b93dfd34"}, + {file = "orjson-3.10.6-cp312-none-win32.whl", hash = "sha256:a6ea7afb5b30b2317e0bee03c8d34c8181bc5a36f2afd4d0952f378972c4efd5"}, + {file = "orjson-3.10.6-cp312-none-win_amd64.whl", hash = "sha256:874ce88264b7e655dde4aeaacdc8fd772a7962faadfb41abe63e2a4861abc3dc"}, + {file = "orjson-3.10.6-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:66680eae4c4e7fc193d91cfc1353ad6d01b4801ae9b5314f17e11ba55e934183"}, + {file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:caff75b425db5ef8e8f23af93c80f072f97b4fb3afd4af44482905c9f588da28"}, + {file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3722fddb821b6036fd2a3c814f6bd9b57a89dc6337b9924ecd614ebce3271394"}, + {file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2c116072a8533f2fec435fde4d134610f806bdac20188c7bd2081f3e9e0133f"}, + {file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6eeb13218c8cf34c61912e9df2de2853f1d009de0e46ea09ccdf3d757896af0a"}, + {file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:965a916373382674e323c957d560b953d81d7a8603fbeee26f7b8248638bd48b"}, + {file = "orjson-3.10.6-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:03c95484d53ed8e479cade8628c9cea00fd9d67f5554764a1110e0d5aa2de96e"}, + {file = "orjson-3.10.6-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:e060748a04cccf1e0a6f2358dffea9c080b849a4a68c28b1b907f272b5127e9b"}, + {file = "orjson-3.10.6-cp38-none-win32.whl", hash = "sha256:738dbe3ef909c4b019d69afc19caf6b5ed0e2f1c786b5d6215fbb7539246e4c6"}, + {file = "orjson-3.10.6-cp38-none-win_amd64.whl", hash = "sha256:d40f839dddf6a7d77114fe6b8a70218556408c71d4d6e29413bb5f150a692ff7"}, + {file = "orjson-3.10.6-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:697a35a083c4f834807a6232b3e62c8b280f7a44ad0b759fd4dce748951e70db"}, + {file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd502f96bf5ea9a61cbc0b2b5900d0dd68aa0da197179042bdd2be67e51a1e4b"}, + {file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f215789fb1667cdc874c1b8af6a84dc939fd802bf293a8334fce185c79cd359b"}, + {file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a2debd8ddce948a8c0938c8c93ade191d2f4ba4649a54302a7da905a81f00b56"}, + {file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5410111d7b6681d4b0d65e0f58a13be588d01b473822483f77f513c7f93bd3b2"}, + {file = 
"orjson-3.10.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb1f28a137337fdc18384079fa5726810681055b32b92253fa15ae5656e1dddb"}, + {file = "orjson-3.10.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:bf2fbbce5fe7cd1aa177ea3eab2b8e6a6bc6e8592e4279ed3db2d62e57c0e1b2"}, + {file = "orjson-3.10.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:79b9b9e33bd4c517445a62b90ca0cc279b0f1f3970655c3df9e608bc3f91741a"}, + {file = "orjson-3.10.6-cp39-none-win32.whl", hash = "sha256:30b0a09a2014e621b1adf66a4f705f0809358350a757508ee80209b2d8dae219"}, + {file = "orjson-3.10.6-cp39-none-win_amd64.whl", hash = "sha256:49e3bc615652617d463069f91b867a4458114c5b104e13b7ae6872e5f79d0844"}, + {file = "orjson-3.10.6.tar.gz", hash = "sha256:e54b63d0a7c6c54a5f5f726bc93a2078111ef060fec4ecbf34c5db800ca3b3a7"}, ] [[package]] @@ -1448,84 +1453,95 @@ files = [ [[package]] name = "pillow" -version = "10.3.0" +version = "10.4.0" description = "Python Imaging Library (Fork)" optional = false python-versions = ">=3.8" files = [ - {file = "pillow-10.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:90b9e29824800e90c84e4022dd5cc16eb2d9605ee13f05d47641eb183cd73d45"}, - {file = "pillow-10.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a2c405445c79c3f5a124573a051062300936b0281fee57637e706453e452746c"}, - {file = "pillow-10.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78618cdbccaa74d3f88d0ad6cb8ac3007f1a6fa5c6f19af64b55ca170bfa1edf"}, - {file = "pillow-10.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:261ddb7ca91fcf71757979534fb4c128448b5b4c55cb6152d280312062f69599"}, - {file = "pillow-10.3.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:ce49c67f4ea0609933d01c0731b34b8695a7a748d6c8d186f95e7d085d2fe475"}, - {file = "pillow-10.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b14f16f94cbc61215115b9b1236f9c18403c15dd3c52cf629072afa9d54c1cbf"}, - {file = "pillow-10.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d33891be6df59d93df4d846640f0e46f1a807339f09e79a8040bc887bdcd7ed3"}, - {file = "pillow-10.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b50811d664d392f02f7761621303eba9d1b056fb1868c8cdf4231279645c25f5"}, - {file = "pillow-10.3.0-cp310-cp310-win32.whl", hash = "sha256:ca2870d5d10d8726a27396d3ca4cf7976cec0f3cb706debe88e3a5bd4610f7d2"}, - {file = "pillow-10.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:f0d0591a0aeaefdaf9a5e545e7485f89910c977087e7de2b6c388aec32011e9f"}, - {file = "pillow-10.3.0-cp310-cp310-win_arm64.whl", hash = "sha256:ccce24b7ad89adb5a1e34a6ba96ac2530046763912806ad4c247356a8f33a67b"}, - {file = "pillow-10.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:5f77cf66e96ae734717d341c145c5949c63180842a545c47a0ce7ae52ca83795"}, - {file = "pillow-10.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e4b878386c4bf293578b48fc570b84ecfe477d3b77ba39a6e87150af77f40c57"}, - {file = "pillow-10.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdcbb4068117dfd9ce0138d068ac512843c52295ed996ae6dd1faf537b6dbc27"}, - {file = "pillow-10.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9797a6c8fe16f25749b371c02e2ade0efb51155e767a971c61734b1bf6293994"}, - {file = "pillow-10.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:9e91179a242bbc99be65e139e30690e081fe6cb91a8e77faf4c409653de39451"}, - {file = "pillow-10.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = 
"sha256:1b87bd9d81d179bd8ab871603bd80d8645729939f90b71e62914e816a76fc6bd"}, - {file = "pillow-10.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:81d09caa7b27ef4e61cb7d8fbf1714f5aec1c6b6c5270ee53504981e6e9121ad"}, - {file = "pillow-10.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:048ad577748b9fa4a99a0548c64f2cb8d672d5bf2e643a739ac8faff1164238c"}, - {file = "pillow-10.3.0-cp311-cp311-win32.whl", hash = "sha256:7161ec49ef0800947dc5570f86568a7bb36fa97dd09e9827dc02b718c5643f09"}, - {file = "pillow-10.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:8eb0908e954d093b02a543dc963984d6e99ad2b5e36503d8a0aaf040505f747d"}, - {file = "pillow-10.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:4e6f7d1c414191c1199f8996d3f2282b9ebea0945693fb67392c75a3a320941f"}, - {file = "pillow-10.3.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:e46f38133e5a060d46bd630faa4d9fa0202377495df1f068a8299fd78c84de84"}, - {file = "pillow-10.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:50b8eae8f7334ec826d6eeffaeeb00e36b5e24aa0b9df322c247539714c6df19"}, - {file = "pillow-10.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d3bea1c75f8c53ee4d505c3e67d8c158ad4df0d83170605b50b64025917f338"}, - {file = "pillow-10.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19aeb96d43902f0a783946a0a87dbdad5c84c936025b8419da0a0cd7724356b1"}, - {file = "pillow-10.3.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:74d28c17412d9caa1066f7a31df8403ec23d5268ba46cd0ad2c50fb82ae40462"}, - {file = "pillow-10.3.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:ff61bfd9253c3915e6d41c651d5f962da23eda633cf02262990094a18a55371a"}, - {file = "pillow-10.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d886f5d353333b4771d21267c7ecc75b710f1a73d72d03ca06df49b09015a9ef"}, - {file = "pillow-10.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4b5ec25d8b17217d635f8935dbc1b9aa5907962fae29dff220f2659487891cd3"}, - {file = "pillow-10.3.0-cp312-cp312-win32.whl", hash = "sha256:51243f1ed5161b9945011a7360e997729776f6e5d7005ba0c6879267d4c5139d"}, - {file = "pillow-10.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:412444afb8c4c7a6cc11a47dade32982439925537e483be7c0ae0cf96c4f6a0b"}, - {file = "pillow-10.3.0-cp312-cp312-win_arm64.whl", hash = "sha256:798232c92e7665fe82ac085f9d8e8ca98826f8e27859d9a96b41d519ecd2e49a"}, - {file = "pillow-10.3.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:4eaa22f0d22b1a7e93ff0a596d57fdede2e550aecffb5a1ef1106aaece48e96b"}, - {file = "pillow-10.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cd5e14fbf22a87321b24c88669aad3a51ec052eb145315b3da3b7e3cc105b9a2"}, - {file = "pillow-10.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1530e8f3a4b965eb6a7785cf17a426c779333eb62c9a7d1bbcf3ffd5bf77a4aa"}, - {file = "pillow-10.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d512aafa1d32efa014fa041d38868fda85028e3f930a96f85d49c7d8ddc0383"}, - {file = "pillow-10.3.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:339894035d0ede518b16073bdc2feef4c991ee991a29774b33e515f1d308e08d"}, - {file = "pillow-10.3.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:aa7e402ce11f0885305bfb6afb3434b3cd8f53b563ac065452d9d5654c7b86fd"}, - {file = "pillow-10.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0ea2a783a2bdf2a561808fe4a7a12e9aa3799b701ba305de596bc48b8bdfce9d"}, - {file = "pillow-10.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:c78e1b00a87ce43bb37642c0812315b411e856a905d58d597750eb79802aaaa3"}, - {file = "pillow-10.3.0-cp38-cp38-win32.whl", hash = "sha256:72d622d262e463dfb7595202d229f5f3ab4b852289a1cd09650362db23b9eb0b"}, - {file = "pillow-10.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:2034f6759a722da3a3dbd91a81148cf884e91d1b747992ca288ab88c1de15999"}, - {file = "pillow-10.3.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:2ed854e716a89b1afcedea551cd85f2eb2a807613752ab997b9974aaa0d56936"}, - {file = "pillow-10.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dc1a390a82755a8c26c9964d457d4c9cbec5405896cba94cf51f36ea0d855002"}, - {file = "pillow-10.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4203efca580f0dd6f882ca211f923168548f7ba334c189e9eab1178ab840bf60"}, - {file = "pillow-10.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3102045a10945173d38336f6e71a8dc71bcaeed55c3123ad4af82c52807b9375"}, - {file = "pillow-10.3.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:6fb1b30043271ec92dc65f6d9f0b7a830c210b8a96423074b15c7bc999975f57"}, - {file = "pillow-10.3.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:1dfc94946bc60ea375cc39cff0b8da6c7e5f8fcdc1d946beb8da5c216156ddd8"}, - {file = "pillow-10.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b09b86b27a064c9624d0a6c54da01c1beaf5b6cadfa609cf63789b1d08a797b9"}, - {file = "pillow-10.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d3b2348a78bc939b4fed6552abfd2e7988e0f81443ef3911a4b8498ca084f6eb"}, - {file = "pillow-10.3.0-cp39-cp39-win32.whl", hash = "sha256:45ebc7b45406febf07fef35d856f0293a92e7417ae7933207e90bf9090b70572"}, - {file = "pillow-10.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:0ba26351b137ca4e0db0342d5d00d2e355eb29372c05afd544ebf47c0956ffeb"}, - {file = "pillow-10.3.0-cp39-cp39-win_arm64.whl", hash = "sha256:50fd3f6b26e3441ae07b7c979309638b72abc1a25da31a81a7fbd9495713ef4f"}, - {file = "pillow-10.3.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:6b02471b72526ab8a18c39cb7967b72d194ec53c1fd0a70b050565a0f366d355"}, - {file = "pillow-10.3.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8ab74c06ffdab957d7670c2a5a6e1a70181cd10b727cd788c4dd9005b6a8acd9"}, - {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:048eeade4c33fdf7e08da40ef402e748df113fd0b4584e32c4af74fe78baaeb2"}, - {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e2ec1e921fd07c7cda7962bad283acc2f2a9ccc1b971ee4b216b75fad6f0463"}, - {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c8e73e99da7db1b4cad7f8d682cf6abad7844da39834c288fbfa394a47bbced"}, - {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:16563993329b79513f59142a6b02055e10514c1a8e86dca8b48a893e33cf91e3"}, - {file = "pillow-10.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:dd78700f5788ae180b5ee8902c6aea5a5726bac7c364b202b4b3e3ba2d293170"}, - {file = "pillow-10.3.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:aff76a55a8aa8364d25400a210a65ff59d0168e0b4285ba6bf2bd83cf675ba32"}, - {file = "pillow-10.3.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:b7bc2176354defba3edc2b9a777744462da2f8e921fbaf61e52acb95bafa9828"}, - {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:793b4e24db2e8742ca6423d3fde8396db336698c55cd34b660663ee9e45ed37f"}, - {file = 
"pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d93480005693d247f8346bc8ee28c72a2191bdf1f6b5db469c096c0c867ac015"}, - {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c83341b89884e2b2e55886e8fbbf37c3fa5efd6c8907124aeb72f285ae5696e5"}, - {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1a1d1915db1a4fdb2754b9de292642a39a7fb28f1736699527bb649484fb966a"}, - {file = "pillow-10.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a0eaa93d054751ee9964afa21c06247779b90440ca41d184aeb5d410f20ff591"}, - {file = "pillow-10.3.0.tar.gz", hash = "sha256:9d2455fbf44c914840c793e89aa82d0e1763a14253a000743719ae5946814b2d"}, + {file = "pillow-10.4.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:4d9667937cfa347525b319ae34375c37b9ee6b525440f3ef48542fcf66f2731e"}, + {file = "pillow-10.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:543f3dc61c18dafb755773efc89aae60d06b6596a63914107f75459cf984164d"}, + {file = "pillow-10.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7928ecbf1ece13956b95d9cbcfc77137652b02763ba384d9ab508099a2eca856"}, + {file = "pillow-10.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4d49b85c4348ea0b31ea63bc75a9f3857869174e2bf17e7aba02945cd218e6f"}, + {file = "pillow-10.4.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:6c762a5b0997f5659a5ef2266abc1d8851ad7749ad9a6a5506eb23d314e4f46b"}, + {file = "pillow-10.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a985e028fc183bf12a77a8bbf36318db4238a3ded7fa9df1b9a133f1cb79f8fc"}, + {file = "pillow-10.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:812f7342b0eee081eaec84d91423d1b4650bb9828eb53d8511bcef8ce5aecf1e"}, + {file = "pillow-10.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ac1452d2fbe4978c2eec89fb5a23b8387aba707ac72810d9490118817d9c0b46"}, + {file = "pillow-10.4.0-cp310-cp310-win32.whl", hash = "sha256:bcd5e41a859bf2e84fdc42f4edb7d9aba0a13d29a2abadccafad99de3feff984"}, + {file = "pillow-10.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:ecd85a8d3e79cd7158dec1c9e5808e821feea088e2f69a974db5edf84dc53141"}, + {file = "pillow-10.4.0-cp310-cp310-win_arm64.whl", hash = "sha256:ff337c552345e95702c5fde3158acb0625111017d0e5f24bf3acdb9cc16b90d1"}, + {file = "pillow-10.4.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:0a9ec697746f268507404647e531e92889890a087e03681a3606d9b920fbee3c"}, + {file = "pillow-10.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dfe91cb65544a1321e631e696759491ae04a2ea11d36715eca01ce07284738be"}, + {file = "pillow-10.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dc6761a6efc781e6a1544206f22c80c3af4c8cf461206d46a1e6006e4429ff3"}, + {file = "pillow-10.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e84b6cc6a4a3d76c153a6b19270b3526a5a8ed6b09501d3af891daa2a9de7d6"}, + {file = "pillow-10.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:bbc527b519bd3aa9d7f429d152fea69f9ad37c95f0b02aebddff592688998abe"}, + {file = "pillow-10.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:76a911dfe51a36041f2e756b00f96ed84677cdeb75d25c767f296c1c1eda1319"}, + {file = "pillow-10.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:59291fb29317122398786c2d44427bbd1a6d7ff54017075b22be9d21aa59bd8d"}, + {file = "pillow-10.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:416d3a5d0e8cfe4f27f574362435bc9bae57f679a7158e0096ad2beb427b8696"}, + {file = "pillow-10.4.0-cp311-cp311-win32.whl", hash = "sha256:7086cc1d5eebb91ad24ded9f58bec6c688e9f0ed7eb3dbbf1e4800280a896496"}, + {file = "pillow-10.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cbed61494057c0f83b83eb3a310f0bf774b09513307c434d4366ed64f4128a91"}, + {file = "pillow-10.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:f5f0c3e969c8f12dd2bb7e0b15d5c468b51e5017e01e2e867335c81903046a22"}, + {file = "pillow-10.4.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:673655af3eadf4df6b5457033f086e90299fdd7a47983a13827acf7459c15d94"}, + {file = "pillow-10.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:866b6942a92f56300012f5fbac71f2d610312ee65e22f1aa2609e491284e5597"}, + {file = "pillow-10.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29dbdc4207642ea6aad70fbde1a9338753d33fb23ed6956e706936706f52dd80"}, + {file = "pillow-10.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf2342ac639c4cf38799a44950bbc2dfcb685f052b9e262f446482afaf4bffca"}, + {file = "pillow-10.4.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:f5b92f4d70791b4a67157321c4e8225d60b119c5cc9aee8ecf153aace4aad4ef"}, + {file = "pillow-10.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:86dcb5a1eb778d8b25659d5e4341269e8590ad6b4e8b44d9f4b07f8d136c414a"}, + {file = "pillow-10.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:780c072c2e11c9b2c7ca37f9a2ee8ba66f44367ac3e5c7832afcfe5104fd6d1b"}, + {file = "pillow-10.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:37fb69d905be665f68f28a8bba3c6d3223c8efe1edf14cc4cfa06c241f8c81d9"}, + {file = "pillow-10.4.0-cp312-cp312-win32.whl", hash = "sha256:7dfecdbad5c301d7b5bde160150b4db4c659cee2b69589705b6f8a0c509d9f42"}, + {file = "pillow-10.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:1d846aea995ad352d4bdcc847535bd56e0fd88d36829d2c90be880ef1ee4668a"}, + {file = "pillow-10.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:e553cad5179a66ba15bb18b353a19020e73a7921296a7979c4a2b7f6a5cd57f9"}, + {file = "pillow-10.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8bc1a764ed8c957a2e9cacf97c8b2b053b70307cf2996aafd70e91a082e70df3"}, + {file = "pillow-10.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6209bb41dc692ddfee4942517c19ee81b86c864b626dbfca272ec0f7cff5d9fb"}, + {file = "pillow-10.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bee197b30783295d2eb680b311af15a20a8b24024a19c3a26431ff83eb8d1f70"}, + {file = "pillow-10.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ef61f5dd14c300786318482456481463b9d6b91ebe5ef12f405afbba77ed0be"}, + {file = "pillow-10.4.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:297e388da6e248c98bc4a02e018966af0c5f92dfacf5a5ca22fa01cb3179bca0"}, + {file = "pillow-10.4.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:e4db64794ccdf6cb83a59d73405f63adbe2a1887012e308828596100a0b2f6cc"}, + {file = "pillow-10.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd2880a07482090a3bcb01f4265f1936a903d70bc740bfcb1fd4e8a2ffe5cf5a"}, + {file = "pillow-10.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b35b21b819ac1dbd1233317adeecd63495f6babf21b7b2512d244ff6c6ce309"}, + {file = "pillow-10.4.0-cp313-cp313-win32.whl", hash = "sha256:551d3fd6e9dc15e4c1eb6fc4ba2b39c0c7933fa113b220057a34f4bb3268a060"}, + {file = "pillow-10.4.0-cp313-cp313-win_amd64.whl", hash = 
"sha256:030abdbe43ee02e0de642aee345efa443740aa4d828bfe8e2eb11922ea6a21ea"}, + {file = "pillow-10.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:5b001114dd152cfd6b23befeb28d7aee43553e2402c9f159807bf55f33af8a8d"}, + {file = "pillow-10.4.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:8d4d5063501b6dd4024b8ac2f04962d661222d120381272deea52e3fc52d3736"}, + {file = "pillow-10.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7c1ee6f42250df403c5f103cbd2768a28fe1a0ea1f0f03fe151c8741e1469c8b"}, + {file = "pillow-10.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b15e02e9bb4c21e39876698abf233c8c579127986f8207200bc8a8f6bb27acf2"}, + {file = "pillow-10.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a8d4bade9952ea9a77d0c3e49cbd8b2890a399422258a77f357b9cc9be8d680"}, + {file = "pillow-10.4.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:43efea75eb06b95d1631cb784aa40156177bf9dd5b4b03ff38979e048258bc6b"}, + {file = "pillow-10.4.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:950be4d8ba92aca4b2bb0741285a46bfae3ca699ef913ec8416c1b78eadd64cd"}, + {file = "pillow-10.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d7480af14364494365e89d6fddc510a13e5a2c3584cb19ef65415ca57252fb84"}, + {file = "pillow-10.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:73664fe514b34c8f02452ffb73b7a92c6774e39a647087f83d67f010eb9a0cf0"}, + {file = "pillow-10.4.0-cp38-cp38-win32.whl", hash = "sha256:e88d5e6ad0d026fba7bdab8c3f225a69f063f116462c49892b0149e21b6c0a0e"}, + {file = "pillow-10.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:5161eef006d335e46895297f642341111945e2c1c899eb406882a6c61a4357ab"}, + {file = "pillow-10.4.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:0ae24a547e8b711ccaaf99c9ae3cd975470e1a30caa80a6aaee9a2f19c05701d"}, + {file = "pillow-10.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:298478fe4f77a4408895605f3482b6cc6222c018b2ce565c2b6b9c354ac3229b"}, + {file = "pillow-10.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:134ace6dc392116566980ee7436477d844520a26a4b1bd4053f6f47d096997fd"}, + {file = "pillow-10.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:930044bb7679ab003b14023138b50181899da3f25de50e9dbee23b61b4de2126"}, + {file = "pillow-10.4.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:c76e5786951e72ed3686e122d14c5d7012f16c8303a674d18cdcd6d89557fc5b"}, + {file = "pillow-10.4.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b2724fdb354a868ddf9a880cb84d102da914e99119211ef7ecbdc613b8c96b3c"}, + {file = "pillow-10.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dbc6ae66518ab3c5847659e9988c3b60dc94ffb48ef9168656e0019a93dbf8a1"}, + {file = "pillow-10.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:06b2f7898047ae93fad74467ec3d28fe84f7831370e3c258afa533f81ef7f3df"}, + {file = "pillow-10.4.0-cp39-cp39-win32.whl", hash = "sha256:7970285ab628a3779aecc35823296a7869f889b8329c16ad5a71e4901a3dc4ef"}, + {file = "pillow-10.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:961a7293b2457b405967af9c77dcaa43cc1a8cd50d23c532e62d48ab6cdd56f5"}, + {file = "pillow-10.4.0-cp39-cp39-win_arm64.whl", hash = "sha256:32cda9e3d601a52baccb2856b8ea1fc213c90b340c542dcef77140dfa3278a9e"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5b4815f2e65b30f5fbae9dfffa8636d992d49705723fe86a3661806e069352d4"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:8f0aef4ef59694b12cadee839e2ba6afeab89c0f39a3adc02ed51d109117b8da"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f4727572e2918acaa9077c919cbbeb73bd2b3ebcfe033b72f858fc9fbef0026"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff25afb18123cea58a591ea0244b92eb1e61a1fd497bf6d6384f09bc3262ec3e"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dc3e2db6ba09ffd7d02ae9141cfa0ae23393ee7687248d46a7507b75d610f4f5"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:02a2be69f9c9b8c1e97cf2713e789d4e398c751ecfd9967c18d0ce304efbf885"}, + {file = "pillow-10.4.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0755ffd4a0c6f267cccbae2e9903d95477ca2f77c4fcf3a3a09570001856c8a5"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:a02364621fe369e06200d4a16558e056fe2805d3468350df3aef21e00d26214b"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:1b5dea9831a90e9d0721ec417a80d4cbd7022093ac38a568db2dd78363b00908"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b885f89040bb8c4a1573566bbb2f44f5c505ef6e74cec7ab9068c900047f04b"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87dd88ded2e6d74d31e1e0a99a726a6765cda32d00ba72dc37f0651f306daaa8"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:2db98790afc70118bd0255c2eeb465e9767ecf1f3c25f9a1abb8ffc8cfd1fe0a"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f7baece4ce06bade126fb84b8af1c33439a76d8a6fd818970215e0560ca28c27"}, + {file = "pillow-10.4.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:cfdd747216947628af7b259d274771d84db2268ca062dd5faf373639d00113a3"}, + {file = "pillow-10.4.0.tar.gz", hash = "sha256:166c1cd4d24309b30d61f79f4a9114b7b2313d7450912277855ff5dfd7cd4a06"}, ] [package.extras] -docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"] +docs = ["furo", "olefile", "sphinx (>=7.3)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinxext-opengraph"] fpx = ["olefile"] mic = ["olefile"] tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] @@ -1697,109 +1713,122 @@ files = [ [[package]] name = "pydantic" -version = "2.7.4" +version = "2.8.2" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.7.4-py3-none-any.whl", hash = "sha256:ee8538d41ccb9c0a9ad3e0e5f07bf15ed8015b481ced539a1759d8cc89ae90d0"}, - {file = "pydantic-2.7.4.tar.gz", hash = "sha256:0c84efd9548d545f63ac0060c1e4d39bb9b14db8b3c0652338aecc07b5adec52"}, + {file = "pydantic-2.8.2-py3-none-any.whl", hash = "sha256:73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8"}, + {file = "pydantic-2.8.2.tar.gz", hash = "sha256:6f62c13d067b0755ad1c21a34bdd06c0c12625a22b0fc09c6b149816604f7c2a"}, ] [package.dependencies] annotated-types = ">=0.4.0" -pydantic-core = "2.18.4" -typing-extensions = ">=4.6.1" +pydantic-core = "2.20.1" +typing-extensions = [ + {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, + {version = ">=4.6.1", markers = "python_version < \"3.13\""}, +] [package.extras] 
email = ["email-validator (>=2.0.0)"] [[package]] name = "pydantic-core" -version = "2.18.4" +version = "2.20.1" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.18.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:f76d0ad001edd426b92233d45c746fd08f467d56100fd8f30e9ace4b005266e4"}, - {file = "pydantic_core-2.18.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:59ff3e89f4eaf14050c8022011862df275b552caef8082e37b542b066ce1ff26"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a55b5b16c839df1070bc113c1f7f94a0af4433fcfa1b41799ce7606e5c79ce0a"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4d0dcc59664fcb8974b356fe0a18a672d6d7cf9f54746c05f43275fc48636851"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8951eee36c57cd128f779e641e21eb40bc5073eb28b2d23f33eb0ef14ffb3f5d"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4701b19f7e3a06ea655513f7938de6f108123bf7c86bbebb1196eb9bd35cf724"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e00a3f196329e08e43d99b79b286d60ce46bed10f2280d25a1718399457e06be"}, - {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:97736815b9cc893b2b7f663628e63f436018b75f44854c8027040e05230eeddb"}, - {file = "pydantic_core-2.18.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6891a2ae0e8692679c07728819b6e2b822fb30ca7445f67bbf6509b25a96332c"}, - {file = "pydantic_core-2.18.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bc4ff9805858bd54d1a20efff925ccd89c9d2e7cf4986144b30802bf78091c3e"}, - {file = "pydantic_core-2.18.4-cp310-none-win32.whl", hash = "sha256:1b4de2e51bbcb61fdebd0ab86ef28062704f62c82bbf4addc4e37fa4b00b7cbc"}, - {file = "pydantic_core-2.18.4-cp310-none-win_amd64.whl", hash = "sha256:6a750aec7bf431517a9fd78cb93c97b9b0c496090fee84a47a0d23668976b4b0"}, - {file = "pydantic_core-2.18.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:942ba11e7dfb66dc70f9ae66b33452f51ac7bb90676da39a7345e99ffb55402d"}, - {file = "pydantic_core-2.18.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b2ebef0e0b4454320274f5e83a41844c63438fdc874ea40a8b5b4ecb7693f1c4"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a642295cd0c8df1b86fc3dced1d067874c353a188dc8e0f744626d49e9aa51c4"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f09baa656c904807e832cf9cce799c6460c450c4ad80803517032da0cd062e2"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98906207f29bc2c459ff64fa007afd10a8c8ac080f7e4d5beff4c97086a3dabd"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19894b95aacfa98e7cb093cd7881a0c76f55731efad31073db4521e2b6ff5b7d"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fbbdc827fe5e42e4d196c746b890b3d72876bdbf160b0eafe9f0334525119c8"}, - {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f85d05aa0918283cf29a30b547b4df2fbb56b45b135f9e35b6807cb28bc47951"}, - {file = 
"pydantic_core-2.18.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e85637bc8fe81ddb73fda9e56bab24560bdddfa98aa64f87aaa4e4b6730c23d2"}, - {file = "pydantic_core-2.18.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2f5966897e5461f818e136b8451d0551a2e77259eb0f73a837027b47dc95dab9"}, - {file = "pydantic_core-2.18.4-cp311-none-win32.whl", hash = "sha256:44c7486a4228413c317952e9d89598bcdfb06399735e49e0f8df643e1ccd0558"}, - {file = "pydantic_core-2.18.4-cp311-none-win_amd64.whl", hash = "sha256:8a7164fe2005d03c64fd3b85649891cd4953a8de53107940bf272500ba8a788b"}, - {file = "pydantic_core-2.18.4-cp311-none-win_arm64.whl", hash = "sha256:4e99bc050fe65c450344421017f98298a97cefc18c53bb2f7b3531eb39bc7805"}, - {file = "pydantic_core-2.18.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:6f5c4d41b2771c730ea1c34e458e781b18cc668d194958e0112455fff4e402b2"}, - {file = "pydantic_core-2.18.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2fdf2156aa3d017fddf8aea5adfba9f777db1d6022d392b682d2a8329e087cef"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4748321b5078216070b151d5271ef3e7cc905ab170bbfd27d5c83ee3ec436695"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:847a35c4d58721c5dc3dba599878ebbdfd96784f3fb8bb2c356e123bdcd73f34"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3c40d4eaad41f78e3bbda31b89edc46a3f3dc6e171bf0ecf097ff7a0ffff7cb1"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:21a5e440dbe315ab9825fcd459b8814bb92b27c974cbc23c3e8baa2b76890077"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01dd777215e2aa86dfd664daed5957704b769e726626393438f9c87690ce78c3"}, - {file = "pydantic_core-2.18.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4b06beb3b3f1479d32befd1f3079cc47b34fa2da62457cdf6c963393340b56e9"}, - {file = "pydantic_core-2.18.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:564d7922e4b13a16b98772441879fcdcbe82ff50daa622d681dd682175ea918c"}, - {file = "pydantic_core-2.18.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0eb2a4f660fcd8e2b1c90ad566db2b98d7f3f4717c64fe0a83e0adb39766d5b8"}, - {file = "pydantic_core-2.18.4-cp312-none-win32.whl", hash = "sha256:8b8bab4c97248095ae0c4455b5a1cd1cdd96e4e4769306ab19dda135ea4cdb07"}, - {file = "pydantic_core-2.18.4-cp312-none-win_amd64.whl", hash = "sha256:14601cdb733d741b8958224030e2bfe21a4a881fb3dd6fbb21f071cabd48fa0a"}, - {file = "pydantic_core-2.18.4-cp312-none-win_arm64.whl", hash = "sha256:c1322d7dd74713dcc157a2b7898a564ab091ca6c58302d5c7b4c07296e3fd00f"}, - {file = "pydantic_core-2.18.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:823be1deb01793da05ecb0484d6c9e20baebb39bd42b5d72636ae9cf8350dbd2"}, - {file = "pydantic_core-2.18.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ebef0dd9bf9b812bf75bda96743f2a6c5734a02092ae7f721c048d156d5fabae"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae1d6df168efb88d7d522664693607b80b4080be6750c913eefb77e34c12c71a"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f9899c94762343f2cc2fc64c13e7cae4c3cc65cdfc87dd810a31654c9b7358cc"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash 
= "sha256:99457f184ad90235cfe8461c4d70ab7dd2680e28821c29eca00252ba90308c78"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18f469a3d2a2fdafe99296a87e8a4c37748b5080a26b806a707f25a902c040a8"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7cdf28938ac6b8b49ae5e92f2735056a7ba99c9b110a474473fd71185c1af5d"}, - {file = "pydantic_core-2.18.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:938cb21650855054dc54dfd9120a851c974f95450f00683399006aa6e8abb057"}, - {file = "pydantic_core-2.18.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:44cd83ab6a51da80fb5adbd9560e26018e2ac7826f9626bc06ca3dc074cd198b"}, - {file = "pydantic_core-2.18.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:972658f4a72d02b8abfa2581d92d59f59897d2e9f7e708fdabe922f9087773af"}, - {file = "pydantic_core-2.18.4-cp38-none-win32.whl", hash = "sha256:1d886dc848e60cb7666f771e406acae54ab279b9f1e4143babc9c2258213daa2"}, - {file = "pydantic_core-2.18.4-cp38-none-win_amd64.whl", hash = "sha256:bb4462bd43c2460774914b8525f79b00f8f407c945d50881568f294c1d9b4443"}, - {file = "pydantic_core-2.18.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:44a688331d4a4e2129140a8118479443bd6f1905231138971372fcde37e43528"}, - {file = "pydantic_core-2.18.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a2fdd81edd64342c85ac7cf2753ccae0b79bf2dfa063785503cb85a7d3593223"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:86110d7e1907ab36691f80b33eb2da87d780f4739ae773e5fc83fb272f88825f"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:46387e38bd641b3ee5ce247563b60c5ca098da9c56c75c157a05eaa0933ed154"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:123c3cec203e3f5ac7b000bd82235f1a3eced8665b63d18be751f115588fea30"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dc1803ac5c32ec324c5261c7209e8f8ce88e83254c4e1aebdc8b0a39f9ddb443"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53db086f9f6ab2b4061958d9c276d1dbe3690e8dd727d6abf2321d6cce37fa94"}, - {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:abc267fa9837245cc28ea6929f19fa335f3dc330a35d2e45509b6566dc18be23"}, - {file = "pydantic_core-2.18.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a0d829524aaefdebccb869eed855e2d04c21d2d7479b6cada7ace5448416597b"}, - {file = "pydantic_core-2.18.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:509daade3b8649f80d4e5ff21aa5673e4ebe58590b25fe42fac5f0f52c6f034a"}, - {file = "pydantic_core-2.18.4-cp39-none-win32.whl", hash = "sha256:ca26a1e73c48cfc54c4a76ff78df3727b9d9f4ccc8dbee4ae3f73306a591676d"}, - {file = "pydantic_core-2.18.4-cp39-none-win_amd64.whl", hash = "sha256:c67598100338d5d985db1b3d21f3619ef392e185e71b8d52bceacc4a7771ea7e"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:574d92eac874f7f4db0ca653514d823a0d22e2354359d0759e3f6a406db5d55d"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1f4d26ceb5eb9eed4af91bebeae4b06c3fb28966ca3a8fb765208cf6b51102ab"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:77450e6d20016ec41f43ca4a6c63e9fdde03f0ae3fe90e7c27bdbeaece8b1ed4"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d323a01da91851a4f17bf592faf46149c9169d68430b3146dcba2bb5e5719abc"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43d447dd2ae072a0065389092a231283f62d960030ecd27565672bd40746c507"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:578e24f761f3b425834f297b9935e1ce2e30f51400964ce4801002435a1b41ef"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:81b5efb2f126454586d0f40c4d834010979cb80785173d1586df845a632e4e6d"}, - {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ab86ce7c8f9bea87b9d12c7f0af71102acbf5ecbc66c17796cff45dae54ef9a5"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:90afc12421df2b1b4dcc975f814e21bc1754640d502a2fbcc6d41e77af5ec312"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:51991a89639a912c17bef4b45c87bd83593aee0437d8102556af4885811d59f5"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:293afe532740370aba8c060882f7d26cfd00c94cae32fd2e212a3a6e3b7bc15e"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b48ece5bde2e768197a2d0f6e925f9d7e3e826f0ad2271120f8144a9db18d5c8"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:eae237477a873ab46e8dd748e515c72c0c804fb380fbe6c85533c7de51f23a8f"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:834b5230b5dfc0c1ec37b2fda433b271cbbc0e507560b5d1588e2cc1148cf1ce"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e858ac0a25074ba4bce653f9b5d0a85b7456eaddadc0ce82d3878c22489fa4ee"}, - {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2fd41f6eff4c20778d717af1cc50eca52f5afe7805ee530a4fbd0bae284f16e9"}, - {file = "pydantic_core-2.18.4.tar.gz", hash = "sha256:ec3beeada09ff865c344ff3bc2f427f5e6c26401cc6113d77e372c3fdac73864"}, + {file = "pydantic_core-2.20.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3acae97ffd19bf091c72df4d726d552c473f3576409b2a7ca36b2f535ffff4a3"}, + {file = "pydantic_core-2.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:41f4c96227a67a013e7de5ff8f20fb496ce573893b7f4f2707d065907bffdbd6"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f239eb799a2081495ea659d8d4a43a8f42cd1fe9ff2e7e436295c38a10c286a"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53e431da3fc53360db73eedf6f7124d1076e1b4ee4276b36fb25514544ceb4a3"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1f62b2413c3a0e846c3b838b2ecd6c7a19ec6793b2a522745b0869e37ab5bc1"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d41e6daee2813ecceea8eda38062d69e280b39df793f5a942fa515b8ed67953"}, + {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d482efec8b7dc6bfaedc0f166b2ce349df0011f5d2f1f25537ced4cfc34fd98"}, + {file = 
"pydantic_core-2.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e93e1a4b4b33daed65d781a57a522ff153dcf748dee70b40c7258c5861e1768a"}, + {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7c4ea22b6739b162c9ecaaa41d718dfad48a244909fe7ef4b54c0b530effc5a"}, + {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4f2790949cf385d985a31984907fecb3896999329103df4e4983a4a41e13e840"}, + {file = "pydantic_core-2.20.1-cp310-none-win32.whl", hash = "sha256:5e999ba8dd90e93d57410c5e67ebb67ffcaadcea0ad973240fdfd3a135506250"}, + {file = "pydantic_core-2.20.1-cp310-none-win_amd64.whl", hash = "sha256:512ecfbefef6dac7bc5eaaf46177b2de58cdf7acac8793fe033b24ece0b9566c"}, + {file = "pydantic_core-2.20.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d2a8fa9d6d6f891f3deec72f5cc668e6f66b188ab14bb1ab52422fe8e644f312"}, + {file = "pydantic_core-2.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:175873691124f3d0da55aeea1d90660a6ea7a3cfea137c38afa0a5ffabe37b88"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37eee5b638f0e0dcd18d21f59b679686bbd18917b87db0193ae36f9c23c355fc"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25e9185e2d06c16ee438ed39bf62935ec436474a6ac4f9358524220f1b236e43"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:150906b40ff188a3260cbee25380e7494ee85048584998c1e66df0c7a11c17a6"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ad4aeb3e9a97286573c03df758fc7627aecdd02f1da04516a86dc159bf70121"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3f3ed29cd9f978c604708511a1f9c2fdcb6c38b9aae36a51905b8811ee5cbf1"}, + {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0dae11d8f5ded51699c74d9548dcc5938e0804cc8298ec0aa0da95c21fff57b"}, + {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:faa6b09ee09433b87992fb5a2859efd1c264ddc37280d2dd5db502126d0e7f27"}, + {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9dc1b507c12eb0481d071f3c1808f0529ad41dc415d0ca11f7ebfc666e66a18b"}, + {file = "pydantic_core-2.20.1-cp311-none-win32.whl", hash = "sha256:fa2fddcb7107e0d1808086ca306dcade7df60a13a6c347a7acf1ec139aa6789a"}, + {file = "pydantic_core-2.20.1-cp311-none-win_amd64.whl", hash = "sha256:40a783fb7ee353c50bd3853e626f15677ea527ae556429453685ae32280c19c2"}, + {file = "pydantic_core-2.20.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:595ba5be69b35777474fa07f80fc260ea71255656191adb22a8c53aba4479231"}, + {file = "pydantic_core-2.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a4f55095ad087474999ee28d3398bae183a66be4823f753cd7d67dd0153427c9"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9aa05d09ecf4c75157197f27cdc9cfaeb7c5f15021c6373932bf3e124af029f"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e97fdf088d4b31ff4ba35db26d9cc472ac7ef4a2ff2badeabf8d727b3377fc52"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc633a9fe1eb87e250b5c57d389cf28998e4292336926b0b6cdaee353f89a237"}, + {file = 
"pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d573faf8eb7e6b1cbbcb4f5b247c60ca8be39fe2c674495df0eb4318303137fe"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26dc97754b57d2fd00ac2b24dfa341abffc380b823211994c4efac7f13b9e90e"}, + {file = "pydantic_core-2.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:33499e85e739a4b60c9dac710c20a08dc73cb3240c9a0e22325e671b27b70d24"}, + {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bebb4d6715c814597f85297c332297c6ce81e29436125ca59d1159b07f423eb1"}, + {file = "pydantic_core-2.20.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:516d9227919612425c8ef1c9b869bbbee249bc91912c8aaffb66116c0b447ebd"}, + {file = "pydantic_core-2.20.1-cp312-none-win32.whl", hash = "sha256:469f29f9093c9d834432034d33f5fe45699e664f12a13bf38c04967ce233d688"}, + {file = "pydantic_core-2.20.1-cp312-none-win_amd64.whl", hash = "sha256:035ede2e16da7281041f0e626459bcae33ed998cca6a0a007a5ebb73414ac72d"}, + {file = "pydantic_core-2.20.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:0827505a5c87e8aa285dc31e9ec7f4a17c81a813d45f70b1d9164e03a813a686"}, + {file = "pydantic_core-2.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:19c0fa39fa154e7e0b7f82f88ef85faa2a4c23cc65aae2f5aea625e3c13c735a"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa223cd1e36b642092c326d694d8bf59b71ddddc94cdb752bbbb1c5c91d833b"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c336a6d235522a62fef872c6295a42ecb0c4e1d0f1a3e500fe949415761b8a19"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7eb6a0587eded33aeefea9f916899d42b1799b7b14b8f8ff2753c0ac1741edac"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:70c8daf4faca8da5a6d655f9af86faf6ec2e1768f4b8b9d0226c02f3d6209703"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9fa4c9bf273ca41f940bceb86922a7667cd5bf90e95dbb157cbb8441008482c"}, + {file = "pydantic_core-2.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:11b71d67b4725e7e2a9f6e9c0ac1239bbc0c48cce3dc59f98635efc57d6dac83"}, + {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:270755f15174fb983890c49881e93f8f1b80f0b5e3a3cc1394a255706cabd203"}, + {file = "pydantic_core-2.20.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c81131869240e3e568916ef4c307f8b99583efaa60a8112ef27a366eefba8ef0"}, + {file = "pydantic_core-2.20.1-cp313-none-win32.whl", hash = "sha256:b91ced227c41aa29c672814f50dbb05ec93536abf8f43cd14ec9521ea09afe4e"}, + {file = "pydantic_core-2.20.1-cp313-none-win_amd64.whl", hash = "sha256:65db0f2eefcaad1a3950f498aabb4875c8890438bc80b19362cf633b87a8ab20"}, + {file = "pydantic_core-2.20.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:4745f4ac52cc6686390c40eaa01d48b18997cb130833154801a442323cc78f91"}, + {file = "pydantic_core-2.20.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a8ad4c766d3f33ba8fd692f9aa297c9058970530a32c728a2c4bfd2616d3358b"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41e81317dd6a0127cabce83c0c9c3fbecceae981c8391e6f1dec88a77c8a569a"}, + {file = 
"pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04024d270cf63f586ad41fff13fde4311c4fc13ea74676962c876d9577bcc78f"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eaad4ff2de1c3823fddf82f41121bdf453d922e9a238642b1dedb33c4e4f98ad"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:26ab812fa0c845df815e506be30337e2df27e88399b985d0bb4e3ecfe72df31c"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c5ebac750d9d5f2706654c638c041635c385596caf68f81342011ddfa1e5598"}, + {file = "pydantic_core-2.20.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2aafc5a503855ea5885559eae883978c9b6d8c8993d67766ee73d82e841300dd"}, + {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4868f6bd7c9d98904b748a2653031fc9c2f85b6237009d475b1008bfaeb0a5aa"}, + {file = "pydantic_core-2.20.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa2f457b4af386254372dfa78a2eda2563680d982422641a85f271c859df1987"}, + {file = "pydantic_core-2.20.1-cp38-none-win32.whl", hash = "sha256:225b67a1f6d602de0ce7f6c1c3ae89a4aa25d3de9be857999e9124f15dab486a"}, + {file = "pydantic_core-2.20.1-cp38-none-win_amd64.whl", hash = "sha256:6b507132dcfc0dea440cce23ee2182c0ce7aba7054576efc65634f080dbe9434"}, + {file = "pydantic_core-2.20.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b03f7941783b4c4a26051846dea594628b38f6940a2fdc0df00b221aed39314c"}, + {file = "pydantic_core-2.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1eedfeb6089ed3fad42e81a67755846ad4dcc14d73698c120a82e4ccf0f1f9f6"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:635fee4e041ab9c479e31edda27fcf966ea9614fff1317e280d99eb3e5ab6fe2"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:77bf3ac639c1ff567ae3b47f8d4cc3dc20f9966a2a6dd2311dcc055d3d04fb8a"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ed1b0132f24beeec5a78b67d9388656d03e6a7c837394f99257e2d55b461611"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6514f963b023aeee506678a1cf821fe31159b925c4b76fe2afa94cc70b3222b"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10d4204d8ca33146e761c79f83cc861df20e7ae9f6487ca290a97702daf56006"}, + {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d036c7187b9422ae5b262badb87a20a49eb6c5238b2004e96d4da1231badef1"}, + {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9ebfef07dbe1d93efb94b4700f2d278494e9162565a54f124c404a5656d7ff09"}, + {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6b9d9bb600328a1ce523ab4f454859e9d439150abb0906c5a1983c146580ebab"}, + {file = "pydantic_core-2.20.1-cp39-none-win32.whl", hash = "sha256:784c1214cb6dd1e3b15dd8b91b9a53852aed16671cc3fbe4786f4f1db07089e2"}, + {file = "pydantic_core-2.20.1-cp39-none-win_amd64.whl", hash = "sha256:d2fe69c5434391727efa54b47a1e7986bb0186e72a41b203df8f5b0a19a4f669"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a45f84b09ac9c3d35dfcf6a27fd0634d30d183205230a0ebe8373a0e8cfa0906"}, + {file = 
"pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d02a72df14dfdbaf228424573a07af10637bd490f0901cee872c4f434a735b94"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2b27e6af28f07e2f195552b37d7d66b150adbaa39a6d327766ffd695799780f"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:084659fac3c83fd674596612aeff6041a18402f1e1bc19ca39e417d554468482"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:242b8feb3c493ab78be289c034a1f659e8826e2233786e36f2893a950a719bb6"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:38cf1c40a921d05c5edc61a785c0ddb4bed67827069f535d794ce6bcded919fc"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e0bbdd76ce9aa5d4209d65f2b27fc6e5ef1312ae6c5333c26db3f5ade53a1e99"}, + {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:254ec27fdb5b1ee60684f91683be95e5133c994cc54e86a0b0963afa25c8f8a6"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:407653af5617f0757261ae249d3fba09504d7a71ab36ac057c938572d1bc9331"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c693e916709c2465b02ca0ad7b387c4f8423d1db7b4649c551f27a529181c5ad"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b5ff4911aea936a47d9376fd3ab17e970cc543d1b68921886e7f64bd28308d1"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f55a886d74f1808763976ac4efd29b7ed15c69f4d838bbd74d9d09cf6fa86"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:964faa8a861d2664f0c7ab0c181af0bea66098b1919439815ca8803ef136fc4e"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4dd484681c15e6b9a977c785a345d3e378d72678fd5f1f3c0509608da24f2ac0"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f6d6cff3538391e8486a431569b77921adfcdef14eb18fbf19b7c0a5294d4e6a"}, + {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a6d511cc297ff0883bc3708b465ff82d7560193169a8b93260f74ecb0a5e08a7"}, + {file = "pydantic_core-2.20.1.tar.gz", hash = "sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4"}, ] [package.dependencies] @@ -2042,104 +2071,104 @@ files = [ [[package]] name = "rapidfuzz" -version = "3.9.3" +version = "3.9.4" description = "rapid fuzzy string matching" optional = false python-versions = ">=3.8" files = [ - {file = "rapidfuzz-3.9.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bdb8c5b8e29238ec80727c2ba3b301efd45aa30c6a7001123a6647b8e6f77ea4"}, - {file = "rapidfuzz-3.9.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b3bd0d9632088c63a241f217742b1cf86e2e8ae573e01354775bd5016d12138c"}, - {file = "rapidfuzz-3.9.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:153f23c03d4917f6a1fc2fb56d279cc6537d1929237ff08ee7429d0e40464a18"}, - {file = "rapidfuzz-3.9.3-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a96c5225e840f1587f1bac8fa6f67562b38e095341576e82b728a82021f26d62"}, - {file = 
"rapidfuzz-3.9.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b777cd910ceecd738adc58593d6ed42e73f60ad04ecdb4a841ae410b51c92e0e"}, - {file = "rapidfuzz-3.9.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:53e06e4b81f552da04940aa41fc556ba39dee5513d1861144300c36c33265b76"}, - {file = "rapidfuzz-3.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c7ca5b6050f18fdcacdada2dc5fb7619ff998cd9aba82aed2414eee74ebe6cd"}, - {file = "rapidfuzz-3.9.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:87bb8d84cb41446a808c4b5f746e29d8a53499381ed72f6c4e456fe0f81c80a8"}, - {file = "rapidfuzz-3.9.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:959a15186d18425d19811bea86a8ffbe19fd48644004d29008e636631420a9b7"}, - {file = "rapidfuzz-3.9.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:a24603dd05fb4e3c09d636b881ce347e5f55f925a6b1b4115527308a323b9f8e"}, - {file = "rapidfuzz-3.9.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:0d055da0e801c71dd74ba81d72d41b2fa32afa182b9fea6b4b199d2ce937450d"}, - {file = "rapidfuzz-3.9.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:875b581afb29a7213cf9d98cb0f98df862f1020bce9d9b2e6199b60e78a41d14"}, - {file = "rapidfuzz-3.9.3-cp310-cp310-win32.whl", hash = "sha256:6073a46f61479a89802e3f04655267caa6c14eb8ac9d81a635a13805f735ebc1"}, - {file = "rapidfuzz-3.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:119c010e20e561249b99ca2627f769fdc8305b07193f63dbc07bca0a6c27e892"}, - {file = "rapidfuzz-3.9.3-cp310-cp310-win_arm64.whl", hash = "sha256:790b0b244f3213581d42baa2fed8875f9ee2b2f9b91f94f100ec80d15b140ba9"}, - {file = "rapidfuzz-3.9.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f57e8305c281e8c8bc720515540e0580355100c0a7a541105c6cafc5de71daae"}, - {file = "rapidfuzz-3.9.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a4fc7b784cf987dbddc300cef70e09a92ed1bce136f7bb723ea79d7e297fe76d"}, - {file = "rapidfuzz-3.9.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b422c0a6fe139d5447a0766268e68e6a2a8c2611519f894b1f31f0a392b9167"}, - {file = "rapidfuzz-3.9.3-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f50fed4a9b0c9825ff37cf0bccafd51ff5792090618f7846a7650f21f85579c9"}, - {file = "rapidfuzz-3.9.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b80eb7cbe62348c61d3e67e17057cddfd6defab168863028146e07d5a8b24a89"}, - {file = "rapidfuzz-3.9.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f45be77ec82da32ce5709a362e236ccf801615cc7163b136d1778cf9e31b14"}, - {file = "rapidfuzz-3.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd84b7f652a5610733400307dc732f57c4a907080bef9520412e6d9b55bc9adc"}, - {file = "rapidfuzz-3.9.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3e6d27dad8c990218b8cd4a5c99cbc8834f82bb46ab965a7265d5aa69fc7ced7"}, - {file = "rapidfuzz-3.9.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:05ee0696ebf0dfe8f7c17f364d70617616afc7dafe366532730ca34056065b8a"}, - {file = "rapidfuzz-3.9.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:2bc8391749e5022cd9e514ede5316f86e332ffd3cfceeabdc0b17b7e45198a8c"}, - {file = "rapidfuzz-3.9.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:93981895602cf5944d89d317ae3b1b4cc684d175a8ae2a80ce5b65615e72ddd0"}, - {file = "rapidfuzz-3.9.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:754b719a4990735f66653c9e9261dcf52fd4d925597e43d6b9069afcae700d21"}, - {file 
= "rapidfuzz-3.9.3-cp311-cp311-win32.whl", hash = "sha256:14c9f268ade4c88cf77ab007ad0fdf63699af071ee69378de89fff7aa3cae134"}, - {file = "rapidfuzz-3.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:bc1991b4cde6c9d3c0bbcb83d5581dc7621bec8c666c095c65b4277233265a82"}, - {file = "rapidfuzz-3.9.3-cp311-cp311-win_arm64.whl", hash = "sha256:0c34139df09a61b1b557ab65782ada971b4a3bce7081d1b2bee45b0a52231adb"}, - {file = "rapidfuzz-3.9.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5d6a210347d6e71234af5c76d55eeb0348b026c9bb98fe7c1cca89bac50fb734"}, - {file = "rapidfuzz-3.9.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b300708c917ce52f6075bdc6e05b07c51a085733650f14b732c087dc26e0aaad"}, - {file = "rapidfuzz-3.9.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83ea7ca577d76778250421de61fb55a719e45b841deb769351fc2b1740763050"}, - {file = "rapidfuzz-3.9.3-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8319838fb5b7b5f088d12187d91d152b9386ce3979ed7660daa0ed1bff953791"}, - {file = "rapidfuzz-3.9.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:505d99131afd21529293a9a7b91dfc661b7e889680b95534756134dc1cc2cd86"}, - {file = "rapidfuzz-3.9.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c52970f7784518d7c82b07a62a26e345d2de8c2bd8ed4774e13342e4b3ff4200"}, - {file = "rapidfuzz-3.9.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:143caf7247449055ecc3c1e874b69e42f403dfc049fc2f3d5f70e1daf21c1318"}, - {file = "rapidfuzz-3.9.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b8ab0fa653d9225195a8ff924f992f4249c1e6fa0aea563f685e71b81b9fcccf"}, - {file = "rapidfuzz-3.9.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:57e7c5bf7b61c7320cfa5dde1e60e678d954ede9bb7da8e763959b2138391401"}, - {file = "rapidfuzz-3.9.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:51fa1ba84653ab480a2e2044e2277bd7f0123d6693051729755addc0d015c44f"}, - {file = "rapidfuzz-3.9.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:17ff7f7eecdb169f9236e3b872c96dbbaf116f7787f4d490abd34b0116e3e9c8"}, - {file = "rapidfuzz-3.9.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:afe7c72d3f917b066257f7ff48562e5d462d865a25fbcabf40fca303a9fa8d35"}, - {file = "rapidfuzz-3.9.3-cp312-cp312-win32.whl", hash = "sha256:e53ed2e9b32674ce96eed80b3b572db9fd87aae6742941fb8e4705e541d861ce"}, - {file = "rapidfuzz-3.9.3-cp312-cp312-win_amd64.whl", hash = "sha256:35b7286f177e4d8ba1e48b03612f928a3c4bdac78e5651379cec59f95d8651e6"}, - {file = "rapidfuzz-3.9.3-cp312-cp312-win_arm64.whl", hash = "sha256:e6e4b9380ed4758d0cb578b0d1970c3f32dd9e87119378729a5340cb3169f879"}, - {file = "rapidfuzz-3.9.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a39890013f6d5b056cc4bfdedc093e322462ece1027a57ef0c636537bdde7531"}, - {file = "rapidfuzz-3.9.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b5bc0fdbf419493163c5c9cb147c5fbe95b8e25844a74a8807dcb1a125e630cf"}, - {file = "rapidfuzz-3.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:efe6e200a75a792d37b960457904c4fce7c928a96ae9e5d21d2bd382fe39066e"}, - {file = "rapidfuzz-3.9.3-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de077c468c225d4c18f7188c47d955a16d65f21aab121cbdd98e3e2011002c37"}, - {file = "rapidfuzz-3.9.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f917eaadf5388466a95f6a236f678a1588d231e52eda85374077101842e794e"}, - {file = 
"rapidfuzz-3.9.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:858ba57c05afd720db8088a8707079e8d024afe4644001fe0dbd26ef7ca74a65"}, - {file = "rapidfuzz-3.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d36447d21b05f90282a6f98c5a33771805f9222e5d0441d03eb8824e33e5bbb4"}, - {file = "rapidfuzz-3.9.3-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:acbe4b6f1ccd5b90c29d428e849aa4242e51bb6cab0448d5f3c022eb9a25f7b1"}, - {file = "rapidfuzz-3.9.3-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:53c7f27cdf899e94712972237bda48cfd427646aa6f5d939bf45d084780e4c16"}, - {file = "rapidfuzz-3.9.3-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:6175682a829c6dea4d35ed707f1dadc16513270ef64436568d03b81ccb6bdb74"}, - {file = "rapidfuzz-3.9.3-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:5276df395bd8497397197fca2b5c85f052d2e6a66ffc3eb0544dd9664d661f95"}, - {file = "rapidfuzz-3.9.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:77b5c4f3e72924d7845f0e189c304270066d0f49635cf8a3938e122c437e58de"}, - {file = "rapidfuzz-3.9.3-cp38-cp38-win32.whl", hash = "sha256:8add34061e5cd561c72ed4febb5c15969e7b25bda2bb5102d02afc3abc1f52d0"}, - {file = "rapidfuzz-3.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:604e0502a39cf8e67fa9ad239394dddad4cdef6d7008fdb037553817d420e108"}, - {file = "rapidfuzz-3.9.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:21047f55d674614eb4b0ab34e35c3dc66f36403b9fbfae645199c4a19d4ed447"}, - {file = "rapidfuzz-3.9.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a56da3aff97cb56fe85d9ca957d1f55dbac7c27da927a86a2a86d8a7e17f80aa"}, - {file = "rapidfuzz-3.9.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:964c08481aec2fe574f0062e342924db2c6b321391aeb73d68853ed42420fd6d"}, - {file = "rapidfuzz-3.9.3-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5e2b827258beefbe5d3f958243caa5a44cf46187eff0c20e0b2ab62d1550327a"}, - {file = "rapidfuzz-3.9.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6e65a301fcd19fbfbee3a514cc0014ff3f3b254b9fd65886e8a9d6957fb7bca"}, - {file = "rapidfuzz-3.9.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cbe93ba1725a8d47d2b9dca6c1f435174859427fbc054d83de52aea5adc65729"}, - {file = "rapidfuzz-3.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aca21c0a34adee582775da997a600283e012a608a107398d80a42f9a57ad323d"}, - {file = "rapidfuzz-3.9.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:256e07d3465173b2a91c35715a2277b1ee3ae0b9bbab4e519df6af78570741d0"}, - {file = "rapidfuzz-3.9.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:802ca2cc8aa6b8b34c6fdafb9e32540c1ba05fca7ad60b3bbd7ec89ed1797a87"}, - {file = "rapidfuzz-3.9.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:dd789100fc852cffac1449f82af0da139d36d84fd9faa4f79fc4140a88778343"}, - {file = "rapidfuzz-3.9.3-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:5d0abbacdb06e27ff803d7ae0bd0624020096802758068ebdcab9bd49cf53115"}, - {file = "rapidfuzz-3.9.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:378d1744828e27490a823fc6fe6ebfb98c15228d54826bf4e49e4b76eb5f5579"}, - {file = "rapidfuzz-3.9.3-cp39-cp39-win32.whl", hash = "sha256:5d0cb272d43e6d3c0dedefdcd9d00007471f77b52d2787a4695e9dd319bb39d2"}, - {file = "rapidfuzz-3.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:15e4158ac4b3fb58108072ec35b8a69165f651ba1c8f43559a36d518dbf9fb3f"}, - {file = "rapidfuzz-3.9.3-cp39-cp39-win_arm64.whl", hash = 
"sha256:58c6a4936190c558d5626b79fc9e16497e5df7098589a7e80d8bff68148ff096"}, - {file = "rapidfuzz-3.9.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5410dc848c947a603792f4f51b904a3331cf1dc60621586bfbe7a6de72da1091"}, - {file = "rapidfuzz-3.9.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:282d55700a1a3d3a7980746eb2fcd48c9bbc1572ebe0840d0340d548a54d01fe"}, - {file = "rapidfuzz-3.9.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc1037507810833646481f5729901a154523f98cbebb1157ba3a821012e16402"}, - {file = "rapidfuzz-3.9.3-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5e33f779391caedcba2ba3089fb6e8e557feab540e9149a5c3f7fea7a3a7df37"}, - {file = "rapidfuzz-3.9.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41a81a9f311dc83d22661f9b1a1de983b201322df0c4554042ffffd0f2040c37"}, - {file = "rapidfuzz-3.9.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a93250bd8fae996350c251e1752f2c03335bb8a0a5b0c7e910a593849121a435"}, - {file = "rapidfuzz-3.9.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3617d1aa7716c57d120b6adc8f7c989f2d65bc2b0cbd5f9288f1fc7bf469da11"}, - {file = "rapidfuzz-3.9.3-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:ad04a3f5384b82933213bba2459f6424decc2823df40098920856bdee5fd6e88"}, - {file = "rapidfuzz-3.9.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8709918da8a88ad73c9d4dd0ecf24179a4f0ceba0bee21efc6ea21a8b5290349"}, - {file = "rapidfuzz-3.9.3-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b770f85eab24034e6ef7df04b2bfd9a45048e24f8a808e903441aa5abde8ecdd"}, - {file = "rapidfuzz-3.9.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:930b4e6fdb4d914390141a2b99a6f77a52beacf1d06aa4e170cba3a98e24c1bc"}, - {file = "rapidfuzz-3.9.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:c8444e921bfc3757c475c4f4d7416a7aa69b2d992d5114fe55af21411187ab0d"}, - {file = "rapidfuzz-3.9.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2c1d3ef3878f871abe6826e386c3d61b5292ef5f7946fe646f4206b85836b5da"}, - {file = "rapidfuzz-3.9.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:d861bf326ee7dabc35c532a40384541578cd1ec1e1b7db9f9ecbba56eb76ca22"}, - {file = "rapidfuzz-3.9.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cde6b9d9ba5007077ee321ec722fa714ebc0cbd9a32ccf0f4dd3cc3f20952d71"}, - {file = "rapidfuzz-3.9.3-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3bb6546e7b6bed1aefbe24f68a5fb9b891cc5aef61bca6c1a7b1054b7f0359bb"}, - {file = "rapidfuzz-3.9.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d8a57261ef7996d5ced7c8cba9189ada3fbeffd1815f70f635e4558d93766cb"}, - {file = "rapidfuzz-3.9.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:67201c02efc596923ad950519e0b75ceb78d524177ea557134d6567b9ac2c283"}, - {file = "rapidfuzz-3.9.3.tar.gz", hash = "sha256:b398ea66e8ed50451bce5997c430197d5e4b06ac4aa74602717f792d8d8d06e2"}, + {file = "rapidfuzz-3.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c9b9793c19bdf38656c8eaefbcf4549d798572dadd70581379e666035c9df781"}, + {file = "rapidfuzz-3.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:015b5080b999404fe06ec2cb4f40b0be62f0710c926ab41e82dfbc28e80675b4"}, + {file = "rapidfuzz-3.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:acc5ceca9c1e1663f3e6c23fb89a311f69b7615a40ddd7645e3435bf3082688a"}, + {file = "rapidfuzz-3.9.4-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1424e238bc3f20e1759db1e0afb48a988a9ece183724bef91ea2a291c0b92a95"}, + {file = "rapidfuzz-3.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ed01378f605aa1f449bee82cd9c83772883120d6483e90aa6c5a4ce95dc5c3aa"}, + {file = "rapidfuzz-3.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eb26d412271e5a76cdee1c2d6bf9881310665d3fe43b882d0ed24edfcb891a84"}, + {file = "rapidfuzz-3.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f37e9e1f17be193c41a31c864ad4cd3ebd2b40780db11cd5c04abf2bcf4201b"}, + {file = "rapidfuzz-3.9.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d070ec5cf96b927c4dc5133c598c7ff6db3b833b363b2919b13417f1002560bc"}, + {file = "rapidfuzz-3.9.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:10e61bb7bc807968cef09a0e32ce253711a2d450a4dce7841d21d45330ffdb24"}, + {file = "rapidfuzz-3.9.4-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:31a2fc60bb2c7face4140010a7aeeafed18b4f9cdfa495cc644a68a8c60d1ff7"}, + {file = "rapidfuzz-3.9.4-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:fbebf1791a71a2e89f5c12b78abddc018354d5859e305ec3372fdae14f80a826"}, + {file = "rapidfuzz-3.9.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:aee9fc9e3bb488d040afc590c0a7904597bf4ccd50d1491c3f4a5e7e67e6cd2c"}, + {file = "rapidfuzz-3.9.4-cp310-cp310-win32.whl", hash = "sha256:005a02688a51c7d2451a2d41c79d737aa326ff54167211b78a383fc2aace2c2c"}, + {file = "rapidfuzz-3.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:3a2e75e41ee3274754d3b2163cc6c82cd95b892a85ab031f57112e09da36455f"}, + {file = "rapidfuzz-3.9.4-cp310-cp310-win_arm64.whl", hash = "sha256:2c99d355f37f2b289e978e761f2f8efeedc2b14f4751d9ff7ee344a9a5ca98d9"}, + {file = "rapidfuzz-3.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:07141aa6099e39d48637ce72a25b893fc1e433c50b3e837c75d8edf99e0c63e1"}, + {file = "rapidfuzz-3.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:db1664eaff5d7d0f2542dd9c25d272478deaf2c8412e4ad93770e2e2d828e175"}, + {file = "rapidfuzz-3.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc01a223f6605737bec3202e94dcb1a449b6c76d46082cfc4aa980f2a60fd40e"}, + {file = "rapidfuzz-3.9.4-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1869c42e73e2a8910b479be204fa736418741b63ea2325f9cc583c30f2ded41a"}, + {file = "rapidfuzz-3.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:62ea7007941fb2795fff305ac858f3521ec694c829d5126e8f52a3e92ae75526"}, + {file = "rapidfuzz-3.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:698e992436bf7f0afc750690c301215a36ff952a6dcd62882ec13b9a1ebf7a39"}, + {file = "rapidfuzz-3.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b76f611935f15a209d3730c360c56b6df8911a9e81e6a38022efbfb96e433bab"}, + {file = "rapidfuzz-3.9.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:129627d730db2e11f76169344a032f4e3883d34f20829419916df31d6d1338b1"}, + {file = "rapidfuzz-3.9.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:90a82143c14e9a14b723a118c9ef8d1bbc0c5a16b1ac622a1e6c916caff44dd8"}, + {file = "rapidfuzz-3.9.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:ded58612fe3b0e0d06e935eaeaf5a9fd27da8ba9ed3e2596307f40351923bf72"}, + {file = 
"rapidfuzz-3.9.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:f16f5d1c4f02fab18366f2d703391fcdbd87c944ea10736ca1dc3d70d8bd2d8b"}, + {file = "rapidfuzz-3.9.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:26aa7eece23e0df55fb75fbc2a8fb678322e07c77d1fd0e9540496e6e2b5f03e"}, + {file = "rapidfuzz-3.9.4-cp311-cp311-win32.whl", hash = "sha256:f187a9c3b940ce1ee324710626daf72c05599946bd6748abe9e289f1daa9a077"}, + {file = "rapidfuzz-3.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d8e9130fe5d7c9182990b366ad78fd632f744097e753e08ace573877d67c32f8"}, + {file = "rapidfuzz-3.9.4-cp311-cp311-win_arm64.whl", hash = "sha256:40419e98b10cd6a00ce26e4837a67362f658fc3cd7a71bd8bd25c99f7ee8fea5"}, + {file = "rapidfuzz-3.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b5d5072b548db1b313a07d62d88fe0b037bd2783c16607c647e01b070f6cf9e5"}, + {file = "rapidfuzz-3.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cf5bcf22e1f0fd273354462631d443ef78d677f7d2fc292de2aec72ae1473e66"}, + {file = "rapidfuzz-3.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0c8fc973adde8ed52810f590410e03fb6f0b541bbaeb04c38d77e63442b2df4c"}, + {file = "rapidfuzz-3.9.4-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2464bb120f135293e9a712e342c43695d3d83168907df05f8c4ead1612310c7"}, + {file = "rapidfuzz-3.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8d9d58689aca22057cf1a5851677b8a3ccc9b535ca008c7ed06dc6e1899f7844"}, + {file = "rapidfuzz-3.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:167e745f98baa0f3034c13583e6302fb69249a01239f1483d68c27abb841e0a1"}, + {file = "rapidfuzz-3.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db0bf0663b4b6da1507869722420ea9356b6195aa907228d6201303e69837af9"}, + {file = "rapidfuzz-3.9.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:cd6ac61b74fdb9e23f04d5f068e6cf554f47e77228ca28aa2347a6ca8903972f"}, + {file = "rapidfuzz-3.9.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:60ff67c690acecf381759c16cb06c878328fe2361ddf77b25d0e434ea48a29da"}, + {file = "rapidfuzz-3.9.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:cb934363380c60f3a57d14af94325125cd8cded9822611a9f78220444034e36e"}, + {file = "rapidfuzz-3.9.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:fe833493fb5cc5682c823ea3e2f7066b07612ee8f61ecdf03e1268f262106cdd"}, + {file = "rapidfuzz-3.9.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2797fb847d89e04040d281cb1902cbeffbc4b5131a5c53fc0db490fd76b2a547"}, + {file = "rapidfuzz-3.9.4-cp312-cp312-win32.whl", hash = "sha256:52e3d89377744dae68ed7c84ad0ddd3f5e891c82d48d26423b9e066fc835cc7c"}, + {file = "rapidfuzz-3.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:c76da20481c906e08400ee9be230f9e611d5931a33707d9df40337c2655c84b5"}, + {file = "rapidfuzz-3.9.4-cp312-cp312-win_arm64.whl", hash = "sha256:f2d2846f3980445864c7e8b8818a29707fcaff2f0261159ef6b7bd27ba139296"}, + {file = "rapidfuzz-3.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:355fc4a268ffa07bab88d9adee173783ec8d20136059e028d2a9135c623c44e6"}, + {file = "rapidfuzz-3.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4d81a78f90269190b568a8353d4ea86015289c36d7e525cd4d43176c88eff429"}, + {file = "rapidfuzz-3.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e618625ffc4660b26dc8e56225f8b966d5842fa190e70c60db6cd393e25b86e"}, + {file = "rapidfuzz-3.9.4-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:b712336ad6f2bacdbc9f1452556e8942269ef71f60a9e6883ef1726b52d9228a"}, + {file = "rapidfuzz-3.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84fc1ee19fdad05770c897e793836c002344524301501d71ef2e832847425707"}, + {file = "rapidfuzz-3.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1950f8597890c0c707cb7e0416c62a1cf03dcdb0384bc0b2dbda7e05efe738ec"}, + {file = "rapidfuzz-3.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a6c35f272ec9c430568dc8c1c30cb873f6bc96be2c79795e0bce6db4e0e101d"}, + {file = "rapidfuzz-3.9.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:1df0f9e9239132a231c86ae4f545ec2b55409fa44470692fcfb36b1bd00157ad"}, + {file = "rapidfuzz-3.9.4-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:d2c51955329bfccf99ae26f63d5928bf5be9fcfcd9f458f6847fd4b7e2b8986c"}, + {file = "rapidfuzz-3.9.4-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:3c522f462d9fc504f2ea8d82e44aa580e60566acc754422c829ad75c752fbf8d"}, + {file = "rapidfuzz-3.9.4-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:d8a52fc50ded60d81117d7647f262c529659fb21d23e14ebfd0b35efa4f1b83d"}, + {file = "rapidfuzz-3.9.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:04dbdfb0f0bfd3f99cf1e9e24fadc6ded2736d7933f32f1151b0f2abb38f9a25"}, + {file = "rapidfuzz-3.9.4-cp38-cp38-win32.whl", hash = "sha256:4968c8bd1df84b42f382549e6226710ad3476f976389839168db3e68fd373298"}, + {file = "rapidfuzz-3.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:3fe4545f89f8d6c27b6bbbabfe40839624873c08bd6700f63ac36970a179f8f5"}, + {file = "rapidfuzz-3.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9f256c8fb8f3125574c8c0c919ab0a1f75d7cba4d053dda2e762dcc36357969d"}, + {file = "rapidfuzz-3.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f5fdc09cf6e9d8eac3ce48a4615b3a3ee332ea84ac9657dbbefef913b13e632f"}, + {file = "rapidfuzz-3.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d395d46b80063d3b5d13c0af43d2c2cedf3ab48c6a0c2aeec715aa5455b0c632"}, + {file = "rapidfuzz-3.9.4-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7fa714fb96ce9e70c37e64c83b62fe8307030081a0bfae74a76fac7ba0f91715"}, + {file = "rapidfuzz-3.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1bc1a0f29f9119be7a8d3c720f1d2068317ae532e39e4f7f948607c3a6de8396"}, + {file = "rapidfuzz-3.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6022674aa1747d6300f699cd7c54d7dae89bfe1f84556de699c4ac5df0838082"}, + {file = "rapidfuzz-3.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcb72e5f9762fd469701a7e12e94b924af9004954f8c739f925cb19c00862e38"}, + {file = "rapidfuzz-3.9.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ad04ae301129f0eb5b350a333accd375ce155a0c1cec85ab0ec01f770214e2e4"}, + {file = "rapidfuzz-3.9.4-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f46a22506f17c0433e349f2d1dc11907c393d9b3601b91d4e334fa9a439a6a4d"}, + {file = "rapidfuzz-3.9.4-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:01b42a8728c36011718da409aa86b84984396bf0ca3bfb6e62624f2014f6022c"}, + {file = "rapidfuzz-3.9.4-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:e590d5d5443cf56f83a51d3c4867bd1f6be8ef8cfcc44279522bcef3845b2a51"}, + {file = "rapidfuzz-3.9.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:4c72078b5fdce34ba5753f9299ae304e282420e6455e043ad08e4488ca13a2b0"}, + {file = "rapidfuzz-3.9.4-cp39-cp39-win32.whl", hash = 
"sha256:f75639277304e9b75e6a7b3c07042d2264e16740a11e449645689ed28e9c2124"}, + {file = "rapidfuzz-3.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:e81e27e8c32a1e1278a4bb1ce31401bfaa8c2cc697a053b985a6f8d013df83ec"}, + {file = "rapidfuzz-3.9.4-cp39-cp39-win_arm64.whl", hash = "sha256:15bc397ee9a3ed1210b629b9f5f1da809244adc51ce620c504138c6e7095b7bd"}, + {file = "rapidfuzz-3.9.4-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:20488ade4e1ddba3cfad04f400da7a9c1b91eff5b7bd3d1c50b385d78b587f4f"}, + {file = "rapidfuzz-3.9.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:e61b03509b1a6eb31bc5582694f6df837d340535da7eba7bedb8ae42a2fcd0b9"}, + {file = "rapidfuzz-3.9.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:098d231d4e51644d421a641f4a5f2f151f856f53c252b03516e01389b2bfef99"}, + {file = "rapidfuzz-3.9.4-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:17ab8b7d10fde8dd763ad428aa961c0f30a1b44426e675186af8903b5d134fb0"}, + {file = "rapidfuzz-3.9.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e272df61bee0a056a3daf99f9b1bd82cf73ace7d668894788139c868fdf37d6f"}, + {file = "rapidfuzz-3.9.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d6481e099ff8c4edda85b8b9b5174c200540fd23c8f38120016c765a86fa01f5"}, + {file = "rapidfuzz-3.9.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ad61676e9bdae677d577fe80ec1c2cea1d150c86be647e652551dcfe505b1113"}, + {file = "rapidfuzz-3.9.4-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:af65020c0dd48d0d8ae405e7e69b9d8ae306eb9b6249ca8bf511a13f465fad85"}, + {file = "rapidfuzz-3.9.4-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d38b4e026fcd580e0bda6c0ae941e0e9a52c6bc66cdce0b8b0da61e1959f5f8"}, + {file = "rapidfuzz-3.9.4-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f74ed072c2b9dc6743fb19994319d443a4330b0e64aeba0aa9105406c7c5b9c2"}, + {file = "rapidfuzz-3.9.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aee5f6b8321f90615c184bd8a4c676e9becda69b8e4e451a90923db719d6857c"}, + {file = "rapidfuzz-3.9.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3a555e3c841d6efa350f862204bb0a3fea0c006b8acc9b152b374fa36518a1c6"}, + {file = "rapidfuzz-3.9.4-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0772150d37bf018110351c01d032bf9ab25127b966a29830faa8ad69b7e2f651"}, + {file = "rapidfuzz-3.9.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:addcdd3c3deef1bd54075bd7aba0a6ea9f1d01764a08620074b7a7b1e5447cb9"}, + {file = "rapidfuzz-3.9.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3fe86b82b776554add8f900b6af202b74eb5efe8f25acdb8680a5c977608727f"}, + {file = "rapidfuzz-3.9.4-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0fc91ac59f4414d8542454dfd6287a154b8e6f1256718c898f695bdbb993467"}, + {file = "rapidfuzz-3.9.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a944e546a296a5fdcaabb537b01459f1b14d66f74e584cb2a91448bffadc3c1"}, + {file = "rapidfuzz-3.9.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4fb96ba96d58c668a17a06b5b5e8340fedc26188e87b0d229d38104556f30cd8"}, + {file = "rapidfuzz-3.9.4.tar.gz", hash = "sha256:366bf8947b84e37f2f4cf31aaf5f37c39f620d8c0eddb8b633e6ba0129ca4a0a"}, ] [package.extras] @@ -2392,13 +2421,13 @@ test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0, [[package]] name = 
"trove-classifiers" -version = "2024.5.22" +version = "2024.7.2" description = "Canonical source for classifiers on PyPI (pypi.org)." optional = false python-versions = "*" files = [ - {file = "trove_classifiers-2024.5.22-py3-none-any.whl", hash = "sha256:c43ade18704823e4afa3d9db7083294bc4708a5e02afbcefacd0e9d03a7a24ef"}, - {file = "trove_classifiers-2024.5.22.tar.gz", hash = "sha256:8a6242bbb5c9ae88d34cf665e816b287d2212973c8777dfaef5ec18d72ac1d03"}, + {file = "trove_classifiers-2024.7.2-py3-none-any.whl", hash = "sha256:ccc57a33717644df4daca018e7ec3ef57a835c48e96a1e71fc07eb7edac67af6"}, + {file = "trove_classifiers-2024.7.2.tar.gz", hash = "sha256:8328f2ac2ce3fd773cbb37c765a0ed7a83f89dc564c7d452f039b69249d0ac35"}, ] [[package]] @@ -2518,13 +2547,13 @@ files = [ [[package]] name = "urllib3" -version = "2.2.1" +version = "2.2.2" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, - {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, + {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, + {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, ] [package.extras] @@ -2615,13 +2644,13 @@ files = [ [[package]] name = "virtualenv" -version = "20.26.2" +version = "20.26.3" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.26.2-py3-none-any.whl", hash = "sha256:a624db5e94f01ad993d476b9ee5346fdf7b9de43ccaee0e0197012dc838a0e9b"}, - {file = "virtualenv-20.26.2.tar.gz", hash = "sha256:82bf0f4eebbb78d36ddaee0283d43fe5736b53880b8a8cdcd37390a07ac3741c"}, + {file = "virtualenv-20.26.3-py3-none-any.whl", hash = "sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589"}, + {file = "virtualenv-20.26.3.tar.gz", hash = "sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a"}, ] [package.dependencies] From e72c430b51924adecd1e444771fcc64c1b6be1f7 Mon Sep 17 00:00:00 2001 From: Meni Yakove Date: Mon, 8 Jul 2024 13:39:13 +0300 Subject: [PATCH 27/31] Fix add reviewers --- webhook_server_container/app.py | 11 ++- webhook_server_container/libs/github_api.py | 81 +++++++++++++-------- 2 files changed, 59 insertions(+), 33 deletions(-) diff --git a/webhook_server_container/app.py b/webhook_server_container/app.py index 7c3b39a1..8e7dc7b5 100644 --- a/webhook_server_container/app.py +++ b/webhook_server_container/app.py @@ -18,12 +18,17 @@ @FASTAPI_APP.get(f"{APP_ROOT_PATH}/healthcheck") def healthcheck() -> Dict[str, Any]: - return {"status": requests.status_codes.codes.ok, "message": "Alive"} + return {"status": requests.codes.ok, "message": "Alive"} @FASTAPI_APP.post(APP_ROOT_PATH) async def process_webhook(request: Request) -> Dict[str, Any]: - process_failed_msg = {"status": requests.status_codes.codes.server_error, "Message": "Process failed"} + log_prefix = request.headers.get("X-GitHub-Delivery", "") + process_failed_msg = { + "status": requests.codes.server_error, + "message": "Process failed", + "log_prefix": log_prefix, + } try: hook_data = await request.json() except Exception as ex: @@ -32,7 +37,7 @@ async def process_webhook(request: Request) -> Dict[str, Any]: try: 
ProcessGithubWehook(hook_data=hook_data, headers=request.headers) - return {"status": requests.status_codes.codes.ok, "Message": "process success"} + return {"status": requests.codes.ok, "message": "process success", "log_prefix": log_prefix} except Exception as ex: LOGGER.error(f"Failed to process hook: {ex}") diff --git a/webhook_server_container/libs/github_api.py b/webhook_server_container/libs/github_api.py index 50781192..8e53cc1a 100644 --- a/webhook_server_container/libs/github_api.py +++ b/webhook_server_container/libs/github_api.py @@ -84,6 +84,14 @@ class RepositoryNotFoundError(Exception): pass +class ProcessGithubWehookError(Exception): + def __init__(self, err: Dict[str, str]): + self.err = err + + def __str__(self) -> str: + return f"{self.err}" + + class ProcessGithubWehook: def __init__(self, hook_data: Dict[Any, Any], headers: Headers): self.app: FastAPI = FASTAPI_APP @@ -92,25 +100,28 @@ def __init__(self, hook_data: Dict[Any, Any], headers: Headers): self.repository_name: str = hook_data["repository"]["name"] self.log_prefix_with_color: str = "" self.parent_committer: str = "" - self.log_uuid: str = shortuuid.uuid()[:5] self.container_repo_dir: str = "/tmp/repository" self.jira_track_pr: bool = False self.issue_title: str = "" self.all_required_status_checks: List[str] = [] self.config = Config() + self.x_github_delivery: str = self.headers.get("X-GitHub-Delivery", "") self.log_prefix = self.prepare_log_prefix() self._repo_data_from_config() github_event: str = self.headers["X-GitHub-Event"] - event_log: str = f"Event type: {github_event}. event ID: {self.headers.get('X-GitHub-Delivery')}" + event_log: str = f"Event type: {github_event}. event ID: {self.x_github_delivery}" self.github_app_api = get_repository_github_app_api( config_=self.config, repository_name=self.repository_full_name ) + if not self.github_app_api: LOGGER.error( - f"{self.log_prefix} not found by manage-repositories-app, " - f"make sure the app installed (https://github.com/apps/manage-repositories-app)" + ( + f"{self.log_prefix} not found by manage-repositories-app, " + "make sure the app installed (https://github.com/apps/manage-repositories-app)" + ), ) return @@ -124,7 +135,7 @@ def __init__(self, hook_data: Dict[Any, Any], headers: Headers): ) if not (self.repository or self.repository_by_github_app): - LOGGER.error(f"{self.repository_full_name} Failed to get repository.") + LOGGER.error(f"{self.log_prefix} Failed to get repository.") return self.add_api_users_to_auto_verified_and_merged_users() @@ -259,9 +270,9 @@ def _set_log_prefix_color(self) -> None: def prepare_log_prefix(self, pull_request: Optional[PullRequest] = None) -> str: self._set_log_prefix_color() return ( - f"{self.log_prefix_with_color}({self.log_uuid})[PR {pull_request.number}]:" + f"{self.log_prefix_with_color}({self.x_github_delivery})[PR {pull_request.number}]:" if pull_request - else f"{self.log_prefix_with_color}:({self.log_uuid})" + else f"{self.log_prefix_with_color}:({self.x_github_delivery})" ) def hash_token(self, message: str) -> str: @@ -280,7 +291,6 @@ def process_pull_request_check_run_webhook_data(self) -> None: _check_run: Dict[str, Any] = self.hook_data["check_run"] check_run_name: str = _check_run["name"] if check_run_name == CAN_BE_MERGED_STR: - LOGGER.info(f"{self.log_prefix} check_run '{check_run_name}' skipped") return if ( @@ -296,7 +306,8 @@ def process_pull_request_check_run_webhook_data(self) -> None: self.pull_request = _pull_request self.last_commit = self._get_last_commit() 
self.check_if_can_be_merged() - break + + LOGGER.error(f"{self.log_prefix} No pull request found") def _repo_data_from_config(self) -> None: config_data = self.config.data # Global repositories configuration @@ -538,13 +549,19 @@ def reviewers(self) -> List[str]: @property def files_reviewers(self) -> Dict[str, str]: - _reviewers: Dict[str, Any] = self.owners_content.get("reviewers", {}) - return _reviewers.get("files", {}) + _reviewers = self.owners_content.get("reviewers", {}) + if isinstance(_reviewers, dict): + return _reviewers.get("files", {}) + + return {} @property def folders_reviewers(self) -> Dict[str, str]: - _reviewers: Dict[str, Any] = self.owners_content.get("reviewers", {}) - return _reviewers.get("folders", {}) + _reviewers = self.owners_content.get("reviewers", {}) + if isinstance(_reviewers, dict): + return _reviewers.get("folders", {}) + + return {} @property def approvers(self) -> List[str]: @@ -557,13 +574,13 @@ def assign_reviewers(self) -> None: LOGGER.info(f"{self.log_prefix} Assign reviewers") changed_files = self.list_changed_commit_files() reviewers_to_add = self.reviewers - for _file, _reviewers in self.files_reviewers.items(): + for _file, _file_reviewers in self.files_reviewers.items(): if _file in changed_files: - reviewers_to_add.extend(_reviewers) + reviewers_to_add.extend(_file_reviewers) - for _folder, _reviewers in self.folders_reviewers.items(): + for _folder, _folder_reviewers in self.folders_reviewers.items(): if any(cf for cf in changed_files if _folder in str(Path(cf).parent)): - reviewers_to_add.extend(_reviewers) + reviewers_to_add.extend(_folder_reviewers) _to_add: List[str] = list(set(reviewers_to_add)) LOGGER.info(f"{self.log_prefix} Reviewers to add: {_to_add}") @@ -1364,6 +1381,7 @@ def check_if_can_be_merged(self) -> None: PR has no changed requests from approvers. 
""" if self.skip_if_pull_request_already_merged(): + LOGGER.info(f"{self.log_prefix} Pull request already merged") return output = { @@ -1458,6 +1476,7 @@ def check_if_can_be_merged(self) -> None: ) self.pull_request.merge(merge_method="squash") + LOGGER.info(f"{self.log_prefix} Pull request can be merged") return failure_output += f"Missing lgtm/approved from approvers {self.approvers}\n" @@ -1469,7 +1488,8 @@ def check_if_can_be_merged(self) -> None: except Exception as ex: LOGGER.error(f"{self.log_prefix} Failed to check if can be merged, set check run to {FAILURE_STR} {ex}") - output["text"] = "Failed to check if can be merged, check logs" + _err = "Failed to check if can be merged, check logs" + output["text"] = _err self._remove_label(label=CAN_BE_MERGED_STR) self.set_merge_check_failure(output=output) @@ -1636,6 +1656,11 @@ def create_comment_reaction(self, issue_comment_id: int, reaction: str) -> None: def process_opened_or_synchronize_pull_request(self) -> None: prepare_pull_futures: List[Future] = [] with ThreadPoolExecutor() as executor: + prepare_pull_futures.append(executor.submit(self.assign_reviewers)) + prepare_pull_futures.append( + executor.submit(self._add_label, **{"label": f"{BRANCH_LABEL_PREFIX}{self.pull_request_branch}"}) + ) + prepare_pull_futures.append(executor.submit(self.label_pull_request_by_merge_state)) prepare_pull_futures.append(executor.submit(self.set_merge_check_queued)) prepare_pull_futures.append(executor.submit(self.set_run_tox_check_queued)) prepare_pull_futures.append(executor.submit(self.set_run_pre_commit_check_queued)) @@ -1644,10 +1669,6 @@ def process_opened_or_synchronize_pull_request(self) -> None: prepare_pull_futures.append(executor.submit(self._process_verified)) prepare_pull_futures.append(executor.submit(self.add_size_label)) - for result in as_completed(prepare_pull_futures): - if result.exception(): - LOGGER.error(f"{self.log_prefix} {result.exception()}") - run_check_runs_futures: List[Future] = [] with ThreadPoolExecutor() as executor: run_check_runs_futures.append(executor.submit(self._run_tox)) @@ -1655,23 +1676,23 @@ def process_opened_or_synchronize_pull_request(self) -> None: run_check_runs_futures.append(executor.submit(self._run_install_python_module)) run_check_runs_futures.append(executor.submit(self._run_build_container)) + for result in as_completed(prepare_pull_futures): + if _exp := result.exception(): + LOGGER.error(f"{self.log_prefix} {_exp}") + for result in as_completed(run_check_runs_futures): - if result.exception(): - LOGGER.error(f"{self.log_prefix} {result.exception()}") - LOGGER.info(f"{self.log_prefix} {result.result()}") + if _exp := result.exception(): + LOGGER.error(f"{self.log_prefix} {_exp}") - self._add_label(label=f"{BRANCH_LABEL_PREFIX}{self.pull_request_branch}") - LOGGER.info(f"{self.log_prefix} Adding PR owner as assignee") + LOGGER.info(f"{self.log_prefix} {result.result()}") try: + LOGGER.info(f"{self.log_prefix} Adding PR owner as assignee") self.pull_request.add_to_assignees() except Exception: if self.approvers: self.pull_request.add_to_assignees(self.approvers[0]) - self.assign_reviewers() - self.label_pull_request_by_merge_state() - def is_check_run_in_progress(self, check_run: str) -> bool: for run in self.last_commit.get_check_runs(): if run.name == check_run and run.status == IN_PROGRESS_STR: From cd5af7a1e9515dd2e8d234abf8ad0667b716d2af Mon Sep 17 00:00:00 2001 From: Meni Yakove Date: Mon, 8 Jul 2024 14:04:31 +0300 Subject: [PATCH 28/31] Fix add reviewers --- 
webhook_server_container/libs/github_api.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/webhook_server_container/libs/github_api.py b/webhook_server_container/libs/github_api.py index 8e53cc1a..a5e34b62 100644 --- a/webhook_server_container/libs/github_api.py +++ b/webhook_server_container/libs/github_api.py @@ -1562,13 +1562,14 @@ def _run_build_container( "text": self.get_check_run_text(err=err, out=out), } if rc: + pull_request = hasattr(self, "pull_request") LOGGER.info(f"{self.log_prefix} Done building {_container_repository_and_tag}") - if self.pull_request and set_check: + if pull_request and set_check: return self.set_container_build_success(output=output) if push: push_msg: str = f"New container for {_container_repository_and_tag} published" - if self.pull_request: + if pull_request: self.pull_request.create_issue_comment(push_msg) if self.slack_webhook_url: From 17f9d173a29caea25669074d0480196e546c1b7c Mon Sep 17 00:00:00 2001 From: Meni Yakove Date: Mon, 8 Jul 2024 14:08:26 +0300 Subject: [PATCH 29/31] fix release for container --- webhook_server_container/libs/github_api.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/webhook_server_container/libs/github_api.py b/webhook_server_container/libs/github_api.py index a5e34b62..d7d12c7e 100644 --- a/webhook_server_container/libs/github_api.py +++ b/webhook_server_container/libs/github_api.py @@ -978,7 +978,7 @@ def process_push_webhook_data(self) -> None: LOGGER.info(f"{self.log_prefix} Processing upload to pypi for tag: {tag_name}") self.upload_to_pypi(tag_name=tag_name) - if self.container_release: + if self.build_and_push_container and self.container_release: LOGGER.info(f"{self.log_prefix} Processing build and push container for tag: {tag_name}") self._run_build_container(push=True, set_check=False, tag=tag_name) From 76c399e4ec52cb8e3de0b05d14ba3daa63aed040 Mon Sep 17 00:00:00 2001 From: Meni Yakove Date: Mon, 8 Jul 2024 14:18:29 +0300 Subject: [PATCH 30/31] fix release for container --- webhook_server_container/libs/github_api.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/webhook_server_container/libs/github_api.py b/webhook_server_container/libs/github_api.py index d7d12c7e..6919a8a5 100644 --- a/webhook_server_container/libs/github_api.py +++ b/webhook_server_container/libs/github_api.py @@ -73,7 +73,7 @@ ) -LOGGER = get_logger(name="GitHubApi", filename=os.environ.get("WEBHOOK_SERVER_LOG_FILE")) +LOGGER = get_logger(name="ProcessGithubWehook", filename=os.environ.get("WEBHOOK_SERVER_LOG_FILE")) class NoPullRequestError(Exception): @@ -1532,7 +1532,9 @@ def _run_build_container( if not self.build_and_push_container: return - if set_check: + pull_request = hasattr(self, "pull_request") + + if pull_request and set_check: if self.is_check_run_in_progress(check_run=BUILD_CONTAINER_STR) and not is_merged: LOGGER.info(f"{self.log_prefix} Check run is in progress, not running {BUILD_CONTAINER_STR}.") return @@ -1562,7 +1564,6 @@ def _run_build_container( "text": self.get_check_run_text(err=err, out=out), } if rc: - pull_request = hasattr(self, "pull_request") LOGGER.info(f"{self.log_prefix} Done building {_container_repository_and_tag}") if pull_request and set_check: return self.set_container_build_success(output=output) @@ -1586,6 +1587,7 @@ def _run_build_container( err_msg: str = f"Failed to create and push {_container_repository_and_tag}" if self.pull_request: self.pull_request.create_issue_comment(err_msg) + if self.slack_webhook_url: message = f""" 
```
From 0afccdddf7eee18a11a1d08f854dcc17de453be0 Mon Sep 17 00:00:00 2001
From: Meni Yakove
Date: Mon, 8 Jul 2024 14:22:46 +0300
Subject: [PATCH 31/31] fix release for container

---
 webhook_server_container/libs/github_api.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/webhook_server_container/libs/github_api.py b/webhook_server_container/libs/github_api.py
index 6919a8a5..dff0687c 100644
--- a/webhook_server_container/libs/github_api.py
+++ b/webhook_server_container/libs/github_api.py
@@ -307,7 +307,7 @@ def process_pull_request_check_run_webhook_data(self) -> None:
                     self.last_commit = self._get_last_commit()
                     self.check_if_can_be_merged()
 
-        LOGGER.error(f"{self.log_prefix} No pull request found")
+        LOGGER.warning(f"{self.log_prefix} No pull request found")
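
The reviewer-assignment fix in PATCH 27 moves assign_reviewers, labeling, and the check-run setup into a thread pool and only reports failures per future after everything has been submitted. Below is a minimal standalone sketch of that submit/as_completed error-reporting pattern; the function name, step callables, and logger are illustrative placeholders, not the project's API.

import logging
from concurrent.futures import Future, ThreadPoolExecutor, as_completed
from typing import Callable, List

LOGGER = logging.getLogger("webhook-sketch")


def run_pull_request_steps(steps: List[Callable[[], str]], log_prefix: str) -> None:
    # Submit every preparation step; the pool waits for all of them on exit,
    # so one failing step does not prevent the others from running.
    futures: List[Future] = []
    with ThreadPoolExecutor() as executor:
        for step in steps:
            futures.append(executor.submit(step))

    # Report each outcome separately, mirroring the walrus-based exception check in the patch.
    for result in as_completed(futures):
        if _exp := result.exception():
            LOGGER.error(f"{log_prefix} {_exp}")
        else:
            LOGGER.info(f"{log_prefix} {result.result()}")


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    run_pull_request_steps([lambda: "reviewers assigned", lambda: "labels added"], log_prefix="[demo]")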
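PATCHES 28-30 make _run_build_container safe to call from push/tag (release) events, where self.pull_request is never set, by checking hasattr(self, "pull_request") before any PR-specific reporting. The toy class below sketches that guard; ReleaseBuilder and its return strings are assumptions for illustration only, not the real implementation.

from typing import Any, Optional


class ReleaseBuilder:
    """Toy sketch of guarding optional pull-request state with hasattr()."""

    def __init__(self, pull_request: Optional[Any] = None) -> None:
        # Pull-request webhooks set the attribute; push/tag events never do,
        # so unconditional attribute access would raise AttributeError during a release build.
        if pull_request is not None:
            self.pull_request = pull_request

    def build(self, push: bool, set_check: bool) -> str:
        has_pull_request = hasattr(self, "pull_request")
        if has_pull_request and set_check:
            return "report the build-container check run on the pull request"
        if push:
            return "push the release container; there is no PR check to update"
        return "build only"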