diff --git a/README.md b/README.md index e6422f6a..06323291 100644 --- a/README.md +++ b/README.md @@ -296,7 +296,8 @@ Supported labels: ### Logging Setup -The webhook server configures custom logging with color-coded log level names to enhance readability. It also supports optional logging to a file when 'WEBHOOK_SERVER_LOG_FILE' is set in the environment. This feature uses a rotating file handler to manage log rotation and is defined in the `constants.py` file. +The webhook server configures custom logging with color-coded log level names to enhance readability. It also supports optional logging to a file. +See [example-config](example.config.yaml) for more details. ### Webhook Creation @@ -304,4 +305,4 @@ Webhooks are automatically created for GitHub repositories based on settings def ### Usage Guide -To use the webhook server, first prepare the `config.yaml` file with the necessary repository and server configurations. Set the required environment variables, including `WEBHOOK_SERVER_LOG_FILE` and `WEBHOOK_SERVER_DATA_DIR`. Build and start the server using the instructions in the 'Build container' section. +To use the webhook server, first prepare the [config.yaml](example.config.yaml) file with the necessary repository and server configurations. Set the required environment variables, including `WEBHOOK_SERVER_DATA_DIR`. Build and start the server using the instructions in the 'Build container' section. 
diff --git a/docker-compose-example.yaml b/docker-compose-example.yaml index a164dcbd..566bc158 100644 --- a/docker-compose-example.yaml +++ b/docker-compose-example.yaml @@ -6,8 +6,6 @@ services: - "./webhook_server_data_dir:/webhook_server:Z" # Should include config.yaml and webhook-server.private-key.pem - "./webhook_server_containers:/containers:Z" # Optional, to cache podman pull containers environment: - - WEBHOOK_SERVER_LOG_FILE=/tmp/webhook_server.log - - WEBHOOK_SERVER_LOG_LEVEL=INFO # Options are: DEBUG, INFO, WARNING, ERROR, CRITICAL - PUID=1000 - PGID=1000 - TZ=Asia/Jerusalem diff --git a/example.config.yaml b/example.config.yaml index 72f8c501..44f2d96d 100644 --- a/example.config.yaml +++ b/example.config.yaml @@ -1,3 +1,6 @@ +log-level: INFO # Set global log level, changes take effect immediately without server restart +log-file: webhook-server.log # Set global log file, changes take effect immediately without server restart + github-app-id: 123456 # GitHub app id github-toekns: - @@ -28,6 +31,8 @@ jira: repositories: my-repository: name: my-org/my-repository + log-level: DEBUG # Override global log-level for repository + log-file: my-repository.log # Override global log-file for repository slack_webhook_url: # Send notification to slack on several operations verified_job: true pypi: diff --git a/poetry.lock b/poetry.lock index 3e1b601f..94a04270 100644 --- a/poetry.lock +++ b/poetry.lock @@ -393,6 +393,21 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""} [package.extras] development = ["black", "flake8", "mypy", "pytest", "types-colorama"] +[[package]] +name = "columnar" +version = "1.4.1" +description = "A tool for printing data in a columnar format." 
+optional = false +python-versions = "*" +files = [ + {file = "Columnar-1.4.1-py3-none-any.whl", hash = "sha256:8efb692a7e6ca07dcc8f4ea889960421331a5dffa8e5af81f0a67ad8ea1fc798"}, + {file = "Columnar-1.4.1.tar.gz", hash = "sha256:c3cb57273333b2ff9cfaafc86f09307419330c97faa88dcfe23df05e6fbb9c72"}, +] + +[package.dependencies] +toolz = "*" +wcwidth = "*" + [[package]] name = "crashtest" version = "0.4.1" @@ -2226,6 +2241,22 @@ files = [ cryptography = ">=2.0" jeepney = ">=0.6" +[[package]] +name = "setuptools" +version = "71.1.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-71.1.0-py3-none-any.whl", hash = "sha256:33874fdc59b3188304b2e7c80d9029097ea31627180896fb549c578ceb8a0855"}, + {file = "setuptools-71.1.0.tar.gz", hash = "sha256:032d42ee9fb536e33087fb66cac5f840eb9391ed05637b3f2a76a7c8fb477936"}, +] + +[package.extras] +core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.text (>=3.7)", "more-itertools (>=8.8)", "ordered-set (>=3.1.1)", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.11.*)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (<0.4)", "pytest-ruff (>=0.2.1)", "pytest-ruff (>=0.3.2)", 
"pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] + [[package]] name = "shellingham" version = "1.5.4" @@ -2307,6 +2338,22 @@ typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\"" [package.extras] full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7)", "pyyaml"] +[[package]] +name = "string-color" +version = "1.2.3" +description = "just another mod to print strings in 256 colors in the terminal." +optional = false +python-versions = "*" +files = [ + {file = "string-color-1.2.3.tar.gz", hash = "sha256:c2492c626bd77ca168bf13a2752fed5cf988d87bdd17853bdb7c7a0b0003c183"}, + {file = "string_color-1.2.3-py3-none-any.whl", hash = "sha256:45672a2de0a152d407270ba2da272484524f7faecaa0b0f8d9604eec02fec549"}, +] + +[package.dependencies] +colorama = "*" +columnar = ">=1.3.1" +setuptools = "*" + [[package]] name = "timeout-sampler" version = "0.0.36" @@ -2342,6 +2389,17 @@ files = [ {file = "tomlkit-0.12.5.tar.gz", hash = "sha256:eef34fba39834d4d6b73c9ba7f3e4d1c417a4e56f89a7e96e090dd0d24b8fb3c"}, ] +[[package]] +name = "toolz" +version = "0.12.1" +description = "List processing tools and functional utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "toolz-0.12.1-py3-none-any.whl", hash = "sha256:d22731364c07d72eea0a0ad45bafb2c2937ab6fd38a3507bf55eae8744aa7d85"}, + {file = "toolz-0.12.1.tar.gz", hash = "sha256:ecca342664893f177a13dac0e6b41cbd8ac25a358e5f215316d43e2100224f4d"}, +] + [[package]] name = "traitlets" version = "5.14.3" @@ -2862,4 +2920,4 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "663a613b687b0366567a80a373848e32d9da955551443a60754e17e1bc1cf61c" +content-hash = "5a3c1d08a8929a02c75e7b50b8cca18fa7c5a5df15f5c1546135ca975999d720" diff --git a/pyproject.toml b/pyproject.toml index 7d9f8eca..b985d81a 100644 
--- a/pyproject.toml +++ b/pyproject.toml @@ -57,6 +57,7 @@ uwsgi = "^2.0.26" fastapi = "^0.111.0" python-simple-logger = "^1.0.30" uvicorn = "^0.30.1" +string-color = "^1.2.3" [tool.poetry.group.dev.dependencies] ipdb = "^0.13.13" diff --git a/webhook_server_container/app.py b/webhook_server_container/app.py index b836b3b2..2af00fa3 100644 --- a/webhook_server_container/app.py +++ b/webhook_server_container/app.py @@ -1,19 +1,19 @@ -import os from typing import Any, Dict from fastapi import Request import requests import urllib3 -from simple_logger.logger import get_logger +from fastapi import FastAPI from webhook_server_container.libs.github_api import ProcessGithubWehook -from webhook_server_container.utils.constants import FASTAPI_APP +from webhook_server_container.utils.helpers import get_logger_with_params +FASTAPI_APP: FastAPI = FastAPI(title="webhook-server") APP_ROOT_PATH: str = "/webhook_server" urllib3.disable_warnings() -LOGGER = get_logger(name="app", filename=os.environ.get("WEBHOOK_SERVER_LOG_FILE")) +LOGGER = get_logger_with_params(name="main") @FASTAPI_APP.get(f"{APP_ROOT_PATH}/healthcheck") @@ -23,18 +23,19 @@ def healthcheck() -> Dict[str, Any]: @FASTAPI_APP.post(APP_ROOT_PATH) async def process_webhook(request: Request) -> Dict[str, Any]: - log_prefix = request.headers.get("X-GitHub-Delivery", "") + delivery_headers = request.headers.get("X-GitHub-Delivery", "") process_failed_msg: Dict[str, Any] = { "status": requests.codes.server_error, "message": "Process failed", - "log_prefix": log_prefix, + "log_prefix": delivery_headers, } try: hook_data: Dict[Any, Any] = await request.json() + except Exception as ex: LOGGER.error(f"Error get JSON from request: {ex}") return process_failed_msg api: ProcessGithubWehook = ProcessGithubWehook(hook_data=hook_data, headers=request.headers) api.process() - return {"status": requests.codes.ok, "message": "process success", "log_prefix": log_prefix} + return {"status": requests.codes.ok, "message": "process 
success", "log_prefix": delivery_headers} diff --git a/webhook_server_container/libs/config.py b/webhook_server_container/libs/config.py index 39e7d393..f73c4b79 100644 --- a/webhook_server_container/libs/config.py +++ b/webhook_server_container/libs/config.py @@ -19,5 +19,5 @@ def data(self) -> Dict[str, Any]: with open(self.config_path) as fd: return yaml.safe_load(fd) - def get_repository(self, repository_name: str) -> Dict[str, Any]: + def repository_data(self, repository_name: str) -> Dict[str, Any]: return self.data["repositories"].get(repository_name, {}) diff --git a/webhook_server_container/libs/github_api.py b/webhook_server_container/libs/github_api.py index 52606cfa..56522c78 100644 --- a/webhook_server_container/libs/github_api.py +++ b/webhook_server_container/libs/github_api.py @@ -8,8 +8,8 @@ from concurrent.futures import Future, ThreadPoolExecutor, as_completed from pathlib import Path from typing import Any, Dict, List, Optional, Set, Tuple +from stringcolor import cs -from fastapi import FastAPI from github.Branch import Branch from github.ContentFile import ContentFile import requests @@ -20,7 +20,6 @@ from github.Commit import Commit from github.PullRequest import PullRequest from github.GithubException import UnknownObjectException -from simple_logger.logger import get_logger from timeout_sampler import TimeoutSampler, TimeoutExpiredError from webhook_server_container.libs.config import Config @@ -39,7 +38,6 @@ DELETE_STR, DYNAMIC_LABELS_DICT, FAILURE_STR, - FASTAPI_APP, HAS_CONFLICTS_LABEL_STR, HOLD_LABEL_STR, IN_PROGRESS_STR, @@ -67,19 +65,13 @@ get_api_with_highest_rate_limit, extract_key_from_dict, get_github_repo_api, + get_logger_with_params, get_value_from_dicts, run_command, get_apis_and_tokes_from_config, ) -LOGGER = get_logger( - name="ProcessGithubWehook", - filename=os.environ.get("WEBHOOK_SERVER_LOG_FILE"), - level=os.environ.get("WEBHOOK_SERVER_LOG_LEVEL", "INFO"), -) - - class NoPullRequestError(Exception): pass @@ -97,23 
+89,21 @@ def __str__(self) -> str: class ProcessGithubWehook: - def __init__(self, hook_data: Dict[Any, Any], headers: Headers): - self.app: FastAPI = FASTAPI_APP + def __init__(self, hook_data: Dict[Any, Any], headers: Headers) -> None: self.hook_data = hook_data self.headers = headers self.repository_name: str = hook_data["repository"]["name"] - self.log_prefix_with_color: str = "" self.parent_committer: str = "" self.container_repo_dir: str = "/tmp/repository" self.jira_track_pr: bool = False self.issue_title: str = "" self.all_required_status_checks: List[str] = [] - self.config = Config() self.x_github_delivery: str = self.headers.get("X-GitHub-Delivery", "") self.github_event: str = self.headers["X-GitHub-Event"] - self.log_prefix = self.prepare_log_prefix() self.owners_content: Dict[str, Any] = {} + self.config = Config() + self.log_prefix = self.prepare_log_prefix() self._repo_data_from_config() self.github_app_api = get_repository_github_app_api( @@ -121,7 +111,7 @@ def __init__(self, hook_data: Dict[Any, Any], headers: Headers): ) if not self.github_app_api: - LOGGER.error( + self.logger.error( ( f"{self.log_prefix} not found by manage-repositories-app, " "make sure the app installed (https://github.com/apps/manage-repositories-app)" @@ -133,13 +123,19 @@ def __init__(self, hook_data: Dict[Any, Any], headers: Headers): config=self.config, repository_name=self.repository_name ) - self.repository = get_github_repo_api(github_api=self.github_api, repository=self.repository_full_name) + if self.github_api and self.token: + self.repository = get_github_repo_api(github_api=self.github_api, repository=self.repository_full_name) + + else: + self.logger.error(f"{self.log_prefix} Failed to get GitHub API and token.") + return + self.repository_by_github_app = get_github_repo_api( github_api=self.github_app_api, repository=self.repository_full_name ) if not (self.repository or self.repository_by_github_app): - LOGGER.error(f"{self.log_prefix} Failed to get 
repository.") + self.logger.error(f"{self.log_prefix} Failed to get repository.") return self.add_api_users_to_auto_verified_and_merged_users() @@ -192,7 +188,7 @@ def process(self) -> None: try: self.pull_request = self._get_pull_request() self.log_prefix = self.prepare_log_prefix(pull_request=self.pull_request) - LOGGER.info(f"{self.log_prefix} {event_log}") + self.logger.debug(f"{self.log_prefix} {event_log}") self.owners_content = self.get_owners_content() self.last_commit = self._get_last_commit() self.parent_committer = self.pull_request.user.login @@ -204,7 +200,7 @@ def process(self) -> None: if self.parent_committer in reviewers_and_approvers: self.jira_assignee = self.jira_user_mapping.get(self.parent_committer) if not self.jira_assignee: - LOGGER.info( + self.logger.debug( f"{self.log_prefix} Jira tracking is disabled for the current pull request. " f"Committer {self.parent_committer} is not in configures in jira-user-mapping" ) @@ -214,9 +210,9 @@ def process(self) -> None: f"[AUTO:FROM:GITHUB] [{self.repository_name}] " f"PR [{self.pull_request.number}]: {self.pull_request.title}" ) - LOGGER.info(f"{self.log_prefix} Jira tracking is enabled for the current pull request.") + self.logger.debug(f"{self.log_prefix} Jira tracking is enabled for the current pull request.") else: - LOGGER.info( + self.logger.debug( f"{self.log_prefix} Jira tracking is disabled for the current pull request. " f"Committer {self.parent_committer} is not in {reviewers_and_approvers}" ) @@ -234,7 +230,7 @@ def process(self) -> None: self.process_pull_request_check_run_webhook_data() except NoPullRequestError: - LOGGER.info(f"{self.log_prefix} {event_log}. [No pull request found in hook data]") + self.logger.debug(f"{self.log_prefix} {event_log}. 
[No pull request found in hook data]") self.owners_content = self.get_owners_content() if self.github_event == "push": @@ -259,88 +255,107 @@ def add_api_users_to_auto_verified_and_merged_users(self) -> None: apis_and_tokens = get_apis_and_tokes_from_config(config=self.config, repository_name=self.repository_name) self.auto_verified_and_merged_users.extend([_api[0].get_user().login for _api in apis_and_tokens]) - def _set_log_prefix_color(self) -> None: - repo_str: str = "\033[1;{color}m{name}\033[1;0m" - color_file: str = "/tmp/color.json" - color_json: Dict[str, int] + def _get_reposiroty_color_for_log_prefix(self) -> str: + def _get_random_color(_colors: List[str], _json: Dict[str, str]) -> str: + color = random.choice(_colors) + _json[self.repository_name] = color + + if _selected := cs(self.repository_name, color).render(): + return _selected + + return self.repository_name + + _all_colors: List[str] = [] + color_json: Dict[str, str] + _colors_to_exclude = ("blue", "white", "black", "grey") + color_file: str = os.path.join(self.config.data_dir, "log-colors.json") + + for _color_name in cs.colors.values(): + _cname = _color_name["name"] + if _cname.lower() in _colors_to_exclude: + continue + + _all_colors.append(_cname) + try: with open(color_file) as fd: color_json = json.load(fd) + except Exception: color_json = {} - color: int = color_json.get(self.repository_name, 0) - if not color: - color = random.choice(range(31, 39)) - color_json[self.repository_name] = color + if color := color_json.get(self.repository_name, ""): + _cs_object = cs(self.repository_name, color) + if cs.find_color(_cs_object): + _str_color = _cs_object.render() + + else: + _str_color = _get_random_color(_colors=_all_colors, _json=color_json) - self.log_prefix_with_color = repo_str.format(color=color, name=self.repository_name) + else: + _str_color = _get_random_color(_colors=_all_colors, _json=color_json) with open(color_file, "w") as fd: json.dump(color_json, fd) + if _str_color: + 
_str_color = _str_color.replace("\x1b", "\033") + return _str_color + + return self.repository_name + def prepare_log_prefix(self, pull_request: Optional[PullRequest] = None) -> str: - self._set_log_prefix_color() + _repository_color = self._get_reposiroty_color_for_log_prefix() + _id = self.x_github_delivery.split("-", 1)[-1] return ( - f"{self.log_prefix_with_color}[{self.github_event}][{self.x_github_delivery}][PR {pull_request.number}]:" + f"{_repository_color}[{self.github_event}][{_id}][PR {pull_request.number}]:" if pull_request - else f"{self.log_prefix_with_color}:[{self.github_event}][{self.x_github_delivery}]" + else f"{_repository_color}[{self.github_event}][{_id}]:" ) - def hash_token(self, message: str) -> str: - if self.token: - hashed_message = message.replace(self.token, "*****") - return hashed_message - - return message - - def app_logger_info(self, message: str) -> None: - hashed_message = self.hash_token(message=message) - LOGGER.info(hashed_message) - - def app_logger_error(self, message: str) -> None: - hashed_message = self.hash_token(message=message) - LOGGER.error(hashed_message) - def process_pull_request_check_run_webhook_data(self) -> None: _check_run: Dict[str, Any] = self.hook_data["check_run"] check_run_name: str = _check_run["name"] check_run_status: str = _check_run["status"] check_run_conclusion: str = _check_run["conclusion"] check_run_head_sha: str = _check_run["head_sha"] - LOGGER.info( + self.logger.debug( f"{self.log_prefix} processing check_run - Name: {check_run_name} Status: {check_run_status} Conclusion: {check_run_conclusion}" ) if check_run_name == CAN_BE_MERGED_STR: - LOGGER.warning(f"{self.log_prefix} check run is {CAN_BE_MERGED_STR}, skipping") + self.logger.debug(f"{self.log_prefix} check run is {CAN_BE_MERGED_STR}, skipping") return if getattr(self, "pull_request", None): return self.check_if_can_be_merged() - LOGGER.info(f"{self.log_prefix} No pull request found in hook data, searching for pull request by head 
sha") + self.logger.debug( + f"{self.log_prefix} No pull request found in hook data, searching for pull request by head sha" + ) for _pull_request in self.repository.get_pulls(state="open"): if _pull_request.head.sha == check_run_head_sha: self.pull_request = _pull_request self.last_commit = self._get_last_commit() return self.check_if_can_be_merged() - LOGGER.error(f"{self.log_prefix} No pull request found") + self.logger.error(f"{self.log_prefix} No pull request found") def _repo_data_from_config(self) -> None: config_data = self.config.data # Global repositories configuration - repo_data = self.config.get_repository( + repo_data = self.config.repository_data( repository_name=self.repository_name ) # Specific repository configuration if not repo_data: raise RepositoryNotFoundError(f"Repository {self.repository_name} not found in config file") + self.repository_full_name: str = repo_data["name"] + self.logger = get_logger_with_params(name="ProcessGithubWehook", repository_name=self.repository_name) + self.github_app_id: str = get_value_from_dicts( primary_dict=repo_data, secondary_dict=config_data, key="github-app-id" ) - self.repository_full_name: str = repo_data["name"] self.pypi: Dict[str, str] = get_value_from_dicts(primary_dict=repo_data, secondary_dict=config_data, key="pypi") self.verified_job: bool = get_value_from_dicts( primary_dict=repo_data, @@ -389,7 +404,7 @@ def _repo_data_from_config(self) -> None: self.jira_user_mapping: Dict[str, str] = self.jira.get("user-mapping", {}) self.jira_enabled_repository = all([self.jira_server, self.jira_project, self.jira_token]) if not self.jira_enabled_repository: - LOGGER.error( + self.logger.error( f"{self.log_prefix} Jira configuration is not valid. 
Server: {self.jira_server}, " f"Project: {self.jira_project}, Token: {self.jira_token}" ) @@ -436,49 +451,50 @@ def pull_request_labels_names(self) -> List[str]: def skip_if_pull_request_already_merged(self) -> bool: if self.pull_request and self.pull_request.is_merged(): - LOGGER.info(f"{self.log_prefix}: PR is merged, not processing") + self.logger.info(f"{self.log_prefix}: PR is merged, not processing") return True return False def _remove_label(self, label: str) -> bool: if self.label_exists_in_pull_request(label=label): - LOGGER.info(f"{self.log_prefix} Removing label {label}") + self.logger.info(f"{self.log_prefix} Removing label {label}") self.pull_request.remove_from_labels(label) return self.wait_for_label(label=label, exists=False) - LOGGER.warning(f"{self.log_prefix} Label {label} not found and cannot be removed") + self.logger.debug(f"{self.log_prefix} Label {label} not found and cannot be removed") return False def _add_label(self, label: str) -> None: label = label.strip() if len(label) > 49: - LOGGER.warning(f"{label} is to long, not adding.") + self.logger.debug(f"{label} is to long, not adding.") return if self.label_exists_in_pull_request(label=label): - LOGGER.info(f"{self.log_prefix} Label {label} already assign to PR {self.pull_request.number}") + self.logger.debug(f"{self.log_prefix} Label {label} already assign") return if label in STATIC_LABELS_DICT: - LOGGER.info(f"{self.log_prefix} Adding pull request label {label} to {self.pull_request.number}") + self.logger.info(f"{self.log_prefix} Adding pull request label {label}") self.pull_request.add_to_labels(label) return _color = [DYNAMIC_LABELS_DICT[_label] for _label in DYNAMIC_LABELS_DICT if _label in label] - LOGGER.info(f"{self.log_prefix} Label {label} was {'found' if _color else 'not found'} in labels dict") + self.logger.debug(f"{self.log_prefix} Label {label} was {'found' if _color else 'not found'} in labels dict") color = _color[0] if _color else "D4C5F9" - 
LOGGER.info(f"{self.log_prefix} Adding label {label} with color {color}") + _with_color_msg = f"repository label {label} with color {color}" try: _repo_label = self.repository.get_label(label) _repo_label.edit(name=_repo_label.name, color=color) - LOGGER.info(f"{self.log_prefix} Edit repository label {label} with color {color}") + self.logger.debug(f"{self.log_prefix} Edit {_with_color_msg}") + except UnknownObjectException: - LOGGER.info(f"{self.log_prefix} Add repository label {label} with color {color}") + self.logger.debug(f"{self.log_prefix} Add {_with_color_msg}") self.repository.create_label(name=label, color=color) - LOGGER.info(f"{self.log_prefix} Adding pull request label {label} to {self.pull_request.number}") + self.logger.info(f"{self.log_prefix} Adding pull request label {label}") self.pull_request.add_to_labels(label) self.wait_for_label(label=label, exists=True) @@ -494,7 +510,7 @@ def wait_for_label(self, label: str, exists: bool) -> bool: return True except TimeoutExpiredError: - LOGGER.warning(f"{self.log_prefix} Label {label} {'not found' if exists else 'found'}") + self.logger.debug(f"{self.log_prefix} Label {label} {'not found' if exists else 'found'}") return False @@ -511,7 +527,7 @@ def upload_to_pypi(self, tag_name: str) -> None: out: str = "" token: str = self.pypi["token"] env: str = f"-e TWINE_USERNAME=__token__ -e TWINE_PASSWORD={token} " - LOGGER.info(f"{self.log_prefix} Start uploading to pypi") + self.logger.info(f"{self.log_prefix} Start uploading to pypi") _dist_dir: str = "/tmp/dist" cmd: str = ( f" python3 -m build --sdist --outdir {_dist_dir} ." 
@@ -521,7 +537,7 @@ def upload_to_pypi(self, tag_name: str) -> None: try: rc, out, err = self._run_in_container(command=cmd, env=env, checkout=tag_name) if rc: - LOGGER.info(f"{self.log_prefix} Publish to pypi finished") + self.logger.info(f"{self.log_prefix} Publish to pypi finished") if self.slack_webhook_url: message: str = f""" ``` @@ -532,10 +548,10 @@ def upload_to_pypi(self, tag_name: str) -> None: except Exception as exp: err = f"Publish to pypi failed: {exp}" - LOGGER.error(f"{self.log_prefix} {err}") + self.logger.error(f"{self.log_prefix} {err}") self.repository.create_issue( title=err, - assignee=self.approvers[0] if self.approvers else None, + assignee=self.approvers[0] if self.approvers else "", body=f""" stdout: `{out}` stderr: `{err}` @@ -544,12 +560,17 @@ def upload_to_pypi(self, tag_name: str) -> None: def get_owners_content(self) -> Dict[str, Any]: try: - owners_content: ContentFile = self.repository.get_contents("OWNERS") + owners_content: list[ContentFile] | ContentFile = self.repository.get_contents("OWNERS") + if isinstance(owners_content, list): + self.logger.debug(f"{self.log_prefix} Found more then one OWNERS file, using the first one") + owners_content = owners_content[0] + _content: Dict[str, Any] = yaml.safe_load(owners_content.decoded_content) - LOGGER.info(f"{self.log_prefix} OWNERS file content: {_content}") + self.logger.debug(f"{self.log_prefix} OWNERS file content: {_content}") return _content + except UnknownObjectException: - LOGGER.error(f"{self.log_prefix} OWNERS file not found") + self.logger.error(f"{self.log_prefix} OWNERS file not found") return {} @property @@ -560,7 +581,7 @@ def reviewers(self) -> List[str]: else: _reviewers = bc_reviewers - LOGGER.info(f"{self.log_prefix} Reviewers: {_reviewers}") + self.logger.debug(f"{self.log_prefix} Reviewers: {_reviewers}") return _reviewers @property @@ -587,7 +608,7 @@ def list_changed_commit_files(self) -> list[str]: return [fd["filename"] for fd in 
self.last_commit.raw_data["files"]] def assign_reviewers(self) -> None: - LOGGER.info(f"{self.log_prefix} Assign reviewers") + self.logger.info(f"{self.log_prefix} Assign reviewers") changed_files = self.list_changed_commit_files() reviewers_to_add = self.reviewers for _file, _file_reviewers in self.files_reviewers.items(): @@ -599,14 +620,14 @@ def assign_reviewers(self) -> None: reviewers_to_add.extend(_folder_reviewers) _to_add: List[str] = list(set(reviewers_to_add)) - LOGGER.info(f"{self.log_prefix} Reviewers to add: {_to_add}") + self.logger.debug(f"{self.log_prefix} Reviewers to add: {', '.join(_to_add)}") for reviewer in _to_add: if reviewer != self.pull_request.user.login: - LOGGER.info(f"{self.log_prefix} Adding reviewer {reviewer}") + self.logger.debug(f"{self.log_prefix} Adding reviewer {reviewer}") try: self.pull_request.create_review_request([reviewer]) except GithubException as ex: - LOGGER.warning(f"{self.log_prefix} Failed to add reviewer {reviewer}. {ex}") + self.logger.debug(f"{self.log_prefix} Failed to add reviewer {reviewer}. {ex}") self.pull_request.create_issue_comment(f"{reviewer} can not be added as reviewer. {ex}") def add_size_label(self) -> None: @@ -643,12 +664,14 @@ def add_size_label(self) -> None: def label_by_user_comment(self, user_request: str, remove: bool, reviewed_user: str, issue_comment_id: int) -> None: if not any(user_request.startswith(label_name) for label_name in USER_LABELS_DICT): - LOGGER.info(f"{self.log_prefix} Label {user_request} is not a predefined one, will not be added / removed.") + self.logger.debug( + f"{self.log_prefix} Label {user_request} is not a predefined one, will not be added / removed." 
+ ) return - LOGGER.info( - f"{self.log_prefix} {'Remove' if remove else 'Add'} " + self.logger.info( + f"{self.log_prefix} {DELETE_STR if remove else ADD_STR} " f"label requested by user {reviewed_user}: {user_request}" ) self.create_comment_reaction(issue_comment_id=issue_comment_id, reaction=REACTIONS.ok) @@ -753,13 +776,13 @@ def set_cherry_pick_failure(self, output: Dict[str, Any]) -> None: def create_issue_for_new_pull_request(self) -> None: if self.parent_committer in self.auto_verified_and_merged_users: - LOGGER.info( + self.logger.info( f"{self.log_prefix} Committer {self.parent_committer} is part of " f"{self.auto_verified_and_merged_users}, will not create issue." ) return - LOGGER.info(f"{self.log_prefix} Creating issue for new PR: {self.pull_request.title}") + self.logger.info(f"{self.log_prefix} Creating issue for new PR: {self.pull_request.title}") self.repository.create_issue( title=self._generate_issue_title(), body=self._generate_issue_body(), @@ -769,7 +792,7 @@ def create_issue_for_new_pull_request(self) -> None: def close_issue_for_merged_or_closed_pr(self, hook_action: str) -> None: for issue in self.repository.get_issues(): if issue.body == self._generate_issue_body(): - LOGGER.info(f"{self.log_prefix} Closing issue {issue.title} for PR: {self.pull_request.title}") + self.logger.info(f"{self.log_prefix} Closing issue {issue.title} for PR: {self.pull_request.title}") issue.create_comment( f"{self.log_prefix} Closing issue for PR: {self.pull_request.title}.\nPR was {hook_action}." 
) @@ -778,7 +801,7 @@ def close_issue_for_merged_or_closed_pr(self, hook_action: str) -> None: def delete_remote_tag_for_merged_or_closed_pr(self) -> None: if not self.build_and_push_container: - LOGGER.info(f"{self.log_prefix} repository do not have container configured") + self.logger.info(f"{self.log_prefix} repository do not have container configured") return repository_full_tag = self._container_repository_and_tag() @@ -809,9 +832,11 @@ def delete_remote_tag_for_merged_or_closed_pr(self) -> None: )[0]: self.pull_request.create_issue_comment(f"Successfully removed PR tag: {repository_full_tag}.") else: - LOGGER.error(f"{self.log_prefix} Failed to delete tag: {repository_full_tag}. OUT:{out}. ERR:{err}") + self.logger.error( + f"{self.log_prefix} Failed to delete tag: {repository_full_tag}. OUT:{out}. ERR:{err}" + ) else: - LOGGER.warning( + self.logger.warning( f"{self.log_prefix} {pr_tag} tag not found in registry {self.container_repository}. " f"OUT:{out}. ERR:{err}" ) @@ -819,19 +844,21 @@ def delete_remote_tag_for_merged_or_closed_pr(self) -> None: self.pull_request.create_issue_comment( f"Failed to delete tag: {repository_full_tag}. Please delete it manually." ) - LOGGER.error(f"{self.log_prefix} Failed to delete tag: {repository_full_tag}. OUT:{out}. ERR:{err}") + self.logger.error(f"{self.log_prefix} Failed to delete tag: {repository_full_tag}. OUT:{out}. ERR:{err}") def process_comment_webhook_data(self) -> None: if self.hook_data["action"] in ("action", "deleted"): return issue_number: str = self.hook_data["issue"]["number"] - LOGGER.info(f"{self.log_prefix} Processing issue {issue_number}") + self.logger.info(f"{self.log_prefix} Processing issue {issue_number}") body: str = self.hook_data["comment"]["body"] if body == self.welcome_msg: - LOGGER.info(f"{self.log_prefix} Welcome message found in issue {self.pull_request.title}. Not processing") + self.logger.debug( + f"{self.log_prefix} Welcome message found in issue {self.pull_request.title}. 
Not processing" + ) return striped_body: str = body.strip() @@ -851,14 +878,14 @@ def process_comment_webhook_data(self) -> None: def process_pull_request_webhook_data(self) -> None: hook_action: str = self.hook_data["action"] - LOGGER.info(f"{self.log_prefix} hook_action is: {hook_action}") + self.logger.info(f"{self.log_prefix} hook_action is: {hook_action}") pull_request_data: Dict[str, Any] = self.hook_data["pull_request"] self.parent_committer = pull_request_data["user"]["login"] self.pull_request_branch = pull_request_data["base"]["ref"] if hook_action == "opened": - LOGGER.info(f"{self.log_prefix} Creating welcome comment") + self.logger.info(f"{self.log_prefix} Creating welcome comment") self.pull_request.create_issue_comment(self.welcome_msg) self.create_issue_for_new_pull_request() @@ -867,11 +894,11 @@ def process_pull_request_webhook_data(self) -> None: if self.jira_track_pr: jira_conn = self.get_jira_conn() if not jira_conn: - LOGGER.error(f"{self.log_prefix} Jira connection not found") + self.logger.error(f"{self.log_prefix} Jira connection not found") else: if self.jira_epic and self.jira_assignee: - LOGGER.info(f"{self.log_prefix} Creating Jira story") + self.logger.info(f"{self.log_prefix} Creating Jira story") jira_story_key = jira_conn.create_story( title=self.issue_title, body=self.pull_request.html_url, @@ -880,7 +907,7 @@ def process_pull_request_webhook_data(self) -> None: ) self._add_label(label=f"{JIRA_STR}:{jira_story_key}") else: - LOGGER.warning( + self.logger.warning( f"{self.log_prefix} Jira epic or assignee is not set. 
Skipping Jira story creation" ) @@ -899,15 +926,17 @@ def process_pull_request_webhook_data(self) -> None: if self.jira_track_pr: jira_conn = self.get_jira_conn() if not jira_conn: - LOGGER.error(f"{self.log_prefix} Jira connection not found") + self.logger.error(f"{self.log_prefix} Jira connection not found") else: if _story_key := self.get_story_key_with_jira_connection(): if not self.jira_assignee: - LOGGER.warning(f"{self.log_prefix} Jira assignee is not set. Skipping sub-task creation") + self.logger.warning( + f"{self.log_prefix} Jira assignee is not set. Skipping sub-task creation" + ) else: - LOGGER.info(f"{self.log_prefix} Creating sub-task for Jira story {_story_key}") + self.logger.info(f"{self.log_prefix} Creating sub-task for Jira story {_story_key}") jira_conn.create_closed_subtask( title=f"{self.issue_title}: New commit from {self.last_committer}", parent_key=_story_key, @@ -921,7 +950,7 @@ def process_pull_request_webhook_data(self) -> None: is_merged = pull_request_data.get("merged") if is_merged: - LOGGER.info(f"{self.log_prefix} PR is merged") + self.logger.info(f"{self.log_prefix} PR is merged") for _label in self.pull_request.labels: _label_name = _label.name @@ -942,11 +971,11 @@ def process_pull_request_webhook_data(self) -> None: if self.jira_track_pr: jira_conn = self.get_jira_conn() if not jira_conn: - LOGGER.error(f"{self.log_prefix} Jira connection not found") + self.logger.error(f"{self.log_prefix} Jira connection not found") else: if _story_key := self.get_story_key_with_jira_connection(): - LOGGER.info(f"{self.log_prefix} Closing Jira story") + self.logger.info(f"{self.log_prefix} Closing Jira story") jira_conn.close_issue( key=_story_key, comment=f"PR: {self.pull_request.title} is closed. 
Merged: {is_merged}", @@ -960,7 +989,7 @@ def process_pull_request_webhook_data(self) -> None: if labeled == CAN_BE_MERGED_STR: return - LOGGER.info(f"{self.log_prefix} PR {self.pull_request.number} {hook_action} with {labeled}") + self.logger.info(f"{self.log_prefix} PR {self.pull_request.number} {hook_action} with {labeled}") if labeled.startswith(APPROVED_BY_LABEL_PREFIX): _reviewer = labeled.split(APPROVED_BY_LABEL_PREFIX)[-1] @@ -984,13 +1013,13 @@ def process_push_webhook_data(self) -> None: tag = re.search(r"refs/tags/?(.*)", self.hook_data["ref"]) if tag: tag_name = tag.group(1) - LOGGER.info(f"{self.log_prefix} Processing push for tag: {tag.group(1)}") + self.logger.info(f"{self.log_prefix} Processing push for tag: {tag.group(1)}") if self.pypi: - LOGGER.info(f"{self.log_prefix} Processing upload to pypi for tag: {tag_name}") + self.logger.info(f"{self.log_prefix} Processing upload to pypi for tag: {tag_name}") self.upload_to_pypi(tag_name=tag_name) if self.build_and_push_container and self.container_release: - LOGGER.info(f"{self.log_prefix} Processing build and push container for tag: {tag_name}") + self.logger.info(f"{self.log_prefix} Processing build and push container for tag: {tag_name}") self._run_build_container(push=True, set_check=False, tag=tag_name) def process_pull_request_review_webhook_data(self) -> None: @@ -1013,7 +1042,7 @@ def process_pull_request_review_webhook_data(self) -> None: _story_label = [_label for _label in self.pull_request.labels if _label.name.startswith(JIRA_STR)] if _story_label: if reviewed_user == self.parent_committer or reviewed_user == self.last_committer: - LOGGER.info( + self.logger.info( f"{self.log_prefix} Skipping Jira review sub-task creation for review by {reviewed_user} which is parent or last committer" ) return @@ -1021,10 +1050,10 @@ def process_pull_request_review_webhook_data(self) -> None: _story_key = _story_label[0].name.split(":")[-1] jira_conn = self.get_jira_conn() if not jira_conn: - 
LOGGER.error(f"{self.log_prefix} Jira connection not found") + self.logger.error(f"{self.log_prefix} Jira connection not found") return - LOGGER.info(f"{self.log_prefix} Creating sub-task for Jira story {_story_key}") + self.logger.info(f"{self.log_prefix} Creating sub-task for Jira story {_story_key}") jira_conn.create_closed_subtask( title=f"{self.issue_title}: reviewed by: {reviewed_user} - {review_state}", parent_key=_story_key, @@ -1033,7 +1062,7 @@ def process_pull_request_review_webhook_data(self) -> None: ) def manage_reviewed_by_label(self, review_state: str, action: str, reviewed_user: str) -> None: - LOGGER.info( + self.logger.info( f"{self.log_prefix} " f"Processing label for review from {reviewed_user}. " f"review_state: {review_state}, action: {action}" @@ -1050,7 +1079,7 @@ def manage_reviewed_by_label(self, review_state: str, action: str, reviewed_user base_dict = self.hook_data.get("issue", self.hook_data.get("pull_request")) pr_owner = base_dict["user"]["login"] if pr_owner == reviewed_user: - LOGGER.info(f"{self.log_prefix} PR owner {pr_owner} set /lgtm, not adding label.") + self.logger.info(f"{self.log_prefix} PR owner {pr_owner} set /lgtm, not adding label.") return _remove_label = f"{CHANGED_REQUESTED_BY_LABEL_PREFIX}{reviewed_user}" @@ -1075,7 +1104,7 @@ def manage_reviewed_by_label(self, review_state: str, action: str, reviewed_user if action == DELETE_STR: self._remove_label(label=reviewer_label) else: - LOGGER.warning( + self.logger.warning( f"{self.log_prefix} PR {self.pull_request.number} got unsupported review state: {review_state}" ) @@ -1084,7 +1113,7 @@ def _run_tox(self) -> None: return if self.is_check_run_in_progress(check_run=TOX_STR): - LOGGER.info(f"{self.log_prefix} Check run is in progress, not running {TOX_STR}.") + self.logger.debug(f"{self.log_prefix} Check run is in progress, not running {TOX_STR}.") return cmd = f"{self.tox_python_version} -m {TOX_STR}" @@ -1110,7 +1139,7 @@ def _run_pre_commit(self) -> None: return if 
self.is_check_run_in_progress(check_run=PRE_COMMIT_STR): - LOGGER.info(f"{self.log_prefix} Check run is in progress, not running {PRE_COMMIT_STR}.") + self.logger.debug(f"{self.log_prefix} Check run is in progress, not running {PRE_COMMIT_STR}.") return cmd = f"{PRE_COMMIT_STR} run --all-files" @@ -1136,33 +1165,30 @@ def user_commands(self, command: str, reviewed_user: str, issue_comment_id: int) "check-can-merge", ] if "sonarsource.github.io" in command: - LOGGER.info(f"{self.log_prefix} command is in ignore list") + self.logger.debug(f"{self.log_prefix} command is in ignore list") return - LOGGER.info(f"{self.log_prefix} Processing label/user command {command} by user {reviewed_user}") + self.logger.info(f"{self.log_prefix} Processing label/user command {command} by user {reviewed_user}") command_and_args: List[str] = command.split(" ", 1) _command = command_and_args[0] not_running_msg: str = f"Pull request already merged, not running {_command}" _args: str = command_and_args[1] if len(command_and_args) > 1 else "" - if len(command_and_args) > 1 and _args == "cancel": - LOGGER.info(f"{self.log_prefix} User requested 'cancel' for command {_command}") - remove = True + if remove := len(command_and_args) > 1 and _args == "cancel": + self.logger.info(f"{self.log_prefix} User requested 'cancel' for command {_command}") if _command in available_commands: if not _args and _command not in ("assign-reviewers", "check-can-merge"): issue_msg: str = f"{_command} requires an argument" error_msg: str = f"{self.log_prefix} {issue_msg}" - LOGGER.info(error_msg) + self.logger.debug(error_msg) self.pull_request.create_issue_comment(issue_msg) return if _command == "assign-reviewers": - self.assign_reviewers() - return + return self.assign_reviewers() if _command == "check-can-merge": - self.check_if_can_be_merged() - return + return self.check_if_can_be_merged() if _command == "cherry-pick": self.create_comment_reaction(issue_comment_id=issue_comment_id, reaction=REACTIONS.ok) @@ 
-1179,7 +1205,7 @@ def user_commands(self, command: str, reviewed_user: str, issue_comment_id: int) _exits_target_branches.add(_target_branch) if _non_exits_target_branches_msg: - LOGGER.info(f"{self.log_prefix} {_non_exits_target_branches_msg}") + self.logger.info(f"{self.log_prefix} {_non_exits_target_branches_msg}") self.pull_request.create_issue_comment(_non_exits_target_branches_msg) if _exits_target_branches: @@ -1191,7 +1217,7 @@ def user_commands(self, command: str, reviewed_user: str, issue_comment_id: int) Cherry-pick requested for PR: `{self.pull_request.title}` by user `{reviewed_user}` Adding label/s `{" ".join([_cp_label for _cp_label in cp_labels])}` for automatic cheery-pick once the PR is merged """ - LOGGER.info(f"{self.log_prefix} {info_msg}") + self.logger.info(f"{self.log_prefix} {info_msg}") self.pull_request.create_issue_comment(info_msg) for _cp_label in cp_labels: self._add_label(label=_cp_label) @@ -1213,7 +1239,7 @@ def user_commands(self, command: str, reviewed_user: str, issue_comment_id: int) if not self.tox_enabled: msg: str = f"No {TOX_STR} configured for this repository" error_msg = f"{self.log_prefix} {msg}." 
- LOGGER.info(error_msg) + self.logger.info(error_msg) self.pull_request.create_issue_comment(msg) return @@ -1231,13 +1257,13 @@ def user_commands(self, command: str, reviewed_user: str, issue_comment_id: int) else: msg = f"No {BUILD_CONTAINER_STR} configured for this repository" error_msg = f"{self.log_prefix} {msg}" - LOGGER.info(error_msg) + self.logger.info(error_msg) self.pull_request.create_issue_comment(msg) elif _test == PYTHON_MODULE_INSTALL_STR: if not self.pypi: error_msg = f"{self.log_prefix} No pypi configured" - LOGGER.info(error_msg) + self.logger.info(error_msg) self.pull_request.create_issue_comment(error_msg) return @@ -1251,7 +1277,7 @@ def user_commands(self, command: str, reviewed_user: str, issue_comment_id: int) else: msg = f"No {BUILD_AND_PUSH_CONTAINER_STR} configured for this repository" error_msg = f"{self.log_prefix} {msg}" - LOGGER.info(error_msg) + self.logger.info(error_msg) self.pull_request.create_issue_comment(msg) elif _command == WIP_STR: @@ -1282,12 +1308,12 @@ def user_commands(self, command: str, reviewed_user: str, issue_comment_id: int) def cherry_pick(self, target_branch: str, reviewed_user: str = "") -> None: requested_by = reviewed_user or "by target-branch label" - LOGGER.info(f"{self.log_prefix} Cherry-pick requested by user: {requested_by}") + self.logger.info(f"{self.log_prefix} Cherry-pick requested by user: {requested_by}") new_branch_name = f"{CHERRY_PICKED_LABEL_PREFIX}-{self.pull_request.head.ref}-{shortuuid.uuid()[:5]}" if not self.is_branch_exists(branch=target_branch): err_msg = f"cherry-pick failed: {target_branch} does not exists" - LOGGER.error(err_msg) + self.logger.error(err_msg) self.pull_request.create_issue_comment(err_msg) else: self.set_cherry_pick_in_progress() @@ -1323,7 +1349,7 @@ def cherry_pick(self, target_branch: str, reviewed_user: str = "") -> None: ) else: self.set_cherry_pick_failure(output=output) - LOGGER.error(f"{self.log_prefix} Cherry pick failed: {out} --- {err}") + 
self.logger.error(f"{self.log_prefix} Cherry pick failed: {out} --- {err}") local_branch_name = f"{self.pull_request.head.ref}-{target_branch}" self.pull_request.create_issue_comment( f"**Manual cherry-pick is needed**\nCherry pick failed for " @@ -1347,21 +1373,21 @@ def label_by_pull_requests_merge_state_after_merged(self) -> None: If the mergeable state is 'dirty', the 'has conflicts' label is added. """ time_sleep = 30 - LOGGER.info(f"{self.log_prefix} Sleep for {time_sleep} seconds before getting all opened PRs") + self.logger.info(f"{self.log_prefix} Sleep for {time_sleep} seconds before getting all opened PRs") time.sleep(time_sleep) for pull_request in self.repository.get_pulls(state="open"): self.pull_request = pull_request - LOGGER.info(f"{self.log_prefix} check label pull request after merge") + self.logger.info(f"{self.log_prefix} check label pull request after merge") self.label_pull_request_by_merge_state(_sleep=time_sleep) def label_pull_request_by_merge_state(self, _sleep: int = 0) -> None: if _sleep: - LOGGER.info(f"{self.log_prefix} Sleep for {_sleep} seconds before checking merge state") + self.logger.info(f"{self.log_prefix} Sleep for {_sleep} seconds before checking merge state") time.sleep(_sleep) merge_state = self.pull_request.mergeable_state - LOGGER.info(f"{self.log_prefix} Mergeable state is {merge_state}") + self.logger.debug(f"{self.log_prefix} Mergeable state is {merge_state}") if merge_state == "unknown": return @@ -1388,7 +1414,7 @@ def check_if_can_be_merged(self) -> None: PR has no changed requests from approvers. 
""" if self.skip_if_pull_request_already_merged(): - LOGGER.info(f"{self.log_prefix} Pull request already merged") + self.logger.debug(f"{self.log_prefix} Pull request already merged") return output = { @@ -1400,10 +1426,10 @@ def check_if_can_be_merged(self) -> None: try: self.all_required_status_checks = self.get_all_required_status_checks() - LOGGER.info(f"{self.log_prefix} Check if {CAN_BE_MERGED_STR}.") + self.logger.info(f"{self.log_prefix} Check if {CAN_BE_MERGED_STR}.") self.set_merge_check_queued() last_commit_check_runs = list(self.last_commit.get_check_runs()) - LOGGER.info(f"{self.log_prefix} Check if any required check runs in progress.") + self.logger.debug(f"{self.log_prefix} Check if any required check runs in progress.") check_runs_in_progress = [ check_run.name for check_run in last_commit_check_runs @@ -1412,7 +1438,7 @@ def check_if_can_be_merged(self) -> None: and check_run.name in self.all_required_status_checks ] if check_runs_in_progress: - LOGGER.info( + self.logger.debug( f"{self.log_prefix} Some required check runs in progress {check_runs_in_progress}, " f"skipping check if {CAN_BE_MERGED_STR}." ) @@ -1446,7 +1472,7 @@ def check_if_can_be_merged(self) -> None: if failed_check_runs: failure_output += f"Some check runs failed: {failed_check_runs}\n" - LOGGER.info(f"{self.log_prefix} check if can be merged. PR labels are: {_labels}") + self.logger.debug(f"{self.log_prefix} check if can be merged. PR labels are: {_labels}") for _label in _labels: if CHANGED_REQUESTED_BY_LABEL_PREFIX.lower() in _label.lower(): @@ -1474,7 +1500,7 @@ def check_if_can_be_merged(self) -> None: self._add_label(label=CAN_BE_MERGED_STR) self.set_merge_check_success() if self.parent_committer in self.auto_verified_and_merged_users: - LOGGER.info( + self.logger.info( f"{self.log_prefix} will be merged automatically. 
owner: {self.parent_committer} " f"is part of {self.auto_verified_and_merged_users}" ) @@ -1485,18 +1511,20 @@ def check_if_can_be_merged(self) -> None: ) self.pull_request.merge(merge_method="squash") - LOGGER.info(f"{self.log_prefix} Pull request can be merged") + self.logger.info(f"{self.log_prefix} Pull request can be merged") return failure_output += f"Missing lgtm/approved from approvers {self.approvers}\n" - LOGGER.info(f"{self.log_prefix} cannot be merged: {failure_output}") + self.logger.debug(f"{self.log_prefix} cannot be merged: {failure_output}") output["text"] = failure_output self._remove_label(label=CAN_BE_MERGED_STR) self.set_merge_check_failure(output=output) except Exception as ex: - LOGGER.error(f"{self.log_prefix} Failed to check if can be merged, set check run to {FAILURE_STR} {ex}") + self.logger.error( + f"{self.log_prefix} Failed to check if can be merged, set check run to {FAILURE_STR} {ex}" + ) _err = "Failed to check if can be merged, check logs" output["text"] = _err self._remove_label(label=CAN_BE_MERGED_STR) @@ -1524,10 +1552,10 @@ def _container_repository_and_tag(self, is_merged: bool = False, tag: str = "") tag = f"pr-{self.pull_request.number}" if tag: - LOGGER.info(f"{self.log_prefix} container tag is: {tag}") + self.logger.debug(f"{self.log_prefix} container tag is: {tag}") return f"{self.container_repository}:{tag}" - LOGGER.error(f"{self.log_prefix} container tag not found") + self.logger.error(f"{self.log_prefix} container tag not found") return f"{self.container_repository}:webhook-server-tag-not-found" def _run_build_container( @@ -1545,7 +1573,7 @@ def _run_build_container( if pull_request and set_check: if self.is_check_run_in_progress(check_run=BUILD_CONTAINER_STR) and not is_merged: - LOGGER.info(f"{self.log_prefix} Check run is in progress, not running {BUILD_CONTAINER_STR}.") + self.logger.info(f"{self.log_prefix} Check run is in progress, not running {BUILD_CONTAINER_STR}.") return 
self.set_container_build_in_progress() @@ -1576,7 +1604,7 @@ def _run_build_container( "text": self.get_check_run_text(err=err, out=out), } if rc: - LOGGER.info(f"{self.log_prefix} Done building {_container_repository_and_tag}") + self.logger.info(f"{self.log_prefix} Done building {_container_repository_and_tag}") if pull_request and set_check: return self.set_container_build_success(output=output) @@ -1593,7 +1621,7 @@ def _run_build_container( """ self.send_slack_message(message=message, webhook_url=self.slack_webhook_url) - LOGGER.info(f"{self.log_prefix} Done push {_container_repository_and_tag}") + self.logger.info(f"{self.log_prefix} Done push {_container_repository_and_tag}") else: if push: err_msg: str = f"Failed to create and push {_container_repository_and_tag}" @@ -1616,10 +1644,10 @@ def _run_install_python_module(self) -> None: return if self.is_check_run_in_progress(check_run=PYTHON_MODULE_INSTALL_STR): - LOGGER.info(f"{self.log_prefix} Check run is in progress, not running {PYTHON_MODULE_INSTALL_STR}.") + self.logger.info(f"{self.log_prefix} Check run is in progress, not running {PYTHON_MODULE_INSTALL_STR}.") return - LOGGER.info(f"{self.log_prefix} Installing python module") + self.logger.info(f"{self.log_prefix} Installing python module") f"{PYTHON_MODULE_INSTALL_STR}-{shortuuid.uuid()}" self.set_python_module_install_in_progress() rc, out, err = self._run_in_container(command="pip install .") @@ -1635,7 +1663,7 @@ def _run_install_python_module(self) -> None: def send_slack_message(self, message: str, webhook_url: str) -> None: slack_data: Dict[str, str] = {"text": message} - LOGGER.info(f"{self.log_prefix} Sending message to slack: {message}") + self.logger.info(f"{self.log_prefix} Sending message to slack: {message}") response: requests.Response = requests.post( webhook_url, data=json.dumps(slack_data), @@ -1652,14 +1680,14 @@ def _process_verified(self) -> None: return if self.parent_committer in self.auto_verified_and_merged_users: - 
LOGGER.info( + self.logger.info( f"{self.log_prefix} Committer {self.parent_committer} is part of {self.auto_verified_and_merged_users}" ", Setting verified label" ) self._add_label(label=VERIFIED_LABEL_STR) self.set_verify_check_success() else: - LOGGER.info(f"{self.log_prefix} Processing reset {VERIFIED_LABEL_STR} label on new commit push") + self.logger.info(f"{self.log_prefix} Processing reset {VERIFIED_LABEL_STR} label on new commit push") # Remove verified label self._remove_label(label=VERIFIED_LABEL_STR) self.set_verify_check_queued() @@ -1693,16 +1721,16 @@ def process_opened_or_synchronize_pull_request(self) -> None: for result in as_completed(prepare_pull_futures): if _exp := result.exception(): - LOGGER.error(f"{self.log_prefix} {_exp}") + self.logger.error(f"{self.log_prefix} {_exp}") for result in as_completed(run_check_runs_futures): if _exp := result.exception(): - LOGGER.error(f"{self.log_prefix} {_exp}") + self.logger.error(f"{self.log_prefix} {_exp}") - LOGGER.info(f"{self.log_prefix} {result.result()}") + self.logger.info(f"{self.log_prefix} {result.result()}") try: - LOGGER.info(f"{self.log_prefix} Adding PR owner as assignee") + self.logger.info(f"{self.log_prefix} Adding PR owner as assignee") self.pull_request.add_to_assignees() except Exception: if self.approvers: @@ -1732,23 +1760,17 @@ def set_check_run_status( if output: kwargs["output"] = output - msg: str = f"{self.log_prefix} Set {check_run} check to {status or conclusion}" - LOGGER.info(msg) + msg: str = f"{self.log_prefix} check run {check_run} status: {status or conclusion}" try: self.repository_by_github_app.create_check_run(**kwargs) - if conclusion == SUCCESS_STR: - LOGGER.success(msg) - + if conclusion in (SUCCESS_STR, IN_PROGRESS_STR): + self.logger.success(msg) # type: ignore return except Exception as ex: - LOGGER.error(f"{self.log_prefix} Failed to set {check_run} check to {status or conclusion}, {ex}") + self.logger.debug(f"{self.log_prefix} Failed to set {check_run} check 
to {status or conclusion}, {ex}") kwargs["conclusion"] = FAILURE_STR - - LOGGER.error( - f"{self.log_prefix} Check run {check_run}, status: {FAILURE_STR}, output: {kwargs.get('output')}" - ) self.repository_by_github_app.create_check_run(**kwargs) def _run_in_container( @@ -1792,7 +1814,7 @@ def _run_in_container( try: pull_request = self._get_pull_request() except NoPullRequestError: - LOGGER.error(f"{self.log_prefix} [func:_run_in_container] No pull request found") + self.logger.error(f"{self.log_prefix} [func:_run_in_container] No pull request found") return False, "", "" clone_base_cmd += f" && git checkout origin/pr/{pull_request.number}" @@ -1834,7 +1856,7 @@ def get_jira_conn(self) -> JiraApi: # jira-enabled-repository: {self.jira_enabled_repository} # jira-user-mapping: {self.jira_user_mapping} # """ - # LOGGER.info(f"{self.log_prefix} Repository features: {repository_features}") + # self.logger.info(f"{self.log_prefix} Repository features: {repository_features}") def get_story_key_with_jira_connection(self) -> str: _story_label = [_label for _label in self.pull_request.labels if _label.name.startswith(JIRA_STR)] @@ -1844,13 +1866,13 @@ def get_story_key_with_jira_connection(self) -> str: if _story_key := _story_label[0].name.split(":")[-1]: jira_conn = self.get_jira_conn() if not jira_conn: - LOGGER.error(f"{self.log_prefix} Jira connection not found") + self.logger.error(f"{self.log_prefix} Jira connection not found") return "" return _story_key def get_branch_required_status_checks(self) -> List[str]: if self.repository.private: - LOGGER.info( + self.logger.info( f"{self.log_prefix} Repository is private, skipping getting branch protection required status checks" ) return [] @@ -1878,5 +1900,5 @@ def get_all_required_status_checks(self) -> List[str]: all_required_status_checks.append(PYTHON_MODULE_INSTALL_STR) _all_required_status_checks = branch_required_status_checks + all_required_status_checks - LOGGER.info(f"{self.log_prefix} All required status 
checks: {_all_required_status_checks}") + self.logger.debug(f"{self.log_prefix} All required status checks: {_all_required_status_checks}") return _all_required_status_checks diff --git a/webhook_server_container/libs/jira_api.py b/webhook_server_container/libs/jira_api.py index de2ef155..2574c4b5 100644 --- a/webhook_server_container/libs/jira_api.py +++ b/webhook_server_container/libs/jira_api.py @@ -1,14 +1,12 @@ -import os from typing import Any, Dict, List from jira import Issue, JIRA -from simple_logger.logger import get_logger - -LOGGER = get_logger(name="JiraApi", filename=os.environ.get("WEBHOOK_SERVER_LOG_FILE")) +from webhook_server_container.utils.helpers import get_logger_with_params class JiraApi: def __init__(self, server: str, project: str, token: str): + self.logger = get_logger_with_params(name="JiraApi") self.server = server self.project = project self.token = token diff --git a/webhook_server_container/utils/constants.py b/webhook_server_container/utils/constants.py index d342f7ec..9627b117 100644 --- a/webhook_server_container/utils/constants.py +++ b/webhook_server_container/utils/constants.py @@ -1,9 +1,6 @@ from typing import Dict -from fastapi import FastAPI - -FASTAPI_APP: FastAPI = FastAPI(title="webhook-server") OTHER_MAIN_BRANCH: str = "master" TOX_STR: str = "tox" PRE_COMMIT_STR: str = "pre-commit" diff --git a/webhook_server_container/utils/github_repository_settings.py b/webhook_server_container/utils/github_repository_settings.py index 96b26e2a..e05c782f 100644 --- a/webhook_server_container/utils/github_repository_settings.py +++ b/webhook_server_container/utils/github_repository_settings.py @@ -11,7 +11,6 @@ from github.Commit import Commit from github.Auth import AppAuth from github.GithubException import UnknownObjectException -from simple_logger.logger import get_logger from webhook_server_container.libs.config import Config from webhook_server_container.utils.constants import ( @@ -28,13 +27,10 @@ 
get_api_with_highest_rate_limit, get_future_results, get_github_repo_api, + get_logger_with_params, ) - -LOGGER = get_logger( - name="github-repository-settings", - filename=os.environ.get("WEBHOOK_SERVER_LOG_FILE"), -) +LOGGER = get_logger_with_params(name="github-repository-settings") def get_branch_sampler(repo: Repository, branch_name: str) -> Branch: @@ -48,7 +44,7 @@ def set_branch_protection( github_api: Github, ) -> bool: api_user = github_api.get_user().login - LOGGER.info(f"Set repository {repository.name} {branch} settings. enabled checks: {required_status_checks}") + LOGGER.info(f"Set branch {branch} setting for {repository.name}. enabled checks: {required_status_checks}") branch.edit_protection( strict=True, required_conversation_resolution=True, @@ -69,12 +65,11 @@ def set_repository_settings(repository: Repository) -> None: LOGGER.info(f"Set repository {repository.name} settings") repository.edit(delete_branch_on_merge=True, allow_auto_merge=True, allow_update_branch=True) - LOGGER.info(f"Set repository {repository.name} security settings") - if repository.private: LOGGER.warning(f"{repository.name}: Repository is private, skipping setting security settings") return + LOGGER.info(f"Set repository {repository.name} security settings") repository._requester.requestJsonAndCheck( "PATCH", f"{repository.url}/code-scanning/default-setup", @@ -148,10 +143,10 @@ def set_repository_labels(repository: Repository) -> str: if repository_labels[label_lower]["color"] == color: continue else: - LOGGER.info(f"{repository.name}: Edit repository label {label} with color {color}") + LOGGER.debug(f"{repository.name}: Edit repository label {label} with color {color}") repo_label.edit(name=repo_label.name, color=color) else: - LOGGER.info(f"{repository.name}: Add repository label {label} with color {color}") + LOGGER.debug(f"{repository.name}: Add repository label {label} with color {color}") repository.create_label(name=label, color=color) return f"{repository}: Setting 
repository labels is done" @@ -206,7 +201,7 @@ def set_repository( with ThreadPoolExecutor() as executor: for branch_name, status_checks in protected_branches.items(): - LOGGER.info(f"{repository}: Getting branch {branch_name}") + LOGGER.debug(f"{repository}: Getting branch {branch_name}") branch = get_branch_sampler(repo=repo, branch_name=branch_name) if not branch: LOGGER.error(f"{repository}: Failed to get branch {branch_name}") @@ -295,11 +290,11 @@ def set_repository_check_runs_to_queued( ) app_api.create_check_run(name=check_run.name, head_sha=last_commit.sha, status=QUEUED_STR) - return True, f"{repository}: Set check run status to {QUEUED_STR} is done", LOGGER.info + return True, f"{repository}: Set check run status to {QUEUED_STR} is done", LOGGER.debug def get_repository_github_app_api(config_: Config, repository_name: str) -> Optional[Github]: - LOGGER.info("Getting repositories GitHub app API") + LOGGER.debug("Getting repositories GitHub app API") with open(os.path.join(config_.data_dir, "webhook-server.private-key.pem")) as fd: private_key = fd.read() @@ -322,8 +317,12 @@ def get_repository_github_app_api(config_: Config, repository_name: str) -> Opti if __name__ == "__main__": config = Config() api, _ = get_api_with_highest_rate_limit(config=config) - set_repositories_settings(config_=config, github_api=api) - set_all_in_progress_check_runs_to_queued( - config_=config, - github_api=api, - ) + if api: + set_repositories_settings(config_=config, github_api=api) + set_all_in_progress_check_runs_to_queued( + config_=config, + github_api=api, + ) + + else: + LOGGER.error("Failed to get GitHub API") diff --git a/webhook_server_container/utils/helpers.py b/webhook_server_container/utils/helpers.py index 47a1a3e3..07337ef0 100644 --- a/webhook_server_container/utils/helpers.py +++ b/webhook_server_container/utils/helpers.py @@ -1,6 +1,6 @@ from __future__ import annotations import datetime -import os +from logging import Logger import shlex import subprocess 
from concurrent.futures import Future, as_completed @@ -13,7 +13,37 @@ from webhook_server_container.libs.config import Config -LOGGER = get_logger(name="helpers", filename=os.environ.get("WEBHOOK_SERVER_LOG_FILE")) + +def get_value_from_dicts( + primary_dict: Dict[Any, Any], + secondary_dict: Dict[Any, Any], + key: str, + return_on_none: Optional[Any] = None, +) -> Any: + """ + Get value from two dictionaries. + + If value is not found in primary_dict, try to get it from secondary_dict, otherwise return return_on_none. + """ + return primary_dict.get(key, secondary_dict.get(key, return_on_none)) + + +def get_logger_with_params(name: str, repository_name: Optional[str] = "") -> Logger: + _config = Config() + config_data = _config.data # Global repositories configuration + repo_data: Dict[str, Any] = {} + + if repository_name: + repo_data = _config.repository_data(repository_name=repository_name) # Specific repository configuration + + log_level: str = get_value_from_dicts( + primary_dict=repo_data, secondary_dict=config_data, key="log-level", return_on_none="INFO" + ) + log_file: str = get_value_from_dicts(primary_dict=repo_data, secondary_dict=config_data, key="log-file") + return get_logger(name=name, filename=log_file, level=log_level) + + +LOGGER = get_logger_with_params(name="helpers") def extract_key_from_dict(key: Any, _dict: Dict[Any, Any]) -> Any: @@ -63,7 +93,7 @@ def run_command( out_decoded: str = "" err_decoded: str = "" try: - LOGGER.info(f"{log_prefix} Running '{command}' command") + LOGGER.debug(f"{log_prefix} Running '{command}' command") sub_process = subprocess.run( shlex.split(command), capture_output=capture_output, @@ -100,7 +130,7 @@ def get_apis_and_tokes_from_config(config: Config, repository_name: str = "") -> apis_and_tokens: List[Tuple[Github, str]] = [] tokens = get_value_from_dicts( - primary_dict=config.get_repository(repository_name=repository_name), + primary_dict=config.repository_data(repository_name=repository_name), 
secondary_dict=config.data, key="github-tokens", return_on_none=[], @@ -136,7 +166,7 @@ def get_api_with_highest_rate_limit(config: Config, repository_name: str = "") - rate_limit = _api.get_rate_limit() if rate_limit.core.remaining > remaining: remaining = rate_limit.core.remaining - LOGGER.info(f"API user {_api_user} remaining rate limit: {remaining}") + LOGGER.debug(f"API user {_api_user} remaining rate limit: {remaining}") api, token = _api, _token if rate_limit: @@ -149,8 +179,9 @@ def get_api_with_highest_rate_limit(config: Config, repository_name: str = "") - def log_rate_limit(rate_limit: RateLimit, api_user: str) -> None: rate_limit_str: str time_for_limit_reset: int = (rate_limit.core.reset - datetime.datetime.now(tz=datetime.timezone.utc)).seconds + below_minimum: bool = rate_limit.core.remaining < 700 - if rate_limit.core.remaining < 700: + if below_minimum: rate_limit_str = f"{Fore.RED}{rate_limit.core.remaining}{Fore.RESET}" elif rate_limit.core.remaining < 2000: @@ -159,25 +190,14 @@ def log_rate_limit(rate_limit: RateLimit, api_user: str) -> None: else: rate_limit_str = f"{Fore.GREEN}{rate_limit.core.remaining}{Fore.RESET}" - LOGGER.info( + msg = ( f"{Fore.CYAN}[{api_user}] API rate limit:{Fore.RESET} Current {rate_limit_str} of {rate_limit.core.limit}. " f"Reset in {rate_limit.core.reset} [{datetime.timedelta(seconds=time_for_limit_reset)}] " f"(UTC time is {datetime.datetime.now(tz=datetime.timezone.utc)})" ) - - -def get_value_from_dicts( - primary_dict: Dict[Any, Any], - secondary_dict: Dict[Any, Any], - key: str, - return_on_none: Optional[Any] = None, -) -> Any: - """ - Get value from two dictionaries. - - If value is not found in primary_dict, try to get it from secondary_dict, otherwise return return_on_none. 
- """ - return primary_dict.get(key, secondary_dict.get(key, return_on_none)) + LOGGER.debug(msg) + if below_minimum: + LOGGER.warning(msg) def get_future_results(futures: List["Future"]) -> None: diff --git a/webhook_server_container/utils/webhook.py b/webhook_server_container/utils/webhook.py index 1fdf9703..98447493 100644 --- a/webhook_server_container/utils/webhook.py +++ b/webhook_server_container/utils/webhook.py @@ -1,20 +1,19 @@ from concurrent.futures import ThreadPoolExecutor -import os from typing import Any, Callable, Dict, List, Tuple from github.Hook import Hook from github import Github -from simple_logger.logger import get_logger from webhook_server_container.libs.config import Config from webhook_server_container.utils.helpers import ( get_api_with_highest_rate_limit, get_future_results, get_github_repo_api, + get_logger_with_params, ) -LOGGER = get_logger(name="webhook", filename=os.environ.get("WEBHOOK_SERVER_LOG_FILE")) +LOGGER = get_logger_with_params(name="webhook") def process_github_webhook(data: Dict[str, Any], github_api: Github, webhook_ip: str) -> Tuple[bool, str, Callable]: @@ -60,4 +59,7 @@ def create_webhook(config_: Config, github_api: Github) -> None: if __name__ == "__main__": config = Config() api, _ = get_api_with_highest_rate_limit(config=config) - create_webhook(config_=config, github_api=api) + if api: + create_webhook(config_=config, github_api=api) + else: + LOGGER.error("Failed to get GitHub API")