From ac0e638ecad837560a4e83a38a8b819229cf17f4 Mon Sep 17 00:00:00 2001 From: bruno zheng Date: Tue, 23 Aug 2022 12:13:09 +0200 Subject: [PATCH 01/13] feat: update compare tests folder * feat: update architecture of project, new folder github_management * feat: new script github_validator_app * feat: validation on Push and Pull Request * feat: validation with the custom branch and not only the main branch on student repo --- .../__init__.py | 0 .../constants.py | 0 github_management/github_validator_app.py | 62 +++++++++++++++++++ .../py.typed | 0 .../utils.py | 4 +- server.py | 35 ++--------- 6 files changed, 68 insertions(+), 33 deletions(-) rename {github_tests_validator_app => github_management}/__init__.py (100%) rename {github_tests_validator_app => github_management}/constants.py (100%) create mode 100644 github_management/github_validator_app.py rename {github_tests_validator_app => github_management}/py.typed (100%) rename {github_tests_validator_app => github_management}/utils.py (96%) diff --git a/github_tests_validator_app/__init__.py b/github_management/__init__.py similarity index 100% rename from github_tests_validator_app/__init__.py rename to github_management/__init__.py diff --git a/github_tests_validator_app/constants.py b/github_management/constants.py similarity index 100% rename from github_tests_validator_app/constants.py rename to github_management/constants.py diff --git a/github_management/github_validator_app.py b/github_management/github_validator_app.py new file mode 100644 index 0000000..647c55f --- /dev/null +++ b/github_management/github_validator_app.py @@ -0,0 +1,62 @@ +from github import GithubIntegration +from github_management.constants import ( + APP_ID, + APP_KEY, + SOLUTION_OWNER, + SOLUTION_REPO_NAME, + SOLUTION_TESTS_ACCESS_TOKEN, + TESTS_FOLDER_NAME, +) +from github_management.utils import compare_tests_folder, get_repo + +git_integration = GithubIntegration( + APP_ID, + APP_KEY, +) + +triggers = { + 'pull_request': ['pull_request', 'head', 'ref'], + 'pusher': ['ref'] +} + +def get_student_branch(payload: dict = None): + for trigger in triggers: + if trigger in payload: + branch = payload + path = triggers[trigger] + while path: + branch = branch[path.pop(0)] + return branch + return None + +def get_student_repo(payload: dict = None): + owner = payload["repository"]["owner"]["login"] + repo_name = payload["repository"]["name"] + token = git_integration.get_access_token( + git_integration.get_installation(owner, repo_name).id + ).token + + return get_repo(token, owner, repo_name) + +def github_validator_repo(payload: dict): + student_repo = get_student_repo(payload) + student_branch = get_student_branch(payload) + if student_branch is None: + # Log error + # FIXME + # Archive the payload + # FIXME + print('Could\'nt find the student commit, maybe the trigger is not managed') + return False + + solution_repo = get_repo(SOLUTION_TESTS_ACCESS_TOKEN, SOLUTION_OWNER, SOLUTION_REPO_NAME) + + # Valide of repo + tests_havent_changed = compare_tests_folder(student_repo, solution_repo, student_branch) + + # Send results to GCP + # FIXME + + # Results of challenges + # FIXME + return tests_havent_changed diff --git a/github_tests_validator_app/py.typed b/github_management/py.typed similarity index 100% rename from github_tests_validator_app/py.typed rename to github_management/py.typed diff --git a/github_tests_validator_app/utils.py b/github_management/utils.py similarity index 96% rename from github_tests_validator_app/utils.py rename to 
github_management/utils.py index 949b81b..2415104 100644 --- a/github_tests_validator_app/utils.py +++ b/github_management/utils.py @@ -45,9 +45,9 @@ def get_last_hash_commit(repo: Repository.Repository, branch_name: str) -> str: def compare_tests_folder( - student_repo: Repository.Repository, solution_repo: Repository.Repository + student_repo: Repository.Repository, solution_repo: Repository.Repository, student_branch_repo: str ) -> bool: - student_contents = student_repo.get_contents(TESTS_FOLDER_NAME) + student_contents = student_repo.get_contents(TESTS_FOLDER_NAME, ref=student_branch_repo) if ( isinstance(student_contents, ContentFile.ContentFile) diff --git a/server.py b/server.py index 59c30ea..c21d499 100644 --- a/server.py +++ b/server.py @@ -1,44 +1,17 @@ import uvicorn from fastapi import FastAPI, Request -from github import GithubIntegration -from github_tests_validator_app.constants import ( - APP_ID, - APP_KEY, - SOLUTION_OWNER, - SOLUTION_REPO_NAME, - SOLUTION_TESTS_ACCESS_TOKEN, - TESTS_FOLDER_NAME, -) -from github_tests_validator_app.utils import compare_tests_folder, get_repo -app = FastAPI() +from github_management.github_validator_app import github_validator_repo -git_integration = GithubIntegration( - APP_ID, - APP_KEY, -) +app = FastAPI() @app.post("/") async def main(request: Request) -> None: payload = await request.json() - if payload["action"] not in ["opened", "synchronize"]: - return - - owner = payload["repository"]["owner"]["login"] - repo_name = payload["repository"]["name"] - token = git_integration.get_access_token( - git_integration.get_installation(owner, repo_name).id - ).token - - student_repo = get_repo(token, owner, repo_name) - solution_repo = get_repo(SOLUTION_TESTS_ACCESS_TOKEN, SOLUTION_OWNER, SOLUTION_REPO_NAME) - - tests_havent_changed = compare_tests_folder(student_repo, solution_repo) - print(tests_havent_changed) - - return + tests_havent_changed = github_validator_repo(payload) + return tests_havent_changed if __name__ == "__main__": From 35c22db47332cca6b304939827d2eb12dff469ac Mon Sep 17 00:00:00 2001 From: bruno zheng Date: Wed, 24 Aug 2022 11:40:41 +0200 Subject: [PATCH 02/13] feat: update architecture of project * new bin folder with github_validator_app * new lib folder with connectors and utils * new github connector * new config file --- .gitignore | 2 - bin/github_validator_app.py | 85 +++++++++++++++++++++ github_management/constants.py => config.py | 0 github_management/__init__.py | 0 github_management/github_validator_app.py | 62 --------------- github_management/py.typed | 0 github_management/utils.py | 63 --------------- lib/connectors/github_connector.py | 52 +++++++++++++ lib/utils.py | 12 +++ server.py | 2 +- 10 files changed, 150 insertions(+), 128 deletions(-) create mode 100644 bin/github_validator_app.py rename github_management/constants.py => config.py (100%) delete mode 100644 github_management/__init__.py delete mode 100644 github_management/github_validator_app.py delete mode 100755 github_management/py.typed delete mode 100644 github_management/utils.py create mode 100644 lib/connectors/github_connector.py create mode 100644 lib/utils.py diff --git a/.gitignore b/.gitignore index 0c2ea52..98032a4 100644 --- a/.gitignore +++ b/.gitignore @@ -136,7 +136,6 @@ dist/ downloads/ eggs/ .eggs/ -lib/ lib64/ parts/ sdist/ @@ -295,7 +294,6 @@ x86/ [Aa][Rr][Mm]/ [Aa][Rr][Mm]64/ bld/ -[Bb]in/ [Oo]bj/ [Ll]og/ diff --git a/bin/github_validator_app.py b/bin/github_validator_app.py new file mode 100644 index 0000000..7bc6174 --- 
/dev/null +++ b/bin/github_validator_app.py @@ -0,0 +1,85 @@ +from github import GithubIntegration, ContentFile +from config import ( + APP_ID, + APP_KEY, + SOLUTION_OWNER, + SOLUTION_REPO_NAME, + SOLUTION_TESTS_ACCESS_TOKEN, + TESTS_FOLDER_NAME, +) + +from lib.connectors.github_connector import GithubConnector + +git_integration = GithubIntegration( + APP_ID, + APP_KEY, +) + +triggers = { + 'pull_request': ['pull_request', 'head', 'ref'], + 'pusher': ['ref'] +} + +def get_student_branch(payload: dict = None): + for trigger in triggers: + if trigger in payload: + branch = payload + path = triggers[trigger] + while path: + branch = branch[path.pop(0)] + return branch + return None + + +def get_student_creds(payload: dict = None): + + owner = payload["repository"]["owner"]["login"] + repo_name = payload["repository"]["name"] + token = git_integration.get_access_token( + git_integration.get_installation(owner, repo_name).id + ).token + return owner, repo_name, token + + +def compare_tests_folder( + student_repo: GithubConnector, solution_repo: GithubConnector, student_branch_repo: str +) -> bool: + + student_contents = student_repo.repo.get_contents(TESTS_FOLDER_NAME, ref=student_branch_repo) + + if ( + isinstance(student_contents, ContentFile.ContentFile) + and student_contents.type == "submodule" + ): + solution_last_commit = solution_repo.get_last_hash_commit("main") + student_tests_commit = student_contents.sha + return solution_last_commit == student_tests_commit + + student_hash_tests = student_repo.get_tests_hash(TESTS_FOLDER_NAME) + solution_hash_tests = solution_repo.get_tests_hash(TESTS_FOLDER_NAME) + return student_hash_tests == solution_hash_tests + + +def github_validator_repo(payload: dict): + + student_owner, student_repo_name, student_token = get_student_creds(payload) + student_github = GithubConnector(student_token, student_owner, student_repo_name) + student_branch = get_student_branch(payload) + if student_branch is None: + # Log error + # FIXME + # Archive the payload + # FIXME + print('Could\'nt find the student commit, maybe the trigger is not managed') + return False + solution_github = GithubConnector(SOLUTION_TESTS_ACCESS_TOKEN, SOLUTION_OWNER, SOLUTION_REPO_NAME) + + # Valide of repo + tests_havent_changed = compare_tests_folder(student_github, solution_github, student_branch) + + # Send results to GCP + # FIXME + + # Results of challenges + # FIXME + return tests_havent_changed diff --git a/github_management/constants.py b/config.py similarity index 100% rename from github_management/constants.py rename to config.py diff --git a/github_management/__init__.py b/github_management/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/github_management/github_validator_app.py b/github_management/github_validator_app.py deleted file mode 100644 index 647c55f..0000000 --- a/github_management/github_validator_app.py +++ /dev/null @@ -1,62 +0,0 @@ -from github import GithubIntegration -from github_management.constants import ( - APP_ID, - APP_KEY, - SOLUTION_OWNER, - SOLUTION_REPO_NAME, - SOLUTION_TESTS_ACCESS_TOKEN, - TESTS_FOLDER_NAME, -) -from github_management.utils import compare_tests_folder, get_repo - -git_integration = GithubIntegration( - APP_ID, - APP_KEY, -) - -triggers = { - 'pull_request': ['pull_request', 'head', 'ref'], - 'pusher': ['ref'] -} - -def get_student_branch(payload: dict = None): - for trigger in triggers: - if trigger in payload: - branch = payload - path = triggers[trigger] - while path: - branch = branch[path.pop(0)] - 
return branch - return None - -def get_student_repo(payload: dict = None): - owner = payload["repository"]["owner"]["login"] - repo_name = payload["repository"]["name"] - token = git_integration.get_access_token( - git_integration.get_installation(owner, repo_name).id - ).token - - return get_repo(token, owner, repo_name) - -def github_validator_repo(payload: dict): - student_repo = get_student_repo(payload) - student_branch = get_student_branch(payload) - if student_branch is None: - # Log error - # FIXME - # Archive the payload - # FIXME - print('Could\'nt find the student commit, maybe the trigger is not managed') - return False - - solution_repo = get_repo(SOLUTION_TESTS_ACCESS_TOKEN, SOLUTION_OWNER, SOLUTION_REPO_NAME) - - # Valide of repo - tests_havent_changed = compare_tests_folder(student_repo, solution_repo, student_branch) - - # Send results to GCP - # FIXME - - # Results of challenges - # FIXME - return tests_havent_changed diff --git a/github_management/py.typed b/github_management/py.typed deleted file mode 100755 index e69de29..0000000 diff --git a/github_management/utils.py b/github_management/utils.py deleted file mode 100644 index 2415104..0000000 --- a/github_management/utils.py +++ /dev/null @@ -1,63 +0,0 @@ -from typing import Any, List - -import hashlib - -from github import ContentFile, Github, Repository -from github_tests_validator_app.constants import TESTS_FOLDER_NAME - - -def get_hash_files(contents: List[ContentFile.ContentFile]) -> str: - hash_sum = "" - for content in contents: - hash_sum += content.sha - hash = hashlib.sha256() - hash.update(hash_sum.encode()) - return str(hash.hexdigest()) - - -def get_tests_hash(repo: Repository.Repository) -> str: - contents = repo.get_contents(TESTS_FOLDER_NAME) - files_content = get_files_content(contents, repo) - hash = get_hash_files(files_content) - return hash - - -def get_files_content(contents: Any, repo: Repository.Repository) -> List[ContentFile.ContentFile]: - files_content = [] - while contents: - file_content = contents.pop(0) - if file_content.type == "dir": - contents.extend(repo.get_contents(file_content.path)) - else: - files_content.append(file_content) - return files_content - - -def get_repo(token: str, owner: str, repo_name: str) -> Repository.Repository: - git_connection = Github(login_or_token=token) - repo = git_connection.get_repo(f"{owner}/{repo_name}") - return repo - - -def get_last_hash_commit(repo: Repository.Repository, branch_name: str) -> str: - branch = repo.get_branch(branch_name) - return branch.commit.sha - - -def compare_tests_folder( - student_repo: Repository.Repository, solution_repo: Repository.Repository, student_branch_repo: str -) -> bool: - student_contents = student_repo.get_contents(TESTS_FOLDER_NAME, ref=student_branch_repo) - - if ( - isinstance(student_contents, ContentFile.ContentFile) - and student_contents.type == "submodule" - ): - solution_last_commit = get_last_hash_commit(solution_repo, "main") - student_tests_commit = student_contents.sha - return solution_last_commit == student_tests_commit - - student_hash_tests = get_tests_hash(student_repo) - solution_hash_tests = get_tests_hash(solution_repo) - - return student_hash_tests == solution_hash_tests diff --git a/lib/connectors/github_connector.py b/lib/connectors/github_connector.py new file mode 100644 index 0000000..135b89f --- /dev/null +++ b/lib/connectors/github_connector.py @@ -0,0 +1,52 @@ +import hashlib +from typing import List, Any +from github import Github, Repository, ContentFile + +from lib.utils 
import get_hash_files + +class GithubConnector(): + + def __init__(self, token: str, owner: str, repo_name: str): + self.OWNER = owner + self.REPO_NAME = repo_name + + print(f"Connecting to Github as {owner} on {repo_name}...") + self.connector = Github(login_or_token=token) + print(f"Connecting to repo {repo_name} as {owner}...") + self.repo = self.connector.get_repo(f"{owner}/{repo_name}") + + def get_repo(self, repo_name: str) -> Repository.Repository: + self.REPO_NAME = repo_name + print(f"Connecting to new repo {repo_name} as {self.OWNER}...") + self.repo = self.connector.get_repo(f"{self.OWNER}/{repo_name}") + return self.repo + + def get_last_hash_commit(self, branch_name: str) -> str: + branch = self.repo.get_branch(branch_name) + return branch.commit.sha + + def get_files_content(self, contents: Any) -> List[ContentFile.ContentFile]: + files_content = [] + while contents: + file_content = contents.pop(0) + if file_content.type == "dir": + contents.extend(self.repo.get_contents(file_content.path)) + else: + files_content.append(file_content) + return + + def get_tests_hash(self, folder_name: str) -> str: + contents = self.repo.get_contents(folder_name) + files_content = self.get_files_content(contents, self.repo) + hash = get_hash_files(files_content) + return hash + + def get_files_content(self, contents: Any) -> List[ContentFile.ContentFile]: + files_content = [] + while contents: + file_content = contents.pop(0) + if file_content.type == "dir": + contents.extend(self.repo.get_contents(file_content.path)) + else: + files_content.append(file_content) + return files_content diff --git a/lib/utils.py b/lib/utils.py new file mode 100644 index 0000000..8924a0b --- /dev/null +++ b/lib/utils.py @@ -0,0 +1,12 @@ +from typing import List +import hashlib +from github import ContentFile + + +def get_hash_files(contents: List[ContentFile.ContentFile]) -> str: + hash_sum = "" + for content in contents: + hash_sum += content.sha + hash = hashlib.sha256() + hash.update(hash_sum.encode()) + return str(hash.hexdigest()) diff --git a/server.py b/server.py index c21d499..6335a17 100644 --- a/server.py +++ b/server.py @@ -1,7 +1,7 @@ import uvicorn from fastapi import FastAPI, Request -from github_management.github_validator_app import github_validator_repo +from bin.github_validator_app import github_validator_repo app = FastAPI() From 84cc059a42f8c14e57b933143f2abaf9bffe603b Mon Sep 17 00:00:00 2001 From: bruno zheng Date: Wed, 24 Aug 2022 15:27:18 +0200 Subject: [PATCH 03/13] fix: activate pre-commit --- .pre-commit-config.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 2902ab1..3a69a78 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -16,7 +16,7 @@ repos: hooks: - id: isort name: isort - entry: poetry run isort --settings-path pyproject.toml + entry: poetry run isort --settings-path pyproject.toml . types: [python] language: system stages: [commit, push] @@ -28,13 +28,13 @@ repos: stages: [commit, push] - id: black name: black - entry: poetry run black --config pyproject.toml + entry: poetry run black --config pyproject.toml . types: [python] language: system stages: [commit, push] - id: mypy name: mypy - entry: poetry run mypy + entry: poetry run mypy . 
require_serial: true types: [python] language: system From 9cd68c6302171e21123f4e7f26d3d8eb34883edd Mon Sep 17 00:00:00 2001 From: bruno zheng Date: Wed, 24 Aug 2022 16:16:46 +0200 Subject: [PATCH 04/13] refactoring: fix error of coding style --- bin/github_validator_app.py | 25 +++++++++++++------------ config.py | 8 +++++++- lib/connectors/github_connector.py | 23 ++++++----------------- lib/utils.py | 2 ++ server.py | 9 +++++---- 5 files changed, 33 insertions(+), 34 deletions(-) diff --git a/bin/github_validator_app.py b/bin/github_validator_app.py index 7bc6174..1276689 100644 --- a/bin/github_validator_app.py +++ b/bin/github_validator_app.py @@ -1,4 +1,5 @@ -from github import GithubIntegration, ContentFile +from typing import Any, Dict, Tuple + from config import ( APP_ID, APP_KEY, @@ -7,7 +8,7 @@ SOLUTION_TESTS_ACCESS_TOKEN, TESTS_FOLDER_NAME, ) - +from github import ContentFile, GithubIntegration from lib.connectors.github_connector import GithubConnector git_integration = GithubIntegration( @@ -15,12 +16,10 @@ APP_KEY, ) -triggers = { - 'pull_request': ['pull_request', 'head', 'ref'], - 'pusher': ['ref'] -} +triggers = {"pull_request": ["pull_request", "head", "ref"], "pusher": ["ref"]} + -def get_student_branch(payload: dict = None): +def get_student_branch(payload: Dict[str, Any]) -> Any: for trigger in triggers: if trigger in payload: branch = payload @@ -31,7 +30,7 @@ def get_student_branch(payload: dict = None): return None -def get_student_creds(payload: dict = None): +def get_student_creds(payload: Dict[str, Any]) -> Tuple[str, str, str]: owner = payload["repository"]["owner"]["login"] repo_name = payload["repository"]["name"] @@ -43,7 +42,7 @@ def get_student_creds(payload: dict = None): def compare_tests_folder( student_repo: GithubConnector, solution_repo: GithubConnector, student_branch_repo: str -) -> bool: +) -> Any: student_contents = student_repo.repo.get_contents(TESTS_FOLDER_NAME, ref=student_branch_repo) @@ -60,7 +59,7 @@ def compare_tests_folder( return student_hash_tests == solution_hash_tests -def github_validator_repo(payload: dict): +def github_validator_repo(payload: Dict[str, Any]) -> Any: student_owner, student_repo_name, student_token = get_student_creds(payload) student_github = GithubConnector(student_token, student_owner, student_repo_name) @@ -70,9 +69,11 @@ def github_validator_repo(payload: dict): # FIXME # Archive the payload # FIXME - print('Could\'nt find the student commit, maybe the trigger is not managed') + # print("Could'nt find the student commit, maybe the trigger is not managed") return False - solution_github = GithubConnector(SOLUTION_TESTS_ACCESS_TOKEN, SOLUTION_OWNER, SOLUTION_REPO_NAME) + solution_github = GithubConnector( + SOLUTION_TESTS_ACCESS_TOKEN, SOLUTION_OWNER, SOLUTION_REPO_NAME + ) # Valide of repo tests_havent_changed = compare_tests_folder(student_github, solution_github, student_branch) diff --git a/config.py b/config.py index 61130d4..71a9a7b 100644 --- a/config.py +++ b/config.py @@ -3,7 +3,13 @@ import os APP_ID = cast(str, os.getenv("GH_APP_ID")) -APP_KEY = cast(str, os.getenv("GH_APP_KEY")) + +with open( + "/Users/brunozheng/.certs/github/tests-validator-app-events.2022-08-22.private-key.pem" +) as f: + APP_KEY = f.read() + +# APP_KEY = cast(str, os.getenv("GH_APP_KEY")) SOLUTION_TESTS_ACCESS_TOKEN = cast(str, os.getenv("SOLUTION_TESTS_ACCESS_TOKEN")) SOLUTION_OWNER = "artefactory-fr" SOLUTION_REPO_NAME = "school_of_data_tests" diff --git a/lib/connectors/github_connector.py b/lib/connectors/github_connector.py 
index 135b89f..3b38004 100644 --- a/lib/connectors/github_connector.py +++ b/lib/connectors/github_connector.py @@ -1,11 +1,10 @@ -import hashlib -from typing import List, Any -from github import Github, Repository, ContentFile +from typing import Any, List +from github import ContentFile, Github, Repository from lib.utils import get_hash_files -class GithubConnector(): +class GithubConnector: def __init__(self, token: str, owner: str, repo_name: str): self.OWNER = owner self.REPO_NAME = repo_name @@ -33,20 +32,10 @@ def get_files_content(self, contents: Any) -> List[ContentFile.ContentFile]: contents.extend(self.repo.get_contents(file_content.path)) else: files_content.append(file_content) - return + return files_content def get_tests_hash(self, folder_name: str) -> str: contents = self.repo.get_contents(folder_name) - files_content = self.get_files_content(contents, self.repo) - hash = get_hash_files(files_content) + files_content = self.get_files_content(contents) + hash = str(get_hash_files(files_content)) return hash - - def get_files_content(self, contents: Any) -> List[ContentFile.ContentFile]: - files_content = [] - while contents: - file_content = contents.pop(0) - if file_content.type == "dir": - contents.extend(self.repo.get_contents(file_content.path)) - else: - files_content.append(file_content) - return files_content diff --git a/lib/utils.py b/lib/utils.py index 8924a0b..2129164 100644 --- a/lib/utils.py +++ b/lib/utils.py @@ -1,5 +1,7 @@ from typing import List + import hashlib + from github import ContentFile diff --git a/server.py b/server.py index 6335a17..6b44aeb 100644 --- a/server.py +++ b/server.py @@ -1,13 +1,14 @@ -import uvicorn -from fastapi import FastAPI, Request +from typing import Any +import uvicorn from bin.github_validator_app import github_validator_repo - +from fastapi import FastAPI, Request app = FastAPI() + @app.post("/") -async def main(request: Request) -> None: +async def main(request: Request) -> Any: payload = await request.json() tests_havent_changed = github_validator_repo(payload) From 3adc913c06cfb34904df380fa03599f48d755578 Mon Sep 17 00:00:00 2001 From: bruno zheng Date: Thu, 25 Aug 2022 10:01:47 +0200 Subject: [PATCH 05/13] feat: new class GitHubUser and apply to scripts --- bin/github_validator_app.py | 86 ------------------------------ bin/github_validator_repo.py | 36 +++++++++++++ bin/validator.py | 48 +++++++++++++++++ config.py | 17 +++--- lib/connectors/github_connector.py | 24 +++++---- lib/user.py | 21 ++++++++ lib/utils.py | 10 +++- server.py | 4 +- 8 files changed, 139 insertions(+), 107 deletions(-) delete mode 100644 bin/github_validator_app.py create mode 100644 bin/github_validator_repo.py create mode 100644 bin/validator.py create mode 100644 lib/user.py diff --git a/bin/github_validator_app.py b/bin/github_validator_app.py deleted file mode 100644 index 1276689..0000000 --- a/bin/github_validator_app.py +++ /dev/null @@ -1,86 +0,0 @@ -from typing import Any, Dict, Tuple - -from config import ( - APP_ID, - APP_KEY, - SOLUTION_OWNER, - SOLUTION_REPO_NAME, - SOLUTION_TESTS_ACCESS_TOKEN, - TESTS_FOLDER_NAME, -) -from github import ContentFile, GithubIntegration -from lib.connectors.github_connector import GithubConnector - -git_integration = GithubIntegration( - APP_ID, - APP_KEY, -) - -triggers = {"pull_request": ["pull_request", "head", "ref"], "pusher": ["ref"]} - - -def get_student_branch(payload: Dict[str, Any]) -> Any: - for trigger in triggers: - if trigger in payload: - branch = payload - path = triggers[trigger] - 
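
The branch-extraction logic these patches move between modules resolves the student's branch by walking a path of nested keys in the webhook payload: push deliveries carry a "pusher" key and a top-level "ref" such as "refs/heads/<branch>", while "pull_request" deliveries expose the source branch under pull_request.head.ref. A minimal sketch of that lookup, run against hand-written payload fragments (the fragments and the extract_branch name are illustrative stand-ins, not part of the patches):

    # Illustrative sketch only: the payload fragments below are hand-written
    # stand-ins for GitHub webhook deliveries; extract_branch mirrors the
    # get_student_branch logic shown in the patches.
    from typing import Any, Dict, Optional

    triggers = {"pull_request": ["pull_request", "head", "ref"], "pusher": ["ref"]}

    def extract_branch(payload: Dict[str, Any]) -> Optional[str]:
        # Pick the first known trigger key present in the payload,
        # then follow its path of nested keys down to the branch value.
        for trigger, path in triggers.items():
            if trigger in payload:
                value: Any = payload
                for key in path:
                    value = value[key]
                return value
        return None

    push_payload = {"pusher": {"name": "student"}, "ref": "refs/heads/my-branch"}
    pr_payload = {"action": "opened", "pull_request": {"head": {"ref": "my-branch"}}}

    print(extract_branch(push_payload))   # refs/heads/my-branch (fully qualified ref)
    print(extract_branch(pr_payload))     # my-branch (bare branch name)
    print(extract_branch({"fork": True}))  # None -> unmanaged trigger

Note the asymmetry the code preserves: push deliveries yield a fully qualified ref (refs/heads/...) while pull requests yield the bare branch name; both values are later passed as ref= to get_contents on the student repository.
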
while path: - branch = branch[path.pop(0)] - return branch - return None - - -def get_student_creds(payload: Dict[str, Any]) -> Tuple[str, str, str]: - - owner = payload["repository"]["owner"]["login"] - repo_name = payload["repository"]["name"] - token = git_integration.get_access_token( - git_integration.get_installation(owner, repo_name).id - ).token - return owner, repo_name, token - - -def compare_tests_folder( - student_repo: GithubConnector, solution_repo: GithubConnector, student_branch_repo: str -) -> Any: - - student_contents = student_repo.repo.get_contents(TESTS_FOLDER_NAME, ref=student_branch_repo) - - if ( - isinstance(student_contents, ContentFile.ContentFile) - and student_contents.type == "submodule" - ): - solution_last_commit = solution_repo.get_last_hash_commit("main") - student_tests_commit = student_contents.sha - return solution_last_commit == student_tests_commit - - student_hash_tests = student_repo.get_tests_hash(TESTS_FOLDER_NAME) - solution_hash_tests = solution_repo.get_tests_hash(TESTS_FOLDER_NAME) - return student_hash_tests == solution_hash_tests - - -def github_validator_repo(payload: Dict[str, Any]) -> Any: - - student_owner, student_repo_name, student_token = get_student_creds(payload) - student_github = GithubConnector(student_token, student_owner, student_repo_name) - student_branch = get_student_branch(payload) - if student_branch is None: - # Log error - # FIXME - # Archive the payload - # FIXME - # print("Could'nt find the student commit, maybe the trigger is not managed") - return False - solution_github = GithubConnector( - SOLUTION_TESTS_ACCESS_TOKEN, SOLUTION_OWNER, SOLUTION_REPO_NAME - ) - - # Valide of repo - tests_havent_changed = compare_tests_folder(student_github, solution_github, student_branch) - - # Send results to GCP - # FIXME - - # Results of challenges - # FIXME - return tests_havent_changed diff --git a/bin/github_validator_repo.py b/bin/github_validator_repo.py new file mode 100644 index 0000000..7f7bc3d --- /dev/null +++ b/bin/github_validator_repo.py @@ -0,0 +1,36 @@ +from typing import Any + +from config import TESTS_FOLDER_NAME +from github import ContentFile +from lib.connectors.github_connector import GitHubConnector + + +def compare_tests_folder(student_repo: GitHubConnector, solution_repo: GitHubConnector) -> Any: + + student_contents = student_repo.repo.get_contents( + TESTS_FOLDER_NAME, ref=student_repo.BRANCH_NAME + ) + + if ( + isinstance(student_contents, ContentFile.ContentFile) + and student_contents.type == "submodule" + ): + solution_last_commit = solution_repo.get_last_hash_commit() + student_tests_commit = student_contents.sha + return solution_last_commit == student_tests_commit + + student_hash_tests = student_repo.get_tests_hash(TESTS_FOLDER_NAME) + solution_hash_tests = solution_repo.get_tests_hash(TESTS_FOLDER_NAME) + return student_hash_tests == solution_hash_tests + + +def github_validator_repo(student_github: GitHubConnector, solution_github: GitHubConnector) -> Any: + # Valide of repo + tests_havent_changed = compare_tests_folder(student_github, solution_github) + + # Send results to GCP + # FIXME + + # Results of challenges + # FIXME + return tests_havent_changed diff --git a/bin/validator.py b/bin/validator.py new file mode 100644 index 0000000..802b63f --- /dev/null +++ b/bin/validator.py @@ -0,0 +1,48 @@ +from typing import Any, Dict + +from bin.github_validator_repo import github_validator_repo +from config import SOLUTION_OWNER, SOLUTION_REPO_NAME, SOLUTION_TESTS_ACCESS_TOKEN +from 
lib.connectors.github_connector import GitHubConnector +from lib.user import GitHubUser +from lib.utils import get_github_user + +triggers = {"pull_request": ["pull_request", "head", "ref"], "pusher": ["ref"]} + + +def get_trigger(payload: Dict[str, Any]) -> Any: + for trigger in triggers: + if trigger in payload: + return trigger + return None + + +def get_student_branch(payload: Dict[str, Any]) -> Any: + trigger = get_trigger(payload) + path = triggers[trigger] + branch = payload + while path: + branch = branch[path.pop(0)] + return branch + + +def validator(payload: Dict[str, Any]) -> Any: + + # Init Data + student = get_github_user(payload) + github_student_branch = get_student_branch(payload) + if github_student_branch is None: + # Log error + # FIXME + # Archive the payload + # FIXME + # print("Could'nt find the student commit, maybe the trigger is not managed") + return False + + repo_name = payload["repository"]["name"] + student.get_access_token(repo_name) + student_github = GitHubConnector(student, repo_name, github_student_branch) + + solution_user = GitHubUser(LOGIN=SOLUTION_OWNER, ACCESS_TOKEN=SOLUTION_TESTS_ACCESS_TOKEN) + solution_github = GitHubConnector(solution_user, SOLUTION_REPO_NAME, "main") + tests_havent_changed = github_validator_repo(student_github, solution_github) + return tests_havent_changed diff --git a/config.py b/config.py index 71a9a7b..3fcc1a0 100644 --- a/config.py +++ b/config.py @@ -2,15 +2,18 @@ import os -APP_ID = cast(str, os.getenv("GH_APP_ID")) - -with open( - "/Users/brunozheng/.certs/github/tests-validator-app-events.2022-08-22.private-key.pem" -) as f: - APP_KEY = f.read() +from github import GithubIntegration -# APP_KEY = cast(str, os.getenv("GH_APP_KEY")) +APP_ID = cast(str, os.getenv("GH_APP_ID")) +APP_KEY = cast(str, os.getenv("GH_APP_KEY")) SOLUTION_TESTS_ACCESS_TOKEN = cast(str, os.getenv("SOLUTION_TESTS_ACCESS_TOKEN")) SOLUTION_OWNER = "artefactory-fr" SOLUTION_REPO_NAME = "school_of_data_tests" TESTS_FOLDER_NAME = "tests" +GSHEET_SA_JSON = cast(str, os.getenv("GSHEET_SA_JSON")) + + +git_integration = GithubIntegration( + APP_ID, + APP_KEY, +) diff --git a/lib/connectors/github_connector.py b/lib/connectors/github_connector.py index 3b38004..a1e0436 100644 --- a/lib/connectors/github_connector.py +++ b/lib/connectors/github_connector.py @@ -1,27 +1,29 @@ from typing import Any, List from github import ContentFile, Github, Repository +from lib.user import GitHubUser from lib.utils import get_hash_files -class GithubConnector: - def __init__(self, token: str, owner: str, repo_name: str): - self.OWNER = owner +class GitHubConnector: + def __init__(self, user: GitHubUser, repo_name: str, branch_name: str): + self.user = user self.REPO_NAME = repo_name + self.BRANCH_NAME = branch_name - print(f"Connecting to Github as {owner} on {repo_name}...") - self.connector = Github(login_or_token=token) - print(f"Connecting to repo {repo_name} as {owner}...") - self.repo = self.connector.get_repo(f"{owner}/{repo_name}") + print(f"Connecting to Github with user {self.user.LOGIN} on repo: {repo_name} ...") + self.connector = Github(login_or_token=self.user.ACCESS_TOKEN) + print(f"Connecting to repo {repo_name} with user: {self.user.LOGIN} ...") + self.repo = self.connector.get_repo(f"{self.user.LOGIN}/{repo_name}") def get_repo(self, repo_name: str) -> Repository.Repository: self.REPO_NAME = repo_name - print(f"Connecting to new repo {repo_name} as {self.OWNER}...") - self.repo = self.connector.get_repo(f"{self.OWNER}/{repo_name}") + print(f"Connecting to 
new repo: {repo_name} with user: {self.user.LOGIN} ...") + self.repo = self.connector.get_repo(f"{self.user.LOGIN}/{repo_name}") return self.repo - def get_last_hash_commit(self, branch_name: str) -> str: - branch = self.repo.get_branch(branch_name) + def get_last_hash_commit(self) -> str: + branch = self.repo.get_branch(self.BRANCH_NAME) return branch.commit.sha def get_files_content(self, contents: Any) -> List[ContentFile.ContentFile]: diff --git a/lib/user.py b/lib/user.py new file mode 100644 index 0000000..be335c5 --- /dev/null +++ b/lib/user.py @@ -0,0 +1,21 @@ +from typing import Union + +from dataclasses import dataclass + +from config import git_integration + + +@dataclass +class GitHubUser: + + LOGIN: str + URL: Union[str, None] = None + ID: Union[str, None] = None + ACCESS_TOKEN: Union[str, None] = None + + def get_access_token(self, repo_name: str) -> str: + + self.ACCESS_TOKEN = git_integration.get_access_token( + git_integration.get_installation(self.LOGIN, repo_name).id + ).token + return self.ACCESS_TOKEN diff --git a/lib/utils.py b/lib/utils.py index 2129164..9949ca1 100644 --- a/lib/utils.py +++ b/lib/utils.py @@ -1,8 +1,9 @@ -from typing import List +from typing import Any, Dict, List import hashlib from github import ContentFile +from lib.user import GitHubUser def get_hash_files(contents: List[ContentFile.ContentFile]) -> str: @@ -12,3 +13,10 @@ def get_hash_files(contents: List[ContentFile.ContentFile]) -> str: hash = hashlib.sha256() hash.update(hash_sum.encode()) return str(hash.hexdigest()) + + +def get_github_user(data: Dict[str, Any]) -> GitHubUser: + login = data["repository"]["owner"]["login"] + id = data["repository"]["owner"]["id"] + url = data["repository"]["owner"]["url"] + return GitHubUser(LOGIN=login, ID=id, URL=url) diff --git a/server.py b/server.py index 6b44aeb..c64cb9e 100644 --- a/server.py +++ b/server.py @@ -1,7 +1,7 @@ from typing import Any import uvicorn -from bin.github_validator_app import github_validator_repo +from bin.validator import validator from fastapi import FastAPI, Request app = FastAPI() @@ -11,7 +11,7 @@ async def main(request: Request) -> Any: payload = await request.json() - tests_havent_changed = github_validator_repo(payload) + tests_havent_changed = validator(payload) return tests_havent_changed From 80df143e9f97fc401c6cc35477474cfd0f0f258c Mon Sep 17 00:00:00 2001 From: bruno zheng Date: Mon, 29 Aug 2022 11:48:20 +0200 Subject: [PATCH 06/13] feat: add check on bin lib dir --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index a9d8017..6962d6e 100644 --- a/Makefile +++ b/Makefile @@ -113,7 +113,7 @@ check-style: poetry run black --config pyproject.toml --diff --check ./$(BLACK_COMMAND_FLAG) && \ poetry run darglint -v 2 **/*.py$(DARGLINT_COMMAND_FLAG) && \ poetry run isort --settings-path pyproject.toml --check-only **/*.py$(ISORT_COMMAND_FLAG) && \ - poetry run mypy --config-file setup.cfg github_tests_validator_app tests/**/*.py$(MYPY_COMMAND_FLAG) + poetry run mypy --config-file setup.cfg bin lib tests/**/*.py$(MYPY_COMMAND_FLAG) .PHONY: format-code format-code: From 01a45ef294b0de22025405c007c470ccaa09ca97 Mon Sep 17 00:00:00 2001 From: bruno zheng Date: Mon, 29 Aug 2022 11:51:27 +0200 Subject: [PATCH 07/13] feat: new connector Google Sheet, apply results on check valid repository --- bin/github_validator_repo.py | 53 ++++++++++++--- bin/validator.py | 86 ++++++++++++++---------- config.py | 15 ++++- lib/connectors/github_connector.py | 5 +- 
lib/connectors/google_sheet_connector.py | 55 +++++++++++++++ lib/user.py | 7 +- lib/utils.py | 2 +- 7 files changed, 170 insertions(+), 53 deletions(-) create mode 100644 lib/connectors/google_sheet_connector.py diff --git a/bin/github_validator_repo.py b/bin/github_validator_repo.py index 7f7bc3d..cd55cbb 100644 --- a/bin/github_validator_repo.py +++ b/bin/github_validator_repo.py @@ -1,14 +1,52 @@ -from typing import Any +from typing import Any, Dict, Union from config import TESTS_FOLDER_NAME from github import ContentFile from lib.connectors.github_connector import GitHubConnector +from lib.user import GitHubUser +triggers = {"pull_request": ["pull_request", "head", "ref"], "pusher": ["ref"]} -def compare_tests_folder(student_repo: GitHubConnector, solution_repo: GitHubConnector) -> Any: - student_contents = student_repo.repo.get_contents( - TESTS_FOLDER_NAME, ref=student_repo.BRANCH_NAME +def get_trigger(payload: Dict[str, Any]) -> Any: + for trigger in triggers: + if trigger in payload: + return trigger + return None + + +def get_student_branch(payload: Dict[str, Any], trigger: Union[str, None] = None) -> Any: + trigger = get_trigger(payload) if not trigger else None + if not trigger: + # Log error + # FIXME + # Archive the payload + # FIXME + print("Couldn't find the student branch, maybe the trigger is not managed") + return None + path = triggers[trigger] + branch = payload + while path: + branch = branch[path.pop(0)] + return branch + + +def get_student_github_connector( + student: GitHubUser, payload: Dict[str, Any] +) -> Union[bool, GitHubConnector]: + github_student_branch = get_student_branch(payload) + if github_student_branch is None: + return None + + repo_name = payload["repository"]["name"] + student.get_access_token(repo_name) + return GitHubConnector(student, repo_name, github_student_branch) + + +def compare_tests_folder(student_github: GitHubConnector, solution_repo: GitHubConnector) -> Any: + + student_contents = student_github.repo.get_contents( + TESTS_FOLDER_NAME, ref=student_github.BRANCH_NAME ) if ( @@ -19,7 +57,7 @@ def compare_tests_folder(student_repo: GitHubConnector, solution_repo: GitHubCon student_tests_commit = student_contents.sha return solution_last_commit == student_tests_commit - student_hash_tests = student_repo.get_tests_hash(TESTS_FOLDER_NAME) + student_hash_tests = student_github.get_tests_hash(TESTS_FOLDER_NAME) solution_hash_tests = solution_repo.get_tests_hash(TESTS_FOLDER_NAME) return student_hash_tests == solution_hash_tests @@ -28,9 +66,4 @@ def github_validator_repo(student_github: GitHubConnector, solution_github: GitH # Valide of repo tests_havent_changed = compare_tests_folder(student_github, solution_github) - # Send results to GCP - # FIXME - - # Results of challenges - # FIXME return tests_havent_changed diff --git a/bin/validator.py b/bin/validator.py index 802b63f..5a1a7db 100644 --- a/bin/validator.py +++ b/bin/validator.py @@ -1,48 +1,64 @@ from typing import Any, Dict -from bin.github_validator_repo import github_validator_repo -from config import SOLUTION_OWNER, SOLUTION_REPO_NAME, SOLUTION_TESTS_ACCESS_TOKEN +from bin.github_validator_repo import ( + get_student_github_connector, + get_trigger, + github_validator_repo, +) +from config import SOLUTION_OWNER, SOLUTION_REPO_NAME, SOLUTION_TESTS_ACCESS_TOKEN, default_message from lib.connectors.github_connector import GitHubConnector +from lib.connectors.google_sheet_connector import GSheet from lib.user import GitHubUser from lib.utils import get_github_user -triggers = 
{"pull_request": ["pull_request", "head", "ref"], "pusher": ["ref"]} +def validator(payload: Dict[str, Any]) -> Any: + # Init trigger + trigger = get_trigger(payload) + if trigger == "pull_request" and payload["action"] not in ["reopened", "opened"]: + return -def get_trigger(payload: Dict[str, Any]) -> Any: - for trigger in triggers: - if trigger in payload: - return trigger - return None + # Init Google Sheet + gsheet = GSheet() + # Init GitHubUser + student_user = get_github_user(payload) + solution_user = GitHubUser(LOGIN=str(SOLUTION_OWNER), ACCESS_TOKEN=SOLUTION_TESTS_ACCESS_TOKEN) -def get_student_branch(payload: Dict[str, Any]) -> Any: - trigger = get_trigger(payload) - path = triggers[trigger] - branch = payload - while path: - branch = branch[path.pop(0)] - return branch + # Add user on Google Sheet + gsheet.add_new_user_on_sheet(student_user) + # Check valid repo + student_github_connector = get_student_github_connector(student_user, payload) + if not student_github_connector: + gsheet.add_new_repo_valid_result( + student_user, False, "[ERROR]: cannot get the student github repository." + ) + print("[ERROR]: cannot get the student github repository.") + return -def validator(payload: Dict[str, Any]) -> Any: + solution_github_connector = GitHubConnector(solution_user, SOLUTION_REPO_NAME, "main") + if not student_github_connector: + gsheet.add_new_repo_valid_result( + student_user, False, "[ERROR]: cannot get the solution github repository." + ) + print("[ERROR]: cannot get the solution github repository.") + return + + tests_havent_changed = github_validator_repo( + student_github_connector, solution_github_connector + ) + + # Add valid repo result on Google Sheet + gsheet.add_new_repo_valid_result( + student_user, + tests_havent_changed, + default_message["valid_repository"][str(tests_havent_changed)], + ) - # Init Data - student = get_github_user(payload) - github_student_branch = get_student_branch(payload) - if github_student_branch is None: - # Log error - # FIXME - # Archive the payload - # FIXME - # print("Could'nt find the student commit, maybe the trigger is not managed") - return False - - repo_name = payload["repository"]["name"] - student.get_access_token(repo_name) - student_github = GitHubConnector(student, repo_name, github_student_branch) - - solution_user = GitHubUser(LOGIN=SOLUTION_OWNER, ACCESS_TOKEN=SOLUTION_TESTS_ACCESS_TOKEN) - solution_github = GitHubConnector(solution_user, SOLUTION_REPO_NAME, "main") - tests_havent_changed = github_validator_repo(student_github, solution_github) - return tests_havent_changed + # Update Pull Request + if "pull_request" in payload: + issue = student_github_connector.repo.get_issue(number=payload["pull_request"]["number"]) + issue.create_comment(default_message["valid_repository"][str(tests_havent_changed)]) + if not tests_havent_changed: + issue.edit(state="closed") diff --git a/config.py b/config.py index 3fcc1a0..ce3e4a9 100644 --- a/config.py +++ b/config.py @@ -4,16 +4,27 @@ from github import GithubIntegration +# GitHub APP_ID = cast(str, os.getenv("GH_APP_ID")) APP_KEY = cast(str, os.getenv("GH_APP_KEY")) SOLUTION_TESTS_ACCESS_TOKEN = cast(str, os.getenv("SOLUTION_TESTS_ACCESS_TOKEN")) SOLUTION_OWNER = "artefactory-fr" SOLUTION_REPO_NAME = "school_of_data_tests" TESTS_FOLDER_NAME = "tests" -GSHEET_SA_JSON = cast(str, os.getenv("GSHEET_SA_JSON")) - git_integration = GithubIntegration( APP_ID, APP_KEY, ) + +# Google Sheet +GSHEET_SA_JSON = cast(str, os.getenv("GSHEET_SA_JSON")) +GSHEET_WORKSHEET_ID = 
"1tzn73q_QhZ2gLAmZObRsE_JmD6yD6433uZBGc-Llsdk" + +# Others +default_message = { + "valid_repository": { + "True": "Validation of the repository `school_of_data_tests`", + "False": "The repository `school_of_data_tests` does not seem to be the correct version.", + } +} diff --git a/lib/connectors/github_connector.py b/lib/connectors/github_connector.py index a1e0436..d8ecc47 100644 --- a/lib/connectors/github_connector.py +++ b/lib/connectors/github_connector.py @@ -12,14 +12,15 @@ def __init__(self, user: GitHubUser, repo_name: str, branch_name: str): self.BRANCH_NAME = branch_name print(f"Connecting to Github with user {self.user.LOGIN} on repo: {repo_name} ...") - self.connector = Github(login_or_token=self.user.ACCESS_TOKEN) - print(f"Connecting to repo {repo_name} with user: {self.user.LOGIN} ...") + self.connector = Github(login_or_token=self.user.ACCESS_TOKEN, timeout=30) self.repo = self.connector.get_repo(f"{self.user.LOGIN}/{repo_name}") + print("Done.") def get_repo(self, repo_name: str) -> Repository.Repository: self.REPO_NAME = repo_name print(f"Connecting to new repo: {repo_name} with user: {self.user.LOGIN} ...") self.repo = self.connector.get_repo(f"{self.user.LOGIN}/{repo_name}") + print("Done.") return self.repo def get_last_hash_commit(self) -> str: diff --git a/lib/connectors/google_sheet_connector.py b/lib/connectors/google_sheet_connector.py new file mode 100644 index 0000000..0ce13d8 --- /dev/null +++ b/lib/connectors/google_sheet_connector.py @@ -0,0 +1,55 @@ +import gspread +from config import GSHEET_SA_JSON, GSHEET_WORKSHEET_ID +from lib.user import GitHubUser + + +class GSheet: + def __init__(self): + print(f"Connecting to Google Sheet API ...") + self.gs_client = gspread.service_account(filename=GSHEET_SA_JSON) + self.spreadsheet = self.gs_client.open_by_key(GSHEET_WORKSHEET_ID) + print("Done.") + + def get_new_sheet(self, sheet_id: str) -> gspread.spreadsheet.Spreadsheet: + self.spreadsheet = self.gs_client.open_by_key(sheet_id) + return self.spreadsheet + + def add_new_user_on_sheet(self, user: GitHubUser) -> None: + # Controle the workseet exist of not + worksheet = self.spreadsheet.worksheet("students") + + # Check is user exist + id_cell = worksheet.find(user.ID) + login_cell = worksheet.find(user.LOGIN) + if id_cell and login_cell and id_cell.row == login_cell.row: + print("User already exist in student worksheet.") + else: + print(f"Add new user {user.LOGIN} in student worksheet ...") + headers = worksheet.row_values(1) + user_dict = user.__dict__ + new_row = [ + user_dict[header.upper()] if header.upper() in user_dict else None + for header in headers + ] + worksheet.append_row(new_row) + print("Done.") + + def add_new_repo_valid_result(self, user: GitHubUser, result: bool, info: str = "") -> None: + # Controle the workseet exist of not + worksheet = self.spreadsheet.worksheet("check_validation_repo") + + headers = worksheet.row_values(1) + user_dict = user.__dict__ + new_row = list() + for header in headers: + if header == "is_valid": + new_row.append(str(result)) + elif header == "user_id": + new_row.append(user.ID) + elif header == "info": + new_row.append(info) + elif header.upper() in user_dict: + new_row.append(user_dict[header.upper()]) + else: + new_row.append("") + worksheet.append_row(new_row) diff --git a/lib/user.py b/lib/user.py index be335c5..44036d4 100644 --- a/lib/user.py +++ b/lib/user.py @@ -1,6 +1,7 @@ from typing import Union from dataclasses import dataclass +from datetime import datetime from config import git_integration @@ -9,12 
+10,12 @@ class GitHubUser: LOGIN: str - URL: Union[str, None] = None - ID: Union[str, None] = None + URL: str = "" + ID: str = "" ACCESS_TOKEN: Union[str, None] = None + CREATED_AT: str = datetime.now().strftime("%d/%m/%Y %H:%M:%S") def get_access_token(self, repo_name: str) -> str: - self.ACCESS_TOKEN = git_integration.get_access_token( git_integration.get_installation(self.LOGIN, repo_name).id ).token diff --git a/lib/utils.py b/lib/utils.py index 9949ca1..15a7793 100644 --- a/lib/utils.py +++ b/lib/utils.py @@ -19,4 +19,4 @@ def get_github_user(data: Dict[str, Any]) -> GitHubUser: login = data["repository"]["owner"]["login"] id = data["repository"]["owner"]["id"] url = data["repository"]["owner"]["url"] - return GitHubUser(LOGIN=login, ID=id, URL=url) + return GitHubUser(LOGIN=login, ID=str(id), URL=url) From e1308732dad51fb90a44ead95b107357dd81b6b8 Mon Sep 17 00:00:00 2001 From: bruno zheng Date: Mon, 29 Aug 2022 12:56:13 +0200 Subject: [PATCH 08/13] feat: change print by logs on process --- bin/github_validator_repo.py | 6 ++++-- bin/validator.py | 13 ++++++++++--- config/__init__.py | 11 +++++++++++ config.py => config/config.py | 0 lib/connectors/github_connector.py | 10 ++++++---- lib/connectors/google_sheet_connector.py | 14 ++++++++------ lib/user.py | 2 +- 7 files changed, 40 insertions(+), 16 deletions(-) create mode 100644 config/__init__.py rename config.py => config/config.py (100%) diff --git a/bin/github_validator_repo.py b/bin/github_validator_repo.py index cd55cbb..f867100 100644 --- a/bin/github_validator_repo.py +++ b/bin/github_validator_repo.py @@ -1,6 +1,8 @@ from typing import Any, Dict, Union -from config import TESTS_FOLDER_NAME +import logging + +from config.config import TESTS_FOLDER_NAME from github import ContentFile from lib.connectors.github_connector import GitHubConnector from lib.user import GitHubUser @@ -22,7 +24,7 @@ def get_student_branch(payload: Dict[str, Any], trigger: Union[str, None] = None # FIXME # Archive the payload # FIXME - print("Couldn't find the student branch, maybe the trigger is not managed") + logging.error("Couldn't find the student branch, maybe the trigger is not managed") return None path = triggers[trigger] branch = payload diff --git a/bin/validator.py b/bin/validator.py index 5a1a7db..7bd22a9 100644 --- a/bin/validator.py +++ b/bin/validator.py @@ -1,11 +1,18 @@ from typing import Any, Dict +import logging + from bin.github_validator_repo import ( get_student_github_connector, get_trigger, github_validator_repo, ) -from config import SOLUTION_OWNER, SOLUTION_REPO_NAME, SOLUTION_TESTS_ACCESS_TOKEN, default_message +from config.config import ( + SOLUTION_OWNER, + SOLUTION_REPO_NAME, + SOLUTION_TESTS_ACCESS_TOKEN, + default_message, +) from lib.connectors.github_connector import GitHubConnector from lib.connectors.google_sheet_connector import GSheet from lib.user import GitHubUser @@ -34,7 +41,7 @@ def validator(payload: Dict[str, Any]) -> Any: gsheet.add_new_repo_valid_result( student_user, False, "[ERROR]: cannot get the student github repository." ) - print("[ERROR]: cannot get the student github repository.") + logging.error("[ERROR]: cannot get the student github repository.") return solution_github_connector = GitHubConnector(solution_user, SOLUTION_REPO_NAME, "main") @@ -42,7 +49,7 @@ def validator(payload: Dict[str, Any]) -> Any: gsheet.add_new_repo_valid_result( student_user, False, "[ERROR]: cannot get the solution github repository." 
) - print("[ERROR]: cannot get the solution github repository.") + logging.error("[ERROR]: cannot get the solution github repository.") return tests_havent_changed = github_validator_repo( diff --git a/config/__init__.py b/config/__init__.py new file mode 100644 index 0000000..cf878d5 --- /dev/null +++ b/config/__init__.py @@ -0,0 +1,11 @@ +import logging + +FORMAT = "%(asctime)s - %(levelname)s: %(message)s" +DATEFMT = "%H:%M:%S" +logging.basicConfig( + format=FORMAT, + level=logging.INFO, + datefmt=DATEFMT, +) + +logging.getLogger("uvicorn").removeHandler(logging.getLogger("uvicorn").handlers[0]) diff --git a/config.py b/config/config.py similarity index 100% rename from config.py rename to config/config.py diff --git a/lib/connectors/github_connector.py b/lib/connectors/github_connector.py index d8ecc47..be16d3d 100644 --- a/lib/connectors/github_connector.py +++ b/lib/connectors/github_connector.py @@ -1,5 +1,7 @@ from typing import Any, List +import logging + from github import ContentFile, Github, Repository from lib.user import GitHubUser from lib.utils import get_hash_files @@ -11,16 +13,16 @@ def __init__(self, user: GitHubUser, repo_name: str, branch_name: str): self.REPO_NAME = repo_name self.BRANCH_NAME = branch_name - print(f"Connecting to Github with user {self.user.LOGIN} on repo: {repo_name} ...") + logging.info(f"Connecting to Github with user {self.user.LOGIN} on repo: {repo_name} ...") self.connector = Github(login_or_token=self.user.ACCESS_TOKEN, timeout=30) self.repo = self.connector.get_repo(f"{self.user.LOGIN}/{repo_name}") - print("Done.") + logging.info("Done.") def get_repo(self, repo_name: str) -> Repository.Repository: self.REPO_NAME = repo_name - print(f"Connecting to new repo: {repo_name} with user: {self.user.LOGIN} ...") + logging.info(f"Connecting to new repo: {repo_name} with user: {self.user.LOGIN} ...") self.repo = self.connector.get_repo(f"{self.user.LOGIN}/{repo_name}") - print("Done.") + logging.info("Done.") return self.repo def get_last_hash_commit(self) -> str: diff --git a/lib/connectors/google_sheet_connector.py b/lib/connectors/google_sheet_connector.py index 0ce13d8..4250951 100644 --- a/lib/connectors/google_sheet_connector.py +++ b/lib/connectors/google_sheet_connector.py @@ -1,14 +1,16 @@ +import logging + import gspread -from config import GSHEET_SA_JSON, GSHEET_WORKSHEET_ID +from config.config import GSHEET_SA_JSON, GSHEET_WORKSHEET_ID from lib.user import GitHubUser class GSheet: def __init__(self): - print(f"Connecting to Google Sheet API ...") + logging.info(f"Connecting to Google Sheet API ...") self.gs_client = gspread.service_account(filename=GSHEET_SA_JSON) self.spreadsheet = self.gs_client.open_by_key(GSHEET_WORKSHEET_ID) - print("Done.") + logging.info("Done.") def get_new_sheet(self, sheet_id: str) -> gspread.spreadsheet.Spreadsheet: self.spreadsheet = self.gs_client.open_by_key(sheet_id) @@ -22,9 +24,9 @@ def add_new_user_on_sheet(self, user: GitHubUser) -> None: id_cell = worksheet.find(user.ID) login_cell = worksheet.find(user.LOGIN) if id_cell and login_cell and id_cell.row == login_cell.row: - print("User already exist in student worksheet.") + logging.info("User already exist in student worksheet.") else: - print(f"Add new user {user.LOGIN} in student worksheet ...") + logging.info(f"Add new user {user.LOGIN} in student worksheet ...") headers = worksheet.row_values(1) user_dict = user.__dict__ new_row = [ @@ -32,7 +34,7 @@ def add_new_user_on_sheet(self, user: GitHubUser) -> None: for header in headers ] 
worksheet.append_row(new_row) - print("Done.") + logging.info("Done.") def add_new_repo_valid_result(self, user: GitHubUser, result: bool, info: str = "") -> None: # Controle the workseet exist of not diff --git a/lib/user.py b/lib/user.py index 44036d4..a012a5e 100644 --- a/lib/user.py +++ b/lib/user.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from datetime import datetime -from config import git_integration +from config.config import git_integration @dataclass From 6e389ea3161ed171f64ba9a5d3a4d103dcec775e Mon Sep 17 00:00:00 2001 From: bruno zheng Date: Mon, 29 Aug 2022 12:59:49 +0200 Subject: [PATCH 09/13] feat: update packages with poetry --- poetry.lock | 244 ++++++++++++++++++++++++++++++++++++++++++------- pyproject.toml | 1 + 2 files changed, 211 insertions(+), 34 deletions(-) diff --git a/poetry.lock b/poetry.lock index d5e00d7..e1f9e90 100644 --- a/poetry.lock +++ b/poetry.lock @@ -11,8 +11,8 @@ idna = ">=2.8" sniffio = ">=1.1" [package.extras] -doc = ["packaging", "sphinx-rtd-theme", "sphinx-autodoc-typehints (>=1.2.0)"] -test = ["coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "contextlib2", "uvloop (<0.15)", "mock (>=4)", "uvloop (>=0.15)"] +doc = ["packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["contextlib2", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (<0.15)", "uvloop (>=0.15)"] trio = ["trio (>=0.16)"] [[package]] @@ -45,10 +45,10 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [package.extras] -dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] -docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] -tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] -tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"] +dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six", "sphinx", "sphinx-notfound-page", "zope.interface"] +docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] +tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six", "zope.interface"] +tests_no_zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six"] [[package]] name = "bandit" @@ -65,7 +65,7 @@ PyYAML = ">=5.3.1" stevedore = ">=1.20.0" [package.extras] -test = ["coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)", "toml", "beautifulsoup4 (>=4.8.0)", "pylint (==1.9.4)"] +test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)", "toml"] toml = ["toml"] yaml = ["pyyaml"] @@ -91,6 +91,14 @@ d = ["aiohttp (>=3.7.4)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] +[[package]] +name = "cachetools" +version = "5.2.0" +description = "Extensible 
memoizing collections and decorators" +category = "main" +optional = false +python-versions = "~=3.7" + [[package]] name = "certifi" version = "2022.6.15" @@ -172,11 +180,11 @@ cffi = ">=1.12" [package.extras] docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx-rtd-theme"] -docstest = ["pyenchant (>=1.6.11)", "twine (>=1.12.0)", "sphinxcontrib-spelling (>=4.0.1)"] +docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"] sdist = ["setuptools_rust (>=0.11.4)"] ssh = ["bcrypt (>=3.1.5)"] -test = ["pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-subtests", "pytest-xdist", "pretend", "iso8601", "pytz", "hypothesis (>=1.11.4,!=3.79.2)"] +test = ["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-subtests", "pytest-xdist", "pytz"] [[package]] name = "darglint" @@ -198,7 +206,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" wrapt = ">=1.10,<2" [package.extras] -dev = ["tox", "bump2version (<1)", "sphinx (<2)", "importlib-metadata (<3)", "importlib-resources (<4)", "configparser (<5)", "sphinxcontrib-websupport (<2)", "zipp (<2)", "PyTest (<5)", "PyTest-Cov (<2.6)", "pytest", "pytest-cov"] +dev = ["PyTest (<5)", "PyTest-Cov (<2.6)", "bump2version (<1)", "configparser (<5)", "importlib-metadata (<3)", "importlib-resources (<4)", "pytest", "pytest-cov", "sphinx (<2)", "sphinxcontrib-websupport (<2)", "tox", "zipp (<2)"] [[package]] name = "dill" @@ -248,10 +256,10 @@ pydantic = ">=1.6.2,<1.7 || >1.7,<1.7.1 || >1.7.1,<1.7.2 || >1.7.2,<1.7.3 || >1. starlette = "0.19.1" [package.extras] -all = ["requests (>=2.24.0,<3.0.0)", "jinja2 (>=2.11.2,<4.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "itsdangerous (>=1.1.0,<3.0.0)", "pyyaml (>=5.3.1,<7.0.0)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,<6.0.0)", "orjson (>=3.2.1,<4.0.0)", "email_validator (>=1.1.1,<2.0.0)", "uvicorn[standard] (>=0.12.0,<0.18.0)"] -dev = ["python-jose[cryptography] (>=3.3.0,<4.0.0)", "passlib[bcrypt] (>=1.7.2,<2.0.0)", "autoflake (>=1.4.0,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "uvicorn[standard] (>=0.12.0,<0.18.0)", "pre-commit (>=2.17.0,<3.0.0)"] -doc = ["mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "mdx-include (>=1.4.1,<2.0.0)", "mkdocs-markdownextradata-plugin (>=0.1.7,<0.3.0)", "typer (>=0.4.1,<0.5.0)", "pyyaml (>=5.3.1,<7.0.0)"] -test = ["pytest (>=6.2.4,<7.0.0)", "pytest-cov (>=2.12.0,<4.0.0)", "mypy (==0.910)", "flake8 (>=3.8.3,<4.0.0)", "black (==22.3.0)", "isort (>=5.0.6,<6.0.0)", "requests (>=2.24.0,<3.0.0)", "httpx (>=0.14.0,<0.19.0)", "email_validator (>=1.1.1,<2.0.0)", "sqlalchemy (>=1.3.18,<1.5.0)", "peewee (>=3.13.3,<4.0.0)", "databases[sqlite] (>=0.3.2,<0.6.0)", "orjson (>=3.2.1,<4.0.0)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,<6.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "flask (>=1.1.2,<3.0.0)", "anyio[trio] (>=3.2.1,<4.0.0)", "types-ujson (==4.2.1)", "types-orjson (==3.6.2)", "types-dataclasses (==0.6.5)"] +all = ["email_validator (>=1.1.1,<2.0.0)", "itsdangerous (>=1.1.0,<3.0.0)", "jinja2 (>=2.11.2,<4.0.0)", "orjson (>=3.2.1,<4.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "pyyaml (>=5.3.1,<7.0.0)", "requests (>=2.24.0,<3.0.0)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,<6.0.0)", "uvicorn[standard] (>=0.12.0,<0.18.0)"] +dev = ["autoflake (>=1.4.0,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "passlib[bcrypt] (>=1.7.2,<2.0.0)", 
"pre-commit (>=2.17.0,<3.0.0)", "python-jose[cryptography] (>=3.3.0,<4.0.0)", "uvicorn[standard] (>=0.12.0,<0.18.0)"] +doc = ["mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-markdownextradata-plugin (>=0.1.7,<0.3.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pyyaml (>=5.3.1,<7.0.0)", "typer (>=0.4.1,<0.5.0)"] +test = ["anyio[trio] (>=3.2.1,<4.0.0)", "black (==22.3.0)", "databases[sqlite] (>=0.3.2,<0.6.0)", "email_validator (>=1.1.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "flask (>=1.1.2,<3.0.0)", "httpx (>=0.14.0,<0.19.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.910)", "orjson (>=3.2.1,<4.0.0)", "peewee (>=3.13.3,<4.0.0)", "pytest (>=6.2.4,<7.0.0)", "pytest-cov (>=2.12.0,<4.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "requests (>=2.24.0,<3.0.0)", "sqlalchemy (>=1.3.18,<1.5.0)", "types-dataclasses (==0.6.5)", "types-orjson (==3.6.2)", "types-ujson (==4.2.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,<6.0.0)"] [[package]] name = "filelock" @@ -287,6 +295,53 @@ python-versions = ">=3.7" [package.dependencies] gitdb = ">=4.0.1,<5" +[[package]] +name = "google-auth" +version = "2.11.0" +description = "Google Authentication Library" +category = "main" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*" + +[package.dependencies] +cachetools = ">=2.0.0,<6.0" +pyasn1-modules = ">=0.2.1" +rsa = {version = ">=3.1.4,<5", markers = "python_version >= \"3.6\""} +six = ">=1.9.0" + +[package.extras] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0dev)", "requests (>=2.20.0,<3.0.0dev)"] +enterprise_cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] +pyopenssl = ["pyopenssl (>=20.0.0)"] +reauth = ["pyu2f (>=0.1.5)"] + +[[package]] +name = "google-auth-oauthlib" +version = "0.5.2" +description = "Google Authentication Library" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +google-auth = ">=1.0.0" +requests-oauthlib = ">=0.7.0" + +[package.extras] +tool = ["click (>=6.0.0)"] + +[[package]] +name = "gspread" +version = "5.4.0" +description = "Google Spreadsheets Python API" +category = "main" +optional = false +python-versions = ">=3.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[package.dependencies] +google-auth = ">=1.12.0" +google-auth-oauthlib = ">=0.4.1" + [[package]] name = "h11" version = "0.13.0" @@ -331,10 +386,10 @@ optional = false python-versions = ">=3.6.1,<4.0" [package.extras] -pipfile_deprecated_finder = ["pipreqs", "requirementslib"] -requirements_deprecated_finder = ["pipreqs", "pip-api"] colors = ["colorama (>=0.4.3,<0.5.0)"] +pipfile_deprecated_finder = ["pipreqs", "requirementslib"] plugins = ["setuptools"] +requirements_deprecated_finder = ["pip-api", "pipreqs"] [[package]] name = "lazy-object-proxy" @@ -386,6 +441,19 @@ category = "dev" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" +[[package]] +name = "oauthlib" +version = "3.2.0" +description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.extras] +rsa = ["cryptography (>=3.0.0)"] +signals = ["blinker (>=1.4.0)"] +signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] + [[package]] name = "packaging" version = "21.3" @@ -422,8 +490,8 @@ optional = false python-versions = ">=3.7" [package.extras] -docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)", "sphinx (>=4)"] -test = ["appdirs (==1.4.4)", "pytest-cov 
(>=2.7)", "pytest-mock (>=3.6)", "pytest (>=6)"] +docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx (>=4)", "sphinx-autodoc-typehints (>=1.12)"] +test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] [[package]] name = "pluggy" @@ -461,6 +529,25 @@ category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +[[package]] +name = "pyasn1" +version = "0.4.8" +description = "ASN.1 types and codecs" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "pyasn1-modules" +version = "0.2.8" +description = "A collection of ASN.1-based protocols modules." +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +pyasn1 = ">=0.4.6,<0.5.0" + [[package]] name = "pycparser" version = "2.21" @@ -533,9 +620,9 @@ python-versions = ">=3.6" [package.extras] crypto = ["cryptography (>=3.3.1)"] -dev = ["sphinx", "sphinx-rtd-theme", "zope.interface", "cryptography (>=3.3.1)", "pytest (>=6.0.0,<7.0.0)", "coverage[toml] (==5.0.4)", "mypy", "pre-commit"] +dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.3.1)", "mypy", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"] docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] -tests = ["pytest (>=6.0.0,<7.0.0)", "coverage[toml] (==5.0.4)"] +tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] [[package]] name = "pylint" @@ -573,7 +660,7 @@ cffi = ">=1.4.1" [package.extras] docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"] -tests = ["pytest (>=3.2.1,!=3.3.0)", "hypothesis (>=3.27.0)"] +tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] [[package]] name = "pyparsing" @@ -584,7 +671,7 @@ optional = false python-versions = ">=3.6.8" [package.extras] -diagrams = ["railroad-diagrams", "jinja2"] +diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pytest" @@ -644,6 +731,21 @@ urllib3 = ">=1.21.1,<1.27" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"] +[[package]] +name = "requests-oauthlib" +version = "1.3.1" +description = "OAuthlib authentication support for Requests." +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[package.dependencies] +oauthlib = ">=3.0.0" +requests = ">=2.0.0" + +[package.extras] +rsa = ["oauthlib[signedtoken] (>=3.0.0)"] + [[package]] name = "rich" version = "12.5.1" @@ -659,6 +761,17 @@ pygments = ">=2.6.0,<3.0.0" [package.extras] jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"] +[[package]] +name = "rsa" +version = "4.9" +description = "Pure-Python RSA implementation" +category = "main" +optional = false +python-versions = ">=3.6,<4" + +[package.dependencies] +pyasn1 = ">=0.1.3" + [[package]] name = "ruamel.yaml" version = "0.17.21" @@ -709,7 +822,7 @@ python-versions = "!=3.0,!=3.1,!=3.2,!=3.3,>=2.6" name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" -category = "dev" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" @@ -797,7 +910,7 @@ python-versions = ">=3.6,<4.0" [[package]] name = "typer" -version = "0.5.0" +version = "0.6.1" description = "Typer, build great CLIs. Easy to code. Based on Python type hints." 
category = "main" optional = false @@ -810,10 +923,10 @@ rich = {version = ">=10.11.0,<13.0.0", optional = true, markers = "extra == \"al shellingham = {version = ">=1.3.0,<2.0.0", optional = true, markers = "extra == \"all\""} [package.extras] -all = ["colorama (>=0.4.3,<0.5.0)", "shellingham (>=1.3.0,<2.0.0)", "rich (>=10.11.0,<13.0.0)"] +all = ["colorama (>=0.4.3,<0.5.0)", "rich (>=10.11.0,<13.0.0)", "shellingham (>=1.3.0,<2.0.0)"] dev = ["autoflake (>=1.3.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "pre-commit (>=2.17.0,<3.0.0)"] -doc = ["mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "mdx-include (>=1.4.1,<2.0.0)"] -test = ["shellingham (>=1.3.0,<2.0.0)", "pytest (>=4.4.0,<5.4.0)", "pytest-cov (>=2.10.0,<3.0.0)", "coverage (>=5.2,<6.0)", "pytest-xdist (>=1.32.0,<2.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "mypy (==0.910)", "black (>=22.3.0,<23.0.0)", "isort (>=5.0.6,<6.0.0)", "rich (>=10.11.0,<13.0.0)"] +doc = ["mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)"] +test = ["black (>=22.3.0,<23.0.0)", "coverage (>=5.2,<6.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.910)", "pytest (>=4.4.0,<5.4.0)", "pytest-cov (>=2.10.0,<3.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<2.0.0)", "rich (>=10.11.0,<13.0.0)", "shellingham (>=1.3.0,<2.0.0)"] [[package]] name = "typing-extensions" @@ -832,8 +945,8 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4" [package.extras] -brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"] -secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] @@ -849,7 +962,7 @@ click = ">=7.0" h11 = ">=0.8" [package.extras] -standard = ["websockets (>=10.0)", "httptools (>=0.4.0)", "watchfiles (>=0.13)", "python-dotenv (>=0.13)", "PyYAML (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "colorama (>=0.4)"] +standard = ["PyYAML (>=5.1)", "colorama (>=0.4)", "httptools (>=0.4.0)", "python-dotenv (>=0.13)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.0)"] [[package]] name = "virtualenv" @@ -867,7 +980,7 @@ six = ">=1.9.0,<2" [package.extras] docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=21.3)"] -testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "packaging (>=20.0)"] +testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "packaging (>=20.0)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)"] [[package]] name = "wrapt" @@ -880,7 +993,7 @@ python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" [metadata] lock-version = "1.1" python-versions = "^3.9" -content-hash = "3f8fda370fccd98d786ac541f03b197047c1a443b99561c698b6bf05d66f2bc5" +content-hash = "f4e2e123e74e110d6859d6225270b87eff885fb3ea4e21bb3f58a39fc57488e1" [metadata.files] anyio = [ @@ -927,6 +1040,10 @@ black = [ {file = "black-22.6.0-py3-none-any.whl", hash = 
"sha256:ac609cf8ef5e7115ddd07d85d988d074ed00e10fbc3445aee393e70164a2219c"}, {file = "black-22.6.0.tar.gz", hash = "sha256:6c6d39e28aed379aec40da1c65434c77d75e65bb59a1e1c283de545fb4e7c6c9"}, ] +cachetools = [ + {file = "cachetools-5.2.0-py3-none-any.whl", hash = "sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db"}, + {file = "cachetools-5.2.0.tar.gz", hash = "sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757"}, +] certifi = [ {file = "certifi-2022.6.15-py3-none-any.whl", hash = "sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412"}, {file = "certifi-2022.6.15.tar.gz", hash = "sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d"}, @@ -1077,6 +1194,18 @@ gitpython = [ {file = "GitPython-3.1.27-py3-none-any.whl", hash = "sha256:5b68b000463593e05ff2b261acff0ff0972df8ab1b70d3cdbd41b546c8b8fc3d"}, {file = "GitPython-3.1.27.tar.gz", hash = "sha256:1c885ce809e8ba2d88a29befeb385fcea06338d3640712b59ca623c220bb5704"}, ] +google-auth = [ + {file = "google-auth-2.11.0.tar.gz", hash = "sha256:ed65ecf9f681832298e29328e1ef0a3676e3732b2e56f41532d45f70a22de0fb"}, + {file = "google_auth-2.11.0-py2.py3-none-any.whl", hash = "sha256:be62acaae38d0049c21ca90f27a23847245c9f161ff54ede13af2cb6afecbac9"}, +] +google-auth-oauthlib = [ + {file = "google-auth-oauthlib-0.5.2.tar.gz", hash = "sha256:d5e98a71203330699f92a26bc08847a92e8c3b1b8d82a021f1af34164db143ae"}, + {file = "google_auth_oauthlib-0.5.2-py2.py3-none-any.whl", hash = "sha256:6d6161d0ec0a62e2abf2207c6071c117ec5897b300823c4bb2d963ee86e20e4f"}, +] +gspread = [ + {file = "gspread-5.4.0-py3-none-any.whl", hash = "sha256:21704b47d007c3b5fd34eddfa4c4a9dcd1ecc1dc615083b9c636127726e66c18"}, + {file = "gspread-5.4.0.tar.gz", hash = "sha256:b6172b62fa899e3e4199d2d0ea1008b64305554ba08d3d3a96e9123824fdec48"}, +] h11 = [ {file = "h11-0.13.0-py3-none-any.whl", hash = "sha256:8ddd78563b633ca55346c8cd41ec0af27d3c79931828beffb46ce70a379e7442"}, {file = "h11-0.13.0.tar.gz", hash = "sha256:70813c1135087a248a4d38cc0e1a0181ffab2188141a93eaf567940c3957ff06"}, @@ -1173,6 +1302,10 @@ nodeenv = [ {file = "nodeenv-1.7.0-py2.py3-none-any.whl", hash = "sha256:27083a7b96a25f2f5e1d8cb4b6317ee8aeda3bdd121394e5ac54e498028a042e"}, {file = "nodeenv-1.7.0.tar.gz", hash = "sha256:e0e7f7dfb85fc5394c6fe1e8fa98131a2473e04311a45afb6508f7cf1836fa2b"}, ] +oauthlib = [ + {file = "oauthlib-3.2.0-py3-none-any.whl", hash = "sha256:6db33440354787f9b7f3a6dbd4febf5d0f93758354060e802f6c06cb493022fe"}, + {file = "oauthlib-3.2.0.tar.gz", hash = "sha256:23a8208d75b902797ea29fd31fa80a15ed9dc2c6c16fe73f5d346f83f6fa27a2"}, +] packaging = [ {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, @@ -1201,6 +1334,36 @@ py = [ {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, ] +pyasn1 = [ + {file = "pyasn1-0.4.8-py2.4.egg", hash = "sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3"}, + {file = "pyasn1-0.4.8-py2.5.egg", hash = "sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf"}, + {file = "pyasn1-0.4.8-py2.6.egg", hash = "sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00"}, + {file = 
"pyasn1-0.4.8-py2.7.egg", hash = "sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8"}, + {file = "pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"}, + {file = "pyasn1-0.4.8-py3.1.egg", hash = "sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86"}, + {file = "pyasn1-0.4.8-py3.2.egg", hash = "sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7"}, + {file = "pyasn1-0.4.8-py3.3.egg", hash = "sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576"}, + {file = "pyasn1-0.4.8-py3.4.egg", hash = "sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12"}, + {file = "pyasn1-0.4.8-py3.5.egg", hash = "sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2"}, + {file = "pyasn1-0.4.8-py3.6.egg", hash = "sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359"}, + {file = "pyasn1-0.4.8-py3.7.egg", hash = "sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776"}, + {file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"}, +] +pyasn1-modules = [ + {file = "pyasn1-modules-0.2.8.tar.gz", hash = "sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e"}, + {file = "pyasn1_modules-0.2.8-py2.4.egg", hash = "sha256:0fe1b68d1e486a1ed5473f1302bd991c1611d319bba158e98b106ff86e1d7199"}, + {file = "pyasn1_modules-0.2.8-py2.5.egg", hash = "sha256:fe0644d9ab041506b62782e92b06b8c68cca799e1a9636ec398675459e031405"}, + {file = "pyasn1_modules-0.2.8-py2.6.egg", hash = "sha256:a99324196732f53093a84c4369c996713eb8c89d360a496b599fb1a9c47fc3eb"}, + {file = "pyasn1_modules-0.2.8-py2.7.egg", hash = "sha256:0845a5582f6a02bb3e1bde9ecfc4bfcae6ec3210dd270522fee602365430c3f8"}, + {file = "pyasn1_modules-0.2.8-py2.py3-none-any.whl", hash = "sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74"}, + {file = "pyasn1_modules-0.2.8-py3.1.egg", hash = "sha256:f39edd8c4ecaa4556e989147ebf219227e2cd2e8a43c7e7fcb1f1c18c5fd6a3d"}, + {file = "pyasn1_modules-0.2.8-py3.2.egg", hash = "sha256:b80486a6c77252ea3a3e9b1e360bc9cf28eaac41263d173c032581ad2f20fe45"}, + {file = "pyasn1_modules-0.2.8-py3.3.egg", hash = "sha256:65cebbaffc913f4fe9e4808735c95ea22d7a7775646ab690518c056784bc21b4"}, + {file = "pyasn1_modules-0.2.8-py3.4.egg", hash = "sha256:15b7c67fabc7fc240d87fb9aabf999cf82311a6d6fb2c70d00d3d0604878c811"}, + {file = "pyasn1_modules-0.2.8-py3.5.egg", hash = "sha256:426edb7a5e8879f1ec54a1864f16b882c2837bfd06eee62f2c982315ee2473ed"}, + {file = "pyasn1_modules-0.2.8-py3.6.egg", hash = "sha256:cbac4bc38d117f2a49aeedec4407d23e8866ea4ac27ff2cf7fb3e5b570df19e0"}, + {file = "pyasn1_modules-0.2.8-py3.7.egg", hash = "sha256:c29a5e5cc7a3f05926aff34e097e84f8589cd790ce0ed41b67aed6857b26aafd"}, +] pycparser = [ {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, @@ -1325,16 +1488,25 @@ requests = [ {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"}, {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"}, ] +requests-oauthlib = [ + {file = "requests-oauthlib-1.3.1.tar.gz", hash = 
"sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a"}, + {file = "requests_oauthlib-1.3.1-py2.py3-none-any.whl", hash = "sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5"}, +] rich = [ {file = "rich-12.5.1-py3-none-any.whl", hash = "sha256:2eb4e6894cde1e017976d2975ac210ef515d7548bc595ba20e195fb9628acdeb"}, {file = "rich-12.5.1.tar.gz", hash = "sha256:63a5c5ce3673d3d5fbbf23cd87e11ab84b6b451436f1b7f19ec54b6bc36ed7ca"}, ] +rsa = [ + {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, + {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, +] "ruamel.yaml" = [ {file = "ruamel.yaml-0.17.21-py3-none-any.whl", hash = "sha256:742b35d3d665023981bd6d16b3d24248ce5df75fdb4e2924e93a05c1f8b61ca7"}, {file = "ruamel.yaml-0.17.21.tar.gz", hash = "sha256:8b7ce697a2f212752a35c1ac414471dc16c424c9573be4926b56ff3f5d23b7af"}, ] "ruamel.yaml.clib" = [ {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6e7be2c5bcb297f5b82fee9c665eb2eb7001d1050deaba8471842979293a80b0"}, + {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:066f886bc90cc2ce44df8b5f7acfc6a7e2b2e672713f027136464492b0c34d7c"}, {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:221eca6f35076c6ae472a531afa1c223b9c29377e62936f61bc8e6e8bdc5f9e7"}, {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-win32.whl", hash = "sha256:1070ba9dd7f9370d0513d649420c3b362ac2d687fe78c6e888f5b12bf8bc7bee"}, {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-win_amd64.whl", hash = "sha256:77df077d32921ad46f34816a9a16e6356d8100374579bc35e15bab5d4e9377de"}, @@ -1344,18 +1516,22 @@ rich = [ {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-win_amd64.whl", hash = "sha256:de9c6b8a1ba52919ae919f3ae96abb72b994dd0350226e28f3686cb4f142165c"}, {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d67f273097c368265a7b81e152e07fb90ed395df6e552b9fa858c6d2c9f42502"}, {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:72a2b8b2ff0a627496aad76f37a652bcef400fd861721744201ef1b45199ab78"}, + {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:d3c620a54748a3d4cf0bcfe623e388407c8e85a4b06b8188e126302bcab93ea8"}, {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-win32.whl", hash = "sha256:9efef4aab5353387b07f6b22ace0867032b900d8e91674b5d8ea9150db5cae94"}, {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-win_amd64.whl", hash = "sha256:846fc8336443106fe23f9b6d6b8c14a53d38cef9a375149d61f99d78782ea468"}, {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0847201b767447fc33b9c235780d3aa90357d20dd6108b92be544427bea197dd"}, {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:78988ed190206672da0f5d50c61afef8f67daa718d614377dcd5e3ed85ab4a99"}, + {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:210c8fcfeff90514b7133010bf14e3bad652c8efde6b20e00c43854bf94fa5a6"}, {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-win32.whl", hash = "sha256:a49e0161897901d1ac9c4a79984b8410f450565bbad64dbfcbf76152743a0cdb"}, {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-win_amd64.whl", hash = "sha256:bf75d28fa071645c529b5474a550a44686821decebdd00e21127ef1fd566eabe"}, {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:a32f8d81ea0c6173ab1b3da956869114cae53ba1e9f72374032e33ba3118c233"}, {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7f7ecb53ae6848f959db6ae93bdff1740e651809780822270eab111500842a84"}, + {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:61bc5e5ca632d95925907c569daa559ea194a4d16084ba86084be98ab1cec1c6"}, {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-win32.whl", hash = "sha256:89221ec6d6026f8ae859c09b9718799fea22c0e8da8b766b0b2c9a9ba2db326b"}, {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-win_amd64.whl", hash = "sha256:31ea73e564a7b5fbbe8188ab8b334393e06d997914a4e184975348f204790277"}, {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dc6a613d6c74eef5a14a214d433d06291526145431c3b964f5e16529b1842bed"}, {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:1866cf2c284a03b9524a5cc00daca56d80057c5ce3cdc86a52020f4c720856f0"}, + {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:1b4139a6ffbca8ef60fdaf9b33dec05143ba746a6f0ae0f9d11d38239211d335"}, {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-win32.whl", hash = "sha256:3fb9575a5acd13031c57a62cc7823e5d2ff8bc3835ba4d94b921b4e6ee664104"}, {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-win_amd64.whl", hash = "sha256:825d5fccef6da42f3c8eccd4281af399f21c02b32d98e113dbc631ea6a6ecbc7"}, {file = "ruamel.yaml.clib-0.2.6.tar.gz", hash = "sha256:4ff604ce439abb20794f05613c374759ce10e3595d1867764dd1ae675b85acbd"}, @@ -1409,8 +1585,8 @@ tomlkit = [ {file = "tomlkit-0.11.1.tar.gz", hash = "sha256:61901f81ff4017951119cd0d1ed9b7af31c821d6845c8c477587bbdcd5e5854e"}, ] typer = [ - {file = "typer-0.5.0-py3-none-any.whl", hash = "sha256:a34409c0029ba7e48cb9e4f54c6400bf4158a6145b5dea32788e7a36ebbcb312"}, - {file = "typer-0.5.0.tar.gz", hash = "sha256:4c285a5585c94d32c305444af934f0078b6a8ba91464f3f85807c91cd499d195"}, + {file = "typer-0.6.1-py3-none-any.whl", hash = "sha256:54b19e5df18654070a82f8c2aa1da456a4ac16a2a83e6dcd9f170e291c56338e"}, + {file = "typer-0.6.1.tar.gz", hash = "sha256:2d5720a5e63f73eaf31edaa15f6ab87f35f0690f8ca233017d7d23d743a91d73"}, ] typing-extensions = [ {file = "typing_extensions-4.3.0-py3-none-any.whl", hash = "sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02"}, diff --git a/pyproject.toml b/pyproject.toml index e8d4806..1cd87c5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -43,6 +43,7 @@ requests = ">=2.22.0" PyGithub = ">=1.55" cryptography = ">=36.0.1" urllib3 = ">=1.26.5" +gspread = "^5.4.0" [tool.poetry.dev-dependencies] darglint = ">=1.8.0" From d2e6f9750e360d6058291634e54146b8e7dee486 Mon Sep 17 00:00:00 2001 From: bruno zheng Date: Tue, 30 Aug 2022 11:20:23 +0200 Subject: [PATCH 10/13] feat: add login and action information on Google sheet --- bin/validator.py | 27 ++++++++++++++++++------ config/config.py | 4 ++-- lib/connectors/google_sheet_connector.py | 8 ++++--- 3 files changed, 28 insertions(+), 11 deletions(-) diff --git a/bin/validator.py b/bin/validator.py index 7bd22a9..fa9d50a 100644 --- a/bin/validator.py +++ b/bin/validator.py @@ -22,7 +22,9 @@ def validator(payload: Dict[str, Any]) -> Any: # Init trigger trigger = get_trigger(payload) - if trigger == "pull_request" and payload["action"] not in ["reopened", "opened"]: + if not trigger or ( + trigger == "pull_request" and payload["action"] not in ["reopened", "opened"] + ): return # Init Google Sheet @@ -39,7 +41,10 @@ def validator(payload: Dict[str, Any]) -> Any: student_github_connector = 
get_student_github_connector(student_user, payload) if not student_github_connector: gsheet.add_new_repo_valid_result( - student_user, False, "[ERROR]: cannot get the student github repository." + student_user, + "Connect to student repository", + False, + "[ERROR]: cannot get the student github repository.", ) logging.error("[ERROR]: cannot get the student github repository.") return @@ -47,7 +52,10 @@ def validator(payload: Dict[str, Any]) -> Any: solution_github_connector = GitHubConnector(solution_user, SOLUTION_REPO_NAME, "main") if not student_github_connector: gsheet.add_new_repo_valid_result( - student_user, False, "[ERROR]: cannot get the solution github repository." + student_user, + "Connect to school_of_data_tests repository", + False, + "[ERROR]: cannot get the solution github repository.", ) logging.error("[ERROR]: cannot get the solution github repository.") return @@ -59,6 +67,7 @@ def validator(payload: Dict[str, Any]) -> Any: # Add valid repo result on Google Sheet gsheet.add_new_repo_valid_result( student_user, + "Check the test folder", tests_havent_changed, default_message["valid_repository"][str(tests_havent_changed)], ) @@ -66,6 +75,12 @@ def validator(payload: Dict[str, Any]) -> Any: # Update Pull Request if "pull_request" in payload: issue = student_github_connector.repo.get_issue(number=payload["pull_request"]["number"]) - issue.create_comment(default_message["valid_repository"][str(tests_havent_changed)]) - if not tests_havent_changed: - issue.edit(state="closed") + message = default_message["valid_repository"][str(tests_havent_changed)] + issue.create_comment(message) + conclusion = "success" if tests_havent_changed else "failure" + student_github_connector.repo.create_check_run( + name=message, + head_sha=payload["pull_request"]["head"]["sha"], + status="completed", + conclusion=conclusion, + ) diff --git a/config/config.py b/config/config.py index ce3e4a9..5be68c6 100644 --- a/config/config.py +++ b/config/config.py @@ -24,7 +24,7 @@ # Others default_message = { "valid_repository": { - "True": "Validation of the repository `school_of_data_tests`", - "False": "The repository `school_of_data_tests` does not seem to be the correct version.", + "True": "Your folder `Test` is valid", + "False": "Your folder `Test` has been modified and is no longer valid.", } } diff --git a/lib/connectors/google_sheet_connector.py b/lib/connectors/google_sheet_connector.py index 4250951..203cfeb 100644 --- a/lib/connectors/google_sheet_connector.py +++ b/lib/connectors/google_sheet_connector.py @@ -36,16 +36,18 @@ def add_new_user_on_sheet(self, user: GitHubUser) -> None: worksheet.append_row(new_row) logging.info("Done.") - def add_new_repo_valid_result(self, user: GitHubUser, result: bool, info: str = "") -> None: - # Controle the workseet exist of not + def add_new_repo_valid_result( + self, user: GitHubUser, action: str, result: bool, info: str = "" + ) -> None: worksheet = self.spreadsheet.worksheet("check_validation_repo") - headers = worksheet.row_values(1) user_dict = user.__dict__ new_row = list() for header in headers: if header == "is_valid": new_row.append(str(result)) + elif header == "action": + new_row.append(action) elif header == "user_id": new_row.append(user.ID) elif header == "info": From ad7ccfa75e6f62d4d9f99dfc1a5e41ba62b3d8f1 Mon Sep 17 00:00:00 2001 From: bruno zheng Date: Tue, 30 Aug 2022 15:41:19 +0200 Subject: [PATCH 11/13] feat: fix and add tests on pytest --- bin/github_validator_repo.py | 30 +++++++++---- config/__init__.py | 3 +- config/config.py | 4 
++ tests/units/basic_test.py | 5 --- tests/units/test_github_repo_validation.py | 30 +++++++++++++ tests/units/test_utils.py | 51 ++++++++++++++++++++++ 6 files changed, 108 insertions(+), 15 deletions(-) delete mode 100644 tests/units/basic_test.py create mode 100644 tests/units/test_github_repo_validation.py create mode 100644 tests/units/test_utils.py diff --git a/bin/github_validator_repo.py b/bin/github_validator_repo.py index f867100..6c9a4ac 100644 --- a/bin/github_validator_repo.py +++ b/bin/github_validator_repo.py @@ -1,24 +1,22 @@ -from typing import Any, Dict, Union +from typing import Any, Dict, Optional, Union import logging -from config.config import TESTS_FOLDER_NAME +from config.config import TESTS_FOLDER_NAME, commit_sha_path from github import ContentFile from lib.connectors.github_connector import GitHubConnector from lib.user import GitHubUser -triggers = {"pull_request": ["pull_request", "head", "ref"], "pusher": ["ref"]} - def get_trigger(payload: Dict[str, Any]) -> Any: - for trigger in triggers: + for trigger in commit_sha_path: if trigger in payload: return trigger return None def get_student_branch(payload: Dict[str, Any], trigger: Union[str, None] = None) -> Any: - trigger = get_trigger(payload) if not trigger else None + trigger = get_trigger(payload) if not trigger else trigger if not trigger: # Log error # FIXME @@ -26,16 +24,30 @@ def get_student_branch(payload: Dict[str, Any], trigger: Union[str, None] = None # FIXME logging.error("Couldn't find the student branch, maybe the trigger is not managed") return None - path = triggers[trigger] + + path = commit_sha_path[trigger].copy() branch = payload while path: - branch = branch[path.pop(0)] + + try: + branch = branch[path.pop(0)] + except KeyError as key_err: + logging.error(key_err) + return None + except Exception as err: + logging.error(err) + return None + return branch def get_student_github_connector( student: GitHubUser, payload: Dict[str, Any] -) -> Union[bool, GitHubConnector]: +) -> Union[GitHubConnector, None]: + + if student is None: + return None + github_student_branch = get_student_branch(payload) if github_student_branch is None: return None diff --git a/config/__init__.py b/config/__init__.py index cf878d5..44c8e8e 100644 --- a/config/__init__.py +++ b/config/__init__.py @@ -8,4 +8,5 @@ datefmt=DATEFMT, ) -logging.getLogger("uvicorn").removeHandler(logging.getLogger("uvicorn").handlers[0]) +if logging.getLogger("uvicorn") and logging.getLogger("uvicorn").handlers: + logging.getLogger("uvicorn").removeHandler(logging.getLogger("uvicorn").handlers[0]) diff --git a/config/config.py b/config/config.py index 5be68c6..4391b1d 100644 --- a/config/config.py +++ b/config/config.py @@ -10,6 +10,8 @@ SOLUTION_TESTS_ACCESS_TOKEN = cast(str, os.getenv("SOLUTION_TESTS_ACCESS_TOKEN")) SOLUTION_OWNER = "artefactory-fr" SOLUTION_REPO_NAME = "school_of_data_tests" + +# Also github workflow TESTS_FOLDER_NAME = "tests" git_integration = GithubIntegration( @@ -28,3 +30,5 @@ "False": "Your folder `Test` has been modified and is no longer valid.", } } + +commit_sha_path = {"pull_request": ["pull_request", "head", "ref"], "pusher": ["ref"]} diff --git a/tests/units/basic_test.py b/tests/units/basic_test.py deleted file mode 100644 index f8dbcf1..0000000 --- a/tests/units/basic_test.py +++ /dev/null @@ -1,5 +0,0 @@ -from github_tests_validator_app.utils import get_hash_files - - -def test_length_hash(): - assert len(get_hash_files([])) == 64 diff --git a/tests/units/test_github_repo_validation.py 
b/tests/units/test_github_repo_validation.py new file mode 100644 index 0000000..dd3a2a1 --- /dev/null +++ b/tests/units/test_github_repo_validation.py @@ -0,0 +1,30 @@ +import pytest +from github_tests_validator_app.github_repo_validation import get_event, get_student_branch +from lib.user import GitHubUser + + +@pytest.mark.parametrize( + "payload,expected", + [ + ({}, None), + ({"unkown": "unkown"}, None), + ({"pull_request": "test"}, "pull_request"), + ({"pusher": "test"}, "pusher"), + ], +) +def test_get_event(payload, expected): + assert get_event(payload) == expected + + +@pytest.mark.parametrize( + "payload,trigger,expected", + [ + ({"unknown": "unknown"}, None, None), + ({"no_path": "no_path"}, "pull_request", None), + ({"pull_request": {"head": {"unknown": "unknown"}}}, "pull_request", None), + ({"pull_request": {"head": {"ref": "path"}}}, "pull_request", "path"), + ({"ref": "path"}, "pusher", "path"), + ], +) +def test_get_student_branch(payload, trigger, expected): + assert get_student_branch(payload, trigger) == expected diff --git a/tests/units/test_utils.py b/tests/units/test_utils.py new file mode 100644 index 0000000..b5dd8da --- /dev/null +++ b/tests/units/test_utils.py @@ -0,0 +1,51 @@ +from unittest.mock import PropertyMock + +import pytest +from github import ContentFile +from lib.user import GitHubUser +from lib.utils import get_hash_files, init_github_user_from_github_event + + +@pytest.mark.parametrize( + "contents,expected", + [ + ([""], "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"), + (["1", "2", "3", "4"], "03ac674216f3e15c761ee1a5e255f067953623c8b388b4459e13f978d7c846f4"), + ( + ["abc", "def", "ghi", "klmn"], + "00c0780a668d95d2052f927c4fd5b4bbc35feaaf6a3e3c54b048eee29d0eef6a", + ), + ], +) +def test_get_hast_files(mocker, contents, expected): + mocker.patch( + "github.ContentFile.ContentFile.sha", + new_callable=PropertyMock, + side_effect=contents, + ) + assert get_hash_files([ContentFile.ContentFile for _ in contents]) == expected + + +@pytest.mark.parametrize( + "contents,expected", + [ + ( + {"repository": {"owner": {"login": "test", "id": "1234", "url": "url"}}}, + GitHubUser(LOGIN="test", ID="1234", URL="url"), + ), + ( + {"repository": {"owner": {"login": None, "id": None, "url": None}}}, + GitHubUser(LOGIN=None, ID=None, URL=None), + ), + ({}, None), + ], +) +def test_init_github_user_from_github_event(contents, expected): + github_user = init_github_user_from_github_event(contents) + assert isinstance(github_user, type(expected)) + if isinstance(github_user, GitHubUser): + assert ( + github_user.LOGIN == expected.LOGIN + and github_user.ID == expected.ID + and github_user.URL == expected.URL + ) From a17ae007542c145b069159378bcb2b636613b43a Mon Sep 17 00:00:00 2001 From: bruno zheng Date: Tue, 6 Sep 2022 14:18:56 +0200 Subject: [PATCH 12/13] feat: new achitecture of project, new process of artifact * new app github_tests_validator_app * new process to handle artifact after a finished workflow * draft: parsing the result of artifact and send to google sheet --- Makefile | 2 +- bin/github_validator_repo.py | 83 --------- bin/validator.py | 86 --------- config/config.py | 34 ---- github_tests_validator_app/__init__.py | 0 github_tests_validator_app/bin/__init__.py | 0 .../bin/github_event_process.py | 67 +++++++ .../bin/github_repo_validation.py | 124 +++++++++++++ .../student_challenge_results_validation.py | 166 ++++++++++++++++++ .../config}/__init__.py | 0 github_tests_validator_app/config/config.py | 49 ++++++ 
github_tests_validator_app/lib/__init__.py | 0 .../lib/connectors/github_connector.py | 110 ++++++++++++ .../lib/connectors/google_sheet_connector.py | 100 +++++++++++ .../lib/pytest_result.py | 14 ++ .../lib/users.py | 4 +- github_tests_validator_app/lib/utils.py | 26 +++ lib/connectors/github_connector.py | 46 ----- lib/connectors/google_sheet_connector.py | 59 ------- lib/utils.py | 22 --- server.py | 2 +- tests/units/test_github_repo_validation.py | 3 +- tests/units/test_utils.py | 12 +- 23 files changed, 668 insertions(+), 341 deletions(-) delete mode 100644 bin/github_validator_repo.py delete mode 100644 bin/validator.py delete mode 100644 config/config.py create mode 100644 github_tests_validator_app/__init__.py create mode 100644 github_tests_validator_app/bin/__init__.py create mode 100644 github_tests_validator_app/bin/github_event_process.py create mode 100644 github_tests_validator_app/bin/github_repo_validation.py create mode 100644 github_tests_validator_app/bin/student_challenge_results_validation.py rename {config => github_tests_validator_app/config}/__init__.py (100%) create mode 100644 github_tests_validator_app/config/config.py create mode 100644 github_tests_validator_app/lib/__init__.py create mode 100644 github_tests_validator_app/lib/connectors/github_connector.py create mode 100644 github_tests_validator_app/lib/connectors/google_sheet_connector.py create mode 100644 github_tests_validator_app/lib/pytest_result.py rename lib/user.py => github_tests_validator_app/lib/users.py (85%) create mode 100644 github_tests_validator_app/lib/utils.py delete mode 100644 lib/connectors/github_connector.py delete mode 100644 lib/connectors/google_sheet_connector.py delete mode 100644 lib/utils.py diff --git a/Makefile b/Makefile index 6962d6e..a9d8017 100644 --- a/Makefile +++ b/Makefile @@ -113,7 +113,7 @@ check-style: poetry run black --config pyproject.toml --diff --check ./$(BLACK_COMMAND_FLAG) && \ poetry run darglint -v 2 **/*.py$(DARGLINT_COMMAND_FLAG) && \ poetry run isort --settings-path pyproject.toml --check-only **/*.py$(ISORT_COMMAND_FLAG) && \ - poetry run mypy --config-file setup.cfg bin lib tests/**/*.py$(MYPY_COMMAND_FLAG) + poetry run mypy --config-file setup.cfg github_tests_validator_app tests/**/*.py$(MYPY_COMMAND_FLAG) .PHONY: format-code format-code: diff --git a/bin/github_validator_repo.py b/bin/github_validator_repo.py deleted file mode 100644 index 6c9a4ac..0000000 --- a/bin/github_validator_repo.py +++ /dev/null @@ -1,83 +0,0 @@ -from typing import Any, Dict, Optional, Union - -import logging - -from config.config import TESTS_FOLDER_NAME, commit_sha_path -from github import ContentFile -from lib.connectors.github_connector import GitHubConnector -from lib.user import GitHubUser - - -def get_trigger(payload: Dict[str, Any]) -> Any: - for trigger in commit_sha_path: - if trigger in payload: - return trigger - return None - - -def get_student_branch(payload: Dict[str, Any], trigger: Union[str, None] = None) -> Any: - trigger = get_trigger(payload) if not trigger else trigger - if not trigger: - # Log error - # FIXME - # Archive the payload - # FIXME - logging.error("Couldn't find the student branch, maybe the trigger is not managed") - return None - - path = commit_sha_path[trigger].copy() - branch = payload - while path: - - try: - branch = branch[path.pop(0)] - except KeyError as key_err: - logging.error(key_err) - return None - except Exception as err: - logging.error(err) - return None - - return branch - - -def get_student_github_connector( - 
student: GitHubUser, payload: Dict[str, Any] -) -> Union[GitHubConnector, None]: - - if student is None: - return None - - github_student_branch = get_student_branch(payload) - if github_student_branch is None: - return None - - repo_name = payload["repository"]["name"] - student.get_access_token(repo_name) - return GitHubConnector(student, repo_name, github_student_branch) - - -def compare_tests_folder(student_github: GitHubConnector, solution_repo: GitHubConnector) -> Any: - - student_contents = student_github.repo.get_contents( - TESTS_FOLDER_NAME, ref=student_github.BRANCH_NAME - ) - - if ( - isinstance(student_contents, ContentFile.ContentFile) - and student_contents.type == "submodule" - ): - solution_last_commit = solution_repo.get_last_hash_commit() - student_tests_commit = student_contents.sha - return solution_last_commit == student_tests_commit - - student_hash_tests = student_github.get_tests_hash(TESTS_FOLDER_NAME) - solution_hash_tests = solution_repo.get_tests_hash(TESTS_FOLDER_NAME) - return student_hash_tests == solution_hash_tests - - -def github_validator_repo(student_github: GitHubConnector, solution_github: GitHubConnector) -> Any: - # Valide of repo - tests_havent_changed = compare_tests_folder(student_github, solution_github) - - return tests_havent_changed diff --git a/bin/validator.py b/bin/validator.py deleted file mode 100644 index fa9d50a..0000000 --- a/bin/validator.py +++ /dev/null @@ -1,86 +0,0 @@ -from typing import Any, Dict - -import logging - -from bin.github_validator_repo import ( - get_student_github_connector, - get_trigger, - github_validator_repo, -) -from config.config import ( - SOLUTION_OWNER, - SOLUTION_REPO_NAME, - SOLUTION_TESTS_ACCESS_TOKEN, - default_message, -) -from lib.connectors.github_connector import GitHubConnector -from lib.connectors.google_sheet_connector import GSheet -from lib.user import GitHubUser -from lib.utils import get_github_user - - -def validator(payload: Dict[str, Any]) -> Any: - # Init trigger - trigger = get_trigger(payload) - if not trigger or ( - trigger == "pull_request" and payload["action"] not in ["reopened", "opened"] - ): - return - - # Init Google Sheet - gsheet = GSheet() - - # Init GitHubUser - student_user = get_github_user(payload) - solution_user = GitHubUser(LOGIN=str(SOLUTION_OWNER), ACCESS_TOKEN=SOLUTION_TESTS_ACCESS_TOKEN) - - # Add user on Google Sheet - gsheet.add_new_user_on_sheet(student_user) - - # Check valid repo - student_github_connector = get_student_github_connector(student_user, payload) - if not student_github_connector: - gsheet.add_new_repo_valid_result( - student_user, - "Connect to student repository", - False, - "[ERROR]: cannot get the student github repository.", - ) - logging.error("[ERROR]: cannot get the student github repository.") - return - - solution_github_connector = GitHubConnector(solution_user, SOLUTION_REPO_NAME, "main") - if not student_github_connector: - gsheet.add_new_repo_valid_result( - student_user, - "Connect to school_of_data_tests repository", - False, - "[ERROR]: cannot get the solution github repository.", - ) - logging.error("[ERROR]: cannot get the solution github repository.") - return - - tests_havent_changed = github_validator_repo( - student_github_connector, solution_github_connector - ) - - # Add valid repo result on Google Sheet - gsheet.add_new_repo_valid_result( - student_user, - "Check the test folder", - tests_havent_changed, - default_message["valid_repository"][str(tests_havent_changed)], - ) - - # Update Pull Request - if "pull_request" in 
payload: - issue = student_github_connector.repo.get_issue(number=payload["pull_request"]["number"]) - message = default_message["valid_repository"][str(tests_havent_changed)] - issue.create_comment(message) - conclusion = "success" if tests_havent_changed else "failure" - student_github_connector.repo.create_check_run( - name=message, - head_sha=payload["pull_request"]["head"]["sha"], - status="completed", - conclusion=conclusion, - ) diff --git a/config/config.py b/config/config.py deleted file mode 100644 index 4391b1d..0000000 --- a/config/config.py +++ /dev/null @@ -1,34 +0,0 @@ -from typing import cast - -import os - -from github import GithubIntegration - -# GitHub -APP_ID = cast(str, os.getenv("GH_APP_ID")) -APP_KEY = cast(str, os.getenv("GH_APP_KEY")) -SOLUTION_TESTS_ACCESS_TOKEN = cast(str, os.getenv("SOLUTION_TESTS_ACCESS_TOKEN")) -SOLUTION_OWNER = "artefactory-fr" -SOLUTION_REPO_NAME = "school_of_data_tests" - -# Also github workflow -TESTS_FOLDER_NAME = "tests" - -git_integration = GithubIntegration( - APP_ID, - APP_KEY, -) - -# Google Sheet -GSHEET_SA_JSON = cast(str, os.getenv("GSHEET_SA_JSON")) -GSHEET_WORKSHEET_ID = "1tzn73q_QhZ2gLAmZObRsE_JmD6yD6433uZBGc-Llsdk" - -# Others -default_message = { - "valid_repository": { - "True": "Your folder `Test` is valid", - "False": "Your folder `Test` has been modified and is no longer valid.", - } -} - -commit_sha_path = {"pull_request": ["pull_request", "head", "ref"], "pusher": ["ref"]} diff --git a/github_tests_validator_app/__init__.py b/github_tests_validator_app/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/github_tests_validator_app/bin/__init__.py b/github_tests_validator_app/bin/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/github_tests_validator_app/bin/github_event_process.py b/github_tests_validator_app/bin/github_event_process.py new file mode 100644 index 0000000..9746d51 --- /dev/null +++ b/github_tests_validator_app/bin/github_event_process.py @@ -0,0 +1,67 @@ +from typing import Any, Dict + +import logging + +from github_tests_validator_app.bin.github_repo_validation import ( + get_event, + get_student_github_connector, + github_repo_validation, +) +from github_tests_validator_app.bin.student_challenge_results_validation import ( + send_student_challenge_results, +) +from github_tests_validator_app.lib.connectors.google_sheet_connector import GSheet +from github_tests_validator_app.lib.users import GitHubUser +from github_tests_validator_app.lib.utils import init_github_user_from_github_event + +process = { + "pull_request": github_repo_validation, + "pusher": github_repo_validation, + "workflow_job": send_student_challenge_results, +} + + +def validator(payload: Dict[str, Any]) -> Any: + """ + Validator function + + Args: + payload Dict[str, Any]: information of new event + + Returns: + None: Return nothing + """ + # Get event + event = get_event(payload) + if ( + not event + or (event == "pull_request" and payload["action"] not in ["reopened", "opened"]) + or (event == "workflow_job" and payload["action"] not in ["completed"]) + ): + return + + # Init Google Sheet + gsheet = GSheet() + + # Init GitHubUser + student_user = init_github_user_from_github_event(payload) + if not isinstance(student_user, GitHubUser): + # Logging + return + + # Add user on Google Sheet + gsheet.add_new_user_on_sheet(student_user) + + # Check valid repo + student_github_connector = get_student_github_connector(student_user, payload) + if not student_github_connector: + 
gsheet.add_new_repo_valid_result( + student_user, + False, + "[ERROR]: cannot get the student github repository.", + ) + logging.error("[ERROR]: cannot get the student github repository.") + return + + logging.info(f"Begin {event} process...") + process[event](student_github_connector, gsheet, payload) diff --git a/github_tests_validator_app/bin/github_repo_validation.py b/github_tests_validator_app/bin/github_repo_validation.py new file mode 100644 index 0000000..b6467e0 --- /dev/null +++ b/github_tests_validator_app/bin/github_repo_validation.py @@ -0,0 +1,124 @@ +from typing import Any, Dict, Union + +import logging + +from github import ContentFile +from github_tests_validator_app.config.config import ( + GH_SOLUTION_OWNER, + GH_SOLUTION_REPO_NAME, + GH_SOLUTION_TESTS_ACCESS_TOKEN, + GH_TESTS_FOLDER_NAME, + commit_sha_path, + default_message, +) +from github_tests_validator_app.lib.connectors.github_connector import GitHubConnector +from github_tests_validator_app.lib.connectors.google_sheet_connector import GSheet +from github_tests_validator_app.lib.users import GitHubUser + + +def get_event(payload: Dict[str, Any]) -> Any: + for event in commit_sha_path: + if event in payload: + return event + return None + + +def get_student_branch(payload: Dict[str, Any], trigger: Union[str, None] = None) -> Any: + trigger = get_event(payload) if not trigger else trigger + if not trigger: + # Log error + # FIXME + # Archive the payload + # FIXME + logging.error("Couldn't find the student branch, maybe the trigger is not managed") + return None + + path = commit_sha_path[trigger].copy() + branch = payload + while path: + + try: + branch = branch[path.pop(0)] + except KeyError as key_err: + logging.error(key_err) + return None + except Exception as err: + logging.error(err) + return None + + return branch + + +def get_student_github_connector( + student: GitHubUser, payload: Dict[str, Any] +) -> Union[GitHubConnector, None]: + + if not student: + return None + + github_student_branch = get_student_branch(payload) + if github_student_branch is None: + return None + + repo_name = payload["repository"]["name"] + student.get_access_token(repo_name) + return GitHubConnector(student, repo_name, github_student_branch) + + +def compare_tests_folder(student_github: GitHubConnector, solution_repo: GitHubConnector) -> Any: + + student_contents = student_github.repo.get_contents( + GH_TESTS_FOLDER_NAME, ref=student_github.BRANCH_NAME + ) + + if ( + isinstance(student_contents, ContentFile.ContentFile) + and student_contents.type == "submodule" + ): + solution_last_commit = solution_repo.get_last_hash_commit() + student_tests_commit = student_contents.sha + return solution_last_commit == student_tests_commit + + student_hash_tests = student_github.get_tests_hash(GH_TESTS_FOLDER_NAME) + solution_hash_tests = solution_repo.get_tests_hash(GH_TESTS_FOLDER_NAME) + return student_hash_tests == solution_hash_tests + + +def github_repo_validation( + student_github_connector: GitHubConnector, gsheet: GSheet, payload: Dict[str, Any] +) -> None: + + solution_user = GitHubUser( + LOGIN=str(GH_SOLUTION_OWNER), ACCESS_TOKEN=GH_SOLUTION_TESTS_ACCESS_TOKEN + ) + solution_github_connector = GitHubConnector(solution_user, GH_SOLUTION_REPO_NAME, "main") + if not solution_github_connector: + gsheet.add_new_repo_valid_result( + solution_user, + False, + "[ERROR]: cannot get the solution github repository.", + ) + logging.error("[ERROR]: cannot get the solution github repository.") + return + + tests_havent_changed = 
compare_tests_folder(student_github_connector, solution_github_connector) + + # Add valid repo result on Google Sheet + gsheet.add_new_repo_valid_result( + student_github_connector.user, + tests_havent_changed, + default_message["valid_repository"][str(tests_havent_changed)], + ) + + # Update Pull Request + if "pull_request" in payload: + issue = student_github_connector.repo.get_issue(number=payload["pull_request"]["number"]) + message = default_message["valid_repository"][str(tests_havent_changed)] + issue.create_comment(message) + conclusion = "success" if tests_havent_changed else "failure" + student_github_connector.repo.create_check_run( + name=message, + head_sha=payload["pull_request"]["head"]["sha"], + status="completed", + conclusion=conclusion, + ) diff --git a/github_tests_validator_app/bin/student_challenge_results_validation.py b/github_tests_validator_app/bin/student_challenge_results_validation.py new file mode 100644 index 0000000..54d48b0 --- /dev/null +++ b/github_tests_validator_app/bin/student_challenge_results_validation.py @@ -0,0 +1,166 @@ +from typing import Any, Dict, List, Tuple, Union + +import logging +from collections import defaultdict + +from github_tests_validator_app.config.config import CHALLENGES_PATH +from github_tests_validator_app.lib.connectors.github_connector import GitHubConnector +from github_tests_validator_app.lib.connectors.google_sheet_connector import GSheet +from github_tests_validator_app.lib.pytest_result import PytestResult +from github_tests_validator_app.lib.users import GitHubUser + + +def init_pytest_result_from_artifact(artifact: Dict[str, Any]) -> Union[PytestResult, None]: + if not artifact: + return None + + # Get result of test + + return PytestResult( + DURATION=artifact["duration"], + TOTAL_TESTS_COLLECTED=artifact["summary"]["collected"], + TOTAL_PASSED=artifact["summary"]["passed"], + TOTAL_FAILED=artifact["summary"]["failed"], + DESCRIPTION_TEST_RESULTS=artifact["tests"], + ) + + +def get_student_artifact( + student_github_connector: GitHubConnector, + gsheet: GSheet, + all_student_artifact: Dict[str, Any], + payload: Dict[str, Any], +) -> Any: + workflow_run_id = payload["workflow_job"]["run_id"] + artifact_info = student_github_connector.get_artifact_info_from_artifacts_with_worflow_run_id( + all_student_artifact["artifacts"], workflow_run_id + ) + if not artifact_info: + gsheet.add_new_student_challenge_result( + user=student_github_connector.user, + result={}, + info="[ERROR]: Cannot find the artifact of Pytest result on GitHub user repository.", + ) + logging.error( + "[ERROR]: Cannot find the artifact of Pytest result on GitHub user repository." + ) + return None + + ### Read Artifact + artifact_resp = student_github_connector.get_artifact(artifact_info) + artifact = student_github_connector.get_artifact_from_format_bytes_zip(artifact_resp.content) + if not artifact: + gsheet.add_new_student_challenge_result( + user=student_github_connector.user, + result={}, + info="[ERROR]: Cannot read the artifact of Pytest result on GitHub user repository.", + ) + logging.error( + "[ERROR]: Cannot read the artifact of Pytest result on GitHub user repository." 
+ ) + return None + + return artifact + + +def get_challenge_information_from_path(path: str) -> Tuple[str, str, str]: + list_path_name = path.split(CHALLENGES_PATH)[1].split("/") + challenge_id = "-".join([name[0:2] for name in list_path_name if not ".py" in name]) + challenge_name = path[-1].split(".py")[0].split("test_")[1] + test_name = path[-1].split("::")[1] + return challenge_name, challenge_id, test_name + + +def parsing_challenge_results(results: List[Dict[str, Any]]) -> List[Dict[str, Any]]: + + # challenge_results = defaultdict(lambda: defaultdict(lambda: defaultdict(lambda: defaultdict(lambda: defaultdict(str))))) + challenge_results = [] + for test in results: + challenge_name, challenge_id, test_name = get_challenge_information_from_path( + test["nodeid"] + ) + # if challenge_id in challenges_ref and challenge_name in challenges_ref[challenge_id]['name']: + # challenge_results[challenge_id][challenge_name]["coef"] = challenges_ref[challenge_id]["coef"] + # challenge_results[challenge_id][challenge_name]["tests"][test_name]["result"] = 1 if test.pop("outcome") == "passed" else 0 + + # challenge_results[challenge_id][challenge_name]["tests"][test_name]["result"] = test.pop("outcome") + # challenge_results[challenge_id][challenge_name]["tests"][test_name]["setup"] = test["setup"] + # challenge_results[challenge_id][challenge_name]["tests"][test_name]["call"] = test["call"] + # challenge_results[challenge_id][challenge_name]["tests"][test_name]["teardown"] = test["teardown"] + info = {"setup": test["setup"], "call": test["call"], "teardown": test["teardown"]} + + challenge_results.append( + { + "id": challenge_id, + "script": challenge_name, + "test": test_name, + "info": info, + "result": test["outcome"], + "path": test["nodeid"], + } + ) + + # list_id = challenges_ref.keys() - challenge_results.keys() + # for challenge_id in list_id: + # challenge_results[challenge_id][challenge_name]["coef"] = challenges_ref[challenge_id]["coef"] + # breakpoint() + return challenge_results + + +def get_final_results_challenges(challenge_results: Any) -> float: + + final_results = 0.0 + total_test = 0 + + for challenge_id in challenge_results: + for challenge_info in challenge_results[challenge_id].values(): + total_test += challenge_info["coef"] + passed = sum(test["result"] for test in challenge_info["tests"].values()) / len( + challenge_info["tests"] + ) + passed = passed * challenge_info["coef"] + final_results += passed + + final_results /= total_test + return final_results + + +def send_student_challenge_results( + student_github_connector: GitHubConnector, gsheet: GSheet, payload: Dict[str, Any] +) -> None: + + ### Get student artifact + all_student_artifact = student_github_connector.get_all_artifacts() + if not all_student_artifact: + message = f"[ERROR]: Cannot get all artifact on repository {student_github_connector.REPO_NAME} of user {student_github_connector.user.LOGIN}." + if all_student_artifact["total_count"] == 0: + message = f"[ERROR]: No artifact on repository {student_github_connector.REPO_NAME} of user {student_github_connector.user.LOGIN}." 
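+        # Listing the artifacts failed or returned nothing: record the error row in the results worksheet, log it and stop here.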
+ gsheet.add_new_student_challenge_result( + user=student_github_connector.user, + result={}, + info=message, + ) + logging.error(message) + return + + artifact = get_student_artifact(student_github_connector, gsheet, all_student_artifact, payload) + # challenges_ref = gsheet.get_challenge_coef() + if not artifact: + # Logging error + return + + ### Parsing artifact / challenge results + challenge_results = parsing_challenge_results(artifact["tests"]) + ### Get final results of student challenge results + # final_result = get_final_results_challenges(challenge_results) + + # Get result + pytest_result = init_pytest_result_from_artifact(artifact) + ## Send Results to Google Sheet + gsheet.add_new_student_challenge_result( + user=student_github_connector.user, + result=pytest_result, + info="Result of student tests", + ) + + ### Delete artifact ? diff --git a/config/__init__.py b/github_tests_validator_app/config/__init__.py similarity index 100% rename from config/__init__.py rename to github_tests_validator_app/config/__init__.py diff --git a/github_tests_validator_app/config/config.py b/github_tests_validator_app/config/config.py new file mode 100644 index 0000000..36a0c59 --- /dev/null +++ b/github_tests_validator_app/config/config.py @@ -0,0 +1,49 @@ +from typing import Dict, List, cast + +import os + +from github import GithubIntegration + +### GitHub ### +GH_APP_ID = cast(str, os.getenv("GH_APP_ID")) +# APP_KEY = cast(str, os.getenv("GH_APP_KEY")) +GH_APP_KEY_PATH = os.getenv("GH_APP_KEY") +GH_APP_KEY = "" +if GH_APP_KEY_PATH: + with open(GH_APP_KEY_PATH) as f: + GH_APP_KEY = f.read() +GH_SOLUTION_TESTS_ACCESS_TOKEN = cast(str, os.getenv("SOLUTION_TESTS_ACCESS_TOKEN")) +GH_SOLUTION_OWNER = "artefactory-fr" +GH_SOLUTION_REPO_NAME = "school_of_data_tests" +GH_TESTS_FOLDER_NAME = "tests" +GH_API = "https://api.github.com/repos" +GH_ALL_ARTIFACT_ENDPOINT = "actions/artifacts" + +git_integration = GithubIntegration( + GH_APP_ID, + GH_APP_KEY, +) + +# Google Sheet +GSHEET_SA_JSON = cast(str, os.getenv("GSHEET_SA_JSON")) +GSHEET_SPREADSHEET_ID = "1tzn73q_QhZ2gLAmZObRsE_JmD6yD6433uZBGc-Llsdk" +GSHEET_WORKSHEET_STUDENT = "students" +GSHEET_WORKSHEET_CHECK_VALIDATION_REPO = "check_validation_repo" +GSHEET_WORKSHEET_STUDENT_CHALLENGE_RESULT = "student_challenge_results" +GSHEET_WORKSHEET_STUDENT_CHALLENGE_REF = "student_challenge_ref" + +# Others +default_message: Dict[str, Dict[str, str]] = { + "valid_repository": { + "True": "Your folder `Test` is valid", + "False": "Your folder `Test` has been modified and is no longer valid.", + } +} + +commit_sha_path: Dict[str, List[str]] = { + "pull_request": ["pull_request", "head", "ref"], + "pusher": ["ref"], + "workflow_job": [], +} + +CHALLENGES_PATH = "tests/tests/" diff --git a/github_tests_validator_app/lib/__init__.py b/github_tests_validator_app/lib/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/github_tests_validator_app/lib/connectors/github_connector.py b/github_tests_validator_app/lib/connectors/github_connector.py new file mode 100644 index 0000000..3c89330 --- /dev/null +++ b/github_tests_validator_app/lib/connectors/github_connector.py @@ -0,0 +1,110 @@ +from typing import Any, Dict, List, Union + +import io +import json +import logging +import zipfile + +import requests +from github import ContentFile, Github, Repository +from github_tests_validator_app.config.config import GH_ALL_ARTIFACT_ENDPOINT, GH_API +from github_tests_validator_app.lib.users import GitHubUser +from github_tests_validator_app.lib.utils import 
get_hash_files + + +class GitHubConnector: + def __init__(self, user: GitHubUser, repo_name: str, branch_name: str): + self.user = user + self.REPO_NAME = repo_name + self.BRANCH_NAME = branch_name + + logging.info(f"Connecting to Github with user {self.user.LOGIN} on repo: {repo_name} ...") + self.connector = Github(login_or_token=self.user.ACCESS_TOKEN, timeout=30) + self.repo = self.connector.get_repo(f"{self.user.LOGIN}/{repo_name}") + logging.info("Done.") + + def get_repo(self, repo_name: str) -> Repository.Repository: + self.REPO_NAME = repo_name + logging.info(f"Connecting to new repo: {repo_name} with user: {self.user.LOGIN} ...") + self.repo = self.connector.get_repo(f"{self.user.LOGIN}/{repo_name}") + logging.info("Done.") + return self.repo + + def get_last_hash_commit(self) -> str: + branch = self.repo.get_branch(self.BRANCH_NAME) + return branch.commit.sha + + def get_files_content(self, contents: Any) -> List[ContentFile.ContentFile]: + files_content = [] + while contents: + file_content = contents.pop(0) + if file_content.type == "dir": + contents.extend(self.repo.get_contents(file_content.path)) + else: + files_content.append(file_content) + return files_content + + def get_tests_hash(self, folder_name: str) -> str: + contents = self.repo.get_contents(folder_name) + files_content = self.get_files_content(contents) + hash = str(get_hash_files(files_content)) + return hash + + def get_all_artifacts(self) -> Union[requests.models.Response, Any]: + url = "/".join([GH_API, self.user.LOGIN, self.REPO_NAME, GH_ALL_ARTIFACT_ENDPOINT]) + headers = self._get_headers() + response = self._request_data(url, headers=headers) + return response + + def get_artifact_info_from_artifacts_with_worflow_run_id( + self, artifacts: List[Dict[str, Any]], worflow_run_id: int + ) -> Union[None, Dict[str, Any]]: + for artifact in artifacts: + if artifact["workflow_run"]["id"] == worflow_run_id: + return artifact + return None + + def get_artifact_from_format_bytes_zip(self, artifact_content: bytes) -> Any: + z = zipfile.ZipFile(io.BytesIO(artifact_content)) + f = z.read(z.namelist()[0]) + decode = f.decode("utf-8") + return json.loads(decode) + + def get_artifact(self, artifact_info: Dict[str, Any]) -> Union[requests.models.Response, Any]: + artifact_id = str(artifact_info["id"]) + archive_format = "zip" + url = "/".join( + [ + GH_API, + self.user.LOGIN, + self.REPO_NAME, + GH_ALL_ARTIFACT_ENDPOINT, + artifact_id, + archive_format, + ] + ) + headers = self._get_headers() + response = self._request_data(url, headers=headers, dict_format=False) + return response + + def _get_headers(self) -> Dict[str, str]: + if not self.user.ACCESS_TOKEN: + self.user.get_access_token(self.REPO_NAME) + + return { + "Accept": "application/vnd.github+json", + "Authorization": f"Bearer {self.user.ACCESS_TOKEN}", + } + + def _request_data( + self, + url: str, + headers: Dict[str, Any], + params: Union[Dict[str, Any], None] = None, + dict_format: Union[bool, None] = True, + ) -> Union[requests.models.Response, Any]: + response = requests.get(url, headers=headers, params=params) + response.raise_for_status() + if dict_format: + return response.json() + return response diff --git a/github_tests_validator_app/lib/connectors/google_sheet_connector.py b/github_tests_validator_app/lib/connectors/google_sheet_connector.py new file mode 100644 index 0000000..48d8913 --- /dev/null +++ b/github_tests_validator_app/lib/connectors/google_sheet_connector.py @@ -0,0 +1,100 @@ +from typing import Any, DefaultDict, List + +import logging 
+from collections import defaultdict
+
+import gspread
+from github_tests_validator_app.config.config import (
+    GSHEET_SA_JSON,
+    GSHEET_SPREADSHEET_ID,
+    GSHEET_WORKSHEET_CHECK_VALIDATION_REPO,
+    GSHEET_WORKSHEET_STUDENT,
+    GSHEET_WORKSHEET_STUDENT_CHALLENGE_REF,
+    GSHEET_WORKSHEET_STUDENT_CHALLENGE_RESULT,
+)
+from github_tests_validator_app.lib.pytest_result import PytestResult
+from github_tests_validator_app.lib.users import GitHubUser
+
+
+class GSheet:
+    def __init__(self):
+        logging.info("Connecting to Google Sheet API ...")
+        self.gs_client = gspread.service_account(filename=GSHEET_SA_JSON)
+        self.spreadsheet = self.gs_client.open_by_key(GSHEET_SPREADSHEET_ID)
+        logging.info("Done.")
+
+    def get_new_sheet(self, sheet_id: str) -> gspread.spreadsheet.Spreadsheet:
+        self.spreadsheet = self.gs_client.open_by_key(sheet_id)
+        return self.spreadsheet
+
+    def add_new_user_on_sheet(self, user: GitHubUser) -> None:
+        # Check whether the student worksheet exists
+        worksheet = self.spreadsheet.worksheet(GSHEET_WORKSHEET_STUDENT)
+
+        # Check if the user already exists
+        id_cell = worksheet.find(str(user.ID))
+        login_cell = worksheet.find(user.LOGIN)
+        if id_cell and login_cell and id_cell.row == login_cell.row:
+            logging.info("User already exists in the student worksheet.")
+        else:
+            logging.info(f"Adding new user {user.LOGIN} to the student worksheet ...")
+            headers = worksheet.row_values(1)
+            user_dict = user.__dict__
+            new_row = [
+                user_dict[header.upper()] if header.upper() in user_dict else None
+                for header in headers
+            ]
+            worksheet.append_row(new_row)
+            logging.info("Done.")
+
+    def add_new_repo_valid_result(self, user: GitHubUser, result: bool, info: str = "") -> None:
+        worksheet = self.spreadsheet.worksheet(GSHEET_WORKSHEET_CHECK_VALIDATION_REPO)
+        headers = worksheet.row_values(1)
+        user_dict = user.__dict__
+        new_row = list()
+        for header in headers:
+            if header == "is_valid":
+                new_row.append(str(result))
+            elif header == "user_id":
+                new_row.append(user.ID)
+            elif header == "info":
+                new_row.append(info)
+            elif header.upper() in user_dict:
+                new_row.append(user_dict[header.upper()])
+            else:
+                new_row.append("")
+        worksheet.append_row(new_row)
+
+    def add_new_student_challenge_result(
+        self, user: GitHubUser, result: PytestResult, info: str = ""
+    ) -> None:
+        worksheet = self.spreadsheet.worksheet(GSHEET_WORKSHEET_STUDENT_CHALLENGE_RESULT)
+        headers = worksheet.row_values(1)
+        user_dict = user.__dict__
+        result_dict = result.__dict__
+        new_row = list()
+        for header in headers:
+
+            if header.upper() in user_dict:
+                new_row.append(user_dict[header.upper()])
+            elif header.upper() in result_dict:
+                new_row.append(result_dict[header.upper()])
+            elif header == "info":
+                new_row.append(info)
+            else:
+                new_row.append("")
+        worksheet.append_row(new_row)
+
+    def get_challenge_coef(self) -> DefaultDict[str, DefaultDict[str, Any]]:
+
+        worksheet = self.spreadsheet.worksheet(GSHEET_WORKSHEET_STUDENT_CHALLENGE_REF)
+        dict_results = defaultdict(
+            lambda: defaultdict(list)
+        )  # type: DefaultDict[str, DefaultDict[str, Any]]
+        for row in worksheet.get_all_records():
+            id = row.pop("id")
+            # Group the challenge name and its remaining columns (coefficients) by challenge id
+            dict_results[id]["name"].append(row.pop("challenge_name"))
+            dict_results[id] = defaultdict(defaultdict, {**dict_results[id], **row})
+
+        return dict_results
diff --git a/github_tests_validator_app/lib/pytest_result.py b/github_tests_validator_app/lib/pytest_result.py
new file mode 100644
index 0000000..ab1a8e0
--- /dev/null
+++ b/github_tests_validator_app/lib/pytest_result.py
@@ -0,0 +1,14 @@
+from typing import Any,
Dict, Union + +from dataclasses import dataclass + + +@dataclass +class PytestResult: + + DURATION: float + TOTAL_TESTS_COLLECTED: int + TOTAL_PASSED: int + TOTAL_FAILED: int + DESCRIPTION_TEST_RESULTS: Dict[str, Any] + RESULT: Union[float, None] = None diff --git a/lib/user.py b/github_tests_validator_app/lib/users.py similarity index 85% rename from lib/user.py rename to github_tests_validator_app/lib/users.py index a012a5e..8e0f4c1 100644 --- a/lib/user.py +++ b/github_tests_validator_app/lib/users.py @@ -3,13 +3,13 @@ from dataclasses import dataclass from datetime import datetime -from config.config import git_integration +from github_tests_validator_app.config.config import git_integration @dataclass class GitHubUser: - LOGIN: str + LOGIN: str = "" URL: str = "" ID: str = "" ACCESS_TOKEN: Union[str, None] = None diff --git a/github_tests_validator_app/lib/utils.py b/github_tests_validator_app/lib/utils.py new file mode 100644 index 0000000..b1cc55b --- /dev/null +++ b/github_tests_validator_app/lib/utils.py @@ -0,0 +1,26 @@ +from typing import Any, Dict, List, Union + +import hashlib + +from github import ContentFile +from github_tests_validator_app.lib.users import GitHubUser + + +def get_hash_files(contents: List[ContentFile.ContentFile]) -> str: + hash_sum = "" + for content in contents: + hash_sum += content.sha + hash = hashlib.sha256() + hash.update(hash_sum.encode()) + return str(hash.hexdigest()) + + +def init_github_user_from_github_event(data: Dict[str, Any]) -> Union[GitHubUser, None]: + + if not "repository" in data or not "owner" in data["repository"]: + return None + + login = data["repository"]["owner"].get("login", None) + id = data["repository"]["owner"].get("id", None) + url = data["repository"]["owner"].get("url", None) + return GitHubUser(LOGIN=login, ID=id, URL=url) diff --git a/lib/connectors/github_connector.py b/lib/connectors/github_connector.py deleted file mode 100644 index be16d3d..0000000 --- a/lib/connectors/github_connector.py +++ /dev/null @@ -1,46 +0,0 @@ -from typing import Any, List - -import logging - -from github import ContentFile, Github, Repository -from lib.user import GitHubUser -from lib.utils import get_hash_files - - -class GitHubConnector: - def __init__(self, user: GitHubUser, repo_name: str, branch_name: str): - self.user = user - self.REPO_NAME = repo_name - self.BRANCH_NAME = branch_name - - logging.info(f"Connecting to Github with user {self.user.LOGIN} on repo: {repo_name} ...") - self.connector = Github(login_or_token=self.user.ACCESS_TOKEN, timeout=30) - self.repo = self.connector.get_repo(f"{self.user.LOGIN}/{repo_name}") - logging.info("Done.") - - def get_repo(self, repo_name: str) -> Repository.Repository: - self.REPO_NAME = repo_name - logging.info(f"Connecting to new repo: {repo_name} with user: {self.user.LOGIN} ...") - self.repo = self.connector.get_repo(f"{self.user.LOGIN}/{repo_name}") - logging.info("Done.") - return self.repo - - def get_last_hash_commit(self) -> str: - branch = self.repo.get_branch(self.BRANCH_NAME) - return branch.commit.sha - - def get_files_content(self, contents: Any) -> List[ContentFile.ContentFile]: - files_content = [] - while contents: - file_content = contents.pop(0) - if file_content.type == "dir": - contents.extend(self.repo.get_contents(file_content.path)) - else: - files_content.append(file_content) - return files_content - - def get_tests_hash(self, folder_name: str) -> str: - contents = self.repo.get_contents(folder_name) - files_content = self.get_files_content(contents) - hash = 
str(get_hash_files(files_content)) - return hash diff --git a/lib/connectors/google_sheet_connector.py b/lib/connectors/google_sheet_connector.py deleted file mode 100644 index 203cfeb..0000000 --- a/lib/connectors/google_sheet_connector.py +++ /dev/null @@ -1,59 +0,0 @@ -import logging - -import gspread -from config.config import GSHEET_SA_JSON, GSHEET_WORKSHEET_ID -from lib.user import GitHubUser - - -class GSheet: - def __init__(self): - logging.info(f"Connecting to Google Sheet API ...") - self.gs_client = gspread.service_account(filename=GSHEET_SA_JSON) - self.spreadsheet = self.gs_client.open_by_key(GSHEET_WORKSHEET_ID) - logging.info("Done.") - - def get_new_sheet(self, sheet_id: str) -> gspread.spreadsheet.Spreadsheet: - self.spreadsheet = self.gs_client.open_by_key(sheet_id) - return self.spreadsheet - - def add_new_user_on_sheet(self, user: GitHubUser) -> None: - # Controle the workseet exist of not - worksheet = self.spreadsheet.worksheet("students") - - # Check is user exist - id_cell = worksheet.find(user.ID) - login_cell = worksheet.find(user.LOGIN) - if id_cell and login_cell and id_cell.row == login_cell.row: - logging.info("User already exist in student worksheet.") - else: - logging.info(f"Add new user {user.LOGIN} in student worksheet ...") - headers = worksheet.row_values(1) - user_dict = user.__dict__ - new_row = [ - user_dict[header.upper()] if header.upper() in user_dict else None - for header in headers - ] - worksheet.append_row(new_row) - logging.info("Done.") - - def add_new_repo_valid_result( - self, user: GitHubUser, action: str, result: bool, info: str = "" - ) -> None: - worksheet = self.spreadsheet.worksheet("check_validation_repo") - headers = worksheet.row_values(1) - user_dict = user.__dict__ - new_row = list() - for header in headers: - if header == "is_valid": - new_row.append(str(result)) - elif header == "action": - new_row.append(action) - elif header == "user_id": - new_row.append(user.ID) - elif header == "info": - new_row.append(info) - elif header.upper() in user_dict: - new_row.append(user_dict[header.upper()]) - else: - new_row.append("") - worksheet.append_row(new_row) diff --git a/lib/utils.py b/lib/utils.py deleted file mode 100644 index 15a7793..0000000 --- a/lib/utils.py +++ /dev/null @@ -1,22 +0,0 @@ -from typing import Any, Dict, List - -import hashlib - -from github import ContentFile -from lib.user import GitHubUser - - -def get_hash_files(contents: List[ContentFile.ContentFile]) -> str: - hash_sum = "" - for content in contents: - hash_sum += content.sha - hash = hashlib.sha256() - hash.update(hash_sum.encode()) - return str(hash.hexdigest()) - - -def get_github_user(data: Dict[str, Any]) -> GitHubUser: - login = data["repository"]["owner"]["login"] - id = data["repository"]["owner"]["id"] - url = data["repository"]["owner"]["url"] - return GitHubUser(LOGIN=login, ID=str(id), URL=url) diff --git a/server.py b/server.py index c64cb9e..5a4f9ad 100644 --- a/server.py +++ b/server.py @@ -1,8 +1,8 @@ from typing import Any import uvicorn -from bin.validator import validator from fastapi import FastAPI, Request +from github_tests_validator_app.bin.github_event_process import validator app = FastAPI() diff --git a/tests/units/test_github_repo_validation.py b/tests/units/test_github_repo_validation.py index dd3a2a1..62592b6 100644 --- a/tests/units/test_github_repo_validation.py +++ b/tests/units/test_github_repo_validation.py @@ -1,6 +1,5 @@ import pytest -from github_tests_validator_app.github_repo_validation import get_event, 
get_student_branch
-from lib.user import GitHubUser
+from github_tests_validator_app.bin.github_repo_validation import get_event, get_student_branch
 
 
 @pytest.mark.parametrize(
diff --git a/tests/units/test_utils.py b/tests/units/test_utils.py
index b5dd8da..0b6e8b7 100644
--- a/tests/units/test_utils.py
+++ b/tests/units/test_utils.py
@@ -2,8 +2,8 @@
 import pytest
 from github import ContentFile
 
-from lib.user import GitHubUser
-from lib.utils import get_hash_files, init_github_user_from_github_event
+from github_tests_validator_app.lib.users import GitHubUser
+from github_tests_validator_app.lib.utils import get_hash_files, init_github_user_from_github_event
 
 
 @pytest.mark.parametrize(
@@ -18,12 +18,14 @@
     ],
 )
 def test_get_hast_files(mocker, contents, expected):
+    mocker.patch("github.ContentFile.ContentFile.__init__", return_value=None)
     mocker.patch(
         "github.ContentFile.ContentFile.sha",
         new_callable=PropertyMock,
         side_effect=contents,
     )
-    assert get_hash_files([ContentFile.ContentFile for _ in contents]) == expected
+    contents = [ContentFile.ContentFile for _ in contents]
+    assert get_hash_files(contents) == expected
 
 
 @pytest.mark.parametrize(
@@ -34,8 +36,8 @@ def test_get_hast_files(mocker, contents, expected):
             GitHubUser(LOGIN="test", ID="1234", URL="url"),
         ),
         (
-            {"repository": {"owner": {"login": None, "id": None, "url": None}}},
-            GitHubUser(LOGIN=None, ID=None, URL=None),
+            {"repository": {"owner": {"login": "", "id": "", "url": ""}}},
+            GitHubUser(LOGIN="", ID="", URL=""),
         ),
         ({}, None),
     ],

From 36e18a5b417385fbb83e7dcc7a076df6e748ee57 Mon Sep 17 00:00:00 2001
From: bruno zheng
Date: Tue, 6 Sep 2022 14:33:45 +0200
Subject: [PATCH 13/13] fix: update packages, new types-requests and pytest-mock packages

---
 poetry.lock    | 433 +++++++++++++++++++++++++++----------------------
 pyproject.toml |   2 +
 2 files changed, 238 insertions(+), 197 deletions(-)

diff --git a/poetry.lock b/poetry.lock
index e1f9e90..8559989 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -17,38 +17,33 @@ trio = ["trio (>=0.16)"]
 
 [[package]]
 name = "astroid"
-version = "2.11.7"
+version = "2.12.7"
 description = "An abstract syntax tree for Python with inference support."
 category = "dev"
 optional = false
-python-versions = ">=3.6.2"
+python-versions = ">=3.7.2"
 
 [package.dependencies]
 lazy-object-proxy = ">=1.4.0"
 typing-extensions = {version = ">=3.10", markers = "python_version < \"3.10\""}
-wrapt = ">=1.11,<2"
-
-[[package]]
-name = "atomicwrites"
-version = "1.4.1"
-description = "Atomic file writes."
-category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +wrapt = [ + {version = ">=1.11,<2", markers = "python_version < \"3.11\""}, + {version = ">=1.14,<2", markers = "python_version >= \"3.11\""}, +] [[package]] name = "attrs" -version = "21.4.0" +version = "22.1.0" description = "Classes Without Boilerplate" -category = "dev" +category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.5" [package.extras] -dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six", "sphinx", "sphinx-notfound-page", "zope.interface"] +dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"] docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] -tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six", "zope.interface"] -tests_no_zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six"] +tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"] +tests_no_zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"] [[package]] name = "bandit" @@ -71,7 +66,7 @@ yaml = ["pyyaml"] [[package]] name = "black" -version = "22.6.0" +version = "22.8.0" description = "The uncompromising code formatter." category = "dev" optional = false @@ -128,7 +123,7 @@ python-versions = ">=3.6.1" [[package]] name = "charset-normalizer" -version = "2.1.0" +version = "2.1.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
category = "main" optional = false @@ -221,7 +216,7 @@ graph = ["objgraph (>=1.7.2)"] [[package]] name = "distlib" -version = "0.3.4" +version = "0.3.6" description = "Distribution utilities" category = "dev" optional = false @@ -229,7 +224,7 @@ python-versions = "*" [[package]] name = "dparse" -version = "0.5.1" +version = "0.5.2" description = "A parser for Python dependency files" category = "dev" optional = false @@ -237,15 +232,15 @@ python-versions = ">=3.5" [package.dependencies] packaging = "*" -pyyaml = "*" toml = "*" [package.extras] +conda = ["pyyaml"] pipenv = ["pipenv"] [[package]] name = "fastapi" -version = "0.78.0" +version = "0.82.0" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" category = "main" optional = false @@ -257,21 +252,21 @@ starlette = "0.19.1" [package.extras] all = ["email_validator (>=1.1.1,<2.0.0)", "itsdangerous (>=1.1.0,<3.0.0)", "jinja2 (>=2.11.2,<4.0.0)", "orjson (>=3.2.1,<4.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "pyyaml (>=5.3.1,<7.0.0)", "requests (>=2.24.0,<3.0.0)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,<6.0.0)", "uvicorn[standard] (>=0.12.0,<0.18.0)"] -dev = ["autoflake (>=1.4.0,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "passlib[bcrypt] (>=1.7.2,<2.0.0)", "pre-commit (>=2.17.0,<3.0.0)", "python-jose[cryptography] (>=3.3.0,<4.0.0)", "uvicorn[standard] (>=0.12.0,<0.18.0)"] +dev = ["autoflake (>=1.4.0,<2.0.0)", "flake8 (>=3.8.3,<6.0.0)", "passlib[bcrypt] (>=1.7.2,<2.0.0)", "pre-commit (>=2.17.0,<3.0.0)", "python-jose[cryptography] (>=3.3.0,<4.0.0)", "uvicorn[standard] (>=0.12.0,<0.18.0)"] doc = ["mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-markdownextradata-plugin (>=0.1.7,<0.3.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pyyaml (>=5.3.1,<7.0.0)", "typer (>=0.4.1,<0.5.0)"] -test = ["anyio[trio] (>=3.2.1,<4.0.0)", "black (==22.3.0)", "databases[sqlite] (>=0.3.2,<0.6.0)", "email_validator (>=1.1.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "flask (>=1.1.2,<3.0.0)", "httpx (>=0.14.0,<0.19.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.910)", "orjson (>=3.2.1,<4.0.0)", "peewee (>=3.13.3,<4.0.0)", "pytest (>=6.2.4,<7.0.0)", "pytest-cov (>=2.12.0,<4.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "requests (>=2.24.0,<3.0.0)", "sqlalchemy (>=1.3.18,<1.5.0)", "types-dataclasses (==0.6.5)", "types-orjson (==3.6.2)", "types-ujson (==4.2.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,<6.0.0)"] +test = ["anyio[trio] (>=3.2.1,<4.0.0)", "black (==22.3.0)", "databases[sqlite] (>=0.3.2,<0.6.0)", "email_validator (>=1.1.1,<2.0.0)", "flake8 (>=3.8.3,<6.0.0)", "flask (>=1.1.2,<3.0.0)", "httpx (>=0.14.0,<0.19.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.910)", "orjson (>=3.2.1,<4.0.0)", "peewee (>=3.13.3,<4.0.0)", "pytest (>=6.2.4,<7.0.0)", "pytest-cov (>=2.12.0,<4.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "requests (>=2.24.0,<3.0.0)", "sqlalchemy (>=1.3.18,<1.5.0)", "types-dataclasses (==0.6.5)", "types-orjson (==3.6.2)", "types-ujson (==4.2.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,<6.0.0)"] [[package]] name = "filelock" -version = "3.7.1" +version = "3.8.0" description = "A platform independent file lock." 
category = "dev" optional = false python-versions = ">=3.7" [package.extras] -docs = ["furo (>=2021.8.17b43)", "sphinx (>=4.1)", "sphinx-autodoc-typehints (>=1.12)"] -testing = ["covdefaults (>=1.2.0)", "coverage (>=4)", "pytest (>=4)", "pytest-cov", "pytest-timeout (>=1.4.2)"] +docs = ["furo (>=2022.6.21)", "sphinx (>=5.1.1)", "sphinx-autodoc-typehints (>=1.19.1)"] +testing = ["covdefaults (>=2.2)", "coverage (>=6.4.2)", "pytest (>=7.1.2)", "pytest-cov (>=3)", "pytest-timeout (>=2.1)"] [[package]] name = "gitdb" @@ -332,7 +327,7 @@ tool = ["click (>=6.0.0)"] [[package]] name = "gspread" -version = "5.4.0" +version = "5.5.0" description = "Google Spreadsheets Python API" category = "main" optional = false @@ -352,7 +347,7 @@ python-versions = ">=3.6" [[package]] name = "identify" -version = "2.5.1" +version = "2.5.5" description = "File identification library for Python" category = "dev" optional = false @@ -373,7 +368,7 @@ python-versions = ">=3.5" name = "iniconfig" version = "1.1.1" description = "iniconfig: brain-dead simple config-ini parsing" -category = "dev" +category = "main" optional = false python-versions = "*" @@ -409,7 +404,7 @@ python-versions = ">=3.6" [[package]] name = "mypy" -version = "0.961" +version = "0.971" description = "Optional static typing for Python" category = "dev" optional = false @@ -458,7 +453,7 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] name = "packaging" version = "21.3" description = "Core utilities for Python packages" -category = "dev" +category = "main" optional = false python-versions = ">=3.6" @@ -467,15 +462,15 @@ pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" [[package]] name = "pathspec" -version = "0.9.0" +version = "0.10.1" description = "Utility library for gitignore style pattern matching of file paths." category = "dev" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +python-versions = ">=3.7" [[package]] name = "pbr" -version = "5.9.0" +version = "5.10.0" description = "Python Build Reasonableness" category = "dev" optional = false @@ -497,7 +492,7 @@ test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock name = "pluggy" version = "1.0.0" description = "plugin and hook calling mechanisms for python" -category = "dev" +category = "main" optional = false python-versions = ">=3.6" @@ -525,7 +520,7 @@ virtualenv = ">=20.0.8" name = "py" version = "1.11.0" description = "library with cross-python path, ini-parsing, io, code, log facilities" -category = "dev" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" @@ -558,14 +553,14 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pydantic" -version = "1.9.1" +version = "1.10.2" description = "Data validation and settings management using python type hints" category = "main" optional = false -python-versions = ">=3.6.1" +python-versions = ">=3.7" [package.dependencies] -typing-extensions = ">=3.7.4.3" +typing-extensions = ">=4.1.0" [package.extras] dotenv = ["python-dotenv (>=0.10.4)"] @@ -604,12 +599,15 @@ integrations = ["cryptography"] [[package]] name = "pygments" -version = "2.12.0" +version = "2.13.0" description = "Pygments is a syntax highlighting package written in Python." 
category = "main" optional = false python-versions = ">=3.6" +[package.extras] +plugins = ["importlib-metadata"] + [[package]] name = "pyjwt" version = "2.4.0" @@ -626,14 +624,14 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] [[package]] name = "pylint" -version = "2.14.4" +version = "2.15.0" description = "python code static checker" category = "dev" optional = false python-versions = ">=3.7.2" [package.dependencies] -astroid = ">=2.11.6,<=2.12.0-dev0" +astroid = ">=2.12.4,<=2.14.0-dev0" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} dill = ">=0.2" isort = ">=4.2.5,<6" @@ -666,7 +664,7 @@ tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] name = "pyparsing" version = "3.0.9" description = "pyparsing module - Classes and methods to define and execute parsing grammars" -category = "dev" +category = "main" optional = false python-versions = ">=3.6.8" @@ -675,14 +673,13 @@ diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pytest" -version = "7.1.2" +version = "7.1.3" description = "pytest: simple powerful testing with Python" -category = "dev" +category = "main" optional = false python-versions = ">=3.7" [package.dependencies] -atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} attrs = ">=19.2.0" colorama = {version = "*", markers = "sys_platform == \"win32\""} iniconfig = "*" @@ -694,9 +691,23 @@ tomli = ">=1.0.0" [package.extras] testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] +[[package]] +name = "pytest-mock" +version = "3.8.2" +description = "Thin-wrapper around the mock package for easier use with pytest" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +pytest = ">=5.0" + +[package.extras] +dev = ["pre-commit", "pytest-asyncio", "tox"] + [[package]] name = "pyupgrade" -version = "2.37.1" +version = "2.37.3" description = "A tool to automatically upgrade syntax for newer versions." category = "dev" optional = false @@ -797,7 +808,7 @@ python-versions = ">=3.5" [[package]] name = "safety" -version = "2.0.0" +version = "2.1.1" description = "Checks installed dependencies for known vulnerabilities and licenses." 
category = "dev" optional = false @@ -812,11 +823,11 @@ requests = "*" [[package]] name = "shellingham" -version = "1.4.0" +version = "1.5.0" description = "Tool to Detect Surrounding Shell" category = "main" optional = false -python-versions = "!=3.0,!=3.1,!=3.2,!=3.3,>=2.6" +python-versions = ">=3.4" [[package]] name = "six" @@ -836,11 +847,11 @@ python-versions = ">=3.6" [[package]] name = "sniffio" -version = "1.2.0" +version = "1.3.0" description = "Sniff out which async library your code is running under" category = "main" optional = false -python-versions = ">=3.5" +python-versions = ">=3.7" [[package]] name = "snowballstemmer" @@ -896,13 +907,13 @@ python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" name = "tomli" version = "2.0.1" description = "A lil' TOML parser" -category = "dev" +category = "main" optional = false python-versions = ">=3.7" [[package]] name = "tomlkit" -version = "0.11.1" +version = "0.11.4" description = "Style preserving TOML library" category = "dev" optional = false @@ -928,6 +939,25 @@ dev = ["autoflake (>=1.3.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "pre-commit (>=2 doc = ["mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)"] test = ["black (>=22.3.0,<23.0.0)", "coverage (>=5.2,<6.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.910)", "pytest (>=4.4.0,<5.4.0)", "pytest-cov (>=2.10.0,<3.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<2.0.0)", "rich (>=10.11.0,<13.0.0)", "shellingham (>=1.3.0,<2.0.0)"] +[[package]] +name = "types-requests" +version = "2.28.9" +description = "Typing stubs for requests" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +types-urllib3 = "<1.27" + +[[package]] +name = "types-urllib3" +version = "1.26.23" +description = "Typing stubs for urllib3" +category = "main" +optional = false +python-versions = "*" + [[package]] name = "typing-extensions" version = "4.3.0" @@ -938,7 +968,7 @@ python-versions = ">=3.7" [[package]] name = "urllib3" -version = "1.26.10" +version = "1.26.12" description = "HTTP library with thread-safe connection pooling, file post, and more." category = "main" optional = false @@ -946,12 +976,12 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, [package.extras] brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "uvicorn" -version = "0.18.2" +version = "0.18.3" description = "The lightning-fast ASGI server." 
category = "main" optional = false @@ -962,25 +992,24 @@ click = ">=7.0" h11 = ">=0.8" [package.extras] -standard = ["PyYAML (>=5.1)", "colorama (>=0.4)", "httptools (>=0.4.0)", "python-dotenv (>=0.13)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.0)"] +standard = ["colorama (>=0.4)", "httptools (>=0.4.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.0)"] [[package]] name = "virtualenv" -version = "20.15.1" +version = "20.16.4" description = "Virtual Python Environment builder" category = "dev" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +python-versions = ">=3.6" [package.dependencies] -distlib = ">=0.3.1,<1" -filelock = ">=3.2,<4" -platformdirs = ">=2,<3" -six = ">=1.9.0,<2" +distlib = ">=0.3.5,<1" +filelock = ">=3.4.1,<4" +platformdirs = ">=2.4,<3" [package.extras] -docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=21.3)"] -testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "packaging (>=20.0)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)"] +docs = ["proselint (>=0.13)", "sphinx (>=5.1.1)", "sphinx-argparse (>=0.3.1)", "sphinx-rtd-theme (>=1)", "towncrier (>=21.9)"] +testing = ["coverage (>=6.2)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=21.3)", "pytest (>=7.0.1)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.6.1)", "pytest-randomly (>=3.10.3)", "pytest-timeout (>=2.1)"] [[package]] name = "wrapt" @@ -993,7 +1022,7 @@ python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" [metadata] lock-version = "1.1" python-versions = "^3.9" -content-hash = "f4e2e123e74e110d6859d6225270b87eff885fb3ea4e21bb3f58a39fc57488e1" +content-hash = "11f574272877e99f874c61dc9e0e01feb578098d3d2addaeb133e8006b4b31a8" [metadata.files] anyio = [ @@ -1001,44 +1030,41 @@ anyio = [ {file = "anyio-3.6.1.tar.gz", hash = "sha256:413adf95f93886e442aea925f3ee43baa5a765a64a0f52c6081894f9992fdd0b"}, ] astroid = [ - {file = "astroid-2.11.7-py3-none-any.whl", hash = "sha256:86b0a340a512c65abf4368b80252754cda17c02cdbbd3f587dddf98112233e7b"}, - {file = "astroid-2.11.7.tar.gz", hash = "sha256:bb24615c77f4837c707669d16907331374ae8a964650a66999da3f5ca68dc946"}, -] -atomicwrites = [ - {file = "atomicwrites-1.4.1.tar.gz", hash = "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11"}, + {file = "astroid-2.12.7-py3-none-any.whl", hash = "sha256:9b408d5d540387a74ca5405a5197aa24fbf9178b4019b16b3e532fbdf0e467cc"}, + {file = "astroid-2.12.7.tar.gz", hash = "sha256:cd468be9d9d03d086d4d7e6643a59bfc025762d2c895e1e22cf21feced7bb148"}, ] attrs = [ - {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, - {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, + {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, + {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, ] bandit = [ {file = "bandit-1.7.4-py3-none-any.whl", hash = "sha256:412d3f259dab4077d0e7f0c11f50f650cc7d10db905d98f6520a95a18049658a"}, {file = "bandit-1.7.4.tar.gz", hash = 
"sha256:2d63a8c573417bae338962d4b9b06fbc6080f74ecd955a092849e1e65c717bd2"}, ] black = [ - {file = "black-22.6.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f586c26118bc6e714ec58c09df0157fe2d9ee195c764f630eb0d8e7ccce72e69"}, - {file = "black-22.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b270a168d69edb8b7ed32c193ef10fd27844e5c60852039599f9184460ce0807"}, - {file = "black-22.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6797f58943fceb1c461fb572edbe828d811e719c24e03375fd25170ada53825e"}, - {file = "black-22.6.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c85928b9d5f83b23cee7d0efcb310172412fbf7cb9d9ce963bd67fd141781def"}, - {file = "black-22.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:f6fe02afde060bbeef044af7996f335fbe90b039ccf3f5eb8f16df8b20f77666"}, - {file = "black-22.6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cfaf3895a9634e882bf9d2363fed5af8888802d670f58b279b0bece00e9a872d"}, - {file = "black-22.6.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94783f636bca89f11eb5d50437e8e17fbc6a929a628d82304c80fa9cd945f256"}, - {file = "black-22.6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:2ea29072e954a4d55a2ff58971b83365eba5d3d357352a07a7a4df0d95f51c78"}, - {file = "black-22.6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e439798f819d49ba1c0bd9664427a05aab79bfba777a6db94fd4e56fae0cb849"}, - {file = "black-22.6.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:187d96c5e713f441a5829e77120c269b6514418f4513a390b0499b0987f2ff1c"}, - {file = "black-22.6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:074458dc2f6e0d3dab7928d4417bb6957bb834434516f21514138437accdbe90"}, - {file = "black-22.6.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a218d7e5856f91d20f04e931b6f16d15356db1c846ee55f01bac297a705ca24f"}, - {file = "black-22.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:568ac3c465b1c8b34b61cd7a4e349e93f91abf0f9371eda1cf87194663ab684e"}, - {file = "black-22.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6c1734ab264b8f7929cef8ae5f900b85d579e6cbfde09d7387da8f04771b51c6"}, - {file = "black-22.6.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9a3ac16efe9ec7d7381ddebcc022119794872abce99475345c5a61aa18c45ad"}, - {file = "black-22.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:b9fd45787ba8aa3f5e0a0a98920c1012c884622c6c920dbe98dbd05bc7c70fbf"}, - {file = "black-22.6.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7ba9be198ecca5031cd78745780d65a3f75a34b2ff9be5837045dce55db83d1c"}, - {file = "black-22.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a3db5b6409b96d9bd543323b23ef32a1a2b06416d525d27e0f67e74f1446c8f2"}, - {file = "black-22.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:560558527e52ce8afba936fcce93a7411ab40c7d5fe8c2463e279e843c0328ee"}, - {file = "black-22.6.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b154e6bbde1e79ea3260c4b40c0b7b3109ffcdf7bc4ebf8859169a6af72cd70b"}, - {file = "black-22.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:4af5bc0e1f96be5ae9bd7aaec219c901a94d6caa2484c21983d043371c733fc4"}, - {file = "black-22.6.0-py3-none-any.whl", hash = "sha256:ac609cf8ef5e7115ddd07d85d988d074ed00e10fbc3445aee393e70164a2219c"}, - {file = "black-22.6.0.tar.gz", hash = "sha256:6c6d39e28aed379aec40da1c65434c77d75e65bb59a1e1c283de545fb4e7c6c9"}, + {file = "black-22.8.0-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:ce957f1d6b78a8a231b18e0dd2d94a33d2ba738cd88a7fe64f53f659eea49fdd"}, + {file = "black-22.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5107ea36b2b61917956d018bd25129baf9ad1125e39324a9b18248d362156a27"}, + {file = "black-22.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e8166b7bfe5dcb56d325385bd1d1e0f635f24aae14b3ae437102dedc0c186747"}, + {file = "black-22.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd82842bb272297503cbec1a2600b6bfb338dae017186f8f215c8958f8acf869"}, + {file = "black-22.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:d839150f61d09e7217f52917259831fe2b689f5c8e5e32611736351b89bb2a90"}, + {file = "black-22.8.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a05da0430bd5ced89176db098567973be52ce175a55677436a271102d7eaa3fe"}, + {file = "black-22.8.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a098a69a02596e1f2a58a2a1c8d5a05d5a74461af552b371e82f9fa4ada8342"}, + {file = "black-22.8.0-cp36-cp36m-win_amd64.whl", hash = "sha256:5594efbdc35426e35a7defa1ea1a1cb97c7dbd34c0e49af7fb593a36bd45edab"}, + {file = "black-22.8.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a983526af1bea1e4cf6768e649990f28ee4f4137266921c2c3cee8116ae42ec3"}, + {file = "black-22.8.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b2c25f8dea5e8444bdc6788a2f543e1fb01494e144480bc17f806178378005e"}, + {file = "black-22.8.0-cp37-cp37m-win_amd64.whl", hash = "sha256:78dd85caaab7c3153054756b9fe8c611efa63d9e7aecfa33e533060cb14b6d16"}, + {file = "black-22.8.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:cea1b2542d4e2c02c332e83150e41e3ca80dc0fb8de20df3c5e98e242156222c"}, + {file = "black-22.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5b879eb439094751185d1cfdca43023bc6786bd3c60372462b6f051efa6281a5"}, + {file = "black-22.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0a12e4e1353819af41df998b02c6742643cfef58282915f781d0e4dd7a200411"}, + {file = "black-22.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3a73f66b6d5ba7288cd5d6dad9b4c9b43f4e8a4b789a94bf5abfb878c663eb3"}, + {file = "black-22.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:e981e20ec152dfb3e77418fb616077937378b322d7b26aa1ff87717fb18b4875"}, + {file = "black-22.8.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8ce13ffed7e66dda0da3e0b2eb1bdfc83f5812f66e09aca2b0978593ed636b6c"}, + {file = "black-22.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:32a4b17f644fc288c6ee2bafdf5e3b045f4eff84693ac069d87b1a347d861497"}, + {file = "black-22.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0ad827325a3a634bae88ae7747db1a395d5ee02cf05d9aa7a9bd77dfb10e940c"}, + {file = "black-22.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53198e28a1fb865e9fe97f88220da2e44df6da82b18833b588b1883b16bb5d41"}, + {file = "black-22.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:bc4d4123830a2d190e9cc42a2e43570f82ace35c3aeb26a512a2102bce5af7ec"}, + {file = "black-22.8.0-py3-none-any.whl", hash = "sha256:d2c21d439b2baf7aa80d6dd4e3659259be64c6f49dfd0f32091063db0e006db4"}, + {file = "black-22.8.0.tar.gz", hash = "sha256:792f7eb540ba9a17e8656538701d3eb1afcb134e3b45b71f20b25c77a8db7e6e"}, ] cachetools = [ {file = "cachetools-5.2.0-py3-none-any.whl", hash = "sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db"}, @@ -1119,8 +1145,8 @@ cfgv = [ {file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"}, ] 
charset-normalizer = [ - {file = "charset-normalizer-2.1.0.tar.gz", hash = "sha256:575e708016ff3a5e3681541cb9d79312c416835686d054a23accb873b254f413"}, - {file = "charset_normalizer-2.1.0-py3-none-any.whl", hash = "sha256:5189b6f22b01957427f35b6a08d9a0bc45b46d3788ef5a92e978433c7a35f8a5"}, + {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"}, + {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"}, ] click = [ {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, @@ -1171,20 +1197,20 @@ dill = [ {file = "dill-0.3.5.1.tar.gz", hash = "sha256:d75e41f3eff1eee599d738e76ba8f4ad98ea229db8b085318aa2b3333a208c86"}, ] distlib = [ - {file = "distlib-0.3.4-py2.py3-none-any.whl", hash = "sha256:6564fe0a8f51e734df6333d08b8b94d4ea8ee6b99b5ed50613f731fd4089f34b"}, - {file = "distlib-0.3.4.zip", hash = "sha256:e4b58818180336dc9c529bfb9a0b58728ffc09ad92027a3f30b7cd91e3458579"}, + {file = "distlib-0.3.6-py2.py3-none-any.whl", hash = "sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e"}, + {file = "distlib-0.3.6.tar.gz", hash = "sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46"}, ] dparse = [ - {file = "dparse-0.5.1-py3-none-any.whl", hash = "sha256:e953a25e44ebb60a5c6efc2add4420c177f1d8404509da88da9729202f306994"}, - {file = "dparse-0.5.1.tar.gz", hash = "sha256:a1b5f169102e1c894f9a7d5ccf6f9402a836a5d24be80a986c7ce9eaed78f367"}, + {file = "dparse-0.5.2-py3-none-any.whl", hash = "sha256:b1514fb08895d85b18d4eba3b1b7025ff9e6ea07286282021e19def872129975"}, + {file = "dparse-0.5.2.tar.gz", hash = "sha256:c348994a1f41c85f664d8f5a47442647bc4e22c5af5b1b26ef29aff0fa5dddcd"}, ] fastapi = [ - {file = "fastapi-0.78.0-py3-none-any.whl", hash = "sha256:15fcabd5c78c266fa7ae7d8de9b384bfc2375ee0503463a6febbe3bab69d6f65"}, - {file = "fastapi-0.78.0.tar.gz", hash = "sha256:3233d4a789ba018578658e2af1a4bb5e38bdd122ff722b313666a9b2c6786a83"}, + {file = "fastapi-0.82.0-py3-none-any.whl", hash = "sha256:a4269329a7374c78f6e92c195d14cc4ce3a525e25b79e62edf2df8196469743f"}, + {file = "fastapi-0.82.0.tar.gz", hash = "sha256:5ee7b7473a55940a18d4869ff57d29c372363bf8d3033a0e660a8cf38b1d3d9e"}, ] filelock = [ - {file = "filelock-3.7.1-py3-none-any.whl", hash = "sha256:37def7b658813cda163b56fc564cdc75e86d338246458c4c28ae84cabefa2404"}, - {file = "filelock-3.7.1.tar.gz", hash = "sha256:3a0fd85166ad9dbab54c9aec96737b744106dc5f15c0b09a6744a445299fcf04"}, + {file = "filelock-3.8.0-py3-none-any.whl", hash = "sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4"}, + {file = "filelock-3.8.0.tar.gz", hash = "sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc"}, ] gitdb = [ {file = "gitdb-4.0.9-py3-none-any.whl", hash = "sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd"}, @@ -1203,16 +1229,16 @@ google-auth-oauthlib = [ {file = "google_auth_oauthlib-0.5.2-py2.py3-none-any.whl", hash = "sha256:6d6161d0ec0a62e2abf2207c6071c117ec5897b300823c4bb2d963ee86e20e4f"}, ] gspread = [ - {file = "gspread-5.4.0-py3-none-any.whl", hash = "sha256:21704b47d007c3b5fd34eddfa4c4a9dcd1ecc1dc615083b9c636127726e66c18"}, - {file = "gspread-5.4.0.tar.gz", hash = "sha256:b6172b62fa899e3e4199d2d0ea1008b64305554ba08d3d3a96e9123824fdec48"}, + {file = "gspread-5.5.0-py3-none-any.whl", hash = 
"sha256:787b5fab9dd61a7d6d84af73356d7ff905cd3978438e528dc66dc8a9407fb851"}, + {file = "gspread-5.5.0.tar.gz", hash = "sha256:8620e987e5340315f2b8d8d26cf97e4736a84b3325a17c7d9bcff70525dc3003"}, ] h11 = [ {file = "h11-0.13.0-py3-none-any.whl", hash = "sha256:8ddd78563b633ca55346c8cd41ec0af27d3c79931828beffb46ce70a379e7442"}, {file = "h11-0.13.0.tar.gz", hash = "sha256:70813c1135087a248a4d38cc0e1a0181ffab2188141a93eaf567940c3957ff06"}, ] identify = [ - {file = "identify-2.5.1-py2.py3-none-any.whl", hash = "sha256:0dca2ea3e4381c435ef9c33ba100a78a9b40c0bab11189c7cf121f75815efeaa"}, - {file = "identify-2.5.1.tar.gz", hash = "sha256:3d11b16f3fe19f52039fb7e39c9c884b21cb1b586988114fbe42671f03de3e82"}, + {file = "identify-2.5.5-py2.py3-none-any.whl", hash = "sha256:ef78c0d96098a3b5fe7720be4a97e73f439af7cf088ebf47b620aeaa10fadf97"}, + {file = "identify-2.5.5.tar.gz", hash = "sha256:322a5699daecf7c6fd60e68852f36f2ecbb6a36ff6e6e973e0d2bb6fca203ee6"}, ] idna = [ {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, @@ -1270,29 +1296,29 @@ mccabe = [ {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, ] mypy = [ - {file = "mypy-0.961-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:697540876638ce349b01b6786bc6094ccdaba88af446a9abb967293ce6eaa2b0"}, - {file = "mypy-0.961-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b117650592e1782819829605a193360a08aa99f1fc23d1d71e1a75a142dc7e15"}, - {file = "mypy-0.961-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bdd5ca340beffb8c44cb9dc26697628d1b88c6bddf5c2f6eb308c46f269bb6f3"}, - {file = "mypy-0.961-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3e09f1f983a71d0672bbc97ae33ee3709d10c779beb613febc36805a6e28bb4e"}, - {file = "mypy-0.961-cp310-cp310-win_amd64.whl", hash = "sha256:e999229b9f3198c0c880d5e269f9f8129c8862451ce53a011326cad38b9ccd24"}, - {file = "mypy-0.961-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b24be97351084b11582fef18d79004b3e4db572219deee0212078f7cf6352723"}, - {file = "mypy-0.961-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f4a21d01fc0ba4e31d82f0fff195682e29f9401a8bdb7173891070eb260aeb3b"}, - {file = "mypy-0.961-cp36-cp36m-win_amd64.whl", hash = "sha256:439c726a3b3da7ca84a0199a8ab444cd8896d95012c4a6c4a0d808e3147abf5d"}, - {file = "mypy-0.961-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5a0b53747f713f490affdceef835d8f0cb7285187a6a44c33821b6d1f46ed813"}, - {file = "mypy-0.961-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0e9f70df36405c25cc530a86eeda1e0867863d9471fe76d1273c783df3d35c2e"}, - {file = "mypy-0.961-cp37-cp37m-win_amd64.whl", hash = "sha256:b88f784e9e35dcaa075519096dc947a388319cb86811b6af621e3523980f1c8a"}, - {file = "mypy-0.961-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d5aaf1edaa7692490f72bdb9fbd941fbf2e201713523bdb3f4038be0af8846c6"}, - {file = "mypy-0.961-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9f5f5a74085d9a81a1f9c78081d60a0040c3efb3f28e5c9912b900adf59a16e6"}, - {file = "mypy-0.961-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f4b794db44168a4fc886e3450201365c9526a522c46ba089b55e1f11c163750d"}, - {file = "mypy-0.961-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:64759a273d590040a592e0f4186539858c948302c653c2eac840c7a3cd29e51b"}, - {file = "mypy-0.961-cp38-cp38-win_amd64.whl", hash = "sha256:63e85a03770ebf403291ec50097954cc5caf2a9205c888ce3a61bd3f82e17569"}, - {file = "mypy-0.961-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5f1332964963d4832a94bebc10f13d3279be3ce8f6c64da563d6ee6e2eeda932"}, - {file = "mypy-0.961-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:006be38474216b833eca29ff6b73e143386f352e10e9c2fbe76aa8549e5554f5"}, - {file = "mypy-0.961-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9940e6916ed9371809b35b2154baf1f684acba935cd09928952310fbddaba648"}, - {file = "mypy-0.961-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a5ea0875a049de1b63b972456542f04643daf320d27dc592d7c3d9cd5d9bf950"}, - {file = "mypy-0.961-cp39-cp39-win_amd64.whl", hash = "sha256:1ece702f29270ec6af25db8cf6185c04c02311c6bb21a69f423d40e527b75c56"}, - {file = "mypy-0.961-py3-none-any.whl", hash = "sha256:03c6cc893e7563e7b2949b969e63f02c000b32502a1b4d1314cabe391aa87d66"}, - {file = "mypy-0.961.tar.gz", hash = "sha256:f730d56cb924d371c26b8eaddeea3cc07d78ff51c521c6d04899ac6904b75492"}, + {file = "mypy-0.971-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f2899a3cbd394da157194f913a931edfd4be5f274a88041c9dc2d9cdcb1c315c"}, + {file = "mypy-0.971-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:98e02d56ebe93981c41211c05adb630d1d26c14195d04d95e49cd97dbc046dc5"}, + {file = "mypy-0.971-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:19830b7dba7d5356d3e26e2427a2ec91c994cd92d983142cbd025ebe81d69cf3"}, + {file = "mypy-0.971-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:02ef476f6dcb86e6f502ae39a16b93285fef97e7f1ff22932b657d1ef1f28655"}, + {file = "mypy-0.971-cp310-cp310-win_amd64.whl", hash = "sha256:25c5750ba5609a0c7550b73a33deb314ecfb559c350bb050b655505e8aed4103"}, + {file = "mypy-0.971-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d3348e7eb2eea2472db611486846742d5d52d1290576de99d59edeb7cd4a42ca"}, + {file = "mypy-0.971-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3fa7a477b9900be9b7dd4bab30a12759e5abe9586574ceb944bc29cddf8f0417"}, + {file = "mypy-0.971-cp36-cp36m-win_amd64.whl", hash = "sha256:2ad53cf9c3adc43cf3bea0a7d01a2f2e86db9fe7596dfecb4496a5dda63cbb09"}, + {file = "mypy-0.971-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:855048b6feb6dfe09d3353466004490b1872887150c5bb5caad7838b57328cc8"}, + {file = "mypy-0.971-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:23488a14a83bca6e54402c2e6435467a4138785df93ec85aeff64c6170077fb0"}, + {file = "mypy-0.971-cp37-cp37m-win_amd64.whl", hash = "sha256:4b21e5b1a70dfb972490035128f305c39bc4bc253f34e96a4adf9127cf943eb2"}, + {file = "mypy-0.971-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:9796a2ba7b4b538649caa5cecd398d873f4022ed2333ffde58eaf604c4d2cb27"}, + {file = "mypy-0.971-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5a361d92635ad4ada1b1b2d3630fc2f53f2127d51cf2def9db83cba32e47c856"}, + {file = "mypy-0.971-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b793b899f7cf563b1e7044a5c97361196b938e92f0a4343a5d27966a53d2ec71"}, + {file = "mypy-0.971-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d1ea5d12c8e2d266b5fb8c7a5d2e9c0219fedfeb493b7ed60cd350322384ac27"}, + 
{file = "mypy-0.971-cp38-cp38-win_amd64.whl", hash = "sha256:23c7ff43fff4b0df93a186581885c8512bc50fc4d4910e0f838e35d6bb6b5e58"}, + {file = "mypy-0.971-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1f7656b69974a6933e987ee8ffb951d836272d6c0f81d727f1d0e2696074d9e6"}, + {file = "mypy-0.971-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d2022bfadb7a5c2ef410d6a7c9763188afdb7f3533f22a0a32be10d571ee4bbe"}, + {file = "mypy-0.971-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef943c72a786b0f8d90fd76e9b39ce81fb7171172daf84bf43eaf937e9f220a9"}, + {file = "mypy-0.971-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d744f72eb39f69312bc6c2abf8ff6656973120e2eb3f3ec4f758ed47e414a4bf"}, + {file = "mypy-0.971-cp39-cp39-win_amd64.whl", hash = "sha256:77a514ea15d3007d33a9e2157b0ba9c267496acf12a7f2b9b9f8446337aac5b0"}, + {file = "mypy-0.971-py3-none-any.whl", hash = "sha256:0d054ef16b071149917085f51f89555a576e2618d5d9dd70bd6eea6410af3ac9"}, + {file = "mypy-0.971.tar.gz", hash = "sha256:40b0f21484238269ae6a57200c807d80debc6459d444c0489a102d7c6a75fa56"}, ] mypy-extensions = [ {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, @@ -1311,12 +1337,12 @@ packaging = [ {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, ] pathspec = [ - {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"}, - {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"}, + {file = "pathspec-0.10.1-py3-none-any.whl", hash = "sha256:46846318467efc4556ccfd27816e004270a9eeeeb4d062ce5e6fc7a87c573f93"}, + {file = "pathspec-0.10.1.tar.gz", hash = "sha256:7ace6161b621d31e7902eb6b5ae148d12cfd23f4a249b9ffb6b9fee12084323d"}, ] pbr = [ - {file = "pbr-5.9.0-py2.py3-none-any.whl", hash = "sha256:e547125940bcc052856ded43be8e101f63828c2d94239ffbe2b327ba3d5ccf0a"}, - {file = "pbr-5.9.0.tar.gz", hash = "sha256:e8dca2f4b43560edef58813969f52a56cef023146cbb8931626db80e6c1c4308"}, + {file = "pbr-5.10.0-py2.py3-none-any.whl", hash = "sha256:da3e18aac0a3c003e9eea1a81bd23e5a3a75d745670dcf736317b7d966887fdf"}, + {file = "pbr-5.10.0.tar.gz", hash = "sha256:cfcc4ff8e698256fc17ea3ff796478b050852585aa5bae79ecd05b2ab7b39b9a"}, ] platformdirs = [ {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"}, @@ -1369,41 +1395,42 @@ pycparser = [ {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, ] pydantic = [ - {file = "pydantic-1.9.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c8098a724c2784bf03e8070993f6d46aa2eeca031f8d8a048dff277703e6e193"}, - {file = "pydantic-1.9.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c320c64dd876e45254bdd350f0179da737463eea41c43bacbee9d8c9d1021f11"}, - {file = "pydantic-1.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18f3e912f9ad1bdec27fb06b8198a2ccc32f201e24174cec1b3424dda605a310"}, - {file = "pydantic-1.9.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c11951b404e08b01b151222a1cb1a9f0a860a8153ce8334149ab9199cd198131"}, - {file = "pydantic-1.9.1-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:8bc541a405423ce0e51c19f637050acdbdf8feca34150e0d17f675e72d119580"}, - {file = "pydantic-1.9.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e565a785233c2d03724c4dc55464559639b1ba9ecf091288dd47ad9c629433bd"}, - {file = "pydantic-1.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:a4a88dcd6ff8fd47c18b3a3709a89adb39a6373f4482e04c1b765045c7e282fd"}, - {file = "pydantic-1.9.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:447d5521575f18e18240906beadc58551e97ec98142266e521c34968c76c8761"}, - {file = "pydantic-1.9.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:985ceb5d0a86fcaa61e45781e567a59baa0da292d5ed2e490d612d0de5796918"}, - {file = "pydantic-1.9.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:059b6c1795170809103a1538255883e1983e5b831faea6558ef873d4955b4a74"}, - {file = "pydantic-1.9.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:d12f96b5b64bec3f43c8e82b4aab7599d0157f11c798c9f9c528a72b9e0b339a"}, - {file = "pydantic-1.9.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:ae72f8098acb368d877b210ebe02ba12585e77bd0db78ac04a1ee9b9f5dd2166"}, - {file = "pydantic-1.9.1-cp36-cp36m-win_amd64.whl", hash = "sha256:79b485767c13788ee314669008d01f9ef3bc05db9ea3298f6a50d3ef596a154b"}, - {file = "pydantic-1.9.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:494f7c8537f0c02b740c229af4cb47c0d39840b829ecdcfc93d91dcbb0779892"}, - {file = "pydantic-1.9.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0f047e11febe5c3198ed346b507e1d010330d56ad615a7e0a89fae604065a0e"}, - {file = "pydantic-1.9.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:969dd06110cb780da01336b281f53e2e7eb3a482831df441fb65dd30403f4608"}, - {file = "pydantic-1.9.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:177071dfc0df6248fd22b43036f936cfe2508077a72af0933d0c1fa269b18537"}, - {file = "pydantic-1.9.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:9bcf8b6e011be08fb729d110f3e22e654a50f8a826b0575c7196616780683380"}, - {file = "pydantic-1.9.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a955260d47f03df08acf45689bd163ed9df82c0e0124beb4251b1290fa7ae728"}, - {file = "pydantic-1.9.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9ce157d979f742a915b75f792dbd6aa63b8eccaf46a1005ba03aa8a986bde34a"}, - {file = "pydantic-1.9.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0bf07cab5b279859c253d26a9194a8906e6f4a210063b84b433cf90a569de0c1"}, - {file = "pydantic-1.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d93d4e95eacd313d2c765ebe40d49ca9dd2ed90e5b37d0d421c597af830c195"}, - {file = "pydantic-1.9.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1542636a39c4892c4f4fa6270696902acb186a9aaeac6f6cf92ce6ae2e88564b"}, - {file = "pydantic-1.9.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a9af62e9b5b9bc67b2a195ebc2c2662fdf498a822d62f902bf27cccb52dbbf49"}, - {file = "pydantic-1.9.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fe4670cb32ea98ffbf5a1262f14c3e102cccd92b1869df3bb09538158ba90fe6"}, - {file = "pydantic-1.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:9f659a5ee95c8baa2436d392267988fd0f43eb774e5eb8739252e5a7e9cf07e0"}, - {file = "pydantic-1.9.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b83ba3825bc91dfa989d4eed76865e71aea3a6ca1388b59fc801ee04c4d8d0d6"}, - {file = "pydantic-1.9.1-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:1dd8fecbad028cd89d04a46688d2fcc14423e8a196d5b0a5c65105664901f810"}, - {file = "pydantic-1.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02eefd7087268b711a3ff4db528e9916ac9aa18616da7bca69c1871d0b7a091f"}, - {file = "pydantic-1.9.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7eb57ba90929bac0b6cc2af2373893d80ac559adda6933e562dcfb375029acee"}, - {file = "pydantic-1.9.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4ce9ae9e91f46c344bec3b03d6ee9612802682c1551aaf627ad24045ce090761"}, - {file = "pydantic-1.9.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:72ccb318bf0c9ab97fc04c10c37683d9eea952ed526707fabf9ac5ae59b701fd"}, - {file = "pydantic-1.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:61b6760b08b7c395975d893e0b814a11cf011ebb24f7d869e7118f5a339a82e1"}, - {file = "pydantic-1.9.1-py3-none-any.whl", hash = "sha256:4988c0f13c42bfa9ddd2fe2f569c9d54646ce84adc5de84228cfe83396f3bd58"}, - {file = "pydantic-1.9.1.tar.gz", hash = "sha256:1ed987c3ff29fff7fd8c3ea3a3ea877ad310aae2ef9889a119e22d3f2db0691a"}, + {file = "pydantic-1.10.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bb6ad4489af1bac6955d38ebcb95079a836af31e4c4f74aba1ca05bb9f6027bd"}, + {file = "pydantic-1.10.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a1f5a63a6dfe19d719b1b6e6106561869d2efaca6167f84f5ab9347887d78b98"}, + {file = "pydantic-1.10.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:352aedb1d71b8b0736c6d56ad2bd34c6982720644b0624462059ab29bd6e5912"}, + {file = "pydantic-1.10.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19b3b9ccf97af2b7519c42032441a891a5e05c68368f40865a90eb88833c2559"}, + {file = "pydantic-1.10.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e9069e1b01525a96e6ff49e25876d90d5a563bc31c658289a8772ae186552236"}, + {file = "pydantic-1.10.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:355639d9afc76bcb9b0c3000ddcd08472ae75318a6eb67a15866b87e2efa168c"}, + {file = "pydantic-1.10.2-cp310-cp310-win_amd64.whl", hash = "sha256:ae544c47bec47a86bc7d350f965d8b15540e27e5aa4f55170ac6a75e5f73b644"}, + {file = "pydantic-1.10.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a4c805731c33a8db4b6ace45ce440c4ef5336e712508b4d9e1aafa617dc9907f"}, + {file = "pydantic-1.10.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d49f3db871575e0426b12e2f32fdb25e579dea16486a26e5a0474af87cb1ab0a"}, + {file = "pydantic-1.10.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37c90345ec7dd2f1bcef82ce49b6235b40f282b94d3eec47e801baf864d15525"}, + {file = "pydantic-1.10.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b5ba54d026c2bd2cb769d3468885f23f43710f651688e91f5fb1edcf0ee9283"}, + {file = "pydantic-1.10.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:05e00dbebbe810b33c7a7362f231893183bcc4251f3f2ff991c31d5c08240c42"}, + {file = "pydantic-1.10.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2d0567e60eb01bccda3a4df01df677adf6b437958d35c12a3ac3e0f078b0ee52"}, + {file = "pydantic-1.10.2-cp311-cp311-win_amd64.whl", hash = "sha256:c6f981882aea41e021f72779ce2a4e87267458cc4d39ea990729e21ef18f0f8c"}, + {file = "pydantic-1.10.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c4aac8e7103bf598373208f6299fa9a5cfd1fc571f2d40bf1dd1955a63d6eeb5"}, + {file = "pydantic-1.10.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:81a7b66c3f499108b448f3f004801fcd7d7165fb4200acb03f1c2402da73ce4c"}, + {file = "pydantic-1.10.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bedf309630209e78582ffacda64a21f96f3ed2e51fbf3962d4d488e503420254"}, + {file = "pydantic-1.10.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9300fcbebf85f6339a02c6994b2eb3ff1b9c8c14f502058b5bf349d42447dcf5"}, + {file = "pydantic-1.10.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:216f3bcbf19c726b1cc22b099dd409aa371f55c08800bcea4c44c8f74b73478d"}, + {file = "pydantic-1.10.2-cp37-cp37m-win_amd64.whl", hash = "sha256:dd3f9a40c16daf323cf913593083698caee97df2804aa36c4b3175d5ac1b92a2"}, + {file = "pydantic-1.10.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b97890e56a694486f772d36efd2ba31612739bc6f3caeee50e9e7e3ebd2fdd13"}, + {file = "pydantic-1.10.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9cabf4a7f05a776e7793e72793cd92cc865ea0e83a819f9ae4ecccb1b8aa6116"}, + {file = "pydantic-1.10.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06094d18dd5e6f2bbf93efa54991c3240964bb663b87729ac340eb5014310624"}, + {file = "pydantic-1.10.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc78cc83110d2f275ec1970e7a831f4e371ee92405332ebfe9860a715f8336e1"}, + {file = "pydantic-1.10.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ee433e274268a4b0c8fde7ad9d58ecba12b069a033ecc4645bb6303c062d2e9"}, + {file = "pydantic-1.10.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7c2abc4393dea97a4ccbb4ec7d8658d4e22c4765b7b9b9445588f16c71ad9965"}, + {file = "pydantic-1.10.2-cp38-cp38-win_amd64.whl", hash = "sha256:0b959f4d8211fc964772b595ebb25f7652da3f22322c007b6fed26846a40685e"}, + {file = "pydantic-1.10.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c33602f93bfb67779f9c507e4d69451664524389546bacfe1bee13cae6dc7488"}, + {file = "pydantic-1.10.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5760e164b807a48a8f25f8aa1a6d857e6ce62e7ec83ea5d5c5a802eac81bad41"}, + {file = "pydantic-1.10.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6eb843dcc411b6a2237a694f5e1d649fc66c6064d02b204a7e9d194dff81eb4b"}, + {file = "pydantic-1.10.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b8795290deaae348c4eba0cebb196e1c6b98bdbe7f50b2d0d9a4a99716342fe"}, + {file = "pydantic-1.10.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e0bedafe4bc165ad0a56ac0bd7695df25c50f76961da29c050712596cf092d6d"}, + {file = "pydantic-1.10.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2e05aed07fa02231dbf03d0adb1be1d79cabb09025dd45aa094aa8b4e7b9dcda"}, + {file = "pydantic-1.10.2-cp39-cp39-win_amd64.whl", hash = "sha256:c1ba1afb396148bbc70e9eaa8c06c1716fdddabaf86e7027c5988bae2a829ab6"}, + {file = "pydantic-1.10.2-py3-none-any.whl", hash = "sha256:1b6ee725bd6e83ec78b1aa32c5b1fa67a3a65badddde3976bca5fe4568f27709"}, + {file = "pydantic-1.10.2.tar.gz", hash = "sha256:91b8e218852ef6007c2b98cd861601c6a09f1aa32bbbb74fab5b1c33d4a1e410"}, ] pydocstyle = [ {file = "pydocstyle-6.1.1-py3-none-any.whl", hash = "sha256:6987826d6775056839940041beef5c08cc7e3d71d63149b48e36727f70144dc4"}, @@ -1414,16 +1441,16 @@ pygithub = [ {file = "PyGithub-1.55.tar.gz", hash = "sha256:1bbfff9372047ff3f21d5cd8e07720f3dbfdaf6462fcaed9d815f528f1ba7283"}, ] pygments = [ - {file = "Pygments-2.12.0-py3-none-any.whl", hash = 
"sha256:dc9c10fb40944260f6ed4c688ece0cd2048414940f1cea51b8b226318411c519"}, - {file = "Pygments-2.12.0.tar.gz", hash = "sha256:5eb116118f9612ff1ee89ac96437bb6b49e8f04d8a13b514ba26f620208e26eb"}, + {file = "Pygments-2.13.0-py3-none-any.whl", hash = "sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42"}, + {file = "Pygments-2.13.0.tar.gz", hash = "sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1"}, ] pyjwt = [ {file = "PyJWT-2.4.0-py3-none-any.whl", hash = "sha256:72d1d253f32dbd4f5c88eaf1fdc62f3a19f676ccbadb9dbc5d07e951b2b26daf"}, {file = "PyJWT-2.4.0.tar.gz", hash = "sha256:d42908208c699b3b973cbeb01a969ba6a96c821eefb1c5bfe4c390c01d67abba"}, ] pylint = [ - {file = "pylint-2.14.4-py3-none-any.whl", hash = "sha256:89b61867db16eefb7b3c5b84afc94081edaf11544189e2b238154677529ad69f"}, - {file = "pylint-2.14.4.tar.gz", hash = "sha256:47705453aa9dce520e123a7d51843d5f0032cbfa06870f89f00927aa1f735a4a"}, + {file = "pylint-2.15.0-py3-none-any.whl", hash = "sha256:4b124affc198b7f7c9b5f9ab690d85db48282a025ef9333f51d2d7281b92a6c3"}, + {file = "pylint-2.15.0.tar.gz", hash = "sha256:4f3f7e869646b0bd63b3dfb79f3c0f28fc3d2d923ea220d52620fd625aed92b0"}, ] pynacl = [ {file = "PyNaCl-1.5.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1"}, @@ -1442,12 +1469,16 @@ pyparsing = [ {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, ] pytest = [ - {file = "pytest-7.1.2-py3-none-any.whl", hash = "sha256:13d0e3ccfc2b6e26be000cb6568c832ba67ba32e719443bfe725814d3c42433c"}, - {file = "pytest-7.1.2.tar.gz", hash = "sha256:a06a0425453864a270bc45e71f783330a7428defb4230fb5e6a731fde06ecd45"}, + {file = "pytest-7.1.3-py3-none-any.whl", hash = "sha256:1377bda3466d70b55e3f5cecfa55bb7cfcf219c7964629b967c37cf0bda818b7"}, + {file = "pytest-7.1.3.tar.gz", hash = "sha256:4f365fec2dff9c1162f834d9f18af1ba13062db0c708bf7b946f8a5c76180c39"}, +] +pytest-mock = [ + {file = "pytest-mock-3.8.2.tar.gz", hash = "sha256:77f03f4554392558700295e05aed0b1096a20d4a60a4f3ddcde58b0c31c8fca2"}, + {file = "pytest_mock-3.8.2-py3-none-any.whl", hash = "sha256:8a9e226d6c0ef09fcf20c94eb3405c388af438a90f3e39687f84166da82d5948"}, ] pyupgrade = [ - {file = "pyupgrade-2.37.1-py2.py3-none-any.whl", hash = "sha256:dd2a32628d6d2a7dd6c086d98420e234b9e60c1e1d4c55431578491703e762a5"}, - {file = "pyupgrade-2.37.1.tar.gz", hash = "sha256:3d9cbd88507a0f3d7397c46870617f0d073d61401c451c08a06763d6235d9e7d"}, + {file = "pyupgrade-2.37.3-py2.py3-none-any.whl", hash = "sha256:9746efd064dbf53d7f86d6f88a1d48120f58dbfc378f517768634740ea2225e2"}, + {file = "pyupgrade-2.37.3.tar.gz", hash = "sha256:1414c7a7c558004cf610e6180716b876814b639b5a5789c3da023c5cdaebcd49"}, ] pyyaml = [ {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, @@ -1537,12 +1568,12 @@ rsa = [ {file = "ruamel.yaml.clib-0.2.6.tar.gz", hash = "sha256:4ff604ce439abb20794f05613c374759ce10e3595d1867764dd1ae675b85acbd"}, ] safety = [ - {file = "safety-2.0.0-py3-none-any.whl", hash = "sha256:77cebdd128ce47b941e68a1b3bbc29fbbd2b9e98d11f179c5def64c1d05da295"}, - {file = "safety-2.0.0.tar.gz", hash = "sha256:d739d00a9e4203cfaba34540c822a73ca1d327159ed7776b3dce09391f81c35d"}, + {file = "safety-2.1.1-py3-none-any.whl", hash = "sha256:05ba551fb61ef24c864835d21089f75bc8b37292680047b9f29693a6552e2fc7"}, + {file = "safety-2.1.1.tar.gz", hash = 
"sha256:dbc5dffa2e47da76cc43dfe8cbbbfca99d29118d0c6c54dfcfa11c2bd349dff6"}, ] shellingham = [ - {file = "shellingham-1.4.0-py2.py3-none-any.whl", hash = "sha256:536b67a0697f2e4af32ab176c00a50ac2899c5a05e0d8e2dadac8e58888283f9"}, - {file = "shellingham-1.4.0.tar.gz", hash = "sha256:4855c2458d6904829bd34c299f11fdeed7cfefbf8a2c522e4caea6cd76b3171e"}, + {file = "shellingham-1.5.0-py2.py3-none-any.whl", hash = "sha256:a8f02ba61b69baaa13facdba62908ca8690a94b8119b69f5ec5873ea85f7391b"}, + {file = "shellingham-1.5.0.tar.gz", hash = "sha256:72fb7f5c63103ca2cb91b23dee0c71fe8ad6fbfd46418ef17dbe40db51592dad"}, ] six = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, @@ -1553,8 +1584,8 @@ smmap = [ {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"}, ] sniffio = [ - {file = "sniffio-1.2.0-py3-none-any.whl", hash = "sha256:471b71698eac1c2112a40ce2752bb2f4a4814c22a54a3eed3676bc0f5ca9f663"}, - {file = "sniffio-1.2.0.tar.gz", hash = "sha256:c4666eecec1d3f50960c6bdf61ab7bc350648da6c126e3cf6898d8cd4ddcd3de"}, + {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, + {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, ] snowballstemmer = [ {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, @@ -1581,28 +1612,36 @@ tomli = [ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] tomlkit = [ - {file = "tomlkit-0.11.1-py3-none-any.whl", hash = "sha256:1c5bebdf19d5051e2e1de6cf70adfc5948d47221f097fcff7a3ffc91e953eaf5"}, - {file = "tomlkit-0.11.1.tar.gz", hash = "sha256:61901f81ff4017951119cd0d1ed9b7af31c821d6845c8c477587bbdcd5e5854e"}, + {file = "tomlkit-0.11.4-py3-none-any.whl", hash = "sha256:25d4e2e446c453be6360c67ddfb88838cfc42026322770ba13d1fbd403a93a5c"}, + {file = "tomlkit-0.11.4.tar.gz", hash = "sha256:3235a9010fae54323e727c3ac06fb720752fe6635b3426e379daec60fbd44a83"}, ] typer = [ {file = "typer-0.6.1-py3-none-any.whl", hash = "sha256:54b19e5df18654070a82f8c2aa1da456a4ac16a2a83e6dcd9f170e291c56338e"}, {file = "typer-0.6.1.tar.gz", hash = "sha256:2d5720a5e63f73eaf31edaa15f6ab87f35f0690f8ca233017d7d23d743a91d73"}, ] +types-requests = [ + {file = "types-requests-2.28.9.tar.gz", hash = "sha256:feaf581bd580497a47fe845d506fa3b91b484cf706ff27774e87659837de9962"}, + {file = "types_requests-2.28.9-py3-none-any.whl", hash = "sha256:86cb66d3de2f53eac5c09adc42cf6547eefbd0c7e1210beca1ee751c35d96083"}, +] +types-urllib3 = [ + {file = "types-urllib3-1.26.23.tar.gz", hash = "sha256:b78e819f0e350221d0689a5666162e467ba3910737bafda14b5c2c85e9bb1e56"}, + {file = "types_urllib3-1.26.23-py3-none-any.whl", hash = "sha256:333e675b188a1c1fd980b4b352f9e40572413a4c1ac689c23cd546e96310070a"}, +] typing-extensions = [ {file = "typing_extensions-4.3.0-py3-none-any.whl", hash = "sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02"}, {file = "typing_extensions-4.3.0.tar.gz", hash = "sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6"}, ] urllib3 = [ - {file = "urllib3-1.26.10-py2.py3-none-any.whl", hash = "sha256:8298d6d56d39be0e3bc13c1c97d133f9b45d797169a0e11cdd0e0489d786f7ec"}, - {file = "urllib3-1.26.10.tar.gz", hash = 
"sha256:879ba4d1e89654d9769ce13121e0f94310ea32e8d2f8cf587b77c08bbcdb30d6"}, + {file = "urllib3-1.26.12-py2.py3-none-any.whl", hash = "sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997"}, + {file = "urllib3-1.26.12.tar.gz", hash = "sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e"}, ] uvicorn = [ - {file = "uvicorn-0.18.2-py3-none-any.whl", hash = "sha256:c19a057deb1c5bb060946e2e5c262fc01590c6529c0af2c3d9ce941e89bc30e0"}, - {file = "uvicorn-0.18.2.tar.gz", hash = "sha256:cade07c403c397f9fe275492a48c1b869efd175d5d8a692df649e6e7e2ed8f4e"}, + {file = "uvicorn-0.18.3-py3-none-any.whl", hash = "sha256:0abd429ebb41e604ed8d2be6c60530de3408f250e8d2d84967d85ba9e86fe3af"}, + {file = "uvicorn-0.18.3.tar.gz", hash = "sha256:9a66e7c42a2a95222f76ec24a4b754c158261c4696e683b9dadc72b590e0311b"}, ] virtualenv = [ - {file = "virtualenv-20.15.1-py2.py3-none-any.whl", hash = "sha256:b30aefac647e86af6d82bfc944c556f8f1a9c90427b2fb4e3bfbf338cb82becf"}, - {file = "virtualenv-20.15.1.tar.gz", hash = "sha256:288171134a2ff3bfb1a2f54f119e77cd1b81c29fc1265a2356f3e8d14c7d58c4"}, + {file = "virtualenv-20.16.4-py3-none-any.whl", hash = "sha256:035ed57acce4ac35c82c9d8802202b0e71adac011a511ff650cbcf9635006a22"}, + {file = "virtualenv-20.16.4.tar.gz", hash = "sha256:014f766e4134d0008dcaa1f95bafa0fb0f575795d07cae50b1bee514185d6782"}, ] wrapt = [ {file = "wrapt-1.14.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3"}, diff --git a/pyproject.toml b/pyproject.toml index 1cd87c5..688e0cf 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -44,6 +44,8 @@ PyGithub = ">=1.55" cryptography = ">=36.0.1" urllib3 = ">=1.26.5" gspread = "^5.4.0" +types-requests = "^2.28.9" +pytest-mock = "^3.8.2" [tool.poetry.dev-dependencies] darglint = ">=1.8.0"