From 7c10ee6a4165db0eb20d708e203102b7229f483f Mon Sep 17 00:00:00 2001 From: driazati Date: Fri, 25 Mar 2022 15:48:53 -0700 Subject: [PATCH 1/4] [ci] Add GitHub Actions bot to merge PRs on demand This implements https://discuss.tvm.apache.org/t/rfc-allow-merging-via-pr-comments/12220. The bot can be invoked from a top-level review comment or via a regular PR comment. The text `@tvm-bot merge` anywhere in the body will trigger the bot. Right now it checks that the latest commit is reviewed and that all CI jobs that have run on that commit are successful. If it fails, it will leave a comment on the PR with the reason. This is just a start and some features are left for followups: * Various TODOs throughout the code * "Scheduled" merges that happen once CI finishes * Allowing committers to merge without getting a fresh review for changes after an approval --- .github/workflows/merge.yml | 28 + .gitignore | 2 + tests/python/ci/sample_prs/pr10786-badci.json | 123 +++++ .../python/ci/sample_prs/pr10786-merges.json | 123 +++++ .../ci/sample_prs/pr10786-nottriggered.json | 123 +++++ .../ci/sample_prs/pr10786-oldreview.json | 123 +++++ tests/python/ci/test_mergebot.py | 95 ++++ tests/scripts/git_utils.py | 17 +- tests/scripts/github_mergebot.py | 501 ++++++++++++++++++ 9 files changed, 1129 insertions(+), 6 deletions(-) create mode 100644 .github/workflows/merge.yml create mode 100644 tests/python/ci/sample_prs/pr10786-badci.json create mode 100644 tests/python/ci/sample_prs/pr10786-merges.json create mode 100644 tests/python/ci/sample_prs/pr10786-nottriggered.json create mode 100644 tests/python/ci/sample_prs/pr10786-oldreview.json create mode 100644 tests/python/ci/test_mergebot.py create mode 100755 tests/scripts/github_mergebot.py diff --git a/.github/workflows/merge.yml b/.github/workflows/merge.yml new file mode 100644 index 000000000000..de2fba707fa9 --- /dev/null +++ b/.github/workflows/merge.yml @@ -0,0 +1,28 @@ + +name: Merge +on: + status: + pull_request_review: + types: + - submitted + issue_comment: + +concurrency: + group: merge-${{ github.event.pull_request.number }}-${{ github.event.issue.number }} + cancel-in-progress: true + +jobs: + maybe-merge: + if: github.repository == 'driazati/tvm' + # if: github.repository == 'apache/tvm' # TODO: uncomment after testing + runs-on: ubuntu-20.04 + steps: + - uses: actions/checkout@v2 + - name: Merge if requested and possible + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + PR_NUMBER: ${{ github.event.issue.number }} + RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + run: | + set -eux + python tests/scripts/github_mergebot.py --pr "$PR_NUMBER" --run-url "$RUN_URL" diff --git a/.gitignore b/.gitignore index 887231895383..184ff17ab25e 100644 --- a/.gitignore +++ b/.gitignore @@ -263,3 +263,5 @@ tvm-site/ # Generated docs files gallery/how_to/work_with_microtvm/micro_tvmc.py +# Test sample data files +!tests/python/ci/sample_prs/*.json diff --git a/tests/python/ci/sample_prs/pr10786-badci.json b/tests/python/ci/sample_prs/pr10786-badci.json new file mode 100644 index 000000000000..274d452dc12d --- /dev/null +++ b/tests/python/ci/sample_prs/pr10786-badci.json @@ -0,0 +1,123 @@ +{ + "title": "[Hexagon] 2-d allocation cleanup", + "body": "- Added device validity check in allocation. HexagonDeviceAPI should only be called for CPU/Hexagon types.\r\n\r\n- Check for \"global.vtcm\" scope instead of \"vtcm\". The ccope of N-d allocations produced by `LowerVtcmAlloc` should be `\"global.vtcm\"`. 
The previous check allowed unsupported scope such as `\"local.vtcm\"`.\r\n\r\n- Remove `vtcmallocs` entry after calling free. Previously, the vtcm allocation map kept dangling pointers to `HexagonBuffer` objects after they had been freed.\r\n\r\n- Rename N-d alloc and free packed functions. Since most of the similar device functions use snake case, renaming `*.AllocND` to `*.alloc_nd` and `*.FreeND` to `*.free_nd`.\r\n\r\nCo-authored-by: Adam Straw ", + "state": "OPEN", + "comments": { + "pageInfo": { + "hasPreviousPage": false + }, + "nodes": [] + }, + "authorCommits": { + "nodes": [ + { + "commit": { + "authors": { + "nodes": [ + { + "name": "Eric Lunderberg", + "email": "elunderberg@octoml.ai" + }, + { + "name": "Adam Straw", + "email": "astraw@octoml.ai" + } + ] + } + } + } + ] + }, + "commits": { + "nodes": [ + { + "commit": { + "oid": "6f04bcf57d07f915a98fd91178f04d9e92a09fcd", + "statusCheckRollup": { + "contexts": { + "pageInfo": { + "hasNextPage": false + }, + "nodes": [ + { + "name": "MacOS", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "CI" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/5694945392" + }, + { + "name": "cc-reviewers", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "PR" + } + } + }, + "status": "COMPLETED", + "conclusion": "FAILED", + "url": "https://github.com/apache/tvm/runs/5694945029" + }, + { + "name": "tag-teams", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "Teams" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/5694945030" + }, + { + "name": "Windows", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "CI" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/5694945524" + }, + { + "state": "SUCCESS", + "context": "tvm-ci/pr-merge", + "targetUrl": "https://ci.tlcpack.ai/job/tvm/job/PR-10786/1/display/redirect" + } + ] + } + } + } + } + ] + }, + "reviewDecision": "APPROVED", + "reviews": { + "pageInfo": { + "hasPreviousPage": false + }, + "nodes": [ + { + "body": "@tvm-bot merge", + "updatedAt": "2022-03-25T22:13:50Z", + "authorCanPushToRepository": true, + "commit": { + "oid": "6f04bcf57d07f915a98fd91178f04d9e92a09fcd" + }, + "state": "APPROVED" + } + ] + } +} \ No newline at end of file diff --git a/tests/python/ci/sample_prs/pr10786-merges.json b/tests/python/ci/sample_prs/pr10786-merges.json new file mode 100644 index 000000000000..6ee9864466ac --- /dev/null +++ b/tests/python/ci/sample_prs/pr10786-merges.json @@ -0,0 +1,123 @@ +{ + "title": "[Hexagon] 2-d allocation cleanup", + "body": "- Added device validity check in allocation. HexagonDeviceAPI should only be called for CPU/Hexagon types.\r\n\r\n- Check for \"global.vtcm\" scope instead of \"vtcm\". The ccope of N-d allocations produced by `LowerVtcmAlloc` should be `\"global.vtcm\"`. The previous check allowed unsupported scope such as `\"local.vtcm\"`.\r\n\r\n- Remove `vtcmallocs` entry after calling free. Previously, the vtcm allocation map kept dangling pointers to `HexagonBuffer` objects after they had been freed.\r\n\r\n- Rename N-d alloc and free packed functions. 
Since most of the similar device functions use snake case, renaming `*.AllocND` to `*.alloc_nd` and `*.FreeND` to `*.free_nd`.\r\n\r\nCo-authored-by: Adam Straw ", + "state": "OPEN", + "comments": { + "pageInfo": { + "hasPreviousPage": false + }, + "nodes": [] + }, + "authorCommits": { + "nodes": [ + { + "commit": { + "authors": { + "nodes": [ + { + "name": "Eric Lunderberg", + "email": "elunderberg@octoml.ai" + }, + { + "name": "Adam Straw", + "email": "astraw@octoml.ai" + } + ] + } + } + } + ] + }, + "commits": { + "nodes": [ + { + "commit": { + "oid": "6f04bcf57d07f915a98fd91178f04d9e92a09fcd", + "statusCheckRollup": { + "contexts": { + "pageInfo": { + "hasNextPage": false + }, + "nodes": [ + { + "name": "MacOS", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "CI" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/5694945392" + }, + { + "name": "cc-reviewers", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "PR" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/5694945029" + }, + { + "name": "tag-teams", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "Teams" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/5694945030" + }, + { + "name": "Windows", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "CI" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/5694945524" + }, + { + "state": "SUCCESS", + "context": "tvm-ci/pr-merge", + "targetUrl": "https://ci.tlcpack.ai/job/tvm/job/PR-10786/1/display/redirect" + } + ] + } + } + } + } + ] + }, + "reviewDecision": "APPROVED", + "reviews": { + "pageInfo": { + "hasPreviousPage": false + }, + "nodes": [ + { + "body": "@tvm-bot merge", + "updatedAt": "2022-03-25T22:13:50Z", + "authorCanPushToRepository": true, + "commit": { + "oid": "6f04bcf57d07f915a98fd91178f04d9e92a09fcd" + }, + "state": "APPROVED" + } + ] + } +} \ No newline at end of file diff --git a/tests/python/ci/sample_prs/pr10786-nottriggered.json b/tests/python/ci/sample_prs/pr10786-nottriggered.json new file mode 100644 index 000000000000..be14c6d1d9a5 --- /dev/null +++ b/tests/python/ci/sample_prs/pr10786-nottriggered.json @@ -0,0 +1,123 @@ +{ + "title": "[Hexagon] 2-d allocation cleanup", + "body": "- Added device validity check in allocation. HexagonDeviceAPI should only be called for CPU/Hexagon types.\r\n\r\n- Check for \"global.vtcm\" scope instead of \"vtcm\". The ccope of N-d allocations produced by `LowerVtcmAlloc` should be `\"global.vtcm\"`. The previous check allowed unsupported scope such as `\"local.vtcm\"`.\r\n\r\n- Remove `vtcmallocs` entry after calling free. Previously, the vtcm allocation map kept dangling pointers to `HexagonBuffer` objects after they had been freed.\r\n\r\n- Rename N-d alloc and free packed functions. 
Since most of the similar device functions use snake case, renaming `*.AllocND` to `*.alloc_nd` and `*.FreeND` to `*.free_nd`.\r\n\r\nCo-authored-by: Adam Straw ", + "state": "OPEN", + "comments": { + "pageInfo": { + "hasPreviousPage": false + }, + "nodes": [] + }, + "authorCommits": { + "nodes": [ + { + "commit": { + "authors": { + "nodes": [ + { + "name": "Eric Lunderberg", + "email": "elunderberg@octoml.ai" + }, + { + "name": "Adam Straw", + "email": "astraw@octoml.ai" + } + ] + } + } + } + ] + }, + "commits": { + "nodes": [ + { + "commit": { + "oid": "6f04bcf57d07f915a98fd91178f04d9e92a09fcd", + "statusCheckRollup": { + "contexts": { + "pageInfo": { + "hasNextPage": false + }, + "nodes": [ + { + "name": "MacOS", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "CI" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/5694945392" + }, + { + "name": "cc-reviewers", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "PR" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/5694945029" + }, + { + "name": "tag-teams", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "Teams" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/5694945030" + }, + { + "name": "Windows", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "CI" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/5694945524" + }, + { + "state": "SUCCESS", + "context": "tvm-ci/pr-merge", + "targetUrl": "https://ci.tlcpack.ai/job/tvm/job/PR-10786/1/display/redirect" + } + ] + } + } + } + } + ] + }, + "reviewDecision": "APPROVED", + "reviews": { + "pageInfo": { + "hasPreviousPage": false + }, + "nodes": [ + { + "body": "", + "updatedAt": "2022-03-25T22:13:50Z", + "authorCanPushToRepository": true, + "commit": { + "oid": "6f04bcf57d07f915a98fd91178f04d9e92a09fcd" + }, + "state": "APPROVED" + } + ] + } +} \ No newline at end of file diff --git a/tests/python/ci/sample_prs/pr10786-oldreview.json b/tests/python/ci/sample_prs/pr10786-oldreview.json new file mode 100644 index 000000000000..a5e226f6403e --- /dev/null +++ b/tests/python/ci/sample_prs/pr10786-oldreview.json @@ -0,0 +1,123 @@ +{ + "title": "[Hexagon] 2-d allocation cleanup", + "body": "- Added device validity check in allocation. HexagonDeviceAPI should only be called for CPU/Hexagon types.\r\n\r\n- Check for \"global.vtcm\" scope instead of \"vtcm\". The ccope of N-d allocations produced by `LowerVtcmAlloc` should be `\"global.vtcm\"`. The previous check allowed unsupported scope such as `\"local.vtcm\"`.\r\n\r\n- Remove `vtcmallocs` entry after calling free. Previously, the vtcm allocation map kept dangling pointers to `HexagonBuffer` objects after they had been freed.\r\n\r\n- Rename N-d alloc and free packed functions. 
Since most of the similar device functions use snake case, renaming `*.AllocND` to `*.alloc_nd` and `*.FreeND` to `*.free_nd`.\r\n\r\nCo-authored-by: Adam Straw ", + "state": "OPEN", + "comments": { + "pageInfo": { + "hasPreviousPage": false + }, + "nodes": [] + }, + "authorCommits": { + "nodes": [ + { + "commit": { + "authors": { + "nodes": [ + { + "name": "Eric Lunderberg", + "email": "elunderberg@octoml.ai" + }, + { + "name": "Adam Straw", + "email": "astraw@octoml.ai" + } + ] + } + } + } + ] + }, + "commits": { + "nodes": [ + { + "commit": { + "oid": "6f04bcf57d07f915a98fd91178f04d9e92a09fcd", + "statusCheckRollup": { + "contexts": { + "pageInfo": { + "hasNextPage": false + }, + "nodes": [ + { + "name": "MacOS", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "CI" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/5694945392" + }, + { + "name": "cc-reviewers", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "PR" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/5694945029" + }, + { + "name": "tag-teams", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "Teams" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/5694945030" + }, + { + "name": "Windows", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "CI" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/5694945524" + }, + { + "state": "SUCCESS", + "context": "tvm-ci/pr-merge", + "targetUrl": "https://ci.tlcpack.ai/job/tvm/job/PR-10786/1/display/redirect" + } + ] + } + } + } + } + ] + }, + "reviewDecision": "APPROVED", + "reviews": { + "pageInfo": { + "hasPreviousPage": false + }, + "nodes": [ + { + "body": "@tvm-bot merge", + "updatedAt": "2022-03-25T22:13:50Z", + "authorCanPushToRepository": true, + "commit": { + "oid": "6f24bcf57d07f915a98fd91178f04d9e92a09fcd" + }, + "state": "APPROVED" + } + ] + } +} \ No newline at end of file diff --git a/tests/python/ci/test_mergebot.py b/tests/python/ci/test_mergebot.py new file mode 100644 index 000000000000..c62bde7c0c00 --- /dev/null +++ b/tests/python/ci/test_mergebot.py @@ -0,0 +1,95 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +import subprocess +import json +import sys +import pytest + +from pathlib import Path + +from test_utils import REPO_ROOT + + +class TempGit: + def __init__(self, cwd): + self.cwd = cwd + + def run(self, *args): + proc = subprocess.run(["git"] + list(args), cwd=self.cwd) + if proc.returncode != 0: + raise RuntimeError(f"git command failed: '{args}'") + + +def test_mergebot(tmpdir_factory): + mergebot_script = REPO_ROOT / "tests" / "scripts" / "github_mergebot.py" + test_json_dir = Path(__file__).resolve().parent / "sample_prs" + + def run(number, filename, expected): + git = TempGit(tmpdir_factory.mktemp("tmp_git_dir")) + git.run("init") + git.run("checkout", "-b", "main") + git.run("remote", "add", "origin", "https://github.com/apache/tvm.git") + with open(test_json_dir / filename) as f: + test_data = json.load(f) + + proc = subprocess.run( + [ + str(mergebot_script), + "--pr", + str(number), + "--dry-run", + "--run-url", + "https://example.com", + "--testing-pr-json", + json.dumps(test_data), + ], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + encoding="utf-8", + cwd=git.cwd, + ) + if proc.returncode != 0: + raise RuntimeError(f"Process failed:\nstdout:\n{proc.stdout}\n\nstderr:\n{proc.stderr}") + + if expected not in proc.stderr: + raise RuntimeError(f"{proc.stderr}\ndid not contain\n{expected}") + + run( + number=10786, + filename="pr10786-merges.json", + expected="Dry run, would have merged with url=pulls/10786/merge", + ) + run( + number=10786, + filename="pr10786-nottriggered.json", + expected="No merge requested, exiting", + ) + run( + number=10786, + filename="pr10786-badci.json", + expected="Cannot merge, these CI jobs failed on", + ) + run( + number=10786, + filename="pr10786-oldreview.json", + expected="Cannot merge, did not find any approving reviews", + ) + + +if __name__ == "__main__": + sys.exit(pytest.main([__file__] + sys.argv[1:])) diff --git a/tests/scripts/git_utils.py b/tests/scripts/git_utils.py index bc00bdf127fd..9f2468638cad 100644 --- a/tests/scripts/git_utils.py +++ b/tests/scripts/git_utils.py @@ -45,17 +45,19 @@ def graphql(self, query: str, variables: Optional[Dict[str, str]] = None) -> Dic query = compress_query(query) if variables is None: variables = {} - response = self._post( - "https://api.github.com/graphql", {"query": query, "variables": variables} + response = self._request( + "https://api.github.com/graphql", + {"query": query, "variables": variables}, + method="POST", ) if "data" not in response: msg = f"Error fetching data with query:\n{query}\n\nvariables:\n{variables}\n\nerror:\n{json.dumps(response, indent=2)}" raise RuntimeError(msg) return response - def _post(self, full_url: str, body: Dict[str, Any]) -> Dict[str, Any]: - print("Requesting POST to", full_url, "with", body) - req = request.Request(full_url, headers=self.headers(), method="POST") + def _request(self, full_url: str, body: Dict[str, Any], method: str) -> Dict[str, Any]: + print(f"Requesting {method} to", full_url, "with", body) + req = request.Request(full_url, headers=self.headers(), method=method.upper()) req.add_header("Content-Type", "application/json; charset=utf-8") data = json.dumps(body) data = data.encode("utf-8") @@ -65,8 +67,11 @@ def _post(self, full_url: str, body: Dict[str, Any]) -> Dict[str, Any]: response = json.loads(response.read()) return response + def put(self, url: str, data: Dict[str, Any]) -> Dict[str, Any]: + return self._request(self.base + url, data, method="PUT") + def post(self, url: str, data: Dict[str, Any]) -> Dict[str, Any]: - return 
self._post(self.base + url, data) + return self._request(self.base + url, data, method="POST") def get(self, url: str) -> Dict[str, Any]: url = self.base + url diff --git a/tests/scripts/github_mergebot.py b/tests/scripts/github_mergebot.py new file mode 100755 index 000000000000..a4ce7876195d --- /dev/null +++ b/tests/scripts/github_mergebot.py @@ -0,0 +1,501 @@ +#!/usr/bin/env python3 +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +import os +import json +import argparse +import warnings +import logging +import traceback +from typing import Dict, Any, List, Optional +from pathlib import Path + +from git_utils import git, GitHubRepo, parse_remote +from cmd_utils import init_log + + +Review = Dict[str, Any] +CIJob = Dict[str, Any] + + +def js(obj: Any) -> str: + return json.dumps(obj, indent=2) + + +PR_QUERY = """ + query ($owner: String!, $name: String!, $number: Int!) { + repository(owner: $owner, name: $name) { + pullRequest(number: $number) { + title + body + state + comments(last: 100) { + pageInfo { + hasPreviousPage + } + nodes { + author { + login + } + updatedAt + body + } + } + authorCommits:commits(last:100) { + nodes { + commit { + authors(first:100) { + nodes { + name + email + } + } + } + } + } + commits(last: 1) { + nodes { + commit { + oid + statusCheckRollup { + contexts(first: 100) { + pageInfo { + hasNextPage + } + nodes { + ... on CheckRun { + name + checkSuite { + workflowRun { + workflow { + name + } + } + } + status + conclusion + url + } + ... 
on StatusContext { + state + context + targetUrl + } + } + } + } + } + } + } + reviewDecision + reviews(last: 100) { + pageInfo { + hasPreviousPage + } + nodes { + body + updatedAt + authorCanPushToRepository + commit { + oid + } + state + } + } + } + } + } + """ + + +def walk(obj, visitor, parent_key=None): + """ + Recursively call 'visitor' on all the children of a dictionary + """ + visitor(obj, parent_key) + if isinstance(obj, dict): + for k, v in obj.items(): + walk(v, visitor, parent_key=k) + elif isinstance(obj, list): + for v in obj: + walk(v, visitor) + + +class PR: + def __init__( + self, + number: int, + owner: str, + repo: str, + dry_run: bool = False, + raw_data: Dict[str, Any] = None, + ): + self.owner = owner + self.number = number + self.repo_name = repo + self.dry_run = dry_run + + self.github = GitHubRepo(user=owner, repo=repo, token=os.environ["GITHUB_TOKEN"]) + + if dry_run and raw_data: + # In test mode there is no need to fetch anything + self.raw = raw_data + else: + if os.getenv("DEBUG", "0") == "1": + # For local runs fill in the requested data but cache it for + # later use + cached_path = Path("pr.json") + if not cached_path.exists(): + self.raw = self.fetch_data() + with open(cached_path, "w") as f: + json.dump(self.raw, f, indent=2) + else: + with open(cached_path) as f: + self.raw = json.load(f) + else: + # Usual path, fetch the PR's data based on the number from + # GitHub + self.raw = self.fetch_data() + + def checker(obj, parent_key): + """ + Verify that any paged results don't have extra data (if so the bot + may still work since most relevant comments will be more recent) + """ + if parent_key == "pageInfo": + if obj.get("hasPreviousPage", False): + warnings.warn(f"Found {obj} with a previous page, bot may be missing data") + if obj.get("hasNextPage", False): + warnings.warn(f"Found {obj} with a next page, bot may be missing data") + + walk(self.raw, checker) + + logging.info(f"Verified data, running with PR {js(self.raw)}") + + def __repr__(self): + return json.dumps(self.raw, indent=2) + + def head_commit(self): + return self.raw["commits"]["nodes"][0]["commit"] + + def co_authors(self) -> List[str]: + authors = [] + for commit in self.raw["authorCommits"]["nodes"]: + # Co-authors always come after the main author according to the + # GitHub docs, so ignore the first item + for author in commit["commit"]["authors"]["nodes"][1:]: + name = author["name"] + email = author["email"] + authors.append(f"{name} <{email}>") + + return list(set(authors)) + + def head_oid(self): + return self.head_commit()["oid"] + + def ci_jobs(self) -> List[CIJob]: + """ + Get a list of all CI jobs (GitHub Actions and other) in a unified format + """ + jobs = [] + for item in self.head_commit()["statusCheckRollup"]["contexts"]["nodes"]: + if "checkSuite" in item: + # GitHub Actions job, parse separately + status = item["conclusion"] + if status is None: + # If the 'conclusion' isn't filled out the job hasn't + # finished yet + status = "PENDING" + jobs.append( + { + "name": item["checkSuite"]["workflowRun"]["workflow"]["name"] + + " / " + + item["name"], + "url": item["url"], + "status": status.upper(), + } + ) + else: + # GitHub Status (e.g. 
from Jenkins) + jobs.append( + { + "name": item["context"], + "url": item["targetUrl"], + "status": item["state"].upper(), + } + ) + + logging.info(f"Found CI jobs for {self.head_commit()['oid']} {js(jobs)}") + return jobs + + def reviews(self) -> List[Review]: + return self.raw["reviews"]["nodes"] + + def head_commit_reviews(self) -> List[Review]: + """ + Find reviews associated with the head commit + """ + commits_to_review_status: Dict[str, List[Review]] = {} + + for review in self.reviews(): + if not review["authorCanPushToRepository"]: + # ignore reviews from non-committers + continue + + oid = review["commit"]["oid"] + if oid in commits_to_review_status: + commits_to_review_status[oid].append(review) + else: + commits_to_review_status[oid] = [review] + + # Only use the data for the head commit of the PR + head_reviews = commits_to_review_status.get(self.head_oid(), []) + return head_reviews + + def fetch_data(self): + """ + Fetch the data for this PR from GitHub + """ + return self.github.graphql( + query=PR_QUERY, + variables={ + "owner": self.owner, + "name": self.repo_name, + "number": self.number, + }, + )["data"]["repository"]["pullRequest"] + + def comment(self, text: str) -> None: + """ + Leave the comment 'text' on this PR + """ + logging.info(f"Commenting:\n{text}") + # TODO: Update latest comment in-place if there has been no activity + data = {"body": text} + url = f"issues/{self.number}/comments" + if self.dry_run: + logging.info( + f"Dry run, would have commented on url={url} commenting with data={js(data)}" + ) + return + + self.github.post(url, data=data) + + def state(self) -> str: + """ + PR state (OPEN, CLOSED, MERGED, etc) + """ + return self.raw["state"] + + def lint_commit_message(self, subject: str, body: str) -> bool: + # TODO: NYI (Add rules as decided in https://discuss.tvm.apache.org/t/commit-message-guideline/12334) + return True + + def processed_body(self) -> str: + body = self.raw["body"].strip().replace("\r", "") + body = body.replace( + "Thanks for contributing to TVM! Please refer to guideline https://tvm.apache.org/docs/contribute/ for useful information and tips. 
After the pull request is submitted, please request code reviews from [Reviewers](https://github.com/apache/incubator-tvm/blob/master/CONTRIBUTORS.md#reviewers) by @ them in the pull request thread.", + "", + ) + return body + + def body_with_co_authors(self) -> str: + """ + Add 'Co-authored-by' strings to the PR body based on the prior commits + in the PR + """ + body = self.processed_body() + author_lines = self.co_authors() + logging.info(f"Found co-authors: author_lines={author_lines}") + for author_line in author_lines: + if author_line not in body: + # If the line isn't already in the PR body (it could have been + # added manually), put it in + body = f"{body}\n\nCo-authored-by: {author_line}" + + return body + + def merge(self) -> None: + """ + Request a merge of this PR via the GitHub API + """ + url = f"pulls/{self.number}/merge" + + title = self.raw["title"] + body = self.body_with_co_authors() + self.lint_commit_message(title, body) + logging.info(f"Full commit:\n{title}\n\n{body}") + + data = { + "commit_title": title, + "commit_message": body, + # The SHA is necessary in case there was an update right when this + # script ran, GitHub will sort out who won + "sha": self.head_oid(), + "merge_method": "squash", + } + if self.dry_run: + logging.info(f"Dry run, would have merged with url={url} and data={js(data)}") + return + + self.github.put(url, data=data) + + def merge_requested(self) -> bool: + """ + Check if this PR has had a merge requested + """ + merge_commands = [ + "merge", + "merge this", + "merge this pr", + ] + cancel_commands = [ + "cancel", + "cancel merge", + "cancel the merge", + "stop", + "stop merge", + "stop the merge", + ] + + def parse_action(s: str) -> Optional[str]: + if any(f"@tvm-bot {c}" in s for c in merge_commands): + return "merge" + + if any(f"@tvm-bot {c}" in s for c in cancel_commands): + return "cancel" + + return None + + # Check regular comments + all_comments = [] + for comment in self.raw["comments"]["nodes"]: + all_comments.append((comment["updatedAt"], comment["body"])) + + # Check top-level review comments + for review in self.reviews(): + all_comments.append((review["updatedAt"], review["body"])) + + all_comments = sorted(all_comments, key=lambda x: x[0]) + actions = [parse_action(body) for _, body in all_comments] + logging.info(f"Found these tvm-bot actions: {actions}") + actions = [a for a in actions if a is not None] + + if len(actions) == 0: + return False + + return actions[-1] == "merge" + + def merge_if_passed_checks(self): + # NEUTRAL is GitHub Action's way of saying cancelled + failed_ci_jobs = [ + job + for job in self.ci_jobs() + if job["status"] not in {"SUCCESS", "SUCCESSFUL", "NEUTRAL", "SKIPPED"} + ] + all_ci_passed = False + has_one_approval = False + if len(failed_ci_jobs) > 0: + failed_jobs_msg = "\n".join( + [f" * [{job['name']} (`{job['status']}`)]({job['url']})" for job in failed_ci_jobs] + ) + self.comment( + f"Cannot merge, these CI jobs are not successful on {self.head_oid()}:\n{failed_jobs_msg}" + ) + return + else: + all_ci_passed = True + + head_commit_reviews = self.head_commit_reviews() + for review in head_commit_reviews: + if review["state"] == "CHANGES_REQUESTED": + self.comment( + f"Cannot merge, found [this review]({review['url']}) on {self.head_oid()} with changes requested" + ) + return + + if review["state"] == "APPROVED": + has_one_approval = True + logging.info(f"Found approving review: {js(review)}") + + if has_one_approval and all_ci_passed: + self.merge() + elif not has_one_approval: + 
self.comment( + f"Cannot merge, did not find any approving reviews from users with write access on {self.head_oid()}" + ) + return + elif not all_ci_passed: + self.comment(f"Cannot merge, CI did not pass on on {self.head_oid()}") + return + + +if __name__ == "__main__": + help = "Automatically tag people based on PR / issue labels" + parser = argparse.ArgumentParser(description=help) + parser.add_argument("--remote", default="origin", help="ssh remote to parse") + parser.add_argument("--pr", required=True, help="pr number to check") + parser.add_argument("--run-url", required=True, help="workflow run URL") + parser.add_argument("--testing-pr-json", help="(testing only) manual data for testing") + parser.add_argument( + "--dry-run", + action="store_true", + default=False, + help="run but don't send any request to GitHub", + ) + args = parser.parse_args() + init_log() + + remote = git(["config", "--get", f"remote.{args.remote}.url"]) + logging.info(f"Using remote remote={remote}") + owner, repo = parse_remote(remote) + + if args.pr.strip() == "": + logging.info("No PR number passed") + exit(0) + + logging.info(f"Checking owner={owner} repo={repo}") + if args.testing_pr_json: + pr = PR( + number=int(args.pr), + owner=owner, + repo=repo, + dry_run=args.dry_run, + raw_data=json.loads(args.testing_pr_json), + ) + else: + pr = PR(number=int(args.pr), owner=owner, repo=repo, dry_run=args.dry_run) + + state = pr.state() + + if state != "OPEN": + logging.info(f"Ignoring event on PR, state was not OPEN, instead was state={state}") + exit(0) + + if pr.merge_requested(): + try: + pr.merge_if_passed_checks() + except Exception as e: + if not args.dry_run: + msg = traceback.format_exc() + pr.comment( + f"Failed to process merge request in {args.run_url}\n\n
<details>\n\n```\n{msg}\n```\n\n</details>
" + ) + raise e + else: + logging.info("No merge requested, exiting") From 5d4dbc002344852b9d792527ebb3a882c0ae0b3c Mon Sep 17 00:00:00 2001 From: driazati Date: Tue, 12 Apr 2022 14:58:55 -0700 Subject: [PATCH 2/4] Address comments --- .../ci/sample_prs/pr10786-missing-job.json | 123 ++++++++++++++++++ .../ci/sample_prs/pr10786-oldreview.json | 2 +- tests/python/ci/test_mergebot.py | 110 +++++++++------- tests/scripts/github_mergebot.py | 23 +++- 4 files changed, 206 insertions(+), 52 deletions(-) create mode 100644 tests/python/ci/sample_prs/pr10786-missing-job.json diff --git a/tests/python/ci/sample_prs/pr10786-missing-job.json b/tests/python/ci/sample_prs/pr10786-missing-job.json new file mode 100644 index 000000000000..f702dc9e7a37 --- /dev/null +++ b/tests/python/ci/sample_prs/pr10786-missing-job.json @@ -0,0 +1,123 @@ +{ + "title": "[Hexagon] 2-d allocation cleanup", + "body": "- Added device validity check in allocation. HexagonDeviceAPI should only be called for CPU/Hexagon types.\r\n\r\n- Check for \"global.vtcm\" scope instead of \"vtcm\". The ccope of N-d allocations produced by `LowerVtcmAlloc` should be `\"global.vtcm\"`. The previous check allowed unsupported scope such as `\"local.vtcm\"`.\r\n\r\n- Remove `vtcmallocs` entry after calling free. Previously, the vtcm allocation map kept dangling pointers to `HexagonBuffer` objects after they had been freed.\r\n\r\n- Rename N-d alloc and free packed functions. Since most of the similar device functions use snake case, renaming `*.AllocND` to `*.alloc_nd` and `*.FreeND` to `*.free_nd`.\r\n\r\nCo-authored-by: Adam Straw ", + "state": "OPEN", + "comments": { + "pageInfo": { + "hasPreviousPage": false + }, + "nodes": [] + }, + "authorCommits": { + "nodes": [ + { + "commit": { + "authors": { + "nodes": [ + { + "name": "Eric Lunderberg", + "email": "elunderberg@octoml.ai" + }, + { + "name": "Adam Straw", + "email": "astraw@octoml.ai" + } + ] + } + } + } + ] + }, + "commits": { + "nodes": [ + { + "commit": { + "oid": "6f04bcf57d07f915a98fd91178f04d9e92a09fcd", + "statusCheckRollup": { + "contexts": { + "pageInfo": { + "hasNextPage": false + }, + "nodes": [ + { + "name": "MacOS", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "CI" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/5694945392" + }, + { + "name": "cc-reviewers", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "PR" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/5694945029" + }, + { + "name": "tag-teams", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "Teams" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/5694945030" + }, + { + "name": "Windows", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "CI" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/5694945524" + }, + { + "state": "SUCCESS", + "context": "tvm-ci/definitely-not-pr-merge", + "targetUrl": "https://ci.tlcpack.ai/job/tvm/job/PR-10786/1/display/redirect" + } + ] + } + } + } + } + ] + }, + "reviewDecision": "APPROVED", + "reviews": { + "pageInfo": { + "hasPreviousPage": false + }, + "nodes": [ + { + "body": "@tvm-bot merge", + "updatedAt": "2022-03-25T22:13:50Z", + "authorCanPushToRepository": true, + "commit": { + "oid": "6f04bcf57d07f915a98fd91178f04d9e92a09fcd" + }, + "state": "APPROVED" + } + ] + } +} \ No 
newline at end of file diff --git a/tests/python/ci/sample_prs/pr10786-oldreview.json b/tests/python/ci/sample_prs/pr10786-oldreview.json index a5e226f6403e..52abf7a30d7a 100644 --- a/tests/python/ci/sample_prs/pr10786-oldreview.json +++ b/tests/python/ci/sample_prs/pr10786-oldreview.json @@ -114,7 +114,7 @@ "updatedAt": "2022-03-25T22:13:50Z", "authorCanPushToRepository": true, "commit": { - "oid": "6f24bcf57d07f915a98fd91178f04d9e92a09fcd" + "oid": "abc12345" }, "state": "APPROVED" } diff --git a/tests/python/ci/test_mergebot.py b/tests/python/ci/test_mergebot.py index c62bde7c0c00..0dafb2a31f44 100644 --- a/tests/python/ci/test_mergebot.py +++ b/tests/python/ci/test_mergebot.py @@ -35,60 +35,72 @@ def run(self, *args): raise RuntimeError(f"git command failed: '{args}'") -def test_mergebot(tmpdir_factory): - mergebot_script = REPO_ROOT / "tests" / "scripts" / "github_mergebot.py" - test_json_dir = Path(__file__).resolve().parent / "sample_prs" +test_data = { + "successful-merge": { + "number": 10786, + "filename": "pr10786-merges.json", + "expected": "Dry run, would have merged with url=pulls/10786/merge", + }, + "no-request": { + "number": 10786, + "filename": "pr10786-nottriggered.json", + "expected": "No merge requested, exiting", + }, + "bad-ci": { + "number": 10786, + "filename": "pr10786-badci.json", + "expected": "Cannot merge, these CI jobs are not successful on", + }, + "old-review": { + "number": 10786, + "filename": "pr10786-oldreview.json", + "expected": "Cannot merge, did not find any approving reviews", + }, + "missing-job": { + "number": 10786, + "filename": "pr10786-missing-job.json", + "expected": "Cannot merge, missing expected jobs", + }, +} - def run(number, filename, expected): - git = TempGit(tmpdir_factory.mktemp("tmp_git_dir")) - git.run("init") - git.run("checkout", "-b", "main") - git.run("remote", "add", "origin", "https://github.com/apache/tvm.git") - with open(test_json_dir / filename) as f: - test_data = json.load(f) - proc = subprocess.run( - [ - str(mergebot_script), - "--pr", - str(number), - "--dry-run", - "--run-url", - "https://example.com", - "--testing-pr-json", - json.dumps(test_data), - ], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - encoding="utf-8", - cwd=git.cwd, - ) - if proc.returncode != 0: - raise RuntimeError(f"Process failed:\nstdout:\n{proc.stdout}\n\nstderr:\n{proc.stderr}") +@pytest.mark.parametrize( + ["number", "filename", "expected"], + [tuple(d.values()) for d in test_data.values()], + ids=test_data.keys(), +) +def test_mergebot(tmpdir_factory, number, filename, expected): + mergebot_script = REPO_ROOT / "tests" / "scripts" / "github_mergebot.py" + test_json_dir = Path(__file__).resolve().parent / "sample_prs" - if expected not in proc.stderr: - raise RuntimeError(f"{proc.stderr}\ndid not contain\n{expected}") + git = TempGit(tmpdir_factory.mktemp("tmp_git_dir")) + git.run("init") + git.run("checkout", "-b", "main") + git.run("remote", "add", "origin", "https://github.com/apache/tvm.git") + with open(test_json_dir / filename) as f: + test_data = json.load(f) - run( - number=10786, - filename="pr10786-merges.json", - expected="Dry run, would have merged with url=pulls/10786/merge", - ) - run( - number=10786, - filename="pr10786-nottriggered.json", - expected="No merge requested, exiting", - ) - run( - number=10786, - filename="pr10786-badci.json", - expected="Cannot merge, these CI jobs failed on", - ) - run( - number=10786, - filename="pr10786-oldreview.json", - expected="Cannot merge, did not find any approving reviews", + 
proc = subprocess.run( + [ + str(mergebot_script), + "--pr", + str(number), + "--dry-run", + "--run-url", + "https://example.com", + "--testing-pr-json", + json.dumps(test_data), + ], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + encoding="utf-8", + cwd=git.cwd, ) + if proc.returncode != 0: + raise RuntimeError(f"Process failed:\nstdout:\n{proc.stdout}\n\nstderr:\n{proc.stderr}") + + if expected not in proc.stderr: + raise RuntimeError(f"{proc.stderr}\ndid not contain\n{expected}") if __name__ == "__main__": diff --git a/tests/scripts/github_mergebot.py b/tests/scripts/github_mergebot.py index a4ce7876195d..8b2e78334325 100755 --- a/tests/scripts/github_mergebot.py +++ b/tests/scripts/github_mergebot.py @@ -150,12 +150,12 @@ def __init__( self.repo_name = repo self.dry_run = dry_run - self.github = GitHubRepo(user=owner, repo=repo, token=os.environ["GITHUB_TOKEN"]) - if dry_run and raw_data: # In test mode there is no need to fetch anything self.raw = raw_data + self.github = None else: + self.github = GitHubRepo(user=owner, repo=repo, token=os.environ["GITHUB_TOKEN"]) if os.getenv("DEBUG", "0") == "1": # For local runs fill in the requested data but cache it for # later use @@ -421,6 +421,25 @@ def merge_if_passed_checks(self): else: all_ci_passed = True + # Map of job name: has seen in completed jobs + seen_expected_jobs = { + "tvm-ci/pr-merge": False, + } + logging.info(f"Expected to see jobs: {seen_expected_jobs}") + + missing_expected_jobs = [] + for job in self.ci_jobs(): + seen_expected_jobs[job["name"]] = True + + for name, seen in seen_expected_jobs.items(): + if not seen: + missing_expected_jobs.append(name) + + if len(missing_expected_jobs) > 0: + missing_jobs_msg = "\n".join([f" * `{name}`" for name in missing_expected_jobs]) + self.comment(f"Cannot merge, missing expected jobs:\n{missing_jobs_msg}") + return + head_commit_reviews = self.head_commit_reviews() for review in head_commit_reviews: if review["state"] == "CHANGES_REQUESTED": From f49c19c18faeb95489d85f3d506ca259f8dc8f8e Mon Sep 17 00:00:00 2001 From: driazati Date: Mon, 25 Apr 2022 13:58:45 -0700 Subject: [PATCH 3/4] Address comments, add author filter --- .github/workflows/merge.yml | 3 +- tests/python/ci/sample_prs/pr10786-badci.json | 10 +- .../ci/sample_prs/pr10786-invalid-author.json | 129 ++++++++++++++++++ .../python/ci/sample_prs/pr10786-merges.json | 6 + .../ci/sample_prs/pr10786-missing-job.json | 6 + .../ci/sample_prs/pr10786-nottriggered.json | 6 + .../ci/sample_prs/pr10786-oldreview.json | 6 + tests/python/ci/test_mergebot.py | 15 +- tests/scripts/github_mergebot.py | 48 +++++-- 9 files changed, 215 insertions(+), 14 deletions(-) create mode 100644 tests/python/ci/sample_prs/pr10786-invalid-author.json diff --git a/.github/workflows/merge.yml b/.github/workflows/merge.yml index de2fba707fa9..efbada4b00a4 100644 --- a/.github/workflows/merge.yml +++ b/.github/workflows/merge.yml @@ -13,8 +13,7 @@ concurrency: jobs: maybe-merge: - if: github.repository == 'driazati/tvm' - # if: github.repository == 'apache/tvm' # TODO: uncomment after testing + if: github.repository == 'apache/tvm' runs-on: ubuntu-20.04 steps: - uses: actions/checkout@v2 diff --git a/tests/python/ci/sample_prs/pr10786-badci.json b/tests/python/ci/sample_prs/pr10786-badci.json index 274d452dc12d..3f22b1d82fc8 100644 --- a/tests/python/ci/sample_prs/pr10786-badci.json +++ b/tests/python/ci/sample_prs/pr10786-badci.json @@ -2,6 +2,9 @@ "title": "[Hexagon] 2-d allocation cleanup", "body": "- Added device validity check in allocation. 
HexagonDeviceAPI should only be called for CPU/Hexagon types.\r\n\r\n- Check for \"global.vtcm\" scope instead of \"vtcm\". The ccope of N-d allocations produced by `LowerVtcmAlloc` should be `\"global.vtcm\"`. The previous check allowed unsupported scope such as `\"local.vtcm\"`.\r\n\r\n- Remove `vtcmallocs` entry after calling free. Previously, the vtcm allocation map kept dangling pointers to `HexagonBuffer` objects after they had been freed.\r\n\r\n- Rename N-d alloc and free packed functions. Since most of the similar device functions use snake case, renaming `*.AllocND` to `*.alloc_nd` and `*.FreeND` to `*.free_nd`.\r\n\r\nCo-authored-by: Adam Straw ", "state": "OPEN", + "author": { + "login": "Lunderberg" + }, "comments": { "pageInfo": { "hasPreviousPage": false @@ -62,7 +65,7 @@ } }, "status": "COMPLETED", - "conclusion": "FAILED", + "conclusion": "SUCCESS", "url": "https://github.com/apache/tvm/runs/5694945029" }, { @@ -92,7 +95,7 @@ "url": "https://github.com/apache/tvm/runs/5694945524" }, { - "state": "SUCCESS", + "state": "FAILED", "context": "tvm-ci/pr-merge", "targetUrl": "https://ci.tlcpack.ai/job/tvm/job/PR-10786/1/display/redirect" } @@ -116,6 +119,9 @@ "commit": { "oid": "6f04bcf57d07f915a98fd91178f04d9e92a09fcd" }, + "author": { + "login": "kparzysz-quic" + }, "state": "APPROVED" } ] diff --git a/tests/python/ci/sample_prs/pr10786-invalid-author.json b/tests/python/ci/sample_prs/pr10786-invalid-author.json new file mode 100644 index 000000000000..2819e2666165 --- /dev/null +++ b/tests/python/ci/sample_prs/pr10786-invalid-author.json @@ -0,0 +1,129 @@ +{ + "title": "[Hexagon] 2-d allocation cleanup", + "body": "- Added device validity check in allocation. HexagonDeviceAPI should only be called for CPU/Hexagon types.\r\n\r\n- Check for \"global.vtcm\" scope instead of \"vtcm\". The ccope of N-d allocations produced by `LowerVtcmAlloc` should be `\"global.vtcm\"`. The previous check allowed unsupported scope such as `\"local.vtcm\"`.\r\n\r\n- Remove `vtcmallocs` entry after calling free. Previously, the vtcm allocation map kept dangling pointers to `HexagonBuffer` objects after they had been freed.\r\n\r\n- Rename N-d alloc and free packed functions. 
Since most of the similar device functions use snake case, renaming `*.AllocND` to `*.alloc_nd` and `*.FreeND` to `*.free_nd`.\r\n\r\nCo-authored-by: Adam Straw ", + "state": "OPEN", + "author": { + "login": "Lunderberg" + }, + "comments": { + "pageInfo": { + "hasPreviousPage": false + }, + "nodes": [] + }, + "authorCommits": { + "nodes": [ + { + "commit": { + "authors": { + "nodes": [ + { + "name": "Eric Lunderberg", + "email": "elunderberg@octoml.ai" + }, + { + "name": "Adam Straw", + "email": "astraw@octoml.ai" + } + ] + } + } + } + ] + }, + "commits": { + "nodes": [ + { + "commit": { + "oid": "6f04bcf57d07f915a98fd91178f04d9e92a09fcd", + "statusCheckRollup": { + "contexts": { + "pageInfo": { + "hasNextPage": false + }, + "nodes": [ + { + "name": "MacOS", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "CI" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/5694945392" + }, + { + "name": "cc-reviewers", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "PR" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/5694945029" + }, + { + "name": "tag-teams", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "Teams" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/5694945030" + }, + { + "name": "Windows", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "CI" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/5694945524" + }, + { + "state": "SUCCESS", + "context": "tvm-ci/pr-merge", + "targetUrl": "https://ci.tlcpack.ai/job/tvm/job/PR-10786/1/display/redirect" + } + ] + } + } + } + } + ] + }, + "reviewDecision": "APPROVED", + "reviews": { + "pageInfo": { + "hasPreviousPage": false + }, + "nodes": [ + { + "body": "@tvm-bot merge", + "updatedAt": "2022-03-25T22:13:50Z", + "authorCanPushToRepository": false, + "commit": { + "oid": "6f04bcf57d07f915a98fd91178f04d9e92a09fcd" + }, + "author": { + "login": "kparzysz-quic" + }, + "state": "APPROVED" + } + ] + } +} \ No newline at end of file diff --git a/tests/python/ci/sample_prs/pr10786-merges.json b/tests/python/ci/sample_prs/pr10786-merges.json index 6ee9864466ac..a98f5ec130c5 100644 --- a/tests/python/ci/sample_prs/pr10786-merges.json +++ b/tests/python/ci/sample_prs/pr10786-merges.json @@ -2,6 +2,9 @@ "title": "[Hexagon] 2-d allocation cleanup", "body": "- Added device validity check in allocation. HexagonDeviceAPI should only be called for CPU/Hexagon types.\r\n\r\n- Check for \"global.vtcm\" scope instead of \"vtcm\". The ccope of N-d allocations produced by `LowerVtcmAlloc` should be `\"global.vtcm\"`. The previous check allowed unsupported scope such as `\"local.vtcm\"`.\r\n\r\n- Remove `vtcmallocs` entry after calling free. Previously, the vtcm allocation map kept dangling pointers to `HexagonBuffer` objects after they had been freed.\r\n\r\n- Rename N-d alloc and free packed functions. 
Since most of the similar device functions use snake case, renaming `*.AllocND` to `*.alloc_nd` and `*.FreeND` to `*.free_nd`.\r\n\r\nCo-authored-by: Adam Straw ", "state": "OPEN", + "author": { + "login": "Lunderberg" + }, "comments": { "pageInfo": { "hasPreviousPage": false @@ -116,6 +119,9 @@ "commit": { "oid": "6f04bcf57d07f915a98fd91178f04d9e92a09fcd" }, + "author": { + "login": "kparzysz-quic" + }, "state": "APPROVED" } ] diff --git a/tests/python/ci/sample_prs/pr10786-missing-job.json b/tests/python/ci/sample_prs/pr10786-missing-job.json index f702dc9e7a37..17de7b53928a 100644 --- a/tests/python/ci/sample_prs/pr10786-missing-job.json +++ b/tests/python/ci/sample_prs/pr10786-missing-job.json @@ -2,6 +2,9 @@ "title": "[Hexagon] 2-d allocation cleanup", "body": "- Added device validity check in allocation. HexagonDeviceAPI should only be called for CPU/Hexagon types.\r\n\r\n- Check for \"global.vtcm\" scope instead of \"vtcm\". The ccope of N-d allocations produced by `LowerVtcmAlloc` should be `\"global.vtcm\"`. The previous check allowed unsupported scope such as `\"local.vtcm\"`.\r\n\r\n- Remove `vtcmallocs` entry after calling free. Previously, the vtcm allocation map kept dangling pointers to `HexagonBuffer` objects after they had been freed.\r\n\r\n- Rename N-d alloc and free packed functions. Since most of the similar device functions use snake case, renaming `*.AllocND` to `*.alloc_nd` and `*.FreeND` to `*.free_nd`.\r\n\r\nCo-authored-by: Adam Straw ", "state": "OPEN", + "author": { + "login": "Lunderberg" + }, "comments": { "pageInfo": { "hasPreviousPage": false @@ -116,6 +119,9 @@ "commit": { "oid": "6f04bcf57d07f915a98fd91178f04d9e92a09fcd" }, + "author": { + "login": "kparzysz-quic" + }, "state": "APPROVED" } ] diff --git a/tests/python/ci/sample_prs/pr10786-nottriggered.json b/tests/python/ci/sample_prs/pr10786-nottriggered.json index be14c6d1d9a5..8cafdfaefdb6 100644 --- a/tests/python/ci/sample_prs/pr10786-nottriggered.json +++ b/tests/python/ci/sample_prs/pr10786-nottriggered.json @@ -2,6 +2,9 @@ "title": "[Hexagon] 2-d allocation cleanup", "body": "- Added device validity check in allocation. HexagonDeviceAPI should only be called for CPU/Hexagon types.\r\n\r\n- Check for \"global.vtcm\" scope instead of \"vtcm\". The ccope of N-d allocations produced by `LowerVtcmAlloc` should be `\"global.vtcm\"`. The previous check allowed unsupported scope such as `\"local.vtcm\"`.\r\n\r\n- Remove `vtcmallocs` entry after calling free. Previously, the vtcm allocation map kept dangling pointers to `HexagonBuffer` objects after they had been freed.\r\n\r\n- Rename N-d alloc and free packed functions. Since most of the similar device functions use snake case, renaming `*.AllocND` to `*.alloc_nd` and `*.FreeND` to `*.free_nd`.\r\n\r\nCo-authored-by: Adam Straw ", "state": "OPEN", + "author": { + "login": "Lunderberg" + }, "comments": { "pageInfo": { "hasPreviousPage": false @@ -116,6 +119,9 @@ "commit": { "oid": "6f04bcf57d07f915a98fd91178f04d9e92a09fcd" }, + "author": { + "login": "kparzysz-quic" + }, "state": "APPROVED" } ] diff --git a/tests/python/ci/sample_prs/pr10786-oldreview.json b/tests/python/ci/sample_prs/pr10786-oldreview.json index 52abf7a30d7a..0a4103361a56 100644 --- a/tests/python/ci/sample_prs/pr10786-oldreview.json +++ b/tests/python/ci/sample_prs/pr10786-oldreview.json @@ -2,6 +2,9 @@ "title": "[Hexagon] 2-d allocation cleanup", "body": "- Added device validity check in allocation. 
HexagonDeviceAPI should only be called for CPU/Hexagon types.\r\n\r\n- Check for \"global.vtcm\" scope instead of \"vtcm\". The ccope of N-d allocations produced by `LowerVtcmAlloc` should be `\"global.vtcm\"`. The previous check allowed unsupported scope such as `\"local.vtcm\"`.\r\n\r\n- Remove `vtcmallocs` entry after calling free. Previously, the vtcm allocation map kept dangling pointers to `HexagonBuffer` objects after they had been freed.\r\n\r\n- Rename N-d alloc and free packed functions. Since most of the similar device functions use snake case, renaming `*.AllocND` to `*.alloc_nd` and `*.FreeND` to `*.free_nd`.\r\n\r\nCo-authored-by: Adam Straw ", "state": "OPEN", + "author": { + "login": "Lunderberg" + }, "comments": { "pageInfo": { "hasPreviousPage": false @@ -116,6 +119,9 @@ "commit": { "oid": "abc12345" }, + "author": { + "login": "kparzysz-quic" + }, "state": "APPROVED" } ] diff --git a/tests/python/ci/test_mergebot.py b/tests/python/ci/test_mergebot.py index 0dafb2a31f44..571f2c2d7ea2 100644 --- a/tests/python/ci/test_mergebot.py +++ b/tests/python/ci/test_mergebot.py @@ -40,36 +40,47 @@ def run(self, *args): "number": 10786, "filename": "pr10786-merges.json", "expected": "Dry run, would have merged with url=pulls/10786/merge", + "detail": "Everything is fine so this PR will merge", }, "no-request": { "number": 10786, "filename": "pr10786-nottriggered.json", "expected": "No merge requested, exiting", + "detail": "A PR for which the mergebot runs but no merge is requested", }, "bad-ci": { "number": 10786, "filename": "pr10786-badci.json", "expected": "Cannot merge, these CI jobs are not successful on", + "detail": "A PR which failed CI and cannot merge", }, "old-review": { "number": 10786, "filename": "pr10786-oldreview.json", "expected": "Cannot merge, did not find any approving reviews", + "detail": "A PR with passing CI and approving reviews on an old commit so it cannot merge", }, "missing-job": { "number": 10786, "filename": "pr10786-missing-job.json", "expected": "Cannot merge, missing expected jobs", + "detail": "PR missing an expected CI job and cannot merge", + }, + "invalid-author": { + "number": 10786, + "filename": "pr10786-invalid-author.json", + "expected": "No merge requested, exiting", + "detail": "Merge requester is not a committer and cannot merge", }, } @pytest.mark.parametrize( - ["number", "filename", "expected"], + ["number", "filename", "expected", "detail"], [tuple(d.values()) for d in test_data.values()], ids=test_data.keys(), ) -def test_mergebot(tmpdir_factory, number, filename, expected): +def test_mergebot(tmpdir_factory, number, filename, expected, detail): mergebot_script = REPO_ROOT / "tests" / "scripts" / "github_mergebot.py" test_json_dir = Path(__file__).resolve().parent / "sample_prs" diff --git a/tests/scripts/github_mergebot.py b/tests/scripts/github_mergebot.py index 8b2e78334325..f5ff78d06e02 100755 --- a/tests/scripts/github_mergebot.py +++ b/tests/scripts/github_mergebot.py @@ -44,11 +44,15 @@ def js(obj: Any) -> str: title body state + author { + login + } comments(last: 100) { pageInfo { hasPreviousPage } nodes { + authorAssociation author { login } @@ -114,6 +118,9 @@ def js(obj: Any) -> str: commit { oid } + author { + login + } state } } @@ -355,6 +362,27 @@ def merge(self) -> None: self.github.put(url, data=data) + def comment_can_merge(self, comment: Dict[str, Any]) -> bool: + """ + Check if a comment was left by the PR author or by a committer + """ + print(comment) + print(self.raw) + if comment["author"]["login"] == 
self.raw["author"]["login"]: + logging.info(f"Comment {comment} was from author and is mergable") + return True + + if comment.get("authorAssociation", "") == "CONTRIBUTOR": + logging.info(f"Comment {comment} was from committer commment and is mergable") + return True + + if comment.get("authorCanPushToRepository", False): + logging.info(f"Comment {comment} was from a committer review comment and is mergable") + return True + + logging.info(f"Comment {comment} was not from author or committers and is not mergable") + return False + def merge_requested(self) -> bool: """ Check if this PR has had a merge requested @@ -373,11 +401,15 @@ def merge_requested(self) -> bool: "stop the merge", ] - def parse_action(s: str) -> Optional[str]: - if any(f"@tvm-bot {c}" in s for c in merge_commands): + def parse_action(comment: Dict[str, Any]) -> Optional[str]: + if not self.comment_can_merge(comment): + return None + + body = comment["body"] + if any(f"@tvm-bot {c}" in body for c in merge_commands): return "merge" - if any(f"@tvm-bot {c}" in s for c in cancel_commands): + if any(f"@tvm-bot {c}" in body for c in cancel_commands): return "cancel" return None @@ -385,14 +417,14 @@ def parse_action(s: str) -> Optional[str]: # Check regular comments all_comments = [] for comment in self.raw["comments"]["nodes"]: - all_comments.append((comment["updatedAt"], comment["body"])) + all_comments.append(comment) # Check top-level review comments for review in self.reviews(): - all_comments.append((review["updatedAt"], review["body"])) + all_comments.append(review) - all_comments = sorted(all_comments, key=lambda x: x[0]) - actions = [parse_action(body) for _, body in all_comments] + all_comments = sorted(all_comments, key=lambda comment: comment["updatedAt"]) + actions = [parse_action(comment) for comment in all_comments] logging.info(f"Found these tvm-bot actions: {actions}") actions = [a for a in actions if a is not None] @@ -465,7 +497,7 @@ def merge_if_passed_checks(self): if __name__ == "__main__": - help = "Automatically tag people based on PR / issue labels" + help = "Automatically tag people based on PR / issue labels (for local development, setting DEBUG=1 will fetch and cache a PR)" parser = argparse.ArgumentParser(description=help) parser.add_argument("--remote", default="origin", help="ssh remote to parse") parser.add_argument("--pr", required=True, help="pr number to check") From 9b74a5a37064b16e8840ac007e8e594dd7ab4c05 Mon Sep 17 00:00:00 2001 From: driazati Date: Wed, 11 May 2022 13:00:56 -0700 Subject: [PATCH 4/4] Address comments: test cases, code cleanups --- tests/python/ci/sample_prs/pr10786-badci.json | 2 +- .../sample_prs/pr10786-changes-requested.json | 130 +++++++++++++++ .../ci/sample_prs/pr10786-co-authors.json | 129 ++++++++++++++ .../ci/sample_prs/pr10786-invalid-author.json | 2 +- .../python/ci/sample_prs/pr10786-merges.json | 2 +- .../ci/sample_prs/pr10786-missing-job.json | 2 +- .../ci/sample_prs/pr10786-nottriggered.json | 2 +- .../ci/sample_prs/pr10786-oldreview.json | 2 +- .../pr11244-unauthorized-comment.json | 103 ++++++++++++ .../ci/sample_prs/pr11267-no-review.json | 142 ++++++++++++++++ .../ci/sample_prs/pr11276-no-review.json | 157 ++++++++++++++++++ tests/python/ci/test_mergebot.py | 24 +++ tests/scripts/github_mergebot.py | 100 +++++------ 13 files changed, 742 insertions(+), 55 deletions(-) create mode 100644 tests/python/ci/sample_prs/pr10786-changes-requested.json create mode 100644 tests/python/ci/sample_prs/pr10786-co-authors.json create mode 100644 
tests/python/ci/sample_prs/pr11244-unauthorized-comment.json create mode 100644 tests/python/ci/sample_prs/pr11267-no-review.json create mode 100644 tests/python/ci/sample_prs/pr11276-no-review.json diff --git a/tests/python/ci/sample_prs/pr10786-badci.json b/tests/python/ci/sample_prs/pr10786-badci.json index 3f22b1d82fc8..b49899b86bca 100644 --- a/tests/python/ci/sample_prs/pr10786-badci.json +++ b/tests/python/ci/sample_prs/pr10786-badci.json @@ -96,7 +96,7 @@ }, { "state": "FAILED", - "context": "tvm-ci/pr-merge", + "context": "tvm-ci/pr-head", "targetUrl": "https://ci.tlcpack.ai/job/tvm/job/PR-10786/1/display/redirect" } ] diff --git a/tests/python/ci/sample_prs/pr10786-changes-requested.json b/tests/python/ci/sample_prs/pr10786-changes-requested.json new file mode 100644 index 000000000000..46b13a7f6c6c --- /dev/null +++ b/tests/python/ci/sample_prs/pr10786-changes-requested.json @@ -0,0 +1,130 @@ +{ + "title": "[Hexagon] 2-d allocation cleanup", + "body": "- Added device validity check in allocation. HexagonDeviceAPI should only be called for CPU/Hexagon types.\r\n\r\n- Check for \"global.vtcm\" scope instead of \"vtcm\". The ccope of N-d allocations produced by `LowerVtcmAlloc` should be `\"global.vtcm\"`. The previous check allowed unsupported scope such as `\"local.vtcm\"`.\r\n\r\n- Remove `vtcmallocs` entry after calling free. Previously, the vtcm allocation map kept dangling pointers to `HexagonBuffer` objects after they had been freed.\r\n\r\n- Rename N-d alloc and free packed functions. Since most of the similar device functions use snake case, renaming `*.AllocND` to `*.alloc_nd` and `*.FreeND` to `*.free_nd`.\r\n\r\nCo-authored-by: Adam Straw ", + "state": "OPEN", + "author": { + "login": "Lunderberg" + }, + "comments": { + "pageInfo": { + "hasPreviousPage": false + }, + "nodes": [] + }, + "authorCommits": { + "nodes": [ + { + "commit": { + "authors": { + "nodes": [ + { + "name": "Eric Lunderberg", + "email": "elunderberg@octoml.ai" + }, + { + "name": "Adam Straw", + "email": "astraw@octoml.ai" + } + ] + } + } + } + ] + }, + "commits": { + "nodes": [ + { + "commit": { + "oid": "6f04bcf57d07f915a98fd91178f04d9e92a09fcd", + "statusCheckRollup": { + "contexts": { + "pageInfo": { + "hasNextPage": false + }, + "nodes": [ + { + "name": "MacOS", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "CI" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/5694945392" + }, + { + "name": "cc-reviewers", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "PR" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/5694945029" + }, + { + "name": "tag-teams", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "Teams" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/5694945030" + }, + { + "name": "Windows", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "CI" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/5694945524" + }, + { + "state": "SUCCESS", + "context": "tvm-ci/pr-head", + "targetUrl": "https://ci.tlcpack.ai/job/tvm/job/PR-10786/1/display/redirect" + } + ] + } + } + } + } + ] + }, + "reviewDecision": "CHANGES_REQUESTED", + "reviews": { + "pageInfo": { + "hasPreviousPage": false + }, + "nodes": [ + { + "body": "@tvm-bot merge", + "updatedAt": "2022-03-25T22:13:50Z", + "url": 
"https://github.com/apache/tvm/pull/10786#pullrequestreview-922186273", + "authorCanPushToRepository": true, + "commit": { + "oid": "6f04bcf57d07f915a98fd91178f04d9e92a09fcd" + }, + "author": { + "login": "kparzysz-quic" + }, + "state": "CHANGES_REQUESTED" + } + ] + } +} \ No newline at end of file diff --git a/tests/python/ci/sample_prs/pr10786-co-authors.json b/tests/python/ci/sample_prs/pr10786-co-authors.json new file mode 100644 index 000000000000..a660c9d9b214 --- /dev/null +++ b/tests/python/ci/sample_prs/pr10786-co-authors.json @@ -0,0 +1,129 @@ +{ + "title": "[Hexagon] 2-d allocation cleanup", + "body": "- Added device validity check in allocation. HexagonDeviceAPI should only be called for CPU/Hexagon types.\r\n\r\n- Check for \"global.vtcm\" scope instead of \"vtcm\". The ccope of N-d allocations produced by `LowerVtcmAlloc` should be `\"global.vtcm\"`. The previous check allowed unsupported scope such as `\"local.vtcm\"`.\r\n\r\n- Remove `vtcmallocs` entry after calling free. Previously, the vtcm allocation map kept dangling pointers to `HexagonBuffer` objects after they had been freed.\r\n\r\n- Rename N-d alloc and free packed functions. Since most of the similar device functions use snake case, renaming `*.AllocND` to `*.alloc_nd` and `*.FreeND` to `*.free_nd`.\r\n\r\nCo-authored-by: Adam Straw ", + "state": "OPEN", + "author": { + "login": "Lunderberg" + }, + "comments": { + "pageInfo": { + "hasPreviousPage": false + }, + "nodes": [] + }, + "authorCommits": { + "nodes": [ + { + "commit": { + "authors": { + "nodes": [ + { + "name": "Eric Lunderberg", + "email": "elunderberg@octoml.ai" + }, + { + "name": "Some One", + "email": "someone@email.com" + } + ] + } + } + } + ] + }, + "commits": { + "nodes": [ + { + "commit": { + "oid": "6f04bcf57d07f915a98fd91178f04d9e92a09fcd", + "statusCheckRollup": { + "contexts": { + "pageInfo": { + "hasNextPage": false + }, + "nodes": [ + { + "name": "MacOS", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "CI" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/5694945392" + }, + { + "name": "cc-reviewers", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "PR" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/5694945029" + }, + { + "name": "tag-teams", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "Teams" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/5694945030" + }, + { + "name": "Windows", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "CI" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/5694945524" + }, + { + "state": "SUCCESS", + "context": "tvm-ci/pr-head", + "targetUrl": "https://ci.tlcpack.ai/job/tvm/job/PR-10786/1/display/redirect" + } + ] + } + } + } + } + ] + }, + "reviewDecision": "APPROVED", + "reviews": { + "pageInfo": { + "hasPreviousPage": false + }, + "nodes": [ + { + "body": "@tvm-bot merge", + "updatedAt": "2022-03-25T22:13:50Z", + "authorCanPushToRepository": true, + "commit": { + "oid": "6f04bcf57d07f915a98fd91178f04d9e92a09fcd" + }, + "author": { + "login": "kparzysz-quic" + }, + "state": "APPROVED" + } + ] + } +} \ No newline at end of file diff --git a/tests/python/ci/sample_prs/pr10786-invalid-author.json b/tests/python/ci/sample_prs/pr10786-invalid-author.json index 2819e2666165..d19d6dad8a44 100644 --- 
a/tests/python/ci/sample_prs/pr10786-invalid-author.json +++ b/tests/python/ci/sample_prs/pr10786-invalid-author.json @@ -96,7 +96,7 @@ }, { "state": "SUCCESS", - "context": "tvm-ci/pr-merge", + "context": "tvm-ci/pr-head", "targetUrl": "https://ci.tlcpack.ai/job/tvm/job/PR-10786/1/display/redirect" } ] diff --git a/tests/python/ci/sample_prs/pr10786-merges.json b/tests/python/ci/sample_prs/pr10786-merges.json index a98f5ec130c5..673fc753e323 100644 --- a/tests/python/ci/sample_prs/pr10786-merges.json +++ b/tests/python/ci/sample_prs/pr10786-merges.json @@ -96,7 +96,7 @@ }, { "state": "SUCCESS", - "context": "tvm-ci/pr-merge", + "context": "tvm-ci/pr-head", "targetUrl": "https://ci.tlcpack.ai/job/tvm/job/PR-10786/1/display/redirect" } ] diff --git a/tests/python/ci/sample_prs/pr10786-missing-job.json b/tests/python/ci/sample_prs/pr10786-missing-job.json index 17de7b53928a..81be0ebe4795 100644 --- a/tests/python/ci/sample_prs/pr10786-missing-job.json +++ b/tests/python/ci/sample_prs/pr10786-missing-job.json @@ -96,7 +96,7 @@ }, { "state": "SUCCESS", - "context": "tvm-ci/definitely-not-pr-merge", + "context": "tvm-ci/definitely-not-pr-head", "targetUrl": "https://ci.tlcpack.ai/job/tvm/job/PR-10786/1/display/redirect" } ] diff --git a/tests/python/ci/sample_prs/pr10786-nottriggered.json b/tests/python/ci/sample_prs/pr10786-nottriggered.json index 8cafdfaefdb6..11c5976bd6e4 100644 --- a/tests/python/ci/sample_prs/pr10786-nottriggered.json +++ b/tests/python/ci/sample_prs/pr10786-nottriggered.json @@ -96,7 +96,7 @@ }, { "state": "SUCCESS", - "context": "tvm-ci/pr-merge", + "context": "tvm-ci/pr-head", "targetUrl": "https://ci.tlcpack.ai/job/tvm/job/PR-10786/1/display/redirect" } ] diff --git a/tests/python/ci/sample_prs/pr10786-oldreview.json b/tests/python/ci/sample_prs/pr10786-oldreview.json index 0a4103361a56..27ba0e872918 100644 --- a/tests/python/ci/sample_prs/pr10786-oldreview.json +++ b/tests/python/ci/sample_prs/pr10786-oldreview.json @@ -96,7 +96,7 @@ }, { "state": "SUCCESS", - "context": "tvm-ci/pr-merge", + "context": "tvm-ci/pr-head", "targetUrl": "https://ci.tlcpack.ai/job/tvm/job/PR-10786/1/display/redirect" } ] diff --git a/tests/python/ci/sample_prs/pr11244-unauthorized-comment.json b/tests/python/ci/sample_prs/pr11244-unauthorized-comment.json new file mode 100644 index 000000000000..206adc9a9eac --- /dev/null +++ b/tests/python/ci/sample_prs/pr11244-unauthorized-comment.json @@ -0,0 +1,103 @@ +{ + "title": "[CRT runtime] Added functions TVMPlatformPreFuncCall and TVMPlatformPostFuncCall", + "body": "See [this thread ](https://discuss.tvm.apache.org/t/crt-add-platform-specific-pre-and-post-function-calls-in-crt-runtime/12723)for an explanation.", + "state": "OPEN", + "author": { + "login": "fPecc" + }, + "comments": { + "pageInfo": { + "hasPreviousPage": false + }, + "nodes": [ + { + "authorAssociation": "NONE", + "author": { + "login": "abc" + }, + "updatedAt": "2022-05-09T13:39:04Z", + "body": "@tvm-bot merge" + }, + { + "authorAssociation": "CONTRIBUTOR", + "author": { + "login": "areusch" + }, + "updatedAt": "2022-05-11T19:22:01Z", + "body": "i commented on the discuss forum thread. let's resolve there and then continue this PR." 
+ } + ] + }, + "authorCommits": { + "nodes": [ + { + "commit": { + "authors": { + "nodes": [ + { + "name": "Federico Peccia", + "email": "peccia@fzi.de" + } + ] + } + } + } + ] + }, + "commits": { + "nodes": [ + { + "commit": { + "oid": "79d355c5f837b3bdadb5d25b2a5d0d2802783ae2", + "statusCheckRollup": { + "contexts": { + "pageInfo": { + "hasNextPage": false + }, + "nodes": [ + { + "name": "cc-reviewers", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "PR" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/6352791017" + }, + { + "name": "tag-teams", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "Teams" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/6352791014" + }, + { + "state": "ERROR", + "context": "tvm-ci/pr-head", + "targetUrl": "https://ci.tlcpack.ai/job/tvm/job/PR-11244/1/display/redirect" + } + ] + } + } + } + } + ] + }, + "reviewDecision": "REVIEW_REQUIRED", + "reviews": { + "pageInfo": { + "hasPreviousPage": false + }, + "nodes": [] + } +} \ No newline at end of file diff --git a/tests/python/ci/sample_prs/pr11267-no-review.json b/tests/python/ci/sample_prs/pr11267-no-review.json new file mode 100644 index 000000000000..31577671f0b6 --- /dev/null +++ b/tests/python/ci/sample_prs/pr11267-no-review.json @@ -0,0 +1,142 @@ +{ + "title": "[ci][docker] Use sccache everywhere by default", + "body": "This adds `/opt/sccache` to the PATH of each of the CI docker images so when cmake looks for a C compiler it will pick up the sccache wrapper by default. This fixes some issues where compiler invocations weren't being run though sccache. With this approach the invoker doesn't need to do anything specific to set up sccache.\n\nThis will require a follow up PR to update the Docker images and remove some of the sccache logic in `task_build.py`\n\n\n\ncc @Mousius @areusch", + "state": "OPEN", + "author": { + "login": "driazati" + }, + "comments": { + "pageInfo": { + "hasPreviousPage": false + }, + "nodes": [ + { + "authorAssociation": "CONTRIBUTOR", + "author": { + "login": "areusch" + }, + "updatedAt": "2022-05-11T16:54:32Z", + "body": "just confirming--we can disable this when doing a local build, correct? what's the mechanism by which we do that?" 
+ }, + { + "authorAssociation": "COLLABORATOR", + "author": { + "login": "driazati" + }, + "updatedAt": "2022-05-11T18:46:54Z", + "body": "@tvm-bot merge" + } + ] + }, + "authorCommits": { + "nodes": [ + { + "commit": { + "authors": { + "nodes": [ + { + "name": "driazati", + "email": "driazati@users.noreply.github.com" + } + ] + } + } + } + ] + }, + "commits": { + "nodes": [ + { + "commit": { + "oid": "bb7f51d3e0fd50997012dfcce3c9b2b852cd3136", + "statusCheckRollup": { + "contexts": { + "pageInfo": { + "hasNextPage": false + }, + "nodes": [ + { + "name": "MacOS", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "CI" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/6377784092" + }, + { + "name": "cc-reviewers", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "PR" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/6377778488" + }, + { + "name": "cc-reviewers", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "PR" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/6390508806" + }, + { + "name": "tag-teams", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "Teams" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/6390511833" + }, + { + "name": "Windows", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "CI" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/6377784248" + }, + { + "state": "SUCCESS", + "context": "tvm-ci/pr-head", + "targetUrl": "https://ci.tlcpack.ai/job/tvm/job/PR-11267/2/display/redirect" + } + ] + } + } + } + } + ] + }, + "reviewDecision": "REVIEW_REQUIRED", + "reviews": { + "pageInfo": { + "hasPreviousPage": false + }, + "nodes": [] + } +} \ No newline at end of file diff --git a/tests/python/ci/sample_prs/pr11276-no-review.json b/tests/python/ci/sample_prs/pr11276-no-review.json new file mode 100644 index 000000000000..3f8459eb00f7 --- /dev/null +++ b/tests/python/ci/sample_prs/pr11276-no-review.json @@ -0,0 +1,157 @@ +{ + "title": "[COMMUNITY] mikepapadim -> Reviewer", + "body": "Please join us to welcome Michalis Papadimitriou (@mikepapadim) as a new reviewer to TVM. 
Michalis has contributed a lot to BYOC and TensorRT backend.\r\n\r\n- [Commits History](https://github.com/apache/tvm/commits?author=mikepapadim)\r\n- [Code Review](https://github.com/apache/tvm/pulls?utf8=%E2%9C%93&q=reviewed-by:mikepapadim)\r\n- [Community Forum Summary](https://github.com/apache/tvm/commits?author=mikepapadim)", + "state": "OPEN", + "author": { + "login": "ZihengJiang" + }, + "comments": { + "pageInfo": { + "hasPreviousPage": false + }, + "nodes": [] + }, + "authorCommits": { + "nodes": [ + { + "commit": { + "authors": { + "nodes": [ + { + "name": "ZihengJiang", + "email": "ziheng@apache.org" + } + ] + } + } + } + ] + }, + "commits": { + "nodes": [ + { + "commit": { + "oid": "96075744cc687caafc131361d006c5967edddbc6", + "statusCheckRollup": { + "contexts": { + "pageInfo": { + "hasNextPage": false + }, + "nodes": [ + { + "name": "MacOS", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "CI" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/6391733373" + }, + { + "name": "cc-reviewers", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "PR" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/6391732791" + }, + { + "name": "cc-reviewers", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "PR" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/6391754960" + }, + { + "name": "tag-teams", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "Teams" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/6391732788" + }, + { + "name": "tag-teams", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "Teams" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/6391754947" + }, + { + "name": "Windows", + "checkSuite": { + "workflowRun": { + "workflow": { + "name": "CI" + } + } + }, + "status": "COMPLETED", + "conclusion": "SUCCESS", + "url": "https://github.com/apache/tvm/runs/6391733127" + }, + { + "state": "SUCCESS", + "context": "tvm-ci/branch", + "targetUrl": "https://ci.tlcpack.ai/job/tvm/job/ziheng%252Fcommunity/1/display/redirect" + }, + { + "state": "SUCCESS", + "context": "tvm-ci/pr-head", + "targetUrl": "https://ci.tlcpack.ai/job/tvm/job/PR-11276/1/display/redirect" + } + ] + } + } + } + } + ] + }, + "reviewDecision": "APPROVED", + "reviews": { + "pageInfo": { + "hasPreviousPage": false + }, + "nodes": [ + { + "body": "", + "updatedAt": "2022-05-11T16:50:16Z", + "url": "https://github.com/apache/tvm/pull/11276#pullrequestreview-969701502", + "authorCanPushToRepository": true, + "commit": { + "oid": "96075744cc687caafc131361d006c5967edddbc6" + }, + "author": { + "login": "tqchen" + }, + "state": "APPROVED" + } + ] + } +} \ No newline at end of file diff --git a/tests/python/ci/test_mergebot.py b/tests/python/ci/test_mergebot.py index 571f2c2d7ea2..fbe9262b0939 100644 --- a/tests/python/ci/test_mergebot.py +++ b/tests/python/ci/test_mergebot.py @@ -72,6 +72,30 @@ def run(self, *args): "expected": "No merge requested, exiting", "detail": "Merge requester is not a committer and cannot merge", }, + "unauthorized-comment": { + "number": 11244, + "filename": "pr11244-unauthorized-comment.json", + "expected": "No merge requested, exiting", + "detail": "Check that a merge comment not from a CONTRIBUTOR is rejected", + }, + "no-review": 
{ + "number": 11267, + "filename": "pr11267-no-review.json", + "expected": "Cannot merge, did not find any approving reviews from users with write access", + "detail": "Check that a merge request without any reviews is rejected", + }, + "changes-requested": { + "number": 10786, + "filename": "pr10786-changes-requested.json", + "expected": "Cannot merge, found [this review]", + "detail": "Check that a merge request with a 'Changes Requested' review on HEAD is rejected", + }, + "co-authors": { + "number": 10786, + "filename": "pr10786-co-authors.json", + "expected": "Co-authored-by: Some One ", + "detail": "Check that a merge request with co-authors generates the correct commit message", + }, } diff --git a/tests/scripts/github_mergebot.py b/tests/scripts/github_mergebot.py index f5ff78d06e02..65c949caf510 100755 --- a/tests/scripts/github_mergebot.py +++ b/tests/scripts/github_mergebot.py @@ -32,8 +32,11 @@ Review = Dict[str, Any] CIJob = Dict[str, Any] +EXPECTED_JOBS = ["tvm-ci/pr-head"] +THANKS_MESSAGE = "Thanks for contributing to TVM! Please refer to guideline https://tvm.apache.org/docs/contribute/ for useful information and tips. After the pull request is submitted, please request code reviews from [Reviewers](https://github.com/apache/incubator-tvm/blob/master/CONTRIBUTORS.md#reviewers) by @ them in the pull request thread." -def js(obj: Any) -> str: + +def to_json_str(obj: Any) -> str: return json.dumps(obj, indent=2) @@ -114,6 +117,7 @@ def js(obj: Any) -> str: nodes { body updatedAt + url authorCanPushToRepository commit { oid @@ -192,7 +196,7 @@ def checker(obj, parent_key): walk(self.raw, checker) - logging.info(f"Verified data, running with PR {js(self.raw)}") + logging.info(f"Verified data, running with PR {to_json_str(self.raw)}") def __repr__(self): return json.dumps(self.raw, indent=2) @@ -247,7 +251,7 @@ def ci_jobs(self) -> List[CIJob]: } ) - logging.info(f"Found CI jobs for {self.head_commit()['oid']} {js(jobs)}") + logging.info(f"Found CI jobs for {self.head_commit()['oid']} {to_json_str(jobs)}") return jobs def reviews(self) -> List[Review]: @@ -297,7 +301,7 @@ def comment(self, text: str) -> None: url = f"issues/{self.number}/comments" if self.dry_run: logging.info( - f"Dry run, would have commented on url={url} commenting with data={js(data)}" + f"Dry run, would have commented on url={url} commenting with data={to_json_str(data)}" ) return @@ -309,14 +313,10 @@ def state(self) -> str: """ return self.raw["state"] - def lint_commit_message(self, subject: str, body: str) -> bool: - # TODO: NYI (Add rules as decided in https://discuss.tvm.apache.org/t/commit-message-guideline/12334) - return True - def processed_body(self) -> str: body = self.raw["body"].strip().replace("\r", "") body = body.replace( - "Thanks for contributing to TVM! Please refer to guideline https://tvm.apache.org/docs/contribute/ for useful information and tips. 
After the pull request is submitted, please request code reviews from [Reviewers](https://github.com/apache/incubator-tvm/blob/master/CONTRIBUTORS.md#reviewers) by @ them in the pull request thread.", + THANKS_MESSAGE, "", ) return body @@ -329,11 +329,18 @@ def body_with_co_authors(self) -> str: body = self.processed_body() author_lines = self.co_authors() logging.info(f"Found co-authors: author_lines={author_lines}") + full_author_lines = [f"Co-authored-by: {author_line}" for author_line in author_lines] + + authors_to_add = [] for author_line in author_lines: if author_line not in body: - # If the line isn't already in the PR body (it could have been - # added manually), put it in - body = f"{body}\n\nCo-authored-by: {author_line}" + authors_to_add.append(f"Co-authored-by: {author_line}") + + if len(authors_to_add) > 0: + # If the line isn't already in the PR body (it could have been + # added manually), put it in + full_author_text = "\n".join(authors_to_add) + body = f"{body}\n\n{full_author_text}" return body @@ -345,7 +352,6 @@ def merge(self) -> None: title = self.raw["title"] body = self.body_with_co_authors() - self.lint_commit_message(title, body) logging.info(f"Full commit:\n{title}\n\n{body}") data = { @@ -357,7 +363,7 @@ def merge(self) -> None: "merge_method": "squash", } if self.dry_run: - logging.info(f"Dry run, would have merged with url={url} and data={js(data)}") + logging.info(f"Dry run, would have merged with url={url} and data={to_json_str(data)}") return self.github.put(url, data=data) @@ -366,21 +372,19 @@ def comment_can_merge(self, comment: Dict[str, Any]) -> bool: """ Check if a comment was left by the PR author or by a committer """ - print(comment) - print(self.raw) if comment["author"]["login"] == self.raw["author"]["login"]: - logging.info(f"Comment {comment} was from author and is mergable") + logging.info(f"Comment {comment} was from author and is mergeable") return True if comment.get("authorAssociation", "") == "CONTRIBUTOR": - logging.info(f"Comment {comment} was from committer commment and is mergable") + logging.info(f"Comment {comment} was from committer comment and is mergeable") return True if comment.get("authorCanPushToRepository", False): - logging.info(f"Comment {comment} was from a committer review comment and is mergable") + logging.info(f"Comment {comment} was from a committer review comment and is mergeable") return True - logging.info(f"Comment {comment} was not from author or committers and is not mergable") + logging.info(f"Comment {comment} was not from author or committers and is not mergeable") return False def merge_requested(self) -> bool: @@ -414,15 +418,8 @@ def parse_action(comment: Dict[str, Any]) -> Optional[str]: return None - # Check regular comments - all_comments = [] - for comment in self.raw["comments"]["nodes"]: - all_comments.append(comment) - - # Check top-level review comments - for review in self.reviews(): - all_comments.append(review) - + # Check regular comments and top-level review comments + all_comments = self.raw["comments"]["nodes"] + self.reviews() all_comments = sorted(all_comments, key=lambda comment: comment["updatedAt"]) actions = [parse_action(comment) for comment in all_comments] logging.info(f"Found these tvm-bot actions: {actions}") @@ -433,30 +430,17 @@ def parse_action(comment: Dict[str, Any]) -> Optional[str]: return actions[-1] == "merge" - def merge_if_passed_checks(self): + def find_failed_ci_jobs(self) -> List[CIJob]: # NEUTRAL is GitHub Action's way of saying cancelled - failed_ci_jobs = [ + 
return [ job for job in self.ci_jobs() - if job["status"] not in {"SUCCESS", "SUCCESSFUL", "NEUTRAL", "SKIPPED"} + if job["status"] not in {"SUCCESS", "SUCCESSFUL", "SKIPPED"} ] - all_ci_passed = False - has_one_approval = False - if len(failed_ci_jobs) > 0: - failed_jobs_msg = "\n".join( - [f" * [{job['name']} (`{job['status']}`)]({job['url']})" for job in failed_ci_jobs] - ) - self.comment( - f"Cannot merge, these CI jobs are not successful on {self.head_oid()}:\n{failed_jobs_msg}" - ) - return - else: - all_ci_passed = True + def find_missing_expected_jobs(self) -> List[str]: # Map of job name: has seen in completed jobs - seen_expected_jobs = { - "tvm-ci/pr-merge": False, - } + seen_expected_jobs = {name: False for name in EXPECTED_JOBS} logging.info(f"Expected to see jobs: {seen_expected_jobs}") missing_expected_jobs = [] @@ -467,6 +451,24 @@ def merge_if_passed_checks(self): if not seen: missing_expected_jobs.append(name) + return missing_expected_jobs + + def merge_if_passed_checks(self) -> None: + failed_ci_jobs = self.find_failed_ci_jobs() + all_ci_passed = len(failed_ci_jobs) == 0 + has_one_approval = False + + if not all_ci_passed: + failed_jobs_msg = "\n".join( + [f" * [{job['name']} (`{job['status']}`)]({job['url']})" for job in failed_ci_jobs] + ) + self.comment( + f"Cannot merge, these CI jobs are not successful on {self.head_oid()}:\n{failed_jobs_msg}" + ) + return + + missing_expected_jobs = self.find_missing_expected_jobs() + if len(missing_expected_jobs) > 0: missing_jobs_msg = "\n".join([f" * `{name}`" for name in missing_expected_jobs]) self.comment(f"Cannot merge, missing expected jobs:\n{missing_jobs_msg}") @@ -482,7 +484,7 @@ def merge_if_passed_checks(self): if review["state"] == "APPROVED": has_one_approval = True - logging.info(f"Found approving review: {js(review)}") + logging.info(f"Found approving review: {to_json_str(review)}") if has_one_approval and all_ci_passed: self.merge() @@ -497,7 +499,7 @@ def merge_if_passed_checks(self): if __name__ == "__main__": - help = "Automatically tag people based on PR / issue labels (for local development, setting DEBUG=1 will fetch and cache a PR)" + help = "Check if a PR has comments trying to merge it, and do so based on reviews/CI status" parser = argparse.ArgumentParser(description=help) parser.add_argument("--remote", default="origin", help="ssh remote to parse") parser.add_argument("--pr", required=True, help="pr number to check")
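
As a standalone illustration for readers of this series (not part of the patch itself), the Python sketch below mirrors the authorization rule that `comment_can_merge` introduces: a `@tvm-bot merge` request is honored only when it comes from the PR author, from an issue comment whose author association is `CONTRIBUTOR`, or from a top-level review comment left by someone with push access. The helper name and the sample comment dicts are simplified stand-ins for the GraphQL nodes the real script fetches, using logins that appear in the sample fixtures above.

```python
from typing import Any, Dict

def comment_can_merge(pr_author: str, comment: Dict[str, Any]) -> bool:
    """Return True if this comment is allowed to trigger a merge.

    Field names (author.login, authorAssociation, authorCanPushToRepository)
    follow the GraphQL queries added in this series; the dicts passed in here
    are hypothetical stand-ins, not real API responses.
    """
    if comment["author"]["login"] == pr_author:
        return True  # the PR author may request a merge of their own PR
    if comment.get("authorAssociation", "") == "CONTRIBUTOR":
        return True  # issue comments from contributors are accepted
    if comment.get("authorCanPushToRepository", False):
        return True  # review comments from users with push access are accepted
    return False  # anyone else is ignored

if __name__ == "__main__":
    pr_author = "Lunderberg"
    comments = [
        # unassociated user, like the rejected comment in pr11244-unauthorized-comment.json
        {"author": {"login": "abc"}, "authorAssociation": "NONE", "body": "@tvm-bot merge"},
        # review comment from a user with push access, like pr10786-merges.json
        {"author": {"login": "kparzysz-quic"}, "authorCanPushToRepository": True, "body": "@tvm-bot merge"},
    ]
    for c in comments:
        print(c["author"]["login"], "->", comment_can_merge(pr_author, c))
    # Expected output: abc -> False, kparzysz-quic -> True
```

The design choice here is that a merge request can be made from either surface GitHub offers (a plain PR comment or a top-level review comment), while comments from users with no association to the repository, such as the one captured in pr11244-unauthorized-comment.json, are silently ignored rather than answered.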