#!/usr/bin/env python3

import csv
import json
import logging
import time
from collections import defaultdict
from dataclasses import asdict, dataclass
from pathlib import Path
from typing import Dict, List, Optional, Union

from github import Github
from github.Commit import Commit
from github.CommitStatus import CommitStatus
from github.GithubException import GithubException
from github.GithubObject import NotSet
from github.IssueComment import IssueComment
from github.Repository import Repository

from ci_config import CHECK_DESCRIPTIONS, CheckDescription, StatusNames, CIConfig
from env_helper import GITHUB_REPOSITORY, GITHUB_UPSTREAM_REPOSITORY, TEMP_PATH
from lambda_shared_package.lambda_shared.pr import Labels
from pr_info import PRInfo
from report import (
    ERROR,
    FAILURE,
    PENDING,
    SUCCESS,
    StatusType,
    TestResult,
    TestResults,
    get_status,
    get_worst_status,
)
from s3_helper import S3Helper
from upload_result_helper import upload_results

RETRY = 5
CommitStatuses = List[CommitStatus]
GH_REPO = None  # type: Optional[Repository]
STATUS_FILE_PATH = Path(TEMP_PATH) / "status.json"


class RerunHelper:
    def __init__(self, commit: Commit, check_name: str):
        self.check_name = check_name
        self.commit = commit
        self.statuses = get_commit_filtered_statuses(commit)

    def is_already_finished_by_status(self) -> bool:
        # currently we agree even for failed statuses
        for status in self.statuses:
            if self.check_name in status.context and status.state in (
                SUCCESS,
                FAILURE,
            ):
                return True
        return False

    def get_finished_status(self) -> Optional[CommitStatus]:
        for status in self.statuses:
            if self.check_name in status.context:
                return status
        return None
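

# A usage sketch (hypothetical check name and SHA): at the start of a CI job,
# skip the work if this commit already has a finished status for the check.
# Assumes an authenticated `Github` client (token setup omitted).
#
#     commit = get_commit(Github(), "0123456789abcdef0123456789abcdef01234567")
#     rerun_helper = RerunHelper(commit, "Style Check")
#     if rerun_helper.is_already_finished_by_status():
#         status = rerun_helper.get_finished_status()
#         logging.info("Check is already finished with state %s", status.state)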


def get_commit(gh: Github, commit_sha: str, retry_count: int = RETRY) -> Commit:
    for i in range(retry_count):
        try:
            repo = get_repo(gh)
            commit = repo.get_commit(commit_sha)
            break
        except Exception as ex:
            if i == retry_count - 1:
                raise ex
            time.sleep(i)

    return commit


def post_commit_status(
    commit: Commit,
    state: StatusType,  # do not change it, it MUST be StatusType and nothing else
    report_url: Optional[str] = None,
    description: Optional[str] = None,
    check_name: Optional[str] = None,
    pr_info: Optional[PRInfo] = None,
    dump_to_file: bool = False,
) -> CommitStatus:
    """The parameters are given in the same order as for commit.create_status;
    if the optional parameter `pr_info` is given, the `set_status_comment` function
    is invoked to add or update the comment with the statuses overview"""
    for i in range(RETRY):
        try:
            commit_status = commit.create_status(
                state=state,
                target_url=report_url if report_url is not None else NotSet,
                description=description if description is not None else NotSet,
                context=check_name if check_name is not None else NotSet,
            )
            break
        except Exception as ex:
            if i == RETRY - 1:
                raise ex
            time.sleep(i)
    if pr_info:
        status_updated = False
        for i in range(RETRY):
            try:
                set_status_comment(commit, pr_info)
                status_updated = True
                break
            except Exception as ex:
                logging.warning(
                    "Failed to update the status comment, will retry %s times: %s",
                    RETRY - i - 1,
                    ex,
                )

        if not status_updated:
            logging.error("Failed to update the status comment, continue anyway")
    if dump_to_file:
        assert pr_info
        CommitStatusData(
            status=state,
            description=description or "",
            report_url=report_url or "",
            sha=pr_info.sha,
            pr_num=pr_info.number,
        ).dump_status()

    return commit_status
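

# A minimal usage sketch (hypothetical URL, description, and check name): post a
# commit status after a CI job finishes. Assumes an authenticated `Github` client.
#
#     gh = Github()
#     commit = get_commit(gh, "0123456789abcdef0123456789abcdef01234567")
#     post_commit_status(
#         commit,
#         SUCCESS,
#         report_url="https://example.com/report.html",
#         description="All checks passed",
#         check_name="Style Check",
#     )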


STATUS_ICON_MAP = defaultdict(
    str,
    {
        ERROR: "❌",
        FAILURE: "❌",
        PENDING: "⏳",
        SUCCESS: "✅",
    },
)
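

# A quick sketch of the fallback behavior: an unknown state renders as an empty
# icon instead of raising KeyError.
#
#     STATUS_ICON_MAP[SUCCESS]    # -> "✅"
#     STATUS_ICON_MAP["unknown"]  # -> "" (defaultdict falls back to str())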


def set_status_comment(commit: Commit, pr_info: PRInfo) -> None:
    """Adds or updates the status comment on all pull requests except release
    ones; the method does nothing for simple pushes and for pull requests with
    `release`/`release-lts` labels"""

    if pr_info.is_merge_queue:
        # skip report creation for the MQ
        return

    # to reduce the number of parameters, the Github client is constructed on the fly
    gh = Github()
    gh.__requester = commit._requester  # type:ignore #pylint:disable=protected-access
    repo = get_repo(gh)
    statuses = sorted(get_commit_filtered_statuses(commit), key=lambda x: x.context)
    if not statuses:
        return

    if not [status for status in statuses if status.context == StatusNames.CI]:
        # This is the case when some statuses already exist for the check,
        # but not the StatusNames.CI one. We should create it as pending.
        # W/o pr_info to avoid recursion, and yes, one extra create_ci_report
        post_commit_status(
            commit,
            PENDING,
            create_ci_report(pr_info, statuses),
            "The report for running CI",
            StatusNames.CI,
        )

    # We update the report in the generate_status_comment function, so do it each
    # run, even in the release PRs and normal pushes
    comment_body = generate_status_comment(pr_info, statuses)
    # We post the comment only to normal and backport PRs
    if pr_info.number == 0 or pr_info.labels.intersection({"release", "release-lts"}):
        return

    comment_service_header = comment_body.split("\n", 1)[0]
    comment = None  # type: Optional[IssueComment]
    pr = repo.get_pull(pr_info.number)
    for ic in pr.get_issue_comments():
        if ic.body.startswith(comment_service_header):
            comment = ic
            break

    if comment is None:
        pr.create_issue_comment(comment_body)
        return

    if comment.body == comment_body:
        logging.info("The status comment is already updated, no need to change it")
        return
    comment.edit(comment_body)


def generate_status_comment(pr_info: PRInfo, statuses: CommitStatuses) -> str:
    """The method generates the comment body and also updates the CI report"""

    report_url = create_ci_report(pr_info, statuses)
    worst_state = get_worst_state(statuses)

    comment_body = (
        f"<!-- automatic status comment for PR #{pr_info.number} "
        f"from {pr_info.head_name}:{pr_info.head_ref} -->\n"
        f"*This is an automated comment for commit {pr_info.sha} with "
        f"description of existing statuses. It's updated for the latest CI running*\n\n"
        f"[{STATUS_ICON_MAP[worst_state]} Click here]({report_url}) to open a full report in a separate page\n"
        f"\n"
    )
    # group checks by name to get the worst state for each
    grouped_statuses = {}  # type: Dict[CheckDescription, CommitStatuses]
    for status in statuses:
        cd = None
        for c in CHECK_DESCRIPTIONS:
            if c.match_func(status.context):
                cd = c
                break

        if cd is None or cd == CHECK_DESCRIPTIONS[-1]:
            # This is the case for either a not-found description or the fallback
            cd = CheckDescription(
                status.context,
                CHECK_DESCRIPTIONS[-1].description,
                CHECK_DESCRIPTIONS[-1].match_func,
            )

        if cd in grouped_statuses:
            grouped_statuses[cd].append(status)
        else:
            grouped_statuses[cd] = [status]

    table_header = (
        "<table>\n"
        "<thead><tr><th>Check name</th><th>Description</th><th>Status</th></tr></thead>\n"
        "<tbody>\n"
    )
    table_footer = "</tbody>\n</table>\n"

    details_header = "<details><summary>Successful checks</summary>\n"
    details_footer = "</details>\n"

    visible_table_rows = []  # type: List[str]
    hidden_table_rows = []  # type: List[str]
    for desc, gs in grouped_statuses.items():
        state = get_worst_state(gs)
        state_text = f"{STATUS_ICON_MAP[state]} {state}"
        # take the first target_url with the worst state
        for status in gs:
            if status.target_url and status.state == state:
                state_text = f'<a href="{status.target_url}">{state_text}</a>'
                break

        table_row = (
            f"<tr><td>{desc.name}</td><td>{desc.description}</td>"
            f"<td>{state_text}</td></tr>\n"
        )
        if state == SUCCESS:
            hidden_table_rows.append(table_row)
        else:
            visible_table_rows.append(table_row)

    result = [comment_body]

    if visible_table_rows:
        visible_table_rows.sort()
        result.append(table_header)
        result.extend(visible_table_rows)
        result.append(table_footer)

    if hidden_table_rows:
        hidden_table_rows.sort()
        result.append(details_header)
        result.append(table_header)
        result.extend(hidden_table_rows)
        result.append(table_footer)
        result.append(details_footer)

    return "".join(result)


def get_worst_state(statuses: CommitStatuses) -> StatusType:
    return get_worst_status(status.state for status in statuses)
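

# E.g. for statuses with states [SUCCESS, PENDING, FAILURE] this returns FAILURE,
# since get_worst_status() from report.py picks the most severe state.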


def create_ci_report(pr_info: PRInfo, statuses: CommitStatuses) -> str:
    """The function converts the statuses to TestResults and uploads the report
    to the S3 tests bucket. Then it returns the URL"""
    test_results = []  # type: TestResults
    for status in statuses:
        log_urls = []
        if status.target_url is not None:
            log_urls.append(status.target_url)
        raw_logs = status.description or None
        test_results.append(
            TestResult(
                status.context, status.state, log_urls=log_urls, raw_logs=raw_logs
            )
        )
    return upload_results(
        S3Helper(), pr_info.number, pr_info.sha, test_results, [], StatusNames.CI
    )


def post_commit_status_to_file(
    file_path: Path, description: str, state: str, report_url: str
) -> None:
    if file_path.exists():
        raise FileExistsError(f'File "{file_path}" already exists!')
    with open(file_path, "w", encoding="utf-8") as f:
        out = csv.writer(f, delimiter="\t")
        out.writerow([state, report_url, description])
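

# A short sketch (hypothetical path and values): the produced file is a one-line
# TSV in the order state, report_url, description.
#
#     post_commit_status_to_file(
#         Path("/tmp/status.tsv"), "All tests passed", SUCCESS, "https://example.com"
#     )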


@dataclass
class CommitStatusData:
    """
    If you are about to add/remove fields in this class, be cautious: it dumps
    to/loads from files (see its methods)
    - you might want to add default values for new fields so that it won't break
      with old files
    """

    status: str
    report_url: str
    description: str
    sha: str = "deadbeaf"
    pr_num: int = -1

    @classmethod
    def _filter_dict(cls, data: dict) -> Dict:
        return {k: v for k, v in data.items() if k in cls.__annotations__.keys()}

    @classmethod
    def load_from_file(cls, file_path: Union[Path, str]):  # type: ignore
        res = {}
        with open(file_path, "r", encoding="utf-8") as json_file:
            res = json.load(json_file)
        return CommitStatusData(**cls._filter_dict(res))

    @classmethod
    def load_status(cls):  # type: ignore
        return cls.load_from_file(STATUS_FILE_PATH)

    @classmethod
    def exist(cls) -> bool:
        return STATUS_FILE_PATH.is_file()

    def dump_status(self) -> None:
        STATUS_FILE_PATH.parent.mkdir(parents=True, exist_ok=True)
        self.dump_to_file(STATUS_FILE_PATH)

    def dump_to_file(self, file_path: Union[Path, str]) -> None:
        file_path = Path(file_path) if file_path else STATUS_FILE_PATH
        with open(file_path, "w", encoding="utf-8") as json_file:
            json.dump(asdict(self), json_file)

    def is_ok(self):
        return self.status == SUCCESS

    def is_failure(self):
        return self.status == FAILURE

    @staticmethod
    def cleanup():
        STATUS_FILE_PATH.unlink(missing_ok=True)
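

# A round-trip sketch (hypothetical values): dump a status to the default file
# and load it back; unknown keys from old files are dropped by _filter_dict.
#
#     data = CommitStatusData(
#         status=SUCCESS,
#         report_url="https://example.com/report.html",
#         description="All tests passed",
#         sha="0123456789abcdef0123456789abcdef01234567",
#         pr_num=12345,
#     )
#     data.dump_status()
#     assert CommitStatusData.exist()
#     assert CommitStatusData.load_status().is_ok()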


def get_commit_filtered_statuses(commit: Commit) -> CommitStatuses:
    """
    Squash statuses to the latest state
    1. context="first", state=SUCCESS, update_time=1
    2. context="second", state=SUCCESS, update_time=2
    3. context="first", state=FAILURE, update_time=3
    =========>
    1. context="second", state=SUCCESS
    2. context="first", state=FAILURE
    """
    filtered = {}
    for status in sorted(commit.get_statuses(), key=lambda x: x.updated_at):
        filtered[status.context] = status
    return list(filtered.values())


def get_repo(gh: Github) -> Repository:
    global GH_REPO
    if GH_REPO is not None:
        return GH_REPO
    GH_REPO = gh.get_repo(GITHUB_REPOSITORY)
    return GH_REPO


def remove_labels(gh: Github, pr_info: PRInfo, labels_names: List[str]) -> None:
    repo = get_repo(gh)
    pull_request = repo.get_pull(pr_info.number)
    for label in labels_names:
        try:
            pull_request.remove_from_labels(label)
        except GithubException as exc:
            if not (
                exc.status == 404
                and isinstance(exc.data, dict)
                and exc.data.get("message", "") == "Label does not exist"
            ):
                raise
            logging.warning(
                "The label '%s' does not exist in PR #%s", label, pr_info.number
            )
        pr_info.labels.remove(label)


def post_labels(gh: Github, pr_info: PRInfo, labels_names: List[str]) -> None:
    repo = get_repo(gh)
    pull_request = repo.get_pull(pr_info.number)
    for label in labels_names:
        pull_request.add_to_labels(label)
        pr_info.labels.add(label)
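

# A usage sketch (hypothetical label names): both helpers keep pr_info.labels in
# sync with the labels on the GitHub PR. Assumes an authenticated `Github` client.
#
#     gh = Github()
#     post_labels(gh, pr_info, ["can be tested"])
#     remove_labels(gh, pr_info, ["do not test"])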


def format_description(description: str) -> str:
    if len(description) > 140:
        description = description[:137] + "..."
    return description
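

# GitHub commit statuses cap the description at 140 characters, hence the
# truncation above. E.g.:
#
#     format_description("short text")  # unchanged
#     format_description("x" * 150)     # -> 137 "x" characters plus "..."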


def set_mergeable_check(
    commit: Commit,
    description: str = "",
    state: StatusType = SUCCESS,
) -> CommitStatus:
    report_url = ""
    return post_commit_status(
        commit,
        state,
        report_url,
        format_description(description),
        StatusNames.MERGEABLE,
    )


def update_mergeable_check(commit: Commit, pr_info: PRInfo, check_name: str) -> None:
    "check if check_name is among the required checks and then trigger the update"
    not_run = (
        pr_info.labels.intersection({Labels.SKIP_MERGEABLE_CHECK, Labels.RELEASE})
        or not CIConfig.is_required(check_name)
        or pr_info.release_pr
        or pr_info.number == 0
    )

    if not_run:
        # Let's avoid unnecessary work
        return

    logging.info("Update Mergeable Check by %s", check_name)

    statuses = get_commit_filtered_statuses(commit)
    trigger_mergeable_check(commit, statuses)


def trigger_mergeable_check(
    commit: Commit,
    statuses: CommitStatuses,
    set_if_green: bool = False,
    workflow_failed: bool = False,
) -> StatusType:
    """calculate and update StatusNames.MERGEABLE"""
    required_checks = [
        status for status in statuses if CIConfig.is_required(status.context)
    ]

    mergeable_status = None
    for status in statuses:
        if status.context == StatusNames.MERGEABLE:
            mergeable_status = status
            break

    success = []
    fail = []
    pending = []
    for status in required_checks:
        if status.state == SUCCESS:
            success.append(status.context)
        elif status.state == PENDING:
            pending.append(status.context)
        else:
            fail.append(status.context)

    state: StatusType = SUCCESS

    if fail:
        description = "failed: " + ", ".join(fail)
        state = FAILURE
    elif workflow_failed:
        description = "check workflow failures"
        state = FAILURE
    elif pending:
        description = "pending: " + ", ".join(pending)
        state = PENDING
    else:
        # all good
        description = ", ".join(success)

    description = format_description(description)

    if set_if_green or state != SUCCESS:
        # a green Mergeable Check status is only set when explicitly requested
        if mergeable_status is None or mergeable_status.description != description:
            set_mergeable_check(commit, description, state)

    return state
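

# A decision sketch following the logic above: with required checks in states
# [SUCCESS, PENDING] the function posts a pending Mergeable Check and returns
# PENDING; with all required checks green it returns SUCCESS but posts the green
# status only when set_if_green=True and the description changed.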


def update_upstream_sync_status(
    upstream_pr_number: int,
    sync_pr_number: int,
    gh: Github,
    state: StatusType,
    can_set_green_mergeable_status: bool = False,
) -> None:
    upstream_repo = gh.get_repo(GITHUB_UPSTREAM_REPOSITORY)
    upstream_pr = upstream_repo.get_pull(upstream_pr_number)
    sync_repo = gh.get_repo(GITHUB_REPOSITORY)
    sync_pr = sync_repo.get_pull(sync_pr_number)
    # Find the commit that is in both repos, upstream and cloud
    sync_commits = sync_pr.get_commits().reversed
    upstream_commits = upstream_pr.get_commits().reversed
    # Github objects are compared by the _url attribute. We can't compare them
    # directly and should compare commits by SHA1
    upstream_shas = [c.sha for c in upstream_commits]
    logging.info("Commits in upstream PR:\n %s", ", ".join(upstream_shas))
    sync_shas = [c.sha for c in sync_commits]
    logging.info("Commits in sync PR:\n %s", ", ".join(reversed(sync_shas)))

    # find the latest synced commit
    last_synced_upstream_commit = None
    for commit in upstream_commits:
        if commit.sha in sync_shas:
            last_synced_upstream_commit = commit
            break

    assert last_synced_upstream_commit

    sync_status = get_status(state)
    logging.info(
        "Using commit %s to post the %s status `%s`: [%s]",
        last_synced_upstream_commit.sha,
        sync_status,
        StatusNames.SYNC,
        "",
    )
    post_commit_status(
        last_synced_upstream_commit,
        sync_status,
        "",
        "",
        StatusNames.SYNC,
    )
    trigger_mergeable_check(
        last_synced_upstream_commit,
        get_commit_filtered_statuses(last_synced_upstream_commit),
        set_if_green=can_set_green_mergeable_status,
    )
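

# An end-to-end sketch (hypothetical PR numbers): mirror a finished sync workflow
# back to the upstream PR and recompute its Mergeable Check. Assumes an
# authenticated `Github` client.
#
#     gh = Github()
#     update_upstream_sync_status(
#         upstream_pr_number=12345,
#         sync_pr_number=678,
#         gh=gh,
#         state=SUCCESS,
#         can_set_green_mergeable_status=True,
#     )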