ClickHouse/tests/ci/bugfix_validate_check.py


#!/usr/bin/env python3
"""Aggregate the per-job status files produced by bugfix validation runs,
upload a combined report and post the "Bugfix validate check" commit status."""

from pathlib import Path
from typing import List, Tuple
import argparse
import csv
import logging

from github import Github

from commit_status_helper import get_commit, post_commit_status
from get_robot_token import get_best_robot_token
from pr_info import PRInfo
from report import TestResults, TestResult
from s3_helper import S3Helper
from upload_result_helper import upload_results


def parse_args() -> argparse.Namespace:
    parser = argparse.ArgumentParser()
    parser.add_argument("files", nargs="+", type=Path, help="Path to status files")
    return parser.parse_args()


def post_commit_status_from_file(file_path: Path) -> List[str]:
    with open(file_path, "r", encoding="utf-8") as f:
        res = list(csv.reader(f, delimiter="\t"))
    if len(res) < 1:
        raise Exception(f'Can\'t read from "{file_path}"')
    if len(res[0]) != 3:
        raise Exception(f'Can\'t read from "{file_path}"')
    return res[0]
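

# Illustrative only, not part of the original source: each status file is expected
# to hold a single tab-separated row of exactly three columns,
#   <state>\t<report_url>\t<description>
# which is why anything other than a three-column first row is rejected above.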


def process_result(file_path: Path) -> Tuple[bool, TestResults]:
    test_results = []  # type: TestResults
    state, report_url, description = post_commit_status_from_file(file_path)
    prefix = file_path.parent.name
    # These descriptions mean the upstream job failed to produce usable artifacts
    if description.strip() in [
        "Invalid check_status.tsv",
        "Not found test_results.tsv",
        "Empty test_results.tsv",
    ]:
        status = (
            f'Check failed (<a href="{report_url}">Report</a>)'
            if report_url != "null"
            else "Check failed"
        )
        return False, [TestResult(f"{prefix}: {description}", status)]

    is_ok = state == "success"
    if is_ok and report_url == "null":
        return is_ok, test_results

    status = (
        f'OK: Bug reproduced (<a href="{report_url}">Report</a>)'
        if is_ok
        else f'Bug is not reproduced (<a href="{report_url}">Report</a>)'
    )
    test_results.append(TestResult(f"{prefix}: {description}", status))

    return is_ok, test_results


def process_all_results(file_paths: List[Path]) -> Tuple[bool, TestResults]:
    any_ok = False
    all_results = []
    for status_path in file_paths:
        is_ok, test_results = process_result(status_path)
        # The whole check passes if at least one status file reports success
        any_ok = any_ok or is_ok
        if test_results is not None:
            all_results.extend(test_results)

    return any_ok, all_results


def main():
    logging.basicConfig(level=logging.INFO)

    args = parse_args()
    status_files = args.files  # type: List[Path]

    check_name_with_group = "Bugfix validate check"

    is_ok, test_results = process_all_results(status_files)
    if not test_results:
        logging.info("No results to upload")
        return

    # Upload the aggregated results and post a single commit status for the group
    pr_info = PRInfo()
    report_url = upload_results(
        S3Helper(),
        pr_info.number,
        pr_info.sha,
        test_results,
        status_files,
        check_name_with_group,
    )
    gh = Github(get_best_robot_token(), per_page=100)
    commit = get_commit(gh, pr_info.sha)
    post_commit_status(
        commit,
        "success" if is_ok else "error",
        report_url,
        "" if is_ok else "Changed tests don't reproduce the bug",
        check_name_with_group,
        pr_info,
    )


if __name__ == "__main__":
    main()
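
# Illustrative usage with hypothetical paths (the real CI pipeline supplies its own):
#   python3 bugfix_validate_check.py functional_test/check_status.tsv integration_test/check_status.tsv
# The parent directory name of each status file becomes the prefix of the
# corresponding entry in the combined report.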