#!/usr/bin/env python3
# ClickHouse/tests/ci/build_report_check.py
# Standard library
import atexit
import json
import logging
import os
import sys
from pathlib import Path

# Third-party
from github import Github

# Project-local CI helpers
from ci_config import CI_CONFIG
from commit_status_helper import (
    RerunHelper,
    format_description,
    get_commit,
    post_commit_status,
    update_mergeable_check,
)
from env_helper import (
    GITHUB_JOB_URL,
    GITHUB_REPOSITORY,
    GITHUB_SERVER_URL,
    REPORTS_PATH,
    TEMP_PATH,
)
from get_robot_token import get_best_robot_token
from pr_info import NeedsDataType, PRInfo
from report import (
    BuildResult,
    ERROR,
    PENDING,
    SUCCESS,
    create_build_html_report,
    get_worst_status,
)
from s3_helper import S3Helper
# Old way to read the needs_data: a path to a JSON file with the jobs this
# check depends on
NEEDS_DATA_PATH = os.getenv("NEEDS_DATA_PATH", "")
# Now it's set here. Two-steps migration for backward compatibility
NEEDS_DATA = os.getenv("NEEDS_DATA", "")
def main():
    """Aggregate per-build JSON reports into a single HTML build report.

    Invoked by CI with the build check name as ``sys.argv[1]``. Reads the
    per-build ``*.json`` artifacts from ``REPORTS_PATH``, renders an HTML
    report, uploads it to S3 and posts the resulting commit status.

    Exits non-zero when no artifact groups were found at all or when the
    final status is ERROR.
    """
    logging.basicConfig(level=logging.INFO)

    temp_path = Path(TEMP_PATH)
    temp_path.mkdir(parents=True, exist_ok=True)

    logging.info("Reports path %s", REPORTS_PATH)
    reports_path = Path(REPORTS_PATH)
    logging.info(
        "Reports found:\n %s",
        "\n ".join(p.as_posix() for p in reports_path.rglob("*.json")),
    )

    build_check_name = sys.argv[1]

    # The needs_data mapping (job name -> {"result": ...}) comes either from a
    # file (old way) or straight from the environment (new way). The env
    # variable, when set, takes precedence — see the two-step migration note
    # at the NEEDS_DATA definition.
    needs_data = {}  # type: NeedsDataType
    required_builds = 0
    # NEEDS_DATA_PATH may be "" (unset); guard before the pathlib check,
    # since Path("") resolves to the current directory.
    if NEEDS_DATA_PATH and Path(NEEDS_DATA_PATH).exists():
        needs_data = json.loads(Path(NEEDS_DATA_PATH).read_text(encoding="utf-8"))
    if NEEDS_DATA:
        needs_data = json.loads(NEEDS_DATA)
    required_builds = len(needs_data)

    if needs_data:
        logging.info("The next builds are required: %s", ", ".join(needs_data))
        # Nothing to report when every dependency job was skipped
        if all(i["result"] == "skipped" for i in needs_data.values()):
            logging.info("All builds are skipped, exiting")
            sys.exit(0)

    gh = Github(get_best_robot_token(), per_page=100)
    pr_info = PRInfo()
    commit = get_commit(gh, pr_info.sha)
    # Keep the mergeable check up to date even if this process exits early
    atexit.register(update_mergeable_check, gh, pr_info, build_check_name)

    rerun_helper = RerunHelper(commit, build_check_name)
    if rerun_helper.is_already_finished_by_status():
        logging.info("Check is already finished according to github status, exiting")
        sys.exit(0)

    builds_for_check = CI_CONFIG.builds_report_config[build_check_name]
    # Fall back to the full configured list when needs_data was not provided
    required_builds = required_builds or len(builds_for_check)

    # Collect reports from json artifacts
    build_results = []
    for build_name in builds_for_check:
        report_name = BuildResult.get_report_name(build_name).stem
        build_result = BuildResult.read_json(reports_path / report_name, build_name)
        if build_result.is_missing:
            logging.warning("Build results for %s are missing", build_name)
            continue
        build_results.append(build_result)

    # The code to collect missing reports for failed jobs: any required job
    # without a matching report gets a dummy PENDING result
    missing_job_names = [
        name
        for name in needs_data
        if not any(1 for br in build_results if br.job_name.startswith(name))
    ]
    missing_builds = len(missing_job_names)
    for job_name in reversed(missing_job_names):
        build_result = BuildResult.missing_result("missing")
        build_result.job_name = job_name
        build_result.status = PENDING
        logging.info(
            "There is missing report for %s, created a dummy result %s",
            job_name,
            build_result,
        )
        # Insert at the front so missing jobs are the most visible in the report
        build_results.insert(0, build_result)

    # Calculate artifact groups like packages and binaries
    total_groups = sum(len(br.grouped_urls) for br in build_results)
    ok_groups = sum(
        len(br.grouped_urls) for br in build_results if br.status == SUCCESS
    )
    logging.info("Totally got %s artifact groups", total_groups)
    if total_groups == 0:
        logging.error("No success builds, failing check without creating a status")
        sys.exit(1)

    s3_helper = S3Helper()

    branch_url = f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}/commits/master"
    branch_name = "master"
    if pr_info.number != 0:
        branch_name = f"PR #{pr_info.number}"
        branch_url = f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}/pull/{pr_info.number}"
    commit_url = f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}/commit/{pr_info.sha}"
    task_url = GITHUB_JOB_URL()

    report = create_build_html_report(
        build_check_name,
        build_results,
        task_url,
        branch_url,
        branch_name,
        commit_url,
    )

    report_path = temp_path / "report.html"
    report_path.write_text(report, encoding="utf-8")

    logging.info("Going to upload prepared report")
    context_name_for_path = build_check_name.lower().replace(" ", "_")
    s3_path_prefix = (
        str(pr_info.number) + "/" + pr_info.sha + "/" + context_name_for_path
    )
    url = s3_helper.upload_test_report_to_s3(
        report_path, s3_path_prefix + "/report.html"
    )
    logging.info("Report url %s", url)
    print(f"::notice ::Report url: {url}")

    # Prepare a commit status
    summary_status = get_worst_status(br.status for br in build_results)
    # Check if there are no builds at all, do not override bad status:
    # a SUCCESS is downgraded when reports are missing or nothing succeeded
    if summary_status == SUCCESS:
        if missing_builds:
            summary_status = PENDING
        elif ok_groups == 0:
            summary_status = ERROR

    addition = ""
    if missing_builds:
        addition = (
            f" ({required_builds - missing_builds} of {required_builds} builds are OK)"
        )
    description = format_description(
        f"{ok_groups}/{total_groups} artifact groups are OK{addition}"
    )

    post_commit_status(
        commit, summary_status, url, description, build_check_name, pr_info
    )
    if summary_status == ERROR:
        sys.exit(1)
# Script entry point: CI passes the build check name as the single CLI argument
if __name__ == "__main__":
    main()