2021-10-21 14:41:07 +00:00
|
|
|
#!/usr/bin/env python3
|
|
|
|
|
|
|
|
import json
|
2021-10-27 07:03:23 +00:00
|
|
|
import logging
|
|
|
|
import os
|
|
|
|
import sys
|
2022-07-30 05:07:22 +00:00
|
|
|
import atexit
|
2022-04-11 13:35:13 +00:00
|
|
|
from typing import Dict, List, Tuple
|
|
|
|
|
2021-10-21 14:41:07 +00:00
|
|
|
from github import Github
|
2021-11-26 14:00:09 +00:00
|
|
|
|
2022-03-22 16:39:58 +00:00
|
|
|
from env_helper import (
|
|
|
|
GITHUB_REPOSITORY,
|
2022-03-24 14:37:53 +00:00
|
|
|
GITHUB_RUN_URL,
|
2022-05-25 13:15:11 +00:00
|
|
|
GITHUB_SERVER_URL,
|
|
|
|
REPORTS_PATH,
|
|
|
|
TEMP_PATH,
|
2022-03-22 16:39:58 +00:00
|
|
|
)
|
2021-10-21 14:41:07 +00:00
|
|
|
from report import create_build_html_report
|
|
|
|
from s3_helper import S3Helper
|
|
|
|
from get_robot_token import get_best_robot_token
|
2022-11-15 12:01:27 +00:00
|
|
|
from pr_info import NeedsDataType, PRInfo
|
2022-07-21 15:11:43 +00:00
|
|
|
from commit_status_helper import (
|
|
|
|
get_commit,
|
2022-07-30 05:07:22 +00:00
|
|
|
update_mergeable_check,
|
2022-07-21 15:11:43 +00:00
|
|
|
)
|
2021-11-30 15:33:29 +00:00
|
|
|
from ci_config import CI_CONFIG
|
2021-12-01 14:23:51 +00:00
|
|
|
from rerun_helper import RerunHelper
|
2021-10-21 14:41:07 +00:00
|
|
|
|
2022-03-22 16:39:58 +00:00
|
|
|
|
2022-11-15 12:01:27 +00:00
|
|
|
NEEDS_DATA_PATH = os.getenv("NEEDS_DATA_PATH", "")
|
2022-05-25 13:15:11 +00:00
|
|
|
|
|
|
|
|
2022-03-22 16:39:58 +00:00
|
|
|
class BuildResult:
|
|
|
|
def __init__(
|
|
|
|
self,
|
|
|
|
compiler,
|
|
|
|
build_type,
|
|
|
|
sanitizer,
|
|
|
|
status,
|
|
|
|
elapsed_seconds,
|
|
|
|
with_coverage,
|
|
|
|
):
|
2021-10-21 14:41:07 +00:00
|
|
|
self.compiler = compiler
|
|
|
|
self.build_type = build_type
|
|
|
|
self.sanitizer = sanitizer
|
|
|
|
self.status = status
|
|
|
|
self.elapsed_seconds = elapsed_seconds
|
|
|
|
self.with_coverage = with_coverage
|
|
|
|
|
2022-03-22 16:39:58 +00:00
|
|
|
|
2022-04-11 13:35:13 +00:00
|
|
|
def group_by_artifacts(build_urls: List[str]) -> Dict[str, List[str]]:
|
2022-03-22 16:39:58 +00:00
|
|
|
groups = {
|
|
|
|
"apk": [],
|
|
|
|
"deb": [],
|
|
|
|
"binary": [],
|
|
|
|
"tgz": [],
|
|
|
|
"rpm": [],
|
|
|
|
"performance": [],
|
2022-04-11 13:35:13 +00:00
|
|
|
} # type: Dict[str, List[str]]
|
2021-10-21 14:41:07 +00:00
|
|
|
for url in build_urls:
|
2023-01-09 00:58:42 +00:00
|
|
|
if url.endswith("performance.tar.zst"):
|
2022-03-22 16:39:58 +00:00
|
|
|
groups["performance"].append(url)
|
|
|
|
elif (
|
|
|
|
url.endswith(".deb")
|
|
|
|
or url.endswith(".buildinfo")
|
|
|
|
or url.endswith(".changes")
|
|
|
|
or url.endswith(".tar.gz")
|
|
|
|
):
|
|
|
|
groups["deb"].append(url)
|
|
|
|
elif url.endswith(".apk"):
|
|
|
|
groups["apk"].append(url)
|
|
|
|
elif url.endswith(".rpm"):
|
|
|
|
groups["rpm"].append(url)
|
2022-12-07 17:24:08 +00:00
|
|
|
elif url.endswith(".tgz") or url.endswith(".tgz.sha512"):
|
2022-03-22 16:39:58 +00:00
|
|
|
groups["tgz"].append(url)
|
2021-10-21 14:41:07 +00:00
|
|
|
else:
|
2022-03-22 16:39:58 +00:00
|
|
|
groups["binary"].append(url)
|
2021-10-21 14:41:07 +00:00
|
|
|
return groups
|
|
|
|
|
2022-03-22 16:39:58 +00:00
|
|
|
|
2022-05-25 13:15:11 +00:00
|
|
|
def get_failed_report(
|
|
|
|
job_name: str,
|
|
|
|
) -> Tuple[List[BuildResult], List[List[str]], List[str]]:
|
|
|
|
message = f"{job_name} failed"
|
|
|
|
build_result = BuildResult(
|
|
|
|
compiler="unknown",
|
|
|
|
build_type="unknown",
|
|
|
|
sanitizer="unknown",
|
|
|
|
status=message,
|
|
|
|
elapsed_seconds=0,
|
|
|
|
with_coverage=False,
|
|
|
|
)
|
2022-06-14 14:22:41 +00:00
|
|
|
return [build_result], [[""]], [GITHUB_RUN_URL]
|
2022-05-25 13:15:11 +00:00
|
|
|
|
|
|
|
|
2022-04-11 13:35:13 +00:00
|
|
|
def process_report(
|
2022-11-15 12:01:27 +00:00
|
|
|
build_report: dict,
|
2022-04-11 13:35:13 +00:00
|
|
|
) -> Tuple[List[BuildResult], List[List[str]], List[str]]:
|
2022-03-22 16:39:58 +00:00
|
|
|
build_config = build_report["build_config"]
|
2021-10-21 14:41:07 +00:00
|
|
|
build_result = BuildResult(
|
2022-03-22 16:39:58 +00:00
|
|
|
compiler=build_config["compiler"],
|
|
|
|
build_type=build_config["build_type"],
|
|
|
|
sanitizer=build_config["sanitizer"],
|
|
|
|
status="success" if build_report["status"] else "failure",
|
|
|
|
elapsed_seconds=build_report["elapsed_seconds"],
|
|
|
|
with_coverage=False,
|
2021-10-21 14:41:07 +00:00
|
|
|
)
|
|
|
|
build_results = []
|
|
|
|
build_urls = []
|
|
|
|
build_logs_urls = []
|
2022-03-22 16:39:58 +00:00
|
|
|
urls_groups = group_by_artifacts(build_report["build_urls"])
|
2021-10-25 15:52:03 +00:00
|
|
|
found_group = False
|
2021-10-21 14:41:07 +00:00
|
|
|
for _, group_urls in urls_groups.items():
|
|
|
|
if group_urls:
|
|
|
|
build_results.append(build_result)
|
|
|
|
build_urls.append(group_urls)
|
2022-03-22 16:39:58 +00:00
|
|
|
build_logs_urls.append(build_report["log_url"])
|
2021-10-25 15:52:03 +00:00
|
|
|
found_group = True
|
|
|
|
|
2022-04-11 13:35:13 +00:00
|
|
|
# No one group of urls is found, a failed report
|
2021-10-25 15:52:03 +00:00
|
|
|
if not found_group:
|
|
|
|
build_results.append(build_result)
|
|
|
|
build_urls.append([""])
|
2022-03-22 16:39:58 +00:00
|
|
|
build_logs_urls.append(build_report["log_url"])
|
2021-10-21 14:41:07 +00:00
|
|
|
|
|
|
|
return build_results, build_urls, build_logs_urls
|
|
|
|
|
2022-03-22 16:39:58 +00:00
|
|
|
|
2021-11-30 15:33:29 +00:00
|
|
|
def get_build_name_from_file_name(file_name):
|
2022-03-22 16:39:58 +00:00
|
|
|
return file_name.replace("build_urls_", "").replace(".json", "")
|
|
|
|
|
2021-10-21 14:41:07 +00:00
|
|
|
|
2022-04-11 13:36:18 +00:00
|
|
|
def main():
|
2021-10-21 14:41:07 +00:00
|
|
|
logging.basicConfig(level=logging.INFO)
|
2021-11-26 14:00:09 +00:00
|
|
|
temp_path = TEMP_PATH
|
2022-05-25 13:15:11 +00:00
|
|
|
logging.info("Reports path %s", REPORTS_PATH)
|
2021-10-21 14:41:07 +00:00
|
|
|
|
2021-10-21 15:32:15 +00:00
|
|
|
if not os.path.exists(temp_path):
|
|
|
|
os.makedirs(temp_path)
|
|
|
|
|
2021-10-21 14:41:07 +00:00
|
|
|
build_check_name = sys.argv[1]
|
2022-11-15 12:01:27 +00:00
|
|
|
needs_data = {} # type: NeedsDataType
|
2022-05-25 13:15:11 +00:00
|
|
|
required_builds = 0
|
|
|
|
if os.path.exists(NEEDS_DATA_PATH):
|
|
|
|
with open(NEEDS_DATA_PATH, "rb") as file_handler:
|
|
|
|
needs_data = json.load(file_handler)
|
|
|
|
required_builds = len(needs_data)
|
2021-10-21 14:41:07 +00:00
|
|
|
|
2022-11-15 12:01:27 +00:00
|
|
|
if needs_data and all(i["result"] == "skipped" for i in needs_data.values()):
|
2022-07-30 05:07:22 +00:00
|
|
|
logging.info("All builds are skipped, exiting")
|
|
|
|
sys.exit(0)
|
2022-07-21 15:11:43 +00:00
|
|
|
|
2022-06-28 08:27:25 +00:00
|
|
|
logging.info("The next builds are required: %s", ", ".join(needs_data))
|
|
|
|
|
2022-07-30 05:07:22 +00:00
|
|
|
gh = Github(get_best_robot_token(), per_page=100)
|
2021-11-26 14:00:09 +00:00
|
|
|
pr_info = PRInfo()
|
2022-07-30 05:07:22 +00:00
|
|
|
|
|
|
|
atexit.register(update_mergeable_check, gh, pr_info, build_check_name)
|
|
|
|
|
2021-12-01 14:23:51 +00:00
|
|
|
rerun_helper = RerunHelper(gh, pr_info, build_check_name)
|
|
|
|
if rerun_helper.is_already_finished_by_status():
|
|
|
|
logging.info("Check is already finished according to github status, exiting")
|
|
|
|
sys.exit(0)
|
|
|
|
|
2022-04-11 13:36:18 +00:00
|
|
|
builds_for_check = CI_CONFIG["builds_report_config"][build_check_name]
|
|
|
|
required_builds = required_builds or len(builds_for_check)
|
2021-11-30 15:33:29 +00:00
|
|
|
|
2022-04-11 13:36:18 +00:00
|
|
|
# Collect reports from json artifacts
|
|
|
|
builds_report_map = {}
|
2022-05-25 13:15:11 +00:00
|
|
|
for root, _, files in os.walk(REPORTS_PATH):
|
2021-10-21 14:41:07 +00:00
|
|
|
for f in files:
|
2022-03-22 16:39:58 +00:00
|
|
|
if f.startswith("build_urls_") and f.endswith(".json"):
|
2021-10-21 14:41:07 +00:00
|
|
|
logging.info("Found build report json %s", f)
|
2021-11-30 15:33:29 +00:00
|
|
|
build_name = get_build_name_from_file_name(f)
|
2022-04-11 13:36:18 +00:00
|
|
|
if build_name in builds_for_check:
|
2022-04-06 12:34:20 +00:00
|
|
|
with open(os.path.join(root, f), "rb") as file_handler:
|
2022-04-11 13:36:18 +00:00
|
|
|
builds_report_map[build_name] = json.load(file_handler)
|
2021-11-30 15:33:29 +00:00
|
|
|
else:
|
2022-03-22 16:39:58 +00:00
|
|
|
logging.info(
|
|
|
|
"Skipping report %s for build %s, it's not in our reports list",
|
|
|
|
f,
|
|
|
|
build_name,
|
|
|
|
)
|
|
|
|
|
2022-04-11 13:36:18 +00:00
|
|
|
# Sort reports by config order
|
|
|
|
build_reports = [
|
|
|
|
builds_report_map[build_name]
|
|
|
|
for build_name in builds_for_check
|
|
|
|
if build_name in builds_report_map
|
|
|
|
]
|
2022-03-26 21:45:45 +00:00
|
|
|
|
2022-04-11 13:36:18 +00:00
|
|
|
some_builds_are_missing = len(build_reports) < required_builds
|
2022-05-25 13:15:11 +00:00
|
|
|
missing_build_names = []
|
2022-03-26 21:45:45 +00:00
|
|
|
if some_builds_are_missing:
|
2022-04-11 13:36:18 +00:00
|
|
|
logging.warning(
|
|
|
|
"Expected to get %s build results, got only %s",
|
|
|
|
required_builds,
|
|
|
|
len(build_reports),
|
2022-03-28 12:43:15 +00:00
|
|
|
)
|
2022-05-25 13:15:11 +00:00
|
|
|
missing_build_names = [
|
|
|
|
name
|
|
|
|
for name in needs_data
|
|
|
|
if not any(rep for rep in build_reports if rep["job_name"] == name)
|
|
|
|
]
|
2022-03-26 21:45:45 +00:00
|
|
|
else:
|
2022-04-11 13:36:18 +00:00
|
|
|
logging.info("Got exactly %s builds", len(builds_report_map))
|
2022-03-26 21:45:45 +00:00
|
|
|
|
2022-04-11 13:36:18 +00:00
|
|
|
# Group build artifacts by groups
|
|
|
|
build_results = [] # type: List[BuildResult]
|
|
|
|
build_artifacts = [] #
|
2021-10-21 14:41:07 +00:00
|
|
|
build_logs = []
|
|
|
|
|
|
|
|
for build_report in build_reports:
|
2022-11-15 12:01:27 +00:00
|
|
|
_build_results, build_artifacts_url, build_logs_url = process_report(
|
|
|
|
build_report
|
|
|
|
)
|
2022-04-11 13:36:18 +00:00
|
|
|
logging.info(
|
2022-11-15 12:01:27 +00:00
|
|
|
"Got %s artifact groups for build report report", len(_build_results)
|
2022-04-11 13:36:18 +00:00
|
|
|
)
|
2022-11-15 12:01:27 +00:00
|
|
|
build_results.extend(_build_results)
|
2022-04-11 13:36:18 +00:00
|
|
|
build_artifacts.extend(build_artifacts_url)
|
|
|
|
build_logs.extend(build_logs_url)
|
|
|
|
|
2022-05-25 13:15:11 +00:00
|
|
|
for failed_job in missing_build_names:
|
2022-11-15 12:01:27 +00:00
|
|
|
_build_results, build_artifacts_url, build_logs_url = get_failed_report(
|
2022-05-25 13:15:11 +00:00
|
|
|
failed_job
|
|
|
|
)
|
2022-11-15 12:01:27 +00:00
|
|
|
build_results.extend(_build_results)
|
2022-05-25 13:15:11 +00:00
|
|
|
build_artifacts.extend(build_artifacts_url)
|
|
|
|
build_logs.extend(build_logs_url)
|
|
|
|
|
2022-04-11 13:36:18 +00:00
|
|
|
total_groups = len(build_results)
|
|
|
|
logging.info("Totally got %s artifact groups", total_groups)
|
|
|
|
if total_groups == 0:
|
|
|
|
logging.error("No success builds, failing check")
|
2021-12-09 09:17:03 +00:00
|
|
|
sys.exit(1)
|
2021-10-21 14:41:07 +00:00
|
|
|
|
2022-08-11 13:01:32 +00:00
|
|
|
s3_helper = S3Helper()
|
2021-10-21 14:41:07 +00:00
|
|
|
|
2021-11-26 14:00:09 +00:00
|
|
|
branch_url = f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}/commits/master"
|
2021-10-21 14:41:07 +00:00
|
|
|
branch_name = "master"
|
|
|
|
if pr_info.number != 0:
|
2022-04-06 12:34:20 +00:00
|
|
|
branch_name = f"PR #{pr_info.number}"
|
2021-11-26 14:00:09 +00:00
|
|
|
branch_url = f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}/pull/{pr_info.number}"
|
|
|
|
commit_url = f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}/commit/{pr_info.sha}"
|
2022-03-24 14:37:53 +00:00
|
|
|
task_url = GITHUB_RUN_URL
|
2021-10-21 14:41:07 +00:00
|
|
|
report = create_build_html_report(
|
|
|
|
build_check_name,
|
|
|
|
build_results,
|
|
|
|
build_logs,
|
|
|
|
build_artifacts,
|
|
|
|
task_url,
|
|
|
|
branch_url,
|
|
|
|
branch_name,
|
2022-03-22 16:39:58 +00:00
|
|
|
commit_url,
|
2021-10-21 14:41:07 +00:00
|
|
|
)
|
|
|
|
|
2022-03-22 16:39:58 +00:00
|
|
|
report_path = os.path.join(temp_path, "report.html")
|
2022-04-06 12:34:20 +00:00
|
|
|
with open(report_path, "w", encoding="utf-8") as fd:
|
|
|
|
fd.write(report)
|
2021-10-21 14:41:07 +00:00
|
|
|
|
|
|
|
logging.info("Going to upload prepared report")
|
2022-03-22 16:39:58 +00:00
|
|
|
context_name_for_path = build_check_name.lower().replace(" ", "_")
|
|
|
|
s3_path_prefix = (
|
|
|
|
str(pr_info.number) + "/" + pr_info.sha + "/" + context_name_for_path
|
|
|
|
)
|
2021-10-21 14:41:07 +00:00
|
|
|
|
2022-03-22 16:39:58 +00:00
|
|
|
url = s3_helper.upload_build_file_to_s3(
|
|
|
|
report_path, s3_path_prefix + "/report.html"
|
|
|
|
)
|
2021-10-21 14:41:07 +00:00
|
|
|
logging.info("Report url %s", url)
|
2022-04-11 13:36:18 +00:00
|
|
|
print(f"::notice ::Report url: {url}")
|
2021-10-21 14:41:07 +00:00
|
|
|
|
2022-04-11 13:36:18 +00:00
|
|
|
# Prepare a commit status
|
|
|
|
ok_groups = 0
|
2021-10-21 14:41:07 +00:00
|
|
|
summary_status = "success"
|
|
|
|
for build_result in build_results:
|
|
|
|
if build_result.status == "failure" and summary_status != "error":
|
|
|
|
summary_status = "failure"
|
|
|
|
if build_result.status == "error" or not build_result.status:
|
|
|
|
summary_status = "error"
|
|
|
|
|
|
|
|
if build_result.status == "success":
|
2022-04-11 13:36:18 +00:00
|
|
|
ok_groups += 1
|
2021-10-21 14:41:07 +00:00
|
|
|
|
2022-04-11 13:36:18 +00:00
|
|
|
if ok_groups == 0 or some_builds_are_missing:
|
2021-12-01 15:50:48 +00:00
|
|
|
summary_status = "error"
|
2021-11-30 15:33:29 +00:00
|
|
|
|
2022-03-30 09:15:54 +00:00
|
|
|
addition = ""
|
|
|
|
if some_builds_are_missing:
|
2022-04-11 13:36:18 +00:00
|
|
|
addition = f"({len(build_reports)} of {required_builds} builds are OK)"
|
2022-03-30 09:15:54 +00:00
|
|
|
|
2022-04-11 13:36:18 +00:00
|
|
|
description = f"{ok_groups}/{total_groups} artifact groups are OK {addition}"
|
2021-10-21 14:41:07 +00:00
|
|
|
|
|
|
|
commit = get_commit(gh, pr_info.sha)
|
2022-03-22 16:39:58 +00:00
|
|
|
commit.create_status(
|
|
|
|
context=build_check_name,
|
|
|
|
description=description,
|
|
|
|
state=summary_status,
|
|
|
|
target_url=url,
|
|
|
|
)
|
2022-03-29 12:41:47 +00:00
|
|
|
|
|
|
|
if summary_status == "error":
|
|
|
|
sys.exit(1)
|
2022-04-11 13:36:18 +00:00
|
|
|
|
|
|
|
|
|
|
|
if __name__ == "__main__":
|
|
|
|
main()
|