#!/usr/bin/env python3
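"""Builds a combined report for a group of build jobs.

The script collects the per-build JSON results, renders a single HTML report,
uploads it to S3 and posts the outcome as a commit status on GitHub.

The only command-line argument is the name of the build check, used as a key
into CI_CONFIG.builds_report_config.
"""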
import json
import logging
import os
import sys
import atexit
from pathlib import Path

from github import Github

from env_helper import (
    GITHUB_JOB_URL,
    GITHUB_REPOSITORY,
    GITHUB_SERVER_URL,
    REPORTS_PATH,
    TEMP_PATH,
)
from report import (
    BuildResult,
    ERROR,
    PENDING,
    SUCCESS,
    create_build_html_report,
    get_worst_status,
)
from s3_helper import S3Helper
from get_robot_token import get_best_robot_token
from pr_info import NeedsDataType, PRInfo
from commit_status_helper import (
    RerunHelper,
    format_description,
    get_commit,
    post_commit_status,
    update_mergeable_check,
)
from ci_config import CI_CONFIG


# Path to a JSON file with the results of the jobs this check depends on
# (assumed to be the workflow's `needs` context dumped to disk); may be unset.
NEEDS_DATA_PATH = os.getenv("NEEDS_DATA_PATH", "")


def main():
    logging.basicConfig(level=logging.INFO)
    temp_path = Path(TEMP_PATH)
    temp_path.mkdir(parents=True, exist_ok=True)

    logging.info("Reports path %s", REPORTS_PATH)
    reports_path = Path(REPORTS_PATH)
    logging.info(
        "Reports found:\n %s",
        "\n ".join(p.as_posix() for p in reports_path.rglob("*.json")),
    )

    build_check_name = sys.argv[1]
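    # Results of the jobs this check depends on, when the workflow provides
    # them; used below to detect builds that were skipped or left no report.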
    needs_data = {}  # type: NeedsDataType
    required_builds = 0
    if os.path.exists(NEEDS_DATA_PATH):
        with open(NEEDS_DATA_PATH, "rb") as file_handler:
            needs_data = json.load(file_handler)
            required_builds = len(needs_data)

    if needs_data:
        logging.info("The following builds are required: %s", ", ".join(needs_data))
        if all(i["result"] == "skipped" for i in needs_data.values()):
            logging.info("All builds are skipped, exiting")
            sys.exit(0)

    gh = Github(get_best_robot_token(), per_page=100)
    pr_info = PRInfo()
    commit = get_commit(gh, pr_info.sha)
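
    # Registered with atexit so the "mergeable" check is updated even when the
    # script exits early, e.g. in the rerun shortcut below.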
    atexit.register(update_mergeable_check, gh, pr_info, build_check_name)
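
    # If a finished status for this check already exists on the commit, this is
    # a rerun and there is nothing left to do.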
    rerun_helper = RerunHelper(commit, build_check_name)
    if rerun_helper.is_already_finished_by_status():
        logging.info("Check is already finished according to github status, exiting")
        sys.exit(0)

    builds_for_check = CI_CONFIG.builds_report_config[build_check_name]
    required_builds = required_builds or len(builds_for_check)

    # Collect reports from JSON artifacts
    build_results = []
    for build_name in builds_for_check:
        report_name = BuildResult.get_report_name(build_name).stem
        build_result = BuildResult.read_json(reports_path / report_name, build_name)
        if build_result.is_missing:
            logging.warning("Build results for %s are missing", build_name)
            continue
        build_results.append(build_result)

    # Collect the names of required jobs that did not produce a report
    missing_job_names = [
        name
        for name in needs_data
        if not any(1 for build_result in build_results if build_result.job_name == name)
    ]
    missing_builds = len(missing_job_names)
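    # Insert dummy PENDING results at the top of the report so missing jobs are
    # visible and the overall status cannot become green.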
    for job_name in reversed(missing_job_names):
        build_result = BuildResult.missing_result("missing")
        build_result.job_name = job_name
        build_result.status = PENDING
        logging.info(
            "There is a missing report for %s, created a dummy result %s",
            job_name,
            build_result,
        )
        build_results.insert(0, build_result)

    # Calculate artifact groups like packages and binaries
    total_groups = sum(len(br.grouped_urls) for br in build_results)
    ok_groups = sum(
        len(br.grouped_urls) for br in build_results if br.status == SUCCESS
    )
    logging.info("Got %s artifact groups in total", total_groups)
    if total_groups == 0:
        logging.error(
            "No successful builds, failing the check without creating a status"
        )
        sys.exit(1)

    s3_helper = S3Helper()
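
    # Branch, commit and task links that go into the HTML report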
    branch_url = f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}/commits/master"
    branch_name = "master"
    if pr_info.number != 0:
        branch_name = f"PR #{pr_info.number}"
        branch_url = f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}/pull/{pr_info.number}"
    commit_url = f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}/commit/{pr_info.sha}"
    task_url = GITHUB_JOB_URL()
    report = create_build_html_report(
        build_check_name,
        build_results,
        task_url,
        branch_url,
        branch_name,
        commit_url,
    )

    report_path = temp_path / "report.html"
    report_path.write_text(report, encoding="utf-8")
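
    # Upload the report to S3 under <pr number>/<commit sha>/<check name>/report.html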
logging.info("Going to upload prepared report")
|
2022-03-22 16:39:58 +00:00
|
|
|
context_name_for_path = build_check_name.lower().replace(" ", "_")
|
|
|
|
s3_path_prefix = (
|
|
|
|
str(pr_info.number) + "/" + pr_info.sha + "/" + context_name_for_path
|
|
|
|
)
|
2021-10-21 14:41:07 +00:00
|
|
|
|
2023-03-23 14:49:59 +00:00
|
|
|
url = s3_helper.upload_test_report_to_s3(
|
2022-03-22 16:39:58 +00:00
|
|
|
report_path, s3_path_prefix + "/report.html"
|
|
|
|
)
|
2021-10-21 14:41:07 +00:00
|
|
|
logging.info("Report url %s", url)
|
2022-04-11 13:36:18 +00:00
|
|
|
print(f"::notice ::Report url: {url}")
|
2021-10-21 14:41:07 +00:00
|
|
|
|
2022-04-11 13:36:18 +00:00
|
|
|
# Prepare a commit status
|
2023-09-01 20:35:31 +00:00
|
|
|
summary_status = get_worst_status(br.status for br in build_results)
|
2021-10-21 14:41:07 +00:00
|
|
|
|
2023-06-01 06:51:44 +00:00
|
|
|
# Check if there are no builds at all, do not override bad status
|
2023-09-01 20:35:31 +00:00
|
|
|
if summary_status == SUCCESS:
|
|
|
|
if missing_builds:
|
|
|
|
summary_status = PENDING
|
2023-06-01 06:51:44 +00:00
|
|
|
elif ok_groups == 0:
|
2023-09-01 20:35:31 +00:00
|
|
|
summary_status = ERROR
|
2021-11-30 15:33:29 +00:00
|
|
|
|
2022-03-30 09:15:54 +00:00
|
|
|
addition = ""
|
2023-09-01 20:35:31 +00:00
|
|
|
if missing_builds:
|
|
|
|
addition = (
|
|
|
|
f" ({required_builds - missing_builds} of {required_builds} builds are OK)"
|
|
|
|
)
|
2022-03-30 09:15:54 +00:00
|
|
|
|
2023-06-01 06:51:44 +00:00
|
|
|
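
    # format_description is assumed to trim the text to GitHub's length limit
    # for commit status descriptions.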
    description = format_description(
        f"{ok_groups}/{total_groups} artifact groups are OK{addition}"
    )

    post_commit_status(
        commit, summary_status, url, description, build_check_name, pr_info
    )
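
    # Make the CI job itself fail when the report ends up in an error state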
    if summary_status == ERROR:
        sys.exit(1)


if __name__ == "__main__":
    main()