diff --git a/tests/ci/build_report_check.py b/tests/ci/build_report_check.py
index 04c8d12fc30..ba7f5daed10 100644
--- a/tests/ci/build_report_check.py
+++ b/tests/ci/build_report_check.py
@@ -7,28 +7,28 @@ import sys
 from pathlib import Path
 from typing import List
 
+from ci_config import CI
+from commit_status_helper import GITHUB_JOB_URL
 from env_helper import (
-    GITHUB_JOB_URL,
+    CI_CONFIG_PATH,
     GITHUB_REPOSITORY,
     GITHUB_SERVER_URL,
+    IS_CI,
     REPORT_PATH,
     TEMP_PATH,
-    CI_CONFIG_PATH,
-    IS_CI,
 )
 from pr_info import PRInfo
 from report import (
     ERROR,
+    FAILURE,
     PENDING,
     SUCCESS,
     BuildResult,
     JobReport,
     create_build_html_report,
     get_worst_status,
-    FAILURE,
 )
 from stopwatch import Stopwatch
-from ci_config import CI
 
 # Old way to read the neads_data
 NEEDS_DATA_PATH = os.getenv("NEEDS_DATA_PATH", "")
diff --git a/tests/ci/ci.py b/tests/ci/ci.py
index 3321d641c27..11a4eb56145 100644
--- a/tests/ci/ci.py
+++ b/tests/ci/ci.py
@@ -14,9 +14,12 @@ from typing import Any, Dict, List, Optional
 import docker_images_helper
 import upload_result_helper
 from build_check import get_release_or_pr
+from ci_buddy import CIBuddy
+from ci_cache import CiCache
 from ci_config import CI
 from ci_metadata import CiMetadata
-from ci_utils import GH, Utils, Envs
+from ci_settings import CiSettings
+from ci_utils import GH, Envs, Utils
 from clickhouse_helper import (
     CiLogsCredentials,
     ClickHouseHelper,
@@ -26,23 +29,17 @@ from clickhouse_helper import (
     prepare_tests_results_for_clickhouse,
 )
 from commit_status_helper import (
+    GITHUB_JOB_API_URL,
     CommitStatusData,
     RerunHelper,
     format_description,
     get_commit,
+    get_commit_filtered_statuses,
     post_commit_status,
     set_status_comment,
-    get_commit_filtered_statuses,
 )
 from digest_helper import DockerDigester
-from env_helper import (
-    IS_CI,
-    GITHUB_JOB_API_URL,
-    GITHUB_REPOSITORY,
-    GITHUB_RUN_ID,
-    REPO_COPY,
-    TEMP_PATH,
-)
+from env_helper import GITHUB_REPOSITORY, GITHUB_RUN_ID, IS_CI, REPO_COPY, TEMP_PATH
 from get_robot_token import get_best_robot_token
 from git_helper import GIT_PREFIX, Git
 from git_helper import Runner as GitRunner
@@ -50,22 +47,19 @@ from github_helper import GitHub
 from pr_info import PRInfo
 from report import (
     ERROR,
+    FAIL,
+    JOB_FINISHED_TEST_NAME,
+    JOB_STARTED_TEST_NAME,
+    OK,
     PENDING,
     SUCCESS,
     BuildResult,
     JobReport,
     TestResult,
-    OK,
-    JOB_STARTED_TEST_NAME,
-    JOB_FINISHED_TEST_NAME,
-    FAIL,
 )
 from s3_helper import S3Helper
-from tee_popen import TeePopen
-from ci_cache import CiCache
-from ci_settings import CiSettings
-from ci_buddy import CIBuddy
 from stopwatch import Stopwatch
+from tee_popen import TeePopen
 from version_helper import get_version_from_repo
 
 # pylint: disable=too-many-lines
diff --git a/tests/ci/ci_utils.py b/tests/ci/ci_utils.py
index 86fa1c008c9..a5dbac52618 100644
--- a/tests/ci/ci_utils.py
+++ b/tests/ci/ci_utils.py
@@ -11,6 +11,8 @@ from typing import Any, Dict, Iterator, List, Optional, Sequence, Tuple, Union
 
 import requests
 
+from env_helper import IS_CI
+
 logger = logging.getLogger(__name__)
 
 
@@ -42,7 +44,7 @@ def cd(path: Union[Path, str]) -> Iterator[None]:
 def kill_ci_runner(message: str) -> None:
     """The function to kill the current process with all parents when it's possible.
     Works only when run with the set `CI` environment"""
-    if not os.getenv("CI", ""):  # cycle import env_helper
+    if not IS_CI:
         logger.info("Running outside the CI, won't kill the runner")
         return
     print(f"::error::{message}")
diff --git a/tests/ci/commit_status_helper.py b/tests/ci/commit_status_helper.py
index a6287d5629b..9b70e7198a2 100644
--- a/tests/ci/commit_status_helper.py
+++ b/tests/ci/commit_status_helper.py
@@ -7,7 +7,7 @@ import time
 from collections import defaultdict
 from dataclasses import asdict, dataclass
 from pathlib import Path
-from typing import Dict, List, Optional, Union, Callable
+from typing import Callable, Dict, List, Optional, Tuple, Union
 
 from github import Github
 from github.Commit import Commit
@@ -17,8 +17,15 @@ from github.GithubObject import NotSet
 from github.IssueComment import IssueComment
 from github.Repository import Repository
 
+from build_download_helper import APIException, get_gh_api
 from ci_config import CI
-from env_helper import GITHUB_REPOSITORY, TEMP_PATH
+from env_helper import (
+    GITHUB_JOB,
+    GITHUB_REPOSITORY,
+    GITHUB_RUN_ID,
+    GITHUB_RUN_URL,
+    TEMP_PATH,
+)
 from pr_info import PRInfo
 from report import (
     ERROR,
@@ -33,6 +40,106 @@ from report import (
 from s3_helper import S3Helper
 from upload_result_helper import upload_results
 
+# These parameters are set only on demand, and only once
+_GITHUB_JOB_ID = ""
+_GITHUB_JOB_URL = ""
+_GITHUB_JOB_API_URL = ""
+
+
+def GITHUB_JOB_ID(safe: bool = True) -> str:
+    global _GITHUB_JOB_ID
+    global _GITHUB_JOB_URL
+    global _GITHUB_JOB_API_URL
+    if _GITHUB_JOB_ID:
+        return _GITHUB_JOB_ID
+    try:
+        _GITHUB_JOB_ID, _GITHUB_JOB_URL, _GITHUB_JOB_API_URL = get_job_id_url(
+            GITHUB_JOB
+        )
+    except APIException as e:
+        logging.warning("Unable to retrieve the job info from GH API: %s", e)
+        if not safe:
+            raise e
+    return _GITHUB_JOB_ID
+
+
+def GITHUB_JOB_URL(safe: bool = True) -> str:
+    try:
+        GITHUB_JOB_ID()
+    except APIException:
+        if safe:
+            logging.warning("Using run URL as a fallback to not fail the job")
+            return GITHUB_RUN_URL
+        raise
+
+    return _GITHUB_JOB_URL
+
+
+def GITHUB_JOB_API_URL(safe: bool = True) -> str:
+    GITHUB_JOB_ID(safe)
+    return _GITHUB_JOB_API_URL
+
+
+def get_job_id_url(job_name: str) -> Tuple[str, str, str]:
+    job_id = ""
+    job_url = ""
+    job_api_url = ""
+    if GITHUB_RUN_ID == "0":
+        job_id = "0"
+    if job_id:
+        return job_id, job_url, job_api_url
+    jobs = []
+    page = 1
+    while not job_id:
+        response = get_gh_api(
+            f"https://api.github.com/repos/{GITHUB_REPOSITORY}/"
+            f"actions/runs/{GITHUB_RUN_ID}/jobs?per_page=100&page={page}"
+        )
+        page += 1
+        data = response.json()
+        jobs.extend(data["jobs"])
+        for job in data["jobs"]:
+            if job["name"] != job_name:
+                continue
+            job_id = job["id"]
+            job_url = job["html_url"]
+            job_api_url = job["url"]
+            return job_id, job_url, job_api_url
+        if (
+            len(jobs) >= data["total_count"]  # just in case of inconsistency
+            or len(data["jobs"]) == 0  # if we excided pages
+        ):
+            job_id = "0"
+
+    if not job_url:
+        # This is a terrible workaround for the case of another broken part of
+        # GitHub actions. For nested workflows it doesn't provide a proper job_name
+        # value, but only the final one. So, for `OriginalJob / NestedJob / FinalJob`
+        # full name, job_name contains only FinalJob
+        matched_jobs = []
+        for job in jobs:
+            nested_parts = job["name"].split(" / ")
+            if len(nested_parts) <= 1:
+                continue
+            if nested_parts[-1] == job_name:
+                matched_jobs.append(job)
+        if len(matched_jobs) == 1:
+            # The best case scenario
+            job_id = matched_jobs[0]["id"]
+            job_url = matched_jobs[0]["html_url"]
+            job_api_url = matched_jobs[0]["url"]
+            return job_id, job_url, job_api_url
+        if matched_jobs:
+            logging.error(
+                "We could not get the ID and URL for the current job name %s, there "
+                "are more than one jobs match it for the nested workflows. Please, "
+                "refer to https://github.com/actions/runner/issues/2577",
+                job_name,
+            )
+
+    return job_id, job_url, job_api_url
+
+
 RETRY = 5
 CommitStatuses = List[CommitStatus]
 GH_REPO = None  # type: Optional[Repository]
diff --git a/tests/ci/env_helper.py b/tests/ci/env_helper.py
index 5217e4035da..e4886aedf0c 100644
--- a/tests/ci/env_helper.py
+++ b/tests/ci/env_helper.py
@@ -1,11 +1,7 @@
 #!/usr/bin/env python
-import logging
 import os
 from os import path as p
-from typing import Tuple
-
-from build_download_helper import APIException, get_gh_api
 
 module_dir = p.abspath(p.dirname(__file__))
 git_root = p.abspath(p.join(module_dir, "..", ".."))
 
@@ -41,102 +37,3 @@ S3_ARTIFACT_DOWNLOAD_TEMPLATE = (
     "{pr_or_release}/{commit}/{build_name}/{artifact}"
 )
 CI_CONFIG_PATH = f"{TEMP_PATH}/ci_config.json"
-
-# These parameters are set only on demand, and only once
-_GITHUB_JOB_ID = ""
-_GITHUB_JOB_URL = ""
-_GITHUB_JOB_API_URL = ""
-
-
-def GITHUB_JOB_ID(safe: bool = True) -> str:
-    global _GITHUB_JOB_ID
-    global _GITHUB_JOB_URL
-    global _GITHUB_JOB_API_URL
-    if _GITHUB_JOB_ID:
-        return _GITHUB_JOB_ID
-    try:
-        _GITHUB_JOB_ID, _GITHUB_JOB_URL, _GITHUB_JOB_API_URL = get_job_id_url(
-            GITHUB_JOB
-        )
-    except APIException as e:
-        logging.warning("Unable to retrieve the job info from GH API: %s", e)
-        if not safe:
-            raise e
-    return _GITHUB_JOB_ID
-
-
-def GITHUB_JOB_URL(safe: bool = True) -> str:
-    try:
-        GITHUB_JOB_ID()
-    except APIException:
-        if safe:
-            logging.warning("Using run URL as a fallback to not fail the job")
-            return GITHUB_RUN_URL
-        raise
-
-    return _GITHUB_JOB_URL
-
-
-def GITHUB_JOB_API_URL(safe: bool = True) -> str:
-    GITHUB_JOB_ID(safe)
-    return _GITHUB_JOB_API_URL
-
-
-def get_job_id_url(job_name: str) -> Tuple[str, str, str]:
-    job_id = ""
-    job_url = ""
-    job_api_url = ""
-    if GITHUB_RUN_ID == "0":
-        job_id = "0"
-    if job_id:
-        return job_id, job_url, job_api_url
-    jobs = []
-    page = 1
-    while not job_id:
-        response = get_gh_api(
-            f"https://api.github.com/repos/{GITHUB_REPOSITORY}/"
-            f"actions/runs/{GITHUB_RUN_ID}/jobs?per_page=100&page={page}"
-        )
-        page += 1
-        data = response.json()
-        jobs.extend(data["jobs"])
-        for job in data["jobs"]:
-            if job["name"] != job_name:
-                continue
-            job_id = job["id"]
-            job_url = job["html_url"]
-            job_api_url = job["url"]
-            return job_id, job_url, job_api_url
-        if (
-            len(jobs) >= data["total_count"]  # just in case of inconsistency
-            or len(data["jobs"]) == 0  # if we excided pages
-        ):
-            job_id = "0"
-
-    if not job_url:
-        # This is a terrible workaround for the case of another broken part of
-        # GitHub actions. For nested workflows it doesn't provide a proper job_name
-        # value, but only the final one. So, for `OriginalJob / NestedJob / FinalJob`
-        # full name, job_name contains only FinalJob
-        matched_jobs = []
-        for job in jobs:
-            nested_parts = job["name"].split(" / ")
-            if len(nested_parts) <= 1:
-                continue
-            if nested_parts[-1] == job_name:
-                matched_jobs.append(job)
-        if len(matched_jobs) == 1:
-            # The best case scenario
-            job_id = matched_jobs[0]["id"]
-            job_url = matched_jobs[0]["html_url"]
-            job_api_url = matched_jobs[0]["url"]
-            return job_id, job_url, job_api_url
-        if matched_jobs:
-            logging.error(
-                "We could not get the ID and URL for the current job name %s, there "
-                "are more than one jobs match it for the nested workflows. Please, "
-                "refer to https://github.com/actions/runner/issues/2577",
-                job_name,
-            )
-
-    return job_id, job_url, job_api_url
diff --git a/tests/ci/upload_result_helper.py b/tests/ci/upload_result_helper.py
index cb745131e0d..8426ccdad3d 100644
--- a/tests/ci/upload_result_helper.py
+++ b/tests/ci/upload_result_helper.py
@@ -1,14 +1,10 @@
+import logging
+import os
 from pathlib import Path
 from typing import Dict, List, Optional, Sequence, Union
 
-import os
-import logging
-from env_helper import (
-    GITHUB_JOB_URL,
-    GITHUB_REPOSITORY,
-    GITHUB_RUN_URL,
-    GITHUB_SERVER_URL,
-)
+from commit_status_helper import GITHUB_JOB_URL
+from env_helper import GITHUB_REPOSITORY, GITHUB_RUN_URL, GITHUB_SERVER_URL
 from report import TestResults, create_test_html_report
 from s3_helper import S3Helper
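Note (not part of the diff): the GITHUB_JOB_ID/GITHUB_JOB_URL/GITHUB_JOB_API_URL helpers move verbatim from env_helper.py to commit_status_helper.py, apparently so that env_helper.py no longer needs to import build_download_helper (compare the dropped `# cycle import env_helper` comment in ci_utils.py). Callers such as build_report_check.py and upload_result_helper.py now import GITHUB_JOB_URL from commit_status_helper instead of env_helper. A minimal usage sketch under that assumption; the job_report_link function below is purely illustrative and does not exist in the change:

from commit_status_helper import GITHUB_JOB_URL


def job_report_link() -> str:
    # Illustrative only: GITHUB_JOB_URL() lazily queries the GitHub Actions API
    # for the current job, caches the result in module-level globals, and with
    # safe=True (the default) falls back to GITHUB_RUN_URL instead of raising
    # when the API request fails.
    return GITHUB_JOB_URL()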