#!/usr/bin/env python

import logging
import os
from os import path as p
from typing import Tuple

from build_download_helper import APIException, get_gh_api

module_dir = p.abspath(p.dirname(__file__))
git_root = p.abspath(p.join(module_dir, "..", ".."))
ROOT_DIR = git_root
CI = bool(os.getenv("CI"))
TEMP_PATH = os.getenv("TEMP_PATH", p.abspath(p.join(module_dir, "./tmp")))
REPORT_PATH = f"{TEMP_PATH}/reports"
# FIXME: latest should not be used in CI, set temporary for transition to "docker with digest as a tag"
DOCKER_TAG = os.getenv("DOCKER_TAG", "latest")
CACHES_PATH = os.getenv("CACHES_PATH", TEMP_PATH)
CLOUDFLARE_TOKEN = os.getenv("CLOUDFLARE_TOKEN")
GITHUB_EVENT_PATH = os.getenv("GITHUB_EVENT_PATH", "")
GITHUB_JOB = os.getenv("GITHUB_JOB_OVERRIDDEN", "") or os.getenv("GITHUB_JOB", "local")
GITHUB_REPOSITORY = os.getenv("GITHUB_REPOSITORY", "ClickHouse/ClickHouse")
GITHUB_RUN_ID = os.getenv("GITHUB_RUN_ID", "0")
GITHUB_SERVER_URL = os.getenv("GITHUB_SERVER_URL", "https://github.com")
GITHUB_WORKSPACE = os.getenv("GITHUB_WORKSPACE", git_root)
GITHUB_RUN_URL = f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}/actions/runs/{GITHUB_RUN_ID}"
IMAGES_PATH = os.getenv("IMAGES_PATH", TEMP_PATH)
REPO_COPY = os.getenv("REPO_COPY", GITHUB_WORKSPACE)
RUNNER_TEMP = os.getenv("RUNNER_TEMP", p.abspath(p.join(module_dir, "./tmp")))
S3_BUILDS_BUCKET = os.getenv("S3_BUILDS_BUCKET", "clickhouse-builds")
S3_TEST_REPORTS_BUCKET = os.getenv("S3_TEST_REPORTS_BUCKET", "clickhouse-test-reports")
S3_URL = os.getenv("S3_URL", "https://s3.amazonaws.com")
S3_DOWNLOAD = os.getenv("S3_DOWNLOAD", S3_URL)
S3_ARTIFACT_DOWNLOAD_TEMPLATE = (
    f"{S3_DOWNLOAD}/{S3_BUILDS_BUCKET}/"
    "{pr_or_release}/{commit}/{build_name}/{artifact}"
)
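# A hypothetical illustration (not part of the original module) of how the
# template expands; the concrete values below are made-up placeholders:
#   S3_ARTIFACT_DOWNLOAD_TEMPLATE.format(
#       pr_or_release="12345", commit="abcdef012345",
#       build_name="package_release", artifact="clickhouse.tgz",
#   )
# yields f"{S3_DOWNLOAD}/{S3_BUILDS_BUCKET}/12345/abcdef012345/package_release/clickhouse.tgz"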

# These parameters are set only on demand, and only once
_GITHUB_JOB_ID = ""
_GITHUB_JOB_URL = ""
_GITHUB_JOB_API_URL = ""
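# A note on the caching (this comment is an addition, not from the original
# module): the first GITHUB_JOB_ID() call below performs the API lookup and
# fills all three cached values, so a subsequent GITHUB_JOB_URL() or
# GITHUB_JOB_API_URL() call needs no extra request.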


def GITHUB_JOB_ID(safe: bool = True) -> str:
    """Return the current job's ID, looking it up once via the GitHub API
    and caching the result; with safe=True an API failure is only logged
    and an empty string is returned"""
    global _GITHUB_JOB_ID
    global _GITHUB_JOB_URL
    global _GITHUB_JOB_API_URL
    if _GITHUB_JOB_ID:
        return _GITHUB_JOB_ID
    try:
        _GITHUB_JOB_ID, _GITHUB_JOB_URL, _GITHUB_JOB_API_URL = get_job_id_url(
            GITHUB_JOB
        )
    except APIException as e:
        logging.warning("Unable to retrieve the job info from GH API: %s", e)
        if not safe:
            raise e
    return _GITHUB_JOB_ID


def GITHUB_JOB_URL(safe: bool = True) -> str:
    """Return the HTML URL of the current job; with safe=True fall back to
    the workflow run URL when the GitHub API lookup fails"""
    try:
        # Request an unsafe lookup here: with the default safe=True,
        # GITHUB_JOB_ID would swallow the APIException itself and the
        # fallback branch below would never trigger
        GITHUB_JOB_ID(safe=False)
    except APIException:
        if safe:
            logging.warning("Using run URL as a fallback to not fail the job")
            return GITHUB_RUN_URL
        raise

    return _GITHUB_JOB_URL


def GITHUB_JOB_API_URL(safe: bool = True) -> str:
    """Return the API URL of the current job, resolved via GITHUB_JOB_ID"""
    GITHUB_JOB_ID(safe)
    return _GITHUB_JOB_API_URL


def get_job_id_url(job_name: str) -> Tuple[str, str, str]:
    """Find the job by its name among the jobs of the current workflow run
    and return its ID, HTML URL and API URL; a job that cannot be resolved
    is signalled by job_id == "0" and empty URLs"""
    job_id = ""
    job_url = ""
    job_api_url = ""
    if GITHUB_RUN_ID == "0":
        job_id = "0"
    if job_id:
        return job_id, job_url, job_api_url
    jobs = []
    page = 1
    # Paginate through the run's jobs, 100 per page, until the job is found
    # or the pages are exhausted
    while not job_id:
        response = get_gh_api(
            f"https://api.github.com/repos/{GITHUB_REPOSITORY}/"
            f"actions/runs/{GITHUB_RUN_ID}/jobs?per_page=100&page={page}"
        )
        page += 1
        data = response.json()
        jobs.extend(data["jobs"])
        for job in data["jobs"]:
            if job["name"] != job_name:
                continue
            job_id = str(job["id"])
            job_url = job["html_url"]
            job_api_url = job["url"]
            return job_id, job_url, job_api_url
        if (
            len(jobs) >= data["total_count"]  # just in case of inconsistency
            or len(data["jobs"]) == 0  # if we exceeded the number of pages
        ):
            job_id = "0"

    if not job_url:
        # This is a terrible workaround for the case of another broken part of
        # GitHub actions. For nested workflows it doesn't provide a proper
        # job_name value, but only the final one. So, for the full name
        # `OriginalJob / NestedJob / FinalJob`, job_name contains only FinalJob
        matched_jobs = []
        for job in jobs:
            nested_parts = job["name"].split(" / ")
            if len(nested_parts) <= 1:
                continue
            if nested_parts[-1] == job_name:
                matched_jobs.append(job)
        if len(matched_jobs) == 1:
            # The best case scenario
            job_id = str(matched_jobs[0]["id"])
            job_url = matched_jobs[0]["html_url"]
            job_api_url = matched_jobs[0]["url"]
            return job_id, job_url, job_api_url
        if matched_jobs:
            logging.error(
                "We could not get the ID and URL for the current job name %s: "
                "more than one job matches it in the nested workflows. Please "
                "refer to https://github.com/actions/runner/issues/2577",
                job_name,
            )

    return job_id, job_url, job_api_url
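

if __name__ == "__main__":
    # A minimal smoke-test sketch (an addition, not part of the original
    # module). Outside of CI, GITHUB_RUN_ID defaults to "0", so
    # get_job_id_url() short-circuits and no GitHub API request is made.
    logging.basicConfig(level=logging.INFO)
    print("CI:", CI)
    print("job id:", GITHUB_JOB_ID())
    print("job URL:", GITHUB_JOB_URL())
    print("job API URL:", GITHUB_JOB_API_URL())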