# ClickHouse/tests/ci/docker_images_check.py
#!/usr/bin/env python3
import argparse
2021-09-15 17:01:16 +00:00
import json
import logging
import os
2022-03-29 12:41:47 +00:00
import sys
2024-02-02 14:33:08 +00:00
import time
from pathlib import Path
from typing import List, Optional, Tuple
from github import Github
2021-11-26 14:00:09 +00:00
from clickhouse_helper import ClickHouseHelper, prepare_tests_results_for_clickhouse
from commit_status_helper import format_description, get_commit, post_commit_status
2024-02-02 14:33:08 +00:00
from docker_images_helper import DockerImageData, docker_login, get_images_oredered_list
from env_helper import GITHUB_RUN_URL, RUNNER_TEMP
from get_robot_token import get_best_robot_token
from pr_info import PRInfo
from report import FAILURE, SUCCESS, StatusType, TestResult, TestResults
from s3_helper import S3Helper
2021-11-19 14:47:04 +00:00
from stopwatch import Stopwatch
from tee_popen import TeePopen
from upload_result_helper import upload_results
2021-09-15 16:32:17 +00:00
# Per-run scratch directory for build logs; created eagerly at import time
# so the helpers below can assume it exists.
TEMP_PATH = Path(RUNNER_TEMP) / "docker_images_check"
TEMP_PATH.mkdir(parents=True, exist_ok=True)
2021-12-21 16:32:04 +00:00
def build_and_push_one_image(
    image: DockerImageData,
    version_string: str,
    additional_cache: List[str],
    push: bool,
    from_tag: Optional[str] = None,
) -> Tuple[bool, Path]:
    """Build (and optionally push) one docker image via ``docker buildx``.

    :param image: descriptor providing the repo name and Dockerfile path
    :param version_string: tag to build, e.g. ``12345-amd64``
    :param additional_cache: extra registry tags to add as ``--cache-from``
        sources; every entry must be non-empty
    :param push: when True, pass ``--push`` so the result is uploaded
    :param from_tag: optional ``FROM_TAG`` build-arg for the parent image
    :return: tuple of (success flag, path to the captured build log)
    """
    logging.info(
        "Building docker image %s with version %s from path %s",
        image.repo,
        version_string,
        image.path,
    )
    # TEMP_PATH is already a Path; no need to re-wrap it
    build_log = (
        TEMP_PATH
        / f"build_and_push_log_{image.repo.replace('/', '_')}_{version_string}.log"
    )
    push_arg = ""
    if push:
        push_arg = "--push "
    from_tag_arg = ""
    if from_tag:
        from_tag_arg = f"--build-arg FROM_TAG={from_tag} "
    # Use both the versioned and the latest tags as registry cache sources
    cache_from = (
        f"--cache-from type=registry,ref={image.repo}:{version_string} "
        f"--cache-from type=registry,ref={image.repo}:latest"
    )
    for tag in additional_cache:
        assert tag
        cache_from = f"{cache_from} --cache-from type=registry,ref={image.repo}:{tag}"

    cmd = (
        "docker buildx build --builder default "
        f"--label build-url={GITHUB_RUN_URL} "
        f"{from_tag_arg}"
        # A hack to invalidate cache, grep for it in docker/ dir
        f"--build-arg CACHE_INVALIDATOR={GITHUB_RUN_URL} "
        f"--tag {image.repo}:{version_string} "
        f"{cache_from} "
        f"--cache-to type=inline,mode=max "
        f"{push_arg}"
        f"--progress plain {image.path}"
    )
    logging.info("Docker command to run: %s", cmd)
    # TeePopen mirrors the child's output into build_log while streaming it
    with TeePopen(cmd, build_log) as proc:
        retcode = proc.wait()

        if retcode != 0:
            return False, build_log

    logging.info("Processing of %s successfully finished", image.repo)
    return True, build_log
2021-09-15 16:32:17 +00:00
2021-12-21 16:32:04 +00:00
def process_single_image(
    image: DockerImageData,
    versions: List[str],
    additional_cache: List[str],
    push: bool,
    from_tag: Optional[str] = None,
) -> TestResults:
    """Build *image* once per tag in *versions*, retrying each failed build.

    For every tag an "OK" or "FAIL" TestResult (with the build log attached)
    is appended to the returned list.

    :param image: descriptor of the image to build; ``image.built`` is set
        to True when this function returns (even if some builds failed,
        matching the original behavior)
    :param versions: list of tags to build for this image
    :param additional_cache: forwarded to build_and_push_one_image
    :param push: forwarded to build_and_push_one_image
    :param from_tag: forwarded to build_and_push_one_image
    :return: one TestResult per version
    """
    attempts = 2  # total tries per tag, including the first one
    logging.info("Image will be pushed with versions %s", ", ".join(versions))
    results = []  # type: TestResults
    for ver in versions:
        stopwatch = Stopwatch()
        for i in range(attempts):
            success, build_log = build_and_push_one_image(
                image, ver, additional_cache, push, from_tag
            )
            if success:
                results.append(
                    TestResult(
                        image.repo + ":" + ver,
                        "OK",
                        stopwatch.duration_seconds,
                        [build_log],
                    )
                )
                break
            if i < attempts - 1:
                # Back off only when another attempt remains; previously the
                # code logged a retry and slept after the final failure too.
                logging.info(
                    "Got error will retry %s time and sleep for %s seconds", i, i * 5
                )
                time.sleep(i * 5)
        else:
            # Loop finished without a successful build
            results.append(
                TestResult(
                    image.repo + ":" + ver,
                    "FAIL",
                    stopwatch.duration_seconds,
                    [build_log],
                )
            )

    logging.info("Processing finished")
    image.built = True
    return results
def parse_args() -> argparse.Namespace:
    """Parse command-line arguments.

    ``--suffix``, ``--missing-images`` and ``--image-tags`` are required.
    Pushing images and posting reports are enabled by default and can be
    turned off with ``--no-push-images`` / ``--no-reports``.
    """
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        # The example previously referenced a non-existent --image-path
        # argument and omitted the required ones; it now matches the parser.
        description="Program to build changed or given docker images with all "
        "dependant images. Example for local running: "
        "python docker_images_check.py --no-push-images --no-reports "
        "--suffix amd64 --missing-images all --image-tags '{}'",
    )
    parser.add_argument("--suffix", type=str, required=True, help="arch suffix")
    parser.add_argument(
        "--missing-images",
        type=str,
        required=True,
        help="json string or json file with images to build {IMAGE: TAG} or type all to build all",
    )
    parser.add_argument(
        "--image-tags",
        type=str,
        required=True,
        help="json string or json file with all images and their tags {IMAGE: TAG}",
    )
    # Hidden positive flags; the visible --no-* options store False into them
    parser.add_argument("--reports", default=True, help=argparse.SUPPRESS)
    parser.add_argument(
        "--no-reports",
        action="store_false",
        dest="reports",
        default=argparse.SUPPRESS,
        help="don't push reports to S3 and github",
    )
    parser.add_argument("--push", default=True, help=argparse.SUPPRESS)
    parser.add_argument(
        "--no-push-images",
        action="store_false",
        dest="push",
        default=argparse.SUPPRESS,
        help="don't push images to docker hub",
    )
    return parser.parse_args()
2021-11-19 14:47:04 +00:00
def _load_json_spec(value: str) -> dict:
    """Parse *value* as JSON: if it names an existing file, load that file,
    otherwise treat the string itself as a JSON document."""
    if os.path.isfile(value):
        with open(value, "r", encoding="utf-8") as jfd:
            return json.load(jfd)
    return json.loads(value)


def main():
    """Build all missing docker images in dependency order and report results.

    Logs into docker hub (unless --no-push-images), builds each missing
    image, uploads the results to S3, posts a commit status and ClickHouse
    events (unless --no-reports), and exits non-zero on any build failure.
    """
    logging.basicConfig(level=logging.INFO)

    stopwatch = Stopwatch()

    args = parse_args()

    NAME = f"Push to Dockerhub {args.suffix}"

    if args.push:
        logging.info("login to docker hub")
        docker_login()

    test_results = []  # type: TestResults
    additional_cache = []  # type: List[str]
    # FIXME: add all tags that we need. latest on master!
    # if pr_info.release_pr:
    #     logging.info("Use %s as additional cache tag", pr_info.release_pr)
    #     additional_cache.append(str(pr_info.release_pr))
    # if pr_info.merged_pr:
    #     logging.info("Use %s as additional cache tag", pr_info.merged_pr)
    #     additional_cache.append(str(pr_info.merged_pr))

    ok_cnt = 0
    status = SUCCESS  # type: StatusType

    # Both arguments accept either a JSON string or a path to a JSON file
    image_tags = _load_json_spec(args.image_tags)

    if args.missing_images == "all":
        missing_images = image_tags
    else:
        missing_images = _load_json_spec(args.missing_images)

    images_build_list = get_images_oredered_list()

    for image in images_build_list:
        if image.repo not in missing_images:
            continue
        logging.info("Start building image: %s", image)

        image_versions = (
            [image_tags[image.repo]]
            if not args.suffix
            else [f"{image_tags[image.repo]}-{args.suffix}"]
        )
        parent_version = (
            None
            if not image.parent
            else (
                image_tags[image.parent]
                if not args.suffix
                else f"{image_tags[image.parent]}-{args.suffix}"
            )
        )

        res = process_single_image(
            image,
            image_versions,
            additional_cache,
            args.push,
            from_tag=parent_version,
        )
        test_results += res
        if all(x.status == "OK" for x in res):
            ok_cnt += 1
        else:
            status = FAILURE
            break  # No need to continue with next images

    description = format_description(
        f"Images build done. built {ok_cnt} out of {len(missing_images)} images."
    )

    s3_helper = S3Helper()

    pr_info = PRInfo()
    url = upload_results(s3_helper, pr_info.number, pr_info.sha, test_results, [], NAME)

    print(f"::notice ::Report url: {url}")

    if not args.reports:
        return

    gh = Github(get_best_robot_token(), per_page=100)
    commit = get_commit(gh, pr_info.sha)
    post_commit_status(
        commit, status, url, description, NAME, pr_info, dump_to_file=True
    )

    prepared_events = prepare_tests_results_for_clickhouse(
        pr_info,
        test_results,
        status,
        stopwatch.duration_seconds,
        stopwatch.start_time_str,
        url,
        NAME,
    )
    ch_helper = ClickHouseHelper()
    ch_helper.insert_events_into(db="default", table="checks", events=prepared_events)

    if status == FAILURE:
        sys.exit(1)
2022-03-29 16:23:18 +00:00
# Run only when executed as a script, not on import.
if __name__ == "__main__":
    main()