ClickHouse/tests/ci/docker_manifests_merge.py

#!/usr/bin/env python3
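"""Merge per-architecture Docker images into multi-arch manifests.

The script reads changed_images_{suffix}.json for every --suffix passed on the
command line, verifies that the per-suffix tag lists describe the same images,
creates (and optionally pushes) a docker manifest for each merged tag, writes
the merged changed_images.json artifact, uploads the results to S3 and posts a
commit status to GitHub.

Illustrative invocation (the suffix values below are only an example):
    docker_manifests_merge.py --suffix amd64 --suffix aarch64
"""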

import argparse
import json
import logging
import os
import subprocess
from typing import List, Dict, Tuple

from github import Github

from clickhouse_helper import ClickHouseHelper, prepare_tests_results_for_clickhouse
from commit_status_helper import format_description, get_commit, post_commit_status
from env_helper import RUNNER_TEMP
from get_robot_token import get_best_robot_token, get_parameter_from_ssm
from pr_info import PRInfo
from report import TestResults, TestResult
from s3_helper import S3Helper
from stopwatch import Stopwatch
from upload_result_helper import upload_results

NAME = "Push multi-arch images to Dockerhub"
CHANGED_IMAGES = "changed_images_{}.json"

Images = Dict[str, List[str]]


def parse_args() -> argparse.Namespace:
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        description="The program gets images from changed_images_*.json, merges images "
        "with different architectures into one manifest and pushes back to docker hub",
    )

    parser.add_argument(
        "--suffix",
        dest="suffixes",
        type=str,
        required=True,
        action="append",
        help="suffixes for existing images' tags. At least two should be given",
    )
    parser.add_argument(
        "--path",
        type=str,
        default=RUNNER_TEMP,
        help="path to changed_images_*.json files",
    )
    parser.add_argument("--reports", default=True, help=argparse.SUPPRESS)
    parser.add_argument(
        "--no-reports",
        action="store_false",
        dest="reports",
        default=argparse.SUPPRESS,
        help="don't push reports to S3 and github",
    )
    parser.add_argument("--push", default=True, help=argparse.SUPPRESS)
    parser.add_argument(
        "--no-push-images",
        action="store_false",
        dest="push",
        default=argparse.SUPPRESS,
        help="don't push images to docker hub",
    )

    args = parser.parse_args()
    if len(args.suffixes) < 2:
        parser.error("at least two --suffix should be given")

    return args
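

# Each changed_images_{suffix}.json is expected to map an image name to the list
# of tags built for that suffix; an illustrative shape (the name and tags below
# are made up):
#   {"clickhouse/test-base": ["latest-amd64", "1234-deadbeef-amd64"]}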
def load_images(path: str, suffix: str) -> Images:
    with open(os.path.join(path, CHANGED_IMAGES.format(suffix)), "rb") as images:
        return json.load(images)  # type: ignore


def strip_suffix(suffix: str, images: Images) -> Images:
    result = {}
    for image, versions in images.items():
        for v in versions:
            if not v.endswith(f"-{suffix}"):
                raise ValueError(
                    f"version {image}:{v} does not contain suffix {suffix}"
                )

        result[image] = [v[: -len(suffix) - 1] for v in versions]

    return result


def check_sources(to_merge: Dict[str, Images]) -> Images:
    """Accepts a dict of per-suffix images, e.g. {suffix1: Images, suffix2: Images},
    checks that every suffix describes the same set of images once the suffix is
    stripped, and returns the stripped Images"""
    result = {}  # type: Images
    first_suffix = ""
    for suffix, images in to_merge.items():
        if not result:
            first_suffix = suffix
            result = strip_suffix(suffix, images)
            continue
        if not result == strip_suffix(suffix, images):
            raise ValueError(
                f"images in {images} are not equal to {to_merge[first_suffix]}"
            )

    return result


def get_changed_images(images: Images) -> Dict[str, str]:
    """Produces the output artifact in the original json format {"image": "tag"}
    by keeping only the latest version of each image; the latest version is
    {PR_NUMBER}-{SHA1}
    """
    return {k: v[-1] for k, v in images.items()}
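

# merge_images() groups every stripped tag with its per-suffix counterparts, so the
# result has the shape (illustrative values):
#   {"clickhouse/test-base": [["1234-deadbeef", "1234-deadbeef-amd64", "1234-deadbeef-aarch64"]]}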
def merge_images(to_merge: Dict[str, Images]) -> Dict[str, List[List[str]]]:
    """The function merges image-name:version-suffix1 and image-name:version-suffix2
    into image-name:version"""
    suffixes = to_merge.keys()
    result_images = check_sources(to_merge)
    merge = {}  # type: Dict[str, List[List[str]]]

    for image, versions in result_images.items():
        merge[image] = []
        for i, v in enumerate(versions):
            merged_v = [v]  # type: List[str]
            for suf in suffixes:
                merged_v.append(to_merge[suf][image][i])
            merge[image].append(merged_v)

    return merge
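

# The first tag names the resulting manifest; all per-suffix tags are merged into
# it with "docker manifest create --amend" and, when pushing is enabled, published
# with "docker manifest push".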
def create_manifest(image: str, tags: List[str], push: bool) -> Tuple[str, str]:
    tag = tags[0]
    manifest = f"{image}:{tag}"
    cmd = "docker manifest create --amend " + " ".join((f"{image}:{t}" for t in tags))
    logging.info("running: %s", cmd)
    with subprocess.Popen(
        cmd,
        shell=True,
        stderr=subprocess.STDOUT,
        stdout=subprocess.PIPE,
        universal_newlines=True,
    ) as popen:
        retcode = popen.wait()
        if retcode != 0:
            output = popen.stdout.read()  # type: ignore
            logging.error("failed to create manifest for %s:\n %s\n", manifest, output)
            return manifest, "FAIL"

    if not push:
        return manifest, "OK"

    cmd = f"docker manifest push {manifest}"
    logging.info("running: %s", cmd)
    with subprocess.Popen(
        cmd,
        shell=True,
        stderr=subprocess.STDOUT,
        stdout=subprocess.PIPE,
        universal_newlines=True,
    ) as popen:
        retcode = popen.wait()
        if retcode != 0:
            output = popen.stdout.read()  # type: ignore
            logging.error("failed to push %s:\n %s\n", manifest, output)
            return manifest, "FAIL"

    return manifest, "OK"


def main():
    logging.basicConfig(level=logging.INFO)
    stopwatch = Stopwatch()
    args = parse_args()
    if args.push:
        subprocess.check_output(  # pylint: disable=unexpected-keyword-arg
            "docker login --username 'robotclickhouse' --password-stdin",
            input=get_parameter_from_ssm("dockerhub_robot_password"),
            encoding="utf-8",
            shell=True,
        )

    to_merge = {}
    for suf in args.suffixes:
        to_merge[suf] = load_images(args.path, suf)

    changed_images = get_changed_images(check_sources(to_merge))
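
    # "docker manifest" was gated behind the experimental CLI flag on older Docker
    # releases, which is presumably why it is enabled explicitly here.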
    os.environ["DOCKER_CLI_EXPERIMENTAL"] = "enabled"
    merged = merge_images(to_merge)

    status = "success"
    test_results = []  # type: TestResults
    for image, versions in merged.items():
        for tags in versions:
            manifest, test_result = create_manifest(image, tags, args.push)
            test_results.append(TestResult(manifest, test_result))
            if test_result != "OK":
                status = "failure"

    with open(
        os.path.join(args.path, "changed_images.json"), "w", encoding="utf-8"
    ) as ci:
        json.dump(changed_images, ci)

    pr_info = PRInfo()
    s3_helper = S3Helper()

    url = upload_results(s3_helper, pr_info.number, pr_info.sha, test_results, [], NAME)

    print(f"::notice ::Report url: {url}")

    if not args.reports:
        return

    if changed_images:
        description = "Updated " + ", ".join(changed_images.keys())
    else:
        description = "Nothing to update"
    description = format_description(description)

    gh = Github(get_best_robot_token(), per_page=100)
    commit = get_commit(gh, pr_info.sha)
    post_commit_status(commit, status, url, description, NAME, pr_info)

    prepared_events = prepare_tests_results_for_clickhouse(
        pr_info,
        test_results,
        status,
        stopwatch.duration_seconds,
        stopwatch.start_time_str,
        url,
        NAME,
    )
    ch_helper = ClickHouseHelper()
    ch_helper.insert_events_into(db="default", table="checks", events=prepared_events)


if __name__ == "__main__":
    main()