#!/usr/bin/env python3

import argparse
import json
import logging
import os
import subprocess

from pathlib import Path
from typing import List, Dict, Tuple

from github import Github

from clickhouse_helper import (
    ClickHouseHelper,
    prepare_tests_results_for_clickhouse,
    CHException,
)
from commit_status_helper import format_description, get_commit, post_commit_status
from docker_images_helper import IMAGES_FILE_PATH, get_image_names
from env_helper import RUNNER_TEMP, REPO_COPY
from get_robot_token import get_best_robot_token, get_parameter_from_ssm
from git_helper import Runner
from pr_info import PRInfo
from report import TestResults, TestResult
from s3_helper import S3Helper
from stopwatch import Stopwatch
from upload_result_helper import upload_results

NAME = "Push multi-arch images to Dockerhub"
CHANGED_IMAGES = "changed_images_{}.json"

Images = Dict[str, List[str]]
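
# For illustration, a changed_images_amd64.json loaded into Images could look
# like this (hypothetical image and tags; see get_changed_images below for the
# {PR_NUMBER}-{SHA1} tag scheme):
#   {"clickhouse/binary-builder": ["latest-amd64", "12345-deadbeef-amd64"]}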


def parse_args() -> argparse.Namespace:
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        description="The program gets images from changed_images_*.json, merges images "
        "with different architectures into one manifest and pushes them back to docker hub",
    )

    parser.add_argument(
        "--suffix",
        dest="suffixes",
        type=str,
        required=True,
        action="append",
        help="suffixes for existing images' tags. At least two should be given",
    )
    parser.add_argument(
        "--path",
        type=Path,
        default=RUNNER_TEMP,
        help="path to changed_images_*.json files",
    )
    parser.add_argument("--reports", default=True, help=argparse.SUPPRESS)
    parser.add_argument(
        "--no-reports",
        action="store_false",
        dest="reports",
        default=argparse.SUPPRESS,
        help="don't push reports to S3 and github",
    )
    parser.add_argument("--push", default=True, help=argparse.SUPPRESS)
    parser.add_argument(
        "--no-push-images",
        action="store_false",
        dest="push",
        default=argparse.SUPPRESS,
        help="don't push images to docker hub",
    )

    args = parser.parse_args()
    if len(args.suffixes) < 2:
        parser.error("at least two --suffix should be given")

    return args
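
# A minimal invocation sketch (the suffix values are hypothetical; they must
# match the changed_images_<suffix>.json files produced by earlier CI jobs):
#   python3 <this script> --suffix amd64 --suffix aarch64 --path /tmp/changed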


def load_images(path: Path, suffix: str) -> Images:
    with open(path / CHANGED_IMAGES.format(suffix), "rb") as images:
        return json.load(images)  # type: ignore


def strip_suffix(suffix: str, images: Images) -> Images:
    result = {}
    for image, versions in images.items():
        for v in versions:
            if not v.endswith(f"-{suffix}"):
                raise ValueError(
                    f"version {image}:{v} does not end with suffix {suffix}"
                )
        result[image] = [v[: -len(suffix) - 1] for v in versions]

    return result
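
# A worked sketch with a hypothetical tag:
#   strip_suffix("amd64", {"img": ["22-abc-amd64"]}) == {"img": ["22-abc"]}
# The suffix is cut together with the preceding dash, hence the extra -1 above.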


def check_sources(to_merge: Dict[str, Images]) -> Images:
    """Accepts a dict of {suffix: Images}, e.g. {arch1: Images, arch2: Images},
    checks that all of them contain the same images and versions once the
    suffixes are stripped, and returns the stripped Images"""
    result = {}  # type: Images
    first_suffix = ""
    for suffix, images in to_merge.items():
        if not result:
            first_suffix = suffix
            result = strip_suffix(suffix, images)
            continue
        if result != strip_suffix(suffix, images):
            raise ValueError(
                f"images in {images} are not equal to {to_merge[first_suffix]}"
            )

    return result
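
# If one suffix reports an image or version that the others do not, the check
# raises ValueError rather than letting a partial multi-arch manifest through.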


def get_changed_images(images: Images) -> Dict[str, str]:
    """The original json format is {"image": "tag"}, so the output artifact is
    produced here by taking the last element of each versions list. The latest
    version is {PR_NUMBER}-{SHA1}
    """
    return {k: v[-1] for k, v in images.items()}
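
# E.g. a hypothetical {"img": ["latest", "22-abc"]} becomes {"img": "22-abc"}.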


def merge_images(to_merge: Dict[str, Images]) -> Dict[str, List[List[str]]]:
    """The function merges image-name:version-suffix1 and image-name:version-suffix2
    into image-name:version"""
    suffixes = to_merge.keys()
    result_images = check_sources(to_merge)
    merge = {}  # type: Dict[str, List[List[str]]]

    for image, versions in result_images.items():
        merge[image] = []
        for i, v in enumerate(versions):
            merged_v = [v]  # type: List[str]
            for suf in suffixes:
                merged_v.append(to_merge[suf][image][i])
            merge[image].append(merged_v)

    return merge
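
# A worked sketch with hypothetical tags:
#   merge_images({"amd64": {"img": ["22-abc-amd64"]},
#                 "aarch64": {"img": ["22-abc-aarch64"]}})
# returns {"img": [["22-abc", "22-abc-amd64", "22-abc-aarch64"]]}: the stripped
# manifest tag first, then the per-architecture tags it will be built from.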


def create_manifest(image: str, tags: List[str], push: bool) -> Tuple[str, str]:
    # The first tag names the resulting manifest; see merge_images for how the
    # tags list is built
    tag = tags[0]
    manifest = f"{image}:{tag}"
    cmd = "docker manifest create --amend " + " ".join(f"{image}:{t}" for t in tags)
    logging.info("running: %s", cmd)
    with subprocess.Popen(
        cmd,
        shell=True,
        stderr=subprocess.STDOUT,
        stdout=subprocess.PIPE,
        universal_newlines=True,
    ) as popen:
        retcode = popen.wait()
        if retcode != 0:
            output = popen.stdout.read()  # type: ignore
            logging.error("failed to create manifest for %s:\n %s\n", manifest, output)
            return manifest, "FAIL"
    if not push:
        return manifest, "OK"

    cmd = f"docker manifest push {manifest}"
    logging.info("running: %s", cmd)
    with subprocess.Popen(
        cmd,
        shell=True,
        stderr=subprocess.STDOUT,
        stdout=subprocess.PIPE,
        universal_newlines=True,
    ) as popen:
        retcode = popen.wait()
        if retcode != 0:
            output = popen.stdout.read()  # type: ignore
            logging.error("failed to push %s:\n %s\n", manifest, output)
            return manifest, "FAIL"

    return manifest, "OK"
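
# For a hypothetical merged entry ["22-abc", "22-abc-amd64", "22-abc-aarch64"],
# create_manifest runs the equivalent of:
#   docker manifest create --amend img:22-abc img:22-abc-amd64 img:22-abc-aarch64
#   docker manifest push img:22-abc
# i.e. the first tag names the manifest list and the remaining tags become its
# members.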


def enrich_images(changed_images: Dict[str, str]) -> None:
    """Fills changed_images with the latest known tags for the images that are
    not changed in this PR, looked up in the public ClickHouse CI database"""
    all_image_names = get_image_names(Path(REPO_COPY), IMAGES_FILE_PATH)

    images_to_find_tags_for = [
        image for image in all_image_names if image not in changed_images
    ]
    images_to_find_tags_for.sort()

    logging.info(
        "Trying to find versions for images:\n %s", "\n ".join(images_to_find_tags_for)
    )

    COMMIT_SHA_BATCH_SIZE = 100
    MAX_COMMIT_BATCHES_TO_CHECK = 10
    # Gets the SHAs of COMMIT_SHA_BATCH_SIZE merge commits, skipping the batches
    # already checked (the {} placeholder is filled in the loop below)
    LAST_N_ANCESTOR_SHA_COMMAND = (
        f"git log --format=format:'%H' --max-count={COMMIT_SHA_BATCH_SIZE} "
        "--skip={} --merges"
    )
    git_runner = Runner()

    GET_COMMIT_SHAS_QUERY = """
        WITH {commit_shas:Array(String)} AS commit_shas,
             {images:Array(String)} AS images
        SELECT
            splitByChar(':', test_name)[1] AS image_name,
            argMax(splitByChar(':', test_name)[2], check_start_time) AS tag
        FROM checks
        WHERE
            check_name == 'Push multi-arch images to Dockerhub'
            AND position(test_name, checks.commit_sha)
            AND checks.commit_sha IN commit_shas
            AND image_name IN images
        GROUP BY image_name
        """

    batch_count = 0
    # We intentionally use the always publicly available DB here
    ch_helper = ClickHouseHelper(
        "https://play.clickhouse.com", {"X-ClickHouse-User": "play"}
    )

    while (
        batch_count <= MAX_COMMIT_BATCHES_TO_CHECK and len(images_to_find_tags_for) != 0
    ):
        commit_shas = git_runner(
            LAST_N_ANCESTOR_SHA_COMMAND.format(batch_count * COMMIT_SHA_BATCH_SIZE)
        ).split("\n")

        result = ch_helper.select_json_each_row(
            "default",
            GET_COMMIT_SHAS_QUERY,
            {"commit_shas": commit_shas, "images": images_to_find_tags_for},
        )
        result.sort(key=lambda x: x["image_name"])

        logging.info(
            "Found images for commits %s..%s:\n %s",
            commit_shas[0],
            commit_shas[-1],
            "\n ".join(f"{im['image_name']}:{im['tag']}" for im in result),
        )

        for row in result:
            image_name = row["image_name"]
            changed_images[image_name] = row["tag"]
            images_to_find_tags_for.remove(image_name)

        batch_count += 1
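
# The lookup above leans on two conventions of this same job: every manifest is
# recorded in the checks table as a test_name of the form "image:tag" (see
# main() below), and the tag embeds the commit SHA, which is what the
# splitByChar and position() conditions in GET_COMMIT_SHAS_QUERY rely on.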


def main():
    logging.basicConfig(level=logging.INFO)
    stopwatch = Stopwatch()

    args = parse_args()
    if args.push:
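        # The robot password is piped through stdin (--password-stdin), keeping
        # it out of the process list; get_parameter_from_ssm is assumed to
        # return it as plain text.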
        subprocess.check_output(  # pylint: disable=unexpected-keyword-arg
            "docker login --username 'robotclickhouse' --password-stdin",
            input=get_parameter_from_ssm("dockerhub_robot_password"),
            encoding="utf-8",
            shell=True,
        )

    to_merge = {}
    for suf in args.suffixes:
        to_merge[suf] = load_images(args.path, suf)

    changed_images = get_changed_images(check_sources(to_merge))

    os.environ["DOCKER_CLI_EXPERIMENTAL"] = "enabled"
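    # "docker manifest" used to be gated behind the experimental CLI flag;
    # enabling it here covers the create_manifest() calls below (a no-op on
    # Docker versions where the feature is enabled by default).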
    merged = merge_images(to_merge)

    status = "success"
    test_results = []  # type: TestResults
    for image, versions in merged.items():
        for tags in versions:
            manifest, test_result = create_manifest(image, tags, args.push)
            test_results.append(TestResult(manifest, test_result))
            if test_result != "OK":
                status = "failure"

    enriched_images = changed_images.copy()
    try:
        # changed_images now contains all the images changed in this PR; find
        # the latest tags for the images that are not changed
        enrich_images(enriched_images)
    except CHException as ex:
        logging.warning("Couldn't get proper tags for unchanged images: %s", ex)

    with open(args.path / "changed_images.json", "w", encoding="utf-8") as ci:
        json.dump(enriched_images, ci)
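    # changed_images.json is the job's output artifact: the images changed in
    # this PR with their new tags plus, when the lookup above succeeded, the
    # latest known tags of the unchanged images.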

    pr_info = PRInfo()
    s3_helper = S3Helper()

    url = upload_results(s3_helper, pr_info.number, pr_info.sha, test_results, [], NAME)

    print(f"::notice ::Report url: {url}")

    if not args.reports:
        return

    if changed_images:
        description = "Updated " + ", ".join(changed_images.keys())
    else:
        description = "Nothing to update"

    description = format_description(description)

    gh = Github(get_best_robot_token(), per_page=100)
    commit = get_commit(gh, pr_info.sha)
    post_commit_status(commit, status, url, description, NAME, pr_info)

    prepared_events = prepare_tests_results_for_clickhouse(
        pr_info,
        test_results,
        status,
        stopwatch.duration_seconds,
        stopwatch.start_time_str,
        url,
        NAME,
    )
    ch_helper = ClickHouseHelper()
    ch_helper.insert_events_into(db="default", table="checks", events=prepared_events)


if __name__ == "__main__":
    main()