mirror of
https://github.com/ClickHouse/ClickHouse.git
synced 2024-09-20 00:30:49 +00:00
Create a script to build and push server images
This commit is contained in:
parent
b950b53156
commit
9aab7e9910
356
tests/ci/docker_server.py
Normal file
356
tests/ci/docker_server.py
Normal file
@ -0,0 +1,356 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
# here
|
||||
import argparse
|
||||
import json
|
||||
import logging
|
||||
import subprocess
|
||||
from os import path as p, makedirs
|
||||
from typing import List, Tuple
|
||||
|
||||
from github import Github
|
||||
|
||||
from clickhouse_helper import ClickHouseHelper, prepare_tests_results_for_clickhouse
|
||||
from commit_status_helper import post_commit_status
|
||||
from docker_images_check import DockerImage
|
||||
from env_helper import CI, GITHUB_RUN_URL, RUNNER_TEMP, S3_BUILDS_BUCKET
|
||||
from get_robot_token import get_best_robot_token, get_parameter_from_ssm
|
||||
from pr_info import PRInfo
|
||||
from s3_helper import S3Helper
|
||||
from stopwatch import Stopwatch
|
||||
from upload_result_helper import upload_results
|
||||
from version_helper import (
|
||||
get_tagged_versions,
|
||||
get_version_from_repo,
|
||||
get_version_from_string,
|
||||
validate_version,
|
||||
)
|
||||
|
||||
TEMP_PATH = p.join(RUNNER_TEMP, "docker_images_check")
|
||||
BUCKETS = {"amd64": "package_release", "arm64": "package_aarch64"}
|
||||
|
||||
|
||||
class DelOS(argparse.Action):
    """argparse action backing the `--no-<os>` flags.

    Each flag is stored with dest `no_<os>`; triggering it removes the
    corresponding OS name from the accumulated `namespace.os` list, so the
    default `["ubuntu", "alpine"]` can be trimmed flag by flag.
    """

    def __call__(self, _, namespace, __, option_string=None):
        # Recover the OS name from the dest, e.g. "no_ubuntu" -> "ubuntu"
        if self.dest.startswith("no_"):
            os_name = self.dest[3:]
        else:
            os_name = self.dest
        if os_name in namespace.os:
            namespace.os.remove(os_name)
|
||||
|
||||
|
||||
def parse_args() -> argparse.Namespace:
    """Define and parse the command line of the server-image builder.

    Returns a namespace with: version, release_type, image_path,
    image_repo, bucket_prefix, reports, push and os (the list of OS
    flavours to build).
    """
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        description="A program to build clickhouse-server image, both alpine and "
        "ubuntu versions",
    )
    add = parser.add_argument  # shorthand; every option goes to the same parser

    add(
        "--version",
        type=version_arg,
        default=get_version_from_repo().string,
        help="a version to build",
    )
    add(
        "--release-type",
        type=str,
        choices=("auto", "latest", "major", "minor", "patch", "head"),
        default="head",
        help="version part that will be updated when '--version' is set; "
        "'auto' is a special case, it will get versions from github and detect the "
        "release type (latest, major, minor or patch) automatically",
    )
    add(
        "--image-path",
        type=str,
        default="docker/server",
        help="a path to docker context directory",
    )
    add(
        "--image-repo",
        type=str,
        default="clickhouse/clickhouse-server",
        help="image name on docker hub",
    )
    add(
        "--bucket-prefix",
        help="if set, then is used as source for deb and tgz files",
    )
    # `--reports` and `--push` are hidden, always-on defaults; the visible
    # `--no-*` counterparts below flip them off.
    add("--reports", default=True, help=argparse.SUPPRESS)
    add(
        "--no-reports",
        action="store_false",
        dest="reports",
        default=argparse.SUPPRESS,
        help="don't push reports to S3 and github",
    )
    add("--push", default=True, help=argparse.SUPPRESS)
    add(
        "--no-push-images",
        action="store_false",
        dest="push",
        default=argparse.SUPPRESS,
        help="don't push images to docker hub",
    )
    # Both OS flavours are built by default; DelOS removes entries from the list
    add("--os", default=["ubuntu", "alpine"], help=argparse.SUPPRESS)
    add(
        "--no-ubuntu",
        action=DelOS,
        nargs=0,
        default=argparse.SUPPRESS,
        help="don't build ubuntu image",
    )
    add(
        "--no-alpine",
        action=DelOS,
        nargs=0,
        default=argparse.SUPPRESS,
        help="don't build alpine image",
    )

    return parser.parse_args()
|
||||
|
||||
|
||||
def version_arg(version: str) -> str:
    """argparse `type=` hook: accept *version* only if it is a valid
    ClickHouse version string, otherwise report it as a bad argument.
    """
    try:
        validate_version(version)
    except ValueError as e:
        # argparse turns ArgumentTypeError into a proper usage error message
        raise argparse.ArgumentTypeError(e)
    return version
|
||||
|
||||
|
||||
def auto_release_type(version: str, release_type: str) -> str:
    """Resolve release_type == "auto" into a concrete release type.

    Any non-"auto" *release_type* is returned unchanged. Otherwise the
    given *version* is compared against the versions tagged in git and the
    result is one of "latest", "major", "minor" or "patch".

    Raises ValueError when the version differs from its reference only in
    the tweak part, which this script does not support.
    """
    if release_type != "auto":
        return release_type
    current_version = get_version_from_string(version)

    git_versions = get_tagged_versions()
    reference_version = git_versions[0]
    # Scan the tagged versions from newest to oldest for the first tag that
    # is older than the current version; the next newer tag then becomes
    # the reference to compare version parts against.
    # NOTE(review): this assumes get_tagged_versions() returns an ascending
    # list — the unit test in this repo mocks it that way; confirm in
    # version_helper.
    for i in reversed(range(len(git_versions))):
        if git_versions[i] < current_version:
            if i == len(git_versions) - 1:
                # Newer than every existing tag
                return "latest"
            reference_version = git_versions[i + 1]
            break

    # Report the most significant version part that is being bumped
    if current_version.major < reference_version.major:
        return "major"
    if current_version.minor < reference_version.minor:
        return "minor"
    if current_version.patch < reference_version.patch:
        return "patch"

    raise ValueError(
        "Release type 'tweak' is not supported for "
        f"{current_version.string} < {reference_version.string}"
    )
|
||||
|
||||
|
||||
def gen_tags(version: str, release_type: str) -> List[str]:
    """Return the docker tags to create for *version* and *release_type*.

    22.2.2.2 + latest:
    - latest
    - 22
    - 22.2
    - 22.2.2
    - 22.2.2.2
    22.2.2.2 + major:
    - 22
    - 22.2
    - 22.2.2
    - 22.2.2.2
    22.2.2.2 + minor:
    - 22.2
    - 22.2.2
    - 22.2.2.2
    22.2.2.2 + patch:
    - 22.2.2
    - 22.2.2.2
    22.2.2.2 + head:
    - head
    """
    validate_version(version)
    parts = version.split(".")
    tags = []  # type: List[str]
    if release_type == "head":
        tags.append(release_type)
        return tags
    # The remaining types tag version prefixes and differ only in how many
    # leading parts they skip.
    first_part = {"latest": 0, "major": 0, "minor": 1, "patch": 2}
    if release_type not in first_part:
        raise ValueError(f"{release_type} is not valid release part")
    if release_type == "latest":
        tags.append(release_type)
    for i in range(first_part[release_type], len(parts)):
        tags.append(".".join(parts[: i + 1]))
    return tags
|
||||
|
||||
|
||||
def buildx_args(bucket_prefix: str, arch: str) -> List[str]:
    """Return the arch-specific `docker buildx build` arguments.

    When *bucket_prefix* is non-empty, the deb packages for *arch* are
    taken from the matching build bucket instead of the default location.
    """
    result = [f"--platform=linux/{arch}", f"--label=build-url={GITHUB_RUN_URL}"]
    if not bucket_prefix:
        return result
    # p.join collapses the separator, preventing a double `//` in the URL
    url = p.join(bucket_prefix, BUCKETS[arch])
    result.extend(
        [
            f"--build-arg=REPOSITORY='{url}'",
            f"--build-arg=deb_location_url='{url}'",
        ]
    )
    return result
|
||||
|
||||
|
||||
def build_and_push_image(
    image: DockerImage, push: bool, bucket_prefix: str, os: str, tag: str, version: str
) -> List[Tuple[str, str]]:
    """Build the server image for every arch in BUCKETS, optionally push.

    NOTE: the `os` parameter shadows the builtin module name; it is the OS
    flavour ("ubuntu"/"alpine") and non-ubuntu flavours get an `-{os}` tag
    suffix.

    Returns a list of ("repo:tag", "OK"/"FAIL") results; stops early and
    returns what it has as soon as one per-arch build fails.
    """
    result = []
    if os != "ubuntu":
        tag += f"-{os}"
    init_args = ["docker", "buildx", "build"]
    if push:
        # Push per-arch images by digest only; the merged manifest gets the
        # human-readable tag later via `imagetools create`.
        init_args.append("--push")
        init_args.append("--output=type=image,push-by-digest=true")
        init_args.append(f"--tag={image.repo}")
    else:
        init_args.append("--output=type=docker")

    # `docker buildx build --load` does not support multiple images currently
    # images must be built separately and merged together with `docker manifest`
    digests = []
    for arch in BUCKETS:
        arch_tag = f"{tag}-{arch}"
        metadata_path = p.join(TEMP_PATH, arch_tag)
        dockerfile = p.join(image.full_path, f"Dockerfile.{os}")
        cmd_args = list(init_args)
        cmd_args.extend(buildx_args(bucket_prefix, arch))
        if not push:
            # Without push the per-arch image needs its own local tag
            cmd_args.append(f"--tag={image.repo}:{arch_tag}")
        cmd_args.extend(
            [
                f"--metadata-file={metadata_path}",
                f"--build-arg=VERSION='{version}'",
                "--progress=plain",
                f"--file={dockerfile}",
                image.full_path,
            ]
        )
        cmd = " ".join(cmd_args)
        logging.info("Building image %s:%s for arch %s: %s", image.repo, tag, arch, cmd)
        # Stream buildx output to our stdout as it is produced
        with subprocess.Popen(
            cmd,
            shell=True,
            stderr=subprocess.STDOUT,
            stdout=subprocess.PIPE,
            universal_newlines=True,
        ) as process:
            for line in process.stdout:  # type: ignore
                print(line, end="")
            retcode = process.wait()
            if retcode != 0:
                result.append((f"{image.repo}:{tag}-{arch}", "FAIL"))
                return result
        result.append((f"{image.repo}:{tag}-{arch}", "OK"))
        # buildx wrote build metadata to --metadata-file; collect the digest
        # so the per-arch images can be merged under one tag below
        with open(metadata_path, "rb") as m:
            metadata = json.load(m)
            digests.append(metadata["containerimage.digest"])
    if push:
        cmd = (
            "docker buildx imagetools create "
            f"--tag {image.repo}:{tag} {' '.join(digests)}"
        )
        logging.info("Pushing merged %s:%s image: %s", image.repo, tag, cmd)
        with subprocess.Popen(
            cmd,
            shell=True,
            stderr=subprocess.STDOUT,
            stdout=subprocess.PIPE,
            universal_newlines=True,
        ) as process:
            for line in process.stdout:  # type: ignore
                print(line, end="")
            retcode = process.wait()
            if retcode != 0:
                result.append((f"{image.repo}:{tag}", "FAIL"))
    else:
        logging.info(
            "Merging is available only on push, separate %s images are created",
            f"{image.repo}:{tag}-$arch",
        )

    return result
|
||||
|
||||
|
||||
def main():
    """Entry point: build (and optionally push) the server images for every
    requested OS flavour and tag, then upload results and report statuses.
    """
    logging.basicConfig(level=logging.INFO)
    stopwatch = Stopwatch()
    makedirs(TEMP_PATH, exist_ok=True)

    args = parse_args()
    image = DockerImage(args.image_path, args.image_repo, False)
    # "auto" release type is resolved against the tagged git versions
    args.release_type = auto_release_type(args.version, args.release_type)
    tags = gen_tags(args.version, args.release_type)
    NAME = f"Docker image {image.repo} building check (actions)"
    pr_info = None
    if CI:
        # In CI, take deb/tgz artifacts from the PR's build bucket
        pr_info = PRInfo()
        args.bucket_prefix = (
            f"https://s3.amazonaws.com/{S3_BUILDS_BUCKET}/"
            f"{pr_info.number}/{pr_info.sha}"
        )

    if args.push:
        # Password is piped via stdin so it never appears in the command line
        subprocess.check_output(  # pylint: disable=unexpected-keyword-arg
            "docker login --username 'robotclickhouse' --password-stdin",
            input=get_parameter_from_ssm("dockerhub_robot_password"),
            encoding="utf-8",
            shell=True,
        )
        NAME = f"Docker image {image.repo} build and push (actions)"

    logging.info("Following tags will be created: %s", ", ".join(tags))
    status = "success"
    test_results = []  # type: List[Tuple[str, str]]
    for os in args.os:
        for tag in tags:
            test_results.extend(
                build_and_push_image(
                    image, args.push, args.bucket_prefix, os, tag, args.version
                )
            )
            # build_and_push_image returns early on failure, so the last
            # entry tells whether this (os, tag) combination succeeded
            if test_results[-1][1] != "OK":
                status = "failure"

    pr_info = pr_info or PRInfo()
    s3_helper = S3Helper("https://s3.amazonaws.com")

    url = upload_results(s3_helper, pr_info.number, pr_info.sha, test_results, [], NAME)

    print(f"::notice ::Report url: {url}")
    print(f'::set-output name=url_output::"{url}"')

    if not args.reports:
        return

    # GitHub commit status descriptions are limited to 140 characters
    description = f"Processed tags: {', '.join(tags)}"

    if len(description) >= 140:
        description = description[:136] + "..."

    gh = Github(get_best_robot_token())
    post_commit_status(gh, pr_info.sha, NAME, description, status, url)

    prepared_events = prepare_tests_results_for_clickhouse(
        pr_info,
        test_results,
        status,
        stopwatch.duration_seconds,
        stopwatch.start_time_str,
        url,
        NAME,
    )
    ch_helper = ClickHouseHelper()
    ch_helper.insert_events_into(db="default", table="checks", events=prepared_events)


if __name__ == "__main__":
    main()
|
@ -2,12 +2,16 @@
|
||||
|
||||
import os
|
||||
import unittest
|
||||
from unittest.mock import patch
|
||||
from unittest.mock import patch, MagicMock
|
||||
|
||||
from env_helper import GITHUB_RUN_URL
|
||||
from pr_info import PRInfo
|
||||
import docker_images_check as di
|
||||
|
||||
with patch("git_helper.Git"):
|
||||
from version_helper import get_version_from_string, get_tagged_versions
|
||||
import docker_server as ds
|
||||
|
||||
# di.logging.basicConfig(level=di.logging.INFO)
|
||||
|
||||
|
||||
@ -221,5 +225,45 @@ class TestDockerImageCheck(unittest.TestCase):
|
||||
self.assertEqual(results, expected)
|
||||
|
||||
|
||||
class TestDockerServer(unittest.TestCase):
    """Unit tests for the docker_server module."""

    def test_gen_tags(self):
        # ((version, release_type), expected docker tags)
        cases = (
            (("22.2.2.2", "latest"), ["latest", "22", "22.2", "22.2.2", "22.2.2.2"]),
            (("22.2.2.2", "major"), ["22", "22.2", "22.2.2", "22.2.2.2"]),
            (("22.2.2.2", "minor"), ["22.2", "22.2.2", "22.2.2.2"]),
            (("22.2.2.2", "patch"), ["22.2.2", "22.2.2.2"]),
            (("22.2.2.2", "head"), ["head"]),
        )
        for (version, release_type), expected in cases:
            self.assertEqual(expected, ds.gen_tags(version, release_type))

        # "auto" must be resolved by auto_release_type() before gen_tags()
        with self.assertRaises(ValueError):
            ds.gen_tags("22.2.2.2", "auto")

    @patch("docker_server.get_tagged_versions")
    def test_auto_release_type(self, mock_tagged_versions: MagicMock):
        # Pretend these are the tagged versions available in git
        mock_tagged_versions.return_value = [
            get_version_from_string("1.1.1.1"),
            get_version_from_string("1.2.1.1"),
            get_version_from_string("2.1.1.1"),
            get_version_from_string("2.2.1.1"),
            get_version_from_string("2.2.2.1"),
        ]
        # (version, expected resolved release type)
        cases = (
            ("1.0.1.1", "minor"),
            ("1.1.2.1", "minor"),
            ("1.3.1.1", "major"),
            ("2.1.2.1", "minor"),
            ("2.2.1.3", "patch"),
            ("2.2.3.1", "latest"),
            ("2.3.1.1", "latest"),
        )
        _ = get_tagged_versions()
        for version, expected in cases:
            self.assertEqual(expected, ds.auto_release_type(version, "auto"))
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Allow running this test module directly from the command line
    unittest.main()
|
||||
|
Loading…
Reference in New Issue
Block a user