Apply black to docker_images_check.py
commit 3c6f14ae63 (parent 44e02fa075)
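This change is a pure code-style reformatting. Presumably it was produced by running black with its default settings over the script, along the lines of the invocation below; the exact command and working directory are not recorded in the commit, so this is only an illustrative sketch:

    python -m black docker_images_check.py  # illustrative; run from the directory containing the script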
@@ -18,34 +18,46 @@ from stopwatch import Stopwatch
 
 NAME = "Push to Dockerhub (actions)"
 
 
 def get_changed_docker_images(pr_info, repo_path, image_file_path):
     images_dict = {}
     path_to_images_file = os.path.join(repo_path, image_file_path)
     if os.path.exists(path_to_images_file):
-        with open(path_to_images_file, 'r') as dict_file:
+        with open(path_to_images_file, "r") as dict_file:
             images_dict = json.load(dict_file)
     else:
-        logging.info("Image file %s doesnt exists in repo %s", image_file_path, repo_path)
+        logging.info(
+            "Image file %s doesnt exists in repo %s", image_file_path, repo_path
+        )
 
-    dockerhub_repo_name = 'yandex'
+    dockerhub_repo_name = "yandex"
     if not images_dict:
         return [], dockerhub_repo_name
 
     files_changed = pr_info.changed_files
 
-    logging.info("Changed files for PR %s @ %s: %s", pr_info.number, pr_info.sha, str(files_changed))
+    logging.info(
+        "Changed files for PR %s @ %s: %s",
+        pr_info.number,
+        pr_info.sha,
+        str(files_changed),
+    )
 
     changed_images = []
 
     for dockerfile_dir, image_description in images_dict.items():
-        if image_description['name'].startswith('clickhouse/'):
-            dockerhub_repo_name = 'clickhouse'
+        if image_description["name"].startswith("clickhouse/"):
+            dockerhub_repo_name = "clickhouse"
 
         for f in files_changed:
             if f.startswith(dockerfile_dir):
                 logging.info(
-                    "Found changed file '%s' which affects docker image '%s' with path '%s'",
-                    f, image_description['name'], dockerfile_dir)
+                    "Found changed file '%s' which affects "
+                    "docker image '%s' with path '%s'",
+                    f,
+                    image_description["name"],
+                    dockerfile_dir,
+                )
                 changed_images.append(dockerfile_dir)
                 break
@@ -54,15 +66,20 @@ def get_changed_docker_images(pr_info, repo_path, image_file_path):
     index = 0
     while index < len(changed_images):
         image = changed_images[index]
-        for dependent in images_dict[image]['dependent']:
+        for dependent in images_dict[image]["dependent"]:
             logging.info(
-                "Marking docker image '%s' as changed because it depends on changed docker image '%s'",
-                dependent, image)
+                "Marking docker image '%s' as changed because it "
+                "depends on changed docker image '%s'",
+                dependent,
+                image,
+            )
             changed_images.append(dependent)
         index += 1
         if index > 100:
             # Sanity check to prevent infinite loop.
-            raise RuntimeError("Too many changed docker images, this is a bug." + str(changed_images))
+            raise RuntimeError(
+                f"Too many changed docker images, this is a bug. {changed_images}"
+            )
 
     # If a dependent image was already in the list because its own files
     # changed, but then it was added as a dependent of a changed base, we
@@ -76,23 +93,42 @@ def get_changed_docker_images(pr_info, repo_path, image_file_path):
             seen.add(x)
             no_dups_reversed.append(x)
 
-    result = [(x, images_dict[x]['name']) for x in reversed(no_dups_reversed)]
-    logging.info("Changed docker images for PR %s @ %s: '%s'", pr_info.number, pr_info.sha, result)
+    result = [(x, images_dict[x]["name"]) for x in reversed(no_dups_reversed)]
+    logging.info(
+        "Changed docker images for PR %s @ %s: '%s'",
+        pr_info.number,
+        pr_info.sha,
+        result,
+    )
     return result, dockerhub_repo_name
 
 
 def build_and_push_one_image(path_to_dockerfile_folder, image_name, version_string):
-    logging.info("Building docker image %s with version %s from path %s", image_name, version_string, path_to_dockerfile_folder)
+    logging.info(
+        "Building docker image %s with version %s from path %s",
+        image_name,
+        version_string,
+        path_to_dockerfile_folder,
+    )
     build_log = None
     push_log = None
-    with open('build_log_' + str(image_name).replace('/', '_') + "_" + version_string, 'w') as pl:
-        cmd = "docker build --network=host -t {im}:{ver} {path}".format(im=image_name, ver=version_string, path=path_to_dockerfile_folder)
+    with open(
+        "build_log_" + str(image_name).replace("/", "_") + "_" + version_string, "w"
+    ) as pl:
+        cmd = "docker build --network=host -t {im}:{ver} {path}".format(
+            im=image_name, ver=version_string, path=path_to_dockerfile_folder
+        )
         retcode = subprocess.Popen(cmd, shell=True, stderr=pl, stdout=pl).wait()
         build_log = str(pl.name)
         if retcode != 0:
             return False, build_log, None
 
-    with open('tag_log_' + str(image_name).replace('/', '_') + "_" + version_string, 'w') as pl:
-        cmd = "docker build --network=host -t {im} {path}".format(im=image_name, path=path_to_dockerfile_folder)
+    with open(
+        "tag_log_" + str(image_name).replace("/", "_") + "_" + version_string, "w"
+    ) as pl:
+        cmd = "docker build --network=host -t {im} {path}".format(
+            im=image_name, path=path_to_dockerfile_folder
+        )
         retcode = subprocess.Popen(cmd, shell=True, stderr=pl, stdout=pl).wait()
         build_log = str(pl.name)
         if retcode != 0:
@@ -100,7 +136,9 @@ def build_and_push_one_image(path_to_dockerfile_folder, image_name, version_stri
 
     logging.info("Pushing image %s to dockerhub", image_name)
 
-    with open('push_log_' + str(image_name).replace('/', '_') + "_" + version_string, 'w') as pl:
+    with open(
+        "push_log_" + str(image_name).replace("/", "_") + "_" + version_string, "w"
+    ) as pl:
         cmd = "docker push {im}:{ver}".format(im=image_name, ver=version_string)
         retcode = subprocess.Popen(cmd, shell=True, stderr=pl, stdout=pl).wait()
         push_log = str(pl.name)
@@ -110,58 +148,64 @@ def build_and_push_one_image(path_to_dockerfile_folder, image_name, version_stri
     logging.info("Processing of %s successfully finished", image_name)
     return True, build_log, push_log
 
 
 def process_single_image(versions, path_to_dockerfile_folder, image_name):
-    logging.info("Image will be pushed with versions %s", ', '.join(versions))
+    logging.info("Image will be pushed with versions %s", ", ".join(versions))
     result = []
     for ver in versions:
         for i in range(5):
-            success, build_log, push_log = build_and_push_one_image(path_to_dockerfile_folder, image_name, ver)
+            success, build_log, push_log = build_and_push_one_image(
+                path_to_dockerfile_folder, image_name, ver
+            )
             if success:
-                result.append((image_name + ":" + ver, build_log, push_log, 'OK'))
+                result.append((image_name + ":" + ver, build_log, push_log, "OK"))
                 break
-            logging.info("Got error will retry %s time and sleep for %s seconds", i, i * 5)
+            logging.info(
+                "Got error will retry %s time and sleep for %s seconds", i, i * 5
+            )
             time.sleep(i * 5)
         else:
-            result.append((image_name + ":" + ver, build_log, push_log, 'FAIL'))
+            result.append((image_name + ":" + ver, build_log, push_log, "FAIL"))
 
     logging.info("Processing finished")
     return result
 
 
 def process_test_results(s3_client, test_results, s3_path_prefix):
-    overall_status = 'success'
+    overall_status = "success"
     processed_test_results = []
     for image, build_log, push_log, status in test_results:
-        if status != 'OK':
-            overall_status = 'failure'
-        url_part = ''
+        if status != "OK":
+            overall_status = "failure"
+        url_part = ""
         if build_log is not None and os.path.exists(build_log):
             build_url = s3_client.upload_test_report_to_s3(
-                build_log,
-                s3_path_prefix + "/" + os.path.basename(build_log))
+                build_log, s3_path_prefix + "/" + os.path.basename(build_log)
+            )
             url_part += '<a href="{}">build_log</a>'.format(build_url)
         if push_log is not None and os.path.exists(push_log):
             push_url = s3_client.upload_test_report_to_s3(
-                push_log,
-                s3_path_prefix + "/" + os.path.basename(push_log))
+                push_log, s3_path_prefix + "/" + os.path.basename(push_log)
+            )
             if url_part:
-                url_part += ', '
+                url_part += ", "
             url_part += '<a href="{}">push_log</a>'.format(push_url)
         if url_part:
-            test_name = image + ' (' + url_part + ')'
+            test_name = image + " (" + url_part + ")"
         else:
             test_name = image
         processed_test_results.append((test_name, status))
     return overall_status, processed_test_results
 
 
 if __name__ == "__main__":
     logging.basicConfig(level=logging.INFO)
 
     stopwatch = Stopwatch()
 
     repo_path = GITHUB_WORKSPACE
-    temp_path = os.path.join(RUNNER_TEMP, 'docker_images_check')
-    dockerhub_password = get_parameter_from_ssm('dockerhub_robot_password')
+    temp_path = os.path.join(RUNNER_TEMP, "docker_images_check")
+    dockerhub_password = get_parameter_from_ssm("dockerhub_robot_password")
 
     if os.path.exists(temp_path):
         shutil.rmtree(temp_path)
@@ -170,46 +214,71 @@ if __name__ == "__main__":
     os.makedirs(temp_path)
 
     pr_info = PRInfo(need_changed_files=True)
-    changed_images, dockerhub_repo_name = get_changed_docker_images(pr_info, repo_path, "docker/images.json")
-    logging.info("Has changed images %s", ', '.join([str(image[0]) for image in changed_images]))
-    pr_commit_version = str(pr_info.number) + '-' + pr_info.sha
+    changed_images, dockerhub_repo_name = get_changed_docker_images(
+        pr_info, repo_path, "docker/images.json"
+    )
+    logging.info(
+        "Has changed images %s", ", ".join([str(image[0]) for image in changed_images])
+    )
+    pr_commit_version = str(pr_info.number) + "-" + pr_info.sha
     versions = [str(pr_info.number), pr_commit_version]
     if pr_info.number == 0:
         versions.append("latest")
 
-    subprocess.check_output("docker login --username 'robotclickhouse' --password '{}'".format(dockerhub_password), shell=True)
+    subprocess.check_output(
+        "docker login --username 'robotclickhouse' --password '{}'".format(
+            dockerhub_password
+        ),
+        shell=True,
+    )
 
     result_images = {}
     images_processing_result = []
     for rel_path, image_name in changed_images:
         full_path = os.path.join(repo_path, rel_path)
-        images_processing_result += process_single_image(versions, full_path, image_name)
+        images_processing_result += process_single_image(
+            versions, full_path, image_name
+        )
         result_images[image_name] = pr_commit_version
 
     if changed_images:
-        description = "Updated " + ','.join([im[1] for im in changed_images])
+        description = "Updated " + ",".join([im[1] for im in changed_images])
     else:
         description = "Nothing to update"
 
     if len(description) >= 140:
         description = description[:136] + "..."
 
-    s3_helper = S3Helper('https://s3.amazonaws.com')
+    s3_helper = S3Helper("https://s3.amazonaws.com")
 
-    s3_path_prefix = str(pr_info.number) + "/" + pr_info.sha + "/" + NAME.lower().replace(' ', '_')
-    status, test_results = process_test_results(s3_helper, images_processing_result, s3_path_prefix)
+    s3_path_prefix = (
+        str(pr_info.number) + "/" + pr_info.sha + "/" + NAME.lower().replace(" ", "_")
+    )
+    status, test_results = process_test_results(
+        s3_helper, images_processing_result, s3_path_prefix
+    )
 
     ch_helper = ClickHouseHelper()
     url = upload_results(s3_helper, pr_info.number, pr_info.sha, test_results, [], NAME)
 
-    with open(os.path.join(temp_path, 'changed_images.json'), 'w') as images_file:
+    with open(os.path.join(temp_path, "changed_images.json"), "w") as images_file:
         json.dump(result_images, images_file)
 
     print("::notice ::Report url: {}".format(url))
-    print("::set-output name=url_output::\"{}\"".format(url))
+    print('::set-output name=url_output::"{}"'.format(url))
    gh = Github(get_best_robot_token())
     commit = get_commit(gh, pr_info.sha)
-    commit.create_status(context=NAME, description=description, state=status, target_url=url)
+    commit.create_status(
+        context=NAME, description=description, state=status, target_url=url
+    )
 
-    prepared_events = prepare_tests_results_for_clickhouse(pr_info, test_results, status, stopwatch.duration_seconds, stopwatch.start_time_str, url, NAME)
+    prepared_events = prepare_tests_results_for_clickhouse(
+        pr_info,
+        test_results,
+        status,
+        stopwatch.duration_seconds,
+        stopwatch.start_time_str,
+        url,
+        NAME,
+    )
     ch_helper.insert_events_into(db="gh-data", table="checks", events=prepared_events)