Use both release_pr and merged_pr as additional cache

Mikhail f. Shiryaev 2023-02-09 21:18:01 +01:00
parent 7c6bb55eca
commit d0f5b6efb5
2 changed files with 23 additions and 18 deletions
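
In short: build_and_push_one_image now takes a list of extra cache tags instead of a single string and appends one --cache-from flag per tag. Below is a minimal, self-contained sketch of that flag assembly; build_cache_args, the "name" repo, and the PR numbers are illustrative stand-ins, not part of the patch.

from typing import List


def build_cache_args(repo: str, version: str, additional_cache: List[str]) -> str:
    # Mirrors how the patched build_and_push_one_image assembles --cache-from flags
    cache_from = (
        f"--cache-from type=registry,ref={repo}:{version} "
        f"--cache-from type=registry,ref={repo}:latest"
    )
    for tag in additional_cache:
        assert tag  # an empty tag would produce a malformed flag, so it is rejected
        cache_from = f"{cache_from} --cache-from type=registry,ref={repo}:{tag}"
    return cache_from


# With both a release PR and a merged PR available (illustrative numbers),
# two extra --cache-from flags end up on the command line:
print(build_cache_args("name", "version", ["12345", "67890"]))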


@@ -213,7 +213,7 @@ def build_and_push_dummy_image(
 def build_and_push_one_image(
     image: DockerImage,
     version_string: str,
-    additional_cache: str,
+    additional_cache: List[str],
     push: bool,
     child: bool,
 ) -> Tuple[bool, Path]:
@@ -241,11 +241,9 @@ def build_and_push_one_image(
         f"--cache-from type=registry,ref={image.repo}:{version_string} "
         f"--cache-from type=registry,ref={image.repo}:latest"
     )
-    if additional_cache:
-        cache_from = (
-            f"{cache_from} "
-            f"--cache-from type=registry,ref={image.repo}:{additional_cache}"
-        )
+    for tag in additional_cache:
+        assert tag
+        cache_from = f"{cache_from} --cache-from type=registry,ref={image.repo}:{tag}"

     cmd = (
         "docker buildx build --builder default "
@@ -273,7 +271,7 @@ def build_and_push_one_image(
 def process_single_image(
     image: DockerImage,
     versions: List[str],
-    additional_cache: str,
+    additional_cache: List[str],
     push: bool,
     child: bool,
 ) -> TestResults:
@@ -317,7 +315,7 @@ def process_single_image(
 def process_image_with_parents(
     image: DockerImage,
     versions: List[str],
-    additional_cache: str,
+    additional_cache: List[str],
     push: bool,
     child: bool = False,
 ) -> TestResults:
@@ -437,9 +435,13 @@ def main():
     result_images = {}
     test_results = []  # type: TestResults
-    additional_cache = ""
-    if pr_info.release_pr or pr_info.merged_pr:
-        additional_cache = str(pr_info.release_pr or pr_info.merged_pr)
+    additional_cache = []  # type: List[str]
+    if pr_info.release_pr:
+        logging.info("Use %s as additional cache tag", pr_info.release_pr)
+        additional_cache.append(str(pr_info.release_pr))
+    if pr_info.merged_pr:
+        logging.info("Use %s as additional cache tag", pr_info.merged_pr)
+        additional_cache.append(str(pr_info.merged_pr))

     for image in changed_images:
         # If we are in backport PR, then pr_info.release_pr is defined
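
The change in main() means a backport PR (release_pr) and a freshly merged PR (merged_pr) can both contribute a cache tag at the same time, instead of only the first non-empty of the two as before. The following is a hedged sketch of that collection step in isolation; PRStub and collect_additional_cache are hypothetical names used only for this illustration.

import logging
from typing import List


class PRStub:
    # Hypothetical stand-in for PRInfo, carrying only the two fields used here
    def __init__(self, release_pr: int = 0, merged_pr: int = 0) -> None:
        self.release_pr = release_pr
        self.merged_pr = merged_pr


def collect_additional_cache(pr_info: PRStub) -> List[str]:
    additional_cache = []  # type: List[str]
    if pr_info.release_pr:
        logging.info("Use %s as additional cache tag", pr_info.release_pr)
        additional_cache.append(str(pr_info.release_pr))
    if pr_info.merged_pr:
        logging.info("Use %s as additional cache tag", pr_info.merged_pr)
        additional_cache.append(str(pr_info.merged_pr))
    return additional_cache


# A backport PR that has just been merged yields two cache tags:
print(collect_additional_cache(PRStub(release_pr=111, merged_pr=222)))  # ['111', '222']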


@@ -123,7 +123,7 @@ class TestDockerImageCheck(unittest.TestCase):
         mock_popen.return_value.__enter__.return_value.wait.return_value = 0
         image = di.DockerImage("path", "name", False, gh_repo_path="")
-        result, _ = di.build_and_push_one_image(image, "version", "", True, True)
+        result, _ = di.build_and_push_one_image(image, "version", [], True, True)
         mock_popen.assert_called_once()
         mock_machine.assert_not_called()
         self.assertIn(
@@ -140,7 +140,7 @@ class TestDockerImageCheck(unittest.TestCase):
         mock_machine.reset_mock()
         mock_popen.return_value.__enter__.return_value.wait.return_value = 0
-        result, _ = di.build_and_push_one_image(image, "version2", "", False, True)
+        result, _ = di.build_and_push_one_image(image, "version2", [], False, True)
         mock_popen.assert_called_once()
         mock_machine.assert_not_called()
         self.assertIn(
@@ -157,7 +157,7 @@ class TestDockerImageCheck(unittest.TestCase):
         mock_popen.reset_mock()
         mock_machine.reset_mock()
         mock_popen.return_value.__enter__.return_value.wait.return_value = 1
-        result, _ = di.build_and_push_one_image(image, "version2", "", False, False)
+        result, _ = di.build_and_push_one_image(image, "version2", [], False, False)
         mock_popen.assert_called_once()
         mock_machine.assert_not_called()
         self.assertIn(
@@ -174,7 +174,7 @@ class TestDockerImageCheck(unittest.TestCase):
         mock_machine.reset_mock()
         mock_popen.return_value.__enter__.return_value.wait.return_value = 1
         result, _ = di.build_and_push_one_image(
-            image, "version2", "cached-version", False, False
+            image, "version2", ["cached-version", "another-cached"], False, False
         )
         mock_popen.assert_called_once()
         mock_machine.assert_not_called()
@@ -184,6 +184,7 @@ class TestDockerImageCheck(unittest.TestCase):
             "--tag name:version2 --cache-from type=registry,ref=name:version2 "
             "--cache-from type=registry,ref=name:latest "
             "--cache-from type=registry,ref=name:cached-version "
+            "--cache-from type=registry,ref=name:another-cached "
             "--cache-to type=inline,mode=max --progress plain path",
             mock_popen.call_args.args,
         )
@@ -195,7 +196,7 @@ class TestDockerImageCheck(unittest.TestCase):
         mock_popen.return_value.__enter__.return_value.wait.return_value = 0
         result, _ = di.build_and_push_one_image(
-            only_amd64_image, "version", "", True, True
+            only_amd64_image, "version", [], True, True
         )
         mock_popen.assert_called_once()
         mock_machine.assert_called_once()
@@ -206,12 +207,14 @@ class TestDockerImageCheck(unittest.TestCase):
         )
         self.assertTrue(result)
         result, _ = di.build_and_push_one_image(
-            only_amd64_image, "version", "", False, True
+            only_amd64_image, "version", [], False, True
         )
         self.assertIn(
             "docker pull ubuntu:20.04; docker tag ubuntu:20.04 name:version; ",
             mock_popen.call_args.args,
         )
+        with self.assertRaises(AssertionError):
+            result, _ = di.build_and_push_one_image(image, "version", [""], False, True)

     @patch("docker_images_check.build_and_push_one_image")
     def test_process_image_with_parents(self, mock_build):
@@ -223,7 +226,7 @@ class TestDockerImageCheck(unittest.TestCase):
         # We use list to have determined order of image builgings
         images = [im4, im1, im3, im2, im1]
         test_results = [
-            di.process_image_with_parents(im, ["v1", "v2", "latest"], "", True)
+            di.process_image_with_parents(im, ["v1", "v2", "latest"], [], True)
             for im in images
         ]
         # The time is random, so we check it's not None and greater than 0,
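
One more note on the test changes: because each tag is interpolated straight into the docker buildx command line, an empty tag would yield a dangling "--cache-from type=registry,ref=name:" flag, so the new assertRaises case checks that [""] is rejected. A standalone sketch of the same guard, assuming a simplified add_cache_flags helper rather than the real build_and_push_one_image:

import unittest


class EmptyCacheTagTest(unittest.TestCase):
    def test_empty_tag_is_rejected(self) -> None:
        def add_cache_flags(repo: str, tags: list) -> str:
            flags = ""
            for tag in tags:
                assert tag  # same guard as the assert added in the patch
                flags += f" --cache-from type=registry,ref={repo}:{tag}"
            return flags

        # Mirrors the new assertRaises block in TestDockerImageCheck
        with self.assertRaises(AssertionError):
            add_cache_flags("name", [""])


if __name__ == "__main__":
    unittest.main()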