#!/usr/bin/env python3

import bisect
from dataclasses import asdict
from hashlib import md5
from logging import getLogger
from pathlib import Path
from typing import TYPE_CHECKING, Dict, Iterable, List, Optional, Union
from sys import modules

from docker_images_helper import get_images_info
from git_helper import Runner
from env_helper import ROOT_DIR
from ci_utils import cd
from ci_config import CI

# Number of hex characters kept from the full md5 hexdigest for the short digests
DOCKER_DIGEST_LEN = 12
JOB_DIGEST_LEN = 10

# hashlib._Hash is a private name, so it is imported only for type checkers;
# at runtime a string placeholder is enough for the annotations
if TYPE_CHECKING:
    from hashlib import (  # pylint:disable=no-name-in-module,ungrouped-imports
        _Hash as HASH,
    )
else:
    HASH = "_Hash"

logger = getLogger(__name__)


def _digest_file(file: Path, hash_object: HASH) -> None:
    """Updates hash_object with the file content, read in 4096-byte chunks"""
    assert file.is_file()
    with open(file, "rb") as fd:
        for chunk in iter(lambda: fd.read(4096), b""):
            hash_object.update(chunk)


def digest_path(
    path: Union[Path, str],
    hash_object: Optional[HASH] = None,
    exclude_files: Optional[Iterable[str]] = None,
    exclude_dirs: Optional[Iterable[Union[Path, str]]] = None,
) -> HASH:
    """Calculates the md5 hash of the path (or updates the given hash_object),
    whether the path is a directory or a file

    @exclude_files - file extension(s) or any other filename suffix(es) to exclude from the digest
    @exclude_dirs - directory names to exclude from the digest
    """
    path = Path(path)
    hash_object = hash_object or md5()
    if path.is_file():
        if not exclude_files or not any(path.name.endswith(x) for x in exclude_files):
            _digest_file(path, hash_object)
    elif path.is_dir():
        if not exclude_dirs or not any(path.name == x for x in exclude_dirs):
            for p in sorted(path.iterdir()):
                digest_path(p, hash_object, exclude_files, exclude_dirs)
    else:
        pass  # broken symlink
    return hash_object
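
# Usage sketch (illustrative only; "docker/test/util" is an assumed directory, not a
# guaranteed part of the repository layout):
#
#     obj = digest_path("docker/test/util", exclude_files=[".md"], exclude_dirs=["tmp"])
#     print(obj.hexdigest())
#
# Passing the returned hash object back in lets several calls accumulate into one digest.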


def digest_paths(
    paths: Iterable[Union[Path, str]],
    hash_object: Optional[HASH] = None,
    exclude_files: Optional[Iterable[str]] = None,
    exclude_dirs: Optional[Iterable[Union[Path, str]]] = None,
) -> HASH:
    """Calculates the aggregated md5 hash of the passed paths (or updates the given
    hash_object). Glob patterns are expanded relative to ROOT_DIR, and all resolved
    paths are processed in sorted order"""
    hash_object = hash_object or md5()
    paths_all: List[Path] = []
    with cd(ROOT_DIR):
        for p in paths:
            if isinstance(p, str) and "*" in p:
                for path in Path(".").glob(p):
                    bisect.insort(paths_all, path.absolute())  # type: ignore[misc]
            else:
                bisect.insort(paths_all, Path(p).absolute())  # type: ignore[misc]
        for path in paths_all:  # type: ignore
            if path.exists():
                digest_path(path, hash_object, exclude_files, exclude_dirs)
            else:
                raise AssertionError(f"Invalid path: {path}")
    return hash_object
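
# Usage sketch (illustrative only; the glob pattern and file name are assumptions about
# the repository layout):
#
#     obj = digest_paths(["tests/ci/*.py", "docker/images.json"], exclude_files=[".md"])
#     print(obj.hexdigest())
#
# Relative paths and globs are resolved inside ROOT_DIR, i.e. against the repository root.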


def digest_script(path_str: str) -> HASH:
    """Accepts the value of __file__ of the executed script and calculates the md5 hash
    of it together with every already imported module from the same directory"""
    path = Path(path_str)
    parent = path.parent
    md5_hash = md5()
    with cd(ROOT_DIR):
        try:
            for script in modules.values():
                script_path = getattr(script, "__file__", "")
                if parent.absolute().as_posix() in script_path:
                    logger.debug("Updating the hash with %s", script_path)
                    _digest_file(Path(script_path), md5_hash)
        except RuntimeError:
            # sys.modules changed size during the iteration, e.g. because of a lazy import
            logger.warning("The modules size has changed, retry calculating digest")
            return digest_script(path_str)
    return md5_hash
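
# Usage sketch: a CI script can digest itself together with the helper modules it has
# imported from the same directory by passing its own __file__:
#
#     print(digest_script(__file__).hexdigest())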


def digest_string(string: str) -> str:
    """Returns the md5 hexdigest of the given string"""
    hash_object = md5()
    hash_object.update(string.encode("utf-8"))
    return hash_object.hexdigest()
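
# For reference: digest_string("") == "d41d8cd98f00b204e9800998ecf8427e", the md5 of the empty string.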


class DockerDigester:
    EXCLUDE_FILES = [".md"]

    def __init__(self):
        self.images_info = get_images_info()
        assert self.images_info, "Fetch image info error"

    def get_image_digest(self, name: str) -> str:
        assert isinstance(name, str)
        with cd(ROOT_DIR):
            deps = [name]
            digest = None
            # Walk the image and all of its dependencies, folding every context path
            # into a single hash object
            while deps:
                dep_name = deps.pop(0)
                digest = digest_path(
                    self.images_info[dep_name]["path"],
                    digest,
                    exclude_files=self.EXCLUDE_FILES,
                )
                deps += self.images_info[dep_name]["deps"]
            assert digest
        return digest.hexdigest()[0:DOCKER_DIGEST_LEN]

    def get_all_digests(self) -> Dict:
        res = {}
        for image_name in self.images_info:
            res[image_name] = self.get_image_digest(image_name)
        return res
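
# Usage sketch (needs a repository checkout so that get_images_info() can load the image
# descriptions; "clickhouse/fasttest" is an assumed image name):
#
#     dd = DockerDigester()
#     print(dd.get_image_digest("clickhouse/fasttest"))  # 12-character digest
#     print(dd.get_all_digests())  # {image_name: digest, ...}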


class JobDigester:
    """Calculates a short per-job digest from a CI.DigestConfig: the included paths,
    the Docker images the job depends on and, optionally, the git submodules state.
    Results are cached per configuration."""

    def __init__(self, dry_run: bool = False):
        self.dd = DockerDigester()
        self.cache: Dict[str, str] = {}
        self.dry_run = dry_run

    @staticmethod
    def _get_config_hash(digest_config: CI.DigestConfig) -> str:
        data_dict = asdict(digest_config)
        hash_obj = md5()
        hash_obj.update(str(data_dict).encode())
        hash_string = hash_obj.hexdigest()
        return hash_string

    def get_job_digest(self, digest_config: CI.DigestConfig) -> str:
        if not digest_config.include_paths or self.dry_run:
            # job is not for digest
            return "f" * JOB_DIGEST_LEN

        cache_key = self._get_config_hash(digest_config)
        if cache_key in self.cache:
            return self.cache[cache_key]

        digest_str: List[str] = []
        if digest_config.include_paths:
            digest = digest_paths(
                digest_config.include_paths,
                hash_object=None,
                exclude_files=digest_config.exclude_files,
                exclude_dirs=digest_config.exclude_dirs,
            )
            digest_str += (digest.hexdigest(),)
        if digest_config.docker:
            for image_name in digest_config.docker:
                image_digest = self.dd.get_image_digest(image_name)
                digest_str += (image_digest,)
        if digest_config.git_submodules:
            submodules_sha = Runner().run(
                "git submodule | awk '{print $1}' | sed 's/^[+-]//'"
            )
            assert submodules_sha and len(submodules_sha) > 10
            submodules_digest = digest_string("-".join(submodules_sha))
            digest_str += (submodules_digest,)
        res = digest_string("-".join(digest_str))[0:JOB_DIGEST_LEN]
        self.cache[cache_key] = res
        return res
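

if __name__ == "__main__":
    # Minimal manual check; a sketch only, assuming it is run from a full repository
    # checkout. The CI.DigestConfig field values are illustrative, not a real CI job.
    _demo_config = CI.DigestConfig(
        include_paths=["tests/ci/digest_helper.py"],
        exclude_files=[".md"],
        exclude_dirs=[],
        docker=[],
        git_submodules=False,
    )
    print(JobDigester().get_job_digest(_demo_config))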