Fix for nightly job for digest-ci (#58079)

* Fix for run_always job - do not set done
 #no-merge-commit
Max K 2023-12-20 21:28:54 +01:00 committed by GitHub
parent cc23ddd94a
commit e0a790de1e
2 changed files with 36 additions and 92 deletions
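
For readers skimming the diff, the gist of the run_always fix can be captured in a short, self-contained sketch. `JobConfig` and `JobStatus` below are illustrative stand-ins for the real `CI_CONFIG.get_job_config(...)` result and `CommitStatusData`, not the actual classes:

```python
from dataclasses import dataclass

@dataclass
class JobConfig:
    run_always: bool = False
    num_batches: int = 1

@dataclass
class JobStatus:
    status: str = "success"

    def is_ok(self) -> bool:
        return self.status == "success"

def should_mark_job_as_done(job_config: JobConfig, job_status: JobStatus) -> bool:
    # run_always jobs are re-executed on every CI run, so they must never be
    # recorded as done; otherwise the nightly digest-ci pipeline would treat
    # them as already completed and skip them on the next run
    if job_config.run_always:
        return False
    # everything else keeps the old behaviour: mark done only on success
    return job_status.is_ok()

# regular successful job -> success flag is written, result can be reused
assert should_mark_job_as_done(JobConfig(), JobStatus("success"))
# run_always job -> never marked as done, even when it succeeded
assert not should_mark_job_as_done(JobConfig(run_always=True), JobStatus("success"))
```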


@@ -279,11 +279,11 @@ def _configure_docker_jobs(
images_info = docker_images_helper.get_images_info()
# a. check missing images
print("Start checking missing images in dockerhub")
if not rebuild_all_dockers:
# FIXME: we need login as docker manifest inspect goes directly to one of the *.docker.com hosts instead of "registry-mirrors" : ["http://dockerhub-proxy.dockerhub-proxy-zone:5000"]
# find if it's possible to use the setting of /etc/docker/daemon.json
docker_images_helper.docker_login()
if not rebuild_all_dockers:
print("Start checking missing images in dockerhub")
missing_multi_dict = check_missing_images_on_dockerhub(imagename_digest_dict)
missing_multi = list(missing_multi_dict)
missing_amd64 = []
@@ -305,6 +305,15 @@ def _configure_docker_jobs(
"aarch64",
)
)
# FIXME: temporary hack, remove after transition to docker digest as tag
else:
if missing_multi:
print(
f"WARNING: Missing images {list(missing_multi)} - fallback to latest tag"
)
for image in missing_multi:
imagename_digest_dict[image] = "latest"
print("...checking missing images in dockerhub - done")
else:
# add all images to missing
missing_multi = list(imagename_digest_dict)
@@ -315,16 +324,7 @@ def _configure_docker_jobs(
for name in imagename_digest_dict
if not images_info[name]["only_amd64"]
]
# FIXME: temporary hack, remove after transition to docker digest as tag
if docker_digest_or_latest:
if missing_multi:
print(
f"WARNING: Missing images {list(missing_multi)} - fallback to latest tag"
)
for image in missing_multi:
imagename_digest_dict[image] = "latest"
print("...checking missing images in dockerhub - done")
return {
"images": imagename_digest_dict,
"missing_aarch64": missing_aarch64,
@@ -548,14 +548,14 @@ def main() -> int:
if args.configure:
GR = GitRunner()
pr_info = PRInfo(need_changed_files=True)
pr_info = PRInfo()
docker_data = {}
git_ref = GR.run(f"{GIT_PREFIX} rev-parse HEAD")
# if '#no-merge-commit' is set in commit message - set git ref to PR branch head to avoid merge-commit
tokens = []
if pr_info.number != 0:
if pr_info.number != 0 and not args.skip_jobs:
message = GR.run(f"{GIT_PREFIX} log {pr_info.sha} --format=%B -n 1")
tokens = _fetch_commit_tokens(message)
print(f"Found commit message tokens: [{tokens}]")
@@ -689,7 +689,8 @@ def main() -> int:
elif args.mark_success:
assert indata, "Run config must be provided via --infile"
job = args.job_name
num_batches = CI_CONFIG.get_job_config(job).num_batches
job_config = CI_CONFIG.get_job_config(job)
num_batches = job_config.num_batches
assert (
num_batches <= 1 or 0 <= args.batch < num_batches
), f"--batch must be provided and in range [0, {num_batches}) for {job}"
@@ -706,7 +707,7 @@ def main() -> int:
if not CommitStatusData.is_present():
# apparently exit after rerun-helper check
# do nothing, exit without failure
print("ERROR: no status file for job [{job}]")
print(f"ERROR: no status file for job [{job}]")
job_status = CommitStatusData(
status="dummy failure",
description="dummy status",
@@ -717,7 +718,9 @@ def main() -> int:
job_status = CommitStatusData.load_status()
# Storing job data (report_url) to restore OK GH status on job results reuse
if job_status.is_ok():
if job_config.run_always:
print(f"Job [{job}] runs always in CI - do not mark as done")
elif job_status.is_ok():
success_flag_name = get_file_flag_name(
job, indata["jobs_data"]["digests"][job], args.batch, num_batches
)
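
Before moving on to the second file: the configure step above reads control tokens such as '#no-merge-commit' out of the commit message via `_fetch_commit_tokens`. A rough, hypothetical approximation of that helper (the real token grammar may well differ) is:

```python
import re
from typing import List

def fetch_commit_tokens(message: str) -> List[str]:
    # Hypothetical stand-in for _fetch_commit_tokens: collect '#...' style
    # control tokens from the commit message body. The real helper may use
    # a different pattern or only accept a known set of tokens.
    return re.findall(r"#[\w-]+", message)

msg = "Fix for run_always job - do not set done\n #no-merge-commit"
print(fetch_commit_tokens(msg))  # ['#no-merge-commit']
```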


@@ -2,7 +2,7 @@
import json
import logging
import os
from typing import Dict, List, Set, Union, Literal
from typing import Dict, List, Set, Union
from unidiff import PatchSet # type: ignore
@@ -93,6 +93,7 @@ class PRInfo:
github_event = PRInfo.default_event.copy()
self.event = github_event
self.changed_files = set() # type: Set[str]
self.changed_files_requested = False
self.body = ""
self.diff_urls = [] # type: List[str]
# release_pr and merged_pr are used for docker images additional cache
@@ -285,6 +286,7 @@ class PRInfo:
response.raise_for_status()
diff_object = PatchSet(response.text)
self.changed_files.update({f.path for f in diff_object})
self.changed_files_requested = True
print(f"Fetched info about {len(self.changed_files)} changed files")
def get_dict(self):
@@ -297,9 +299,10 @@ class PRInfo:
}
def has_changes_in_documentation(self) -> bool:
# If the list wasn't built yet the best we can do is to
# assume that there were changes.
if self.changed_files is None or not self.changed_files:
if not self.changed_files_requested:
self.fetch_changed_files()
if not self.changed_files:
return True
for f in self.changed_files:
@@ -316,7 +319,11 @@
checks if changes are docs related without other changes
FIXME: avoid hardcoding filenames here
"""
if not self.changed_files_requested:
self.fetch_changed_files()
if not self.changed_files:
# if no changes at all return False
return False
for f in self.changed_files:
@@ -332,7 +339,10 @@ class PRInfo:
return True
def has_changes_in_submodules(self):
if self.changed_files is None or not self.changed_files:
if not self.changed_files_requested:
self.fetch_changed_files()
if not self.changed_files:
return True
for f in self.changed_files:
@@ -340,75 +350,6 @@ class PRInfo:
return True
return False
def can_skip_builds_and_use_version_from_master(self):
if FORCE_TESTS_LABEL in self.labels:
return False
if self.changed_files is None or not self.changed_files:
return False
return not any(
f.startswith("programs")
or f.startswith("src")
or f.startswith("base")
or f.startswith("cmake")
or f.startswith("rust")
or f == "CMakeLists.txt"
or f == "tests/ci/build_check.py"
for f in self.changed_files
)
def can_skip_integration_tests(self, versions: List[str]) -> bool:
if FORCE_TESTS_LABEL in self.labels:
return False
# If docker image(s) relevant to integration tests are updated
if any(self.sha in version for version in versions):
return False
if self.changed_files is None or not self.changed_files:
return False
if not self.can_skip_builds_and_use_version_from_master():
return False
# Integration tests can be skipped if integration tests are not changed
return not any(
f.startswith("tests/integration/")
or f == "tests/ci/integration_test_check.py"
for f in self.changed_files
)
def can_skip_functional_tests(
self, version: str, test_type: Literal["stateless", "stateful"]
) -> bool:
if FORCE_TESTS_LABEL in self.labels:
return False
# If docker image(s) relevant to functional tests are updated
if self.sha in version:
return False
if self.changed_files is None or not self.changed_files:
return False
if not self.can_skip_builds_and_use_version_from_master():
return False
# Functional tests can be skipped if queries tests are not changed
if test_type == "stateless":
return not any(
f.startswith("tests/queries/0_stateless")
or f == "tests/ci/functional_test_check.py"
for f in self.changed_files
)
else: # stateful
return not any(
f.startswith("tests/queries/1_stateful")
or f == "tests/ci/functional_test_check.py"
for f in self.changed_files
)
class FakePRInfo:
def __init__(self):
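
The pr_info.py changes above make the changed-files list lazy: callers no longer need `PRInfo(need_changed_files=True)`, because each helper fetches the diff on first use and tracks that via `changed_files_requested`. A minimal stand-alone sketch of the pattern (the stubbed fetch_changed_files is illustrative, not the real implementation):

```python
from typing import Set

class ChangedFilesInfo:
    # Minimal stand-in for the reworked PRInfo: the changed-files set is
    # filled lazily, and changed_files_requested distinguishes "not fetched
    # yet" from "fetched, and the diff is genuinely empty".
    def __init__(self) -> None:
        self.changed_files: Set[str] = set()
        self.changed_files_requested = False

    def fetch_changed_files(self) -> None:
        # the real PRInfo downloads and parses the PR diff here;
        # this stub just pretends the PR touched the documentation
        self.changed_files.update({"docs/en/index.md"})
        self.changed_files_requested = True

    def has_changes_in_documentation(self) -> bool:
        if not self.changed_files_requested:
            self.fetch_changed_files()
        if not self.changed_files:
            # empty diff: stay conservative and assume docs may have changed
            return True
        return any(f.startswith("docs/") for f in self.changed_files)

print(ChangedFilesInfo().has_changes_in_documentation())  # True
```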