diff --git a/.github/workflows/pull_request.yml b/.github/workflows/pull_request.yml
index 476cdd57e18..c2e76de5e14 100644
--- a/.github/workflows/pull_request.yml
+++ b/.github/workflows/pull_request.yml
@@ -97,7 +97,7 @@ jobs:
   # for main CI chain
   #
   Builds_1:
-    needs: [RunConfig, FastTest]
+    needs: [RunConfig, StyleCheck, FastTest]
     if: ${{ !failure() && !cancelled() && contains(fromJson(needs.RunConfig.outputs.data).stages_data.stages_to_do, 'Builds_1') }}
     # using callable wf (reusable_stage.yml) allows to group all nested jobs under a tab
     uses: ./.github/workflows/reusable_build_stage.yml
diff --git a/tests/ci/build_report_check.py b/tests/ci/build_report_check.py
index 48640f15ac0..c1dd2910788 100644
--- a/tests/ci/build_report_check.py
+++ b/tests/ci/build_report_check.py
@@ -50,7 +50,7 @@ def main():
 
     builds_for_check = CI_CONFIG.get_builds_for_report(
         build_check_name,
-        release=pr_info.is_release(),
+        release=pr_info.is_release,
         backport=pr_info.head_ref.startswith("backport/"),
     )
     required_builds = len(builds_for_check)
diff --git a/tests/ci/ci.py b/tests/ci/ci.py
index f11d62e9136..24d6d95bd0d 100644
--- a/tests/ci/ci.py
+++ b/tests/ci/ci.py
@@ -17,7 +17,7 @@ from typing import Any, Dict, List, Optional, Sequence, Set, Tuple, Union
 import docker_images_helper
 import upload_result_helper
 from build_check import get_release_or_pr
-from ci_config import CI_CONFIG, Build, CIStages, Labels, JobNames
+from ci_config import CI_CONFIG, Build, CIStages, JobNames, Labels
 from ci_utils import GHActions, is_hex, normalize_string
 from clickhouse_helper import (
     CiLogsCredentials,
@@ -49,7 +49,7 @@ from env_helper import (
 from get_robot_token import get_best_robot_token
 from git_helper import GIT_PREFIX, Git
 from git_helper import Runner as GitRunner
-from github import Github
+from github_helper import GitHub
 from pr_info import PRInfo
 from report import ERROR, SUCCESS, BuildResult, JobReport
 from s3_helper import S3Helper
@@ -770,7 +770,7 @@ class CiOptions:
         res = CiOptions()
         pr_info = PRInfo()
         if (
-            not pr_info.is_pr() and not debug_message
+            not pr_info.is_pr and not debug_message
         ):  # if commit_message is provided it's test/debug scenario - do not return
             # CI options can be configured in PRs only
             # if debug_message is provided - it's a test
@@ -1218,19 +1218,19 @@ def _mark_success_action(
     if job_config.run_always or job_config.run_by_label:
         print(f"Job [{job}] runs always or by label in CI - do not cache")
     else:
-        if pr_info.is_master():
+        if pr_info.is_master:
             pass
             # delete method is disabled for ci_cache. need it?
             # pending enabled for master branch jobs only
             # ci_cache.delete_pending(job, batch, num_batches, release_branch=True)
         if job_status and job_status.is_ok():
             ci_cache.push_successful(
-                job, batch, num_batches, job_status, pr_info.is_release_branch()
+                job, batch, num_batches, job_status, pr_info.is_release_branch
             )
             print(f"Job [{job}] is ok")
         elif job_status and not job_status.is_ok():
             ci_cache.push_failed(
-                job, batch, num_batches, job_status, pr_info.is_release_branch()
+                job, batch, num_batches, job_status, pr_info.is_release_branch
             )
             print(f"Job [{job}] is failed with status [{job_status.status}]")
         else:
@@ -1238,7 +1238,7 @@ def _mark_success_action(
                 description="dummy description", status=ERROR, report_url="dummy url"
             )
             ci_cache.push_failed(
-                job, batch, num_batches, job_status, pr_info.is_release_branch()
+                job, batch, num_batches, job_status, pr_info.is_release_branch
             )
             print(f"No CommitStatusData for [{job}], push dummy failure to ci_cache")
 
@@ -1354,9 +1354,9 @@ def _configure_jobs(
         batches_to_do: List[int] = []
         add_to_skip = False
 
-        if job_config.pr_only and pr_info.is_release_branch():
+        if job_config.pr_only and pr_info.is_release_branch:
             continue
-        if job_config.release_only and not pr_info.is_release_branch():
+        if job_config.release_only and not pr_info.is_release_branch:
             continue
 
         # fill job randomization buckets (for jobs with configured @random_bucket property))
@@ -1379,7 +1379,7 @@ def _configure_jobs(
                     job,
                     batch,
                     num_batches,
-                    release_branch=pr_info.is_release_branch()
+                    release_branch=pr_info.is_release_branch
                     and job_config.required_on_release_branch,
                 ):
                     # ci cache is enabled and job is not in the cache - add
@@ -1390,7 +1390,7 @@ def _configure_jobs(
                     job,
                     batch,
                     num_batches,
-                    release_branch=pr_info.is_release_branch()
+                    release_branch=pr_info.is_release_branch
                     and job_config.required_on_release_branch,
                 ):
                     if job in jobs_to_wait:
@@ -1413,7 +1413,7 @@ def _configure_jobs(
                 # treat job as being skipped only if it's controlled by digest
                 jobs_to_skip.append(job)
 
-    if not pr_info.is_release_branch():
+    if not pr_info.is_release_branch:
         # randomization bucket filtering (pick one random job from each bucket, for jobs with configured random_bucket property)
         for _, jobs in randomization_buckets.items():
             jobs_to_remove_randomization = set()
@@ -1435,7 +1435,7 @@ def _configure_jobs(
         jobs_to_do, jobs_to_skip, jobs_params
     )
 
-    if pr_info.is_merge_queue():
+    if pr_info.is_merge_queue:
        # FIXME: Quick support for MQ workflow which is only StyleCheck for now
         jobs_to_do = [JobNames.STYLE_CHECK]
         jobs_to_skip = []
@@ -1504,7 +1504,7 @@ def _update_gh_statuses_action(indata: Dict, s3: S3Helper) -> None:
 
     # create GH status
     pr_info = PRInfo()
-    commit = get_commit(Github(get_best_robot_token(), per_page=100), pr_info.sha)
+    commit = get_commit(GitHub(get_best_robot_token(), per_page=100), pr_info.sha)
 
     def _concurrent_create_status(job: str, batch: int, num_batches: int) -> None:
         job_status = ci_cache.get_successful(job, batch, num_batches)
@@ -1551,7 +1551,7 @@ def _fetch_commit_tokens(message: str, pr_info: PRInfo) -> List[str]:
     ]
     print(f"CI modifyers from commit message: [{res}]")
     res_2 = []
-    if pr_info.is_pr():
+    if pr_info.is_pr:
         matches = [match[-1] for match in re.findall(pattern, pr_info.body)]
         res_2 = [
             match
@@ -1626,7 +1626,7 @@ def _upload_build_artifacts(
 
     # Upload head master binaries
     static_bin_name = CI_CONFIG.build_config[build_name].static_binary_name
-    if pr_info.is_master() and static_bin_name:
+    if pr_info.is_master and static_bin_name:
         # Full binary with debug info:
         s3_path_full = "/".join((pr_info.base_ref, static_bin_name, "clickhouse-full"))
         binary_full = Path(job_report.build_dir_for_upload) / "clickhouse"
@@ -1908,11 +1908,11 @@ def main() -> int:
         if not args.skip_jobs:
             ci_cache = CiCache(s3, jobs_data["digests"])
 
-            if pr_info.is_master():
+            if pr_info.is_master:
                 # wait for pending jobs to be finished, await_jobs is a long blocking call
                 # wait pending jobs (for now only on release/master branches)
                 ready_jobs_batches_dict = ci_cache.await_jobs(
-                    jobs_data.get("jobs_to_wait", {}), pr_info.is_release_branch()
+                    jobs_data.get("jobs_to_wait", {}), pr_info.is_release_branch
                 )
                 jobs_to_do = jobs_data["jobs_to_do"]
                 jobs_to_skip = jobs_data["jobs_to_skip"]
@@ -1929,7 +1929,7 @@ def main() -> int:
                        del jobs_params[job]
 
            # set planned jobs as pending in the CI cache if on the master
-            if pr_info.is_master():
+            if pr_info.is_master:
                 for job in jobs_data["jobs_to_do"]:
                     config = CI_CONFIG.get_job_config(job)
                     if config.run_always or config.run_by_label:
@@ -1939,7 +1939,7 @@ def main() -> int:
                         job,
                         job_params["batches"],
                         config.num_batches,
-                        release_branch=pr_info.is_release_branch(),
+                        release_branch=pr_info.is_release_branch,
                     )
 
             if "jobs_to_wait" in jobs_data:
@@ -1994,7 +1994,7 @@ def main() -> int:
         else:
             # this is a test job - check if GH commit status or cache record is present
             commit = get_commit(
-                Github(get_best_robot_token(), per_page=100), pr_info.sha
+                GitHub(get_best_robot_token(), per_page=100), pr_info.sha
             )
 
             # rerun helper check
@@ -2110,7 +2110,7 @@ def main() -> int:
                 additional_urls=additional_urls or None,
             )
             commit = get_commit(
-                Github(get_best_robot_token(), per_page=100), pr_info.sha
+                GitHub(get_best_robot_token(), per_page=100), pr_info.sha
             )
             post_commit_status(
                 commit,
@@ -2121,7 +2121,7 @@ def main() -> int:
                 pr_info,
                 dump_to_file=True,
             )
-            if not pr_info.is_merge_queue():
+            if not pr_info.is_merge_queue:
                 # in the merge queue mergeable status must be set only in FinishCheck (last job in wf)
                 update_mergeable_check(
                     commit,
diff --git a/tests/ci/commit_status_helper.py b/tests/ci/commit_status_helper.py
index 56728c3d3ba..2ee526bdd39 100644
--- a/tests/ci/commit_status_helper.py
+++ b/tests/ci/commit_status_helper.py
@@ -149,7 +149,7 @@ def set_status_comment(commit: Commit, pr_info: PRInfo) -> None:
     one, so the method does nothing for simple pushes and pull requests with
     `release`/`release-lts` labels"""
 
-    if pr_info.is_merge_queue():
+    if pr_info.is_merge_queue:
         # skip report creation for the MQ
         return
 
@@ -448,7 +448,7 @@ def update_mergeable_check(commit: Commit, pr_info: PRInfo, check_name: str) ->
     )
 
     # FIXME: For now, always set mergeable check in the Merge Queue. It's required to pass MQ
-    if not_run and not pr_info.is_merge_queue():
+    if not_run and not pr_info.is_merge_queue:
         # Let's avoid unnecessary work
         return
 
diff --git a/tests/ci/docker_server.py b/tests/ci/docker_server.py
index 230f3e56110..151cc5a4c02 100644
--- a/tests/ci/docker_server.py
+++ b/tests/ci/docker_server.py
@@ -362,7 +362,7 @@ def main():
     del args.image_repo
     del args.push
 
-    if pr_info.is_master():
+    if pr_info.is_master:
         push = True
 
     image = DockerImageData(image_path, image_repo, False)
@@ -374,9 +374,10 @@ def main():
 
     for arch, build_name in zip(ARCH, ("package_release", "package_aarch64")):
         if not args.bucket_prefix:
-            repo_urls[
-                arch
-            ] = f"{S3_DOWNLOAD}/{S3_BUILDS_BUCKET}/{release_or_pr}/{pr_info.sha}/{build_name}"
+            repo_urls[arch] = (
+                f"{S3_DOWNLOAD}/{S3_BUILDS_BUCKET}/"
+                f"{release_or_pr}/{pr_info.sha}/{build_name}"
+            )
         else:
             repo_urls[arch] = f"{args.bucket_prefix}/{build_name}"
     if args.allow_build_reuse:
diff --git a/tests/ci/finish_check.py b/tests/ci/finish_check.py
index 617f4c9d88c..79926b33dc0 100644
--- a/tests/ci/finish_check.py
+++ b/tests/ci/finish_check.py
@@ -28,7 +28,7 @@ def main():
     statuses = get_commit_filtered_statuses(commit)
     trigger_mergeable_check(commit, statuses)
 
-    if not pr_info.is_merge_queue():
+    if not pr_info.is_merge_queue:
         statuses = [s for s in statuses if s.context == CI_STATUS_NAME]
         if not statuses:
             return
diff --git a/tests/ci/jepsen_check.py b/tests/ci/jepsen_check.py
index 011ecff635e..6ed411a11ef 100644
--- a/tests/ci/jepsen_check.py
+++ b/tests/ci/jepsen_check.py
@@ -200,7 +200,7 @@ def main():
     # always use latest
     docker_image = KEEPER_IMAGE_NAME if args.program == "keeper" else SERVER_IMAGE_NAME
 
-    if pr_info.is_scheduled() or pr_info.is_dispatched():
+    if pr_info.is_scheduled or pr_info.is_dispatched:
         # get latest clcikhouse by the static link for latest master buit - get its version and provide permanent url for this version to the jepsen
         build_url = f"{S3_URL}/{S3_BUILDS_BUCKET}/master/amd64/clickhouse"
         download_build_with_progress(build_url, Path(TEMP_PATH) / "clickhouse")
diff --git a/tests/ci/pr_info.py b/tests/ci/pr_info.py
index 293004fc4f3..c61e62f334c 100644
--- a/tests/ci/pr_info.py
+++ b/tests/ci/pr_info.py
@@ -310,27 +310,34 @@ class PRInfo:
         if need_changed_files:
             self.fetch_changed_files()
 
+    @property
     def is_master(self) -> bool:
         return self.number == 0 and self.head_ref == "master"
 
+    @property
     def is_release(self) -> bool:
         return self.number == 0 and bool(
             re.match(r"^2[1-9]\.[1-9][0-9]*$", self.head_ref)
         )
 
+    @property
     def is_release_branch(self) -> bool:
         return self.number == 0
 
+    @property
     def is_pr(self):
         return self.event_type == EventType.PULL_REQUEST
 
-    def is_scheduled(self):
+    @property
+    def is_scheduled(self) -> bool:
         return self.event_type == EventType.SCHEDULE
 
-    def is_merge_queue(self):
+    @property
+    def is_merge_queue(self) -> bool:
         return self.event_type == EventType.MERGE_QUEUE
 
-    def is_dispatched(self):
+    @property
+    def is_dispatched(self) -> bool:
         return self.event_type == EventType.DISPATCH
 
     def compare_pr_url(self, pr_object: dict) -> str:
diff --git a/tests/ci/run_check.py b/tests/ci/run_check.py
index 435a5f726f2..262786d8228 100644
--- a/tests/ci/run_check.py
+++ b/tests/ci/run_check.py
@@ -8,6 +8,7 @@ from github import Github
 
 # isort: on
 
+from cherry_pick import Labels
 from commit_status_helper import (
     CI_STATUS_NAME,
     create_ci_report,
@@ -26,7 +27,6 @@ from lambda_shared_package.lambda_shared.pr import (
 )
 from pr_info import PRInfo
 from report import FAILURE, PENDING, SUCCESS
-from cherry_pick import Labels
 
 TRUSTED_ORG_IDS = {
     54801242,  # clickhouse
@@ -202,7 +202,7 @@ def main():
     ci_report_url = create_ci_report(pr_info, [])
     print("::notice ::Can run")
 
-    if not pr_info.is_merge_queue():
+    if not pr_info.is_merge_queue:
         # we need clean CI status for MQ to merge (no pending statuses)
         post_commit_status(
             commit,
diff --git a/tests/ci/style_check.py b/tests/ci/style_check.py
index 4580f007606..d49cd283e9f 100644
--- a/tests/ci/style_check.py
+++ b/tests/ci/style_check.py
@@ -132,7 +132,7 @@ def main():
 
     pr_info = PRInfo()
 
-    if pr_info.is_merge_queue() and args.push:
+    if pr_info.is_merge_queue and args.push:
         print("Auto style fix will be disabled for Merge Queue workflow")
         args.push = False
 
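
Reviewer note on the central change (not part of the patch): the PRInfo checkers such as is_master, is_release and is_merge_queue become read-only properties, which is why every call site above drops the parentheses. A likely motivation, stated here as an assumption: with plain methods, an accidental "if pr_info.is_master:" (missing parentheses) evaluates the bound method object, which is always truthy; with properties the unparenthesized form is the correct one. A minimal sketch of the pattern using a hypothetical stand-in class (PRInfoSketch is illustrative, not the real tests/ci/pr_info.PRInfo):

from enum import Enum


class EventType(Enum):
    PULL_REQUEST = "pull_request"
    MERGE_QUEUE = "merge_queue"


class PRInfoSketch:
    """Hypothetical, trimmed-down stand-in for PRInfo with two checkers."""

    def __init__(self, number: int, head_ref: str, event_type: EventType):
        self.number = number
        self.head_ref = head_ref
        self.event_type = event_type

    @property
    def is_master(self) -> bool:
        # Plain attribute access now runs the check; no () at call sites.
        return self.number == 0 and self.head_ref == "master"

    @property
    def is_merge_queue(self) -> bool:
        return self.event_type == EventType.MERGE_QUEUE


info = PRInfoSketch(number=0, head_ref="master", event_type=EventType.PULL_REQUEST)
assert info.is_master            # mirrors the updated call sites in the diff
assert not info.is_merge_queue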
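
A related note: is_release keys off the branch-name pattern ^2[1-9]\.[1-9][0-9]*$ shown in the pr_info.py hunk. A small sketch of what that pattern accepts, reusing the same regex (the sample branch names are illustrative):

import re

# Same pattern as in the pr_info.py hunk above.
RELEASE_BRANCH_RE = re.compile(r"^2[1-9]\.[1-9][0-9]*$")

# Version-style branch names such as "24.3" or "23.12" match;
# "master" and zero-led minor versions such as "24.01" do not.
assert RELEASE_BRANCH_RE.match("24.3")
assert RELEASE_BRANCH_RE.match("23.12")
assert RELEASE_BRANCH_RE.match("master") is None
assert RELEASE_BRANCH_RE.match("24.01") is None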
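
Finally, the docker_server.py hunk only reflows a long f-string: adjacent f-string literals inside parentheses are concatenated at compile time, so the resulting URL is unchanged. A quick sketch with placeholder values (the constants and values below are illustrative, not the real CI constants):

S3_DOWNLOAD = "https://s3.example.com"  # placeholder, not the real constant
S3_BUILDS_BUCKET = "builds"             # placeholder
release_or_pr, sha, build_name = "12345", "deadbeef", "package_release"

single_line = f"{S3_DOWNLOAD}/{S3_BUILDS_BUCKET}/{release_or_pr}/{sha}/{build_name}"
wrapped = (
    f"{S3_DOWNLOAD}/{S3_BUILDS_BUCKET}/"
    f"{release_or_pr}/{sha}/{build_name}"
)
assert single_line == wrapped  # implicit concatenation keeps the value identical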