diff --git a/tests/ci/artifactory.py b/tests/ci/artifactory.py index 9457fa32ad3..c66659d4e93 100644 --- a/tests/ci/artifactory.py +++ b/tests/ci/artifactory.py @@ -1,15 +1,16 @@ import argparse import time from pathlib import Path -from typing import Optional from shutil import copy2 +from typing import Optional + +from ci_utils import Shell, WithIter from create_release import ( PackageDownloader, - ReleaseInfo, ReleaseContextManager, + ReleaseInfo, ReleaseProgress, ) -from ci_utils import WithIter, Shell class MountPointApp(metaclass=WithIter): @@ -54,21 +55,27 @@ class R2MountPoint: "-o passwd_file /home/ubuntu/.passwd-s3fs_packages " ) # without -o nomultipart there are errors like "Error 5 writing to /home/ubuntu/***.deb: Input/output error" - self.mount_cmd = f"s3fs {self.bucket_name} {self.MOUNT_POINT} -o url={self.API_ENDPOINT} -o use_path_request_style -o umask=0000 -o nomultipart -o logfile={self.LOG_FILE} {self.aux_mount_options}" + self.mount_cmd = ( + f"s3fs {self.bucket_name} {self.MOUNT_POINT} -o url={self.API_ENDPOINT} " + f"-o use_path_request_style -o umask=0000 -o nomultipart " + f"-o logfile={self.LOG_FILE} {self.aux_mount_options}" + ) elif self.app == MountPointApp.GEESEFS: self.cache_dir = "/home/ubuntu/geesefs_cache" self.aux_mount_options += ( f" --cache={self.cache_dir} " if self.CACHE_ENABLED else "" ) if not dry_run: - self.aux_mount_options += f" --shared-config=/home/ubuntu/.r2_auth " + self.aux_mount_options += " --shared-config=/home/ubuntu/.r2_auth " else: - self.aux_mount_options += ( - f" --shared-config=/home/ubuntu/.r2_auth_test " - ) + self.aux_mount_options += " --shared-config=/home/ubuntu/.r2_auth_test " if self.DEBUG: self.aux_mount_options += " --debug_s3 " - self.mount_cmd = f"geesefs --endpoint={self.API_ENDPOINT} --cheap --memory-limit=1000 --gc-interval=100 --max-flushers=10 --max-parallel-parts=1 --max-parallel-copy=10 --log-file={self.LOG_FILE} {self.aux_mount_options} {self.bucket_name} {self.MOUNT_POINT}" + 
self.mount_cmd = ( + f"geesefs --endpoint={self.API_ENDPOINT} --cheap --memory-limit=1000 " + f"--gc-interval=100 --max-flushers=10 --max-parallel-parts=1 --max-parallel-copy=10 " + f"--log-file={self.LOG_FILE} {self.aux_mount_options} {self.bucket_name} {self.MOUNT_POINT}" + ) else: assert False @@ -144,19 +151,32 @@ class DebianArtifactory: Shell.check(cmd, strict=True) Shell.check("sync") time.sleep(10) - Shell.check(f"lsof +D R2MountPoint.MOUNT_POINT", verbose=True) + Shell.check(f"lsof +D {R2MountPoint.MOUNT_POINT}", verbose=True) def test_packages(self): Shell.check("docker pull ubuntu:latest", strict=True) print(f"Test packages installation, version [{self.version}]") - debian_command = f"echo 'deb {self.repo_url} stable main' | tee /etc/apt/sources.list.d/clickhouse.list; apt update -y; apt-get install -y clickhouse-common-static={self.version} clickhouse-client={self.version}" - cmd = f'docker run --rm ubuntu:latest bash -c "apt update -y; apt install -y sudo gnupg ca-certificates; apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 8919F6BD2B48D754; {debian_command}"' + debian_command = ( + f"echo 'deb {self.repo_url} stable main' | " + "tee /etc/apt/sources.list.d/clickhouse.list; apt update -y; " + f"apt-get install -y clickhouse-common-static={self.version} clickhouse-client={self.version}" + ) + cmd = ( + "docker run --rm ubuntu:latest bash -c " + f'"apt update -y; apt install -y sudo gnupg ca-certificates; apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 8919F6BD2B48D754; {debian_command}"' + ) print("Running test command:") print(f" {cmd}") assert Shell.check(cmd) - print(f"Test packages installation, version [latest]") - debian_command_2 = f"echo 'deb {self.repo_url} stable main' | tee /etc/apt/sources.list.d/clickhouse.list; apt update -y; apt-get install -y clickhouse-common-static clickhouse-client" - cmd = f'docker run --rm ubuntu:latest bash -c "apt update -y; apt install -y sudo gnupg ca-certificates; apt-key adv --keyserver 
hkp://keyserver.ubuntu.com:80 --recv 8919F6BD2B48D754; {debian_command_2}"' + print("Test packages installation, version [latest]") + debian_command_2 = ( + f"echo 'deb {self.repo_url} stable main' | " + "tee /etc/apt/sources.list.d/clickhouse.list; apt update -y; apt-get install -y clickhouse-common-static clickhouse-client" + ) + cmd = ( + "docker run --rm ubuntu:latest bash -c " + f'"apt update -y; apt install -y sudo gnupg ca-certificates; apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 8919F6BD2B48D754; {debian_command_2}"' + ) print("Running test command:") print(f" {cmd}") assert Shell.check(cmd) @@ -236,7 +256,7 @@ class RpmArtifactory: print("Running test command:") print(f" {cmd}") assert Shell.check(cmd) - print(f"Test package installation, version [latest]") + print("Test package installation, version [latest]") rpm_command_2 = f"dnf config-manager --add-repo={self.repo_url} && dnf makecache && dnf -y install clickhouse-client" cmd = f'docker run --rm fedora:latest /bin/bash -c "dnf -y install dnf-plugins-core && dnf config-manager --add-repo={self.repo_url} && {rpm_command_2}"' print("Running test command:") @@ -359,7 +379,7 @@ if __name__ == "__main__": """ S3FS - very slow with a big repo RCLONE - fuse had many different errors with r2 remote and completely removed - GEESEFS ? + GEESEFS ? 
""" mp = R2MountPoint(MountPointApp.GEESEFS, dry_run=args.dry_run) if args.export_debian: diff --git a/tests/ci/auto_release.py b/tests/ci/auto_release.py index 89714b2fb4b..bb05e487a5e 100644 --- a/tests/ci/auto_release.py +++ b/tests/ci/auto_release.py @@ -5,13 +5,13 @@ import os import sys from typing import List -from get_robot_token import get_best_robot_token -from github_helper import GitHub -from ci_utils import Shell -from env_helper import GITHUB_REPOSITORY -from report import SUCCESS from ci_buddy import CIBuddy from ci_config import CI +from ci_utils import Shell +from env_helper import GITHUB_REPOSITORY +from get_robot_token import get_best_robot_token +from github_helper import GitHub +from report import SUCCESS def parse_args(): @@ -229,7 +229,7 @@ def main(): ) else: CIBuddy(dry_run=False).post_info( - title=f"Autorelease completed", + title="Autorelease completed", body="", with_wf_link=True, ) diff --git a/tests/ci/changelog.py b/tests/ci/changelog.py index 8a202480d45..efe16a57ca4 100755 --- a/tests/ci/changelog.py +++ b/tests/ci/changelog.py @@ -467,17 +467,19 @@ def main(): if branch and patch and Shell.check(f"git show-ref --quiet {branch}"): if patch > 1: query += f" base:{branch}" - print( - f"NOTE: It's a patch [{patch}]. will use base branch to filter PRs [{branch}]" + logging.info( + "NOTE: It's a patch [%s]. will use base branch to filter PRs [%s]", + patch, + branch, ) else: - print( - f"NOTE: It's a first patch version. should count PRs merged on master - won't filter PRs by branch" + logging.info( + "NOTE: It's a first patch version. 
should count PRs merged on master - won't filter PRs by branch" ) else: - print(f"ERROR: invalid branch {branch} - pass") + logging.error("ERROR: invalid branch %s - pass", branch) - print(f"Fetch PRs with query {query}") + logging.info("Fetch PRs with query %s", query) prs = gh.get_pulls_from_search( query=query, merged=merged, sort="created", progress_func=tqdm.tqdm ) diff --git a/tests/ci/ci.py b/tests/ci/ci.py index cd635713eb1..7cc32fa19c5 100644 --- a/tests/ci/ci.py +++ b/tests/ci/ci.py @@ -995,7 +995,7 @@ def _run_test(job_name: str, run_command: str) -> int: jr = JobReport.load() if jr.dummy: print( - f"ERROR: Run action failed with timeout and did not generate JobReport - update dummy report with execution time" + "ERROR: Run action failed with timeout and did not generate JobReport - update dummy report with execution time" ) jr.test_results = [TestResult.create_check_timeout_expired()] jr.duration = stopwatch.duration_seconds @@ -1305,7 +1305,7 @@ def main() -> int: elif job_report.job_skipped: print(f"Skipped after rerun check {[args.job_name]} - do nothing") else: - print(f"ERROR: Job was killed - generate evidence") + print("ERROR: Job was killed - generate evidence") job_report.update_duration() ret_code = os.getenv("JOB_EXIT_CODE", "") if ret_code: diff --git a/tests/ci/create_release.py b/tests/ci/create_release.py index 4d21c628d7f..8e07243b98b 100755 --- a/tests/ci/create_release.py +++ b/tests/ci/create_release.py @@ -352,11 +352,20 @@ class ReleaseInfo: with checkout_new(branch_upd_version_contributors): update_cmake_version(version) update_contributors(raise_error=True) - cmd_commit_version_upd = f"{GIT_PREFIX} commit '{CMAKE_PATH}' '{CONTRIBUTORS_PATH}' -m 'Update autogenerated version to {self.version} and contributors'" + cmd_commit_version_upd = ( + f"{GIT_PREFIX} commit '{CMAKE_PATH}' '{CONTRIBUTORS_PATH}' " + f"-m 'Update autogenerated version to {self.version} and contributors'" + ) cmd_push_branch = f"{GIT_PREFIX} push --set-upstream 
origin {branch_upd_version_contributors}" actor = os.getenv("GITHUB_ACTOR", "") or "me" - body = f"Automatic version bump after release {self.release_tag}\n### Changelog category (leave one):\n- Not for changelog (changelog entry is not required)\n" - cmd_create_pr = f"gh pr create --repo {CI.Envs.GITHUB_REPOSITORY} --title 'Update version after release' --head {branch_upd_version_contributors} --base master --body \"{body}\" --assignee {actor}" + body = ( + f"Automatic version bump after release {self.release_tag}\n" + "### Changelog category (leave one):\n- Not for changelog (changelog entry is not required)\n" + ) + cmd_create_pr = ( + f"gh pr create --repo {CI.Envs.GITHUB_REPOSITORY} --title 'Update version after release' " + f'--head {branch_upd_version_contributors} --base master --body "{body}" --assignee {actor}' + ) Shell.check( cmd_commit_version_upd, strict=True, @@ -447,7 +456,7 @@ class ReleaseInfo: else: print("Dry-run, would run commands:") print("\n * ".join(cmds)) - self.release_url = f"dry-run" + self.release_url = "dry-run" self.dump() def merge_prs(self, dry_run: bool) -> None: diff --git a/tests/ci/functional_test_check.py b/tests/ci/functional_test_check.py index 372c66ebd3f..a49289e6e4c 100644 --- a/tests/ci/functional_test_check.py +++ b/tests/ci/functional_test_check.py @@ -247,7 +247,7 @@ def handle_sigterm(signum, _frame): print(f"WARNING: Received signal {signum}") global timeout_expired # pylint:disable=global-statement timeout_expired = True - Shell.check(f"docker exec func-tester pkill -f clickhouse-test", verbose=True) + Shell.check("docker exec func-tester pkill -f clickhouse-test", verbose=True) def main(): @@ -401,7 +401,7 @@ def main(): failed_cnt and failed_cnt <= CI.MAX_TOTAL_FAILURES_PER_JOB_BEFORE_BLOCKING_CI ): - print(f"Won't block the CI workflow") + print("Won't block the CI workflow") should_block_ci = False if should_block_ci: diff --git a/tests/ci/integration_test_check.py b/tests/ci/integration_test_check.py index 
6245f0490fc..fd00be8c94f 100644 --- a/tests/ci/integration_test_check.py +++ b/tests/ci/integration_test_check.py @@ -9,7 +9,10 @@ import sys from pathlib import Path from typing import Dict, List, Tuple +import integration_tests_runner as runner from build_download_helper import download_all_deb_packages +from ci_config import CI +from ci_utils import Utils from docker_images_helper import DockerImage, get_docker_image from download_release_packages import download_last_release from env_helper import REPO_COPY, REPORT_PATH, TEMP_PATH @@ -17,20 +20,16 @@ from integration_test_images import IMAGES from pr_info import PRInfo from report import ( ERROR, + FAILURE, SUCCESS, - StatusType, JobReport, + StatusType, TestResult, TestResults, read_test_results, - FAILURE, ) from stopwatch import Stopwatch -import integration_tests_runner as runner -from ci_config import CI -from ci_utils import Utils - def get_json_params_dict( check_name: str, @@ -249,7 +248,7 @@ def main(): failed_cnt and failed_cnt <= CI.MAX_TOTAL_FAILURES_PER_JOB_BEFORE_BLOCKING_CI ): - print(f"Won't block the CI workflow") + print("Won't block the CI workflow") should_block_ci = False if should_block_ci: