More fixes in tests/ci

Mikhail f. Shiryaev 2024-02-26 19:25:02 +01:00
parent e31078e204
commit 91cff01ce0
GPG Key ID: 4B02ED204C7D93F4
15 changed files with 97 additions and 81 deletions

View File

@@ -8,8 +8,12 @@ import time
from pathlib import Path
from typing import Any, Callable, List, Union
import get_robot_token as grt # we need an updated ROBOT_TOKEN
# isort: off
import requests # type: ignore
# isort: on
import get_robot_token as grt # we need an updated ROBOT_TOKEN
from ci_config import CI_CONFIG
DOWNLOAD_RETRIES_COUNT = 5
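
Reviewer note: this hunk moves the get_robot_token import below the # isort: off / # isort: on pair so that isort leaves the third-party requests import (with its # type: ignore pragma) exactly where it is. A minimal sketch of how those action comments behave, using the same module names as the hunk:

# isort: off
# isort does not touch anything between "isort: off" and "isort: on",
# so this third-party import keeps its position and its mypy pragma.
import requests  # type: ignore

# isort: on
# From here on, imports are grouped and sorted as usual.
import get_robot_token as grt  # we need an updated ROBOT_TOKEN
from ci_config import CI_CONFIG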

View File

@@ -303,7 +303,7 @@ def post_commit_status_to_file(
file_path: Path, description: str, state: str, report_url: str
) -> None:
if file_path.exists():
raise Exception(f'File "{file_path}" already exists!')
raise FileExistsError(f'File "{file_path}" already exists!')
with open(file_path, "w", encoding="utf-8") as f:
out = csv.writer(f, delimiter="\t")
out.writerow([state, report_url, description])
@@ -329,7 +329,7 @@ class CommitStatusData:
@classmethod
def load_from_file(cls, file_path: Union[Path, str]): # type: ignore
res = {}
with open(file_path, "r") as json_file:
with open(file_path, "r", encoding="utf-8") as json_file:
res = json.load(json_file)
return CommitStatusData(**cls._filter_dict(res))
@@ -347,7 +347,7 @@ class CommitStatusData:
def dump_to_file(self, file_path: Union[Path, str]) -> None:
file_path = Path(file_path) or STATUS_FILE_PATH
with open(file_path, "w") as json_file:
with open(file_path, "w", encoding="utf-8") as json_file:
json.dump(asdict(self), json_file)
def is_ok(self):
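
Reviewer note: the changes in this file follow one pattern: every open() call gets an explicit encoding="utf-8" and the generic Exception becomes the specific FileExistsError. A trimmed-down sketch of the same load/dump round trip, combining the exists-check from post_commit_status_to_file with a stand-in for CommitStatusData (the fields here are assumptions, not the real class):

import json
from dataclasses import asdict, dataclass
from pathlib import Path
from typing import Union

@dataclass
class StatusStub:
    # Stand-in for CommitStatusData; the real class has different fields.
    status: str
    description: str

    @classmethod
    def load_from_file(cls, file_path: Union[Path, str]) -> "StatusStub":
        # Explicit encoding so the result does not depend on the runner's locale.
        with open(file_path, "r", encoding="utf-8") as json_file:
            return cls(**json.load(json_file))

    def dump_to_file(self, file_path: Union[Path, str]) -> None:
        file_path = Path(file_path)
        if file_path.exists():
            # Specific exception instead of a bare Exception.
            raise FileExistsError(f'File "{file_path}" already exists!')
        with open(file_path, "w", encoding="utf-8") as json_file:
            json.dump(asdict(self), json_file)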

View File

@@ -26,7 +26,7 @@ DOWNLOAD_RETRIES_COUNT = 5
def process_os_check(log_path: Path) -> TestResult:
name = log_path.name
with open(log_path, "r") as log:
with open(log_path, "r", encoding="utf-8") as log:
line = log.read().split("\n")[0].strip()
if line != "OK":
return TestResult(name, "FAIL")
@@ -35,7 +35,7 @@ def process_os_check(log_path: Path) -> TestResult:
def process_glibc_check(log_path: Path, max_glibc_version: str) -> TestResults:
test_results = [] # type: TestResults
with open(log_path, "r") as log:
with open(log_path, "r", encoding="utf-8") as log:
for line in log:
if line.strip():
columns = line.strip().split(" ")
@@ -204,7 +204,7 @@ def main():
elif "aarch64" in check_name:
max_glibc_version = "2.18" # because of build with newer sysroot?
else:
raise Exception("Can't determine max glibc version")
raise RuntimeError("Can't determine max glibc version")
state, description, test_results, additional_logs = process_result(
result_path,

View File

@@ -195,18 +195,21 @@ def main():
ok_cnt = 0
status = SUCCESS # type: StatusType
image_tags = (
json.loads(args.image_tags)
if not os.path.isfile(args.image_tags)
else json.load(open(args.image_tags))
)
missing_images = (
image_tags
if args.missing_images == "all"
else json.loads(args.missing_images)
if not os.path.isfile(args.missing_images)
else json.load(open(args.missing_images))
)
if os.path.isfile(args.image_tags):
with open(args.image_tags, "r", encoding="utf-8") as jfd:
image_tags = json.load(jfd)
else:
image_tags = json.loads(args.image_tags)
if args.missing_images == "all":
missing_images = image_tags
elif os.path.isfile(args.missing_images):
with open(args.missing_images, "r", encoding="utf-8") as jfd:
missing_images = json.load(jfd)
else:
missing_images = json.loads(args.missing_images)
images_build_list = get_images_oredered_list()
for image in images_build_list:
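
Reviewer note: the rewritten block replaces the nested conditional expressions with an explicit if/elif/else, which also lets the JSON files be opened in a with block; the old json.load(open(...)) version leaked the file handle. The same idea as a small standalone helper, with a hypothetical function name:

import json
import os
from typing import Any

def load_json_arg(value: str) -> Any:
    # Hypothetical helper: the CLI argument is either a path to a JSON file
    # or an inline JSON string, so check for the file first, then fall back to parsing.
    if os.path.isfile(value):
        with open(value, "r", encoding="utf-8") as jfd:
            return json.load(jfd)
    return json.loads(value)

# Usage sketch mirroring the script above:
# image_tags = load_json_arg(args.image_tags)
# missing_images = image_tags if args.missing_images == "all" else load_json_arg(args.missing_images)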

View File

@@ -135,18 +135,20 @@ def main():
archs = args.suffixes
assert len(archs) > 1, "arch suffix input param is invalid"
image_tags = (
json.loads(args.image_tags)
if not os.path.isfile(args.image_tags)
else json.load(open(args.image_tags))
)
missing_images = (
list(image_tags)
if args.missing_images == "all"
else json.loads(args.missing_images)
if not os.path.isfile(args.missing_images)
else json.load(open(args.missing_images))
)
if os.path.isfile(args.image_tags):
with open(args.image_tags, "r", encoding="utf-8") as jfd:
image_tags = json.load(jfd)
else:
image_tags = json.loads(args.image_tags)
if args.missing_images == "all":
missing_images = image_tags
elif os.path.isfile(args.missing_images):
with open(args.missing_images, "r", encoding="utf-8") as jfd:
missing_images = json.load(jfd)
else:
missing_images = json.loads(args.missing_images)
test_results = []
status = SUCCESS # type: StatusType

View File

@@ -363,8 +363,8 @@ def main():
image = DockerImageData(image_path, image_repo, False)
args.release_type = auto_release_type(args.version, args.release_type)
tags = gen_tags(args.version, args.release_type)
repo_urls = dict()
direct_urls: Dict[str, List[str]] = dict()
repo_urls = {}
direct_urls: Dict[str, List[str]] = {}
release_or_pr, _ = get_release_or_pr(pr_info, args.version)
for arch, build_name in zip(ARCH, ("package_release", "package_aarch64")):

View File

@@ -1,7 +1,7 @@
#!/usr/bin/env python3
import re
import logging
import re
from typing import List, Optional, Tuple
import requests # type: ignore
@@ -82,13 +82,14 @@ def get_previous_release(server_version: Optional[Version]) -> Optional[ReleaseI
CLICKHOUSE_TAGS_URL, {"page": page, "per_page": 100}, timeout=10
)
if not response.ok:
raise Exception(
"Cannot load the list of tags from github: " + response.reason
logger.error(
"Cannot load the list of tags from github: %s", response.reason
)
response.raise_for_status()
releases_str = set(re.findall(VERSION_PATTERN, response.text))
if len(releases_str) == 0:
raise Exception(
raise ValueError(
"Cannot find previous release for "
+ str(server_version)
+ " server version"

View File

@@ -46,7 +46,7 @@ FAILED_TESTS_ANCHOR = "# Failed tests"
def _parse_jepsen_output(path: Path) -> TestResults:
test_results = [] # type: TestResults
current_type = ""
with open(path, "r") as f:
with open(path, "r", encoding="utf-8") as f:
for line in f:
if SUCCESSFUL_TESTS_ANCHOR in line:
current_type = "OK"
@@ -101,7 +101,7 @@ def prepare_autoscaling_group_and_get_hostnames(count):
instances = get_autoscaling_group_instances_ids(asg_client, JEPSEN_GROUP_NAME)
counter += 1
if counter > 30:
raise Exception("Cannot wait autoscaling group")
raise RuntimeError("Cannot wait autoscaling group")
ec2_client = boto3.client("ec2", region_name="us-east-1")
return get_instances_addresses(ec2_client, instances)
@@ -119,12 +119,12 @@ def clear_autoscaling_group():
instances = get_autoscaling_group_instances_ids(asg_client, JEPSEN_GROUP_NAME)
counter += 1
if counter > 30:
raise Exception("Cannot wait autoscaling group")
raise RuntimeError("Cannot wait autoscaling group")
def save_nodes_to_file(instances: List[Any], temp_path: Path) -> Path:
nodes_path = temp_path / "nodes.txt"
with open(nodes_path, "w") as f:
with open(nodes_path, "w", encoding="utf-8") as f:
f.write("\n".join(instances))
f.flush()
return nodes_path
@@ -159,7 +159,7 @@ def main():
)
args = parser.parse_args()
if args.program != "server" and args.program != "keeper":
if args.program not in ("server", "keeper"):
logging.warning("Invalid argument '%s'", args.program)
sys.exit(0)
@@ -220,7 +220,7 @@ def main():
f"{S3_URL}/{S3_BUILDS_BUCKET}/{version}/{sha}/binary_release/clickhouse"
)
print(f"Clickhouse version: [{version_full}], sha: [{sha}], url: [{build_url}]")
head = requests.head(build_url)
head = requests.head(build_url, timeout=60)
assert head.status_code == 200, f"Clickhouse binary not found: {build_url}"
else:
build_name = get_build_name_for_check(check_name)
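
Reviewer note: two small hardening changes here: the program check becomes a tuple membership test, and the HEAD request gets an explicit timeout so a stalled endpoint cannot hang the job (requests has no default timeout). A sketch under those assumptions, with hypothetical wrapper functions:

import logging
import sys

import requests  # type: ignore

def check_program(program: str) -> None:
    # Membership test reads better than chained "!= ... and != ..." comparisons.
    if program not in ("server", "keeper"):
        logging.warning("Invalid argument '%s'", program)
        sys.exit(0)

def check_binary_exists(build_url: str) -> None:
    # Explicit timeout: without it, requests would wait indefinitely.
    head = requests.head(build_url, timeout=60)
    assert head.status_code == 200, f"Clickhouse binary not found: {build_url}"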

View File

@@ -54,7 +54,7 @@ class Repo:
elif protocol == "origin":
self._url = protocol
else:
raise Exception(f"protocol must be in {self.VALID}")
raise ValueError(f"protocol must be in {self.VALID}")
def __str__(self):
return self._repo
@@ -144,7 +144,7 @@ class Release:
for status in statuses:
if status["context"] == RELEASE_READY_STATUS:
if not status["state"] == SUCCESS:
raise Exception(
raise ValueError(
f"the status {RELEASE_READY_STATUS} is {status['state']}"
", not success"
)
@@ -153,7 +153,7 @@ class Release:
page += 1
raise Exception(
raise KeyError(
f"the status {RELEASE_READY_STATUS} "
f"is not found for commit {self.release_commit}"
)
@@ -188,7 +188,7 @@ class Release:
raise
if check_run_from_master and self._git.branch != "master":
raise Exception("the script must be launched only from master")
raise RuntimeError("the script must be launched only from master")
self.set_release_info()
@@ -229,7 +229,7 @@ class Release:
def check_no_tags_after(self):
tags_after_commit = self.run(f"git tag --contains={self.release_commit}")
if tags_after_commit:
raise Exception(
raise RuntimeError(
f"Commit {self.release_commit} belongs to following tags:\n"
f"{tags_after_commit}\nChoose another commit"
)
@@ -253,7 +253,7 @@ class Release:
)
output = self.run(f"git branch --contains={self.release_commit} {branch}")
if branch not in output:
raise Exception(
raise RuntimeError(
f"commit {self.release_commit} must belong to {branch} "
f"for {self.release_type} release"
)
@@ -464,9 +464,9 @@ class Release:
logging.warning("Rolling back checked out %s for %s", ref, orig_ref)
self.run(f"git reset --hard; git checkout -f {orig_ref}")
raise
else:
if with_checkout_back and need_rollback:
self.run(rollback_cmd)
# Normal flow when we need to checkout back
if with_checkout_back and need_rollback:
self.run(rollback_cmd)
@contextmanager
def _create_branch(self, name: str, start_point: str = "") -> Iterator[None]:
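
Reviewer note: the last hunk flattens a try/except/else. Because the except block re-raises, any code placed after the try only runs on success, so the else: clause was redundant and the rollback can sit in the normal flow. A reduced sketch of the same control flow (a stand-in for the Release checkout helper; the surrounding class and real method names are omitted):

from contextlib import contextmanager
from typing import Callable, Iterator

@contextmanager
def checked_out(run: Callable[[str], str], ref: str, orig_ref: str,
                with_checkout_back: bool) -> Iterator[None]:
    # "run" executes a shell command, as in the original class.
    rollback_cmd = f"git checkout {orig_ref}"
    need_rollback = True
    run(f"git checkout {ref}")
    try:
        yield
    except (Exception, KeyboardInterrupt):
        # On failure, roll back explicitly and re-raise ...
        run(f"git reset --hard; git checkout -f {orig_ref}")
        raise
    # ... so this point is only reached on success; no "else:" needed.
    if with_checkout_back and need_rollback:
        run(rollback_cmd)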

View File

@@ -22,8 +22,8 @@ from typing import (
from build_download_helper import get_gh_api
from ci_config import CI_CONFIG, BuildConfig
from env_helper import REPORT_PATH, TEMP_PATH
from ci_utils import normalize_string
from env_helper import REPORT_PATH, TEMP_PATH
logger = logging.getLogger(__name__)
@@ -296,7 +296,7 @@ class JobReport:
def load(cls, from_file=None): # type: ignore
res = {}
from_file = from_file or JOB_REPORT_FILE
with open(from_file, "r") as json_file:
with open(from_file, "r", encoding="utf-8") as json_file:
res = json.load(json_file)
# Deserialize the nested lists of TestResult
test_results_data = res.get("test_results", [])
@@ -316,7 +316,7 @@ class JobReport:
raise TypeError("Type not serializable")
to_file = to_file or JOB_REPORT_FILE
with open(to_file, "w") as json_file:
with open(to_file, "w", encoding="utf-8") as json_file:
json.dump(asdict(self), json_file, default=path_converter, indent=2)
@@ -418,7 +418,7 @@ class BuildResult:
def load_from_file(cls, file: Union[Path, str]): # type: ignore
if not Path(file).exists():
return None
with open(file, "r") as json_file:
with open(file, "r", encoding="utf-8") as json_file:
res = json.load(json_file)
return BuildResult(**res)

View File

@@ -46,14 +46,14 @@ def main():
build_name = get_build_name_for_check(check_name)
urls = read_build_urls(build_name, reports_path)
if not urls:
raise Exception("No build URLs found")
raise ValueError("No build URLs found")
for url in urls:
if url.endswith("/clickhouse"):
build_url = url
break
else:
raise Exception("Cannot find binary clickhouse among build results")
raise ValueError("Cannot find the clickhouse binary among build results")
logging.info("Got build url %s", build_url)

View File

@@ -53,14 +53,14 @@ def main():
print(build_name)
urls = read_build_urls(build_name, reports_path)
if not urls:
raise Exception("No build URLs found")
raise ValueError("No build URLs found")
for url in urls:
if url.endswith("/clickhouse"):
build_url = url
break
else:
raise Exception("Cannot find the clickhouse binary among build results")
raise ValueError("Cannot find the clickhouse binary among build results")
logging.info("Got build url %s", build_url)

View File

@@ -1,11 +1,11 @@
#!/usr/bin/env python3
import shutil
import logging
import os
import shutil
import signal
import subprocess
import tempfile
import logging
import signal
class SSHAgent:
@@ -21,7 +21,7 @@ class SSHAgent:
def start(self):
if shutil.which("ssh-agent") is None:
raise Exception("ssh-agent binary is not available")
raise RuntimeError("ssh-agent binary is not available")
self._env_backup["SSH_AUTH_SOCK"] = os.environ.get("SSH_AUTH_SOCK")
self._env_backup["SSH_OPTIONS"] = os.environ.get("SSH_OPTIONS")
@@ -54,7 +54,7 @@ class SSHAgent:
def remove(self, key_pub):
if key_pub not in self._keys:
raise Exception(f"Private key not found, public part: {key_pub}")
raise ValueError(f"Private key not found, public part: {key_pub}")
if self._keys[key_pub] > 1:
self._keys[key_pub] -= 1
@@ -107,7 +107,7 @@ class SSHAgent:
if p.returncode:
message = stderr.strip() + b"\n" + stdout.strip()
raise Exception(message.strip().decode())
raise RuntimeError(message.strip().decode())
return stdout
@@ -115,9 +115,9 @@ class SSHAgent:
class SSHKey:
def __init__(self, key_name=None, key_value=None):
if key_name is None and key_value is None:
raise Exception("Either key_name or key_value must be specified")
raise ValueError("Either key_name or key_value must be specified")
if key_name is not None and key_value is not None:
raise Exception("key_name or key_value must be specified")
raise ValueError("key_name or key_value must be specified")
if key_name is not None:
self.key = os.getenv(key_name)
else:

View File

@@ -1,14 +1,14 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""This script is used in docker images for stress tests and upgrade tests"""
from multiprocessing import cpu_count
from pathlib import Path
from subprocess import Popen, call, check_output, STDOUT, PIPE
from typing import List
import argparse
import logging
import random
import time
from multiprocessing import cpu_count
from pathlib import Path
from subprocess import PIPE, STDOUT, Popen, call, check_output
from typing import List
def get_options(i: int, upgrade_check: bool) -> str:
@@ -90,12 +90,13 @@ def run_func_test(
]
pipes = []
for i, path in enumerate(output_paths):
with open(path, "w") as op:
with open(path, "w", encoding="utf-8") as op:
full_command = (
f"{cmd} {get_options(i, upgrade_check)} {global_time_limit_option} "
f"{skip_tests_option} {upgrade_check_option}"
)
logging.info("Run func tests '%s'", full_command)
# pylint:disable-next=consider-using-with
pipes.append(Popen(full_command, shell=True, stdout=op, stderr=op))
time.sleep(0.5)
return pipes
@@ -204,6 +205,7 @@ def prepare_for_hung_check(drop_databases: bool) -> bool:
continue
command = make_query_command(f"DETACH DATABASE {db}")
# we don't wait for drop
# pylint:disable-next=consider-using-with
Popen(command, shell=True)
break
except Exception as ex:
@@ -212,7 +214,7 @@ def prepare_for_hung_check(drop_databases: bool) -> bool:
)
time.sleep(i)
else:
raise Exception(
raise RuntimeError(
"Cannot drop databases after stress tests. Probably server consumed "
"too much memory and cannot execute simple queries"
)
@@ -293,7 +295,9 @@ def main():
args = parse_args()
if args.drop_databases and not args.hung_check:
raise Exception("--drop-databases only used in hung check (--hung-check)")
raise argparse.ArgumentTypeError(
"--drop-databases only used in hung check (--hung-check)"
)
# FIXME Hung check with ubsan is temporarily disabled due to
# https://github.com/ClickHouse/ClickHouse/issues/45372
@@ -359,15 +363,17 @@ def main():
]
)
hung_check_log = args.output_folder / "hung_check.log" # type: Path
tee = Popen(["/usr/bin/tee", hung_check_log], stdin=PIPE)
res = call(cmd, shell=True, stdout=tee.stdin, stderr=STDOUT, timeout=600)
if tee.stdin is not None:
tee.stdin.close()
with Popen(["/usr/bin/tee", hung_check_log], stdin=PIPE) as tee:
res = call(cmd, shell=True, stdout=tee.stdin, stderr=STDOUT, timeout=600)
if tee.stdin is not None:
tee.stdin.close()
if res != 0 and have_long_running_queries and not suppress_hung_check:
logging.info("Hung check failed with exit code %d", res)
else:
hung_check_status = "No queries hung\tOK\t\\N\t\n"
with open(args.output_folder / "test_results.tsv", "w+") as results:
with open(
args.output_folder / "test_results.tsv", "w+", encoding="utf-8"
) as results:
results.write(hung_check_status)
hung_check_log.unlink()
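
Reviewer note: the tee pipeline is now wrapped in a with block so the Popen object is reliably closed and waited on even if call() raises; the spots where a context manager genuinely cannot be used keep a targeted # pylint:disable-next=consider-using-with comment instead. A minimal sketch of the managed tee (the log path and command are placeholders):

from pathlib import Path
from subprocess import PIPE, STDOUT, Popen, call

hung_check_log = Path("/tmp/hung_check.log")   # placeholder path
cmd = "echo 'hung check would run here'"       # placeholder command

# Popen supports the context-manager protocol: __exit__ closes the pipes and
# waits for the process, so nothing leaks if call() raises in between.
with Popen(["/usr/bin/tee", str(hung_check_log)], stdin=PIPE) as tee:
    res = call(cmd, shell=True, stdout=tee.stdin, stderr=STDOUT, timeout=600)
    if tee.stdin is not None:
        tee.stdin.close()
print(res)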

View File

@@ -23,7 +23,7 @@ def get_test_name(line):
for element in elements:
if "(" not in element and ")" not in element:
return element
raise Exception(f"No test name in line '{line}'")
raise ValueError(f"No test name in line '{line}'")
def process_results(