Mirror of https://github.com/ClickHouse/ClickHouse.git (synced 2024-11-21 15:12:02 +00:00)
Remove if-return-elif-return-else statements
This commit is contained in:
parent 117a846f87
commit 42097b0abc
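
The pattern applied throughout this commit: when every branch of an if/elif/else chain returns, the elif/else keywords add nesting without changing behavior, which is what pylint's no-else-return check (R1705) flags. A minimal sketch, condensed from the sort_key hunk further below (the _before/_after names are illustrative, not from the sources):

# Before: every branch returns, so elif/else only add nesting.
def sort_key_before(status):
    if "fail" in status.lower():
        return 0
    elif "error" in status.lower():
        return 1
    else:
        return 5

# After: flat early returns, identical behavior.
def sort_key_after(status):
    if "fail" in status.lower():
        return 0
    if "error" in status.lower():
        return 1
    return 5

assert sort_key_before("Failed") == sort_key_after("Failed") == 0
assert sort_key_before("OK") == sort_key_after("OK") == 5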
@@ -1,24 +1,23 @@
 import json
 import time
-from dataclasses import dataclass, asdict
+from dataclasses import asdict, dataclass
 from enum import Enum
 from pathlib import Path
-from typing import Dict, Optional, Any, Union, Sequence, List, Set
+from typing import Any, Dict, List, Optional, Sequence, Set, Union

 from ci_config import CI
-
-from ci_utils import Utils, GH
+from ci_utils import GH, Utils
 from commit_status_helper import CommitStatusData
+from digest_helper import JobDigester
 from env_helper import (
-    TEMP_PATH,
     CI_CONFIG_PATH,
-    S3_BUILDS_BUCKET,
     GITHUB_RUN_URL,
     REPORT_PATH,
+    S3_BUILDS_BUCKET,
+    TEMP_PATH,
 )
 from report import BuildResult
 from s3_helper import S3Helper
-from digest_helper import JobDigester


 @dataclass
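Several hunks in this commit, like the one above, only reorder imports. The rule visible in the diff (the commit message doesn't name a tool, but it matches isort-style sorting) alphabetizes names within each statement and modules within their group, with no behavioral change. A minimal sketch, condensed from the hunk above:

# Before: names in arbitrary order.
from dataclasses import dataclass, asdict
from typing import Dict, Optional, Any

# After: alphabetized; semantics unchanged.
from dataclasses import asdict, dataclass
from typing import Any, Dict, Optional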
@@ -387,8 +386,7 @@ class CiCache:
         res = record_key in self.records[record_type]
         if release_branch:
             return res and self.records[record_type][record_key].release_branch
-        else:
-            return res
+        return res

     def push(
         self,
@@ -1,9 +1,10 @@
 import re
-from dataclasses import dataclass, asdict
-from typing import Optional, List, Dict, Any, Iterable
+from dataclasses import asdict, dataclass
+from typing import Any, Dict, Iterable, List, Optional

 from ci_config import CI
-from git_helper import Runner as GitRunner, GIT_PREFIX
+from git_helper import GIT_PREFIX
+from git_helper import Runner as GitRunner
 from pr_info import PRInfo

 # pylint: disable=too-many-return-statements
@@ -156,8 +157,7 @@ class CiSettings:
                     f"Job [{job}] selected by GH label [{job_config.run_by_label}] - pass"
                 )
                 return True
-            else:
-                return False
+            return False

         # do not exclude builds
         if self.exclude_keywords and not CI.is_build_job(job):
@@ -195,7 +195,8 @@ class CiSettings:

         if job_config.release_only and not is_release:
             return False
-        elif job_config.pr_only and not is_pr and not is_mq:
+
+        if job_config.pr_only and not is_pr and not is_mq:
             return False

         return not to_deny
@@ -117,8 +117,7 @@ class GH:
         res = cls.get_workflow_results()
         if wf_job_name in res:
             return res[wf_job_name]["result"]  # type: ignore
-        else:
-            return None
+        return None

     @staticmethod
     def print_in_group(group_name: str, lines: Union[Any, List[Any]]) -> None:
@@ -2,27 +2,26 @@ import argparse
 import dataclasses
 import json
 import os

 from contextlib import contextmanager
 from copy import copy
 from pathlib import Path
 from typing import Iterator, List

-from git_helper import Git, GIT_PREFIX
-from ssh import SSHAgent
-from s3_helper import S3Helper
-from ci_utils import Shell, GH
 from ci_buddy import CIBuddy
+from ci_config import CI
+from ci_utils import GH, Shell
+from git_helper import GIT_PREFIX, Git
+from s3_helper import S3Helper
+from ssh import SSHAgent
 from version_helper import (
     FILE_WITH_VERSION_PATH,
     GENERATED_CONTRIBUTORS,
+    VersionType,
     get_abs_path,
     get_version_from_repo,
     update_cmake_version,
     update_contributors,
-    VersionType,
 )
-from ci_config import CI

 CMAKE_PATH = get_abs_path(FILE_WITH_VERSION_PATH)
 CONTRIBUTORS_PATH = get_abs_path(GENERATED_CONTRIBUTORS)
@@ -522,12 +521,11 @@ class PackageDownloader:
             return (
                 "amd64" if repo_type in (RepoTypes.DEBIAN, RepoTypes.TGZ) else "x86_64"
             )
-        elif package_arch == CI.BuildNames.PACKAGE_AARCH64:
+        if package_arch == CI.BuildNames.PACKAGE_AARCH64:
             return (
                 "arm64" if repo_type in (RepoTypes.DEBIAN, RepoTypes.TGZ) else "aarch64"
             )
-        else:
-            assert False, "BUG"
+        assert False, "BUG"

     def __init__(self, release, commit_sha, version):
         assert version.startswith(release), "Invalid release branch or version"
@@ -6,9 +6,9 @@ import os
 from pathlib import Path
 from typing import Any, Dict, List, Optional

-from env_helper import ROOT_DIR, DOCKER_TAG
-from get_robot_token import get_parameter_from_ssm
 from ci_utils import Shell
+from env_helper import DOCKER_TAG, ROOT_DIR
+from get_robot_token import get_parameter_from_ssm

 IMAGES_FILE_PATH = Path("docker/images.json")

@@ -58,9 +58,8 @@ def get_docker_image(image_name: str) -> DockerImage:
             image_name in tags_map
         ), "Image name does not exist in provided DOCKER_TAG json string"
         return DockerImage(image_name, tags_map[image_name])
-    else:
-        # DOCKER_TAG is a tag itself
-        return DockerImage(image_name, DOCKER_TAG)
+    # DOCKER_TAG is a tag itself
+    return DockerImage(image_name, DOCKER_TAG)


 class DockerImageData:
@@ -25,11 +25,7 @@ from pr_info import PRInfo
 from report import FAILURE, SUCCESS, JobReport, TestResult, TestResults
 from stopwatch import Stopwatch
 from tee_popen import TeePopen
-from version_helper import (
-    ClickHouseVersion,
-    get_version_from_repo,
-    version_arg,
-)
+from version_helper import ClickHouseVersion, get_version_from_repo, version_arg

 git = Git(ignore_no_tags=True)

@@ -138,14 +134,13 @@ def retry_popen(cmd: str, log_file: Path) -> int:
         retcode = process.wait()
         if retcode == 0:
             return 0
-        else:
-            # From time to time docker build may failed. Curl issues, or even push
-            logging.error(
-                "The following command failed, sleep %s before retry: %s",
-                sleep_seconds,
-                cmd,
-            )
-            time.sleep(sleep_seconds)
+        # From time to time docker build may failed. Curl issues, or even push
+        logging.error(
+            "The following command failed, sleep %s before retry: %s",
+            sleep_seconds,
+            cmd,
+        )
+        time.sleep(sleep_seconds)
     return retcode


@@ -851,16 +851,15 @@ def create_test_html_report(
     def sort_key(status):
         if "fail" in status.lower():
             return 0
-        elif "error" in status.lower():
+        if "error" in status.lower():
             return 1
-        elif "not" in status.lower():
+        if "not" in status.lower():
             return 2
-        elif "ok" in status.lower():
+        if "ok" in status.lower():
             return 10
-        elif "success" in status.lower():
+        if "success" in status.lower():
             return 9
-        else:
-            return 5
+        return 5

     test_results.sort(key=lambda result: sort_key(result.status))

@@ -222,7 +222,7 @@ class ClickHouseVersion:
         for part in ("major", "minor", "patch", "tweak"):
             if getattr(self, part) < getattr(other, part):
                 return True
-            elif getattr(self, part) > getattr(other, part):
+            if getattr(self, part) > getattr(other, part):
                 return False

         return False
@@ -2,17 +2,18 @@

 # pylint: disable=too-many-return-statements
 # pylint: disable=global-variable-not-assigned
 # pylint: disable=global-statement
 # pylint: disable=too-many-lines
 # pylint: disable=anomalous-backslash-in-string
 # pylint: disable=protected-access

 import copy
 import enum
-import tempfile
 import glob
+
 # Not requests, to avoid requiring extra dependency.
 import http.client
+import io
 import itertools
 import json
 import math
@@ -28,21 +29,20 @@ import socket
 import string
 import subprocess
 import sys
+import tempfile
 import traceback
 import urllib.parse
-import io

 # for crc32
 import zlib
 from argparse import ArgumentParser
+from ast import literal_eval as make_tuple
+from contextlib import redirect_stdout
 from datetime import datetime, timedelta
 from errno import ESRCH
 from subprocess import PIPE, Popen
 from time import sleep, time
 from typing import Dict, List, Optional, Set, Tuple, Union
-from contextlib import redirect_stdout
-from ast import literal_eval as make_tuple
-

 try:
     import termcolor  # type: ignore
@@ -232,8 +232,7 @@ def trim_for_log(s):
     if len(lines) > 10000:
         separator = "-" * 40 + str(len(lines) - 10000) + " lines are hidden" + "-" * 40
         return "\n".join(lines[:5000] + [] + [separator] + [] + lines[-5000:])
-    else:
-        return "\n".join(lines)
+    return "\n".join(lines)


 def is_valid_utf_8(fname):
@@ -478,8 +477,7 @@ def get_zookeeper_session_uptime(args):
                     """,
                 )
             )
-        else:
-            return int(clickhouse_execute(args, "SELECT zookeeperSessionUptime()"))
+        return int(clickhouse_execute(args, "SELECT zookeeperSessionUptime()"))
     except Exception:
         return None

@@ -512,18 +510,17 @@ def get_processlist_size(args):
                 """,
             ).strip()
         )
-    else:
-        return int(
-            clickhouse_execute(
-                args,
-                """
+    return int(
+        clickhouse_execute(
+            args,
+            """
                 SELECT
                 count()
                 FROM system.processes
                 WHERE query NOT LIKE '%system.processes%'
                 """,
-            ).strip()
-        )
+        ).strip()
+    )


 def get_processlist_with_stacktraces(args):
@@ -552,10 +549,9 @@ def get_processlist_with_stacktraces(args):
             },
             timeout=120,
         )
-    else:
-        return clickhouse_execute(
-            args,
-            """
+    return clickhouse_execute(
+        args,
+        """
             SELECT
             p.*,
             arrayStringConcat(groupArray('Thread ID ' || toString(s.thread_id) || '\n' || arrayStringConcat(arrayMap(
@@ -568,11 +564,11 @@ def get_processlist_with_stacktraces(args):
             GROUP BY p.*
             ORDER BY elapsed DESC FORMAT Vertical
             """,
-            settings={
-                "allow_introspection_functions": 1,
-            },
-            timeout=120,
-        )
+        settings={
+            "allow_introspection_functions": 1,
+        },
+        timeout=120,
+    )


 def get_transactions_list(args):
@@ -583,8 +579,7 @@ def get_transactions_list(args):
                 "SELECT materialize((hostName(), tcpPort())) as host, * FROM "
                 "clusterAllReplicas('test_cluster_database_replicated', system.transactions)",
             )
-        else:
-            return clickhouse_execute_json(args, "select * from system.transactions")
+        return clickhouse_execute_json(args, "select * from system.transactions")
     except Exception as e:
         return f"Cannot get list of transactions: {e}"

@@ -718,8 +713,7 @@ def get_server_pid():
 def colored(text, args, color=None, on_color=None, attrs=None):
     if termcolor and (sys.stdout.isatty() or args.force_color):
         return termcolor.colored(text, color, on_color, attrs)
-    else:
-        return text
+    return text


 class TestStatus(enum.Enum):
@@ -777,8 +771,7 @@ def threshold_generator(always_on_prob, always_off_prob, min_val, max_val):

         if isinstance(min_val, int) and isinstance(max_val, int):
             return random.randint(min_val, max_val)
-        else:
-            return random.uniform(min_val, max_val)
+        return random.uniform(min_val, max_val)

     return gen

@@ -1239,48 +1232,48 @@ class TestCase:
         if tags and ("disabled" in tags) and not args.disabled:
             return FailureReason.DISABLED

-        elif args.private and self.name in suite.private_skip_list:
+        if args.private and self.name in suite.private_skip_list:
             return FailureReason.NOT_SUPPORTED_IN_PRIVATE

-        elif args.cloud and ("no-replicated-database" in tags):
+        if args.cloud and ("no-replicated-database" in tags):
             return FailureReason.REPLICATED_DB

-        elif args.cloud and self.name in suite.cloud_skip_list:
+        if args.cloud and self.name in suite.cloud_skip_list:
             return FailureReason.NOT_SUPPORTED_IN_CLOUD

-        elif (
+        if (
            os.path.exists(os.path.join(suite.suite_path, self.name) + ".disabled")
            and not args.disabled
        ):
            return FailureReason.DISABLED

-        elif "no-parallel-replicas" in tags and args.no_parallel_replicas:
+        if "no-parallel-replicas" in tags and args.no_parallel_replicas:
             return FailureReason.NO_PARALLEL_REPLICAS

-        elif args.skip and any(s in self.name for s in args.skip):
+        if args.skip and any(s in self.name for s in args.skip):
             return FailureReason.SKIP

-        elif not USE_JINJA and self.ext.endswith("j2"):
+        if not USE_JINJA and self.ext.endswith("j2"):
             return FailureReason.NO_JINJA

-        elif (
+        if (
            tags
            and (("zookeeper" in tags) or ("replica" in tags))
            and not args.zookeeper
        ):
            return FailureReason.NO_ZOOKEEPER

-        elif (
+        if (
            tags
            and (("shard" in tags) or ("distributed" in tags) or ("global" in tags))
            and not args.shard
        ):
            return FailureReason.NO_SHARD

-        elif tags and ("no-fasttest" in tags) and args.fast_tests_only:
+        if tags and ("no-fasttest" in tags) and args.fast_tests_only:
             return FailureReason.FAST_ONLY

-        elif (
+        if (
            tags
            and (("long" in tags) or ("deadlock" in tags) or ("race" in tags))
            and args.no_long
@@ -1288,37 +1281,37 @@ class TestCase:
             # Tests for races and deadlocks usually are run in a loop for a significant amount of time
             return FailureReason.NO_LONG

-        elif tags and ("no-replicated-database" in tags) and args.replicated_database:
+        if tags and ("no-replicated-database" in tags) and args.replicated_database:
             return FailureReason.REPLICATED_DB

-        elif tags and ("no-distributed-cache" in tags) and args.distributed_cache:
+        if tags and ("no-distributed-cache" in tags) and args.distributed_cache:
             return FailureReason.DISTRIBUTED_CACHE

-        elif (
+        if (
            tags
            and ("atomic-database" in tags)
            and (args.replicated_database or args.db_engine not in (None, "Atomic"))
        ):
            return FailureReason.NON_ATOMIC_DB

-        elif (
+        if (
            tags
            and ("no-shared-merge-tree" in tags)
            and args.replace_replicated_with_shared
        ):
            return FailureReason.SHARED_MERGE_TREE

-        elif tags and ("no-s3-storage" in tags) and args.s3_storage:
+        if tags and ("no-s3-storage" in tags) and args.s3_storage:
             return FailureReason.S3_STORAGE
-        elif tags and ("no-azure-blob-storage" in tags) and args.azure_blob_storage:
+        if tags and ("no-azure-blob-storage" in tags) and args.azure_blob_storage:
             return FailureReason.AZURE_BLOB_STORAGE
-        elif (
+        if (
            tags
            and ("no-object-storage" in tags)
            and (args.azure_blob_storage or args.s3_storage)
        ):
            return FailureReason.OBJECT_STORAGE
-        elif (
+        if (
            tags
            and "no-object-storage-with-slow-build" in tags
            and (args.s3_storage or args.azure_blob_storage)
@@ -1326,17 +1319,15 @@ class TestCase:
         ):
             return FailureReason.OBJECT_STORAGE

-        elif "no-batch" in tags and (
+        if "no-batch" in tags and (
             args.run_by_hash_num is not None or args.run_by_hash_total is not None
         ):
             return FailureReason.SKIP

-        elif "no-flaky-check" in tags and (
-            1 == int(os.environ.get("IS_FLAKY_CHECK", 0))
-        ):
+        if "no-flaky-check" in tags and (1 == int(os.environ.get("IS_FLAKY_CHECK", 0))):
             return FailureReason.SKIP

-        elif tags:
+        if tags:
             for build_flag in args.build_flags:
                 if "no-" + build_flag in tags:
                     return FailureReason.BUILD
@@ -1559,8 +1550,7 @@ class TestCase:
     def print_test_time(test_time) -> str:
         if args.print_time:
             return f" {test_time:.2f} sec."
-        else:
-            return ""
+        return ""

     def process_result(self, result: TestResult, messages):
         description_full = messages[result.status]
@@ -2009,14 +1999,13 @@ class TestSuite:
     def get_comment_sign(filename):
         if filename.endswith(".sql") or filename.endswith(".sql.j2"):
             return "--"
-        elif (
+        if (
             filename.endswith(".sh")
             or filename.endswith(".py")
             or filename.endswith(".expect")
         ):
             return "#"
-        else:
-            raise TestException(f"Unknown file_extension: {filename}")
+        raise TestException(f"Unknown file_extension: {filename}")

     def parse_tags_from_line(line, comment_sign) -> Set[str]:
         if not line.startswith(comment_sign):
@@ -2698,17 +2687,16 @@ def do_run_tests(jobs, test_suite: TestSuite):
         )

         return len(test_suite.sequential_tests) + len(test_suite.parallel_tests)
-    else:
-        num_tests = len(test_suite.all_tests)
-        run_tests_array(
-            (
-                test_suite.all_tests,
-                num_tests,
-                test_suite,
-                False,
-            )
+    num_tests = len(test_suite.all_tests)
+    run_tests_array(
+        (
+            test_suite.all_tests,
+            num_tests,
+            test_suite,
+            False,
         )
-        return num_tests
+    )
+    return num_tests


 def is_test_from_dir(suite_dir, case):
@@ -3198,11 +3186,9 @@ def get_additional_client_options(args):
         client_options = " ".join("--" + option for option in args.client_option)
         if "CLICKHOUSE_CLIENT_OPT" in os.environ:
             return os.environ["CLICKHOUSE_CLIENT_OPT"] + " " + client_options
-        else:
-            return client_options
-    else:
-        if "CLICKHOUSE_CLIENT_OPT" in os.environ:
-            return os.environ["CLICKHOUSE_CLIENT_OPT"]
+        return client_options
+    if "CLICKHOUSE_CLIENT_OPT" in os.environ:
+        return os.environ["CLICKHOUSE_CLIENT_OPT"]
     return ""


@@ -8,6 +8,7 @@ import string
 from contextlib import contextmanager

+import pyodbc  # pylint:disable=import-error; for style check

 from exceptions import ProgramError

 logger = logging.getLogger("connection")
@@ -276,10 +277,9 @@ def execute_request(request, connection):
             rows = cursor.fetchall()
             connection.commit()
             return ExecResult().as_ok(rows=rows, description=cursor.description)
-        else:
-            logging.debug("request doesn't have a description")
-            connection.commit()
-            return ExecResult().as_ok()
+        logging.debug("request doesn't have a description")
+        connection.commit()
+        return ExecResult().as_ok()
     except (pyodbc.Error, sqlite3.DatabaseError) as err:
         return ExecResult().as_exception(err)
     finally: