From 071a5af96e6ec93a7417c5cc3df1d0f6dfc1fa7d Mon Sep 17 00:00:00 2001 From: Azat Khuzhin Date: Tue, 12 Oct 2021 21:06:26 +0300 Subject: [PATCH 1/9] Revert "docker: add pandas/clickhouse_driver into test images" This reverts commit e07a6f3fc0ea0b496483287d85b50d29f5a8c330. --- docker/test/fasttest/Dockerfile | 2 +- docker/test/fuzzer/Dockerfile | 2 +- docker/test/stateless/Dockerfile | 2 +- docker/test/style/Dockerfile | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docker/test/fasttest/Dockerfile b/docker/test/fasttest/Dockerfile index f50c65bb9f2..798910fb952 100644 --- a/docker/test/fasttest/Dockerfile +++ b/docker/test/fasttest/Dockerfile @@ -67,7 +67,7 @@ RUN apt-get update \ unixodbc \ --yes --no-install-recommends -RUN pip3 install numpy scipy pandas Jinja2 pandas clickhouse_driver +RUN pip3 install numpy scipy pandas Jinja2 # This symlink required by gcc to find lld compiler RUN ln -s /usr/bin/lld-${LLVM_VERSION} /usr/bin/ld.lld diff --git a/docker/test/fuzzer/Dockerfile b/docker/test/fuzzer/Dockerfile index 13353bc2960..6444e745c47 100644 --- a/docker/test/fuzzer/Dockerfile +++ b/docker/test/fuzzer/Dockerfile @@ -27,7 +27,7 @@ RUN apt-get update \ && apt-get clean \ && rm -rf /var/lib/apt/lists/* -RUN pip3 install Jinja2 pandas clickhouse_driver +RUN pip3 install Jinja2 COPY * / diff --git a/docker/test/stateless/Dockerfile b/docker/test/stateless/Dockerfile index a5733d11dd2..7de8c061673 100644 --- a/docker/test/stateless/Dockerfile +++ b/docker/test/stateless/Dockerfile @@ -34,7 +34,7 @@ RUN apt-get update -y \ postgresql-client \ sqlite3 -RUN pip3 install numpy scipy pandas Jinja2 clickhouse_driver +RUN pip3 install numpy scipy pandas Jinja2 RUN mkdir -p /tmp/clickhouse-odbc-tmp \ && wget -nv -O - ${odbc_driver_url} | tar --strip-components=1 -xz -C /tmp/clickhouse-odbc-tmp \ diff --git a/docker/test/style/Dockerfile b/docker/test/style/Dockerfile index 64cc0c9c7b7..33cdb9db57a 100644 --- a/docker/test/style/Dockerfile +++ b/docker/test/style/Dockerfile @@ -10,7 +10,7 @@ RUN apt-get update && env DEBIAN_FRONTEND=noninteractive apt-get install --yes \ python3-pip \ pylint \ yamllint \ - && pip3 install codespell pandas clickhouse_driver + && pip3 install codespell COPY run.sh / COPY process_style_check_result.py / From e2e62ce2735a70bf77fdf3f2277728a338ab034f Mon Sep 17 00:00:00 2001 From: Azat Khuzhin Date: Tue, 12 Oct 2021 21:06:26 +0300 Subject: [PATCH 2/9] clickhouse-test: replace clickhouse-driver with http interface (via http.client) Cons of clickhouse-driver: - it is one more extra dependency - it does not have correct timeouts (only for socket operations, and this is not the same, so we need to set timeout by ourself) - it is one more thing which can break (@alesapin) --- tests/clickhouse-test | 176 +++++++++++++++++++----------------------- 1 file changed, 81 insertions(+), 95 deletions(-) diff --git a/tests/clickhouse-test b/tests/clickhouse-test index 62860a36fc7..d73c73a3650 100755 --- a/tests/clickhouse-test +++ b/tests/clickhouse-test @@ -13,6 +13,10 @@ import re import copy import traceback import math +# Not requests, to avoid requiring extra dependency. 
+import http.client +import urllib.parse +import json from argparse import ArgumentParser from typing import Tuple, Union, Optional, Dict, Set, List @@ -34,9 +38,6 @@ import multiprocessing import socket from contextlib import closing -import clickhouse_driver -import pandas - USE_JINJA = True try: import jinja2 @@ -51,53 +52,60 @@ MESSAGES_TO_RETRY = [ "DB::Exception: Cannot enqueue query", "is executing longer than distributed_ddl_task_timeout" # FIXME ] -error_codes = clickhouse_driver.errors.ErrorCodes -error_codes.NOT_A_LEADER = 529 -ERROR_CODES_TO_RETRY = [ - error_codes.ALL_CONNECTION_TRIES_FAILED, - error_codes.DATABASE_NOT_EMPTY, - error_codes.NOT_A_LEADER, - error_codes.UNFINISHED, -] MAX_RETRIES = 3 TEST_FILE_EXTENSIONS = ['.sql', '.sql.j2', '.sh', '.py', '.expect'] -class Client(clickhouse_driver.Client): - # return first column of the first row - def execute_one(self, *args, **kwargs): - return super().execute(*args, **kwargs)[0][0] +class HTTPError(Exception): + def __init__(self, message=None, code=None): + self.message = message + self.code = code + super().__init__(message) - # return pandas.DataFrame - def execute_pandas(self, *args, **kwargs): - data = super().execute(*args, **kwargs, with_column_types=True) - return Client.__combine(data) + def __str__(self): + return 'Code: {}. {}'.format(self.code, self.message) - @staticmethod - def __combine(data): - cols = data[1] - rows = data[0] - header = [ i[0] for i in cols ] - data = pandas.DataFrame(data=rows, columns=header) - return data +# Helpers to execute queries via HTTP interface. +def clickhouse_execute_http(base_args, query, timeout=30, settings=None, default_format=None): + client = http.client.HTTPConnection( + host=base_args.tcp_host, + port=base_args.http_port, + timeout=timeout) -# Helpers -def make_clickhouse_client(base_args): - return Client(host=base_args.tcp_host, port=base_args.tcp_port, - # hung check in stress tests may remove the database, - # hence we should use 'system'. - database='system', - settings=get_additional_client_options_dict(base_args)) + timeout = int(timeout) + params = { + 'query': query, -def clickhouse_execute_one(base_args, *args, **kwargs): - return make_clickhouse_client(base_args).execute_one(*args, **kwargs) + 'connect_timeout': timeout, + 'receive_timeout': timeout, + 'send_timeout': timeout, -def clickhouse_execute(base_args, *args, **kwargs): - return make_clickhouse_client(base_args).execute(*args, **kwargs) + 'http_connection_timeout': timeout, + 'http_receive_timeout': timeout, + 'http_send_timeout': timeout, + } + if settings is not None: + params.update(settings) + if default_format is not None: + params['default_format'] = default_format -def clickhouse_execute_pandas(base_args, *args, **kwargs): - return make_clickhouse_client(base_args).execute_pandas(*args, **kwargs) + client.request('POST', '/?' 
+ urllib.parse.urlencode(params)) + res = client.getresponse() + data = res.read() + if res.status != 200: + raise HTTPError(data.decode(), res.status) + + return data + +def clickhouse_execute(base_args, query, timeout=30, settings=None): + return clickhouse_execute_http(base_args, query, timeout, settings).strip() + +def clickhouse_execute_json(base_args, query, timeout=30, settings=None): + data = clickhouse_execute_http(base_args, query, timeout, settings, 'JSONEachRow') + if not data: + return None + return json.loads(data) class Terminated(KeyboardInterrupt): @@ -144,12 +152,12 @@ def get_db_engine(args, database_name): def get_zookeeper_session_uptime(args): try: if args.replicated_database: - return int(clickhouse_execute_one(args, """ + return int(clickhouse_execute(args, """ SELECT min(materialize(zookeeperSessionUptime())) FROM clusterAllReplicas('test_cluster_database_replicated', system.one) """)) else: - return int(clickhouse_execute_one(args, 'SELECT zookeeperSessionUptime()')) + return int(clickhouse_execute(args, 'SELECT zookeeperSessionUptime()')) except: return None @@ -163,30 +171,16 @@ def need_retry(args, stdout, stderr, total_time): return True return any(msg in stdout for msg in MESSAGES_TO_RETRY) or any(msg in stderr for msg in MESSAGES_TO_RETRY) -def need_retry_error(args, error, total_time): - # Sometimes we may get unexpected exception like "Replica is readonly" or "Shutdown is called for table" - # instead of "Session expired" or "Connection loss" - # Retry if session was expired during test execution - session_uptime = get_zookeeper_session_uptime(args) - if session_uptime is not None and session_uptime < math.ceil(total_time): - return True - if isinstance(error, clickhouse_driver.errors.Error): - if error.code in ERROR_CODES_TO_RETRY: - return True - if any(msg in error.message for msg in MESSAGES_TO_RETRY): - return True - return False - def get_processlist(args): if args.replicated_database: - return clickhouse_execute_pandas(args, """ + return clickhouse_execute_json(args, """ SELECT materialize((hostName(), tcpPort())) as host, * FROM clusterAllReplicas('test_cluster_database_replicated', system.processes) WHERE query NOT LIKE '%system.processes%' """) else: - return clickhouse_execute_pandas(args, 'SHOW PROCESSLIST') + return clickhouse_execute_json(args, 'SHOW PROCESSLIST') # collect server stacktraces using gdb @@ -374,7 +368,7 @@ class TestCase: try: clickhouse_execute(args, "CREATE DATABASE " + database + get_db_engine(testcase_args, database), settings={'log_comment': testcase_basename}) - except (TimeoutError, clickhouse_driver.errors.SocketTimeoutError): + except TimeoutError: total_time = (datetime.now() - testcase_args.testcase_start_time).total_seconds() return None, "", f"Timeout creating database {database} before test", total_time @@ -593,11 +587,8 @@ class TestCase: if need_drop_database: seconds_left = max(args.timeout - (datetime.now() - start_time).total_seconds(), 20) try: - client = make_clickhouse_client(args) - client.connection.force_connect() - with client.connection.timeout_setter(seconds_left): - client.execute("DROP DATABASE " + database) - except (TimeoutError, clickhouse_driver.errors.SocketTimeoutError): + clickhouse_execute(args, "DROP DATABASE " + database, timeout=seconds_left) + except TimeoutError: total_time = (datetime.now() - start_time).total_seconds() return None, "", f"Timeout dropping database {database} after test", total_time shutil.rmtree(args.test_tmp_dir) @@ -803,7 +794,7 @@ class TestSuite: @staticmethod 
def readTestSuite(args, suite_dir_name: str): def is_data_present(): - return int(clickhouse_execute_one(args, 'EXISTS TABLE test.hits')) + return int(clickhouse_execute(args, 'EXISTS TABLE test.hits')) base_dir = os.path.abspath(args.queries) tmp_dir = os.path.abspath(args.tmp) @@ -976,7 +967,7 @@ def check_server_started(args): print(" OK") sys.stdout.flush() return True - except (ConnectionRefusedError, ConnectionResetError, clickhouse_driver.errors.NetworkError): + except (ConnectionRefusedError, ConnectionResetError): print('.', end='') sys.stdout.flush() retry_count -= 1 @@ -1003,31 +994,31 @@ class BuildFlags(): def collect_build_flags(args): result = [] - value = clickhouse_execute_one(args, "SELECT value FROM system.build_options WHERE name = 'CXX_FLAGS'") - if '-fsanitize=thread' in value: + value = clickhouse_execute(args, "SELECT value FROM system.build_options WHERE name = 'CXX_FLAGS'") + if b'-fsanitize=thread' in value: result.append(BuildFlags.THREAD) - elif '-fsanitize=address' in value: + elif b'-fsanitize=address' in value: result.append(BuildFlags.ADDRESS) - elif '-fsanitize=undefined' in value: + elif b'-fsanitize=undefined' in value: result.append(BuildFlags.UNDEFINED) - elif '-fsanitize=memory' in value: + elif b'-fsanitize=memory' in value: result.append(BuildFlags.MEMORY) - value = clickhouse_execute_one(args, "SELECT value FROM system.build_options WHERE name = 'BUILD_TYPE'") - if 'Debug' in value: + value = clickhouse_execute(args, "SELECT value FROM system.build_options WHERE name = 'BUILD_TYPE'") + if b'Debug' in value: result.append(BuildFlags.DEBUG) - elif 'RelWithDebInfo' in value or 'Release' in value: + elif b'RelWithDebInfo' in value or b'Release' in value: result.append(BuildFlags.RELEASE) - value = clickhouse_execute_one(args, "SELECT value FROM system.build_options WHERE name = 'UNBUNDLED'") - if value in ('ON', '1'): + value = clickhouse_execute(args, "SELECT value FROM system.build_options WHERE name = 'UNBUNDLED'") + if value in (b'ON', b'1'): result.append(BuildFlags.UNBUNDLED) - value = clickhouse_execute_one(args, "SELECT value FROM system.settings WHERE name = 'default_database_engine'") - if value == 'Ordinary': + value = clickhouse_execute(args, "SELECT value FROM system.settings WHERE name = 'default_database_engine'") + if value == b'Ordinary': result.append(BuildFlags.ORDINARY_DATABASE) - value = int(clickhouse_execute_one(args, "SELECT value FROM system.merge_tree_settings WHERE name = 'min_bytes_for_wide_part'")) + value = int(clickhouse_execute(args, "SELECT value FROM system.merge_tree_settings WHERE name = 'min_bytes_for_wide_part'")) if value == 0: result.append(BuildFlags.POLYMORPHIC_PARTS) @@ -1173,9 +1164,9 @@ def main(args): start_time = datetime.now() try: clickhouse_execute(args, "CREATE DATABASE IF NOT EXISTS " + db_name + get_db_engine(args, db_name)) - except Exception as e: + except HTTPError as e: total_time = (datetime.now() - start_time).total_seconds() - if not need_retry_error(args, e, total_time): + if not need_retry(args, e.message, e.message, total_time): break create_database_retries += 1 @@ -1204,13 +1195,13 @@ def main(args): # Some queries may execute in background for some time after test was finished. This is normal. 
for _ in range(1, 60): processlist = get_processlist(args) - if processlist.empty: + if not processlist: break sleep(1) - if not processlist.empty: + if processlist: print(colored("\nFound hung queries in processlist:", args, "red", attrs=["bold"])) - print(processlist) + print(json.dumps(processlist, indent=4)) print_stacktraces() exit_code.value = 1 @@ -1262,14 +1253,6 @@ def get_additional_client_options_url(args): return '&'.join(args.client_option) return '' -def get_additional_client_options_dict(args): - settings = {} - if args.client_option: - for key, value in map(lambda x: x.split('='), args.client_option): - settings[key] = value - return settings - - if __name__ == '__main__': stop_time = None exit_code = multiprocessing.Value("i", 0) @@ -1401,6 +1384,13 @@ if __name__ == '__main__': else: args.tcp_port = 9000 + http_port = os.getenv("CLICKHOUSE_PORT_HTTP") + if http_port is not None: + args.http_port = int(http_port) + args.client += f" --port={http_port}" + else: + args.http_port = 8123 + client_database = os.getenv("CLICKHOUSE_DATABASE") if client_database is not None: args.client += f' --database={client_database}' @@ -1434,8 +1424,4 @@ if __name__ == '__main__': if args.jobs is None: args.jobs = multiprocessing.cpu_count() - # configure pandas to make it more like Vertical format - pandas.options.display.max_columns = None - pandas.options.display.width = None - main(args) From 50b95bd89d9b6d6b563d8d084379a7f7d5341ed8 Mon Sep 17 00:00:00 2001 From: Azat Khuzhin Date: Tue, 12 Oct 2021 21:06:26 +0300 Subject: [PATCH 3/9] clickhouse-test: passthrough log_comment for DROP DATABASE too --- tests/clickhouse-test | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/tests/clickhouse-test b/tests/clickhouse-test index d73c73a3650..2e2b34ac68c 100755 --- a/tests/clickhouse-test +++ b/tests/clickhouse-test @@ -352,6 +352,7 @@ class TestCase: testcase_args.testcase_start_time = datetime.now() testcase_basename = os.path.basename(case_file) testcase_args.testcase_client = f"{testcase_args.client} --log_comment='{testcase_basename}'" + testcase_args.testcase_basename = testcase_basename if testcase_args.database: database = testcase_args.database @@ -367,7 +368,9 @@ class TestCase: database = 'test_{suffix}'.format(suffix=random_str()) try: - clickhouse_execute(args, "CREATE DATABASE " + database + get_db_engine(testcase_args, database), settings={'log_comment': testcase_basename}) + clickhouse_execute(args, "CREATE DATABASE " + database + get_db_engine(testcase_args, database), settings={ + 'log_comment': testcase_args.testcase_basename, + }) except TimeoutError: total_time = (datetime.now() - testcase_args.testcase_start_time).total_seconds() return None, "", f"Timeout creating database {database} before test", total_time @@ -587,7 +590,9 @@ class TestCase: if need_drop_database: seconds_left = max(args.timeout - (datetime.now() - start_time).total_seconds(), 20) try: - clickhouse_execute(args, "DROP DATABASE " + database, timeout=seconds_left) + clickhouse_execute(args, "DROP DATABASE " + database, timeout=seconds_left, settings={ + 'log_comment': args.testcase_basename, + }) except TimeoutError: total_time = (datetime.now() - start_time).total_seconds() return None, "", f"Timeout dropping database {database} after test", total_time From fe90c979b59cddc135eff510a9dc880b6bb0ef54 Mon Sep 17 00:00:00 2001 From: Azat Khuzhin Date: Sun, 10 Oct 2021 20:13:46 +0300 Subject: [PATCH 4/9] clickhouse-test: fix redirect to stderr Before #29856 `CREATE DATABASE` overwrites 
it. Reported-by: @amosbird --- tests/clickhouse-test | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/clickhouse-test b/tests/clickhouse-test index 2e2b34ac68c..b28ea2a49ac 100755 --- a/tests/clickhouse-test +++ b/tests/clickhouse-test @@ -567,7 +567,7 @@ class TestCase: # >> append to stderr (but not stdout since it is not used there), # because there are also output of per test database creation if not args.database: - pattern = '{test} > {stdout} 2>> {stderr}' + pattern = '{test} > {stdout} 2> {stderr}' else: pattern = '{test} > {stdout} 2> {stderr}' From 8dc8674298f35bdc1bffe201256e2cf1a7a4e15c Mon Sep 17 00:00:00 2001 From: Azat Khuzhin Date: Tue, 12 Oct 2021 22:27:54 +0300 Subject: [PATCH 5/9] clickhouse-test: process --client-option --- tests/clickhouse-test | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/tests/clickhouse-test b/tests/clickhouse-test index b28ea2a49ac..024b4c2ae0f 100755 --- a/tests/clickhouse-test +++ b/tests/clickhouse-test @@ -90,7 +90,7 @@ def clickhouse_execute_http(base_args, query, timeout=30, settings=None, default if default_format is not None: params['default_format'] = default_format - client.request('POST', '/?' + urllib.parse.urlencode(params)) + client.request('POST', '/?' + base_args.client_options_query_str + urllib.parse.urlencode(params)) res = client.getresponse() data = res.read() if res.status != 200: @@ -1418,7 +1418,11 @@ if __name__ == '__main__': else: os.environ['CLICKHOUSE_URL_PARAMS'] = '' - os.environ['CLICKHOUSE_URL_PARAMS'] += get_additional_client_options_url(args) + client_options_query_str = get_additional_client_options_url(args) + args.client_options_query_str = client_options_query_str + '&' + os.environ['CLICKHOUSE_URL_PARAMS'] += client_options_query_str + else: + args.client_options_query_str = '' if args.extract_from_config is None: if os.access(args.binary + '-extract-from-config', os.X_OK): From 427c428a27f418c38b4171e8171a888d9f22e31b Mon Sep 17 00:00:00 2001 From: Azat Khuzhin Date: Tue, 12 Oct 2021 22:30:34 +0300 Subject: [PATCH 6/9] clickhouse-test: process some options regardless --client --- tests/clickhouse-test | 52 +++++++++++++++++++++---------------------- 1 file changed, 26 insertions(+), 26 deletions(-) diff --git a/tests/clickhouse-test b/tests/clickhouse-test index 024b4c2ae0f..a70b8795142 100755 --- a/tests/clickhouse-test +++ b/tests/clickhouse-test @@ -1372,36 +1372,36 @@ if __name__ == '__main__': parser.print_help() sys.exit(1) - if args.configclient: - args.client += ' --config-file=' + args.configclient + if args.configclient: + args.client += ' --config-file=' + args.configclient - tcp_host = os.getenv("CLICKHOUSE_HOST") - if tcp_host is not None: - args.tcp_host = tcp_host - args.client += f' --host={tcp_host}' - else: - args.tcp_host = 'localhost' + tcp_host = os.getenv("CLICKHOUSE_HOST") + if tcp_host is not None: + args.tcp_host = tcp_host + args.client += f' --host={tcp_host}' + else: + args.tcp_host = 'localhost' - tcp_port = os.getenv("CLICKHOUSE_PORT_TCP") - if tcp_port is not None: - args.tcp_port = int(tcp_port) - args.client += f" --port={tcp_port}" - else: - args.tcp_port = 9000 + tcp_port = os.getenv("CLICKHOUSE_PORT_TCP") + if tcp_port is not None: + args.tcp_port = int(tcp_port) + args.client += f" --port={tcp_port}" + else: + args.tcp_port = 9000 - http_port = os.getenv("CLICKHOUSE_PORT_HTTP") - if http_port is not None: - args.http_port = int(http_port) - args.client += f" --port={http_port}" - else: - args.http_port = 8123 + http_port 
= os.getenv("CLICKHOUSE_PORT_HTTP") + if http_port is not None: + args.http_port = int(http_port) + args.client += f" --port={http_port}" + else: + args.http_port = 8123 - client_database = os.getenv("CLICKHOUSE_DATABASE") - if client_database is not None: - args.client += f' --database={client_database}' - args.client_database = client_database - else: - args.client_database = 'default' + client_database = os.getenv("CLICKHOUSE_DATABASE") + if client_database is not None: + args.client += f' --database={client_database}' + args.client_database = client_database + else: + args.client_database = 'default' if args.client_option: # Set options for client From 40d210367fd23846f58e431acd5f1eb16d70e1b2 Mon Sep 17 00:00:00 2001 From: Azat Khuzhin Date: Tue, 12 Oct 2021 22:33:41 +0300 Subject: [PATCH 7/9] clickhouse-test: fix catching of timeouts --- tests/clickhouse-test | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/clickhouse-test b/tests/clickhouse-test index a70b8795142..aedf5f5be67 100755 --- a/tests/clickhouse-test +++ b/tests/clickhouse-test @@ -371,7 +371,7 @@ class TestCase: clickhouse_execute(args, "CREATE DATABASE " + database + get_db_engine(testcase_args, database), settings={ 'log_comment': testcase_args.testcase_basename, }) - except TimeoutError: + except socket.timeout: total_time = (datetime.now() - testcase_args.testcase_start_time).total_seconds() return None, "", f"Timeout creating database {database} before test", total_time @@ -593,7 +593,7 @@ class TestCase: clickhouse_execute(args, "DROP DATABASE " + database, timeout=seconds_left, settings={ 'log_comment': args.testcase_basename, }) - except TimeoutError: + except socket.timeout: total_time = (datetime.now() - start_time).total_seconds() return None, "", f"Timeout dropping database {database} after test", total_time shutil.rmtree(args.test_tmp_dir) From 8d7798fa2e220d989106efdec375c81e2ace8fc3 Mon Sep 17 00:00:00 2001 From: Azat Khuzhin Date: Tue, 12 Oct 2021 22:39:43 +0300 Subject: [PATCH 8/9] clickhouse-test: do not guard CREATE DATABASE with try/catch Since it will not configure testcase args and fail eventually, and later we have a try/catch anyway, this should be enough. 
--- tests/clickhouse-test | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/tests/clickhouse-test b/tests/clickhouse-test index aedf5f5be67..133c6f4e4e8 100755 --- a/tests/clickhouse-test +++ b/tests/clickhouse-test @@ -367,13 +367,9 @@ class TestCase: database = 'test_{suffix}'.format(suffix=random_str()) - try: - clickhouse_execute(args, "CREATE DATABASE " + database + get_db_engine(testcase_args, database), settings={ - 'log_comment': testcase_args.testcase_basename, - }) - except socket.timeout: - total_time = (datetime.now() - testcase_args.testcase_start_time).total_seconds() - return None, "", f"Timeout creating database {database} before test", total_time + clickhouse_execute(args, "CREATE DATABASE " + database + get_db_engine(testcase_args, database), settings={ + 'log_comment': testcase_args.testcase_basename, + }) os.environ["CLICKHOUSE_DATABASE"] = database # Set temporary directory to match the randomly generated database, From e5bc573250d3d6938937739b05d6f8cf618722db Mon Sep 17 00:00:00 2001 From: Azat Khuzhin Date: Wed, 13 Oct 2021 00:19:16 +0300 Subject: [PATCH 9/9] clickhouse-test: fix hung check in stress test by using system database --- tests/clickhouse-test | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tests/clickhouse-test b/tests/clickhouse-test index 133c6f4e4e8..b2a9358371a 100755 --- a/tests/clickhouse-test +++ b/tests/clickhouse-test @@ -77,6 +77,10 @@ def clickhouse_execute_http(base_args, query, timeout=30, settings=None, default params = { 'query': query, + # hung check in stress tests may remove the database, + # hence we should use 'system'. + 'database': 'system', + 'connect_timeout': timeout, 'receive_timeout': timeout, 'send_timeout': timeout,
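
The patches above drop the clickhouse_driver dependency and route all test-runner queries through ClickHouse's HTTP interface using only the standard library. As an illustration only (not part of any patch), below is a minimal standalone sketch of that approach, assuming a ClickHouse server on localhost:8123 (the default HTTP port); the host, port and demo queries are placeholders, but the request layout mirrors clickhouse_execute_http() from PATCH 2/9.

#!/usr/bin/env python3
# Editor's sketch, not part of the patch series above: a simplified standalone
# version of the stdlib-only HTTP query helper that PATCH 2/9 introduces in
# tests/clickhouse-test. Assumes a ClickHouse server on localhost:8123 (the
# default HTTP port); host, port and the demo queries are illustrative.

import http.client
import json
import urllib.parse


def clickhouse_execute_http(query, host='localhost', port=8123, timeout=30,
                            settings=None, default_format=None):
    # One short-lived connection per query is enough for a test helper.
    # The timeout is applied to the socket and also passed as server-side
    # settings, so client and server give up at roughly the same time.
    conn = http.client.HTTPConnection(host=host, port=port, timeout=timeout)
    params = {
        'query': query,
        'connect_timeout': int(timeout),
        'receive_timeout': int(timeout),
        'send_timeout': int(timeout),
    }
    if settings:
        params.update(settings)
    if default_format is not None:
        params['default_format'] = default_format

    # The query and settings travel as URL parameters of a POST with an
    # empty body, mirroring (in simplified form) the helper in the patch.
    conn.request('POST', '/?' + urllib.parse.urlencode(params))
    response = conn.getresponse()
    data = response.read()
    if response.status != 200:
        # On errors ClickHouse returns the error text in the response body.
        raise RuntimeError('Code: {}. {}'.format(response.status, data.decode()))
    return data


if __name__ == '__main__':
    # Raw result (TabSeparated by default), returned as bytes.
    print(clickhouse_execute_http('SELECT version()').decode().strip())

    # JSONEachRow makes rows easy to consume as dicts, which is what the
    # patch's clickhouse_execute_json() relies on for SHOW PROCESSLIST.
    rows = clickhouse_execute_http(
        'SELECT number FROM system.numbers LIMIT 3',
        default_format='JSONEachRow')
    for line in rows.decode().splitlines():
        print(json.loads(line))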