#!/usr/bin/env python3

import sys
import os
import os.path
import re
import json
import traceback
from argparse import ArgumentParser
import shlex
import subprocess
from subprocess import Popen
from subprocess import PIPE
from subprocess import CalledProcessError
from subprocess import TimeoutExpired
from datetime import datetime
from time import time, sleep
from errno import ESRCH
try:
    import termcolor
except ImportError:
    termcolor = None
import random
import string
import multiprocessing
from contextlib import closing


MESSAGES_TO_RETRY = [
    "DB::Exception: ZooKeeper session has been expired",
    "Coordination::Exception: Connection loss",
    "Operation timed out",
    "ConnectionPoolWithFailover: Connection failed at try",
]


def json_minify(string):
    """
    Removes all JS-style comments from a JSON string. Allows comments in skip_list.json.
    The code is taken from https://github.com/getify/JSON.minify/tree/python under the MIT license.
    """
    tokenizer = re.compile(r'"|(/\*)|(\*/)|(//)|\n|\r')
end_slashes_re = re.compile(r'(\\)*$')
in_string = False
in_multi = False
in_single = False
new_str = []
index = 0
for match in re.finditer(tokenizer, string):
if not (in_multi or in_single):
tmp = string[index:match.start()]
new_str.append(tmp)
else:
# Replace comments with white space so that the JSON parser reports
# the correct column numbers on parsing errors.
new_str.append(' ' * (match.start() - index))
index = match.end()
val = match.group()
if val == '"' and not (in_multi or in_single):
escaped = end_slashes_re.search(string, 0, match.start())
# start of string or unescaped quote character to end string
if not in_string or (escaped is None or len(escaped.group()) % 2 == 0): # noqa
in_string = not in_string
index -= 1 # include " character in next catch
elif not (in_string or in_multi or in_single):
if val == '/*':
in_multi = True
elif val == '//':
in_single = True
elif val == '*/' and in_multi and not (in_string or in_single):
in_multi = False
new_str.append(' ' * len(val))
elif val in '\r\n' and not (in_multi or in_string) and in_single:
in_single = False
elif not in_multi or in_single: # noqa
new_str.append(val)
if val in '\r\n':
new_str.append(val)
elif in_multi or in_single:
new_str.append(' ' * len(val))
new_str.append(string[index:])
return ''.join(new_str)
def remove_control_characters(s):
"""
https://github.com/html5lib/html5lib-python/issues/96#issuecomment-43438438
"""
    def str_to_int(s, default, base=10):
        if int(s, base) < 0x10000:
            return chr(int(s, base))
        return default

    s = re.sub(r"&#(\d+);?", lambda c: str_to_int(c.group(1), c.group(0)), s)
    s = re.sub(r"&#[xX]([0-9a-fA-F]+);?", lambda c: str_to_int(c.group(1), c.group(0), base=16), s)
    s = re.sub(r"[\x00-\x08\x0b\x0e-\x1f\x7f]", "", s)

    return s
def get_db_engine(args, database_name):
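    # Returns the ENGINE clause to append to CREATE DATABASE, based on the
    # --replicated-database and --db-engine options; an empty string means
    # the server default engine.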
    if args.replicated_database:
        return " ENGINE=Replicated('/test/clickhouse/db/{}', 's1', 'r1')".format(database_name)
    if args.db_engine:
        return " ENGINE=" + args.db_engine
    return "" # Will use default engine

def run_single_test(args, ext, server_logs_level, client_options, case_file, stdout_file, stderr_file):
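    # Runs one test case: creates a unique per-test database (unless --database
    # is given), runs the test with a timeout, drops the database again
    # (subject to --no-drop-if-fail) and returns (proc, stdout, stderr, total_time).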
    # print(client_options)

    start_time = datetime.now()
    if args.database:
        database = args.database
        os.environ.setdefault("CLICKHOUSE_DATABASE", database)
    else:
        # If --database is not specified, create a temporary database with a
        # unique name, and recreate and drop it for each test.
        def random_str(length=6):
            alphabet = string.ascii_lowercase + string.digits
            return ''.join(random.choice(alphabet) for _ in range(length))
        database = 'test_{suffix}'.format(suffix=random_str())

        clickhouse_proc_create = Popen(shlex.split(args.client), stdin=PIPE, stdout=PIPE, stderr=PIPE, universal_newlines=True)
        try:
            clickhouse_proc_create.communicate(("CREATE DATABASE " + database + get_db_engine(args, database)), timeout=args.timeout)
        except TimeoutExpired:
            total_time = (datetime.now() - start_time).total_seconds()
            return clickhouse_proc_create, "", "Timeout creating database {} before test".format(database), total_time

        os.environ["CLICKHOUSE_DATABASE"] = database

    # This is for .sh tests
    os.environ["CLICKHOUSE_LOG_COMMENT"] = case_file

    params = {
        'client': args.client + ' --database=' + database,
        'logs_level': server_logs_level,
        'options': client_options,
        'test': case_file,
        'stdout': stdout_file,
        'stderr': stderr_file,
    }

    pattern = '{test} > {stdout} 2> {stderr}'

    if ext == '.sql':
        pattern = "{client} --send_logs_level={logs_level} --testmode --multiquery {options} --log_comment='{test}' < " + pattern

    command = pattern.format(**params)
    # print(command)

    proc = Popen(command, shell=True, env=os.environ)

    while (datetime.now() - start_time).total_seconds() < args.timeout and proc.poll() is None:
        sleep(0.01)

    need_drop_database = not args.database
    if need_drop_database and args.no_drop_if_fail:
        maybe_passed = (proc.returncode == 0) and (proc.stderr is None) and (proc.stdout is None or 'Exception' not in proc.stdout)
        need_drop_database = not maybe_passed
    if need_drop_database:
        clickhouse_proc_create = Popen(shlex.split(args.client), stdin=PIPE, stdout=PIPE, stderr=PIPE, universal_newlines=True)
        seconds_left = max(args.timeout - (datetime.now() - start_time).total_seconds(), 10)
        try:
            clickhouse_proc_create.communicate(("DROP DATABASE " + database), timeout=seconds_left)
        except TimeoutExpired:
            # Also kill the test process, because it may be hung as well.
            if proc.returncode is None:
                try:
                    proc.kill()
                except OSError as e:
                    if e.errno != ESRCH:
                        raise
            total_time = (datetime.now() - start_time).total_seconds()
            return clickhouse_proc_create, "", "Timeout dropping database {} after test".format(database), total_time

    total_time = (datetime.now() - start_time).total_seconds()

    # Normalize randomized database names in stdout, stderr files.
    os.system("LC_ALL=C sed -i -e 's/{test_db}/default/g' {file}".format(test_db=database, file=stdout_file))
    if not args.show_db_name:
        os.system("LC_ALL=C sed -i -e 's/{test_db}/default/g' {file}".format(test_db=database, file=stderr_file))

    stdout = open(stdout_file, 'rb').read() if os.path.exists(stdout_file) else b''
    stdout = str(stdout, errors='replace', encoding='utf-8')
    stderr = open(stderr_file, 'rb').read() if os.path.exists(stderr_file) else b''
    stderr = str(stderr, errors='replace', encoding='utf-8')

    return proc, stdout, stderr, total_time


def need_retry(stderr):
    return any(msg in stderr for msg in MESSAGES_TO_RETRY)
def get_processlist(args):
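    # Returns (timed_out, output): (False, processlist text) on success,
    # or (True, "") if the query failed or timed out.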
    try:
        clickhouse_proc = Popen(shlex.split(args.client), stdin=PIPE, stdout=PIPE, stderr=PIPE)
        (stdout, _) = clickhouse_proc.communicate((b"SHOW PROCESSLIST FORMAT Vertical"), timeout=10)
        return False, stdout.decode('utf-8')
    except Exception as ex:
        print("Exception", ex)
        return True, ""


# collect server stacktraces using gdb
def get_stacktraces_from_gdb(server_pid):
    try:
        cmd = "gdb -batch -ex 'thread apply all backtrace' -p {}".format(server_pid)
        return subprocess.check_output(cmd, shell=True).decode('utf-8')
    except Exception as ex:
        print("Error occurred while receiving stack traces from gdb: {}".format(str(ex)))
        return None


# collect server stacktraces from the system.stack_trace table
# it does not work in Sandbox
def get_stacktraces_from_clickhouse(client):
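    # Uses the addressToLine/addressToSymbol/demangle introspection functions,
    # so it only works while the server can still execute queries; used as a
    # fallback when gdb gives nothing useful.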
    try:
        return subprocess.check_output("{} --allow_introspection_functions=1 --query "
            "\"SELECT arrayStringConcat(arrayMap(x, y -> concat(x, ': ', y), arrayMap(x -> addressToLine(x), trace), "
            "arrayMap(x -> demangle(addressToSymbol(x)), trace)), '\n') as trace "
            "FROM system.stack_trace format Vertical\"".format(client), shell=True, stderr=subprocess.STDOUT).decode('utf-8')
    except Exception as ex:
        print("Error occurred while receiving stack traces from client: {}".format(str(ex)))
        return None
def get_server_pid(server_tcp_port):
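    # Returns the server pid as an int, or None if it cannot be determined.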
    # lsof does not work in stress tests for some reason
    cmd_lsof = "lsof -i tcp:{port} -s tcp:LISTEN -Fp | awk '/^p[0-9]+$/{{print substr($0, 2)}}'".format(port=server_tcp_port)
    cmd_pidof = "pidof -s clickhouse-server"
    commands = [cmd_lsof, cmd_pidof]
    output = None
    for cmd in commands:
        try:
            output = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT, universal_newlines=True)
            if output:
                return int(output)
        except Exception as e:
            print("Cannot get server pid with {}, got {}: {}".format(cmd, output, e))
    return None # most likely the server is dead
def colored(text, args, color=None, on_color=None, attrs=None):
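    # Wraps termcolor.colored, falling back to plain text when termcolor is
    # not available or stdout is not a terminal (unless --force-color is set).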
    if termcolor and (sys.stdout.isatty() or args.force_color):
        return termcolor.colored(text, color, on_color, attrs)
    else:
        return text


SERVER_DIED = False
exit_code = 0
stop_time = None

def run_tests_array(all_tests_with_params):
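    # Runs a batch of tests and prints a status line for each one. Called
    # directly for sequential tests and via a multiprocessing pool for
    # parallel ones. all_tests_with_params is a tuple
    # (all_tests, suite, suite_dir, suite_tmp_dir).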
    all_tests, suite, suite_dir, suite_tmp_dir = all_tests_with_params
    global exit_code
    global SERVER_DIED
    global stop_time

    OP_SQUARE_BRACKET = colored("[", args, attrs=['bold'])
    CL_SQUARE_BRACKET = colored("]", args, attrs=['bold'])
    MSG_FAIL = OP_SQUARE_BRACKET + colored(" FAIL ", args, "red", attrs=['bold']) + CL_SQUARE_BRACKET
    MSG_UNKNOWN = OP_SQUARE_BRACKET + colored(" UNKNOWN ", args, "yellow", attrs=['bold']) + CL_SQUARE_BRACKET
    MSG_OK = OP_SQUARE_BRACKET + colored(" OK ", args, "green", attrs=['bold']) + CL_SQUARE_BRACKET
    MSG_SKIPPED = OP_SQUARE_BRACKET + colored(" SKIPPED ", args, "cyan", attrs=['bold']) + CL_SQUARE_BRACKET

    passed_total = 0
    skipped_total = 0
    failures_total = 0
    failures = 0
    failures_chain = 0

    client_options = get_additional_client_options(args)

    def print_test_time(test_time):
        if args.print_time:
            return " {0:.2f} sec.".format(test_time)
        else:
            return ''

    if all_tests:
        print("\nRunning {} {} tests.".format(len(all_tests), suite) + "\n")

    for case in all_tests:
        if SERVER_DIED:
            break

        if stop_time and time() > stop_time:
            print("\nStopping the test run because the global time limit is exceeded.\n")
            break

        case_file = os.path.join(suite_dir, case)
        (name, ext) = os.path.splitext(case)

        try:
            status = ''
            is_concurrent = multiprocessing.current_process().name != "MainProcess"
            if not is_concurrent:
                sys.stdout.flush()
                sys.stdout.write("{0:72}".format(name + ": "))
                # This flush is needed so you can see the name of a long-running
                # test before it finishes. But don't do it in parallel mode,
                # so that the lines don't mix.
                sys.stdout.flush()
            else:
                status = "{0:72}".format(name + ": ")

            if args.skip and any(s in name for s in args.skip):
                status += MSG_SKIPPED + " - skip\n"
                skipped_total += 1
            elif not args.zookeeper and ('zookeeper' in name
                    or 'replica' in name):
                status += MSG_SKIPPED + " - no zookeeper\n"
                skipped_total += 1
            elif not args.shard and ('shard' in name
                    or 'distributed' in name
                    or 'global' in name):
                status += MSG_SKIPPED + " - no shard\n"
                skipped_total += 1
            elif not args.no_long and ('long' in name
                    # Tests for races and deadlocks are usually run in a loop
                    # for a significant amount of time.
                    or 'deadlock' in name
                    or 'race' in name):
                status += MSG_SKIPPED + " - no long\n"
                skipped_total += 1
            else:
                disabled_file = os.path.join(suite_dir, name) + '.disabled'

                if os.path.exists(disabled_file) and not args.disabled:
                    message = open(disabled_file, 'r').read()
                    status += MSG_SKIPPED + " - " + message + "\n"
                else:
                    if args.testname:
                        clickhouse_proc = Popen(shlex.split(args.client), stdin=PIPE, stdout=PIPE, stderr=PIPE, universal_newlines=True)
                        failed_to_check = False
                        try:
                            clickhouse_proc.communicate(("SELECT 'Running test {suite}/{case} from pid={pid}';".format(pid = os.getpid(), case = case, suite = suite)), timeout=10)
                        except:
                            failed_to_check = True

                        if failed_to_check or clickhouse_proc.returncode != 0:
                            failures += 1
                            print("Server does not respond to health check")
                            SERVER_DIED = True
                            break
                    file_suffix = ('.' + str(os.getpid())) if is_concurrent and args.test_runs > 1 else ''
                    reference_file = os.path.join(suite_dir, name) + '.reference'
                    stdout_file = os.path.join(suite_tmp_dir, name) + file_suffix + '.stdout'
                    stderr_file = os.path.join(suite_tmp_dir, name) + file_suffix + '.stderr'

                    proc, stdout, stderr, total_time = run_single_test(args, ext, server_logs_level, client_options, case_file, stdout_file, stderr_file)

                    if proc.returncode is None:
                        try:
                            proc.kill()
                        except OSError as e:
                            if e.errno != ESRCH:
                                raise

                        failures += 1
                        status += MSG_FAIL
                        status += print_test_time(total_time)
                        status += " - Timeout!\n"
                        if stderr:
                            status += stderr
                    else:
                        counter = 1
                        while proc.returncode != 0 and need_retry(stderr):
                            proc, stdout, stderr, total_time = run_single_test(args, ext, server_logs_level, client_options, case_file, stdout_file, stderr_file)
                            sleep(2**counter)
                            counter += 1
                            if counter > 6:
                                break

                        if proc.returncode != 0:
                            failures += 1
                            failures_chain += 1
                            status += MSG_FAIL
                            status += print_test_time(total_time)
                            status += ' - return code {}\n'.format(proc.returncode)

                            if stderr:
                                status += stderr

                            # Stop on fatal errors like segmentation fault. They are sent to client via logs.
                            if ' <Fatal> ' in stderr:
                                SERVER_DIED = True

                            if args.stop and ('Connection refused' in stderr or 'Attempt to read after eof' in stderr) and not 'Received exception from server' in stderr:
                                SERVER_DIED = True

                            if os.path.isfile(stdout_file):
                                status += ", result:\n\n"
                                status += '\n'.join(
                                    open(stdout_file).read().split('\n')[:100])
                                status += '\n'
                        elif stderr:
                            failures += 1
                            failures_chain += 1
                            status += MSG_FAIL
                            status += print_test_time(total_time)
                            status += " - having stderr:\n{}\n".format(
                                '\n'.join(stderr.split('\n')[:100]))
                        elif 'Exception' in stdout:
                            failures += 1
                            failures_chain += 1
                            status += MSG_FAIL
                            status += print_test_time(total_time)
                            status += " - having exception:\n{}\n".format(
                                '\n'.join(stdout.split('\n')[:100]))
                        elif not os.path.isfile(reference_file):
                            status += MSG_UNKNOWN
                            status += print_test_time(total_time)
                            status += " - no reference file\n"
                        else:
                            result_is_different = subprocess.call(['diff', '-q', reference_file, stdout_file], stdout=PIPE)

                            if result_is_different:
                                diff = Popen(['diff', '-U', str(args.unified), reference_file, stdout_file], stdout=PIPE, universal_newlines=True).communicate()[0]
                                failures += 1
                                status += MSG_FAIL
                                status += print_test_time(total_time)
                                status += " - result differs from reference:\n{}\n".format(diff)
                            else:
                                if args.test_runs > 1 and total_time > 30 and 'long' not in name:
                                    # We're in Flaky Check mode, check the run time as well while we're at it.
                                    failures += 1
                                    failures_chain += 1
                                    status += MSG_FAIL
                                    status += print_test_time(total_time)
                                    status += " - Long test not marked as 'long'"
                                else:
                                    passed_total += 1
                                    failures_chain = 0
                                    status += MSG_OK
                                    status += print_test_time(total_time)
                                    status += "\n"

                                if os.path.exists(stdout_file):
                                    os.remove(stdout_file)
                                if os.path.exists(stderr_file):
                                    os.remove(stderr_file)

            if status and not status.endswith('\n'):
                status += '\n'

            sys.stdout.write(status)
            sys.stdout.flush()
        except KeyboardInterrupt as e:
            print(colored("Break tests execution", args, "red"))
            raise e
        except:
            exc_type, exc_value, tb = sys.exc_info()
            failures += 1
            print("{0} - Test internal error: {1}\n{2}\n{3}".format(MSG_FAIL, exc_type.__name__, exc_value, "\n".join(traceback.format_tb(tb, 10))))

        if failures_chain >= 20:
            break

    failures_total = failures_total + failures

    if failures_total > 0:
        print(colored("\nHaving {failures_total} errors! {passed_total} tests passed. {skipped_total} tests skipped.".format(
            passed_total = passed_total, skipped_total = skipped_total, failures_total = failures_total), args, "red", attrs=["bold"]))
        exit_code = 1
    else:
        print(colored("\n{passed_total} tests passed. {skipped_total} tests skipped.".format(
            passed_total = passed_total, skipped_total = skipped_total), args, "green", attrs=["bold"]))


server_logs_level = "warning"


def check_server_started(client, retry_count):
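    # Polls the server with SELECT 1, retrying on "connection refused"
    # (client exit code 210) up to retry_count times. Returns True once the
    # server answers, False otherwise.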
    print("Connecting to ClickHouse server...", end='')
    sys.stdout.flush()
    while retry_count > 0:
        clickhouse_proc = Popen(shlex.split(client), stdin=PIPE, stdout=PIPE, stderr=PIPE)
        (stdout, stderr) = clickhouse_proc.communicate(b"SELECT 1")

        if clickhouse_proc.returncode == 0 and stdout.startswith(b"1"):
            print(" OK")
            sys.stdout.flush()
            return True

        if clickhouse_proc.returncode == 210:
            # Connection refused, retry
            print('.', end = '')
            sys.stdout.flush()
            retry_count -= 1
            sleep(0.5)
            continue

        # Other kind of error, fail.
        print('')
        print("Client invocation failed with code ", clickhouse_proc.returncode, ": ")
        # We can't print the client invocation itself here, because Popen.args
        # only appeared in Python 3.3 and this script used to run under older
        # versions as well.
        # print(''.join(clickhouse_proc.args))
        print("stdout: ")
        print(stdout)
        print("stderr: ")
        print(stderr)
        sys.stdout.flush()
        return False

    print('')
    print('All connection tries failed')
    sys.stdout.flush()
    return False


class BuildFlags():
    THREAD = 'thread-sanitizer'
    ADDRESS = 'address-sanitizer'
    UNDEFINED = 'ub-sanitizer'
    MEMORY = 'memory-sanitizer'
    DEBUG = 'debug-build'
    UNBUNDLED = 'unbundled-build'
    RELEASE = 'release-build'
    DATABASE_ORDINARY = 'database-ordinary'
    POLYMORPHIC_PARTS = 'polymorphic-parts'
    ANTLR = 'antlr'
    DATABASE_REPLICATED = 'database-replicated'

def collect_build_flags(client):
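    # Detects the build configuration (sanitizers, build type, unbundled build,
    # default database engine, polymorphic parts) by querying the server.
    # The returned flags select the matching sections of the skip list.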
    clickhouse_proc = Popen(shlex.split(client), stdin=PIPE, stdout=PIPE, stderr=PIPE)
    (stdout, stderr) = clickhouse_proc.communicate(b"SELECT value FROM system.build_options WHERE name = 'CXX_FLAGS'")
    result = []
    if clickhouse_proc.returncode == 0:
        if b'-fsanitize=thread' in stdout:
            result.append(BuildFlags.THREAD)
        elif b'-fsanitize=address' in stdout:
            result.append(BuildFlags.ADDRESS)
        elif b'-fsanitize=undefined' in stdout:
            result.append(BuildFlags.UNDEFINED)
        elif b'-fsanitize=memory' in stdout:
            result.append(BuildFlags.MEMORY)
    else:
        raise Exception("Cannot get information about build from server errorcode {}, stderr {}".format(clickhouse_proc.returncode, stderr))

    clickhouse_proc = Popen(shlex.split(client), stdin=PIPE, stdout=PIPE, stderr=PIPE)
    (stdout, stderr) = clickhouse_proc.communicate(b"SELECT value FROM system.build_options WHERE name = 'BUILD_TYPE'")
    if clickhouse_proc.returncode == 0:
        if b'Debug' in stdout:
            result.append(BuildFlags.DEBUG)
        elif b'RelWithDebInfo' in stdout or b'Release' in stdout:
            result.append(BuildFlags.RELEASE)
    else:
        raise Exception("Cannot get information about build from server errorcode {}, stderr {}".format(clickhouse_proc.returncode, stderr))

    clickhouse_proc = Popen(shlex.split(client), stdin=PIPE, stdout=PIPE, stderr=PIPE)
    (stdout, stderr) = clickhouse_proc.communicate(b"SELECT value FROM system.build_options WHERE name = 'UNBUNDLED'")
    if clickhouse_proc.returncode == 0:
        if b'ON' in stdout or b'1' in stdout:
            result.append(BuildFlags.UNBUNDLED)
    else:
        raise Exception("Cannot get information about build from server errorcode {}, stderr {}".format(clickhouse_proc.returncode, stderr))

    clickhouse_proc = Popen(shlex.split(client), stdin=PIPE, stdout=PIPE, stderr=PIPE)
    (stdout, stderr) = clickhouse_proc.communicate(b"SELECT value FROM system.settings WHERE name = 'default_database_engine'")
    if clickhouse_proc.returncode == 0:
        if b'Ordinary' in stdout:
            result.append(BuildFlags.DATABASE_ORDINARY)
    else:
        raise Exception("Cannot get information about build from server errorcode {}, stderr {}".format(clickhouse_proc.returncode, stderr))

    clickhouse_proc = Popen(shlex.split(client), stdin=PIPE, stdout=PIPE, stderr=PIPE)
    (stdout, stderr) = clickhouse_proc.communicate(b"SELECT value FROM system.merge_tree_settings WHERE name = 'min_bytes_for_wide_part'")
    if clickhouse_proc.returncode == 0:
        if stdout == b'0\n':
            result.append(BuildFlags.POLYMORPHIC_PARTS)
    else:
        raise Exception("Cannot get information about build from server errorcode {}, stderr {}".format(clickhouse_proc.returncode, stderr))

    return result
def main(args):
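    # Top-level driver: checks that the server is up, collects build flags and
    # skip lists, prepares the environment, walks the test suites, and finally
    # runs the hung-queries check if requested.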
    global SERVER_DIED
    global stop_time
    global exit_code
    global server_logs_level

    def is_data_present():
        clickhouse_proc = Popen(shlex.split(args.client), stdin=PIPE, stdout=PIPE, stderr=PIPE)
        (stdout, stderr) = clickhouse_proc.communicate(b"EXISTS TABLE test.hits")
        if clickhouse_proc.returncode != 0:
            raise CalledProcessError(clickhouse_proc.returncode, args.client, stderr)

        return stdout.startswith(b'1')

    if not check_server_started(args.client, args.server_check_retries):
        raise Exception(
            "Server is not responding. Cannot execute 'SELECT 1' query. "
            "Note: if you are using a split build, you may have to specify the -c option.")

    build_flags = collect_build_flags(args.client)
    if args.antlr:
        build_flags.append(BuildFlags.ANTLR)
    if args.replicated_database:
        build_flags.append(BuildFlags.DATABASE_REPLICATED)

    if args.use_skip_list:
        tests_to_skip_from_list = collect_tests_to_skip(args.skip_list_path, build_flags)
    else:
        tests_to_skip_from_list = set([])

    if args.skip:
        args.skip = set(args.skip) | tests_to_skip_from_list
    else:
        args.skip = tests_to_skip_from_list

    if args.use_skip_list and not args.sequential:
        args.sequential = collect_sequential_list(args.skip_list_path)

    base_dir = os.path.abspath(args.queries)
    tmp_dir = os.path.abspath(args.tmp)

    # Keep same default values as in queries/shell_config.sh
    os.environ.setdefault("CLICKHOUSE_BINARY", args.binary)
    #os.environ.setdefault("CLICKHOUSE_CLIENT", args.client)
    os.environ.setdefault("CLICKHOUSE_CONFIG", args.configserver)
    if args.configclient:
        os.environ.setdefault("CLICKHOUSE_CONFIG_CLIENT", args.configclient)
    os.environ.setdefault("CLICKHOUSE_TMP", tmp_dir)

    # Force to print server warnings in stderr.
    # Shell scripts could change the logging level.
    os.environ.setdefault("CLICKHOUSE_CLIENT_SERVER_LOGS_LEVEL", server_logs_level)

    # This is not ideal, because time() is not monotonic.
    if args.global_time_limit:
        stop_time = time() + args.global_time_limit

    if args.zookeeper is None:
        _, out = subprocess.getstatusoutput(args.extract_from_config + " --try --config " + args.configserver + ' --key zookeeper | grep . | wc -l')
        try:
            if int(out) > 0:
                args.zookeeper = True
            else:
                args.zookeeper = False
        except ValueError:
            args.zookeeper = False

    if args.shard is None:
        _, out = subprocess.getstatusoutput(args.extract_from_config + " --try --config " + args.configserver + ' --key listen_host | grep -E "127.0.0.2|::"')
        if out:
            args.shard = True
        else:
            args.shard = False

    if args.database and args.database != "test":
        clickhouse_proc_create = Popen(shlex.split(args.client), stdin=PIPE, stdout=PIPE, stderr=PIPE, universal_newlines=True)
        clickhouse_proc_create.communicate(("CREATE DATABASE IF NOT EXISTS " + args.database + get_db_engine(args, args.database)))

    clickhouse_proc_create = Popen(shlex.split(args.client), stdin=PIPE, stdout=PIPE, stderr=PIPE, universal_newlines=True)
    clickhouse_proc_create.communicate(("CREATE DATABASE IF NOT EXISTS test" + get_db_engine(args, 'test')))

    def is_test_from_dir(suite_dir, case):
        case_file = os.path.join(suite_dir, case)
        (_, ext) = os.path.splitext(case)
        # We could also test for executable files (os.access(case_file, os.X_OK),
        # but it interferes with 01610_client_spawn_editor.editor, which is invoked
        # as a query editor in the test, and must be marked as executable.
        return os.path.isfile(case_file) and (ext in ['.sql', '.sh', '.py', '.expect'])

    def suite_key_func(item):
        if args.order == 'random':
            return random.random()

        if -1 == item.find('_'):
            return 99998, ''

        prefix, suffix = item.split('_', 1)

        try:
            return int(prefix), suffix
        except ValueError:
            return 99997, ''

    total_tests_run = 0

    for suite in sorted(os.listdir(base_dir), key=suite_key_func):
        if SERVER_DIED:
            break

        suite_dir = os.path.join(base_dir, suite)
        suite_re_obj = re.search('^[0-9]+_(.*)$', suite)
        if not suite_re_obj: # skip .gitignore and so on
            continue

        suite_tmp_dir = os.path.join(tmp_dir, suite)
        if not os.path.exists(suite_tmp_dir):
            os.makedirs(suite_tmp_dir)
        suite = suite_re_obj.group(1)
        if os.path.isdir(suite_dir):
            if 'stateful' in suite and not args.no_stateful and not is_data_present():
                print("Won't run stateful tests because test data wasn't loaded.")
                continue
            if 'stateless' in suite and args.no_stateless:
                print("Won't run stateless tests because they were manually disabled.")
                continue
            if 'stateful' in suite and args.no_stateful:
                print("Won't run stateful tests because they were manually disabled.")
                continue

            # Reverse sort order: we want to run the newest tests first,
            # without reversing the order of subtests.
            def key_func(item):
                if args.order == 'random':
                    return random.random()

                reverse = 1 if args.order == 'asc' else -1
                if -1 == item.find('_'):
                    return 99998

                prefix, _ = item.split('_', 1)

                try:
                    return reverse * int(prefix)
                except ValueError:
                    return 99997

            all_tests = os.listdir(suite_dir)
            all_tests = [case for case in all_tests if is_test_from_dir(suite_dir, case)]
            if args.test:
                all_tests = [t for t in all_tests if any([re.search(r, t) for r in args.test])]
            all_tests = all_tests * args.test_runs
            all_tests.sort(key=key_func)

            jobs = args.jobs
            parallel_tests = []
            sequential_tests = []
            for test in all_tests:
                if any(s in test for s in args.sequential):
                    sequential_tests.append(test)
                else:
                    parallel_tests.append(test)

            if jobs > 1 and len(parallel_tests) > 0:
                print("Found", len(parallel_tests), "parallel tests and", len(sequential_tests), "sequential tests")
                run_n, run_total = args.parallel.split('/')
                run_n = float(run_n)
                run_total = float(run_total)
                tests_n = len(parallel_tests)
                if run_total > tests_n:
                    run_total = tests_n
                if jobs > tests_n:
                    jobs = tests_n
                if jobs > run_total:
                    run_total = jobs

                batch_size = len(parallel_tests) // jobs
                parallel_tests_array = []
                for i in range(0, len(parallel_tests), batch_size):
                    parallel_tests_array.append((parallel_tests[i:i+batch_size], suite, suite_dir, suite_tmp_dir))

                with closing(multiprocessing.Pool(processes=jobs)) as pool:
                    pool.map(run_tests_array, parallel_tests_array)

                run_tests_array((sequential_tests, suite, suite_dir, suite_tmp_dir))
                total_tests_run += len(sequential_tests) + len(parallel_tests)
            else:
                run_tests_array((all_tests, suite, suite_dir, suite_tmp_dir))
                total_tests_run += len(all_tests)

    if args.hung_check:
        # Some queries may execute in background for some time after the test is finished. This is normal.
        for _ in range(1, 60):
            timeout, processlist = get_processlist(args)
            if timeout or not processlist:
                break
            sleep(1)

        if timeout or processlist:
            if processlist:
                print(colored("\nFound hung queries in processlist:", args, "red", attrs=["bold"]))
                print(processlist)
            else:
                print(colored("Seems like the server hung and cannot respond to queries", args, "red", attrs=["bold"]))

            clickhouse_tcp_port = os.getenv("CLICKHOUSE_PORT_TCP", '9000')
            server_pid = get_server_pid(clickhouse_tcp_port)
            bt = None
            if server_pid:
                print("\nLocated ClickHouse server process {} listening at TCP port {}".format(server_pid, clickhouse_tcp_port))
                print("\nCollecting stacktraces from all running threads with gdb:")
                bt = get_stacktraces_from_gdb(server_pid)
                # get_stacktraces_from_gdb may return None on failure, so check for that too.
                if bt is None or len(bt) < 1000:
                    print("Got suspiciously small stacktraces: ", bt)
                    bt = None
            if bt is None:
                print("\nCollecting stacktraces from the system.stack_trace table:")
                bt = get_stacktraces_from_clickhouse(args.client)
            if bt is None:
                print(
                    colored(
                        "\nUnable to locate ClickHouse server process listening at TCP port {}. "
                        "It must have crashed or exited prematurely!".format(clickhouse_tcp_port),
                        args, "red", attrs=["bold"]))
            else:
                print(bt)

            exit_code = 1
        else:
            print(colored("\nNo queries hung.", args, "green", attrs=["bold"]))

    if total_tests_run == 0:
        print("No tests were run.")
        sys.exit(1)

    sys.exit(exit_code)
def find_binary(name):
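    # Returns True if `name` is an executable path or can be found in PATH
    # (also checking /usr/local/bin and /usr/bin).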
    if os.path.exists(name) and os.access(name, os.X_OK):
        return True
    paths = os.environ.get("PATH").split(':')
    for path in paths:
        if os.access(os.path.join(path, name), os.X_OK):
            return True

    # maybe it wasn't in PATH
    if os.access(os.path.join('/usr/local/bin', name), os.X_OK):
        return True
    if os.access(os.path.join('/usr/bin', name), os.X_OK):
        return True
    return False

def get_additional_client_options(args):
    if args.client_option:
        return ' '.join('--' + option for option in args.client_option)
    return ''

def get_additional_client_options_url(args):
    if args.client_option:
        return '&'.join(args.client_option)
    return ''

def collect_tests_to_skip(skip_list_path, build_flags):
    result = set([])
    if not os.path.exists(skip_list_path):
        return result
    with open(skip_list_path, 'r') as skip_list_file:
        content = skip_list_file.read()
        # allows comments in skip_list.json
        skip_dict = json.loads(json_minify(content))
        for build_flag in build_flags:
            result |= set(skip_dict[build_flag])

    if len(result) > 0:
        print("Found skip-list file {}, {} tests will be skipped".format(skip_list_path, len(result)))

    return result

def collect_sequential_list(skip_list_path):
    if not os.path.exists(skip_list_path):
        return set([])
    with open(skip_list_path, 'r') as skip_list_file:
        content = skip_list_file.read()
        # allows comments in skip_list.json
        skip_dict = json.loads(json_minify(content))
        if 'parallel' in skip_dict:
            return skip_dict['parallel']
        return set([])


if __name__ == '__main__':
    parser = ArgumentParser(description='ClickHouse functional tests')
    parser.add_argument('-q', '--queries', help='Path to queries dir')
    parser.add_argument('--tmp', help='Path to tmp dir')
    parser.add_argument('-b', '--binary', default='clickhouse',
        help='Path to the clickhouse binary (for a monolithic build; clickhouse-server otherwise), or the name of a binary in PATH')
    parser.add_argument('-c', '--client',
        help='Path to the clickhouse-client binary (for a split build; unused otherwise), or the name of a binary in PATH')
    parser.add_argument('--extract_from_config', help='extract-from-config program')
    parser.add_argument('--configclient', help='Client config (if you do not use default ports)')
    parser.add_argument('--configserver', default='/etc/clickhouse-server/config.xml', help='Preprocessed server config')
    parser.add_argument('-o', '--output', help='Output xUnit compliant test report directory')
    parser.add_argument('-t', '--timeout', type=int, default=600, help='Timeout for each test case in seconds')
    parser.add_argument('--global_time_limit', type=int, help='Stop if the run takes longer than the specified time (checked after the current test finishes)')
    parser.add_argument('test', nargs='*', help='Optional test case name regex')
    parser.add_argument('-d', '--disabled', action='store_true', default=False, help='Also run disabled tests')
    parser.add_argument('--stop', action='store_true', default=None, dest='stop', help='Stop on network errors')
    parser.add_argument('--order', default='desc', choices=['asc', 'desc', 'random'], help='Run order')
    parser.add_argument('--testname', action='store_true', default=None, dest='testname', help='Make query with test name before test run')
    parser.add_argument('--hung-check', action='store_true', default=False)
    parser.add_argument('--force-color', action='store_true', default=False)
    parser.add_argument('--database', help='Database for tests (random name test_XXXXXX by default)')
    parser.add_argument('--no-drop-if-fail', action='store_true', help='Do not drop the database for a test if the test has failed')
    parser.add_argument('--show-db-name', action='store_true', help='Do not replace random database name with "default"')
    parser.add_argument('--parallel', default='1/1', help='One parallel test run number/total')
    parser.add_argument('-j', '--jobs', default=1, nargs='?', type=int, help='Run all tests in parallel')
    parser.add_argument('--test-runs', default=1, nargs='?', type=int, help='Run each test many times (useful e.g. for the flaky check)')
    parser.add_argument('-U', '--unified', default=3, type=int, help='Output NUM lines of unified context in diffs')
    parser.add_argument('-r', '--server-check-retries', default=30, type=int, help='Number of retries to execute SELECT 1 before tests start')
    parser.add_argument('--skip-list-path', help="Path to skip-list file")
    parser.add_argument('--use-skip-list', action='store_true', default=False, help="Use skip list to skip tests if found")
    parser.add_argument('--db-engine', help='Database engine name')
    parser.add_argument('--replicated-database', action='store_true', default=False, help='Run tests with Replicated database engine')
    parser.add_argument('--antlr', action='store_true', default=False, dest='antlr', help='Use the new ANTLR parser in tests')
    parser.add_argument('--no-stateless', action='store_true', help='Disable all stateless tests')
    parser.add_argument('--no-stateful', action='store_true', help='Disable all stateful tests')
    parser.add_argument('--skip', nargs='+', help="Skip these tests")
    parser.add_argument('--sequential', nargs='+', help="Run these tests sequentially even if --parallel specified")
    parser.add_argument('--no-long', action='store_false', dest='no_long', help='Do not run long tests')
    parser.add_argument('--client-option', nargs='+', help='Specify additional client arguments')
    parser.add_argument('--print-time', action='store_true', dest='print_time', help='Print test time')

    group = parser.add_mutually_exclusive_group(required=False)
    group.add_argument('--zookeeper', action='store_true', default=None, dest='zookeeper', help='Run zookeeper related tests')
    group.add_argument('--no-zookeeper', action='store_false', default=None, dest='zookeeper', help='Do not run zookeeper related tests')
    group = parser.add_mutually_exclusive_group(required=False)
    group.add_argument('--shard', action='store_true', default=None, dest='shard', help='Run sharding related tests (requires clickhouse-server to listen on 127.0.0.2 and 127.0.0.3)')
    group.add_argument('--no-shard', action='store_false', default=None, dest='shard', help='Do not run shard related tests')
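
    # Example invocations (illustrative):
    #   ./clickhouse-test --queries ./queries 00001
    #   ./clickhouse-test --jobs 8 --use-skip-list --print-time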

    args = parser.parse_args()

    if args.queries and not os.path.isdir(args.queries):
        print("Cannot access the specified directory with queries (" + args.queries + ")", file=sys.stderr)
        sys.exit(1)

    # Autodetect the directory with queries if not specified
    if args.queries is None:
        args.queries = 'queries'

    if not os.path.isdir(args.queries):
        # If we're running from the repo
        args.queries = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'queries')

    if not os.path.isdir(args.queries):
        # Next we're going to try some system directories; don't write 'stdout' files into them.
        if args.tmp is None:
            args.tmp = '/tmp/clickhouse-test'
        args.queries = '/usr/local/share/clickhouse-test/queries'

    if not os.path.isdir(args.queries):
        args.queries = '/usr/share/clickhouse-test/queries'

    if not os.path.isdir(args.queries):
        print("Failed to detect path to the queries directory. Please specify it with '--queries' option.", file=sys.stderr)
        sys.exit(1)

    print("Using queries from '" + args.queries + "' directory")

    if args.skip_list_path is None:
        args.skip_list_path = os.path.join(args.queries, 'skip_list.json')

    if args.sequential is None:
        args.sequential = set([])

    if args.tmp is None:
        args.tmp = args.queries

    if args.client is None:
        if find_binary(args.binary + '-client'):
            args.client = args.binary + '-client'
            print("Using " + args.client + " as client program (expecting split build)")
        elif find_binary(args.binary):
            args.client = args.binary + ' client'
            print("Using " + args.client + " as client program (expecting monolithic build)")
        else:
            print("No 'clickhouse' or 'clickhouse-client' client binary found", file=sys.stderr)
            parser.print_help()
            sys.exit(1)

    if args.configclient:
        args.client += ' --config-file=' + args.configclient
    if os.getenv("CLICKHOUSE_HOST"):
        args.client += ' --host=' + os.getenv("CLICKHOUSE_HOST")
    if os.getenv("CLICKHOUSE_PORT_TCP"):
        args.client += ' --port=' + os.getenv("CLICKHOUSE_PORT_TCP")
    if os.getenv("CLICKHOUSE_DATABASE"):
        args.client += ' --database=' + os.getenv("CLICKHOUSE_DATABASE")

    if args.client_option:
        # Set options for client
        if 'CLICKHOUSE_CLIENT_OPT' in os.environ:
            os.environ['CLICKHOUSE_CLIENT_OPT'] += ' '
        else:
            os.environ['CLICKHOUSE_CLIENT_OPT'] = ''
        os.environ['CLICKHOUSE_CLIENT_OPT'] += get_additional_client_options(args)

        # Set options for curl
        if 'CLICKHOUSE_URL_PARAMS' in os.environ:
            os.environ['CLICKHOUSE_URL_PARAMS'] += '&'
        else:
            os.environ['CLICKHOUSE_URL_PARAMS'] = ''
        os.environ['CLICKHOUSE_URL_PARAMS'] += get_additional_client_options_url(args)

    if args.antlr:
        if 'CLICKHOUSE_CLIENT_OPT' in os.environ:
            os.environ['CLICKHOUSE_CLIENT_OPT'] += ' --use_antlr_parser=1'
        else:
            os.environ['CLICKHOUSE_CLIENT_OPT'] = '--use_antlr_parser=1'

    if args.extract_from_config is None:
        if os.access(args.binary + '-extract-from-config', os.X_OK):
            args.extract_from_config = args.binary + '-extract-from-config'
        else:
            args.extract_from_config = args.binary + ' extract-from-config'

    if args.jobs is None:
        args.jobs = multiprocessing.cpu_count()

    main(args)