2020-04-28 19:45:18 +00:00
|
|
|
#!/usr/bin/env python2
|
2019-12-03 09:59:41 +00:00
|
|
|
from __future__ import print_function
|
2020-03-26 08:36:15 +00:00
|
|
|
import sys
|
2016-09-01 17:40:02 +00:00
|
|
|
import os
|
|
|
|
import os.path
|
|
|
|
import re
|
2020-07-03 10:57:16 +00:00
|
|
|
import json
|
2016-09-01 17:40:02 +00:00
|
|
|
|
|
|
|
from argparse import ArgumentParser
|
|
|
|
from argparse import FileType
|
|
|
|
from pprint import pprint
|
2017-08-31 20:03:47 +00:00
|
|
|
import shlex
|
2017-02-02 13:41:39 +00:00
|
|
|
import subprocess
|
2016-09-01 17:40:02 +00:00
|
|
|
from subprocess import check_call
|
|
|
|
from subprocess import Popen
|
|
|
|
from subprocess import PIPE
|
|
|
|
from subprocess import CalledProcessError
|
2016-09-02 16:26:09 +00:00
|
|
|
from datetime import datetime
|
|
|
|
from time import sleep
|
|
|
|
from errno import ESRCH
|
2019-07-17 12:46:20 +00:00
|
|
|
try:
|
|
|
|
import termcolor
|
|
|
|
except ImportError:
|
|
|
|
termcolor = None
|
2017-10-03 18:31:32 +00:00
|
|
|
from random import random
|
2018-01-18 20:33:16 +00:00
|
|
|
import commands
|
2019-06-03 17:36:27 +00:00
|
|
|
import multiprocessing
|
2019-04-22 23:40:40 +00:00
|
|
|
from contextlib import closing
|
2016-09-01 17:40:02 +00:00
|
|
|
|
2016-11-15 17:59:55 +00:00
|
|
|
|
2019-03-13 16:47:02 +00:00
|
|
|
# Transient server-side error messages: when a test fails and its stderr
# contains one of these substrings, the test is re-run with backoff
# (see need_retry / run_tests_array) instead of being reported as failed.
MESSAGES_TO_RETRY = [
    "DB::Exception: ZooKeeper session has been expired",
    "Coordination::Exception: Connection loss",
    "Operation timed out",
    "ConnectionPoolWithFailover: Connection failed at try",
]
|
|
|
|
|
2016-09-01 17:40:02 +00:00
|
|
|
|
2017-09-14 17:13:40 +00:00
|
|
|
def remove_control_characters(s):
    """Decode numeric HTML entities in *s* and strip raw control characters.

    https://github.com/html5lib/html5lib-python/issues/96#issuecomment-43438438

    Returns the cleaned string; entities outside the BMP (>= 0x10000) are
    left untouched as their original entity text.
    """
    # `unichr` exists only on Python 2; fall back to `chr` so this helper
    # also works (and is testable) under Python 3. Behavior on py2 is unchanged.
    try:
        _chr = unichr
    except NameError:
        _chr = chr

    def str_to_int(s, default, base=10):
        # Convert the entity digits to a character, or keep the raw
        # entity text (`default`) for code points outside the BMP.
        if int(s, base) < 0x10000:
            return _chr(int(s, base))
        return default

    # Decimal entities: "&#65;" -> "A" (trailing ';' optional).
    s = re.sub(r"&#(\d+);?", lambda c: str_to_int(c.group(1), c.group(0)), s)
    # Hexadecimal entities: "&#x41;" -> "A".
    s = re.sub(r"&#[xX]([0-9a-fA-F]+);?", lambda c: str_to_int(c.group(1), c.group(0), base=16), s)
    # Drop raw C0 control characters and DEL, keeping \t, \n, \x0c and \r.
    s = re.sub(r"[\x00-\x08\x0b\x0e-\x1f\x7f]", "", s)
    return s
|
|
|
|
|
2019-10-11 10:30:32 +00:00
|
|
|
|
|
|
|
def run_single_test(args, ext, server_logs_level, client_options, case_file, stdout_file, stderr_file):
    """Run one test case in a shell with a timeout, capturing output to files.

    Returns (proc, stdout, stderr, total_time): the Popen object (returncode
    is None if the test timed out), the decoded contents of stdout_file and
    stderr_file, and the wall-clock duration in seconds.

    Unless args.database is given, a throwaway database test_<random> is
    created before the test and dropped afterwards.
    """
    # print(client_options)

    if args.database:
        database = args.database
        os.environ.setdefault("CLICKHOUSE_DATABASE", database)

    else:
        # If --database is not specified, we will create temporary database with unique name
        # And we will recreate and drop it for each test
        def random_str(length=6):
            import random
            import string
            alphabet = string.ascii_lowercase + string.digits
            return ''.join(random.choice(alphabet) for _ in range(length))
        database = 'test_{suffix}'.format(suffix=random_str())

        # NOTE: Python 2 — communicate() takes a str query here.
        clickhouse_proc_create = Popen(shlex.split(args.client), stdin=PIPE, stdout=PIPE, stderr=PIPE)
        clickhouse_proc_create.communicate("CREATE DATABASE " + database)

        # Exported so shell-based tests (.sh) run against the same database.
        os.environ["CLICKHOUSE_DATABASE"] = database

    params = {
        'client': args.client + ' --database=' + database,
        'logs_level': server_logs_level,
        'options': client_options,
        'test': case_file,
        'stdout': stdout_file,
        'stderr': stderr_file,
    }

    # .sh/.py tests are executed directly; .sql tests are piped into the client.
    pattern = '{test} > {stdout} 2> {stderr}'

    if ext == '.sql':
        pattern = "{client} --send_logs_level={logs_level} --testmode --multiquery {options} < " + pattern

    command = pattern.format(**params)
    #print(command)

    proc = Popen(command, shell=True, env=os.environ)
    start_time = datetime.now()
    # Busy-wait with a short sleep so args.timeout can be enforced here;
    # on timeout proc.returncode stays None and the caller kills the process.
    while (datetime.now() - start_time).total_seconds() < args.timeout and proc.poll() is None:
        sleep(0.01)

    if not args.database:
        clickhouse_proc_create = Popen(shlex.split(args.client), stdin=PIPE, stdout=PIPE, stderr=PIPE)
        clickhouse_proc_create.communicate("DROP DATABASE " + database)

    total_time = (datetime.now() - start_time).total_seconds()

    # Normalize randomized database names in stdout, stderr files.
    os.system("LC_ALL=C sed -i -e 's/{test_db}/default/g' {file}".format(test_db=database, file=stdout_file))
    os.system("LC_ALL=C sed -i -e 's/{test_db}/default/g' {file}".format(test_db=database, file=stderr_file))

    # Python 2 `unicode`: decode test output, replacing invalid UTF-8 bytes.
    stdout = open(stdout_file, 'r').read() if os.path.exists(stdout_file) else ''
    stdout = unicode(stdout, errors='replace', encoding='utf-8')
    stderr = open(stderr_file, 'r').read() if os.path.exists(stderr_file) else ''
    stderr = unicode(stderr, errors='replace', encoding='utf-8')

    return proc, stdout, stderr, total_time
|
2019-03-13 16:47:02 +00:00
|
|
|
|
2019-10-11 10:30:32 +00:00
|
|
|
|
2019-03-13 16:47:02 +00:00
|
|
|
def need_retry(stderr):
    """Return True when *stderr* contains a known transient error message
    (see MESSAGES_TO_RETRY), meaning the test should be re-run."""
    for message in MESSAGES_TO_RETRY:
        if message in stderr:
            return True
    return False
|
|
|
|
|
2019-10-11 10:30:32 +00:00
|
|
|
|
2019-03-13 11:03:57 +00:00
|
|
|
def get_processlist(client_cmd):
    """Return the server's current process list (Vertical format) as raw
    client output, or "" if the query fails (server presumed dead)."""
    try:
        return subprocess.check_output("{} --query 'SHOW PROCESSLIST FORMAT Vertical'".format(client_cmd), shell=True)
    # Was a bare `except:`, which also swallowed KeyboardInterrupt/SystemExit;
    # catching Exception keeps the best-effort behavior but lets Ctrl-C abort.
    except Exception:
        return "" # server seems dead
|
|
|
|
|
2019-10-11 10:30:32 +00:00
|
|
|
|
2020-03-23 18:17:07 +00:00
|
|
|
# collect server stacktraces using gdb
|
2020-03-23 17:30:31 +00:00
|
|
|
# collect server stacktraces using gdb
def get_stacktraces_from_gdb(server_pid):
    """Attach gdb to *server_pid* and return backtraces of every thread;
    on any failure, return a short error string instead of raising."""
    gdb_cmd = "gdb -batch -ex 'thread apply all backtrace' -p {}".format(server_pid)
    try:
        return subprocess.check_output(gdb_cmd, shell=True)
    except Exception as ex:
        return "Error occured while receiving stack traces from gdb: {}".format(str(ex))
|
|
|
|
|
|
|
|
|
2020-03-23 18:17:07 +00:00
|
|
|
# collect server stacktraces from system.stack_trace table
|
2020-05-13 20:03:10 +00:00
|
|
|
# it does not work in Sandbox
|
2020-03-23 17:30:31 +00:00
|
|
|
# collect server stacktraces from system.stack_trace table
# it does not work in Sandbox
def get_stacktraces_from_clickhouse(client):
    """Query system.stack_trace through *client* and return symbolized
    stack traces; on any failure, return a short error string."""
    query_cmd = "{} --allow_introspection_functions=1 --query \"SELECT arrayStringConcat(arrayMap(x, y -> concat(x, ': ', y), arrayMap(x -> addressToLine(x), trace), arrayMap(x -> demangle(addressToSymbol(x)), trace)), '\n') as trace FROM system.stack_trace format Vertical\"".format(client)
    try:
        return subprocess.check_output(query_cmd, shell=True)
    except Exception as ex:
        return "Error occured while receiving stack traces from client: {}".format(str(ex))
|
2019-03-13 11:03:57 +00:00
|
|
|
|
2019-10-11 10:30:32 +00:00
|
|
|
|
2019-03-13 11:03:57 +00:00
|
|
|
def get_server_pid(server_tcp_port):
    """Return the pid of the process listening on *server_tcp_port*,
    or None when nothing listens there (or lsof itself fails)."""
    # lsof -Fp prints 'p<PID>' lines; awk strips the leading 'p'.
    lsof_cmd = "lsof -i tcp:{port} -s tcp:LISTEN -Fp | awk '/^p[0-9]+$/{{print substr($0, 2)}}'".format(port=server_tcp_port)
    try:
        lsof_output = subprocess.check_output(lsof_cmd, shell=True)
        if not lsof_output:
            return None # server dead
        return int(lsof_output)
    except Exception:
        return None
|
|
|
|
|
2019-10-11 10:30:32 +00:00
|
|
|
|
2019-04-22 23:40:40 +00:00
|
|
|
def colored(text, args, color=None, on_color=None, attrs=None):
    """Wrap *text* in ANSI colors via termcolor when available and coloring
    is enabled (stdout is a tty, or --force-color was given); otherwise
    return *text* unchanged."""
    use_color = termcolor and (sys.stdout.isatty() or args.force_color)
    if not use_color:
        return text
    return termcolor.colored(text, color, on_color, attrs)
|
|
|
|
|
2019-10-11 10:30:32 +00:00
|
|
|
|
2019-04-22 23:40:40 +00:00
|
|
|
# Flipped to True when the server looks dead (a ' <Fatal> ' message or
# refused connections in a test's stderr); the test loops check it to stop
# scheduling further tests.
SERVER_DIED = False
# Overall process exit status; set to 1 on any test failure or hung queries.
exit_code = 0
|
|
|
|
|
2020-03-26 08:36:15 +00:00
|
|
|
|
2019-10-11 10:30:32 +00:00
|
|
|
# def run_tests_array(all_tests, suite, suite_dir, suite_tmp_dir, run_total):
|
2020-03-26 08:36:15 +00:00
|
|
|
def run_tests_array(all_tests_with_params):
    """Run one batch of tests sequentially, printing a verdict per test.

    all_tests_with_params is a single (all_tests, suite, suite_dir,
    suite_tmp_dir, run_total) sequence — the one-argument shape lets this
    function be passed directly to multiprocessing.Pool.map.  Updates the
    module-level `exit_code` and `SERVER_DIED` globals.
    """
    all_tests, suite, suite_dir, suite_tmp_dir, run_total = all_tests_with_params
    global exit_code
    global SERVER_DIED

    OP_SQUARE_BRACKET = colored("[", args, attrs=['bold'])
    CL_SQUARE_BRACKET = colored("]", args, attrs=['bold'])

    MSG_FAIL = OP_SQUARE_BRACKET + colored(" FAIL ", args, "red", attrs=['bold']) + CL_SQUARE_BRACKET
    MSG_UNKNOWN = OP_SQUARE_BRACKET + colored(" UNKNOWN ", args, "yellow", attrs=['bold']) + CL_SQUARE_BRACKET
    MSG_OK = OP_SQUARE_BRACKET + colored(" OK ", args, "green", attrs=['bold']) + CL_SQUARE_BRACKET
    MSG_SKIPPED = OP_SQUARE_BRACKET + colored(" SKIPPED ", args, "cyan", attrs=['bold']) + CL_SQUARE_BRACKET

    passed_total = 0
    skipped_total = 0
    failures_total = 0
    failures = 0
    # Consecutive failures; the batch aborts when it reaches 20.
    failures_chain = 0

    client_options = get_additional_client_options(args)

    def print_test_time(test_time):
        # Appended to the verdict line when --print-time is given.
        if args.print_time:
            print(" {0:.2f} sec.".format(test_time), end='')

    if len(all_tests):
        print("\nRunning {} {} tests.".format(len(all_tests), suite) + "\n")

    for case in all_tests:
        if SERVER_DIED:
            break

        case_file = os.path.join(suite_dir, case)
        (name, ext) = os.path.splitext(case)

        try:
            sys.stdout.write("{0:72}".format(name + ": "))
            if run_total == 1:
                sys.stdout.flush()

            # Skip filters: explicit --skip patterns first, then environment
            # capabilities (zookeeper / shard), then long-running tests.
            if args.skip and any(s in name for s in args.skip):
                print(MSG_SKIPPED + " - skip")
                skipped_total += 1
            elif not args.zookeeper and ('zookeeper' in name
                    or 'replica' in name):
                print(MSG_SKIPPED + " - no zookeeper")
                skipped_total += 1
            elif not args.shard and ('shard' in name
                    or 'distributed' in name
                    or 'global' in name):
                print(MSG_SKIPPED + " - no shard")
                skipped_total += 1
            elif not args.no_long and ('long' in name
                    # Tests for races and deadlocks usually are runned in loop
                    # for significant amount of time
                    or 'deadlock' in name
                    or 'race' in name):
                print(MSG_SKIPPED + " - no long")
                skipped_total += 1
            else:
                disabled_file = os.path.join(suite_dir, name) + '.disabled'

                if os.path.exists(disabled_file) and not args.disabled:
                    message = open(disabled_file, 'r').read()
                    print(MSG_SKIPPED + " - " + message)
                else:

                    if args.testname:
                        # Leave a marker in the server log naming the running test.
                        clickhouse_proc = Popen(shlex.split(args.client), stdin=PIPE, stdout=PIPE, stderr=PIPE)
                        clickhouse_proc.communicate("SELECT 'Running test {suite}/{case} from pid={pid}';".format(pid = os.getpid(), case = case, suite = suite))

                    reference_file = os.path.join(suite_dir, name) + '.reference'
                    stdout_file = os.path.join(suite_tmp_dir, name) + '.stdout'
                    stderr_file = os.path.join(suite_tmp_dir, name) + '.stderr'

                    proc, stdout, stderr, total_time = run_single_test(args, ext, server_logs_level, client_options, case_file, stdout_file, stderr_file)
                    if proc.returncode is None:
                        # run_single_test gave up waiting: the test timed out.
                        try:
                            proc.kill()
                        except OSError as e:
                            # ESRCH: the process exited between poll() and kill().
                            if e.errno != ESRCH:
                                raise

                        failures += 1
                        print(MSG_FAIL, end='')
                        print_test_time(total_time)
                        print(" - Timeout!")
                    else:
                        # Retry with exponential backoff on known transient errors.
                        counter = 1
                        while proc.returncode != 0 and need_retry(stderr):
                            proc, stdout, stderr, total_time = run_single_test(args, ext, server_logs_level, client_options, case_file, stdout_file, stderr_file)
                            sleep(2**counter)
                            counter += 1
                            if counter > 6:
                                break

                        if proc.returncode != 0:
                            failures += 1
                            failures_chain += 1
                            print(MSG_FAIL, end='')
                            print_test_time(total_time)
                            print(" - return code {}".format(proc.returncode))

                            if stderr:
                                print(stderr.encode('utf-8'))

                            # Stop on fatal errors like segmentation fault. They are send to client via logs.
                            if ' <Fatal> ' in stderr:
                                SERVER_DIED = True

                            if args.stop and ('Connection refused' in stderr or 'Attempt to read after eof' in stderr) and not 'Received exception from server' in stderr:
                                SERVER_DIED = True

                        elif stderr:
                            failures += 1
                            failures_chain += 1
                            print(MSG_FAIL, end='')
                            print_test_time(total_time)
                            print(" - having stderror:\n{}".format(stderr.encode('utf-8')))
                        elif 'Exception' in stdout:
                            failures += 1
                            failures_chain += 1
                            print(MSG_FAIL, end='')
                            print_test_time(total_time)
                            print(" - having exception:\n{}".format(stdout.encode('utf-8')))
                        elif not os.path.isfile(reference_file):
                            print(MSG_UNKNOWN, end='')
                            print_test_time(total_time)
                            print(" - no reference file")
                        else:
                            # Compare the captured stdout against the .reference file.
                            result_is_different = subprocess.call(['diff', '-q', reference_file, stdout_file], stdout = PIPE)

                            if result_is_different:
                                diff = Popen(['diff', '-U', str(args.unified), reference_file, stdout_file], stdout = PIPE).communicate()[0]
                                failures += 1
                                print(MSG_FAIL, end='')
                                print_test_time(total_time)
                                print(" - result differs with reference:\n{}".format(diff))
                            else:
                                passed_total += 1
                                failures_chain = 0
                                print(MSG_OK, end='')
                                print_test_time(total_time)
                                print()
                                # Output files are kept only for failed tests.
                                if os.path.exists(stdout_file):
                                    os.remove(stdout_file)
                                if os.path.exists(stderr_file):
                                    os.remove(stderr_file)
        except KeyboardInterrupt as e:
            print(colored("Break tests execution", args, "red"))
            raise e
        except:
            # Any other error in the harness itself counts as a test failure.
            import traceback
            exc_type, exc_value, tb = sys.exc_info()
            failures += 1
            print("{0} - Test internal error: {1}\n{2}\n{3}".format(MSG_FAIL, exc_type.__name__, exc_value, "\n".join(traceback.format_tb(tb, 10))))

        if failures_chain >= 20:
            break

    failures_total = failures_total + failures

    if failures_total > 0:
        print(colored("\nHaving {failures_total} errors! {passed_total} tests passed. {skipped_total} tests skipped.".format(passed_total = passed_total, skipped_total = skipped_total, failures_total = failures_total), args, "red", attrs=["bold"]))
        exit_code = 1
    else:
        print(colored("\n{passed_total} tests passed. {skipped_total} tests skipped.".format(passed_total = passed_total, skipped_total = skipped_total), args, "green", attrs=["bold"]))
|
2019-04-09 13:17:36 +00:00
|
|
|
|
2019-10-11 10:30:32 +00:00
|
|
|
|
2019-04-22 23:40:40 +00:00
|
|
|
# Log level the client requests from the server for .sql tests
# (--send_logs_level and CLICKHOUSE_CLIENT_SERVER_LOGS_LEVEL).
server_logs_level = "warning"
|
|
|
|
|
2019-10-11 10:30:32 +00:00
|
|
|
|
2020-05-29 10:08:11 +00:00
|
|
|
def check_server_started(client, retry_count):
    """Poll the server with 'SELECT 1' until it answers.

    Retries up to *retry_count* times (0.5 s apart) while the client exits
    with code 210 (connection refused).  Returns True once the server
    replies, False on any other client error or when retries run out.
    """
    print("Connecting to ClickHouse server...", end='')
    sys.stdout.flush()

    while retry_count > 0:
        proc = Popen(shlex.split(client), stdin=PIPE, stdout=PIPE, stderr=PIPE)
        stdout, stderr = proc.communicate("SELECT 1")

        if proc.returncode == 0 and stdout.startswith("1"):
            # Server is up and answering queries.
            print(" OK")
            sys.stdout.flush()
            return True
        elif proc.returncode == 210:
            # Connection refused, retry
            print('.', end = '')
            sys.stdout.flush()
            retry_count -= 1
            sleep(0.5)
        else:
            # Other kind of error, fail.
            print('')
            print("Client invocation failed with code ", proc.returncode, ": ")
            # We can't print this, because for some reason this is python 2,
            # and args appeared in 3.3. To hell with it.
            # print(''.join(clickhouse_proc.args))
            print("stdout: ")
            print(stdout)
            print("stderr: ")
            print(stderr)
            sys.stdout.flush()
            return False

    print('')
    print('All connection tries failed')
    sys.stdout.flush()
    return False
|
|
|
|
|
|
|
|
|
2020-07-03 10:57:16 +00:00
|
|
|
class BuildFlags(object):
    """Tags describing how the server under test was built.

    collect_build_flags() probes the server and returns a subset of these
    values; collect_tests_to_skip() then uses them as keys into the JSON
    skip-list to disable tests incompatible with the build.
    """
    # Sanitizer builds (at most one is detected — see collect_build_flags).
    THREAD = 'thread-sanitizer'
    ADDRESS = 'address-sanitizer'
    UNDEFINED = 'ub-sanitizer'
    MEMORY = 'memory-sanitizer'
    # Build type / packaging.
    DEBUG = 'debug-build'
    UNBUNDLED = 'unbundled-build'
    RELEASE = 'release-build'
    # Server configuration detected at runtime.
    DATABASE_ATOMIC = 'database-atomic'
    POLYMORPHIC_PARTS = 'polymorphic-parts'
|
|
|
|
|
|
|
|
|
|
|
|
def collect_build_flags(client):
    """Probe the server via *client* and return a list of BuildFlags values
    describing its build (sanitizer, build type, bundling, engine defaults).

    Raises Exception when any probe query fails (nonzero client exit code).
    """
    def _query_build_option(sql):
        # One short-lived client invocation per probe; returns its stdout.
        # Byte literals are identical to str on Python 2 and keep the
        # failure path runnable on Python 3.
        proc = Popen(shlex.split(client), stdin=PIPE, stdout=PIPE, stderr=PIPE)
        (stdout, stderr) = proc.communicate(sql)
        if proc.returncode != 0:
            # Typo fix: was "inforamtion".
            raise Exception("Cannot get information about build from server errorcode {}, stderr {}".format(proc.returncode, stderr))
        return stdout

    result = []

    # Sanitizers are mutually exclusive, so record at most one.
    stdout = _query_build_option(b"SELECT value FROM system.build_options WHERE name = 'CXX_FLAGS'")
    if b'-fsanitize=thread' in stdout:
        result.append(BuildFlags.THREAD)
    elif b'-fsanitize=address' in stdout:
        result.append(BuildFlags.ADDRESS)
    elif b'-fsanitize=undefined' in stdout:
        result.append(BuildFlags.UNDEFINED)
    elif b'-fsanitize=memory' in stdout:
        result.append(BuildFlags.MEMORY)

    stdout = _query_build_option(b"SELECT value FROM system.build_options WHERE name = 'BUILD_TYPE'")
    if b'Debug' in stdout:
        result.append(BuildFlags.DEBUG)
    elif b'RelWithDebInfo' in stdout or b'Release' in stdout:
        result.append(BuildFlags.RELEASE)

    stdout = _query_build_option(b"SELECT value FROM system.build_options WHERE name = 'UNBUNDLED'")
    if b'ON' in stdout:
        result.append(BuildFlags.UNBUNDLED)

    stdout = _query_build_option(b"SELECT value FROM system.settings WHERE name = 'default_database_engine'")
    if b'Atomic' in stdout:
        result.append(BuildFlags.DATABASE_ATOMIC)

    stdout = _query_build_option(b"SELECT value FROM system.merge_tree_settings WHERE name = 'min_bytes_for_wide_part'")
    if b'10485760' in stdout:
        result.append(BuildFlags.POLYMORPHIC_PARTS)

    return result
|
|
|
|
|
|
|
|
|
2019-04-22 23:40:40 +00:00
|
|
|
def main(args):
    """Top-level driver: verify the server is up, discover test suites under
    args.queries, split each suite across parallel jobs, run them, then
    optionally check for hung queries.  Exits the process via sys.exit().
    """
    global SERVER_DIED
    global exit_code
    global server_logs_level

    def is_data_present():
        # True when the stateful test dataset (test.hits) exists on the server.
        clickhouse_proc = Popen(shlex.split(args.client), stdin=PIPE, stdout=PIPE, stderr=PIPE)
        (stdout, stderr) = clickhouse_proc.communicate("EXISTS TABLE test.hits")
        if clickhouse_proc.returncode != 0:
            raise CalledProcessError(clickhouse_proc.returncode, args.client, stderr)

        return stdout.startswith('1')

    if not check_server_started(args.client, args.server_check_retries):
        raise Exception("clickhouse-server is not responding. Cannot execute 'SELECT 1' query.")
    # Merge build-specific skip-list entries into the user-provided --skip set.
    build_flags = collect_build_flags(args.client)
    tests_to_skip_from_list = collect_tests_to_skip(args.skip_list, build_flags)
    if args.skip:
        args.skip = set(args.skip) | tests_to_skip_from_list
    else:
        args.skip = tests_to_skip_from_list

    base_dir = os.path.abspath(args.queries)
    tmp_dir = os.path.abspath(args.tmp)

    # Keep same default values as in queries/shell_config.sh
    os.environ.setdefault("CLICKHOUSE_BINARY", args.binary)
    #os.environ.setdefault("CLICKHOUSE_CLIENT", args.client)
    os.environ.setdefault("CLICKHOUSE_CONFIG", args.configserver)
    if args.configclient:
        os.environ.setdefault("CLICKHOUSE_CONFIG_CLIENT", args.configclient)
    os.environ.setdefault("CLICKHOUSE_TMP", tmp_dir)

    # Force to print server warnings in stderr
    # Shell scripts could change logging level
    os.environ.setdefault("CLICKHOUSE_CLIENT_SERVER_LOGS_LEVEL", server_logs_level)

    # Auto-detect zookeeper / shard capabilities from the server config
    # when not forced on the command line.
    if args.zookeeper is None:
        code, out = commands.getstatusoutput(args.extract_from_config + " --try --config " + args.configserver + ' --key zookeeper | grep . | wc -l')
        try:
            if int(out) > 0:
                args.zookeeper = True
            else:
                args.zookeeper = False
        except ValueError:
            args.zookeeper = False

    if args.shard is None:
        code, out = commands.getstatusoutput(args.extract_from_config + " --try --config " + args.configserver + ' --key listen_host | grep -E "127.0.0.2|::"')
        if out:
            args.shard = True
        else:
            args.shard = False

    if args.database and args.database != "test":
        clickhouse_proc_create = Popen(shlex.split(args.client), stdin=PIPE, stdout=PIPE, stderr=PIPE)
        clickhouse_proc_create.communicate("CREATE DATABASE IF NOT EXISTS " + args.database)

    clickhouse_proc_create = Popen(shlex.split(args.client), stdin=PIPE, stdout=PIPE, stderr=PIPE)
    clickhouse_proc_create.communicate("CREATE DATABASE IF NOT EXISTS test")

    def is_test_from_dir(suite_dir, case):
        # Only regular .sql/.sh/.py files count as test cases.
        case_file = os.path.join(suite_dir, case)
        (name, ext) = os.path.splitext(case)
        return os.path.isfile(case_file) and (ext == '.sql' or ext == '.sh' or ext == '.py')

    def sute_key_func(item):
        # Suite ordering: random with --order=random, else by numeric prefix.
        if args.order == 'random':
            return random()

        if -1 == item.find('_'):
            return 99998

        prefix, suffix = item.split('_', 1)

        try:
            return int(prefix), suffix
        except ValueError:
            return 99997

    total_tests_run = 0
    for suite in sorted(os.listdir(base_dir), key=sute_key_func):
        if SERVER_DIED:
            break

        suite_dir = os.path.join(base_dir, suite)
        suite_re_obj = re.search('^[0-9]+_(.*)$', suite)
        if not suite_re_obj: #skip .gitignore and so on
            continue

        suite_tmp_dir = os.path.join(tmp_dir, suite)
        if not os.path.exists(suite_tmp_dir):
            os.makedirs(suite_tmp_dir)

        suite = suite_re_obj.group(1)
        if os.path.isdir(suite_dir):

            if 'stateful' in suite and not args.no_stateful and not is_data_present():
                print("Won't run stateful tests because test data wasn't loaded.")
                continue
            if 'stateless' in suite and args.no_stateless:
                print("Won't run stateless tests because they were manually disabled.")
                continue
            if 'stateful' in suite and args.no_stateful:
                print("Won't run stateful tests because they were manually disabled.")
                continue

            # Reverse sort order: we want run newest test first.
            # And not reverse subtests
            def key_func(item):
                if args.order == 'random':
                    return random()

                reverse = 1 if args.order == 'asc' else -1

                if -1 == item.find('_'):
                    return 99998

                prefix, suffix = item.split('_', 1)

                try:
                    return reverse * int(prefix), suffix
                except ValueError:
                    return 99997

            # Python 2: filter() returns a list here, so .sort() works below.
            all_tests = os.listdir(suite_dir)
            all_tests = filter(lambda case: is_test_from_dir(suite_dir, case), all_tests)
            if args.test:
                all_tests = [t for t in all_tests if any([re.search(r, t) for r in args.test])]
            all_tests.sort(key=key_func)

            # --parallel is "n/total": this process runs chunk n of total.
            run_n, run_total = args.parallel.split('/')
            run_n = float(run_n)
            run_total = float(run_total)
            tests_n = len(all_tests)
            if run_total > tests_n:
                run_total = tests_n
            if run_n > run_total:
                continue

            jobs = args.jobs
            if jobs > tests_n:
                jobs = tests_n
            if jobs > run_total:
                run_total = jobs

            # Split the suite into run_total contiguous chunks.
            all_tests_array = []
            for n in range(1, 1 + int(run_total)):
                start = int(tests_n / run_total * (n - 1))
                end = int(tests_n / run_total * n)
                all_tests_array.append([all_tests[start : end], suite, suite_dir, suite_tmp_dir, run_total])

            if jobs > 1:
                with closing(multiprocessing.Pool(processes=jobs)) as pool:
                    pool.map(run_tests_array, all_tests_array)
                    pool.terminate()
            else:
                run_tests_array(all_tests_array[int(run_n)-1])

            total_tests_run += tests_n

    if args.hung_check:

        # Some queries may execute in background for some time after test was finished. This is normal.
        for n in range(1, 60):
            processlist = get_processlist(args.client)
            if not processlist:
                break
            sleep(1)

        if processlist:
            print(colored("\nFound hung queries in processlist:", args, "red", attrs=["bold"]))
            print(processlist)

            clickhouse_tcp_port = os.getenv("CLICKHOUSE_PORT_TCP", '9000')
            server_pid = get_server_pid(clickhouse_tcp_port)
            if server_pid:
                print("\nLocated ClickHouse server process {} listening at TCP port {}".format(server_pid, clickhouse_tcp_port))

                # It does not work in Sandbox
                #print("\nCollecting stacktraces from system.stacktraces table:")
                #print(get_stacktraces_from_clickhouse(args.client))

                print("\nCollecting stacktraces from all running threads with gdb:")
                print(get_stacktraces_from_gdb(server_pid))
            else:
                print(
                    colored(
                        "\nUnable to locate ClickHouse server process listening at TCP port {}. "
                        "It must have crashed or exited prematurely!".format(clickhouse_tcp_port),
                        args, "red", attrs=["bold"]))

            exit_code = 1
        else:
            print(colored("\nNo queries hung.", args, "green", attrs=["bold"]))

    if total_tests_run == 0:
        print("No tests were run.")
        sys.exit(1)

    sys.exit(exit_code)
|
|
|
|
|
2016-12-06 20:55:13 +00:00
|
|
|
|
2019-01-24 11:02:55 +00:00
|
|
|
def find_binary(name):
    """Return True if `name` names a runnable binary.

    Accepts either a direct path (must exist and be executable) or a bare
    name, which is searched for in every directory of $PATH and then in
    /usr/local/bin and /usr/bin as fallbacks.
    """
    if os.path.exists(name) and os.access(name, os.X_OK):
        return True

    # PATH can legitimately be absent in a stripped-down environment
    # (the original `os.environ.get("PATH").split(':')` crashed with
    # AttributeError in that case); treat a missing PATH as empty.
    paths = os.environ.get("PATH", "").split(':')
    for path in paths:
        if os.access(os.path.join(path, name), os.X_OK):
            return True

    # maybe it wasn't in PATH
    for fallback_dir in ('/usr/local/bin', '/usr/bin'):
        if os.access(os.path.join(fallback_dir, name), os.X_OK):
            return True

    return False
|
|
|
|
|
2016-09-01 17:40:02 +00:00
|
|
|
|
2019-10-11 10:30:32 +00:00
|
|
|
def get_additional_client_options(args):
    """Render the extra client options (args.client_option) as a single
    command-line string, each option prefixed with '--'.

    Returns an empty string when no options were given.
    """
    options = args.client_option
    if not options:
        return ''
    return ' '.join(['--' + opt for opt in options])
|
|
|
|
|
|
|
|
|
|
|
|
def get_additional_client_options_url(args):
    """Render the extra client options (args.client_option) as an HTTP URL
    parameter suffix joined with '&'.

    Returns an empty string when no options were given.
    """
    options = args.client_option
    return '&'.join(options) if options else ''
|
2019-10-11 10:30:32 +00:00
|
|
|
|
|
|
|
|
2020-07-03 10:57:16 +00:00
|
|
|
def collect_tests_to_skip(skip_list_path, build_flags):
    """Read the JSON skip-list file and return the set of test names to
    skip for the given build flags.

    The file is expected to map a build flag to a list of test names.
    A missing file contributes no skipped tests, and so does a flag that
    has no entry in the file.
    """
    result = set()
    if not os.path.exists(skip_list_path):
        return result

    with open(skip_list_path, 'r') as skip_list_file:
        skip_dict = json.load(skip_list_file)

    for build_flag in build_flags:
        # Tolerate flags with no entry in the skip-list file; the
        # original direct indexing raised KeyError for them.
        result |= set(skip_dict.get(build_flag, []))

    if result:
        print("Found file with skip-list {}, {} test will be skipped".format(skip_list_path, len(result)))

    return result
|
|
|
|
|
2016-09-01 17:40:02 +00:00
|
|
|
if __name__ == '__main__':
    # Entry point: build the CLI parser, normalize the parsed options
    # (several defaults depend on the environment and on each other, so
    # the order of the blocks below matters), then hand off to main().
    parser=ArgumentParser(description='ClickHouse functional tests')
    parser.add_argument('-q', '--queries', help='Path to queries dir')
    parser.add_argument('--tmp', help='Path to tmp dir')
    parser.add_argument('-b', '--binary', default='clickhouse', help='Path to clickhouse binary or name of binary in PATH')
    parser.add_argument('-c', '--client', help='Client program')
    parser.add_argument('--extract_from_config', help='extract-from-config program')
    parser.add_argument('--configclient', help='Client config (if you use not default ports)')
    parser.add_argument('--configserver', default= '/etc/clickhouse-server/config.xml', help='Preprocessed server config')
    parser.add_argument('-o', '--output', help='Output xUnit compliant test report directory')
    parser.add_argument('-t', '--timeout', type=int, default=600, help='Timeout for each test case in seconds')
    parser.add_argument('test', nargs='*', help='Optional test case name regex')
    parser.add_argument('-d', '--disabled', action='store_true', default=False, help='Also run disabled tests')
    parser.add_argument('--stop', action='store_true', default=None, dest='stop', help='Stop on network errors')
    parser.add_argument('--order', default='desc', choices=['asc', 'desc', 'random'], help='Run order')
    parser.add_argument('--testname', action='store_true', default=None, dest='testname', help='Make query with test name before test run')
    parser.add_argument('--hung-check', action='store_true', default=False)
    parser.add_argument('--force-color', action='store_true', default=False)
    parser.add_argument('--database', help='Database for tests (random name test_XXXXXX by default)')
    parser.add_argument('--parallel', default='1/1', help='One parallel test run number/total')
    # nargs='?' lets a bare `-j` be given without a value, yielding None,
    # which is replaced with the CPU count below.
    parser.add_argument('-j', '--jobs', default=1, nargs='?', type=int, help='Run all tests in parallel')
    parser.add_argument('-U', '--unified', default=3, type=int, help='output NUM lines of unified context')
    parser.add_argument('-r', '--server-check-retries', default=30, type=int, help='Num of tries to execute SELECT 1 before tests started')
    parser.add_argument('--skip-list', help="Path to skip-list file")

    parser.add_argument('--no-stateless', action='store_true', help='Disable all stateless tests')
    parser.add_argument('--no-stateful', action='store_true', help='Disable all stateful tests')
    parser.add_argument('--skip', nargs='+', help="Skip these tests")
    parser.add_argument('--no-long', action='store_false', dest='no_long', help='Do not run long tests')
    parser.add_argument('--client-option', nargs='+', help='Specify additional client argument')
    parser.add_argument('--print-time', action='store_true', dest='print_time', help='Print test time')

    # --zookeeper / --no-zookeeper and --shard / --no-shard are tri-state:
    # the shared dest stays None when neither flag of the pair is given.
    group=parser.add_mutually_exclusive_group(required=False)
    group.add_argument('--zookeeper', action='store_true', default=None, dest='zookeeper', help='Run zookeeper related tests')
    group.add_argument('--no-zookeeper', action='store_false', default=None, dest='zookeeper', help='Do not run zookeeper related tests')
    group=parser.add_mutually_exclusive_group(required=False)
    group.add_argument('--shard', action='store_true', default=None, dest='shard', help='Run sharding related tests (required to clickhouse-server listen 127.0.0.2 127.0.0.3)')
    group.add_argument('--no-shard', action='store_false', default=None, dest='shard', help='Do not run shard related tests')

    args = parser.parse_args()

    # Locate the queries directory: explicit --queries wins, then a local
    # 'queries' dir (source-tree run), then the standard install locations.
    if args.queries is None and os.path.isdir('queries'):
        args.queries = 'queries'
    elif args.queries is None:
        if (os.path.isdir('/usr/local/share/clickhouse-test/queries')):
            args.queries = '/usr/local/share/clickhouse-test/queries'
        if (args.queries is None and os.path.isdir('/usr/share/clickhouse-test/queries')):
            args.queries = '/usr/share/clickhouse-test/queries'
        # Installed layout: the queries dir is read-only, so temporaries
        # default to /tmp instead of the queries dir.
        if args.tmp is None:
            args.tmp = '/tmp/clickhouse-test'
    if args.queries is None:
        print("Failed to detect path to the queries directory. Please specify it with '--queries' option.", file=sys.stderr)
        exit(1)

    # Default skip-list lives next to the queries themselves.
    if args.skip_list is None:
        args.skip_list = os.path.join(args.queries, 'skip_list.json')

    # Source-tree run (local 'queries' dir): keep temporaries alongside
    # the queries, since args.tmp was not defaulted above in that branch.
    if args.tmp is None:
        args.tmp = args.queries

    # Resolve the client command: prefer a standalone `<binary>-client`,
    # otherwise fall back to the multi-call `<binary> client` form.
    if args.client is None:
        if find_binary(args.binary + '-client'):
            args.client = args.binary + '-client'
        elif find_binary(args.binary):
            args.client = args.binary + ' client'
        else:
            print("No 'clickhouse' binary found in PATH", file=sys.stderr)
            parser.print_help()
            exit(1)

    # Append connection overrides from the config file and the standard
    # CLICKHOUSE_* environment variables to the client command line.
    if args.configclient:
        args.client += ' --config-file=' + args.configclient
    if os.getenv("CLICKHOUSE_HOST"):
        args.client += ' --host=' + os.getenv("CLICKHOUSE_HOST")
    if os.getenv("CLICKHOUSE_PORT_TCP"):
        args.client += ' --port=' + os.getenv("CLICKHOUSE_PORT_TCP")
    if os.getenv("CLICKHOUSE_DATABASE"):
        args.client += ' --database=' + os.getenv("CLICKHOUSE_DATABASE")

    # Extra client options are propagated to child test scripts through
    # environment variables, appending to any values already set.
    if args.client_option:
        # Set options for client
        if 'CLICKHOUSE_CLIENT_OPT' in os.environ:
            os.environ['CLICKHOUSE_CLIENT_OPT'] += ' '
        else:
            os.environ['CLICKHOUSE_CLIENT_OPT'] = ''

        os.environ['CLICKHOUSE_CLIENT_OPT'] += get_additional_client_options(args)

        # Set options for curl
        if 'CLICKHOUSE_URL_PARAMS' in os.environ:
            os.environ['CLICKHOUSE_URL_PARAMS'] += '&'
        else:
            os.environ['CLICKHOUSE_URL_PARAMS'] = ''

        os.environ['CLICKHOUSE_URL_PARAMS'] += get_additional_client_options_url(args)

    # Resolve the extract-from-config helper the same two ways as the
    # client above: standalone binary, or multi-call subcommand.
    if args.extract_from_config is None:
        if os.access(args.binary + '-extract-from-config', os.X_OK):
            args.extract_from_config = args.binary + '-extract-from-config'
        else:
            args.extract_from_config = args.binary + ' extract-from-config'

    # Bare `-j` with no value (see nargs='?' above) means "use all CPUs".
    if args.jobs is None:
        args.jobs = multiprocessing.cpu_count()

    main(args)
|