#!/usr/bin/env python
import sys
import os
import os.path
import re
import lxml.etree as et

from argparse import ArgumentParser
from argparse import FileType
from pprint import pprint
import shlex
import subprocess
from subprocess import check_call
from subprocess import Popen
from subprocess import PIPE
from subprocess import CalledProcessError
from datetime import datetime
from time import sleep
from errno import ESRCH
from termcolor import colored
from random import random
import commands


OP_SQUARE_BRACKET = colored("[", attrs=['bold'])
CL_SQUARE_BRACKET = colored("]", attrs=['bold'])

MSG_FAIL = OP_SQUARE_BRACKET + colored(" FAIL ", "red", attrs=['bold']) + CL_SQUARE_BRACKET
MSG_UNKNOWN = OP_SQUARE_BRACKET + colored(" UNKNOWN ", "yellow", attrs=['bold']) + CL_SQUARE_BRACKET
MSG_OK = OP_SQUARE_BRACKET + colored(" OK ", "green", attrs=['bold']) + CL_SQUARE_BRACKET
MSG_SKIPPED = OP_SQUARE_BRACKET + colored(" SKIPPED ", "cyan", attrs=['bold']) + CL_SQUARE_BRACKET

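# Transient errors that usually mean the ZooKeeper session was lost rather than
# that the test itself is broken; a test failing with one of these messages is
# re-run with exponential backoff (see the retry loop in main()).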
MESSAGES_TO_RETRY = [
    "DB::Exception: ZooKeeper session has been expired",
    "Coordination::Exception: Connection loss",
]


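# Replace numeric character references with the characters they encode and drop
# raw control characters, so that diff output can still be embedded into a CDATA
# section of the XML report when it contains garbage bytes.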
def remove_control_characters(s):
    """
    https://github.com/html5lib/html5lib-python/issues/96#issuecomment-43438438
    """
    def str_to_int(s, default, base=10):
        if int(s, base) < 0x10000:
            return unichr(int(s, base))
        return default
    s = re.sub(ur"&#(\d+);?", lambda c: str_to_int(c.group(1), c.group(0)), s)
    s = re.sub(ur"&#[xX]([0-9a-fA-F]+);?", lambda c: str_to_int(c.group(1), c.group(0), base=16), s)
    s = re.sub(ur"[\x00-\x08\x0b\x0e-\x1f\x7f]", "", s)
    return s


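# Run one test case: .sql files are piped through the client in --multiquery mode,
# everything else (.sh, .py) is executed directly. The child process is polled
# until it exits or args.timeout expires; the process handle is returned together
# with the decoded stdout and stderr.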
def run_single_test(args, ext, server_logs_level, case_file, stdout_file, stderr_file):
    if ext == '.sql':
        command = "{0} --send_logs_level={1} --testmode --multiquery < {2} > {3} 2> {4}".format(args.client, server_logs_level, case_file, stdout_file, stderr_file)
    else:
        command = "{} > {} 2> {}".format(case_file, stdout_file, stderr_file)

    proc = Popen(command, shell = True)
    start_time = datetime.now()
    while (datetime.now() - start_time).total_seconds() < args.timeout and proc.poll() is None:
        sleep(0.01)

    stdout = open(stdout_file, 'r').read() if os.path.exists(stdout_file) else ''
    stdout = unicode(stdout, errors='replace', encoding='utf-8')
    stderr = open(stderr_file, 'r').read() if os.path.exists(stderr_file) else ''
    stderr = unicode(stderr, errors='replace', encoding='utf-8')

    return proc, stdout, stderr


def need_retry(stderr):
    return any(msg in stderr for msg in MESSAGES_TO_RETRY)


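# Helpers for the --hung-check pass: dump SHOW PROCESSLIST, find the server pid
# by the TCP port it listens on, and collect stack traces of all server threads
# with gdb.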
def get_processlist(client_cmd):
    try:
        return subprocess.check_output("{} --query 'SHOW PROCESSLIST FORMAT Vertical'".format(client_cmd), shell=True)
    except:
        return "" # server seems dead


def get_stacktraces(server_pid):
    cmd = "gdb -q -ex 'set pagination off' -ex 'backtrace' -ex 'thread apply all backtrace' -ex 'detach' -ex 'quit' --pid {} 2>/dev/null".format(server_pid)
    try:
        return subprocess.check_output(cmd, shell=True)
    except Exception as ex:
        return "Error occurred while receiving stack traces {}".format(str(ex))


def get_server_pid(server_tcp_port):
    cmd = "lsof -i tcp:{port} | grep '*:{port}'".format(port=server_tcp_port)
    try:
        output = subprocess.check_output(cmd, shell=True)
        if output:
            columns = output.strip().split()
            return int(columns[1])
        else:
            return None # server dead
    except Exception as ex:
        return None


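# Walk every suite directory under --queries, run the test cases it contains,
# collect per-case results into xUnit-style XML reports and print a colored
# summary. Exits non-zero if any test failed or a hung query was detected.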
def main(args):

    SERVER_DIED = False

    def is_data_present():
        clickhouse_proc = Popen(shlex.split(args.client), stdin=PIPE, stdout=PIPE, stderr=PIPE)
        (stdout, stderr) = clickhouse_proc.communicate("EXISTS TABLE test.hits")
        if clickhouse_proc.returncode != 0:
            raise CalledProcessError(clickhouse_proc.returncode, args.client, stderr)

        return stdout.startswith('1')

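    # Write the report for a single test case to <destination>/<suite>/<case>.xml,
    # wrapped in the testsuites/testsuite elements that xUnit consumers expect.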
    def dump_report(destination, suite, test_case, report):
        if destination is not None:
            destination_file = os.path.join(destination, suite, test_case + ".xml")
            destination_dir = os.path.dirname(destination_file)
            if not os.path.exists(destination_dir):
                os.makedirs(destination_dir)
            with open(destination_file, 'w') as report_file:
                report_root = et.Element("testsuites", attrib = {'name': 'ClickHouse Tests'})
                report_suite = et.Element("testsuite", attrib = {"name": suite})
                report_suite.append(report)
                report_root.append(report_suite)
                report_file.write(et.tostring(report_root, encoding = "UTF-8", xml_declaration=True, pretty_print=True))

    base_dir = os.path.abspath(args.queries)
    tmp_dir = os.path.abspath(args.tmp)

    # Keep the same default values as in queries/shell_config.sh
    os.environ.setdefault("CLICKHOUSE_BINARY", args.binary)
    #os.environ.setdefault("CLICKHOUSE_CLIENT", args.client)
    os.environ.setdefault("CLICKHOUSE_CONFIG", args.configserver)
    if args.configclient:
        os.environ.setdefault("CLICKHOUSE_CONFIG_CLIENT", args.configclient)
    os.environ.setdefault("CLICKHOUSE_TMP", tmp_dir)

    # Force server warnings to be printed to stderr.
    # Shell-script tests may override this logging level.
    server_logs_level = "warning"
    os.environ.setdefault("CLICKHOUSE_CLIENT_SERVER_LOGS_LEVEL", server_logs_level)

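    # When --zookeeper / --shard are not given explicitly, detect them from the
    # server config: a non-empty <zookeeper> section enables ZooKeeper tests, and
    # a listen_host of 127.0.0.2 or :: enables shard tests.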
    if args.zookeeper is None:
        code, out = commands.getstatusoutput(args.extract_from_config + " --try --config " + args.configserver + ' --key zookeeper | grep . | wc -l')
        try:
            if int(out) > 0:
                args.zookeeper = True
            else:
                args.zookeeper = False
        except ValueError:
            args.zookeeper = False

    if args.shard is None:
        code, out = commands.getstatusoutput(args.extract_from_config + " --try --config " + args.configserver + ' --key listen_host | grep -E "127.0.0.2|::"')
        if out:
            args.shard = True
        else:
            args.shard = False

    passed_total = 0
    skipped_total = 0
    failures_total = 0

    clickhouse_proc_create = Popen(shlex.split(args.client), stdin=PIPE, stdout=PIPE, stderr=PIPE)
    clickhouse_proc_create.communicate("CREATE DATABASE IF NOT EXISTS test")

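    # Suite directories are named "<number>_<name>" (e.g. "0_stateless"); order
    # them by their numeric prefix, or randomly when --order random is given.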
    def suite_key_func(item):
        if args.order == 'random':
            return random()

        if -1 == item.find('_'):
            return 99998

        prefix, suffix = item.split('_', 1)

        try:
            return int(prefix), suffix
        except ValueError:
            return 99997

    for suite in sorted(os.listdir(base_dir), key=suite_key_func):
        if SERVER_DIED:
            break

        suite_dir = os.path.join(base_dir, suite)
        suite_re_obj = re.search('^[0-9]+_(.*)$', suite)
        if not suite_re_obj:  # skip .gitignore and so on
            continue

        suite_tmp_dir = os.path.join(tmp_dir, suite)
        if not os.path.exists(suite_tmp_dir):
            os.makedirs(suite_tmp_dir)

        suite = suite_re_obj.group(1)
        if os.path.isdir(suite_dir):
            print("\nRunning {} tests.\n".format(suite))

            failures = 0
            failures_chain = 0
            if 'stateful' in suite and not is_data_present():
                print("Won't run stateful tests because test data wasn't loaded.")
                continue
            if 'stateless' in suite and args.no_stateless:
                print("Won't run stateless tests because they were manually disabled.")
                continue

            # Reverse sort order: we want to run the newest tests first.
            # The suffix (sub-test name) is not reversed.
            def key_func(item):
                if args.order == 'random':
                    return random()

                reverse = 1 if args.order == 'asc' else -1

                if -1 == item.find('_'):
                    return 99998

                prefix, suffix = item.split('_', 1)

                try:
                    return reverse * int(prefix), suffix
                except ValueError:
                    return 99997

            for case in sorted(filter(lambda case: re.search(args.test, case) if args.test else True, os.listdir(suite_dir)), key=key_func):
                if SERVER_DIED:
                    break

                case_file = os.path.join(suite_dir, case)
                (name, ext) = os.path.splitext(case)

                if os.path.isfile(case_file) and (ext == '.sql' or ext == '.sh' or ext == '.py'):
                    report_testcase = et.Element("testcase", attrib = {"name": name})

                    try:
                        print "{0:72}".format(name + ": "),
                        sys.stdout.flush()

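                        # Honor the skip conditions before running anything: explicit
                        # --skip patterns, and tests that need ZooKeeper, extra shards
                        # or a long runtime when those are disabled.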
                        if args.skip and any(s in name for s in args.skip):
                            report_testcase.append(et.Element("skipped", attrib = {"message": "skip"}))
                            print(MSG_SKIPPED + " - skip")
                            skipped_total += 1
                        elif not args.zookeeper and 'zookeeper' in name:
                            report_testcase.append(et.Element("skipped", attrib = {"message": "no zookeeper"}))
                            print(MSG_SKIPPED + " - no zookeeper")
                            skipped_total += 1
                        elif not args.shard and 'shard' in name:
                            report_testcase.append(et.Element("skipped", attrib = {"message": "no shard"}))
                            print(MSG_SKIPPED + " - no shard")
                            skipped_total += 1
                        elif not args.no_long and 'long' in name:
                            report_testcase.append(et.Element("skipped", attrib = {"message": "no long"}))
                            print(MSG_SKIPPED + " - no long")
                            skipped_total += 1
                        else:
                            disabled_file = os.path.join(suite_dir, name) + '.disabled'

                            if os.path.exists(disabled_file) and not args.disabled:
                                message = open(disabled_file, 'r').read()
                                report_testcase.append(et.Element("skipped", attrib = {"message": message}))
                                print(MSG_SKIPPED + " - " + message)
                            else:
                                if args.testname:
                                    clickhouse_proc = Popen(shlex.split(args.client), stdin=PIPE, stdout=PIPE, stderr=PIPE)
                                    clickhouse_proc.communicate("SELECT 'Running test {suite}/{case} from pid={pid}';".format(pid = os.getpid(), case = case, suite = suite))

                                reference_file = os.path.join(suite_dir, name) + '.reference'
                                stdout_file = os.path.join(suite_tmp_dir, name) + '.stdout'
                                stderr_file = os.path.join(suite_tmp_dir, name) + '.stderr'

                                proc, stdout, stderr = run_single_test(args, ext, server_logs_level, case_file, stdout_file, stderr_file)
                                if proc.returncode is None:
                                    try:
                                        proc.kill()
                                    except OSError as e:
                                        if e.errno != ESRCH:
                                            raise

                                    failure = et.Element("failure", attrib = {"message": "Timeout"})
                                    report_testcase.append(failure)

                                    failures += 1
                                    print("{0} - Timeout!".format(MSG_FAIL))
                                else:
                                    counter = 1
                                    while proc.returncode != 0 and need_retry(stderr):
                                        proc, stdout, stderr = run_single_test(args, ext, server_logs_level, case_file, stdout_file, stderr_file)
                                        sleep(2**counter)
                                        counter += 1
                                        if counter > 6:
                                            break

                                    if proc.returncode != 0:
                                        failure = et.Element("failure", attrib = {"message": "return code {}".format(proc.returncode)})
                                        report_testcase.append(failure)

                                        stdout_element = et.Element("system-out")
                                        stdout_element.text = et.CDATA(stdout)
                                        report_testcase.append(stdout_element)

                                        failures += 1
                                        failures_chain += 1
                                        print("{0} - return code {1}".format(MSG_FAIL, proc.returncode))

                                        if stderr:
                                            stderr_element = et.Element("system-err")
                                            stderr_element.text = et.CDATA(stderr)
                                            report_testcase.append(stderr_element)
                                            print(stderr.encode('utf-8'))

                                        if args.stop and ('Connection refused' in stderr or 'Attempt to read after eof' in stderr) and not 'Received exception from server' in stderr:
                                            SERVER_DIED = True

                                    elif stderr:
                                        failure = et.Element("failure", attrib = {"message": "having stderr"})
                                        report_testcase.append(failure)

                                        stderr_element = et.Element("system-err")
                                        stderr_element.text = et.CDATA(stderr)
                                        report_testcase.append(stderr_element)

                                        failures += 1
                                        failures_chain += 1
                                        print("{0} - having stderr:\n{1}".format(MSG_FAIL, stderr.encode('utf-8')))
                                    elif 'Exception' in stdout:
                                        failure = et.Element("error", attrib = {"message": "having exception"})
                                        report_testcase.append(failure)

                                        stdout_element = et.Element("system-out")
                                        stdout_element.text = et.CDATA(stdout)
                                        report_testcase.append(stdout_element)

                                        failures += 1
                                        failures_chain += 1
                                        print("{0} - having exception:\n{1}".format(MSG_FAIL, stdout.encode('utf-8')))
                                    elif not os.path.isfile(reference_file):
                                        skipped = et.Element("skipped", attrib = {"message": "no reference file"})
                                        report_testcase.append(skipped)
                                        print("{0} - no reference file".format(MSG_UNKNOWN))
                                    else:
                                        result_is_different = subprocess.call(['diff', '-q', reference_file, stdout_file], stdout = PIPE)

                                        if result_is_different:
                                            diff = Popen(['diff', '--unified', reference_file, stdout_file], stdout = PIPE).communicate()[0]
                                            diff = unicode(diff, errors='replace', encoding='utf-8')
                                            cat = Popen(['cat', '-A'], stdin=PIPE, stdout=PIPE).communicate(input=diff)[0]

                                            failure = et.Element("failure", attrib = {"message": "result differs from reference"})
                                            report_testcase.append(failure)

                                            stdout_element = et.Element("system-out")
                                            try:
                                                stdout_element.text = et.CDATA(diff)
                                            except:
                                                stdout_element.text = et.CDATA(remove_control_characters(diff))

                                            report_testcase.append(stdout_element)
                                            failures += 1
                                            print("{0} - result differs from reference:\n{1}".format(MSG_FAIL, cat.encode('utf-8')))
                                        else:
                                            passed_total += 1
                                            failures_chain = 0
                                            print(MSG_OK)
                                            if os.path.exists(stdout_file):
                                                os.remove(stdout_file)
                                            if os.path.exists(stderr_file):
                                                os.remove(stderr_file)
                    except KeyboardInterrupt as e:
                        print(colored("Break tests execution", "red"))
                        raise e
                    except:
                        (exc_type, exc_value) = sys.exc_info()[:2]
                        error = et.Element("error", attrib = {"type": exc_type.__name__, "message": str(exc_value)})
                        report_testcase.append(error)

                        failures += 1
                        print("{0} - Test internal error: {1}\n{2}".format(MSG_FAIL, exc_type.__name__, exc_value))
                    finally:
                        dump_report(args.output, suite, name, report_testcase)

                    if failures_chain >= 20:
                        break

            failures_total = failures_total + failures

    exit_code = 0
    if failures_total > 0:
        print(colored("\nHaving {failures_total} errors! {passed_total} tests passed. {skipped_total} tests skipped.".format(passed_total = passed_total, skipped_total = skipped_total, failures_total = failures_total), "red", attrs=["bold"]))
        exit_code = 1
    else:
        print(colored("\n{passed_total} tests passed. {skipped_total} tests skipped.".format(passed_total = passed_total, skipped_total = skipped_total), "green", attrs=["bold"]))

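    # With --hung-check, look for queries that are still running after all tests
    # have finished; if any are found, dump the process list and, when the server
    # pid can be determined, gdb stack traces of all its threads.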
    if args.hung_check:
        processlist = get_processlist(args.client)
        if processlist:
            server_pid = get_server_pid(os.getenv("CLICKHOUSE_PORT_TCP", '9000'))
            print(colored("\nFound hung queries in processlist:", "red", attrs=["bold"]))
            print(processlist)
            if server_pid:
                print("\nStacktraces of all threads:")
                print(get_stacktraces(server_pid))
            exit_code = 1
        else:
            print(colored("\nNo queries hung.", "green", attrs=["bold"]))

    sys.exit(exit_code)


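# Return True if `name` is an executable path, an executable found on PATH,
# or an executable in /usr/bin.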
def find_binary(name):
    if os.path.exists(name) and os.access(name, os.X_OK):
        return True
    paths = os.environ.get("PATH").split(':')
    for path in paths:
        if os.access(os.path.join(path, name), os.X_OK):
            return True

    # maybe it wasn't in PATH
    return os.access(os.path.join('/usr/bin', name), os.X_OK)


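# Typical invocations (the script path and test name below are illustrative):
#   ./clickhouse-test                                         # auto-detect the queries dir, run everything
#   ./clickhouse-test --no-long 00001_select                  # only run cases matching the regex
#   ./clickhouse-test -q /usr/share/clickhouse-test/queries --hung-check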
if __name__ == '__main__':
    parser = ArgumentParser(description='ClickHouse functional tests')
    parser.add_argument('-q', '--queries', help='Path to queries dir')
    parser.add_argument('--tmp', help='Path to tmp dir')
    parser.add_argument('-b', '--binary', default='clickhouse', help='Path to clickhouse binary or name of binary in PATH')
    parser.add_argument('-c', '--client', help='Client program')
    parser.add_argument('--extract_from_config', help='extract-from-config program')
    parser.add_argument('--configclient', help='Client config (if you use non-default ports)')
    parser.add_argument('--configserver', default='/etc/clickhouse-server/config.xml', help='Preprocessed server config')
    parser.add_argument('-o', '--output', help='Output xUnit compliant test report directory')
    parser.add_argument('-t', '--timeout', type=int, default=600, help='Timeout for each test case in seconds')
    parser.add_argument('test', nargs='?', help='Optional test case name regex')
    parser.add_argument('-d', '--disabled', action='store_true', default=False, help='Also run disabled tests')
    parser.add_argument('--stop', action='store_true', default=None, dest='stop', help='Stop on network errors')
    parser.add_argument('--order', default='desc', help='Run order (asc, desc, random)')
    parser.add_argument('--testname', action='store_true', default=None, dest='testname', help='Make query with test name before test run')
    parser.add_argument('--hung-check', action='store_true', default=False, help='Check for hung queries after the tests finish')
    parser.add_argument('--no-stateless', action='store_true', help='Disable all stateless tests')
    parser.add_argument('--skip', nargs='+', help="Skip these tests")
    parser.add_argument('--no-long', action='store_false', dest='no_long', help='Do not run long tests')

    group = parser.add_mutually_exclusive_group(required=False)
    group.add_argument('--zookeeper', action='store_true', default=None, dest='zookeeper', help='Run zookeeper related tests')
    group.add_argument('--no-zookeeper', action='store_false', default=None, dest='zookeeper', help='Do not run zookeeper related tests')
    group = parser.add_mutually_exclusive_group(required=False)
    group.add_argument('--shard', action='store_true', default=None, dest='shard', help='Run sharding related tests (requires clickhouse-server to listen on 127.0.0.2 and 127.0.0.3)')
    group.add_argument('--no-shard', action='store_false', default=None, dest='shard', help='Do not run shard related tests')

    args = parser.parse_args()

    if args.queries is None and os.path.isdir('queries'):
        args.queries = 'queries'
    elif args.queries is None:
        if (os.path.isdir('/usr/local/share/clickhouse-test/queries')):
            args.queries = '/usr/local/share/clickhouse-test/queries'
        if (args.queries is None and os.path.isdir('/usr/share/clickhouse-test/queries')):
            args.queries = '/usr/share/clickhouse-test/queries'
        if args.tmp is None:
            args.tmp = '/tmp/clickhouse-test'
    if args.tmp is None:
        args.tmp = args.queries

    if args.client is None:
        if find_binary(args.binary + '-client'):
            args.client = args.binary + '-client'
        elif find_binary(args.binary):
            args.client = args.binary + ' client'
        else:
            print("No 'clickhouse' binary found in PATH")
            parser.print_help()
            exit(1)

    if args.configclient:
        args.client += ' --config-file=' + args.configclient
    if os.getenv("CLICKHOUSE_HOST"):
        args.client += ' --host=' + os.getenv("CLICKHOUSE_HOST")
    if os.getenv("CLICKHOUSE_PORT_TCP"):
        args.client += ' --port=' + os.getenv("CLICKHOUSE_PORT_TCP")

    if args.extract_from_config is None:
        if os.access(args.binary + '-extract-from-config', os.X_OK):
            args.extract_from_config = args.binary + '-extract-from-config'
        else:
            args.extract_from_config = args.binary + ' extract-from-config'

    main(args)