#!/usr/bin/env python3
# pylint: disable=too-many-return-statements
# pylint: disable=global-variable-not-assigned
# pylint: disable=too-many-lines

import enum
from queue import Full
import shutil
import sys
import os
import os.path
import signal
import re
import copy
import traceback
import math

# Not requests, to avoid requiring extra dependency.
import http.client
import urllib.parse
import json

# for crc32
import zlib

from argparse import ArgumentParser
from typing import Tuple, Union, Optional, Dict, Set, List
import subprocess
from subprocess import Popen
from subprocess import PIPE
from datetime import datetime
from time import time, sleep
from errno import ESRCH

try:
    import termcolor  # type: ignore
except ImportError:
    termcolor = None

import random
import string
import multiprocessing
import socket
from contextlib import closing

USE_JINJA = True
try:
    import jinja2
except ImportError:
    USE_JINJA = False
    print("WARNING: jinja2 not installed! Template tests will be skipped.")

MESSAGES_TO_RETRY = [
    "ConnectionPoolWithFailover: Connection failed at try",
    "DB::Exception: New table appeared in database being dropped or detached. Try again",
    "is already started to be removing by another replica right now",
    "DB::Exception: Cannot enqueue query",
    "is executing longer than distributed_ddl_task_timeout",  # FIXME
]

MAX_RETRIES = 3

TEST_FILE_EXTENSIONS = [".sql", ".sql.j2", ".sh", ".py", ".expect"]

VERSION_PATTERN = r"^((\d+\.)?(\d+\.)?(\d+\.)?\d+)$"


def stringhash(s):
    # default hash() function consistent
    # only during process invocation https://stackoverflow.com/a/42089311
    return zlib.crc32(s.encode("utf-8"))
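

# Minimal error type for the HTTP interface below: carries the HTTP status
# code alongside the server's response body so callers can report both.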
class HTTPError(Exception):
    def __init__(self, message=None, code=None):
        self.message = message
        self.code = code
        super().__init__(message)

    def __str__(self):
        return f"Code: {self.code}. {self.message}"


# Helpers to execute queries via HTTP interface.
def clickhouse_execute_http(
    base_args, query, timeout=30, settings=None, default_format=None
):
    if args.secure:
        client = http.client.HTTPSConnection(
            host=base_args.tcp_host, port=base_args.http_port, timeout=timeout
        )
    else:
        client = http.client.HTTPConnection(
            host=base_args.tcp_host, port=base_args.http_port, timeout=timeout
        )

    timeout = int(timeout)
    params = {
        "query": query,
        # hung check in stress tests may remove the database,
        # hence we should use 'system'.
        "database": "system",
        "connect_timeout": timeout,
        "receive_timeout": timeout,
        "send_timeout": timeout,
        "http_connection_timeout": timeout,
        "http_receive_timeout": timeout,
        "http_send_timeout": timeout,
    }
    if settings is not None:
        params.update(settings)
    if default_format is not None:
        params["default_format"] = default_format

    client.request(
        "POST",
        f"/?{base_args.client_options_query_str}{urllib.parse.urlencode(params)}",
    )
    res = client.getresponse()
    data = res.read()
    if res.status != 200:
        raise HTTPError(data.decode(), res.status)

    return data
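

# Thin wrapper that returns the response body with surrounding whitespace
# (including the trailing newline) stripped off.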
def clickhouse_execute(base_args, query, timeout=30, settings=None):
    return clickhouse_execute_http(base_args, query, timeout, settings).strip()


# NOTE: the default timeout is higher here (60 instead of 30 seconds), since
# on a busy CI machine even obtaining the processlist can take more than
# 30 seconds.
def clickhouse_execute_json(base_args, query, timeout=60, settings=None):
    data = clickhouse_execute_http(base_args, query, timeout, settings, "JSONEachRow")
    if not data:
        return None
    rows = []
    for row in data.strip().splitlines():
        rows.append(json.loads(row))
    return rows
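

# Subclassing KeyboardInterrupt means "except KeyboardInterrupt" clauses that
# handle Ctrl+C also catch termination by signal.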
class Terminated(KeyboardInterrupt):
    pass


def signal_handler(sig, frame):
    raise Terminated(f"Terminated with {sig} signal")


def stop_tests():
    global stop_tests_triggered_lock
    global stop_tests_triggered
    global restarted_tests

    with stop_tests_triggered_lock:
        print("Stopping tests")
        if not stop_tests_triggered.is_set():
            stop_tests_triggered.set()

            # materialize the multiprocessing.Manager().list() object before
            # sending SIGTERM, since this object is a proxy that requires
            # communicating with the manager thread; after SIGTERM is sent,
            # that thread dies, and any access to the "restarted_tests"
            # variable fails with ConnectionRefusedError.
            restarted_tests = [*restarted_tests]

            # send signal to all processes in group to avoid hung check triggering
            # (to avoid terminating clickhouse-test itself, the signal should be ignored)
            signal.signal(signal.SIGTERM, signal.SIG_IGN)
            os.killpg(os.getpgid(os.getpid()), signal.SIGTERM)
            signal.signal(signal.SIGTERM, signal.SIG_DFL)
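

# Returns the engine clause appended to "CREATE DATABASE <name>":
# e.g. " ENGINE=Atomic" when args.db_engine is "Atomic", or "" for the default.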
def get_db_engine(args, database_name):
    if args.replicated_database:
        return f" ON CLUSTER test_cluster_database_replicated \
            ENGINE=Replicated('/test/clickhouse/db/{database_name}', \
            '{{shard}}', '{{replica}}')"
    if args.db_engine:
        return " ENGINE=" + args.db_engine
    return ""  # Will use default engine


def get_create_database_settings(args, testcase_args):
    create_database_settings = dict()
    if testcase_args:
        create_database_settings["log_comment"] = testcase_args.testcase_basename
    if args.db_engine == "Ordinary":
        create_database_settings["allow_deprecated_database_ordinary"] = 1
    return create_database_settings
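

# Returns the ZooKeeper session uptime in seconds (the minimum across all
# replicas when running with a replicated database), or None if the query fails.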
def get_zookeeper_session_uptime(args):
    try:
        if args.replicated_database:
            return int(
                clickhouse_execute(
                    args,
                    """
                    SELECT min(materialize(zookeeperSessionUptime()))
                    FROM clusterAllReplicas('test_cluster_database_replicated', system.one)
                    """,
                )
            )
        else:
            return int(clickhouse_execute(args, "SELECT zookeeperSessionUptime()"))
    except Exception:
        return None
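

# A failed test is worth retrying when its output contains one of
# MESSAGES_TO_RETRY, or when the ZooKeeper session was reset while it ran
# (the session uptime is shorter than the test's run time).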
def need_retry(args, stdout, stderr, total_time):
    if args.check_zookeeper_session:
        # Sometimes we may get unexpected exception like "Replica is readonly" or "Shutdown is called for table"
        # instead of "Session expired" or "Connection loss"
        # Retry if session was expired during test execution.
        # If ZooKeeper is configured, then it's more reliable than checking stderr,
        # but the following condition is always true if ZooKeeper is not configured.
        session_uptime = get_zookeeper_session_uptime(args)
        if session_uptime is not None and session_uptime < math.ceil(total_time):
            return True
    return any(msg in stdout for msg in MESSAGES_TO_RETRY) or any(
        msg in stderr for msg in MESSAGES_TO_RETRY
    )
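

# Snapshot of currently running queries; with a replicated database the
# processlist is collected from every replica in the cluster.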
def get_processlist(args):
    if args.replicated_database:
        return clickhouse_execute_json(
            args,
            """
            SELECT materialize((hostName(), tcpPort())) as host, *
            FROM clusterAllReplicas('test_cluster_database_replicated', system.processes)
            WHERE query NOT LIKE '%system.processes%'
            """,
        )
    else:
        return clickhouse_execute_json(args, "SHOW PROCESSLIST")


def get_transactions_list(args):
    try:
        if args.replicated_database:
            return clickhouse_execute_json(
                args,
                "SELECT materialize((hostName(), tcpPort())) as host, * FROM "
                "clusterAllReplicas('test_cluster_database_replicated', system.transactions)",
            )
        else:
            return clickhouse_execute_json(args, "select * from system.transactions")
    except Exception as e:
        return f"Cannot get list of transactions: {e}"


# collect server stacktraces using gdb
def get_stacktraces_from_gdb(server_pid):
    try:
        cmd = f"gdb -batch -ex 'thread apply all backtrace' -p {server_pid}"
        return subprocess.check_output(cmd, shell=True).decode("utf-8")
    except Exception as e:
        print(f"Error occurred while receiving stack traces from gdb: {e}")
    return None


# collect server stacktraces from system.stack_trace table
# it does not work in Sandbox
def get_stacktraces_from_clickhouse(args):
    settings_str = " ".join(
        [
            get_additional_client_options(args),
            "--allow_introspection_functions=1",
            "--skip_unavailable_shards=1",
        ]
    )
    replicated_msg = (
        f"{args.client} {settings_str} --query "
        '"SELECT materialize((hostName(), tcpPort())) as host, thread_id, '
        "arrayStringConcat(arrayMap(x, y -> concat(x, ': ', y), "
        "arrayMap(x -> addressToLine(x), trace), "
        "arrayMap(x -> demangle(addressToSymbol(x)), trace)), '\n') as trace "
        "FROM clusterAllReplicas('test_cluster_database_replicated', 'system.stack_trace') "
        'ORDER BY host, thread_id FORMAT Vertical"'
    )

    msg = (
        f"{args.client} {settings_str} --query "
        "\"SELECT arrayStringConcat(arrayMap(x, y -> concat(x, ': ', y), "
        "arrayMap(x -> addressToLine(x), trace), "
        "arrayMap(x -> demangle(addressToSymbol(x)), trace)), '\n') as trace "
        'FROM system.stack_trace FORMAT Vertical"'
    )

    try:
        return subprocess.check_output(
            replicated_msg if args.replicated_database else msg,
            shell=True,
            stderr=subprocess.STDOUT,
        ).decode("utf-8")
    except Exception as e:
        print(f"Error occurred while receiving stack traces from client: {e}")
    return None


def print_stacktraces() -> None:
    server_pid = get_server_pid()

    bt = None

    if server_pid and not args.replicated_database:
        print("")
        print(
            f"Located ClickHouse server process {server_pid} listening at TCP port {args.tcp_port}"
        )
        print("Collecting stacktraces from all running threads with gdb:")

        bt = get_stacktraces_from_gdb(server_pid)

        if bt is None or len(bt) < 1000:
            print("Got suspiciously small stacktraces: ", bt)
            bt = None

    if bt is None:
        print("\nCollecting stacktraces from system.stack_trace table:")
        bt = get_stacktraces_from_clickhouse(args)

    if bt is not None:
        print(bt)
        return

    print(
        colored(
            f"\nUnable to locate ClickHouse server process listening at TCP port "
            f"{args.tcp_port}. It must have crashed or exited prematurely!",
            args,
            "red",
            attrs=["bold"],
        )
    )


def get_server_pid():
    # lsof does not work in stress tests for some reason
    cmd_lsof = f"lsof -i tcp:{args.tcp_port} -s tcp:LISTEN -Fp | sed 's/^p//p;d'"
    cmd_pidof = "pidof -s clickhouse-server"

    commands = [cmd_lsof, cmd_pidof]
    output = None

    for cmd in commands:
        try:
            output = subprocess.check_output(
                cmd, shell=True, stderr=subprocess.STDOUT, universal_newlines=True
            )
            if output:
                return int(output)
        except Exception as e:
            print(f"Cannot get server pid with {cmd}, got {output}: {e}")

    return None  # most likely server is dead
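

# Colorize the text only when termcolor is available and stdout is a terminal
# (or color is forced via args.force_color); otherwise return it unchanged.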
def colored(text, args, color=None, on_color=None, attrs=None):
    if termcolor and (sys.stdout.isatty() or args.force_color):
        return termcolor.colored(text, color, on_color, attrs)
    else:
        return text


class TestStatus(enum.Enum):
    FAIL = "FAIL"
    UNKNOWN = "UNKNOWN"
    OK = "OK"
    SKIPPED = "SKIPPED"


class FailureReason(enum.Enum):
    # FAIL reasons
    TIMEOUT = "Timeout!"
    SERVER_DIED = "server died"
    EXIT_CODE = "return code: "
    STDERR = "having stderr: "
    EXCEPTION = "having exception in stdout: "
    RESULT_DIFF = "result differs from reference: "
    TOO_LONG = "Test runs too long (> 60s). Make it faster."
    INTERNAL_QUERY_FAIL = "Internal query (CREATE/DROP DATABASE) failed:"

    # SKIPPED reasons
    DISABLED = "disabled"
    SKIP = "skip"
    NO_JINJA = "no jinja"
    NO_ZOOKEEPER = "no zookeeper"
    NO_SHARD = "no shard"
    FAST_ONLY = "running fast tests only"
    NO_LONG = "not running long tests"
    REPLICATED_DB = "replicated-database"
    S3_STORAGE = "s3-storage"
    STRESS = "stress"
    BUILD = "not running for current build"
    BACKWARD_INCOMPATIBLE = "test is backward incompatible"

    # UNKNOWN reasons
    NO_REFERENCE = "no reference file"
    INTERNAL_ERROR = "Test internal error: "
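

# Each entry maps a setting name to a zero-argument generator; a fresh value
# is drawn for every test so the suite is exercised under varying settings.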
class SettingsRandomizer:
    settings = {
        "max_insert_threads": lambda: 0
        if random.random() < 0.5
        else random.randint(1, 16),
        "group_by_two_level_threshold": lambda: 1
        if random.random() < 0.1
        else 2**60
        if random.random() < 0.11
        else 100000,
        "group_by_two_level_threshold_bytes": lambda: 1
        if random.random() < 0.1
        else 2**60
        if random.random() < 0.11
        else 50000000,
        "distributed_aggregation_memory_efficient": lambda: random.randint(0, 1),
        "fsync_metadata": lambda: random.randint(0, 1),
        "output_format_parallel_formatting": lambda: random.randint(0, 1),
        "input_format_parallel_parsing": lambda: random.randint(0, 1),
        "min_chunk_bytes_for_parallel_parsing": lambda: max(
            1024, int(random.gauss(10 * 1024 * 1024, 5 * 1000 * 1000))
        ),
        "max_read_buffer_size": lambda: random.randint(500000, 1048576),
        "prefer_localhost_replica": lambda: random.randint(0, 1),
        "max_block_size": lambda: random.randint(8000, 100000),
        "max_threads": lambda: random.randint(1, 64),
        "optimize_or_like_chain": lambda: random.randint(0, 1),
        "optimize_read_in_order": lambda: random.randint(0, 1),
        "read_in_order_two_level_merge_threshold": lambda: random.randint(0, 100),
        "optimize_aggregation_in_order": lambda: random.randint(0, 1),
        "aggregation_in_order_max_block_bytes": lambda: random.randint(0, 50000000),
        "use_uncompressed_cache": lambda: random.randint(0, 1),
        "min_bytes_to_use_direct_io": lambda: 0
        if random.random() < 0.5
        else 1
        if random.random() < 0.2
        else random.randint(1, 1024 * 1024 * 1024),
        "min_bytes_to_use_mmap_io": lambda: 0
        if random.random() < 0.5
        else 1
        if random.random() < 0.2
        else random.randint(1, 1024 * 1024 * 1024),
        "local_filesystem_read_method": lambda: random.choice(
            ["read", "pread", "mmap", "pread_threadpool"]
        ),
        "remote_filesystem_read_method": lambda: random.choice(["read", "threadpool"]),
        "local_filesystem_read_prefetch": lambda: random.randint(0, 1),
        "remote_filesystem_read_prefetch": lambda: random.randint(0, 1),
        "compile_expressions": lambda: random.randint(0, 1),
        "compile_aggregate_expressions": lambda: random.randint(0, 1),
        "compile_sort_description": lambda: random.randint(0, 1),
        "merge_tree_coarse_index_granularity": lambda: random.randint(2, 32),
        "optimize_distinct_in_order": lambda: random.randint(0, 1),
        "optimize_sorting_for_input_stream": lambda: random.randint(0, 1),
    }

    @staticmethod
    def get_random_settings():
        random_settings = []
        for setting, generator in SettingsRandomizer.settings.items():
            random_settings.append(f"{setting}={generator()}")
        return random_settings
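

# Plain value object describing the outcome of a single test run.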
class TestResult:
    def __init__(
        self,
        case_name: str,
        status: TestStatus,
        reason: Optional[FailureReason],
        total_time: float,
        description: str,
    ):
        self.case_name: str = case_name
        self.status: TestStatus = status
        self.reason: Optional[FailureReason] = reason
        self.total_time: float = total_time
        self.description: str = description
        self.need_retry: bool = False

    def check_if_need_retry(self, args, stdout, stderr, runs_count):
        if (
            self.status != TestStatus.FAIL
            or not need_retry(args, stdout, stderr, self.total_time)
            or MAX_RETRIES < runs_count
        ):
            return
        self.need_retry = True
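

# A single test file: deciding whether to skip it, setting up a per-test
# database, running the test and interpreting its output.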
class TestCase:
    @staticmethod
    def get_description_from_exception_info(exc_info):
        exc_type, exc_value, tb = exc_info
        exc_name = exc_type.__name__
        traceback_str = "\n".join(traceback.format_tb(tb, 10))
        description = f"\n{exc_name}\n{exc_value}\n{traceback_str}"
        return description

    @staticmethod
    def get_reference_file(suite_dir, name):
        """
        Returns reference file name for specified test
        """

        name = removesuffix(name, ".gen")
        for ext in [".reference", ".gen.reference"]:
            reference_file = os.path.join(suite_dir, name) + ext
            if os.path.isfile(reference_file):
                return reference_file
        return None

    @staticmethod
    def configure_testcase_args(args, case_file, suite_tmp_dir):
        testcase_args = copy.deepcopy(args)

        testcase_args.testcase_start_time = datetime.now()
        testcase_basename = os.path.basename(case_file)
        testcase_args.testcase_client = (
            f"{testcase_args.client} --log_comment '{testcase_basename}'"
        )
        testcase_args.testcase_basename = testcase_basename

        if testcase_args.database:
            database = testcase_args.database
            os.environ.setdefault("CLICKHOUSE_DATABASE", database)
            os.environ.setdefault("CLICKHOUSE_TMP", suite_tmp_dir)
            testcase_args.test_tmp_dir = suite_tmp_dir
        else:
            # If --database is not specified, create a temporary database with a
            # unique name, and recreate and drop it for each test
            def random_str(length=6):
                alphabet = string.ascii_lowercase + string.digits
                # NOTE: it is important not to use default random generator, since it shares state.
                return "".join(
                    random.SystemRandom().choice(alphabet) for _ in range(length)
                )

            database = f"test_{random_str()}"

            clickhouse_execute(
                args,
                "CREATE DATABASE " + database + get_db_engine(testcase_args, database),
                settings=get_create_database_settings(args, testcase_args),
            )

            os.environ["CLICKHOUSE_DATABASE"] = database
            # Set temporary directory to match the randomly generated database,
            # because .sh tests also use it for temporary files and we want to avoid
            # collisions.
            testcase_args.test_tmp_dir = os.path.join(suite_tmp_dir, database)
            os.mkdir(testcase_args.test_tmp_dir)
            os.environ["CLICKHOUSE_TMP"] = testcase_args.test_tmp_dir

        testcase_args.testcase_database = database

        # Printed only in case of failures
        #
        # NOTE: here we use "CLICKHOUSE_TMP" instead of "file_suffix",
        # so it is set in configure_testcase_args() unlike the other files
        # (stdout_file, stderr_file) in TestCase.__init__(), because
        # CLICKHOUSE_TMP is easier to use from expect scripts.
        testcase_args.debug_log_file = (
            os.path.join(testcase_args.test_tmp_dir, testcase_basename) + ".debuglog"
        )

        return testcase_args

    def cli_random_settings(self) -> str:
        return " ".join([f"--{setting}" for setting in self.random_settings])

    def add_random_settings(self, args, client_options):
        if self.tags and "no-random-settings" in self.tags:
            return client_options
        if args.no_random_settings:
            return client_options

        if len(self.base_url_params) == 0:
            os.environ["CLICKHOUSE_URL_PARAMS"] = "&".join(self.random_settings)
        else:
            os.environ["CLICKHOUSE_URL_PARAMS"] = (
                self.base_url_params + "&" + "&".join(self.random_settings)
            )

        new_options = f" --allow_repeated_settings {self.cli_random_settings()}"
        os.environ["CLICKHOUSE_CLIENT_OPT"] = (
            self.base_client_options + new_options + " "
        )
        return client_options + new_options

    def remove_random_settings_from_env(self):
        os.environ["CLICKHOUSE_URL_PARAMS"] = self.base_url_params
        os.environ["CLICKHOUSE_CLIENT_OPT"] = self.base_client_options

    def add_info_about_settings(self, args, description):
        if self.tags and "no-random-settings" in self.tags:
            return description
        if args.no_random_settings:
            return description

        return (
            f"{description}\nSettings used in the test: {self.cli_random_settings()}\n"
        )

    def __init__(self, suite, case: str, args, is_concurrent: bool):
        self.case: str = case  # case file name
        self.tags: Set[str] = suite.all_tags[case] if case in suite.all_tags else set()

        for tag in os.getenv("GLOBAL_TAGS", "").split(","):
            self.tags.add(tag.strip())

        self.case_file: str = os.path.join(suite.suite_path, case)
        (self.name, self.ext) = os.path.splitext(case)

        file_suffix = f".{os.getpid()}" if is_concurrent and args.test_runs > 1 else ""
        self.reference_file = self.get_reference_file(suite.suite_path, self.name)
        self.stdout_file = (
            os.path.join(suite.suite_tmp_path, self.name) + file_suffix + ".stdout"
        )
        self.stderr_file = (
            os.path.join(suite.suite_tmp_path, self.name) + file_suffix + ".stderr"
        )

        self.testcase_args = None
        self.runs_count = 0

        self.random_settings = SettingsRandomizer.get_random_settings()
        self.base_url_params = (
            os.environ["CLICKHOUSE_URL_PARAMS"]
            if "CLICKHOUSE_URL_PARAMS" in os.environ
            else ""
        )
        self.base_client_options = (
            os.environ["CLICKHOUSE_CLIENT_OPT"]
            if "CLICKHOUSE_CLIENT_OPT" in os.environ
            else ""
        )

    # Check if the test has the "no-backward-compatibility-check" tag and should be skipped
    def check_backward_incompatible_tag(self) -> bool:
        for tag in self.tags:
            if tag.startswith("no-backward-compatibility-check"):
                split = tag.split(":")

                # If version is not specified in tag, always skip this test.
                if len(split) == 1:
                    return True
                version_from_tag = split[1]

                # Check if the string extracted from the tag is a real ClickHouse version; if not, always skip the test.
                if re.match(VERSION_PATTERN, version_from_tag) is None:
                    return True

                server_version = str(
                    clickhouse_execute(args, "SELECT version()").decode()
                )
                # If the server version is less than or equal to the version specified in the tag, skip this test.
                version_from_tag_split = list(map(int, version_from_tag.split(".")))
                server_version_split = list(map(int, server_version.split(".")))
                if (
                    server_version_split[: len(version_from_tag_split)]
                    <= version_from_tag_split
                ):
                    return True

        return False

    # should skip test, should increment skipped_total, skip reason
    def should_skip_test(self, suite) -> Optional[FailureReason]:
        tags = self.tags

        if tags and ("disabled" in tags) and not args.disabled:
            return FailureReason.DISABLED

        elif (
            os.path.exists(os.path.join(suite.suite_path, self.name) + ".disabled")
            and not args.disabled
        ):
            return FailureReason.DISABLED

        elif args.skip and any(s in self.name for s in args.skip):
            return FailureReason.SKIP

        elif not USE_JINJA and self.ext.endswith("j2"):
            return FailureReason.NO_JINJA

        elif (
            tags
            and (("zookeeper" in tags) or ("replica" in tags))
            and not args.zookeeper
        ):
            return FailureReason.NO_ZOOKEEPER

        elif (
            tags
            and (("shard" in tags) or ("distributed" in tags) or ("global" in tags))
            and not args.shard
        ):
            return FailureReason.NO_SHARD

        elif tags and ("no-fasttest" in tags) and args.fast_tests_only:
            return FailureReason.FAST_ONLY

        elif (
            tags
            and (("long" in tags) or ("deadlock" in tags) or ("race" in tags))
            and args.no_long
        ):
            # Tests for races and deadlocks usually are run in a loop for a significant amount of time
            return FailureReason.NO_LONG

        elif tags and ("no-replicated-database" in tags) and args.replicated_database:
            return FailureReason.REPLICATED_DB

        elif (
            args.backward_compatibility_check and self.check_backward_incompatible_tag()
        ):
            return FailureReason.BACKWARD_INCOMPATIBLE

        elif tags and ("no-s3-storage" in tags) and args.s3_storage:
            return FailureReason.S3_STORAGE

        elif tags and ("no-stress" in tags) and args.stress:
            return FailureReason.STRESS

        elif tags:
            for build_flag in args.build_flags:
                if "no-" + build_flag in tags:
                    return FailureReason.BUILD
            for tag in tags:
                tag = tag.replace("-", "_")
                if tag.startswith("use_") and tag not in args.build_flags:
                    return FailureReason.BUILD

        return None

    def process_result_impl(
        self, proc, stdout: str, stderr: str, debug_log: str, total_time: float
    ):
        description = ""

        if proc:
            if proc.returncode is None:
                try:
                    proc.kill()
                except OSError as e:
                    if e.errno != ESRCH:
                        raise

                if stderr:
                    description += stderr
                if debug_log:
                    description += "\n"
                    description += debug_log
                return TestResult(
                    self.name,
                    TestStatus.FAIL,
                    FailureReason.TIMEOUT,
                    total_time,
                    description,
                )

            if proc.returncode != 0:
                reason = FailureReason.EXIT_CODE
                description += str(proc.returncode)

                if stderr:
                    description += "\n"
                    description += stderr
                if debug_log:
                    description += "\n"
                    description += debug_log

                # Stop on fatal errors like segmentation fault. They are sent to client via logs.
                if " <Fatal> " in stderr:
                    reason = FailureReason.SERVER_DIED

                if (
                    self.testcase_args.stop
                    and (
                        "Connection refused" in stderr
                        or "Attempt to read after eof" in stderr
                    )
                    and "Received exception from server" not in stderr
                ):
                    reason = FailureReason.SERVER_DIED

                if os.path.isfile(self.stdout_file):
                    description += ", result:\n\n"
                    description += "\n".join(
                        open(self.stdout_file).read().splitlines()[:100]
                    )
                    description += "\n"

                description += f"\nstdout:\n{stdout}\n"
                return TestResult(
                    self.name, TestStatus.FAIL, reason, total_time, description
                )

        if stderr:
            description += "\n{}\n".format("\n".join(stderr.splitlines()[:100]))
            description += f"\nstdout:\n{stdout}\n"
            if debug_log:
                description += "\n"
                description += debug_log
            return TestResult(
                self.name,
                TestStatus.FAIL,
                FailureReason.STDERR,
                total_time,
                description,
            )

        if "Exception" in stdout:
            description += "\n{}\n".format("\n".join(stdout.splitlines()[:100]))
            if debug_log:
                description += "\n"
                description += debug_log
            return TestResult(
                self.name,
                TestStatus.FAIL,
                FailureReason.EXCEPTION,
                total_time,
                description,
            )

        if "@@SKIP@@" in stdout:
            skip_reason = stdout.replace("@@SKIP@@", "").rstrip("\n")
            description += " - "
            description += skip_reason
            return TestResult(
                self.name,
                TestStatus.SKIPPED,
                FailureReason.SKIP,
                total_time,
                description,
            )

        if self.reference_file is None:
            return TestResult(
                self.name,
                TestStatus.UNKNOWN,
                FailureReason.NO_REFERENCE,
                total_time,
                description,
            )

        result_is_different = subprocess.call(
            ["diff", "-q", self.reference_file, self.stdout_file], stdout=PIPE
        )

        if result_is_different:
            diff = Popen(
                [
                    "diff",
                    "-U",
                    str(self.testcase_args.unified),
                    self.reference_file,
                    self.stdout_file,
                ],
                stdout=PIPE,
                universal_newlines=True,
            ).communicate()[0]
            description += f"\n{diff}\n"
            if debug_log:
                description += "\n"
                description += debug_log
            return TestResult(
                self.name,
                TestStatus.FAIL,
                FailureReason.RESULT_DIFF,
                total_time,
                description,
            )

        if (
            self.testcase_args.test_runs > 1
            and total_time > 60
            and "long" not in self.tags
        ):
            if debug_log:
                description += "\n"
                description += debug_log
            # We're in Flaky Check mode, check the run time as well while we're at it.
            return TestResult(
                self.name,
                TestStatus.FAIL,
                FailureReason.TOO_LONG,
                total_time,
                description,
            )

        if os.path.exists(self.stdout_file):
            os.remove(self.stdout_file)
        if os.path.exists(self.stderr_file):
            os.remove(self.stderr_file)
        if os.path.exists(self.testcase_args.debug_log_file):
            os.remove(self.testcase_args.debug_log_file)

        return TestResult(self.name, TestStatus.OK, None, total_time, description)

    @staticmethod
    def print_test_time(test_time) -> str:
        if args.print_time:
            return f" {test_time:.2f} sec."
        else:
            return ""

    def process_result(self, result: TestResult, messages):
        description_full = messages[result.status]
        description_full += self.print_test_time(result.total_time)
        if result.reason is not None:
            description_full += " - "
            description_full += result.reason.value

        description_full += result.description
        description_full += "\n"

        if result.status == TestStatus.FAIL and self.testcase_args:
            description_full += "Database: " + self.testcase_args.testcase_database

        result.description = description_full
        return result
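
    # Logging the test name via a trivial SELECT leaves a marker in the server
    # logs; run() also uses it as a health check and reports SERVER_DIED when
    # it fails.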
    @staticmethod
    def send_test_name_failed(suite: str, case: str):
        pid = os.getpid()
        clickhouse_execute(args, f"SELECT 'Running test {suite}/{case} from pid={pid}'")

    def run_single_test(
        self, server_logs_level, client_options
    ) -> Tuple[Optional[Popen], str, str, str, float]:
        args = self.testcase_args
        client = args.testcase_client
        start_time = args.testcase_start_time
        database = args.testcase_database

        # This is for .sh tests
        os.environ["CLICKHOUSE_LOG_COMMENT"] = args.testcase_basename

        params = {
            "client": client + " --database=" + database,
            "logs_level": server_logs_level,
            "options": client_options,
            "test": self.case_file,
            "stdout": self.stdout_file,
            "stderr": self.stderr_file,
            "secure": "--secure" if args.secure else "",
        }

        # >> append to stderr (but not stdout, since it is not used there),
        # because there is also the output of the per-test database creation
        if not args.database:
            pattern = "{test} > {stdout} 2> {stderr}"
        else:
            pattern = "{test} > {stdout} 2> {stderr}"

        if self.ext == ".sql":
            pattern = (
                "{client} --send_logs_level={logs_level} {secure} --multiquery {options} < "
                + pattern
            )

        command = pattern.format(**params)

        proc = Popen(command, shell=True, env=os.environ)

        while (
            datetime.now() - start_time
        ).total_seconds() < args.timeout and proc.poll() is None:
            sleep(0.01)

        need_drop_database = not args.database
        if need_drop_database and args.no_drop_if_fail:
            maybe_passed = (
                (proc.returncode == 0)
                and (proc.stderr is None)
                and (proc.stdout is None or "Exception" not in proc.stdout)
            )
            need_drop_database = not maybe_passed

        debug_log = ""
        if os.path.exists(self.testcase_args.debug_log_file):
            with open(self.testcase_args.debug_log_file, "rb") as stream:
                debug_log += self.testcase_args.debug_log_file + ":\n"
                debug_log += str(stream.read(), errors="replace", encoding="utf-8")
                debug_log += "\n"

        if need_drop_database:
            seconds_left = max(
                args.timeout - (datetime.now() - start_time).total_seconds(), 20
            )
            try:
                clickhouse_execute(
                    args,
                    "DROP DATABASE " + database,
                    timeout=seconds_left,
                    settings={
                        "log_comment": args.testcase_basename,
                    },
                )
            except socket.timeout:
                total_time = (datetime.now() - start_time).total_seconds()
                return (
                    None,
                    "",
                    f"Timeout dropping database {database} after test",
                    debug_log,
                    total_time,
                )
            shutil.rmtree(args.test_tmp_dir)

        total_time = (datetime.now() - start_time).total_seconds()

        # Normalize randomized database names in stdout, stderr files.
        os.system(f"LC_ALL=C sed -i -e 's/{database}/default/g' {self.stdout_file}")
        if args.hide_db_name:
            os.system(f"LC_ALL=C sed -i -e 's/{database}/default/g' {self.stderr_file}")
        if args.replicated_database:
            os.system(f"LC_ALL=C sed -i -e 's|/auto_{{shard}}||g' {self.stdout_file}")
            os.system(f"LC_ALL=C sed -i -e 's|auto_{{replica}}||g' {self.stdout_file}")

        # Normalize hostname in stdout file.
        os.system(
            f"LC_ALL=C sed -i -e 's/{socket.gethostname()}/localhost/g' {self.stdout_file}"
        )

        stdout = ""
        if os.path.exists(self.stdout_file):
            with open(self.stdout_file, "rb") as stdfd:
                stdout = str(stdfd.read(), errors="replace", encoding="utf-8")

        stderr = ""
        if os.path.exists(self.stderr_file):
            with open(self.stderr_file, "rb") as stdfd:
                stderr += str(stdfd.read(), errors="replace", encoding="utf-8")

        return proc, stdout, stderr, debug_log, total_time

    def run(self, args, suite, client_options, server_logs_level):
        try:
            skip_reason = self.should_skip_test(suite)
            if skip_reason is not None:
                return TestResult(self.name, TestStatus.SKIPPED, skip_reason, 0.0, "")

            if args.testname:
                try:
                    self.send_test_name_failed(suite.suite, self.case)
                except Exception:
                    return TestResult(
                        self.name,
                        TestStatus.FAIL,
                        FailureReason.SERVER_DIED,
                        0.0,
                        "\nServer does not respond to health check\n",
                    )

            self.runs_count += 1
            self.testcase_args = self.configure_testcase_args(
                args, self.case_file, suite.suite_tmp_path
            )
            client_options = self.add_random_settings(args, client_options)
            proc, stdout, stderr, debug_log, total_time = self.run_single_test(
                server_logs_level, client_options
            )

            result = self.process_result_impl(
                proc, stdout, stderr, debug_log, total_time
            )
            result.check_if_need_retry(args, stdout, stderr, self.runs_count)
            if result.status == TestStatus.FAIL:
                result.description = self.add_info_about_settings(
                    args, result.description
                )
            return result
        except KeyboardInterrupt as e:
            raise e
        except HTTPError:
            return TestResult(
                self.name,
                TestStatus.FAIL,
                FailureReason.INTERNAL_QUERY_FAIL,
                0.0,
                self.add_info_about_settings(
                    args, self.get_description_from_exception_info(sys.exc_info())
                ),
            )
        except (ConnectionRefusedError, ConnectionResetError):
            return TestResult(
                self.name,
                TestStatus.FAIL,
                FailureReason.SERVER_DIED,
                0.0,
                self.add_info_about_settings(
                    args, self.get_description_from_exception_info(sys.exc_info())
                ),
            )
        except Exception:
            return TestResult(
                self.name,
                TestStatus.UNKNOWN,
                FailureReason.INTERNAL_ERROR,
                0.0,
                self.get_description_from_exception_info(sys.exc_info()),
            )
        finally:
            self.remove_random_settings_from_env()
|
|
|
|
|
2021-09-22 08:42:46 +00:00
|
|
|
|
|
|
|
class TestSuite:
|
|
|
|
    @staticmethod
    def tests_in_suite_key_func(item: str) -> float:
        if args.order == "random":
            return random.random()

        reverse = 1 if args.order == "asc" else -1

        if item.find("_") == -1:
            return 99998

        prefix, _ = item.split("_", 1)

        try:
            return reverse * int(prefix)
        except ValueError:
            return 99997

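    # Jinja2-templated tests: "*.sql.j2" is rendered to "*.gen.sql" in the
    # suite directory, and a matching "*.reference.j2", when present, is
    # rendered to "*.gen.reference".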
    @staticmethod
    def render_test_template(j2env, suite_dir, test_name):
        """
        Render template for test and reference file if needed
        """

        if j2env is None:
            return test_name

        test_base_name = removesuffix(test_name, ".sql.j2", ".sql")

        reference_file_name = test_base_name + ".reference.j2"
        reference_file_path = os.path.join(suite_dir, reference_file_name)
        if os.path.isfile(reference_file_path):
            tpl = j2env.get_template(reference_file_name)
            tpl.stream().dump(
                os.path.join(suite_dir, test_base_name) + ".gen.reference"
            )

        if test_name.endswith(".sql.j2"):
            tpl = j2env.get_template(test_name)
            generated_test_name = test_base_name + ".gen.sql"
            tpl.stream().dump(os.path.join(suite_dir, generated_test_name))
            return generated_test_name

        return test_name

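    # A test declares its tags on the first non-empty, non-shebang line,
    # e.g. "-- Tags: no-parallel, long" in .sql files or "# Tags: ..." in
    # .sh/.py/.expect files (the tag names here are illustrative).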
    @staticmethod
    def read_test_tags(suite_dir: str, all_tests: List[str]) -> Dict[str, Set[str]]:
        def get_comment_sign(filename):
            if filename.endswith(".sql") or filename.endswith(".sql.j2"):
                return "--"
            elif (
                filename.endswith(".sh")
                or filename.endswith(".py")
                or filename.endswith(".expect")
            ):
                return "#"
            else:
                raise Exception(f"Unknown file_extension: {filename}")

        def parse_tags_from_line(line, comment_sign):
            if not line.startswith(comment_sign):
                return None
            tags_str = line[len(comment_sign) :].lstrip()  # noqa: ignore E203
            tags_prefix = "Tags:"
            if not tags_str.startswith(tags_prefix):
                return None
            tags_str = tags_str[len(tags_prefix) :]  # noqa: ignore E203
            tags = tags_str.split(",")
            tags = {tag.strip() for tag in tags}
            return tags

        def is_shebang(line: str) -> bool:
            return line.startswith("#!")

        def find_tag_line(file):
            for line in file:
                line = line.strip()
                if line and not is_shebang(line):
                    return line
            return ""

        def load_tags_from_file(filepath):
            comment_sign = get_comment_sign(filepath)
            with open(filepath, "r", encoding="utf-8") as file:
                try:
                    line = find_tag_line(file)
                except UnicodeDecodeError:
                    return []
                return parse_tags_from_line(line, comment_sign)

        all_tags = {}
        start_time = datetime.now()
        for test_name in all_tests:
            tags = load_tags_from_file(os.path.join(suite_dir, test_name))
            if tags:
                all_tags[test_name] = tags
        elapsed = (datetime.now() - start_time).total_seconds()
        if elapsed > 1:
            print(f"Tags for suite {suite_dir} read in {elapsed:.2f} seconds")
        return all_tags

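    # When --run-by-hash-num/--run-by-hash-total are set, keep only tests with
    # crc32(test_name) % run_by_hash_total == run_by_hash_num, so CI can shard
    # one suite across several runners (see stringhash()).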
    def __init__(self, args, suite_path: str, suite_tmp_path: str, suite: str):
        self.args = args
        self.suite_path: str = suite_path
        self.suite_tmp_path: str = suite_tmp_path
        self.suite: str = suite

        filter_func = lambda x: True  # noqa: ignore E731

        if args.run_by_hash_num is not None and args.run_by_hash_total is not None:
            if args.run_by_hash_num > args.run_by_hash_total:
                raise Exception(
                    f"Incorrect run by hash, value {args.run_by_hash_num} is bigger than total {args.run_by_hash_total}"
                )

            filter_func = (
                lambda x: stringhash(x) % args.run_by_hash_total == args.run_by_hash_num
            )

        self.all_tests: List[str] = self.get_tests_list(
            self.tests_in_suite_key_func, filter_func
        )
        self.all_tags: Dict[str, Set[str]] = self.read_test_tags(
            self.suite_path, self.all_tests
        )

        self.sequential_tests = []
        self.parallel_tests = []
        for test_name in self.all_tests:
            if self.is_sequential_test(test_name):
                self.sequential_tests.append(test_name)
            else:
                self.parallel_tests.append(test_name)

    def is_sequential_test(self, test_name):
        if args.sequential:
            if any(s in test_name for s in args.sequential):
                return True

        if test_name not in self.all_tags:
            return False

        return ("no-parallel" in self.all_tags[test_name]) or (
            "sequential" in self.all_tags[test_name]
        )

    def get_tests_list(self, sort_key, filter_func):
        """
        Return the list of test file names to run
        """

        all_tests = list(self.get_selected_tests(filter_func))
        all_tests = all_tests * self.args.test_runs
        all_tests.sort(key=sort_key)
        return all_tests

    def get_selected_tests(self, filter_func):
        """
        Find all files with tests, filter, render templates
        """

        j2env = (
            jinja2.Environment(
                loader=jinja2.FileSystemLoader(self.suite_path),
                keep_trailing_newline=True,
            )
            if USE_JINJA
            else None
        )

        for test_name in os.listdir(self.suite_path):
            if not is_test_from_dir(self.suite_path, test_name):
                continue
            if self.args.test and not any(
                re.search(pattern, test_name) for pattern in self.args.test
            ):
                continue
            if USE_JINJA and test_name.endswith(".gen.sql"):
                continue
            if not filter_func(test_name):
                continue
            test_name = self.render_test_template(j2env, self.suite_path, test_name)
            yield test_name

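    # Builds a TestSuite from a "<number>_<name>" directory, or returns None
    # when the directory must be skipped: no numeric prefix, stateless/stateful
    # suites disabled via flags, or stateful test data not loaded.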
    @staticmethod
    def read_test_suite(args, suite_dir_name: str):
        def is_data_present():
            try:
                return int(clickhouse_execute(args, "EXISTS TABLE test.hits"))
            except Exception as e:
                print(
                    "Cannot check if dataset is available, assuming it's not: ", str(e)
                )
                return False

        base_dir = os.path.abspath(args.queries)
        tmp_dir = os.path.abspath(args.tmp)
        suite_path = os.path.join(base_dir, suite_dir_name)

        suite_re_obj = re.search("^[0-9]+_(.*)$", suite_dir_name)
        if not suite_re_obj:  # skip .gitignore and so on
            return None

        suite_tmp_path = os.path.join(tmp_dir, suite_dir_name)
        if not os.path.exists(suite_tmp_path):
            os.makedirs(suite_tmp_path)

        suite = suite_re_obj.group(1)

        if not os.path.isdir(suite_path):
            return None

        if "stateful" in suite and not args.no_stateful and not is_data_present():
            print("Won't run stateful tests because test data wasn't loaded.")
            return None
        if "stateless" in suite and args.no_stateless:
            print("Won't run stateless tests because they were manually disabled.")
            return None
        if "stateful" in suite and args.no_stateful:
            print("Won't run stateful tests because they were manually disabled.")
            return None

        return TestSuite(args, suite_path, suite_tmp_path, suite)

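# Globals shared with worker processes; the real objects are created in the
# "__main__" block below, these placeholders only document the names.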
stop_time = None
exit_code = None
server_died = None
stop_tests_triggered_lock = None
stop_tests_triggered = None
queue = None
multiprocessing_manager = None
restarted_tests = None

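# Worker body: runs a batch of tests and prints a colored per-test status. In
# parallel mode each worker pulls test names from the shared queue until it
# receives a None sentinel; otherwise it drains the list it was given.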
def run_tests_array(all_tests_with_params: Tuple[List[str], int, TestSuite]):
    all_tests, num_tests, test_suite = all_tests_with_params
    global stop_time
    global exit_code
    global server_died
    global restarted_tests

    OP_SQUARE_BRACKET = colored("[", args, attrs=["bold"])
    CL_SQUARE_BRACKET = colored("]", args, attrs=["bold"])

    MSG_FAIL = (
        OP_SQUARE_BRACKET
        + colored(" FAIL ", args, "red", attrs=["bold"])
        + CL_SQUARE_BRACKET
    )
    MSG_UNKNOWN = (
        OP_SQUARE_BRACKET
        + colored(" UNKNOWN ", args, "yellow", attrs=["bold"])
        + CL_SQUARE_BRACKET
    )
    MSG_OK = (
        OP_SQUARE_BRACKET
        + colored(" OK ", args, "green", attrs=["bold"])
        + CL_SQUARE_BRACKET
    )
    MSG_SKIPPED = (
        OP_SQUARE_BRACKET
        + colored(" SKIPPED ", args, "cyan", attrs=["bold"])
        + CL_SQUARE_BRACKET
    )

    MESSAGES = {
        TestStatus.FAIL: MSG_FAIL,
        TestStatus.UNKNOWN: MSG_UNKNOWN,
        TestStatus.OK: MSG_OK,
        TestStatus.SKIPPED: MSG_SKIPPED,
    }

    passed_total = 0
    skipped_total = 0
    failures_total = 0
    failures_chain = 0
    start_time = datetime.now()

    is_concurrent = multiprocessing.current_process().name != "MainProcess"

    client_options = get_additional_client_options(args)

    if num_tests > 0:
        about = "about " if is_concurrent else ""
        proc_name = multiprocessing.current_process().name
        print(f"\nRunning {about}{num_tests} {test_suite.suite} tests ({proc_name}).\n")

    while True:
        if is_concurrent:
            case = queue.get(timeout=args.timeout * 1.1)
            if not case:
                break
        else:
            if all_tests:
                case = all_tests.pop(0)
            else:
                break

        if server_died.is_set():
            stop_tests()
            break

        if stop_time and time() > stop_time:
            print("\nStop tests run because global time limit is exceeded.\n")
            stop_tests()
            break

        test_case = TestCase(test_suite, case, args, is_concurrent)

        try:
            description = ""
            test_case_name = removesuffix(test_case.name, ".gen", ".sql") + ": "
            if not is_concurrent:
                sys.stdout.flush()
                sys.stdout.write(f"{test_case_name:72}")
                # This flush is needed so you can see the test name of a long
                # running test before it finishes. But don't do it in parallel
                # mode, so that the lines don't mix.
                sys.stdout.flush()
            else:
                description = f"{test_case_name:72}"

            while True:
                test_result = test_case.run(
                    args, test_suite, client_options, server_logs_level
                )
                test_result = test_case.process_result(test_result, MESSAGES)
                if not test_result.need_retry:
                    break
                restarted_tests.append(test_result)

            # First print the description, then invoke the check result logic
            description += test_result.description

            if description and not description.endswith("\n"):
                description += "\n"

            sys.stdout.write(description)
            sys.stdout.flush()

            if test_result.status == TestStatus.OK:
                passed_total += 1
                failures_chain = 0
            elif test_result.status == TestStatus.FAIL:
                failures_total += 1
                failures_chain += 1
                if test_result.reason == FailureReason.SERVER_DIED:
                    server_died.set()
                    stop_tests()
            elif test_result.status == TestStatus.SKIPPED:
                skipped_total += 1

        except KeyboardInterrupt as e:
            print(colored("Break tests execution", args, "red"))
            stop_tests()
            raise e

        if failures_chain >= args.max_failures_chain:
            stop_tests()
            break

    if failures_total > 0:
        print(
            colored(
                f"\nHaving {failures_total} errors! {passed_total} tests passed."
                f" {skipped_total} tests skipped."
                f" {(datetime.now() - start_time).total_seconds():.2f} s elapsed"
                f" ({multiprocessing.current_process().name}).",
                args,
                "red",
                attrs=["bold"],
            )
        )
        exit_code.value = 1
    else:
        print(
            colored(
                f"\n{passed_total} tests passed. {skipped_total} tests skipped."
                f" {(datetime.now() - start_time).total_seconds():.2f} s elapsed"
                f" ({multiprocessing.current_process().name}).",
                args,
                "green",
                attrs=["bold"],
            )
        )

    sys.stdout.flush()


server_logs_level = "warning"

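# Wait for the server to answer "SELECT 1", retrying connection errors every
# 0.5s up to --server-check-retries times; timeouts and unexpected errors
# abort the wait immediately.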
def check_server_started(args):
    print("Connecting to ClickHouse server...", end="")

    sys.stdout.flush()
    retry_count = args.server_check_retries
    while retry_count > 0:
        try:
            clickhouse_execute(args, "SELECT 1")
            print(" OK")
            sys.stdout.flush()
            return True
        except (ConnectionRefusedError, ConnectionResetError):
            print(".", end="")
            sys.stdout.flush()
            retry_count -= 1
            sleep(0.5)
            continue
        except TimeoutError:
            print("\nConnection timeout, will not retry")
            break
        except Exception as e:
            print("\nUnexpected exception, will not retry: ", str(e))
            break

    print("\nAll connection tries failed")
    sys.stdout.flush()
    return False

class BuildFlags:
    THREAD = "tsan"
    ADDRESS = "asan"
    UNDEFINED = "ubsan"
    MEMORY = "msan"
    DEBUG = "debug"
    RELEASE = "release"
    ORDINARY_DATABASE = "ordinary-database"
    POLYMORPHIC_PARTS = "polymorphic-parts"

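# Ask the running server which sanitizers, build type and features it was
# built with; the collected flags are stored in args.build_flags in main().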
def collect_build_flags(args):
    result = []

    value = clickhouse_execute(
        args, "SELECT value FROM system.build_options WHERE name = 'CXX_FLAGS'"
    )
    if b"-fsanitize=thread" in value:
        result.append(BuildFlags.THREAD)
    elif b"-fsanitize=address" in value:
        result.append(BuildFlags.ADDRESS)
    elif b"-fsanitize=undefined" in value:
        result.append(BuildFlags.UNDEFINED)
    elif b"-fsanitize=memory" in value:
        result.append(BuildFlags.MEMORY)

    value = clickhouse_execute(
        args, "SELECT value FROM system.build_options WHERE name = 'BUILD_TYPE'"
    )
    if b"Debug" in value:
        result.append(BuildFlags.DEBUG)
    elif b"RelWithDebInfo" in value or b"Release" in value:
        result.append(BuildFlags.RELEASE)

    value = clickhouse_execute(
        args,
        "SELECT value FROM system.settings WHERE name = 'allow_deprecated_database_ordinary'",
    )
    if value == b"1" or args.db_engine == "Ordinary":
        result.append(BuildFlags.ORDINARY_DATABASE)

    value = int(
        clickhouse_execute(
            args,
            "SELECT value FROM system.merge_tree_settings WHERE name = 'min_bytes_for_wide_part'",
        )
    )
    if value == 0:
        result.append(BuildFlags.POLYMORPHIC_PARTS)

    use_flags = clickhouse_execute(
        args,
        "SELECT name FROM system.build_options WHERE name like 'USE_%' AND value in ('ON', '1')",
    )
    for use_flag in use_flags.strip().splitlines():
        use_flag = use_flag.decode().lower()
        result.append(use_flag)

    system_processor = clickhouse_execute(
        args,
        "SELECT value FROM system.build_options WHERE name = 'SYSTEM_PROCESSOR' LIMIT 1",
    ).strip()
    if system_processor:
        result.append(f"cpu-{system_processor.decode().lower()}")

    return result

def check_table_column(args, database, table, column):
    return (
        int(
            clickhouse_execute(
                args,
                f"""
                    SELECT count()
                    FROM system.columns
                    WHERE database = '{database}' AND table = '{table}' AND name = '{column}'
                """,
            )
        )
        > 0
    )

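# Sort key for suite directories, e.g. "01234_bugfixes" -> (1234, "bugfixes")
# (illustrative name); directories without a numeric prefix sort last.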
def suite_key_func(item: str) -> Union[float, Tuple[int, str]]:
    if args.order == "random":
        return random.random()

    if item.find("_") == -1:
        return 99998, ""

    prefix, suffix = item.split("_", 1)

    try:
        return int(prefix), suffix
    except ValueError:
        return 99997, ""

def extract_key(key: str) -> str:
    return subprocess.getstatusoutput(
        args.extract_from_config + " --try --config " + args.configserver + key
    )[1]

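# Parallel mode: start a pool of workers running run_tests_array(), feed the
# parallel tests through the bounded queue, then enqueue one None sentinel per
# worker; sequential tests always run afterwards in the main process.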
def do_run_tests(jobs, test_suite: TestSuite, parallel):
    if jobs > 1 and len(test_suite.parallel_tests) > 0:
        print(
            "Found",
            len(test_suite.parallel_tests),
            "parallel tests and",
            len(test_suite.sequential_tests),
            "sequential tests",
        )
        run_n, run_total = parallel.split("/")
        run_n = float(run_n)
        run_total = float(run_total)
        tests_n = len(test_suite.parallel_tests)
        run_total = min(run_total, tests_n)

        jobs = min(jobs, tests_n)
        run_total = max(jobs, run_total)

        batch_size = max(1, len(test_suite.parallel_tests) // jobs)
        parallel_tests_array = []
        for _ in range(jobs):
            parallel_tests_array.append((None, batch_size, test_suite))

        try:
            with closing(multiprocessing.Pool(processes=jobs)) as pool:
                pool.map_async(run_tests_array, parallel_tests_array)

                for test_name in test_suite.parallel_tests:
                    queue.put(test_name, timeout=args.timeout * 1.1)

                for _ in range(jobs):
                    queue.put(None, timeout=args.timeout * 1.1)

                queue.close()
        except Full:
            print(
                "Couldn't put test to the queue within timeout. Server probably hung."
            )
            print_stacktraces()
            queue.close()

        pool.join()

        run_tests_array(
            (test_suite.sequential_tests, len(test_suite.sequential_tests), test_suite)
        )
        return len(test_suite.sequential_tests) + len(test_suite.parallel_tests)
    else:
        num_tests = len(test_suite.all_tests)
        run_tests_array((test_suite.all_tests, num_tests, test_suite))
        return num_tests

def is_test_from_dir(suite_dir, case):
    case_file = os.path.join(suite_dir, case)
    # We could also test for executable files (os.access(case_file, os.X_OK)),
    # but it interferes with 01610_client_spawn_editor.editor, which is invoked
    # as a query editor in the test, and must be marked as executable.
    return os.path.isfile(case_file) and any(
        case_file.endswith(supported_ext) for supported_ext in TEST_FILE_EXTENSIONS
    )

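# Example: removesuffix("00001_select.sql.j2", ".sql.j2", ".sql") returns
# "00001_select"; only the first matching suffix is stripped.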
def removesuffix(text, *suffixes):
    """
    Added in python 3.9
    https://www.python.org/dev/peps/pep-0616/

    This version can work with several possible suffixes
    """
    for suffix in suffixes:
        if suffix and text.endswith(suffix):
            return text[: -len(suffix)]
    return text

def main(args):
    global server_died
    global stop_time
    global exit_code
    global server_logs_level
    global restarted_tests

    if not check_server_started(args):
        msg = "Server is not responding. Cannot execute 'SELECT 1' query. \
            If you are using split build, you have to specify -c option."
        if args.hung_check:
            print(msg)
            pid = get_server_pid()
            print("Got server pid", pid)
            print_stacktraces()
        raise Exception(msg)

    args.build_flags = collect_build_flags(args)
    args.suppport_system_processes_is_all_data_sent = check_table_column(
        args, "system", "processes", "is_all_data_sent"
    )

    if args.skip:
        args.skip = set(args.skip)

    base_dir = os.path.abspath(args.queries)

    # Keep same default values as in queries/shell_config.sh
    os.environ.setdefault("CLICKHOUSE_BINARY", args.binary)
    # os.environ.setdefault("CLICKHOUSE_CLIENT", args.client)
    os.environ.setdefault("CLICKHOUSE_CONFIG", args.configserver)

    if args.configclient:
        os.environ.setdefault("CLICKHOUSE_CONFIG_CLIENT", args.configclient)

    # Force to print server warnings in stderr
    # Shell scripts could change logging level
    os.environ.setdefault("CLICKHOUSE_CLIENT_SERVER_LOGS_LEVEL", server_logs_level)

    # This code is bad as the time is not monotonic
    if args.global_time_limit:
        stop_time = time() + args.global_time_limit

    if args.zookeeper is None:
        args.zookeeper = True

    if args.shard is None:
        args.shard = bool(extract_key(' --key listen_host | grep -E "127.0.0.2|::"'))

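    # Database creation may fail transiently; retry up to MAX_RETRIES times as
    # long as need_retry() classifies the error as retryable.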
    def create_common_database(args, db_name):
        create_database_retries = 0
        while create_database_retries < MAX_RETRIES:
            start_time = datetime.now()
            try:
                clickhouse_execute(
                    args,
                    f"CREATE DATABASE IF NOT EXISTS {db_name} "
                    f"{get_db_engine(args, db_name)}",
                    settings=get_create_database_settings(args, None),
                )
                break
            except HTTPError as e:
                total_time = (datetime.now() - start_time).total_seconds()
                if not need_retry(args, e.message, e.message, total_time):
                    break
            create_database_retries += 1

    try:
        if args.database and args.database != "test":
            create_common_database(args, args.database)

        create_common_database(args, "test")
    except Exception as e:
        print(f"Failed to create databases for tests: {e}")
        server_died.set()

    total_tests_run = 0

    for suite in sorted(os.listdir(base_dir), key=suite_key_func):
        if server_died.is_set():
            break

        test_suite = TestSuite.read_test_suite(args, suite)
        if test_suite is None:
            continue

        total_tests_run += do_run_tests(args.jobs, test_suite, args.parallel)

    if server_died.is_set():
        exit_code.value = 1

    if args.hung_check:
        # Some queries may execute in background for some time after test was finished. This is normal.
        for _ in range(1, 60):
            processlist = get_processlist(args)
            if not processlist:
                break
            sleep(1)

        if processlist:
            print(
                colored(
                    "\nFound hung queries in processlist:", args, "red", attrs=["bold"]
                )
            )
            print(json.dumps(processlist, indent=4))
            print(get_transactions_list(args))

            print_stacktraces()
            exit_code.value = 1
        else:
            print(colored("\nNo queries hung.", args, "green", attrs=["bold"]))

    if len(restarted_tests) > 0:
        print("\nSome tests were restarted:\n")

        for test_result in restarted_tests:
            print(f"\n{test_result.case_name:72}: ")
            # replace the status with lowercase to avoid parsing retried tests as failed
            for status in TestStatus:
                test_result.description = test_result.description.replace(
                    status.value, status.value.lower()
                )
            print(test_result.description)

    if total_tests_run == 0:
        print("No tests were run.")
        sys.exit(1)
    else:
        print("All tests have finished.")

    sys.exit(exit_code.value)

def find_binary(name):
    if os.path.exists(name) and os.access(name, os.X_OK):
        return True
    paths = os.environ.get("PATH").split(":")
    for path in paths:
        if os.access(os.path.join(path, name), os.X_OK):
            return True

    # maybe it wasn't in PATH
    if os.access(os.path.join("/usr/local/bin", name), os.X_OK):
        return True
    if os.access(os.path.join("/usr/bin", name), os.X_OK):
        return True
    return False

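# Options from --client-option are forwarded twice: as "--<option>" flags for
# the CLI client and as URL parameters for the HTTP interface.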
def get_additional_client_options(args):
    if args.client_option:
        return " ".join("--" + option for option in args.client_option)
    return ""


def get_additional_client_options_url(args):
    if args.client_option:
        return "&".join(args.client_option)
    return ""

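# Shared state is created before any worker processes are spawned, so forked
# workers (the default start method on Linux) inherit the same Queue/Event
# objects that run_tests_array() uses.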
if __name__ == "__main__":
|
2021-09-22 13:00:59 +00:00
|
|
|
stop_time = None
|
|
|
|
exit_code = multiprocessing.Value("i", 0)
|
|
|
|
server_died = multiprocessing.Event()
|
|
|
|
stop_tests_triggered_lock = multiprocessing.Lock()
|
|
|
|
stop_tests_triggered = multiprocessing.Event()
|
|
|
|
queue = multiprocessing.Queue(maxsize=1)
|
|
|
|
multiprocessing_manager = multiprocessing.Manager()
|
|
|
|
restarted_tests = multiprocessing_manager.list()
|
|
|
|
|
2021-04-29 07:43:56 +00:00
|
|
|
# Move to a new process group and kill it at exit so that we don't have any
|
|
|
|
# infinite tests processes left
|
|
|
|
# (new process group is required to avoid killing some parent processes)
|
|
|
|
os.setpgid(0, 0)
|
|
|
|
signal.signal(signal.SIGTERM, signal_handler)
|
|
|
|
signal.signal(signal.SIGINT, signal_handler)
|
|
|
|
signal.signal(signal.SIGHUP, signal_handler)
|
|
|
|
|
2022-04-27 11:02:45 +00:00
|
|
|
parser = ArgumentParser(description="ClickHouse functional tests")
|
|
|
|
parser.add_argument("-q", "--queries", help="Path to queries dir")
|
|
|
|
parser.add_argument("--tmp", help="Path to tmp dir")
|
|
|
|
|
|
|
|
parser.add_argument(
|
|
|
|
"-b",
|
|
|
|
"--binary",
|
|
|
|
default="clickhouse",
|
2022-04-28 11:26:49 +00:00
|
|
|
help="Path to clickhouse (if monolithic build, clickhouse-server otherwise) "
|
|
|
|
"binary or name of binary in PATH",
|
2022-04-27 11:02:45 +00:00
|
|
|
)
|
|
|
|
|
|
|
|
parser.add_argument(
|
|
|
|
"-c",
|
|
|
|
"--client",
|
2022-04-28 11:26:49 +00:00
|
|
|
help="Path to clickhouse-client (if split build, useless otherwise) binary of "
|
|
|
|
"name of binary in PATH",
|
2022-04-27 11:02:45 +00:00
|
|
|
)
|
|
|
|
|
|
|
|
parser.add_argument("--extract_from_config", help="extract-from-config program")
|
|
|
|
parser.add_argument(
|
|
|
|
"--configclient", help="Client config (if you use not default ports)"
|
|
|
|
)
|
|
|
|
parser.add_argument(
|
|
|
|
"--configserver",
|
|
|
|
default="/etc/clickhouse-server/config.xml",
|
|
|
|
help="Preprocessed server config",
|
|
|
|
)
|
|
|
|
parser.add_argument(
|
|
|
|
"-o", "--output", help="Output xUnit compliant test report directory"
|
|
|
|
)
|
|
|
|
parser.add_argument(
|
|
|
|
"-t",
|
|
|
|
"--timeout",
|
|
|
|
type=int,
|
|
|
|
default=600,
|
|
|
|
help="Timeout for each test case in seconds",
|
|
|
|
)
|
|
|
|
parser.add_argument(
|
|
|
|
"--global_time_limit",
|
|
|
|
type=int,
|
|
|
|
help="Stop if executing more than specified time (after current test finished)",
|
|
|
|
)
|
|
|
|
parser.add_argument("test", nargs="*", help="Optional test case name regex")
|
|
|
|
parser.add_argument(
|
|
|
|
"-d",
|
|
|
|
"--disabled",
|
|
|
|
action="store_true",
|
|
|
|
default=False,
|
|
|
|
help="Also run disabled tests",
|
|
|
|
)
|
|
|
|
parser.add_argument(
|
|
|
|
"--stop",
|
|
|
|
action="store_true",
|
|
|
|
default=None,
|
|
|
|
dest="stop",
|
|
|
|
help="Stop on network errors",
|
|
|
|
)
|
|
|
|
parser.add_argument(
|
|
|
|
"--order", default="desc", choices=["asc", "desc", "random"], help="Run order"
|
|
|
|
)
|
|
|
|
parser.add_argument(
|
|
|
|
"--testname",
|
|
|
|
action="store_true",
|
|
|
|
default=None,
|
|
|
|
dest="testname",
|
|
|
|
help="Make query with test name before test run",
|
|
|
|
)
|
|
|
|
parser.add_argument("--hung-check", action="store_true", default=False)
|
|
|
|
parser.add_argument("--no-left-queries-check", action="store_true", default=False)
|
|
|
|
parser.add_argument("--force-color", action="store_true", default=False)
|
|
|
|
parser.add_argument(
|
|
|
|
"--database", help="Database for tests (random name test_XXXXXX by default)"
|
|
|
|
)
|
|
|
|
parser.add_argument(
|
|
|
|
"--no-drop-if-fail",
|
|
|
|
action="store_true",
|
|
|
|
help="Do not drop database for test if test has failed",
|
|
|
|
)
|
|
|
|
parser.add_argument(
|
|
|
|
"--hide-db-name",
|
|
|
|
action="store_true",
|
|
|
|
help='Replace random database name with "default" in stderr',
|
|
|
|
)
|
|
|
|
parser.add_argument(
|
|
|
|
"--parallel", default="1/1", help="One parallel test run number/total"
|
|
|
|
)
|
|
|
|
parser.add_argument(
|
|
|
|
"-j", "--jobs", default=1, nargs="?", type=int, help="Run all tests in parallel"
|
|
|
|
)
|
|
|
|
parser.add_argument(
|
|
|
|
"--test-runs",
|
|
|
|
default=1,
|
|
|
|
nargs="?",
|
|
|
|
type=int,
|
|
|
|
help="Run each test many times (useful for e.g. flaky check)",
|
|
|
|
)
|
|
|
|
parser.add_argument(
|
|
|
|
"-U",
|
|
|
|
"--unified",
|
|
|
|
default=3,
|
|
|
|
type=int,
|
|
|
|
help="output NUM lines of unified context",
|
|
|
|
)
|
|
|
|
parser.add_argument(
|
|
|
|
"-r",
|
|
|
|
"--server-check-retries",
|
|
|
|
default=180,
|
|
|
|
type=int,
|
|
|
|
help="Num of tries to execute SELECT 1 before tests started",
|
|
|
|
)
|
|
|
|
parser.add_argument("--db-engine", help="Database engine name")
|
|
|
|
parser.add_argument(
|
|
|
|
"--replicated-database",
|
|
|
|
action="store_true",
|
|
|
|
default=False,
|
|
|
|
help="Run tests with Replicated database engine",
|
|
|
|
)
|
|
|
|
parser.add_argument(
|
|
|
|
"--fast-tests-only",
|
|
|
|
action="store_true",
|
|
|
|
default=False,
|
|
|
|
help='Run only fast tests (the tests without the "no-fasttest" tag)',
|
|
|
|
)
|
|
|
|
parser.add_argument(
|
|
|
|
"--no-stateless", action="store_true", help="Disable all stateless tests"
|
|
|
|
)
|
|
|
|
parser.add_argument(
|
|
|
|
"--no-stateful", action="store_true", help="Disable all stateful tests"
|
|
|
|
)
|
|
|
|
parser.add_argument("--skip", nargs="+", help="Skip these tests")
|
|
|
|
parser.add_argument(
|
|
|
|
"--sequential",
|
|
|
|
nargs="+",
|
|
|
|
help="Run these tests sequentially even if --parallel specified",
|
|
|
|
)
|
|
|
|
parser.add_argument(
|
|
|
|
"--no-long", action="store_true", dest="no_long", help="Do not run long tests"
|
|
|
|
)
|
|
|
|
parser.add_argument(
|
|
|
|
"--client-option", nargs="+", help="Specify additional client argument"
|
|
|
|
)
|
|
|
|
parser.add_argument(
|
|
|
|
"--print-time", action="store_true", dest="print_time", help="Print test time"
|
|
|
|
)
|
|
|
|
parser.add_argument(
|
|
|
|
"--check-zookeeper-session",
|
|
|
|
action="store_true",
|
|
|
|
help="Check ZooKeeper session uptime to determine if failed test should be retried",
|
|
|
|
)
|
|
|
|
parser.add_argument(
|
|
|
|
"--s3-storage",
|
|
|
|
action="store_true",
|
|
|
|
default=False,
|
|
|
|
help="Run tests over s3 storage",
|
|
|
|
)
|
2022-06-20 04:43:07 +00:00
|
|
|
parser.add_argument(
|
|
|
|
"--stress",
|
|
|
|
action="store_true",
|
|
|
|
default=False,
|
|
|
|
help="Run stress tests",
|
|
|
|
)
|
2022-04-27 11:02:45 +00:00
|
|
|
parser.add_argument(
|
|
|
|
"--no-random-settings",
|
|
|
|
action="store_true",
|
|
|
|
default=False,
|
|
|
|
help="Disable settings randomization",
|
|
|
|
)
|
|
|
|
|
|
|
|
parser.add_argument(
|
|
|
|
"--run-by-hash-num",
|
|
|
|
type=int,
|
|
|
|
help="Run tests matching crc32(test_name) % run_by_hash_total == run_by_hash_num",
|
|
|
|
)
|
|
|
|
parser.add_argument(
|
|
|
|
"--run-by-hash-total",
|
|
|
|
type=int,
|
|
|
|
help="Total test groups for crc32(test_name) % run_by_hash_total == run_by_hash_num",
|
|
|
|
)
|
2021-12-10 15:39:02 +00:00
|
|
|
|
2021-08-06 14:38:28 +00:00
|
|
|
group = parser.add_mutually_exclusive_group(required=False)
|
2022-04-27 11:02:45 +00:00
|
|
|
group.add_argument(
|
|
|
|
"--zookeeper",
|
|
|
|
action="store_true",
|
|
|
|
default=None,
|
|
|
|
dest="zookeeper",
|
|
|
|
help="Run zookeeper related tests",
|
|
|
|
)
|
|
|
|
group.add_argument(
|
|
|
|
"--no-zookeeper",
|
|
|
|
action="store_false",
|
|
|
|
default=None,
|
|
|
|
dest="zookeeper",
|
|
|
|
help="Do not run zookeeper related tests",
|
|
|
|
)
|
2021-08-05 14:15:51 +00:00
|
|
|
|
2021-08-06 14:38:28 +00:00
|
|
|
group = parser.add_mutually_exclusive_group(required=False)
|
2022-04-27 11:02:45 +00:00
|
|
|
group.add_argument(
|
|
|
|
"--shard",
|
|
|
|
action="store_true",
|
|
|
|
default=None,
|
|
|
|
dest="shard",
|
2022-04-28 11:26:49 +00:00
|
|
|
help="Run sharding related tests "
|
|
|
|
"(required to clickhouse-server listen 127.0.0.2 127.0.0.3)",
|
2022-04-27 11:02:45 +00:00
|
|
|
)
|
|
|
|
group.add_argument(
|
|
|
|
"--no-shard",
|
|
|
|
action="store_false",
|
|
|
|
default=None,
|
|
|
|
dest="shard",
|
|
|
|
help="Do not run shard related tests",
|
|
|
|
)
|
|
|
|
|
|
|
|
group.add_argument(
|
|
|
|
"--backward-compatibility-check",
|
|
|
|
action="store_true",
|
2022-07-18 18:15:18 +00:00
|
|
|
help="Run tests for further backward compatibility testing by ignoring all"
|
2022-04-27 11:02:45 +00:00
|
|
|
"drop queries in tests for collecting data from new version of server",
|
|
|
|
)
|
2022-05-05 19:11:38 +00:00
|
|
|
parser.add_argument(
|
|
|
|
"--secure",
|
|
|
|
action="store_true",
|
|
|
|
default=False,
|
|
|
|
help="Use secure connection to connect to clickhouse-server",
|
|
|
|
)
|
|
|
|
parser.add_argument(
|
|
|
|
"--max-failures-chain",
|
|
|
|
default=20,
|
|
|
|
type=int,
|
|
|
|
help="Max number of failed tests in a row (stop tests if higher)",
|
|
|
|
)
|
2017-05-01 21:27:11 +00:00
|
|
|
args = parser.parse_args()
|
2018-01-12 13:56:02 +00:00
|
|
|
|
2020-08-13 18:45:55 +00:00
|
|
|
    if args.queries and not os.path.isdir(args.queries):
        print(
            f"Cannot access the specified directory with queries ({args.queries})",
            file=sys.stderr,
        )
        sys.exit(1)

    # Autodetect the directory with queries if not specified
    if args.queries is None:
        args.queries = "queries"

    if not os.path.isdir(args.queries):
        # If we're running from the repo
        args.queries = os.path.join(
            os.path.dirname(os.path.abspath(__file__)), "queries"
        )

    if not os.path.isdir(args.queries):
        # Next we're going to try some system directories, don't write 'stdout' files into them.
        if args.tmp is None:
            args.tmp = "/tmp/clickhouse-test"

        args.queries = "/usr/local/share/clickhouse-test/queries"

    if not os.path.isdir(args.queries):
        args.queries = "/usr/share/clickhouse-test/queries"

    if not os.path.isdir(args.queries):
        print(
            "Failed to detect path to the queries directory. Please specify it with "
            "'--queries' option.",
            file=sys.stderr,
        )
        sys.exit(1)

    print("Using queries from '" + args.queries + "' directory")

    if args.tmp is None:
        args.tmp = args.queries
    if args.client is None:
        if find_binary(args.binary + "-client"):
            args.client = args.binary + "-client"

            print("Using " + args.client + " as client program (expecting split build)")
        elif find_binary(args.binary):
            args.client = args.binary + " client"

            print(
                "Using "
                + args.client
                + " as client program (expecting monolithic build)"
            )
        else:
            print(
                "No 'clickhouse' or 'clickhouse-client' client binary found",
                file=sys.stderr,
            )
            parser.print_help()
            sys.exit(1)

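    # Connection parameters: CLICKHOUSE_HOST / CLICKHOUSE_PORT_TCP /
    # CLICKHOUSE_PORT_HTTP in the environment override the defaults; with
    # --secure the default ports switch to TLS (9440 TCP, 8443 HTTP).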
    if args.configclient:
        args.client += " --config-file=" + args.configclient

    tcp_host = os.getenv("CLICKHOUSE_HOST")
    if tcp_host is not None:
        args.tcp_host = tcp_host
        args.client += f" --host={tcp_host}"
    else:
        args.tcp_host = "localhost"

    tcp_port = os.getenv("CLICKHOUSE_PORT_TCP")
    if tcp_port is not None:
        args.tcp_port = int(tcp_port)
        args.client += f" --port={tcp_port}"
    else:
        args.tcp_port = 9440 if args.secure else 9000
        if args.secure:
            os.environ["CLICKHOUSE_PORT_TCP"] = str(args.tcp_port)

    http_port = os.getenv("CLICKHOUSE_PORT_HTTP")
    if http_port is not None:
        args.http_port = int(http_port)
    else:
        args.http_port = 8443 if args.secure else 8123
        os.environ["CLICKHOUSE_PORT_HTTP"] = str(args.http_port)
    if args.secure and os.getenv("CLICKHOUSE_PORT_HTTP_PROTO") is None:
        os.environ["CLICKHOUSE_PORT_HTTP_PROTO"] = "https"

client_database = os.getenv("CLICKHOUSE_DATABASE")
|
|
|
|
if client_database is not None:
|
2022-04-27 11:02:45 +00:00
|
|
|
args.client += f" --database={client_database}"
|
2021-10-12 19:30:34 +00:00
|
|
|
args.client_database = client_database
|
|
|
|
else:
|
2022-04-27 11:02:45 +00:00
|
|
|
args.client_database = "default"
|
2019-04-11 17:20:36 +00:00
|
|
|
|
2021-08-20 12:17:51 +00:00
|
|
|
if args.backward_compatibility_check:
|
2022-04-27 11:02:45 +00:00
|
|
|
args.client += " --fake-drop"
|
2021-08-20 12:17:51 +00:00
|
|
|
|
2022-05-05 19:11:38 +00:00
|
|
|
if args.client_option or args.secure:
|
2019-10-11 13:34:26 +00:00
|
|
|
# Set options for client
|
2022-04-27 11:02:45 +00:00
|
|
|
if "CLICKHOUSE_CLIENT_OPT" in os.environ:
|
|
|
|
os.environ["CLICKHOUSE_CLIENT_OPT"] += " "
|
2019-10-11 13:34:26 +00:00
|
|
|
else:
|
2022-04-27 11:02:45 +00:00
|
|
|
os.environ["CLICKHOUSE_CLIENT_OPT"] = ""
|
2019-10-11 13:34:26 +00:00
|
|
|
|
2022-04-27 11:02:45 +00:00
|
|
|
os.environ["CLICKHOUSE_CLIENT_OPT"] += get_additional_client_options(args)
|
2022-05-05 19:11:38 +00:00
|
|
|
if args.secure:
|
|
|
|
os.environ["CLICKHOUSE_CLIENT_OPT"] += " --secure "
|
2019-10-11 13:34:26 +00:00
|
|
|
|
2022-02-16 10:35:46 +00:00
|
|
|
# Set options for curl
|
2022-04-27 11:02:45 +00:00
|
|
|
if "CLICKHOUSE_URL_PARAMS" in os.environ:
|
|
|
|
os.environ["CLICKHOUSE_URL_PARAMS"] += "&"
|
2022-02-16 10:35:46 +00:00
|
|
|
else:
|
2022-04-27 11:02:45 +00:00
|
|
|
os.environ["CLICKHOUSE_URL_PARAMS"] = ""
|
2022-02-16 10:35:46 +00:00
|
|
|
|
2021-10-12 19:27:54 +00:00
|
|
|
client_options_query_str = get_additional_client_options_url(args)
|
2022-04-27 11:02:45 +00:00
|
|
|
args.client_options_query_str = client_options_query_str + "&"
|
|
|
|
os.environ["CLICKHOUSE_URL_PARAMS"] += client_options_query_str
|
2021-10-12 19:27:54 +00:00
|
|
|
else:
|
2022-04-27 11:02:45 +00:00
|
|
|
args.client_options_query_str = ""
|
2019-10-11 13:34:26 +00:00
|
|
|
|
2018-11-07 11:00:46 +00:00
|
|
|
if args.extract_from_config is None:
|
2022-04-27 11:02:45 +00:00
|
|
|
if os.access(args.binary + "-extract-from-config", os.X_OK):
|
|
|
|
args.extract_from_config = args.binary + "-extract-from-config"
|
2018-11-07 11:00:46 +00:00
|
|
|
else:
|
2022-04-27 11:02:45 +00:00
|
|
|
args.extract_from_config = args.binary + " extract-from-config"
|
2018-11-07 11:00:46 +00:00
|
|
|
|
2019-06-20 09:12:49 +00:00
|
|
|
if args.jobs is None:
|
2019-10-11 10:30:32 +00:00
|
|
|
args.jobs = multiprocessing.cpu_count()
|
2019-06-20 09:12:49 +00:00
|
|
|
|
2022-06-24 15:15:15 +00:00
|
|
|
if args.db_engine and args.db_engine == "Ordinary":
|
|
|
|
MESSAGES_TO_RETRY.append(" locking attempt on ")
|
|
|
|
|
2017-05-01 21:27:11 +00:00
|
|
|
main(args)
|