Mirror of https://github.com/ClickHouse/ClickHouse.git, synced 2024-12-18 04:12:19 +00:00
Merge remote-tracking branch 'origin/aku/flaky-stateless' into HEAD
Commit f2325b62f4
@@ -51,6 +51,7 @@ function run_tests()
 
     # Skip these tests, because they fail when we rerun them multiple times
     if [ "$NUM_TRIES" -gt "1" ]; then
+        ADDITIONAL_OPTIONS+=('--order=random')
         ADDITIONAL_OPTIONS+=('--skip')
         ADDITIONAL_OPTIONS+=('00000_no_tests_to_skip')
     fi
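For context, a rough Python rendering of the bash logic in the hunk above (illustrative only: the real script is bash, and the test-runner invocation at the end is an assumption, not shown in this diff). When the suite is rerun several times, tests are shuffled with --order=random and an explicit skip list is passed.

    import os
    import subprocess

    # Sketch only: mirrors the bash option-building above, not the real run script.
    additional_options = []
    if int(os.environ.get("NUM_TRIES", "1")) > 1:
        additional_options.append('--order=random')
        additional_options.append('--skip')
        additional_options.append('00000_no_tests_to_skip')

    # Hypothetical invocation of the functional test runner with the extra options.
    subprocess.run(['clickhouse-test'] + additional_options, check=False)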
@@ -75,7 +76,7 @@ timeout "$MAX_RUN_TIME" bash -c run_tests ||:
 
 ./process_functional_tests_result.py || echo -e "failure\tCannot parse results" > /test_output/check_status.tsv
 
-clickhouse-client -q "sytem flush logs" ||:
+clickhouse-client -q "system flush logs" ||:
 
 pigz < /var/log/clickhouse-server/clickhouse-server.log > /test_output/clickhouse-server.log.gz &
 clickhouse-client -q "select * from system.query_log format TSVWithNamesAndTypes" | pigz > /test_output/query-log.tsv.gz &
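The corrected statement forces the in-memory log buffers to be flushed so the subsequent system.query_log export is complete. A minimal Python sketch of the same export step (the clickhouse-client flags are taken from the hunk above; using subprocess and Python's gzip in place of the pigz pipeline is purely illustrative):

    import gzip
    import subprocess

    # Flush in-memory logs so system.query_log is complete, then export it.
    subprocess.run(["clickhouse-client", "-q", "system flush logs"], check=False)
    rows = subprocess.run(
        ["clickhouse-client", "-q",
         "select * from system.query_log format TSVWithNamesAndTypes"],
        capture_output=True, check=False).stdout
    with gzip.open("/test_output/query-log.tsv.gz", "wb") as f:
        f.write(rows)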
@@ -1,5 +1,6 @@
 #!/usr/bin/env python3
 
+import shutil
 import sys
 import os
 import os.path
@@ -112,13 +113,14 @@ def get_db_engine(args, database_name):
         return " ENGINE=" + args.db_engine
     return "" # Will use default engine
 
-def run_single_test(args, ext, server_logs_level, client_options, case_file, stdout_file, stderr_file):
+def run_single_test(args, ext, server_logs_level, client_options, case_file, stdout_file, stderr_file, suite_tmp_dir):
     # print(client_options)
 
     start_time = datetime.now()
     if args.database:
         database = args.database
         os.environ.setdefault("CLICKHOUSE_DATABASE", database)
+        os.environ.setdefault("CLICKHOUSE_TMP", suite_tmp_dir)
 
     else:
         # If --database is not specified, we will create temporary database with unique name
@@ -136,6 +138,12 @@ def run_single_test(args, ext, server_logs_level, client_options, case_file, std
             return clickhouse_proc_create, "", "Timeout creating database {} before test".format(database), total_time
 
         os.environ["CLICKHOUSE_DATABASE"] = database
+        # Set temporary directory to match the randomly generated database,
+        # because .sh tests also use it for temporary files and we want to avoid
+        # collisions.
+        test_tmp_dir = os.path.join(suite_tmp_dir, database)
+        os.mkdir(test_tmp_dir)
+        os.environ.setdefault("CLICKHOUSE_TMP", test_tmp_dir)
 
     # This is for .sh tests
     os.environ["CLICKHOUSE_LOG_COMMENT"] = case_file
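The block added above gives every test its own temporary directory, named after the test's randomly generated database, so concurrent .sh tests writing into CLICKHOUSE_TMP no longer collide. A self-contained sketch of the same pattern (the helper name and the usage lines are illustrative, not part of the patch):

    import os
    import shutil
    import tempfile

    def make_test_tmp_dir(suite_tmp_dir, database):
        # One directory per test, keyed by its unique database name,
        # so parallel tests cannot overwrite each other's files.
        test_tmp_dir = os.path.join(suite_tmp_dir, database)
        os.mkdir(test_tmp_dir)
        # setdefault: respect a CLICKHOUSE_TMP that was set explicitly beforehand.
        os.environ.setdefault("CLICKHOUSE_TMP", test_tmp_dir)
        return test_tmp_dir

    suite_tmp_dir = tempfile.mkdtemp()      # stand-in for the suite directory
    tmp = make_test_tmp_dir(suite_tmp_dir, "test_abc123")
    # ... run the test ...
    shutil.rmtree(tmp)                      # cleanup, as in the next hunk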
@@ -185,6 +193,8 @@ def run_single_test(args, ext, server_logs_level, client_options, case_file, std
             total_time = (datetime.now() - start_time).total_seconds()
             return clickhouse_proc_create, "", "Timeout dropping database {} after test".format(database), total_time
 
+        shutil.rmtree(test_tmp_dir)
+
     total_time = (datetime.now() - start_time).total_seconds()
 
     # Normalize randomized database names in stdout, stderr files.
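The matching cleanup above removes the per-test directory once the database has been dropped. An equivalent way to express create-plus-cleanup, shown only as an alternative sketch and not how the patch itself is written, is a context manager that removes the directory even when the test body raises:

    import contextlib
    import os
    import shutil

    @contextlib.contextmanager
    def per_test_tmp_dir(suite_tmp_dir, database):
        test_tmp_dir = os.path.join(suite_tmp_dir, database)
        os.mkdir(test_tmp_dir)
        try:
            yield test_tmp_dir           # run the test with this directory
        finally:
            shutil.rmtree(test_tmp_dir)  # always cleaned up, even on failure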
@@ -369,7 +379,7 @@ def run_tests_array(all_tests_with_params):
            stdout_file = os.path.join(suite_tmp_dir, name) + file_suffix + '.stdout'
            stderr_file = os.path.join(suite_tmp_dir, name) + file_suffix + '.stderr'
 
-           proc, stdout, stderr, total_time = run_single_test(args, ext, server_logs_level, client_options, case_file, stdout_file, stderr_file)
+           proc, stdout, stderr, total_time = run_single_test(args, ext, server_logs_level, client_options, case_file, stdout_file, stderr_file, suite_tmp_dir)
 
            if proc.returncode is None:
                try:
@@ -387,7 +397,7 @@ def run_tests_array(all_tests_with_params):
            else:
                counter = 1
                while proc.returncode != 0 and need_retry(stderr):
-                   proc, stdout, stderr, total_time = run_single_test(args, ext, server_logs_level, client_options, case_file, stdout_file, stderr_file)
+                   proc, stdout, stderr, total_time = run_single_test(args, ext, server_logs_level, client_options, case_file, stdout_file, stderr_file, suite_tmp_dir)
                    sleep(2**counter)
                    counter += 1
                    if counter > 6:
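Both call sites above now forward suite_tmp_dir to run_single_test; the second one sits inside the existing retry loop, which backs off exponentially between attempts. A stand-alone sketch of that retry pattern (function names here are illustrative, not from the patch):

    import time

    def run_with_retries(run_once, need_retry, max_attempts=6):
        # run_once() -> (returncode, stderr); rerun while the failure looks
        # transient, sleeping 2, 4, 8, ... seconds between attempts.
        returncode, stderr = run_once()
        counter = 1
        while returncode != 0 and need_retry(stderr):
            returncode, stderr = run_once()
            time.sleep(2 ** counter)
            counter += 1
            if counter > max_attempts:
                break
        return returncode, stderr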
@@ -657,7 +667,6 @@ def main(args):
     os.environ.setdefault("CLICKHOUSE_CONFIG", args.configserver)
     if args.configclient:
         os.environ.setdefault("CLICKHOUSE_CONFIG_CLIENT", args.configclient)
-    os.environ.setdefault("CLICKHOUSE_TMP", tmp_dir)
 
     # Force to print server warnings in stderr
     # Shell scripts could change logging level
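Dropping the global default above matters because os.environ.setdefault only writes a value when the key is absent; a process-wide CLICKHOUSE_TMP set in main() would otherwise shadow the per-test directory chosen in run_single_test. That motivation is an inference, not stated in the commit. A two-line illustration with hypothetical paths:

    import os

    os.environ.setdefault("CLICKHOUSE_TMP", "/suite/tmp")            # hypothetical global default
    os.environ.setdefault("CLICKHOUSE_TMP", "/suite/tmp/test_db_1")  # per-test value is ignored
    assert os.environ["CLICKHOUSE_TMP"] == "/suite/tmp"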
@@ -21,7 +21,7 @@ system flush logs;
 select count()
 from system.query_log
 where
-    query like '%01547_query_log_current_database%'
+    query like 'select \'01547_query_log_current_database%'
     and current_database = currentDatabase()
     and event_date >= yesterday();
 
@@ -30,7 +30,6 @@ where
 select count() == 2
 from system.query_thread_log
 where
-    query like '%01547_query_log_current_database%'
+    query like 'select \'01547_query_log_current_database%'
     and current_database = currentDatabase()
-    and event_date = today()
-    and event_time >= now() - interval 1 minute;
+    and event_date >= yesterday()
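Both SQL hunks replace the substring pattern '%01547_query_log_current_database%' with a prefix-anchored one. The likely intent, an inference rather than something stated in the commit, is that a substring match also catches the verification queries themselves once they land in query_log, which makes the expected counts unstable, while anchoring on "select '<tag>" matches only the tagged test queries. A small Python illustration of the difference:

    TAG = "01547_query_log_current_database"
    check_query = "select count() from system.query_log where query like '%" + TAG + "%'"

    # The substring pattern matches the checking query itself...
    assert TAG in check_query
    # ...while a prefix-anchored pattern ("select '<tag>...") does not.
    assert not check_query.startswith("select '" + TAG)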