Move skip lists to clickhouse-test

This commit is contained in:
alesapin 2020-07-03 13:57:16 +03:00
parent 0a6f3ca9fd
commit d015a4d646
2 changed files with 223 additions and 0 deletions

View File

@ -4,6 +4,7 @@ import sys
import os
import os.path
import re
import json
from argparse import ArgumentParser
from argparse import FileType
@ -377,6 +378,76 @@ def check_server_started(client, retry_count):
return False
class BuildFlags(object):
    """Symbolic names for server build/runtime variants.

    Each value doubles as a top-level key in the skip-list JSON file, tying
    collect_build_flags() to collect_tests_to_skip().
    """
    # Sanitizer builds.
    THREAD, ADDRESS, UNDEFINED, MEMORY = (
        'thread-sanitizer',
        'address-sanitizer',
        'ub-sanitizer',
        'memory-sanitizer',
    )
    # Build kinds.
    DEBUG, UNBUNDLED, RELEASE = 'debug-build', 'unbundled-build', 'release-build'
    # Server configuration variants.
    DATABASE_ATOMIC = 'database-atomic'
    POLYMORPHIC_PARTS = 'polymorphic-parts'
def collect_build_flags(client):
    """Ask the running server about its build and return a list of BuildFlags.

    :param client: the clickhouse-client command line (split with shlex and
        fed the query on stdin).
    :return: list of BuildFlags values describing the sanitizer (if any),
        build type, unbundled mode, default database engine and the
        polymorphic-parts setting.
    :raises Exception: if any client invocation exits with a non-zero code.
    """
    def query_value(sql):
        # One client round-trip; raises so a dead server fails fast instead
        # of silently producing an empty flag list.
        proc = Popen(shlex.split(client), stdin=PIPE, stdout=PIPE, stderr=PIPE)
        (stdout, stderr) = proc.communicate(sql)
        if proc.returncode != 0:
            raise Exception("Cannot get information about build from server errorcode {}, stderr {}".format(proc.returncode, stderr))
        return stdout

    result = []

    cxx_flags = query_value("SELECT value FROM system.build_options WHERE name = 'CXX_FLAGS'")
    # At most one sanitizer can be enabled in a build, hence elif.
    if '-fsanitize=thread' in cxx_flags:
        result.append(BuildFlags.THREAD)
    elif '-fsanitize=address' in cxx_flags:
        result.append(BuildFlags.ADDRESS)
    elif '-fsanitize=undefined' in cxx_flags:
        result.append(BuildFlags.UNDEFINED)
    elif '-fsanitize=memory' in cxx_flags:
        result.append(BuildFlags.MEMORY)

    build_type = query_value("SELECT value FROM system.build_options WHERE name = 'BUILD_TYPE'")
    if 'Debug' in build_type:
        result.append(BuildFlags.DEBUG)
    elif 'RelWithDebInfo' in build_type or 'Release' in build_type:
        result.append(BuildFlags.RELEASE)

    unbundled = query_value("SELECT value FROM system.build_options WHERE name = 'UNBUNDLED'")
    if 'ON' in unbundled:
        result.append(BuildFlags.UNBUNDLED)

    default_db_engine = query_value("SELECT value FROM system.settings WHERE name = 'default_database_engine'")
    if 'Atomic' in default_db_engine:
        result.append(BuildFlags.DATABASE_ATOMIC)

    # 10485760 (10 MiB) is the marker value used to enable polymorphic parts
    # in test configurations — TODO confirm against the CI config.
    min_bytes_wide = query_value("SELECT value FROM system.merge_tree_settings WHERE name = 'min_bytes_for_wide_part'")
    if '10485760' in min_bytes_wide:
        result.append(BuildFlags.POLYMORPHIC_PARTS)

    return result
def main(args):
global SERVER_DIED
global exit_code
@ -392,6 +463,12 @@ def main(args):
if not check_server_started(args.client, args.server_check_retries):
raise Exception("clickhouse-server is not responding. Cannot execute 'SELECT 1' query.")
build_flags = collect_build_flags(args.client)
tests_to_skip_from_list = collect_tests_to_skip(args.skip_list, build_flags)
if args.skip:
args.skip = set(args.skip) | tests_to_skip_from_list
else:
args.skip = tests_to_skip_from_list
base_dir = os.path.abspath(args.queries)
tmp_dir = os.path.abspath(args.tmp)
@ -604,6 +681,17 @@ def get_additional_client_options_url(args):
return ''
def collect_tests_to_skip(skip_list_path, build_flags):
    """Load the JSON skip list and return the test-name patterns to skip.

    :param skip_list_path: path to a JSON file mapping build-flag names
        (the BuildFlags values) to lists of test-name substrings.
    :param build_flags: iterable of build flags the server was built with.
    :return: set of test-name patterns; empty when the file does not exist.
    """
    result = set()
    if not os.path.exists(skip_list_path):
        return result
    with open(skip_list_path, 'r') as skip_list_file:
        skip_dict = json.load(skip_list_file)
    for build_flag in build_flags:
        # A flag with no entry in the file simply contributes no skips
        # (the original raised KeyError here).
        result |= set(skip_dict.get(build_flag, []))
    return result
if __name__ == '__main__':
parser=ArgumentParser(description='ClickHouse functional tests')
parser.add_argument('-q', '--queries', help='Path to queries dir')
@ -627,6 +715,7 @@ if __name__ == '__main__':
parser.add_argument('-j', '--jobs', default=1, nargs='?', type=int, help='Run all tests in parallel')
parser.add_argument('-U', '--unified', default=3, type=int, help='output NUM lines of unified context')
parser.add_argument('-r', '--server-check-retries', default=30, type=int, help='Num of tries to execute SELECT 1 before tests started')
parser.add_argument('--skip-list', help="Path to skip-list file")
parser.add_argument('--no-stateless', action='store_true', help='Disable all stateless tests')
parser.add_argument('--no-stateful', action='store_true', help='Disable all stateful tests')
@ -655,6 +744,10 @@ if __name__ == '__main__':
if args.queries is None:
print("Failed to detect path to the queries directory. Please specify it with '--queries' option.", file=sys.stderr)
exit(1)
if args.skip_list is None:
args.skip_list = os.path.join(args.queries, 'skip_list.json')
if args.tmp is None:
args.tmp = args.queries
if args.client is None:

View File

@ -0,0 +1,130 @@
{
"thread-sanitizer": [
"00281",
"00877",
"00985",
"avx2",
"query_profiler",
"memory_profiler",
"01083_expressions_in_engine_arguments",
"00505_shard_secure",
"00505_secure",
"01103_check_cpu_instructions_at_startup",
"01098_temporary_and_external_tables",
"00152_insert_different_granularity",
"00151_replace_partition_with_different_granularity"
],
"address-sanitizer": [
"00281",
"00877",
"avx2",
"query_profiler",
"memory_profiler",
"odbc_roundtrip",
"01103_check_cpu_instructions_at_startup"
],
"ub-sanitizer": [
"00281",
"capnproto",
"avx2",
"query_profiler",
"memory_profiler",
"01103_check_cpu_instructions_at_startup",
"00900_orc_load"
],
"memory-sanitizer": [
"00281",
"capnproto",
"avx2",
"query_profiler",
"memory_profiler",
"01103_check_cpu_instructions_at_startup",
"01086_odbc_roundtrip",
"00877_memory_limit_for_new_delete",
"01114_mysql_database_engine_segfault"
],
"debug-build": [
"00281",
"avx2",
"query_profiler",
"memory_profiler",
"00899_long_attach",
"00980_alter_settings_race",
"00834_kill_mutation_replicated_zookeeper",
"00834_kill_mutation",
"01200_mutations_memory_consumption",
"01103_check_cpu_instructions_at_startup",
"01037_polygon_dicts_",
"hyperscan"
],
"unbundled-build": [
"00429",
"00428",
"00877",
"pocopatch",
"parquet",
"xxhash",
"avx2",
"_h3",
"query_profiler",
"memory_profiler",
"orc_load",
"01033_storage_odbc_parsing_exception_check",
"avro",
"01072_optimize_skip_unused_shards_const_expr_eval",
"00505_secure",
"00505_shard_secure",
"odbc_roundtrip",
"01103_check_cpu_instructions_at_startup",
"01114_mysql_database_engine_segfault",
"00834_cancel_http_readonly_queries_on_client_close",
"_arrow",
"01099_parallel_distributed_insert_select",
"01300_client_save_history_when_terminated",
"orc_output"
],
"release-build": [
"avx2"
],
"database-atomic": [
"00065_loyalty_with_storage_join",
"avx",
"00738_lock_for_inner_table",
"00699_materialized_view_mutations",
"00609_mv_index_in_in",
"00510_materizlized_view_and_deduplication_zookeeper",
"00604_show_create_database",
"00080_show_tables_and_system_tables",
"01272_suspicious_codecs",
"01249_bad_arguments_for_bloom_filter",
"00423_storage_log_single_thread",
"00311_array_primary_key",
"00226_zookeeper_deduplication_and_unexpected_parts",
"00180_attach_materialized_view",
"00116_storage_set",
"00816_long_concurrent_alter_column",
"00992_system_parts_race_condition_zookeeper"
],
"polymorphic-parts": [
"avx",
"01045_order_by_pk_special_storages",
"01042_check_query_and_last_granule_size",
"00961_checksums_in_system_parts_columns_table",
"00933_test_fix_extra_seek_on_compressed_cache",
"00926_adaptive_index_granularity_collapsing_merge_tree",
"00926_adaptive_index_granularity_merge_tree",
"00926_adaptive_index_granularity_replacing_merge_tree",
"00926_adaptive_index_granularity_versioned_collapsing_merge_tree",
"00804_test_delta_codec_compression",
"00731_long_merge_tree_select_opened_files",
"00653_verification_monotonic_data_load",
"00484_preferred_max_column_in_block_size_bytes",
"00446_clear_column_in_partition_zookeeper",
"00443_preferred_block_size_bytes",
"00160_merge_and_index_in_in",
"01055_compact_parts",
"01039_mergetree_exec_time",
"00933_ttl_simple",
"00753_system_columns_and_system_tables"
]
}