Merge pull request #67875 from Avogar/limits-for-random-settings
Allow to specify min and max for random settings in the test
Commit: f1b1f8afcf
@@ -91,6 +91,28 @@ SELECT 1
 In addition to the above settings, you can use `USE_*` flags from `system.build_options` to define usage of particular ClickHouse features.
 For example, if your test uses a MySQL table, you should add a tag `use-mysql`.

+### Specifying limits for random settings
+
+A test can specify minimum and maximum allowed values for settings that can be randomized during a test run.
+
+For `.sh` tests, limits are written as a comment on the line after the tags, or on the second line if no tags are specified:
+
+```bash
+#!/usr/bin/env bash
+# Tags: no-fasttest
+# Random settings limits: max_block_size=(1000, 10000); index_granularity=(100, None)
+```
+
+For `.sql` tests, limits are written as a SQL comment on the line after the tags, or on the first line if no tags are specified:
+
+```sql
+-- Tags: no-fasttest
+-- Random settings limits: max_block_size=(1000, 10000); index_granularity=(100, None)
+SELECT 1
+```
+
+If you need to specify only one limit, you can use `None` for the other one.
+
 ### Choosing the Test Name

 The name of the test starts with a five-digit prefix followed by a descriptive name, such as `00422_hash_function_constexpr.sql`. To choose the prefix, find the largest prefix already present in the directory and increment it by one. In the meantime, other tests might be added with the same numeric prefix, but this is OK and does not cause any problems; you don't have to change it later.
@@ -39,6 +39,7 @@ from errno import ESRCH
 from subprocess import PIPE, Popen
 from time import sleep, time
 from typing import Dict, List, Optional, Set, Tuple, Union
+from ast import literal_eval as make_tuple

 try:
     import termcolor  # type: ignore
@@ -1145,9 +1146,24 @@ class TestCase:

         return description + "\n"

+    def apply_random_settings_limits(self, random_settings):
+        for setting in random_settings:
+            if setting in self.random_settings_limits:
+                min_value = self.random_settings_limits[setting][0]
+                if min_value and random_settings[setting] < min_value:
+                    random_settings[setting] = min_value
+                max_value = self.random_settings_limits[setting][1]
+                if max_value and random_settings[setting] > max_value:
+                    random_settings[setting] = max_value
+
     def __init__(self, suite, case: str, args, is_concurrent: bool):
         self.case: str = case  # case file name
         self.tags: Set[str] = suite.all_tags[case] if case in suite.all_tags else set()
+        self.random_settings_limits = (
+            suite.all_random_settings_limits[case]
+            if case in suite.all_random_settings_limits
+            else {}
+        )

         for tag in os.getenv("GLOBAL_TAGS", "").split(","):
             self.tags.add(tag.strip())
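To make the clamping rule above concrete, here is a minimal, self-contained sketch (not part of the commit) that mirrors `apply_random_settings_limits` on a plain dictionary; the setting names and values are hypothetical:

```python
# Editor's illustration only: mirrors the clamping logic of
# TestCase.apply_random_settings_limits on hypothetical inputs.
from typing import Dict, Optional, Tuple

Limits = Dict[str, Tuple[Optional[int], Optional[int]]]


def clamp_random_settings(random_settings: Dict[str, int], limits: Limits) -> None:
    # Mutates random_settings in place, like the method above.
    for setting in random_settings:
        if setting in limits:
            min_value, max_value = limits[setting]
            # A falsy bound (None, and also 0) is treated as "no bound on this side".
            if min_value and random_settings[setting] < min_value:
                random_settings[setting] = min_value
            if max_value and random_settings[setting] > max_value:
                random_settings[setting] = max_value


settings = {"max_block_size": 123, "index_granularity": 65536}
limits = {"max_block_size": (1000, 10000), "index_granularity": (100, None)}
clamp_random_settings(settings, limits)
print(settings)  # {'max_block_size': 1000, 'index_granularity': 65536}
```

Out-of-range values are pulled to the nearest bound, while a `None` bound leaves that side unbounded, which is what the documentation's `(100, None)` examples rely on.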
@@ -1189,11 +1205,13 @@ class TestCase:

         if self.randomize_settings:
             self.random_settings = SettingsRandomizer.get_random_settings(args)
+            self.apply_random_settings_limits(self.random_settings)

         if self.randomize_merge_tree_settings:
             self.merge_tree_random_settings = (
                 MergeTreeSettingsRandomizer.get_random_settings(args)
             )
+            self.apply_random_settings_limits(self.merge_tree_random_settings)

         self.base_url_params = (
             os.environ["CLICKHOUSE_URL_PARAMS"]
@@ -1963,7 +1981,9 @@ class TestSuite:
         return test_name

     @staticmethod
-    def read_test_tags(suite_dir: str, all_tests: List[str]) -> Dict[str, Set[str]]:
+    def read_test_tags_and_random_settings_limits(
+        suite_dir: str, all_tests: List[str]
+    ) -> (Dict[str, Set[str]], Dict[str, Dict[str, Tuple[int, int]]]):
         def get_comment_sign(filename):
             if filename.endswith(".sql") or filename.endswith(".sql.j2"):
                 return "--"
@@ -1988,27 +2008,58 @@ class TestSuite:
             tags = {tag.strip() for tag in tags}
             return tags

-        def is_shebang(line: str) -> bool:
-            return line.startswith("#!")
+        def parse_random_settings_limits_from_line(
+            line, comment_sign
+        ) -> Dict[str, Tuple[int, int]]:
+            if not line.startswith(comment_sign):
+                return {}
+            random_settings_limits_str = line[len(comment_sign) :].lstrip()
+            random_settings_limits_prefix = "Random settings limits:"
+            if not random_settings_limits_str.startswith(random_settings_limits_prefix):
+                return {}
+            random_settings_limits_str = random_settings_limits_str[
+                len(random_settings_limits_prefix) :
+            ]
+            # limits are specified in a form 'setting1=(min, max); setting2=(min,max); ...'
+            random_settings_limits = {}
+            for setting_and_limit in random_settings_limits_str.split(";"):
+                setting_and_limit = setting_and_limit.split("=")
+                random_settings_limits[setting_and_limit[0].strip()] = make_tuple(
+                    setting_and_limit[1]
+                )
+            return random_settings_limits

-        def find_tag_line(file):
-            for line in file:
-                line = line.strip()
-                if line and not is_shebang(line):
+        def find_tag_line(lines, comment_sign):
+            for line in lines:
+                if line.startswith(comment_sign) and line[
+                    len(comment_sign) :
+                ].lstrip().startswith("Tags:"):
                     return line
             return ""

-        def load_tags_from_file(filepath):
+        def find_random_settings_limits_line(lines, comment_sign):
+            for line in lines:
+                if line.startswith(comment_sign) and line[
+                    len(comment_sign) :
+                ].lstrip().startswith("Random settings limits:"):
+                    return line
+            return ""
+
+        def load_tags_and_random_settings_limits_from_file(filepath):
             comment_sign = get_comment_sign(filepath)
             need_query_params = False
             with open(filepath, "r", encoding="utf-8") as file:
                 try:
-                    tag_line = find_tag_line(file)
+                    lines = file.readlines()
+                    tag_line = find_tag_line(lines, comment_sign)
+                    random_settings_limits_line = find_random_settings_limits_line(
+                        lines, comment_sign
+                    )
                 except UnicodeDecodeError:
-                    return []
+                    return [], {}
                 try:
                     if filepath.endswith(".sql"):
-                        for line in file:
+                        for line in lines:
                             if "{CLICKHOUSE_DATABASE" in line:
                                 need_query_params = True
                 except UnicodeDecodeError:
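As a quick sanity check of the limits parser added above, the following sketch (not part of the commit) runs the same `literal_eval`-based parsing over a sample comment line; the input line is hypothetical and `parse_limits` is a simplified stand-in for `parse_random_settings_limits_from_line`:

```python
# Editor's illustration only: simplified stand-in for parse_random_settings_limits_from_line.
from ast import literal_eval as make_tuple
from typing import Dict, Optional, Tuple


def parse_limits(line: str, comment_sign: str) -> Dict[str, Tuple[Optional[int], Optional[int]]]:
    prefix = "Random settings limits:"
    if not line.startswith(comment_sign):
        return {}
    rest = line[len(comment_sign):].lstrip()
    if not rest.startswith(prefix):
        return {}
    limits = {}
    # Limits are written as 'setting1=(min, max); setting2=(min, max); ...'
    for part in rest[len(prefix):].split(";"):
        name, _, bounds = part.partition("=")
        # literal_eval turns the textual tuple "(100, None)" into the Python tuple (100, None).
        limits[name.strip()] = make_tuple(bounds.strip())
    return limits


line = "-- Random settings limits: max_block_size=(1000, 10000); index_granularity=(100, None)"
print(parse_limits(line, "--"))
# {'max_block_size': (1000, 10000), 'index_granularity': (100, None)}
```

The real helper splits on `=` rather than using `partition`, but the behaviour on well-formed lines like the one in the docs is the same.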
@@ -2016,18 +2067,31 @@ class TestSuite:
             parsed_tags = parse_tags_from_line(tag_line, comment_sign)
             if need_query_params:
                 parsed_tags.add("need-query-parameters")
-            return parsed_tags
+            random_settings_limits = parse_random_settings_limits_from_line(
+                random_settings_limits_line, comment_sign
+            )
+            return parsed_tags, random_settings_limits

         all_tags = {}
+        all_random_settings_limits = {}
         start_time = datetime.now()
         for test_name in all_tests:
-            tags = load_tags_from_file(os.path.join(suite_dir, test_name))
+            (
+                tags,
+                random_settings_limits,
+            ) = load_tags_and_random_settings_limits_from_file(
+                os.path.join(suite_dir, test_name)
+            )  # noqa: ignore E203
             if tags:
                 all_tags[test_name] = tags
+            if random_settings_limits:
+                all_random_settings_limits[test_name] = random_settings_limits
         elapsed = (datetime.now() - start_time).total_seconds()
         if elapsed > 1:
-            print(f"Tags for suite {suite_dir} read in {elapsed:.2f} seconds")
-        return all_tags
+            print(
+                f"Tags and random settings limits for suite {suite_dir} read in {elapsed:.2f} seconds"
+            )
+        return all_tags, all_random_settings_limits

     def __init__(self, args, suite_path: str, suite_tmp_path: str, suite: str):
         self.args = args
@@ -2057,10 +2121,16 @@ class TestSuite:
         self.all_tests: List[str] = self.get_tests_list(
             self.tests_in_suite_key_func, filter_func
         )
-        self.all_tags: Dict[str, Set[str]] = self.read_test_tags(
-            self.suite_path, self.all_tests
-        )
+
+        all_tags_and_random_settings_limits = (
+            self.read_test_tags_and_random_settings_limits(
+                self.suite_path, self.all_tests
+            )
+        )
+        self.all_tags: Dict[str, Set[str]] = all_tags_and_random_settings_limits[0]
+        self.all_random_settings_limits: Dict[str, Dict[str, (int, int)]] = (
+            all_tags_and_random_settings_limits[1]
+        )
         self.sequential_tests = []
         self.parallel_tests = []
         for test_name in self.all_tests:
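Putting the pieces together, the flow is: the suite reads `all_random_settings_limits` once per directory, each `TestCase` looks up its own entry (defaulting to an empty dict), and the randomized settings are clamped before the test runs. Below is a compressed, hypothetical sketch of that flow (not part of the commit; the file name and values are invented for the example):

```python
# Editor's illustration only: end-to-end flow of the new plumbing with invented data.
from typing import Dict, Optional, Tuple

Limits = Dict[str, Tuple[Optional[int], Optional[int]]]

# What read_test_tags_and_random_settings_limits might return for one suite (hypothetical file name).
all_random_settings_limits: Dict[str, Limits] = {
    "03000_example_test.sql": {"index_granularity": (100, None)},
}

case = "03000_example_test.sql"
# Same effect as the `if case in suite.all_random_settings_limits else {}` lookup in TestCase.__init__.
case_limits = all_random_settings_limits.get(case, {})

# Pretend these came from SettingsRandomizer / MergeTreeSettingsRandomizer.
random_settings = {"index_granularity": 7, "max_block_size": 42}
for setting, (lo, hi) in ((s, case_limits[s]) for s in random_settings if s in case_limits):
    if lo and random_settings[setting] < lo:
        random_settings[setting] = lo
    if hi and random_settings[setting] > hi:
        random_settings[setting] = hi

print(random_settings)  # {'index_granularity': 100, 'max_block_size': 42}
```

The SQL test hunks that follow are the user-facing side of this: each file simply gains a `-- Random settings limits:` comment after its tags line.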
@@ -1,4 +1,5 @@
 -- Tags: long, no-tsan, no-msan, no-ubsan, no-asan
+-- Random settings limits: index_granularity=(100, None)

 set allow_experimental_variant_type = 1;
 set use_variant_as_common_type = 1;
@@ -1,4 +1,5 @@
 -- Tags: long, no-tsan, no-msan, no-ubsan, no-asan
+-- Random settings limits: index_granularity=(100, None)

 set allow_experimental_variant_type = 1;
 set use_variant_as_common_type = 1;
@@ -1,4 +1,6 @@
 -- Tags: long, no-tsan, no-msan, no-ubsan, no-asan
+-- Random settings limits: index_granularity=(100, None); merge_max_block_size=(100, None)
+
 set allow_experimental_dynamic_type=1;

 drop table if exists test;
@@ -1,4 +1,6 @@
 -- Tags: long, no-tsan, no-msan, no-ubsan, no-asan
+-- Random settings limits: index_granularity=(100, None); merge_max_block_size=(100, None)
+
 set allow_experimental_dynamic_type=1;

 drop table if exists test;
@@ -1,4 +1,6 @@
 -- Tags: long, no-tsan, no-msan, no-ubsan, no-asan
+-- Random settings limits: index_granularity=(100, None); merge_max_block_size=(100, None)
+
 set allow_experimental_dynamic_type=1;

 drop table if exists test;
@@ -1,4 +1,6 @@
 -- Tags: long, no-tsan, no-msan, no-ubsan, no-asan
+-- Random settings limits: index_granularity=(100, None); merge_max_block_size=(100, None)
+
 set allow_experimental_dynamic_type=1;

 drop table if exists test;
@@ -1,4 +1,5 @@
 -- Tags: long, no-tsan, no-msan, no-ubsan, no-asan
+-- Random settings limits: index_granularity=(100, None); merge_max_block_size=(100, None)

 set allow_experimental_dynamic_type = 1;

@@ -1,4 +1,5 @@
 -- Tags: long, no-tsan, no-msan, no-ubsan, no-asan
+-- Random settings limits: index_granularity=(100, None); merge_max_block_size=(100, None)

 set allow_experimental_dynamic_type = 1;

@@ -1,4 +1,5 @@
 -- Tags: long, no-tsan, no-msan, no-ubsan, no-asan
+-- Random settings limits: index_granularity=(100, None); merge_max_block_size=(100, None)

 set allow_experimental_dynamic_type = 1;

@@ -1,4 +1,5 @@
 -- Tags: long, no-tsan, no-msan, no-ubsan, no-asan
+-- Random settings limits: index_granularity=(100, None); merge_max_block_size=(100, None)

 set allow_experimental_dynamic_type = 1;

@@ -2,6 +2,7 @@ set allow_experimental_variant_type = 1;
 set use_variant_as_common_type = 1;
 set allow_experimental_dynamic_type = 1;
+

 drop table if exists test;

 {% for engine in ['MergeTree order by id settings min_rows_for_wide_part=1000000000, min_bytes_for_wide_part=10000000000',
@@ -1,4 +1,5 @@
 -- Tags: long, no-tsan, no-msan, no-ubsan, no-asan
+-- Random settings limits: index_granularity=(100, None); merge_max_block_size=(100, None)

 set allow_experimental_variant_type = 1;
 set use_variant_as_common_type = 1;
@@ -1,4 +1,5 @@
 -- Tags: long, no-tsan, no-msan, no-ubsan, no-asan
+-- Random settings limits: index_granularity=(100, None); merge_max_block_size=(100, None)

 set allow_experimental_variant_type = 1;
 set use_variant_as_common_type = 1;
@@ -1,4 +1,5 @@
 -- Tags: long, no-tsan, no-msan, no-ubsan, no-asan
+-- Random settings limits: index_granularity=(100, None); merge_max_block_size=(100, None)

 set allow_experimental_variant_type = 1;
 set use_variant_as_common_type = 1;
@@ -1,4 +1,5 @@
 -- Tags: long, no-tsan, no-msan, no-ubsan, no-asan
+-- Random settings limits: index_granularity=(100, None); merge_max_block_size=(100, None)

 set allow_experimental_variant_type = 1;
 set use_variant_as_common_type = 1;