mirror of
https://github.com/ClickHouse/ClickHouse.git
synced 2024-11-29 02:52:13 +00:00
better integration test for redis dictionary (but still bad)
This commit is contained in:
parent
e9336c9166
commit
4df1f1bb9a
@ -57,8 +57,7 @@ namespace DB
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
static const size_t max_block_size = 4;
|
static const size_t max_block_size = 8192;
|
||||||
|
|
||||||
|
|
||||||
RedisDictionarySource::RedisDictionarySource(
|
RedisDictionarySource::RedisDictionarySource(
|
||||||
const DictionaryStructure & dict_struct_,
|
const DictionaryStructure & dict_struct_,
|
||||||
@ -155,13 +154,16 @@ namespace DB
|
|||||||
|
|
||||||
/// Get only keys for specified storage type.
|
/// Get only keys for specified storage type.
|
||||||
auto all_keys = client->execute<RedisArray>(command_for_keys);
|
auto all_keys = client->execute<RedisArray>(command_for_keys);
|
||||||
|
if (all_keys.isNull())
|
||||||
|
return std::make_shared<RedisBlockInputStream>(client, RedisArray{}, storage_type, sample_block, max_block_size);
|
||||||
|
|
||||||
RedisArray keys;
|
RedisArray keys;
|
||||||
auto key_type = storageTypeToKeyType(storage_type);
|
auto key_type = storageTypeToKeyType(storage_type);
|
||||||
for (auto & key : all_keys)
|
for (auto & key : all_keys)
|
||||||
if (key_type == client->execute<std::string>(RedisCommand("TYPE").addRedisType(key)))
|
if (key_type == client->execute<std::string>(RedisCommand("TYPE").addRedisType(key)))
|
||||||
keys.addRedisType(std::move(key));
|
keys.addRedisType(std::move(key));
|
||||||
|
|
||||||
if (storage_type == RedisStorageType::HASH_MAP && !keys.isNull())
|
if (storage_type == RedisStorageType::HASH_MAP)
|
||||||
{
|
{
|
||||||
RedisArray hkeys;
|
RedisArray hkeys;
|
||||||
for (const auto & key : keys)
|
for (const auto & key : keys)
|
||||||
|
@ -115,7 +115,7 @@ class ClickHouseCluster:
|
|||||||
cmd += " client"
|
cmd += " client"
|
||||||
return cmd
|
return cmd
|
||||||
|
|
||||||
def add_instance(self, name, config_dir=None, main_configs=[], user_configs=[], macros={}, with_zookeeper=False, with_mysql=False, with_kafka=False, clickhouse_path_dir=None, with_odbc_drivers=False, with_postgres=False, with_hdfs=False, with_mongo=False, with_redis=False, hostname=None, env_variables={}, image="yandex/clickhouse-integration-test", stay_alive=False, ipv4_address=None, ipv6_address=None):
|
def add_instance(self, name, config_dir=None, main_configs=[], user_configs=[], macros={}, with_zookeeper=False, with_mysql=False, with_kafka=False, clickhouse_path_dir=None, with_odbc_drivers=False, with_postgres=False, with_hdfs=False, with_mongo=False, with_redis=False, hostname=None, env_variables={}, image="yandex/clickhouse-integration-test", stay_alive=False, ipv4_address=None, ipv6_address=None, with_installed_binary=False):
|
||||||
"""Add an instance to the cluster.
|
"""Add an instance to the cluster.
|
||||||
|
|
||||||
name - the name of the instance directory and the value of the 'instance' macro in ClickHouse.
|
name - the name of the instance directory and the value of the 'instance' macro in ClickHouse.
|
||||||
|
@ -1,4 +1,4 @@
|
|||||||
[pytest]
|
[pytest]
|
||||||
python_files = test*.py
|
python_files = test.py
|
||||||
norecursedirs = _instances
|
norecursedirs = _instances
|
||||||
timeout = 600
|
timeout = 600
|
||||||
|
@ -44,6 +44,9 @@ class Row(object):
|
|||||||
for field, value in zip(fields, values):
|
for field, value in zip(fields, values):
|
||||||
self.data[field.name] = value
|
self.data[field.name] = value
|
||||||
|
|
||||||
|
def has_field(self, name):
|
||||||
|
return name in self.data
|
||||||
|
|
||||||
def get_value_by_name(self, name):
|
def get_value_by_name(self, name):
|
||||||
return self.data[name]
|
return self.data[name]
|
||||||
|
|
||||||
@ -97,6 +100,7 @@ class DictionaryStructure(object):
|
|||||||
self.range_key = None
|
self.range_key = None
|
||||||
self.ordinary_fields = []
|
self.ordinary_fields = []
|
||||||
self.range_fields = []
|
self.range_fields = []
|
||||||
|
self.has_hierarchy = False
|
||||||
|
|
||||||
for field in fields:
|
for field in fields:
|
||||||
if field.is_key:
|
if field.is_key:
|
||||||
@ -106,6 +110,9 @@ class DictionaryStructure(object):
|
|||||||
else:
|
else:
|
||||||
self.ordinary_fields.append(field)
|
self.ordinary_fields.append(field)
|
||||||
|
|
||||||
|
if field.hierarchical:
|
||||||
|
self.has_hierarchy = True
|
||||||
|
|
||||||
if field.is_range_key:
|
if field.is_range_key:
|
||||||
if self.range_key is not None:
|
if self.range_key is not None:
|
||||||
raise Exception("Duplicate range key {}".format(field.name))
|
raise Exception("Duplicate range key {}".format(field.name))
|
||||||
@ -286,14 +293,13 @@ class DictionaryStructure(object):
|
|||||||
|
|
||||||
|
|
||||||
class Dictionary(object):
|
class Dictionary(object):
|
||||||
def __init__(self, name, structure, source, config_path, table_name, fields=None, values=None):
|
def __init__(self, name, structure, source, config_path, table_name, fields):
|
||||||
self.name = name
|
self.name = name
|
||||||
self.structure = copy.deepcopy(structure)
|
self.structure = copy.deepcopy(structure)
|
||||||
self.source = copy.deepcopy(source)
|
self.source = copy.deepcopy(source)
|
||||||
self.config_path = config_path
|
self.config_path = config_path
|
||||||
self.table_name = table_name
|
self.table_name = table_name
|
||||||
self.fields = fields
|
self.fields = fields
|
||||||
self.values = values
|
|
||||||
|
|
||||||
def generate_config(self):
|
def generate_config(self):
|
||||||
with open(self.config_path, 'w') as result:
|
with open(self.config_path, 'w') as result:
|
||||||
@ -343,3 +349,6 @@ class Dictionary(object):
|
|||||||
|
|
||||||
def is_complex(self):
|
def is_complex(self):
|
||||||
return self.structure.layout.is_complex
|
return self.structure.layout.is_complex
|
||||||
|
|
||||||
|
def get_fields(self):
|
||||||
|
return self.fields
|
||||||
|
@ -402,14 +402,20 @@ class SourceRedis(ExternalSource):
|
|||||||
def prepare(self, structure, table_name, cluster):
|
def prepare(self, structure, table_name, cluster):
|
||||||
self.client = redis.StrictRedis(host=self.internal_hostname, port=self.internal_port)
|
self.client = redis.StrictRedis(host=self.internal_hostname, port=self.internal_port)
|
||||||
self.prepared = True
|
self.prepared = True
|
||||||
|
self.ordered_names = structure.get_ordered_names()
|
||||||
|
|
||||||
def load_kv_data(self, values):
|
def load_data(self, data, table_name):
|
||||||
self.client.flushdb()
|
self.client.flushdb()
|
||||||
if len(values[0]) == 2:
|
for row in list(data):
|
||||||
self.client.mset({value[0]: value[1] for value in values})
|
values = []
|
||||||
else:
|
for name in self.ordered_names:
|
||||||
for value in values:
|
values.append(str(row.data[name]))
|
||||||
self.client.hset(value[0], value[1], value[2])
|
print 'values: ', values
|
||||||
|
if len(values) == 2:
|
||||||
|
self.client.set(*values)
|
||||||
|
print 'kek: ', self.client.get(values[0])
|
||||||
|
else:
|
||||||
|
self.client.hset(*values)
|
||||||
|
|
||||||
def compatible_with_layout(self, layout):
|
def compatible_with_layout(self, layout):
|
||||||
if (
|
if (
|
||||||
|
@ -4,9 +4,10 @@ import os
|
|||||||
from helpers.cluster import ClickHouseCluster
|
from helpers.cluster import ClickHouseCluster
|
||||||
from dictionary import Field, Row, Dictionary, DictionaryStructure, Layout
|
from dictionary import Field, Row, Dictionary, DictionaryStructure, Layout
|
||||||
from external_sources import SourceMySQL, SourceClickHouse, SourceFile, SourceExecutableCache, SourceExecutableHashed
|
from external_sources import SourceMySQL, SourceClickHouse, SourceFile, SourceExecutableCache, SourceExecutableHashed
|
||||||
from external_sources import SourceMongo, SourceHTTP, SourceHTTPS
|
from external_sources import SourceMongo, SourceHTTP, SourceHTTPS, SourceRedis
|
||||||
|
|
||||||
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
|
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
|
||||||
|
dict_configs_path = os.path.join(SCRIPT_DIR, 'configs/dictionaries')
|
||||||
|
|
||||||
FIELDS = {
|
FIELDS = {
|
||||||
"simple": [
|
"simple": [
|
||||||
@ -65,9 +66,44 @@ FIELDS = {
|
|||||||
Field("Float32_", 'Float32', default_value_for_get=555.11),
|
Field("Float32_", 'Float32', default_value_for_get=555.11),
|
||||||
Field("Float64_", 'Float64', default_value_for_get=777.11),
|
Field("Float64_", 'Float64', default_value_for_get=777.11),
|
||||||
]
|
]
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
VALUES = {
|
||||||
|
"simple": [
|
||||||
|
[1, 22, 333, 4444, 55555, -6, -77,
|
||||||
|
-888, -999, '550e8400-e29b-41d4-a716-446655440003',
|
||||||
|
'1973-06-28', '1985-02-28 23:43:25', 'hello', 22.543, 3332154213.4, 0],
|
||||||
|
[2, 3, 4, 5, 6, -7, -8,
|
||||||
|
-9, -10, '550e8400-e29b-41d4-a716-446655440002',
|
||||||
|
'1978-06-28', '1986-02-28 23:42:25', 'hello', 21.543, 3222154213.4, 1]
|
||||||
|
],
|
||||||
|
"complex": [
|
||||||
|
[1, 'world', 22, 333, 4444, 55555, -6,
|
||||||
|
-77, -888, -999, '550e8400-e29b-41d4-a716-446655440003',
|
||||||
|
'1973-06-28', '1985-02-28 23:43:25',
|
||||||
|
'hello', 22.543, 3332154213.4],
|
||||||
|
[2, 'qwerty2', 52, 2345, 6544, 9191991, -2,
|
||||||
|
-717, -81818, -92929, '550e8400-e29b-41d4-a716-446655440007',
|
||||||
|
'1975-09-28', '2000-02-28 23:33:24',
|
||||||
|
'my', 255.543, 3332221.44]
|
||||||
|
|
||||||
|
],
|
||||||
|
"ranged": [
|
||||||
|
[1, '2019-02-10', '2019-02-01', '2019-02-28',
|
||||||
|
22, 333, 4444, 55555, -6, -77, -888, -999,
|
||||||
|
'550e8400-e29b-41d4-a716-446655440003',
|
||||||
|
'1973-06-28', '1985-02-28 23:43:25', 'hello',
|
||||||
|
22.543, 3332154213.4],
|
||||||
|
[2, '2019-04-10', '2019-04-01', '2019-04-28',
|
||||||
|
11, 3223, 41444, 52515, -65, -747, -8388, -9099,
|
||||||
|
'550e8400-e29b-41d4-a716-446655440004',
|
||||||
|
'1973-06-29', '2002-02-28 23:23:25', '!!!!',
|
||||||
|
32.543, 3332543.4]
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
LAYOUTS = [
|
LAYOUTS = [
|
||||||
Layout("hashed"),
|
Layout("hashed"),
|
||||||
Layout("cache"),
|
Layout("cache"),
|
||||||
@ -91,36 +127,59 @@ SOURCES = [
|
|||||||
|
|
||||||
DICTIONARIES = []
|
DICTIONARIES = []
|
||||||
|
|
||||||
|
# Key-value dictionaries with onle one possible field for key
|
||||||
|
SOURCES_KV = [
|
||||||
|
SourceRedis("RedisSimple", "localhost", "6380", "redis1", "6379", "", "", storage_type="simple"),
|
||||||
|
SourceRedis("RedisHash", "localhost", "6380", "redis1", "6379", "", "", storage_type="hash_map"),
|
||||||
|
]
|
||||||
|
|
||||||
|
DICTIONARIES_KV = []
|
||||||
|
|
||||||
cluster = None
|
cluster = None
|
||||||
node = None
|
node = None
|
||||||
|
|
||||||
|
def get_dict(source, layout, fields, suffix_name=''):
|
||||||
|
global dict_configs_path
|
||||||
|
|
||||||
|
structure = DictionaryStructure(layout, fields)
|
||||||
|
dict_name = source.name + "_" + layout.name + '_' + suffix_name
|
||||||
|
dict_path = os.path.join(dict_configs_path, dict_name + '.xml')
|
||||||
|
dictionary = Dictionary(dict_name, structure, source, dict_path, "table_" + dict_name, fields)
|
||||||
|
dictionary.generate_config()
|
||||||
|
return dictionary
|
||||||
|
|
||||||
def setup_module(module):
|
def setup_module(module):
|
||||||
global DICTIONARIES
|
global DICTIONARIES
|
||||||
global cluster
|
global cluster
|
||||||
global node
|
global node
|
||||||
|
global dict_configs_path
|
||||||
|
|
||||||
dict_configs_path = os.path.join(SCRIPT_DIR, 'configs/dictionaries')
|
|
||||||
for f in os.listdir(dict_configs_path):
|
for f in os.listdir(dict_configs_path):
|
||||||
os.remove(os.path.join(dict_configs_path, f))
|
os.remove(os.path.join(dict_configs_path, f))
|
||||||
|
|
||||||
for layout in LAYOUTS:
|
for layout in LAYOUTS:
|
||||||
for source in SOURCES:
|
for source in SOURCES:
|
||||||
if source.compatible_with_layout(layout):
|
if source.compatible_with_layout(layout):
|
||||||
structure = DictionaryStructure(layout, FIELDS[layout.layout_type])
|
DICTIONARIES.append(get_dict(source, layout, FIELDS[layout.layout_type]))
|
||||||
dict_name = source.name + "_" + layout.name
|
|
||||||
dict_path = os.path.join(dict_configs_path, dict_name + '.xml')
|
|
||||||
dictionary = Dictionary(dict_name, structure, source, dict_path, "table_" + dict_name)
|
|
||||||
dictionary.generate_config()
|
|
||||||
DICTIONARIES.append(dictionary)
|
|
||||||
else:
|
else:
|
||||||
print "Source", source.name, "incompatible with layout", layout.name
|
print "Source", source.name, "incompatible with layout", layout.name
|
||||||
|
|
||||||
|
for layout in LAYOUTS:
|
||||||
|
field_keys = list(filter(lambda x: x.is_key, FIELDS[layout.layout_type]))
|
||||||
|
for source in SOURCES_KV:
|
||||||
|
if not source.compatible_with_layout(layout):
|
||||||
|
print "Source", source.name, "incompatible with layout", layout.name
|
||||||
|
continue
|
||||||
|
|
||||||
|
for field in FIELDS[layout.layout_type]:
|
||||||
|
if not (field.is_key or field.is_range or field.is_range_key):
|
||||||
|
DICTIONARIES_KV.append(get_dict(source, layout, field_keys + [field], field.name))
|
||||||
|
|
||||||
main_configs = []
|
main_configs = []
|
||||||
for fname in os.listdir(dict_configs_path):
|
for fname in os.listdir(dict_configs_path):
|
||||||
main_configs.append(os.path.join(dict_configs_path, fname))
|
main_configs.append(os.path.join(dict_configs_path, fname))
|
||||||
cluster = ClickHouseCluster(__file__, base_configs_dir=os.path.join(SCRIPT_DIR, 'configs'))
|
cluster = ClickHouseCluster(__file__, base_configs_dir=os.path.join(SCRIPT_DIR, 'configs'))
|
||||||
node = cluster.add_instance('node', main_configs=main_configs, with_mysql=True, with_mongo=True)
|
node = cluster.add_instance('node', main_configs=main_configs, with_mysql=True, with_mongo=True, with_redis=True)
|
||||||
cluster.add_instance('clickhouse1')
|
cluster.add_instance('clickhouse1')
|
||||||
|
|
||||||
|
|
||||||
@ -128,7 +187,7 @@ def setup_module(module):
|
|||||||
def started_cluster():
|
def started_cluster():
|
||||||
try:
|
try:
|
||||||
cluster.start()
|
cluster.start()
|
||||||
for dictionary in DICTIONARIES:
|
for dictionary in DICTIONARIES + DICTIONARIES_KV:
|
||||||
print "Preparing", dictionary.name
|
print "Preparing", dictionary.name
|
||||||
dictionary.prepare_source(cluster)
|
dictionary.prepare_source(cluster)
|
||||||
print "Prepared"
|
print "Prepared"
|
||||||
@ -141,16 +200,8 @@ def started_cluster():
|
|||||||
|
|
||||||
def test_simple_dictionaries(started_cluster):
|
def test_simple_dictionaries(started_cluster):
|
||||||
fields = FIELDS["simple"]
|
fields = FIELDS["simple"]
|
||||||
data = [
|
values = VALUES["simple"]
|
||||||
Row(fields,
|
data = [Row(fields, vals) for vals in values]
|
||||||
[1, 22, 333, 4444, 55555, -6, -77,
|
|
||||||
-888, -999, '550e8400-e29b-41d4-a716-446655440003',
|
|
||||||
'1973-06-28', '1985-02-28 23:43:25', 'hello', 22.543, 3332154213.4, 0]),
|
|
||||||
Row(fields,
|
|
||||||
[2, 3, 4, 5, 6, -7, -8,
|
|
||||||
-9, -10, '550e8400-e29b-41d4-a716-446655440002',
|
|
||||||
'1978-06-28', '1986-02-28 23:42:25', 'hello', 21.543, 3222154213.4, 1]),
|
|
||||||
]
|
|
||||||
|
|
||||||
simple_dicts = [d for d in DICTIONARIES if d.structure.layout.layout_type == "simple"]
|
simple_dicts = [d for d in DICTIONARIES if d.structure.layout.layout_type == "simple"]
|
||||||
for dct in simple_dicts:
|
for dct in simple_dicts:
|
||||||
@ -192,18 +243,8 @@ def test_simple_dictionaries(started_cluster):
|
|||||||
|
|
||||||
def test_complex_dictionaries(started_cluster):
|
def test_complex_dictionaries(started_cluster):
|
||||||
fields = FIELDS["complex"]
|
fields = FIELDS["complex"]
|
||||||
data = [
|
values = VALUES["complex"]
|
||||||
Row(fields,
|
data = [Row(fields, vals) for vals in values]
|
||||||
[1, 'world', 22, 333, 4444, 55555, -6,
|
|
||||||
-77, -888, -999, '550e8400-e29b-41d4-a716-446655440003',
|
|
||||||
'1973-06-28', '1985-02-28 23:43:25',
|
|
||||||
'hello', 22.543, 3332154213.4]),
|
|
||||||
Row(fields,
|
|
||||||
[2, 'qwerty2', 52, 2345, 6544, 9191991, -2,
|
|
||||||
-717, -81818, -92929, '550e8400-e29b-41d4-a716-446655440007',
|
|
||||||
'1975-09-28', '2000-02-28 23:33:24',
|
|
||||||
'my', 255.543, 3332221.44]),
|
|
||||||
]
|
|
||||||
|
|
||||||
complex_dicts = [d for d in DICTIONARIES if d.structure.layout.layout_type == "complex"]
|
complex_dicts = [d for d in DICTIONARIES if d.structure.layout.layout_type == "complex"]
|
||||||
for dct in complex_dicts:
|
for dct in complex_dicts:
|
||||||
@ -232,20 +273,8 @@ def test_complex_dictionaries(started_cluster):
|
|||||||
|
|
||||||
def test_ranged_dictionaries(started_cluster):
|
def test_ranged_dictionaries(started_cluster):
|
||||||
fields = FIELDS["ranged"]
|
fields = FIELDS["ranged"]
|
||||||
data = [
|
values = VALUES["ranged"]
|
||||||
Row(fields,
|
data = [Row(fields, vals) for vals in values]
|
||||||
[1, '2019-02-10', '2019-02-01', '2019-02-28',
|
|
||||||
22, 333, 4444, 55555, -6, -77, -888, -999,
|
|
||||||
'550e8400-e29b-41d4-a716-446655440003',
|
|
||||||
'1973-06-28', '1985-02-28 23:43:25', 'hello',
|
|
||||||
22.543, 3332154213.4]),
|
|
||||||
Row(fields,
|
|
||||||
[2, '2019-04-10', '2019-04-01', '2019-04-28',
|
|
||||||
11, 3223, 41444, 52515, -65, -747, -8388, -9099,
|
|
||||||
'550e8400-e29b-41d4-a716-446655440004',
|
|
||||||
'1973-06-29', '2002-02-28 23:23:25', '!!!!',
|
|
||||||
32.543, 3332543.4]),
|
|
||||||
]
|
|
||||||
|
|
||||||
ranged_dicts = [d for d in DICTIONARIES if d.structure.layout.layout_type == "ranged"]
|
ranged_dicts = [d for d in DICTIONARIES if d.structure.layout.layout_type == "ranged"]
|
||||||
for dct in ranged_dicts:
|
for dct in ranged_dicts:
|
||||||
@ -264,3 +293,98 @@ def test_ranged_dictionaries(started_cluster):
|
|||||||
for query, answer in queries_with_answers:
|
for query, answer in queries_with_answers:
|
||||||
print query
|
print query
|
||||||
assert node.query(query) == str(answer) + '\n'
|
assert node.query(query) == str(answer) + '\n'
|
||||||
|
|
||||||
|
|
||||||
|
def test_key_value_simple_dictionaries(started_cluster):
|
||||||
|
fields = FIELDS["simple"]
|
||||||
|
values = VALUES["simple"]
|
||||||
|
data = [Row(fields, vals) for vals in values]
|
||||||
|
|
||||||
|
simple_dicts = [d for d in DICTIONARIES_KV if d.structure.layout.layout_type == "simple"]
|
||||||
|
|
||||||
|
for dct in simple_dicts:
|
||||||
|
queries_with_answers = []
|
||||||
|
local_data = []
|
||||||
|
for row in data:
|
||||||
|
local_fields = dct.get_fields()
|
||||||
|
local_values = [row.get_value_by_name(field.name) for field in local_fields if row.has_field(field.name)]
|
||||||
|
local_data.append(Row(local_fields, local_values))
|
||||||
|
|
||||||
|
dct.load_data(local_data)
|
||||||
|
|
||||||
|
node.query("system reload dictionary {}".format(dct.name))
|
||||||
|
|
||||||
|
print 'name: ', dct.name
|
||||||
|
|
||||||
|
for row in local_data:
|
||||||
|
print dct.get_fields()
|
||||||
|
for field in dct.get_fields():
|
||||||
|
print field.name, field.is_key
|
||||||
|
if not field.is_key:
|
||||||
|
for query in dct.get_select_get_queries(field, row):
|
||||||
|
queries_with_answers.append((query, row.get_value_by_name(field.name)))
|
||||||
|
|
||||||
|
for query in dct.get_select_has_queries(field, row):
|
||||||
|
queries_with_answers.append((query, 1))
|
||||||
|
|
||||||
|
for query in dct.get_select_get_or_default_queries(field, row):
|
||||||
|
queries_with_answers.append((query, field.default_value_for_get))
|
||||||
|
|
||||||
|
if dct.structure.has_hierarchy:
|
||||||
|
for query in dct.get_hierarchical_queries(data[0]):
|
||||||
|
queries_with_answers.append((query, [1]))
|
||||||
|
|
||||||
|
for query in dct.get_hierarchical_queries(data[1]):
|
||||||
|
queries_with_answers.append((query, [2, 1]))
|
||||||
|
|
||||||
|
for query in dct.get_is_in_queries(data[0], data[1]):
|
||||||
|
queries_with_answers.append((query, 0))
|
||||||
|
|
||||||
|
for query in dct.get_is_in_queries(data[1], data[0]):
|
||||||
|
queries_with_answers.append((query, 1))
|
||||||
|
|
||||||
|
for query, answer in queries_with_answers:
|
||||||
|
print query
|
||||||
|
if isinstance(answer, list):
|
||||||
|
answer = str(answer).replace(' ', '')
|
||||||
|
assert node.query(query) == str(answer) + '\n'
|
||||||
|
|
||||||
|
|
||||||
|
def test_key_value_complex_dictionaries(started_cluster):
|
||||||
|
fields = FIELDS["complex"]
|
||||||
|
values = VALUES["complex"]
|
||||||
|
data = [Row(fields, vals) for vals in values]
|
||||||
|
|
||||||
|
complex_dicts = [d for d in DICTIONARIES if d.structure.layout.layout_type == "complex"]
|
||||||
|
for dct in complex_dicts:
|
||||||
|
dct.load_data(data)
|
||||||
|
|
||||||
|
node.query("system reload dictionaries")
|
||||||
|
|
||||||
|
for dct in complex_dicts:
|
||||||
|
queries_with_answers = []
|
||||||
|
local_data = []
|
||||||
|
for row in data:
|
||||||
|
local_fields = dct.get_fields()
|
||||||
|
local_values = [row.get_value_by_name(field.name) for field in local_fields if row.has_field(field.name)]
|
||||||
|
local_data.append(Row(local_fields, local_values))
|
||||||
|
|
||||||
|
dct.load_data(local_data)
|
||||||
|
|
||||||
|
node.query("system reload dictionary {}".format(dct.name))
|
||||||
|
|
||||||
|
for row in local_data:
|
||||||
|
for field in dct.get_fields():
|
||||||
|
if not field.is_key:
|
||||||
|
for query in dct.get_select_get_queries(field, row):
|
||||||
|
queries_with_answers.append((query, row.get_value_by_name(field.name)))
|
||||||
|
|
||||||
|
for query in dct.get_select_has_queries(field, row):
|
||||||
|
queries_with_answers.append((query, 1))
|
||||||
|
|
||||||
|
for query in dct.get_select_get_or_default_queries(field, row):
|
||||||
|
queries_with_answers.append((query, field.default_value_for_get))
|
||||||
|
|
||||||
|
for query, answer in queries_with_answers:
|
||||||
|
print query
|
||||||
|
assert node.query(query) == str(answer) + '\n'
|
||||||
|
@ -1,325 +0,0 @@
|
|||||||
import os
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
from dictionary import Field, Row, Dictionary, DictionaryStructure, Layout
|
|
||||||
from external_sources import SourceRedis, SourceAerospike
|
|
||||||
|
|
||||||
from helpers.cluster import ClickHouseCluster
|
|
||||||
|
|
||||||
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
|
|
||||||
|
|
||||||
FIELDS = {
|
|
||||||
"simple": [
|
|
||||||
Field("KeyField", 'UInt64', is_key=True, default_value_for_get=9999999),
|
|
||||||
Field("UInt8_", 'UInt8', default_value_for_get=55),
|
|
||||||
Field("UInt16_", 'UInt16', default_value_for_get=66),
|
|
||||||
Field("UInt32_", 'UInt32', default_value_for_get=77),
|
|
||||||
Field("UInt64_", 'UInt64', default_value_for_get=88),
|
|
||||||
Field("Int8_", 'Int8', default_value_for_get=-55),
|
|
||||||
Field("Int16_", 'Int16', default_value_for_get=-66),
|
|
||||||
Field("Int32_", 'Int32', default_value_for_get=-77),
|
|
||||||
Field("Int64_", 'Int64', default_value_for_get=-88),
|
|
||||||
Field("UUID_", 'UUID', default_value_for_get='550e8400-0000-0000-0000-000000000000'),
|
|
||||||
Field("Date_", 'Date', default_value_for_get='2018-12-30'),
|
|
||||||
Field("DateTime_", 'DateTime', default_value_for_get='2018-12-30 00:00:00'),
|
|
||||||
Field("String_", 'String', default_value_for_get='hi'),
|
|
||||||
Field("Float32_", 'Float32', default_value_for_get=555.11),
|
|
||||||
Field("Float64_", 'Float64', default_value_for_get=777.11),
|
|
||||||
Field("ParentKeyField", "UInt64", default_value_for_get=444, hierarchical=True),
|
|
||||||
],
|
|
||||||
"complex": [
|
|
||||||
Field("KeyField1", 'UInt64', is_key=True, default_value_for_get=9999999),
|
|
||||||
Field("KeyField2", 'String', is_key=True, default_value_for_get='xxxxxxxxx'),
|
|
||||||
Field("UInt8_", 'UInt8', default_value_for_get=55),
|
|
||||||
Field("UInt16_", 'UInt16', default_value_for_get=66),
|
|
||||||
Field("UInt32_", 'UInt32', default_value_for_get=77),
|
|
||||||
Field("UInt64_", 'UInt64', default_value_for_get=88),
|
|
||||||
Field("Int8_", 'Int8', default_value_for_get=-55),
|
|
||||||
Field("Int16_", 'Int16', default_value_for_get=-66),
|
|
||||||
Field("Int32_", 'Int32', default_value_for_get=-77),
|
|
||||||
Field("Int64_", 'Int64', default_value_for_get=-88),
|
|
||||||
Field("UUID_", 'UUID', default_value_for_get='550e8400-0000-0000-0000-000000000000'),
|
|
||||||
Field("Date_", 'Date', default_value_for_get='2018-12-30'),
|
|
||||||
Field("DateTime_", 'DateTime', default_value_for_get='2018-12-30 00:00:00'),
|
|
||||||
Field("String_", 'String', default_value_for_get='hi'),
|
|
||||||
Field("Float32_", 'Float32', default_value_for_get=555.11),
|
|
||||||
Field("Float64_", 'Float64', default_value_for_get=777.11),
|
|
||||||
],
|
|
||||||
"ranged": [
|
|
||||||
Field("KeyField1", 'UInt64', is_key=True),
|
|
||||||
Field("KeyField2", 'Date', is_range_key=True),
|
|
||||||
Field("StartDate", 'Date', range_hash_type='min'),
|
|
||||||
Field("EndDate", 'Date', range_hash_type='max'),
|
|
||||||
Field("UInt8_", 'UInt8', default_value_for_get=55),
|
|
||||||
Field("UInt16_", 'UInt16', default_value_for_get=66),
|
|
||||||
Field("UInt32_", 'UInt32', default_value_for_get=77),
|
|
||||||
Field("UInt64_", 'UInt64', default_value_for_get=88),
|
|
||||||
Field("Int8_", 'Int8', default_value_for_get=-55),
|
|
||||||
Field("Int16_", 'Int16', default_value_for_get=-66),
|
|
||||||
Field("Int32_", 'Int32', default_value_for_get=-77),
|
|
||||||
Field("Int64_", 'Int64', default_value_for_get=-88),
|
|
||||||
Field("UUID_", 'UUID', default_value_for_get='550e8400-0000-0000-0000-000000000000'),
|
|
||||||
Field("Date_", 'Date', default_value_for_get='2018-12-30'),
|
|
||||||
Field("DateTime_", 'DateTime', default_value_for_get='2018-12-30 00:00:00'),
|
|
||||||
Field("String_", 'String', default_value_for_get='hi'),
|
|
||||||
Field("Float32_", 'Float32', default_value_for_get=555.11),
|
|
||||||
Field("Float64_", 'Float64', default_value_for_get=777.11),
|
|
||||||
],
|
|
||||||
}
|
|
||||||
|
|
||||||
VALUES = {
|
|
||||||
"simple": [
|
|
||||||
[
|
|
||||||
1, 22, 333, 4444, 55555, -6, -77,
|
|
||||||
-888, -999, '550e8400-e29b-41d4-a716-446655440003',
|
|
||||||
'1973-06-28', '1985-02-28 23:43:25', 'hello', 22.543, 3332154213.4, 0,
|
|
||||||
],
|
|
||||||
[
|
|
||||||
2, 3, 4, 5, 6, -7, -8,
|
|
||||||
-9, -10, '550e8400-e29b-41d4-a716-446655440002',
|
|
||||||
'1978-06-28', '1986-02-28 23:42:25', 'hello', 21.543, 3222154213.4, 1,
|
|
||||||
],
|
|
||||||
],
|
|
||||||
"complex": [
|
|
||||||
[
|
|
||||||
1, 'world', 22, 333, 4444, 55555, -6,
|
|
||||||
-77, -888, -999, '550e8400-e29b-41d4-a716-446655440003',
|
|
||||||
'1973-06-28', '1985-02-28 23:43:25',
|
|
||||||
'hello', 22.543, 3332154213.4,
|
|
||||||
],
|
|
||||||
[
|
|
||||||
2, 'qwerty2', 52, 2345, 6544, 9191991, -2,
|
|
||||||
-717, -81818, -92929, '550e8400-e29b-41d4-a716-446655440007',
|
|
||||||
'1975-09-28', '2000-02-28 23:33:24',
|
|
||||||
'my', 255.543, 3332221.44,
|
|
||||||
],
|
|
||||||
],
|
|
||||||
"ranged": [
|
|
||||||
[
|
|
||||||
1, '2019-02-10', '2019-02-01', '2019-02-28',
|
|
||||||
22, 333, 4444, 55555, -6, -77, -888, -999,
|
|
||||||
'550e8400-e29b-41d4-a716-446655440003',
|
|
||||||
'1973-06-28', '1985-02-28 23:43:25', 'hello',
|
|
||||||
22.543, 3332154213.4,
|
|
||||||
],
|
|
||||||
[
|
|
||||||
2, '2019-04-10', '2019-04-01', '2019-04-28',
|
|
||||||
11, 3223, 41444, 52515, -65, -747, -8388, -9099,
|
|
||||||
'550e8400-e29b-41d4-a716-446655440004',
|
|
||||||
'1973-06-29', '2002-02-28 23:23:25', '!!!!',
|
|
||||||
32.543, 3332543.4,
|
|
||||||
],
|
|
||||||
],
|
|
||||||
}
|
|
||||||
|
|
||||||
LAYOUTS = [
|
|
||||||
Layout("flat"),
|
|
||||||
Layout("hashed"),
|
|
||||||
Layout("cache"),
|
|
||||||
Layout('complex_key_hashed_one_key'),
|
|
||||||
Layout('complex_key_hashed_two_keys'),
|
|
||||||
Layout("complex_key_cache"),
|
|
||||||
Layout("range_hashed"),
|
|
||||||
]
|
|
||||||
|
|
||||||
SOURCES = [
|
|
||||||
SourceRedis("RedisSimple", "localhost", "6380", "redis1", "6379", "", "", storage_type="simple"),
|
|
||||||
SourceRedis("RedisHash", "localhost", "6380", "redis1", "6379", "", "", storage_type="hash_map"),
|
|
||||||
# SourceAerospike("Aerospike", "localhost", "3000", "aerospike1", "3000", "", ""),
|
|
||||||
]
|
|
||||||
|
|
||||||
DICTIONARIES = []
|
|
||||||
|
|
||||||
cluster = None
|
|
||||||
node = None
|
|
||||||
|
|
||||||
|
|
||||||
def setup_kv_dict(suffix, layout, fields, kv_source, dict_configs_path, values):
|
|
||||||
global DICTIONARIES
|
|
||||||
|
|
||||||
structure = DictionaryStructure(layout, fields)
|
|
||||||
dict_name = "{}_{}_{}".format(kv_source.name, layout.name, suffix)
|
|
||||||
dict_path = os.path.join(dict_configs_path, dict_name + '.xml')
|
|
||||||
dictionary = Dictionary(dict_name, structure, kv_source, dict_path, "table_" + dict_name, fields, values)
|
|
||||||
dictionary.generate_config()
|
|
||||||
DICTIONARIES.append(dictionary)
|
|
||||||
|
|
||||||
|
|
||||||
def setup_module(module):
|
|
||||||
global DICTIONARIES
|
|
||||||
global cluster
|
|
||||||
global node
|
|
||||||
|
|
||||||
dict_configs_path = os.path.join(SCRIPT_DIR, 'configs/dictionaries')
|
|
||||||
for f in os.listdir(dict_configs_path):
|
|
||||||
os.remove(os.path.join(dict_configs_path, f))
|
|
||||||
|
|
||||||
for layout in LAYOUTS:
|
|
||||||
for source in SOURCES:
|
|
||||||
if source.compatible_with_layout(layout):
|
|
||||||
if layout.layout_type == "simple":
|
|
||||||
fields_len = len(FIELDS["simple"])
|
|
||||||
for i in range(fields_len - 1):
|
|
||||||
local_fields = [FIELDS["simple"][0], FIELDS["simple"][i + 1]]
|
|
||||||
local_values = [[value[0], value[i + 1]] for value in VALUES["simple"]]
|
|
||||||
setup_kv_dict(i + 1, layout, local_fields, source, dict_configs_path, local_values)
|
|
||||||
elif layout.layout_type == "complex":
|
|
||||||
fields_len = len(FIELDS["complex"])
|
|
||||||
for i in range(fields_len - 2):
|
|
||||||
if layout.name == 'complex_key_hashed_two_keys':
|
|
||||||
local_fields = [FIELDS['complex'][0], FIELDS['complex'][1], FIELDS['complex'][i + 2]]
|
|
||||||
local_values = [[value[0], value[1], value[i + 2]] for value in VALUES["complex"]]
|
|
||||||
else:
|
|
||||||
local_fields = [FIELDS['complex'][1], FIELDS['complex'][i + 2]]
|
|
||||||
local_values = [[value[1], value[i + 2]] for value in VALUES["complex"]]
|
|
||||||
setup_kv_dict(i + 2, layout, local_fields, source, dict_configs_path, local_values)
|
|
||||||
elif layout.layout_type == "ranged":
|
|
||||||
fields_len = len(FIELDS["ranged"])
|
|
||||||
local_fields = FIELDS["ranged"][0:5]
|
|
||||||
local_values = VALUES["ranged"][0:5]
|
|
||||||
for i in range(fields_len - 4):
|
|
||||||
local_fields[4] = FIELDS["ranged"][i + 4]
|
|
||||||
for j, value in enumerate(VALUES["ranged"]):
|
|
||||||
local_values[j][4] = value[i + 4]
|
|
||||||
setup_kv_dict(i + 2, layout, local_fields, source, dict_configs_path, local_values)
|
|
||||||
else:
|
|
||||||
print "Source", source.name, "incompatible with layout", layout.name
|
|
||||||
|
|
||||||
main_configs = []
|
|
||||||
for fname in os.listdir(dict_configs_path):
|
|
||||||
main_configs.append(os.path.join(dict_configs_path, fname))
|
|
||||||
cluster = ClickHouseCluster(__file__, base_configs_dir=os.path.join(SCRIPT_DIR, 'configs'))
|
|
||||||
node = cluster.add_instance('node', main_configs=main_configs, with_redis=True)
|
|
||||||
cluster.add_instance('clickhouse1')
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture(scope="module")
|
|
||||||
def started_cluster():
|
|
||||||
try:
|
|
||||||
cluster.start()
|
|
||||||
for dictionary in DICTIONARIES:
|
|
||||||
print "Preparing", dictionary.name
|
|
||||||
dictionary.prepare_source(cluster)
|
|
||||||
print "Prepared"
|
|
||||||
|
|
||||||
yield cluster
|
|
||||||
|
|
||||||
finally:
|
|
||||||
cluster.shutdown()
|
|
||||||
|
|
||||||
|
|
||||||
def prepare_data(fields, values_by_row):
    """Wrap each per-row list of values into a Row bound to the given fields."""
    rows = []
    for row_values in values_by_row:
        rows.append(Row(fields, row_values))
    return rows
||||||
def test_simple_kv_dictionaries(started_cluster):
    """For every dictionary with a 'simple' (single UInt64 key) layout:
    load its values into the KV source, reload it in ClickHouse, then
    check dictGet/dictHas/dictGetOrDefault (and, when the second field is
    hierarchical, dictGetHierarchy/dictIsIn) against the expected values."""
    simple_kv_dicts = [d for d in DICTIONARIES if d.structure.layout.layout_type == "simple"]

    for dct in simple_kv_dicts:
        queries_with_answers = []
        fields = dct.fields
        print("FIELDS AND VALUES FOR " + dct.name)
        print(fields)
        print(dct.values)
        # Row objects pairing each field with its value, one per source row.
        data = prepare_data(fields, dct.values)
        dct.source.load_kv_data(dct.values)

        # Force ClickHouse to re-read the freshly loaded source data.
        try:
            node.query("system reload dictionary '{}'".format(dct.name))
        except Exception:
            print(dct.name)
            raise

        # Collect (query, expected answer) pairs for every non-key field.
        for row in data:
            for field in fields:
                if not field.is_key:
                    for query in dct.get_select_get_queries(field, row):
                        queries_with_answers.append((query, row.get_value_by_name(field.name)))

                    for query in dct.get_select_has_queries(field, row):
                        queries_with_answers.append((query, 1))

                    # These probe a missing key, so the field default is expected.
                    for query in dct.get_select_get_or_default_queries(field, row):
                        queries_with_answers.append((query, field.default_value_for_get))
        if dct.fields[1].hierarchical:
            # NOTE(review): relies on the fixed test data — row 0 is the root
            # (hierarchy [1]) and row 1 is its child (hierarchy [2, 1]).
            for query in dct.get_hierarchical_queries(data[0]):
                queries_with_answers.append((query, [1]))

            for query in dct.get_hierarchical_queries(data[1]):
                queries_with_answers.append((query, [2, 1]))

            # The root is not "in" its child, but the child is in the root.
            for query in dct.get_is_in_queries(data[0], data[1]):
                queries_with_answers.append((query, 0))

            for query in dct.get_is_in_queries(data[1], data[0]):
                queries_with_answers.append((query, 1))

        for query, answer in queries_with_answers:
            # ClickHouse prints arrays without spaces, e.g. [2,1].
            if isinstance(answer, list):
                answer = str(answer).replace(' ', '')
            print query
            assert node.query(query) == str(answer) + '\n', query
||||||
def test_complex_dictionaries(started_cluster):
    """For every dictionary with a composite ('complex') key layout:
    load its values into the KV source, reload it in ClickHouse, then
    check dictGet/dictHas/dictGetOrDefault answers for each value field."""
    dicts_under_test = [d for d in DICTIONARIES if d.structure.layout.layout_type == "complex"]

    for dct in dicts_under_test:
        fields = dct.fields
        print("FIELDS AND VALUES FOR " + dct.name)
        print(fields)
        print(dct.values)
        data = prepare_data(fields, dct.values)
        dct.source.load_kv_data(dct.values)

        # Force ClickHouse to re-read the freshly loaded source data.
        try:
            node.query("system reload dictionary '{}'".format(dct.name))
        except Exception:
            print(dct.name)
            raise

        # Collect (query, expected answer) pairs for every non-key field.
        queries_with_answers = []
        for row in data:
            for field in fields:
                if field.is_key:
                    continue
                queries_with_answers.extend(
                    (query, row.get_value_by_name(field.name))
                    for query in dct.get_select_get_queries(field, row))
                queries_with_answers.extend(
                    (query, 1)
                    for query in dct.get_select_has_queries(field, row))
                # These probe a missing key, so the field default is expected.
                queries_with_answers.extend(
                    (query, field.default_value_for_get)
                    for query in dct.get_select_get_or_default_queries(field, row))

        for query, expected in queries_with_answers:
            print(query)
            assert node.query(query) == str(expected) + '\n'
||||||
def test_ranged_dictionaries(started_cluster):
    """For every dictionary with a range_hashed ('ranged') layout:
    load its values into the KV source, reload it in ClickHouse, then
    check dictGet answers for each plain value field."""
    ranged_dicts = [d for d in DICTIONARIES if d.structure.layout.layout_type == "ranged"]

    for dct in ranged_dicts:
        fields = dct.fields
        print("FIELDS AND VALUES FOR " + dct.name)
        print(fields)
        print(dct.values)
        data = prepare_data(fields, dct.values)
        dct.source.load_kv_data(dct.values)

        # Force ClickHouse to re-read the freshly loaded source data.
        try:
            node.query("system reload dictionary '{}'".format(dct.name))
        except Exception:
            print(dct.name)
            raise

        # Only plain value fields are queried; key and range columns are not.
        value_fields = [f for f in fields if not f.is_key and not f.is_range]
        queries_with_answers = []
        for row in data:
            for field in value_fields:
                for query in dct.get_select_get_queries(field, row):
                    queries_with_answers.append((query, row.get_value_by_name(field.name)))

        for query, expected in queries_with_answers:
            print(query)
            assert node.query(query) == str(expected) + '\n'
|
Loading…
Reference in New Issue
Block a user