import pytest

from helpers.cluster import ClickHouseCluster

import redis

cluster = ClickHouseCluster(__file__)

node = cluster.add_instance("node", with_redis=True)

POOL_SIZE = 16


@pytest.fixture(scope="module")
def start_cluster():
    try:
        cluster.start()

        # Populate Redis: a single hash keyed by the date with N field/value pairs.
        N = 1000
        client = redis.Redis(
            host="localhost", port=cluster.redis_port, password="clickhouse", db=0
        )
        client.flushdb()
        for i in range(N):
            client.hset("2020-10-10", i, i)

        # Dictionary with a composite (date, id) key reading from the Redis hash
        # through a pool of POOL_SIZE connections.
        node.query(
            """
            CREATE DICTIONARY redis_dict
            (
                date String,
                id UInt64,
                value UInt64
            )
            PRIMARY KEY date, id
            SOURCE(REDIS(HOST '{}' PORT 6379 STORAGE_TYPE 'hash_map' DB_INDEX 0 PASSWORD 'clickhouse' POOL_SIZE '{}'))
            LAYOUT(COMPLEX_KEY_DIRECT())
            """.format(
                cluster.redis_host, POOL_SIZE
            )
        )

        # Table used to drive a large number of dictGet lookups.
        node.query(
            """
            CREATE TABLE redis_dictionary_test
            (
                date Date,
                id UInt64
            )
            ENGINE = MergeTree ORDER BY id"""
        )

        node.query(
            "INSERT INTO default.redis_dictionary_test SELECT '2020-10-10', number FROM numbers(1000000)"
        )

        yield cluster

    finally:
        cluster.shutdown()


def test_redis_dict_long(start_cluster):
    # Scanning the dictionary returns all N entries under the single date key.
    assert (
        node.query(
            f"SELECT count(), uniqExact(date), uniqExact(id) FROM redis_dict SETTINGS max_threads={POOL_SIZE}"
        )
        == "1000\t1\t1000\n"
    )
    # Each of the 1M lookups resolves to one of the N distinct values;
    # max_threads=POOL_SIZE exercises the dictionary's connection pool concurrently.
    assert (
        node.query(
            f"SELECT count(DISTINCT dictGet('redis_dict', 'value', tuple(date, id % 1000))) FROM redis_dictionary_test SETTINGS max_threads={POOL_SIZE}"
        )
        == "1000\n"
    )