# ClickHouse/tests/integration/test_storage_redis/test.py
#
# Integration tests for the ClickHouse Redis table engine.
# Requires the redis client library:  sudo -H pip install redis
import redis
import pytest
import struct
import sys

from helpers.client import QueryRuntimeException
from helpers.cluster import ClickHouseCluster
from helpers.test_tools import TSV

# Single ClickHouse node with an attached Redis service container.
cluster = ClickHouseCluster(__file__)
node = cluster.add_instance("node", with_redis=True)
@pytest.fixture(scope="module")
def started_cluster():
    """Start the ClickHouse cluster once per test module; always shut it down."""
    try:
        cluster.start()
        yield cluster
    finally:
        cluster.shutdown()
def get_redis_connection(db_id=0):
    """Return a redis-py client connected to the test Redis service.

    db_id selects the logical Redis database (default 0), matching the
    db argument the tests pass to the Redis table engine.
    """
    client = redis.Redis(
        host="localhost", port=cluster.redis_port, password="clickhouse", db=db_id
    )
    return client
def get_address_for_ch():
    """Redis address ('host:port') as reachable from inside the ClickHouse container."""
    return cluster.redis_host + ":6379"
def drop_table(table):
    """Drop `table` on the ClickHouse node if it exists; SYNC waits for completion."""
    node.query(f"DROP TABLE IF EXISTS {table} SYNC")
# see SerializationString.serializeBinary
def serialize_binary_for_string(x):
    """Encode `x` the way ClickHouse serializes a String value:
    a varint length prefix followed by the UTF-8 bytes.

    The length prefix is the UTF-8 *byte* length (not the character count),
    matching SerializationString.serializeBinary.

    Raises ValueError if the length exceeds ClickHouse's varint maximum.
    """
    var_uint_max = (1 << 63) - 1
    encoded = x.encode("utf-8")
    # Prefix must be the byte length; len(x) would under-count non-ASCII text.
    length = len(encoded)
    if length > var_uint_max:
        raise ValueError("Value too large for varint encoding")
    buf = bytearray()
    # LEB128-style varint: 7 payload bits per byte, high bit = continuation.
    for _ in range(9):
        byte = length & 0x7F
        if length > 0x7F:
            byte |= 0x80
        buf.append(byte)
        length >>= 7
        if not length:
            break
    # write data
    buf += encoded
    return bytes(buf)
# see SerializationNumber.serializeBinary
def serialize_binary_for_uint32(x):
    """Encode `x` the way ClickHouse serializes a UInt32: 4 bytes, little-endian.

    '<I' requests standard-size little-endian directly, so no host
    byte-order check is needed (the previous version packed natively
    and reversed the bytes on big-endian hosts — same result).
    """
    return struct.pack("<I", x)
def test_simple_select(started_cluster):
    """String keys/values written directly to Redis are readable via the engine."""
    client = get_redis_connection()
    address = get_address_for_ch()
    # clean all
    client.flushall()
    drop_table("test_simple_select")

    data = {}
    for i in range(100):
        packed = serialize_binary_for_string(str(i))
        data[packed] = packed

    client.mset(data)
    client.close()

    # create table
    node.query(
        f"""
        CREATE TABLE test_simple_select(
            k String,
            v String
        ) Engine=Redis('{address}', 0, 'clickhouse') PRIMARY KEY (k)
        """
    )

    response = TSV.toMat(
        node.query("SELECT k, v FROM test_simple_select WHERE k='0' FORMAT TSV")
    )
    assert len(response) == 1
    assert response[0] == ["0", "0"]

    response = TSV.toMat(
        node.query("SELECT * FROM test_simple_select ORDER BY k FORMAT TSV")
    )
    assert len(response) == 100
    assert response[0] == ["0", "0"]
def test_select_int(started_cluster):
    """UInt32 keys/values round-trip between raw Redis writes and the engine."""
    client = get_redis_connection()
    address = get_address_for_ch()
    # clean all
    client.flushall()
    drop_table("test_select_int")

    data = {}
    for i in range(100):
        packed = serialize_binary_for_uint32(i)
        data[packed] = packed

    client.mset(data)
    client.close()

    # create table
    node.query(
        f"""
        CREATE TABLE test_select_int(
            k UInt32,
            v UInt32
        ) Engine=Redis('{address}', 0, 'clickhouse') PRIMARY KEY (k)
        """
    )

    response = TSV.toMat(
        node.query("SELECT k, v FROM test_select_int WHERE k=0 FORMAT TSV")
    )
    assert len(response) == 1
    assert response[0] == ["0", "0"]

    response = TSV.toMat(
        node.query("SELECT * FROM test_select_int ORDER BY k FORMAT TSV")
    )
    assert len(response) == 100
    assert response[0] == ["0", "0"]
def test_create_table(started_cluster):
    """Valid engine-argument combinations are accepted; an empty or missing
    PRIMARY KEY clause is rejected with a query error."""
    address = get_address_for_ch()

    # simple creation
    drop_table("test_create_table")
    node.query(
        f"""
        CREATE TABLE test_create_table(
            k String,
            v UInt32
        ) Engine=Redis('{address}') PRIMARY KEY (k)
        """
    )

    # simple creation with full engine args
    drop_table("test_create_table")
    node.query(
        f"""
        CREATE TABLE test_create_table(
            k String,
            v UInt32
        ) Engine=Redis('{address}', 0, 'clickhouse', 10) PRIMARY KEY (k)
        """
    )

    drop_table("test_create_table")
    node.query(
        f"""
        CREATE TABLE test_create_table(
            k String,
            f String,
            v UInt32
        ) Engine=Redis('{address}', 0, 'clickhouse', 10) PRIMARY KEY (k)
        """
    )

    # empty primary key must be rejected
    drop_table("test_create_table")
    with pytest.raises(QueryRuntimeException):
        node.query(
            f"""
            CREATE TABLE test_create_table(
                k String,
                f String,
                v UInt32
            ) Engine=Redis('{address}', 0, 'clickhouse', 10) PRIMARY KEY ()
            """
        )

    # missing primary key must be rejected
    drop_table("test_create_table")
    with pytest.raises(QueryRuntimeException):
        node.query(
            f"""
            CREATE TABLE test_create_table(
                k String,
                f String,
                v UInt32
            ) Engine=Redis('{address}', 0, 'clickhouse', 10)
            """
        )
def test_simple_insert(started_cluster):
    """Rows INSERTed through the engine are selectable by key and non-key columns."""
    client = get_redis_connection()
    address = get_address_for_ch()
    # clean all
    client.flushall()
    drop_table("test_simple_insert")

    node.query(
        f"""
        CREATE TABLE test_simple_insert(
            k UInt32,
            m DateTime,
            n String
        ) Engine=Redis('{address}', 0, 'clickhouse') PRIMARY KEY (k)
        """
    )

    node.query(
        """
        INSERT INTO test_simple_insert Values
        (1, '2023-06-01 00:00:00', 'lili'), (2, '2023-06-02 00:00:00', 'lucy')
        """
    )

    response = node.query("SELECT COUNT(*) FROM test_simple_insert FORMAT Values")
    assert response == "(2)"

    # lookup by primary key
    response = TSV.toMat(
        node.query("SELECT k, m, n FROM test_simple_insert WHERE k=1 FORMAT TSV")
    )
    assert len(response) == 1
    assert response[0] == ["1", "2023-06-01 00:00:00", "lili"]

    # filter by non-key DateTime column
    response = TSV.toMat(
        node.query(
            "SELECT k, m, n FROM test_simple_insert WHERE m='2023-06-01 00:00:00' FORMAT TSV"
        )
    )
    assert len(response) == 1
    assert response[0] == ["1", "2023-06-01 00:00:00", "lili"]

    # filter by non-key String column
    response = TSV.toMat(
        node.query("SELECT k, m, n FROM test_simple_insert WHERE n='lili' FORMAT TSV")
    )
    assert len(response) == 1
    assert response[0] == ["1", "2023-06-01 00:00:00", "lili"]
def test_update(started_cluster):
    """ALTER ... UPDATE rewrites non-key columns; updating the key is rejected."""
    client = get_redis_connection()
    address = get_address_for_ch()
    # clean all
    client.flushall()
    drop_table("test_update")

    node.query(
        f"""
        CREATE TABLE test_update(
            k UInt32,
            m DateTime,
            n String
        ) Engine=Redis('{address}', 0, 'clickhouse') PRIMARY KEY (k)
        """
    )

    node.query(
        """
        INSERT INTO test_update Values
        (1, '2023-06-01 00:00:00', 'lili'), (2, '2023-06-02 00:00:00', 'lucy')
        """
    )

    response = node.query(
        """
        ALTER TABLE test_update UPDATE m='2023-06-03 00:00:00' WHERE k=1
        """
    )
    print("update response: ", response)

    response = TSV.toMat(
        node.query("SELECT k, m, n FROM test_update WHERE k=1 FORMAT TSV")
    )
    assert len(response) == 1
    assert response[0] == ["1", "2023-06-03 00:00:00", "lili"]

    # can not update key
    with pytest.raises(QueryRuntimeException):
        node.query(
            """
            ALTER TABLE test_update UPDATE k=2 WHERE k=1
            """
        )
def test_delete(started_cluster):
    """ALTER ... DELETE removes rows matched on the key and on non-key columns."""
    client = get_redis_connection()
    address = get_address_for_ch()
    # clean all
    client.flushall()
    drop_table("test_delete")

    node.query(
        f"""
        CREATE TABLE test_delete(
            k UInt32,
            m DateTime,
            n String
        ) Engine=Redis('{address}', 0, 'clickhouse') PRIMARY KEY (k)
        """
    )

    node.query(
        """
        INSERT INTO test_delete Values
        (1, '2023-06-01 00:00:00', 'lili'), (2, '2023-06-02 00:00:00', 'lucy')
        """
    )

    # delete by key
    response = node.query(
        """
        ALTER TABLE test_delete DELETE WHERE k=1
        """
    )
    print("delete response: ", response)

    response = TSV.toMat(node.query("SELECT k, m, n FROM test_delete FORMAT TSV"))
    assert len(response) == 1
    assert response[0] == ["2", "2023-06-02 00:00:00", "lucy"]

    # delete by non-key column
    response = node.query(
        """
        ALTER TABLE test_delete DELETE WHERE m='2023-06-02 00:00:00'
        """
    )

    response = TSV.toMat(node.query("SELECT k, m, n FROM test_delete FORMAT TSV"))
    assert len(response) == 0
def test_truncate(started_cluster):
    """TRUNCATE TABLE leaves the table empty."""
    client = get_redis_connection()
    address = get_address_for_ch()
    # clean all
    client.flushall()
    drop_table("test_truncate")

    node.query(
        f"""
        CREATE TABLE test_truncate(
            k UInt32,
            m DateTime,
            n String
        ) Engine=Redis('{address}', 0, 'clickhouse') PRIMARY KEY (k)
        """
    )

    node.query(
        """
        INSERT INTO test_truncate Values
        (1, '2023-06-01 00:00:00', 'lili'), (2, '2023-06-02 00:00:00', 'lucy')
        """
    )

    response = node.query(
        """
        TRUNCATE TABLE test_truncate
        """
    )
    print("truncate table response: ", response)

    response = TSV.toMat(node.query("SELECT COUNT(*) FROM test_truncate FORMAT TSV"))
    assert len(response) == 1
    assert response[0] == ["0"]