ClickHouse/tests/integration/test_dictionaries_postgresql/test.py

import pytest
import time
import psycopg2
from multiprocessing.dummy import Pool
from helpers.cluster import ClickHouseCluster
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT

cluster = ClickHouseCluster(__file__)
node1 = cluster.add_instance(
"node1",
main_configs=[
"configs/config.xml",
"configs/dictionaries/postgres_dict.xml",
"configs/named_collections.xml",
],
with_postgres=True,
with_postgres_cluster=True,
)
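
# SQL templates: a source table on the PostgreSQL side (integer id/key/value,
# PRIMARY KEY on id) and a ClickHouse table using the Dictionary engine to
# expose a loaded dictionary as `test`.`dict_table_<name>`.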
postgres_dict_table_template = """
CREATE TABLE IF NOT EXISTS {} (
id Integer NOT NULL, key Integer NOT NULL, value Integer NOT NULL, PRIMARY KEY (id))
"""
click_dict_table_template = """
CREATE TABLE IF NOT EXISTS `test`.`dict_table_{}` (
`key` UInt32, `value` UInt32
) ENGINE = Dictionary({})
"""
def get_postgres_conn(ip, port, database=False):
if database:
conn_string = "host={} port={} dbname='clickhouse' user='postgres' password='mysecretpassword'".format(
ip, port
)
else:
conn_string = (
"host={} port={} user='postgres' password='mysecretpassword'".format(
ip, port
)
)
conn = psycopg2.connect(conn_string)
conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
conn.autocommit = True
return conn
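
# Create a database on the PostgreSQL side.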
def create_postgres_db(conn, name):
cursor = conn.cursor()
cursor.execute("CREATE DATABASE {}".format(name))
def create_postgres_table(cursor, table_name):
cursor.execute(postgres_dict_table_template.format(table_name))
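
# Create the source table and fill it with 10000 rows through the ClickHouse
# postgresql() table function, then verify the row count.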
def create_and_fill_postgres_table(cursor, table_name, port, host):
create_postgres_table(cursor, table_name)
# Fill postgres table using clickhouse postgres table function and check
table_func = """postgresql('{}:{}', 'clickhouse', '{}', 'postgres', 'mysecretpassword')""".format(
host, port, table_name
)
node1.query(
"""INSERT INTO TABLE FUNCTION {} SELECT number, number, number from numbers(10000)
""".format(
table_func
)
)
result = node1.query("SELECT count() FROM {}".format(table_func))
assert result.rstrip() == "10000"
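
# Create a Dictionary-engine table over dict<index>, one of the dictionaries
# declared in configs/dictionaries/postgres_dict.xml.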
def create_dict(table_name, index=0):
node1.query(click_dict_table_template.format(table_name, "dict" + str(index)))
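
# Module-wide fixture: start the cluster, create the `test` database in
# ClickHouse and a 'clickhouse' database on both PostgreSQL instances.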
@pytest.fixture(scope="module")
def started_cluster():
try:
cluster.start()
node1.query("CREATE DATABASE IF NOT EXISTS test")
postgres_conn = get_postgres_conn(
ip=cluster.postgres_ip, port=cluster.postgres_port
)
print("postgres1 connected")
create_postgres_db(postgres_conn, "clickhouse")
postgres_conn = get_postgres_conn(
ip=cluster.postgres2_ip, port=cluster.postgres_port
)
print("postgres2 connected")
create_postgres_db(postgres_conn, "clickhouse")
yield cluster
finally:
cluster.shutdown()
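
# Basic case: a dictionary backed by a PostgreSQL table returns the expected
# row count and key/value lookups after SYSTEM RELOAD DICTIONARY.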
def test_load_dictionaries(started_cluster):
conn = get_postgres_conn(
ip=started_cluster.postgres_ip,
database=True,
port=started_cluster.postgres_port,
)
cursor = conn.cursor()
table_name = "test0"
create_and_fill_postgres_table(
cursor,
table_name,
port=started_cluster.postgres_port,
host=started_cluster.postgres_ip,
)
create_dict(table_name)
dict_name = "dict0"
node1.query("SYSTEM RELOAD DICTIONARY {}".format(dict_name))
assert (
node1.query(
"SELECT count() FROM `test`.`dict_table_{}`".format(table_name)
).rstrip()
== "10000"
)
assert (
node1.query("SELECT dictGetUInt32('{}', 'key', toUInt64(0))".format(dict_name))
== "0\n"
)
assert (
node1.query(
"SELECT dictGetUInt32('{}', 'value', toUInt64(9999))".format(dict_name)
)
== "9999\n"
)
cursor.execute("DROP TABLE IF EXISTS {}".format(table_name))
node1.query("DROP TABLE IF EXISTS {}".format(table_name))
node1.query("DROP DICTIONARY IF EXISTS {}".format(dict_name))
def test_postgres_dictionaries_custom_query_full_load(started_cluster):
conn = get_postgres_conn(
ip=started_cluster.postgres_ip,
database=True,
port=started_cluster.postgres_port,
)
cursor = conn.cursor()
cursor.execute(
"CREATE TABLE IF NOT EXISTS test_table_1 (id Integer, value_1 Text);"
)
cursor.execute(
"CREATE TABLE IF NOT EXISTS test_table_2 (id Integer, value_2 Text);"
)
cursor.execute("INSERT INTO test_table_1 VALUES (1, 'Value_1');")
cursor.execute("INSERT INTO test_table_2 VALUES (1, 'Value_2');")
query = node1.query
query(
"""
CREATE DICTIONARY test_dictionary_custom_query
(
id UInt64,
value_1 String,
value_2 String
)
PRIMARY KEY id
LAYOUT(FLAT())
SOURCE(PostgreSQL(
DB 'clickhouse'
HOST '{}'
PORT {}
USER 'postgres'
PASSWORD 'mysecretpassword'
QUERY $doc$SELECT id, value_1, value_2 FROM test_table_1 INNER JOIN test_table_2 USING (id);$doc$))
LIFETIME(0)
""".format(
started_cluster.postgres_ip, started_cluster.postgres_port
)
)
result = query("SELECT id, value_1, value_2 FROM test_dictionary_custom_query")
assert result == "1\tValue_1\tValue_2\n"
query("DROP DICTIONARY test_dictionary_custom_query;")
cursor.execute("DROP TABLE test_table_2;")
cursor.execute("DROP TABLE test_table_1;")
def test_postgres_dictionaries_custom_query_partial_load_simple_key(started_cluster):
conn = get_postgres_conn(
ip=started_cluster.postgres_ip,
database=True,
port=started_cluster.postgres_port,
)
cursor = conn.cursor()
cursor.execute(
"CREATE TABLE IF NOT EXISTS test_table_1 (id Integer, value_1 Text);"
)
cursor.execute(
"CREATE TABLE IF NOT EXISTS test_table_2 (id Integer, value_2 Text);"
)
cursor.execute("INSERT INTO test_table_1 VALUES (1, 'Value_1');")
cursor.execute("INSERT INTO test_table_2 VALUES (1, 'Value_2');")
query = node1.query
query(
"""
CREATE DICTIONARY test_dictionary_custom_query
(
id UInt64,
value_1 String,
value_2 String
)
PRIMARY KEY id
LAYOUT(DIRECT())
SOURCE(PostgreSQL(
DB 'clickhouse'
HOST '{}'
PORT {}
USER 'postgres'
PASSWORD 'mysecretpassword'
QUERY $doc$SELECT id, value_1, value_2 FROM test_table_1 INNER JOIN test_table_2 USING (id) WHERE {{condition}};$doc$))
""".format(
started_cluster.postgres_ip, started_cluster.postgres_port
)
)
result = query(
"SELECT dictGet('test_dictionary_custom_query', ('value_1', 'value_2'), toUInt64(1))"
)
assert result == "('Value_1','Value_2')\n"
query("DROP DICTIONARY test_dictionary_custom_query;")
cursor.execute("DROP TABLE test_table_2;")
cursor.execute("DROP TABLE test_table_1;")
def test_postgres_dictionaries_custom_query_partial_load_complex_key(started_cluster):
conn = get_postgres_conn(
ip=started_cluster.postgres_ip,
database=True,
port=started_cluster.postgres_port,
)
cursor = conn.cursor()
cursor.execute(
"CREATE TABLE IF NOT EXISTS test_table_1 (id Integer, key Text, value_1 Text);"
)
cursor.execute(
"CREATE TABLE IF NOT EXISTS test_table_2 (id Integer, key Text, value_2 Text);"
)
cursor.execute("INSERT INTO test_table_1 VALUES (1, 'Key', 'Value_1');")
cursor.execute("INSERT INTO test_table_2 VALUES (1, 'Key', 'Value_2');")
query = node1.query
query(
"""
CREATE DICTIONARY test_dictionary_custom_query
(
id UInt64,
key String,
value_1 String,
value_2 String
)
PRIMARY KEY id, key
LAYOUT(COMPLEX_KEY_DIRECT())
SOURCE(PostgreSQL(
DB 'clickhouse'
HOST '{}'
PORT {}
USER 'postgres'
PASSWORD 'mysecretpassword'
QUERY $doc$SELECT id, key, value_1, value_2 FROM test_table_1 INNER JOIN test_table_2 USING (id, key) WHERE {{condition}};$doc$))
""".format(
started_cluster.postgres_ip, started_cluster.postgres_port
)
)
result = query(
"SELECT dictGet('test_dictionary_custom_query', ('value_1', 'value_2'), (toUInt64(1), 'Key'))"
)
assert result == "('Value_1','Value_2')\n"
query("DROP DICTIONARY test_dictionary_custom_query;")
cursor.execute("DROP TABLE test_table_2;")
cursor.execute("DROP TABLE test_table_1;")
def test_invalidate_query(started_cluster):
conn = get_postgres_conn(
ip=started_cluster.postgres_ip,
database=True,
port=started_cluster.postgres_port,
)
cursor = conn.cursor()
table_name = "test0"
create_and_fill_postgres_table(
cursor,
table_name,
port=started_cluster.postgres_port,
host=started_cluster.postgres_ip,
)
# invalidate query: SELECT value FROM test0 WHERE id = 0
dict_name = "dict0"
create_dict(table_name)
node1.query("SYSTEM RELOAD DICTIONARY {}".format(dict_name))
assert (
node1.query(
"SELECT dictGetUInt32('{}', 'value', toUInt64(0))".format(dict_name)
)
== "0\n"
)
assert (
node1.query(
"SELECT dictGetUInt32('{}', 'value', toUInt64(1))".format(dict_name)
)
== "1\n"
)
# update should happen
cursor.execute("UPDATE {} SET value=value+1 WHERE id = 0".format(table_name))
while True:
result = node1.query(
"SELECT dictGetUInt32('{}', 'value', toUInt64(0))".format(dict_name)
)
if result != "0\n":
break
assert (
node1.query(
"SELECT dictGetUInt32('{}', 'value', toUInt64(0))".format(dict_name)
)
== "1\n"
)
# no update should happen
cursor.execute("UPDATE {} SET value=value*2 WHERE id != 0".format(table_name))
time.sleep(5)
assert (
node1.query(
"SELECT dictGetUInt32('{}', 'value', toUInt64(0))".format(dict_name)
)
== "1\n"
)
assert (
node1.query(
"SELECT dictGetUInt32('{}', 'value', toUInt64(1))".format(dict_name)
)
== "1\n"
)
# update should happen
cursor.execute("UPDATE {} SET value=value+1 WHERE id = 0".format(table_name))
time.sleep(5)
assert (
node1.query(
"SELECT dictGetUInt32('{}', 'value', toUInt64(0))".format(dict_name)
)
== "2\n"
)
assert (
node1.query(
"SELECT dictGetUInt32('{}', 'value', toUInt64(1))".format(dict_name)
)
== "2\n"
)
node1.query("DROP TABLE IF EXISTS {}".format(table_name))
node1.query("DROP DICTIONARY IF EXISTS {}".format(dict_name))
cursor.execute("DROP TABLE IF EXISTS {}".format(table_name))
def test_dictionary_with_replicas(started_cluster):
conn1 = get_postgres_conn(
ip=started_cluster.postgres_ip,
port=started_cluster.postgres_port,
database=True,
)
cursor1 = conn1.cursor()
conn2 = get_postgres_conn(
ip=started_cluster.postgres2_ip,
port=started_cluster.postgres_port,
database=True,
)
cursor2 = conn2.cursor()
create_postgres_table(cursor1, "test1")
create_postgres_table(cursor2, "test1")
cursor1.execute(
"INSERT INTO test1 select i, i, i from generate_series(0, 99) as t(i);"
)
cursor2.execute(
"INSERT INTO test1 select i, i, i from generate_series(100, 199) as t(i);"
)
create_dict("test1", 1)
result = node1.query("SELECT * FROM `test`.`dict_table_test1` ORDER BY key")
# priority 0 - non running port
assert node1.contains_in_log("PostgreSQLConnectionPool: Connection error*")
# priority 1 - postgres2, table contains rows with values 100-200
# priority 2 - postgres1, table contains rows with values 0-100
expected = node1.query("SELECT number, number FROM numbers(100, 100)")
assert result == expected
cursor1.execute("DROP TABLE IF EXISTS test1")
cursor2.execute("DROP TABLE IF EXISTS test1")
node1.query("DROP TABLE IF EXISTS test1")
node1.query("DROP DICTIONARY IF EXISTS dict1")
def test_postgres_schema(started_cluster):
conn = get_postgres_conn(
ip=started_cluster.postgres_ip,
port=started_cluster.postgres_port,
database=True,
)
cursor = conn.cursor()
cursor.execute("CREATE SCHEMA test_schema")
cursor.execute("CREATE TABLE test_schema.test_table (id integer, value integer)")
cursor.execute(
"INSERT INTO test_schema.test_table SELECT i, i FROM generate_series(0, 99) as t(i)"
)
node1.query(
"""
DROP DICTIONARY IF EXISTS postgres_dict;
CREATE DICTIONARY postgres_dict (id UInt32, value UInt32)
PRIMARY KEY id
SOURCE(POSTGRESQL(
port 5432
host 'postgres1'
user 'postgres'
password 'mysecretpassword'
db 'clickhouse'
table 'test_schema.test_table'))
LIFETIME(MIN 1 MAX 2)
LAYOUT(HASHED());
"""
)
result = node1.query("SELECT dictGetUInt32(postgres_dict, 'value', toUInt64(1))")
assert int(result.strip()) == 1
result = node1.query("SELECT dictGetUInt32(postgres_dict, 'value', toUInt64(99))")
assert int(result.strip()) == 99
node1.query("DROP DICTIONARY IF EXISTS postgres_dict")
def test_predefined_connection_configuration(started_cluster):
conn = get_postgres_conn(
ip=started_cluster.postgres_ip,
port=started_cluster.postgres_port,
database=True,
)
cursor = conn.cursor()
cursor.execute("DROP TABLE IF EXISTS test_table")
cursor.execute("CREATE TABLE test_table (id integer, value integer)")
cursor.execute(
"INSERT INTO test_table SELECT i, i FROM generate_series(0, 99) as t(i)"
)
node1.query(
"""
DROP DICTIONARY IF EXISTS postgres_dict;
CREATE DICTIONARY postgres_dict (id UInt32, value UInt32)
PRIMARY KEY id
SOURCE(POSTGRESQL(NAME postgres1))
LIFETIME(MIN 1 MAX 2)
LAYOUT(HASHED());
"""
)
result = node1.query("SELECT dictGetUInt32(postgres_dict, 'value', toUInt64(99))")
assert int(result.strip()) == 99
cursor.execute("DROP SCHEMA IF EXISTS test_schema CASCADE")
cursor.execute("CREATE SCHEMA test_schema")
cursor.execute("CREATE TABLE test_schema.test_table (id integer, value integer)")
cursor.execute(
"INSERT INTO test_schema.test_table SELECT i, 100 FROM generate_series(0, 99) as t(i)"
)
node1.query(
"""
DROP DICTIONARY postgres_dict;
CREATE DICTIONARY postgres_dict (id UInt32, value UInt32)
PRIMARY KEY id
SOURCE(POSTGRESQL(NAME postgres1 SCHEMA test_schema))
LIFETIME(MIN 1 MAX 2)
LAYOUT(HASHED());
"""
)
result = node1.query("SELECT dictGetUInt32(postgres_dict, 'value', toUInt64(99))")
assert int(result.strip()) == 100
node1.query(
"""
DROP DICTIONARY postgres_dict;
CREATE DICTIONARY postgres_dict (id UInt32, value UInt32)
PRIMARY KEY id
SOURCE(POSTGRESQL(NAME postgres2))
LIFETIME(MIN 1 MAX 2)
LAYOUT(HASHED());
"""
)
result = node1.query("SELECT dictGetUInt32(postgres_dict, 'value', toUInt64(99))")
assert int(result.strip()) == 100
node1.query("DROP DICTIONARY postgres_dict")
node1.query(
"""
CREATE DICTIONARY postgres_dict (id UInt32, value UInt32)
PRIMARY KEY id
SOURCE(POSTGRESQL(NAME postgres4))
LIFETIME(MIN 1 MAX 2)
LAYOUT(HASHED());
"""
)
result = node1.query_and_get_error(
"SELECT dictGetUInt32(postgres_dict, 'value', toUInt64(99))"
)
node1.query(
"""
DROP DICTIONARY postgres_dict;
CREATE DICTIONARY postgres_dict (id UInt32, value UInt32)
PRIMARY KEY id
SOURCE(POSTGRESQL(NAME postgres1 PORT 5432))
LIFETIME(MIN 1 MAX 2)
LAYOUT(HASHED());
"""
)
result = node1.query("SELECT dictGetUInt32(postgres_dict, 'value', toUInt64(99))")
assert int(result.strip()) == 99
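
# An unknown key in the source definition ('dbbb' instead of 'db') must be
# rejected and reported in the log.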
def test_bad_configuration(started_cluster):
conn = get_postgres_conn(
ip=started_cluster.postgres_ip,
port=started_cluster.postgres_port,
database=True,
)
cursor = conn.cursor()
node1.query(
"""
DROP DICTIONARY IF EXISTS postgres_dict;
CREATE DICTIONARY postgres_dict (id UInt32, value UInt32)
PRIMARY KEY id
SOURCE(POSTGRESQL(
port 5432
host 'postgres1'
user 'postgres'
password 'mysecretpassword'
dbbb 'clickhouse'
table 'test_schema.test_table'))
LIFETIME(MIN 1 MAX 2)
LAYOUT(HASHED());
"""
)
node1.query_and_get_error(
"SELECT dictGetUInt32(postgres_dict, 'value', toUInt64(1))"
)
assert node1.contains_in_log("Unexpected key `dbbb`")
if __name__ == "__main__":
cluster.start()
input("Cluster created, press any key to destroy...")
cluster.shutdown()