# ClickHouse/tests/integration/helpers/postgres_utility.py

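"""Helpers shared by the PostgreSQL integration tests: connection setup,
replication slots, CREATE TABLE templates, the PostgresManager wrapper that
creates and drops databases on both the PostgreSQL and the ClickHouse side,
and polling assertions that wait for MaterializedPostgreSQL tables to sync.
"""
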
import time
import psycopg2
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
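
# CREATE TABLE statements for the test tables created on the PostgreSQL side.
# The "{}" placeholder is filled with the table name; template 4 additionally
# takes a schema name.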
postgres_table_template = """
    CREATE TABLE IF NOT EXISTS "{}" (
    key Integer NOT NULL, value Integer, PRIMARY KEY(key))
    """
postgres_table_template_2 = """
    CREATE TABLE IF NOT EXISTS "{}" (
    key Integer NOT NULL, value1 Integer, value2 Integer, value3 Integer, PRIMARY KEY(key))
    """
postgres_table_template_3 = """
    CREATE TABLE IF NOT EXISTS "{}" (
    key1 Integer NOT NULL, value1 Integer, key2 Integer NOT NULL, value2 Integer NOT NULL)
    """
postgres_table_template_4 = """
    CREATE TABLE IF NOT EXISTS "{}"."{}" (
    key Integer NOT NULL, value Integer, PRIMARY KEY(key))
    """
postgres_table_template_5 = """
    CREATE TABLE IF NOT EXISTS "{}" (
    key Integer NOT NULL, value UUID, PRIMARY KEY(key))
    """
postgres_table_template_6 = """
    CREATE TABLE IF NOT EXISTS "{}" (
    key Integer NOT NULL, value Text, PRIMARY KEY(key))
    """
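

# Opens a psycopg2 connection to the test PostgreSQL server (user 'postgres',
# password 'mysecretpassword'). With database=True the connection targets
# `database_name`; with replication=True the connection string requests a
# logical replication session so that CREATE_REPLICATION_SLOT can be executed.
# A minimal usage sketch (assuming `cluster` is the ClickHouseCluster fixture
# exposing postgres_ip/postgres_port):
#   conn = get_postgres_conn(cluster.postgres_ip, cluster.postgres_port, database=True)
#   cursor = conn.cursor()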
def get_postgres_conn(
    ip,
    port,
    database=False,
    auto_commit=True,
    database_name="postgres_database",
    replication=False,
):
    if database:
        conn_string = f"host={ip} port={port} dbname='{database_name}' user='postgres' password='mysecretpassword'"
    else:
        conn_string = (
            f"host={ip} port={port} user='postgres' password='mysecretpassword'"
        )

    if replication:
        conn_string += " replication='database'"

    conn = psycopg2.connect(conn_string)
    if auto_commit:
        conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
        conn.autocommit = True
    return conn
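

# Creates a logical replication slot using the pgoutput plugin with an exported
# snapshot; `conn` must have been opened with replication=True. The result row
# holds the slot name, the starting LSN and the exported snapshot name, and the
# snapshot name is returned so the caller can take a consistent initial dump.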
def create_replication_slot(conn, slot_name="user_slot"):
    cursor = conn.cursor()
    cursor.execute(
        f"CREATE_REPLICATION_SLOT {slot_name} LOGICAL pgoutput EXPORT_SNAPSHOT"
    )
    result = cursor.fetchall()
    print(result[0][0])  # slot name
    print(result[0][1])  # start lsn
    print(result[0][2])  # snapshot
    return result[0][2]


def drop_replication_slot(conn, slot_name="user_slot"):
    cursor = conn.cursor()
    cursor.execute(f"select pg_drop_replication_slot('{slot_name}')")


def create_postgres_schema(cursor, schema_name):
    drop_postgres_schema(cursor, schema_name)
    cursor.execute(f"CREATE SCHEMA {schema_name}")


def drop_postgres_schema(cursor, schema_name):
    cursor.execute(f"DROP SCHEMA IF EXISTS {schema_name} CASCADE")


def create_postgres_table(
    cursor,
    table_name,
    replica_identity_full=False,
    template=postgres_table_template,
):
    drop_postgres_table(cursor, table_name)
    query = template.format(table_name)
    print(f"Query: {query}")
    cursor.execute(query)
    if replica_identity_full:
        cursor.execute(f"""ALTER TABLE "{table_name}" REPLICA IDENTITY FULL;""")


def drop_postgres_table(cursor, name, database_name=""):
    if database_name != "":
        cursor.execute(f"""DROP TABLE IF EXISTS "{database_name}"."{name}" """)
    else:
        cursor.execute(f"""DROP TABLE IF EXISTS "{name}" """)


def create_postgres_table_with_schema(cursor, schema_name, table_name):
    drop_postgres_table_with_schema(cursor, schema_name, table_name)
    cursor.execute(postgres_table_template_4.format(schema_name, table_name))


def drop_postgres_table_with_schema(cursor, schema_name, table_name):
    cursor.execute(f"""DROP TABLE IF EXISTS "{schema_name}"."{table_name}" """)
class PostgresManager:
    def __init__(self):
        self.created_postgres_db_list = set()
        self.created_materialized_postgres_db_list = set()
        self.created_ch_postgres_db_list = set()

    def init(
        self,
        instance,
        ip,
        port,
        default_database="postgres_database",
        postgres_db_exists=False,
    ):
        self.instance = instance
        self.ip = ip
        self.port = port
        self.default_database = default_database
        self.postgres_db_exists = postgres_db_exists
        self.prepare()

    def get_default_database(self):
        return self.default_database

    def restart(self):
        try:
            self.clear()
            self.prepare()
        except Exception as ex:
            self.prepare()
            raise ex

    def execute(self, query):
        return self.cursor.execute(query)

    def prepare(self):
        self.conn = get_postgres_conn(ip=self.ip, port=self.port)
        self.cursor = self.conn.cursor()
        if self.default_database != "":
            if not self.postgres_db_exists:
                self.create_postgres_db(self.default_database)
            self.conn = get_postgres_conn(
                ip=self.ip,
                port=self.port,
                database=True,
                database_name=self.default_database,
            )
            self.cursor = self.conn.cursor()
            self.create_clickhouse_postgres_db()

    def clear(self):
        if self.conn.closed == 0:
            self.conn.close()

        for db in self.created_materialized_postgres_db_list.copy():
            self.drop_materialized_db(db)
        for db in self.created_ch_postgres_db_list.copy():
            self.drop_clickhouse_postgres_db(db)

        if len(self.created_postgres_db_list) > 0:
            self.conn = get_postgres_conn(ip=self.ip, port=self.port)
            self.cursor = self.conn.cursor()
            for db in self.created_postgres_db_list.copy():
                self.drop_postgres_db(db)

    def get_db_cursor(self, database_name=""):
        if database_name == "":
            database_name = self.default_database
        self.conn = get_postgres_conn(
            ip=self.ip, port=self.port, database=True, database_name=database_name
        )
        return self.conn.cursor()

    def database_or_default(self, database_name):
        if database_name != "":
            return database_name
        if self.default_database != "":
            return self.default_database
        raise Exception("Database name is empty")

    def create_postgres_db(self, database_name=""):
        database_name = self.database_or_default(database_name)
        self.drop_postgres_db(database_name)
        self.created_postgres_db_list.add(database_name)
        self.cursor.execute(f'CREATE DATABASE "{database_name}"')

    def drop_postgres_db(self, database_name=""):
        database_name = self.database_or_default(database_name)
        self.cursor.execute(f'DROP DATABASE IF EXISTS "{database_name}" WITH (FORCE)')
        if database_name in self.created_postgres_db_list:
            self.created_postgres_db_list.remove(database_name)
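
    # Creates a ClickHouse database with the PostgreSQL database engine pointing
    # at the test server; `postgres_database` defaults to `database_name`, and a
    # non-empty `schema_name` is passed as the extra engine argument.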
    def create_clickhouse_postgres_db(
        self,
        database_name="",
        schema_name="",
        postgres_database="",
    ):
        database_name = self.database_or_default(database_name)
        if postgres_database == "":
            postgres_database = database_name
        self.drop_clickhouse_postgres_db(database_name)
        self.created_ch_postgres_db_list.add(database_name)

        if len(schema_name) == 0:
            self.instance.query(
                f"""
                CREATE DATABASE \"{database_name}\"
                ENGINE = PostgreSQL('{self.ip}:{self.port}', '{postgres_database}', 'postgres', 'mysecretpassword')"""
            )
        else:
            self.instance.query(
                f"""
                CREATE DATABASE \"{database_name}\"
                ENGINE = PostgreSQL('{self.ip}:{self.port}', '{postgres_database}', 'postgres', 'mysecretpassword', '{schema_name}')"""
            )

    def drop_clickhouse_postgres_db(self, database_name=""):
        database_name = self.database_or_default(database_name)
        self.instance.query(f'DROP DATABASE IF EXISTS "{database_name}"')
        if database_name in self.created_ch_postgres_db_list:
            self.created_ch_postgres_db_list.remove(database_name)
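
    # Creates a MaterializedPostgreSQL database in ClickHouse that replicates
    # `postgres_database`. Extra engine settings are appended as a SETTINGS
    # clause and `table_overrides` is appended verbatim, e.g. (an illustrative
    # sketch, not the only supported form):
    #   pg_manager.create_materialized_db(
    #       ip, port,
    #       settings=["materialized_postgresql_tables_list = 'postgresql_replica_0'"],
    #   )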
    def create_materialized_db(
        self,
        ip,
        port,
        materialized_database="test_database",
        postgres_database="",
        settings=[],
        table_overrides="",
        user="postgres",
        password="mysecretpassword",
    ):
        postgres_database = self.database_or_default(postgres_database)
        self.created_materialized_postgres_db_list.add(materialized_database)
        self.instance.query(f"DROP DATABASE IF EXISTS `{materialized_database}`")

        create_query = f"CREATE DATABASE `{materialized_database}` ENGINE = MaterializedPostgreSQL('{ip}:{port}', '{postgres_database}', '{user}', '{password}')"
        if len(settings) > 0:
            create_query += " SETTINGS "
            for i in range(len(settings)):
                if i != 0:
                    create_query += ", "
                create_query += settings[i]
        create_query += table_overrides
        self.instance.query(create_query)
        assert materialized_database in self.instance.query("SHOW DATABASES")

    def drop_materialized_db(self, materialized_database="test_database"):
        self.instance.query(f"DROP DATABASE IF EXISTS `{materialized_database}` SYNC")
        if materialized_database in self.created_materialized_postgres_db_list:
            self.created_materialized_postgres_db_list.remove(materialized_database)

    def create_postgres_schema(self, name):
        create_postgres_schema(self.cursor, name)

    def create_postgres_table(
        self, table_name, database_name="", template=postgres_table_template
    ):
        database_name = self.database_or_default(database_name)
        cursor = self.cursor
        if database_name != self.get_default_database():
            try:
                self.create_postgres_db(database_name)
            except Exception:
                # PostgreSQL has no CREATE DATABASE IF NOT EXISTS, so ignore
                # the error if the database already exists.
                pass
            conn = get_postgres_conn(
                ip=self.ip,
                port=self.port,
                database=True,
                database_name=database_name,
            )
            cursor = conn.cursor()
        create_postgres_table(cursor, table_name, template=template)

    def create_and_fill_postgres_table(self, table_name, database_name=""):
        database_name = self.database_or_default(database_name)
        self.create_postgres_table(table_name, database_name)
        self.instance.query(
            f"INSERT INTO `{database_name}`.`{table_name}` SELECT number, number from numbers(50)"
        )

    def create_and_fill_postgres_tables(
        self,
        tables_num,
        numbers=50,
        database_name="",
        table_name_base="postgresql_replica",
    ):
        for i in range(tables_num):
            table_name = f"{table_name_base}_{i}"
            self.create_postgres_table(table_name, database_name)
            if numbers > 0:
                db = self.database_or_default(database_name)
                self.instance.query(
                    f"INSERT INTO `{db}`.{table_name} SELECT number, number from numbers({numbers})"
                )
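

# A mixed INSERT/UPDATE/DELETE workload that tests run against the
# postgresql_replica_{} tables (the placeholder is the table index) to
# exercise replication of ongoing changes.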
queries = [
    "INSERT INTO postgresql_replica_{} select i, i from generate_series(0, 10000) as t(i);",
    "DELETE FROM postgresql_replica_{} WHERE (value*value) % 3 = 0;",
    "UPDATE postgresql_replica_{} SET value = value - 125 WHERE key % 2 = 0;",
    "UPDATE postgresql_replica_{} SET key=key+20000 WHERE key%2=0",
    "INSERT INTO postgresql_replica_{} select i, i from generate_series(40000, 50000) as t(i);",
    "DELETE FROM postgresql_replica_{} WHERE key % 10 = 0;",
    "UPDATE postgresql_replica_{} SET value = value + 101 WHERE key % 2 = 1;",
    "UPDATE postgresql_replica_{} SET key=key+80000 WHERE key%2=1",
    "DELETE FROM postgresql_replica_{} WHERE value % 2 = 0;",
    "UPDATE postgresql_replica_{} SET value = value + 2000 WHERE key % 5 = 0;",
    "INSERT INTO postgresql_replica_{} select i, i from generate_series(200000, 250000) as t(i);",
    "DELETE FROM postgresql_replica_{} WHERE value % 3 = 0;",
    "UPDATE postgresql_replica_{} SET value = value * 2 WHERE key % 3 = 0;",
    "UPDATE postgresql_replica_{} SET key=key+500000 WHERE key%2=1",
    "INSERT INTO postgresql_replica_{} select i, i from generate_series(1000000, 1050000) as t(i);",
    "DELETE FROM postgresql_replica_{} WHERE value % 9 = 2;",
    "UPDATE postgresql_replica_{} SET key=key+10000000",
    "UPDATE postgresql_replica_{} SET value = value + 2 WHERE key % 3 = 1;",
    "DELETE FROM postgresql_replica_{} WHERE value%5 = 0;",
]
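

# Polls SHOW TABLES in the materialized database until the nested table shows
# up (MaterializedPostgreSQL creates nested tables asynchronously).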
def assert_nested_table_is_created(
    instance, table_name, materialized_database="test_database", schema_name=""
):
    if len(schema_name) == 0:
        table = table_name
    else:
        table = schema_name + "." + table_name

    print(f"Checking table {table} exists in {materialized_database}")

    database_tables = instance.query(
        f"SHOW TABLES FROM `{materialized_database}` WHERE name = '{table}'"
    )
    while table not in database_tables:
        time.sleep(0.2)
        database_tables = instance.query(
            f"SHOW TABLES FROM `{materialized_database}` WHERE name = '{table}'"
        )
    assert table in database_tables
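

# Polls system.columns until the table has the expected number of user columns
# (names starting with '_' are internal service columns and are ignored).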
def assert_number_of_columns(
    instance, expected, table_name, database_name="test_database"
):
    result = instance.query(
        f"select count() from system.columns where table = '{table_name}' and database = '{database_name}' and not startsWith(name, '_')"
    )
    while int(result) != expected:
        time.sleep(1)
        result = instance.query(
            f"select count() from system.columns where table = '{table_name}' and database = '{database_name}' and not startsWith(name, '_')"
        )
    print("Number of columns ok")
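

# Retries for up to ~50 seconds until a SELECT from the materialized table
# matches a SELECT from the source table (read through the ClickHouse
# `postgres_database` database), then asserts that the results are equal.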
def check_tables_are_synchronized(
    instance,
    table_name,
    order_by="key",
    postgres_database="postgres_database",
    materialized_database="test_database",
    schema_name="",
    columns=["*"],
):
    assert_nested_table_is_created(
        instance, table_name, materialized_database, schema_name
    )

    table_path = ""
    if len(schema_name) == 0:
        table_path = f"`{materialized_database}`.`{table_name}`"
    else:
        table_path = f"`{materialized_database}`.`{schema_name}.{table_name}`"

    print(f"Checking table is synchronized: {table_path}")
    result_query = f"select * from {table_path} order by {order_by};"

    expected = instance.query(
        f"select {','.join(columns)} from `{postgres_database}`.`{table_name}` order by {order_by};"
    )
    result = instance.query(result_query)

    for _ in range(50):
        if result == expected:
            break
        else:
            time.sleep(1)
        result = instance.query(result_query)

    if result != expected:
        count = int(instance.query(f"select count() from {table_path}"))
        expected_count = int(
            instance.query(f"select count() from `{postgres_database}`.`{table_name}`")
        )
        print(f"Having {count}, expected {expected_count}")

    assert result == expected


def check_several_tables_are_synchronized(
    instance,
    tables_num,
    order_by="key",
    postgres_database="postgres_database",
    materialized_database="test_database",
    schema_name="",
):
    for i in range(tables_num):
        check_tables_are_synchronized(
            instance,
            f"postgresql_replica_{i}",
            postgres_database=postgres_database,
            materialized_database=materialized_database,
        )