#!/usr/bin/env python3
# tests/integration/test_keeper_zookeeper_converter/test.py
import pytest
from helpers.cluster import ClickHouseCluster
import helpers.keeper_utils as keeper_utils
from kazoo.client import KazooClient
from kazoo.retry import KazooRetry
from kazoo.security import make_acl
from kazoo.handlers.threading import KazooTimeoutError
import os
import time


cluster = ClickHouseCluster(__file__)
node = cluster.add_instance(
"node",
main_configs=["configs/keeper_config.xml", "configs/logs_conf.xml"],
stay_alive=True,
)


def start_zookeeper():
node.exec_in_container(["bash", "-c", "/opt/zookeeper/bin/zkServer.sh start"])


def stop_zookeeper():
node.exec_in_container(["bash", "-c", "/opt/zookeeper/bin/zkServer.sh stop"])
timeout = time.time() + 60
    while node.get_process_pid("zookeeper") is not None:
if time.time() > timeout:
raise Exception("Failed to stop ZooKeeper in 60 secs")
time.sleep(0.2)


def clear_zookeeper():
node.exec_in_container(["bash", "-c", "rm -fr /zookeeper/*"])


def restart_and_clear_zookeeper():
stop_zookeeper()
clear_zookeeper()
start_zookeeper()


def restart_zookeeper():
stop_zookeeper()
start_zookeeper()
def generate_zk_snapshot():
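    # Restart ZooKeeper so that it writes a fresh snapshot, then validate the newest
    # snapshot file with zkSnapShotToolkit; retry up to 100 times if the check fails.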
for _ in range(100):
stop_zookeeper()
start_zookeeper()
time.sleep(2)
stop_zookeeper()
# get last snapshot
last_snapshot = node.exec_in_container(
[
"bash",
"-c",
"find /zookeeper/version-2 -name 'snapshot.*' -printf '%T@ %p\n' | sort -n | awk 'END {print $2}'",
]
).strip()
print(f"Latest snapshot: {last_snapshot}")
try:
# verify last snapshot
            # zkSnapShotToolkit is a tool to inspect generated snapshots - if the snapshot is broken, an exception is thrown
node.exec_in_container(
[
"bash",
"-c",
f"/opt/zookeeper/bin/zkSnapShotToolkit.sh {last_snapshot}",
]
)
return
except Exception as err:
print(f"Got error while reading snapshot: {err}")
raise Exception("Failed to generate a ZooKeeper snapshot")


def clear_clickhouse_data():
node.exec_in_container(
[
"bash",
"-c",
"rm -fr /var/lib/clickhouse/coordination/logs/* /var/lib/clickhouse/coordination/snapshots/*",
]
)
def convert_zookeeper_data():
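    # Archive the raw ZooKeeper data for debugging, then run clickhouse keeper-converter
    # to turn ZooKeeper's snapshots and logs into a ClickHouse Keeper snapshot.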
node.exec_in_container(
[
"bash",
"-c",
"tar -cvzf /var/lib/clickhouse/zk-data.tar.gz /zookeeper/version-2",
]
)
cmd = "/usr/bin/clickhouse keeper-converter --zookeeper-logs-dir /zookeeper/version-2/ --zookeeper-snapshots-dir /zookeeper/version-2/ --output-dir /var/lib/clickhouse/coordination/snapshots"
node.exec_in_container(["bash", "-c", cmd])


def stop_clickhouse():
node.stop_clickhouse()


def start_clickhouse():
node.start_clickhouse()
keeper_utils.wait_until_connected(cluster, node)
def copy_zookeeper_data(make_zk_snapshots):
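    # Stop both servers (optionally forcing a fresh ZooKeeper snapshot first), wipe
    # Keeper's coordination data, convert the ZooKeeper data, and start both back up.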
if make_zk_snapshots: # force zookeeper to create snapshot
generate_zk_snapshot()
else:
stop_zookeeper()
stop_clickhouse()
clear_clickhouse_data()
convert_zookeeper_data()
start_zookeeper()
start_clickhouse()


@pytest.fixture(scope="module")
def started_cluster():
try:
cluster.start()
yield cluster
finally:
cluster.shutdown()
def get_fake_zk(timeout=60.0):
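    # Kazoo client pointed at ClickHouse Keeper, which listens on port 9181 here.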
_fake_zk_instance = KazooClient(
hosts=cluster.get_instance_ip("node") + ":9181", timeout=timeout
)
_fake_zk_instance.start()
return _fake_zk_instance
def get_genuine_zk(timeout=60.0):
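    # Kazoo client pointed at the genuine ZooKeeper on port 2181; connection attempts
    # are retried because ZooKeeper may still be starting up after a restart.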
CONNECTION_RETRIES = 100
for i in range(CONNECTION_RETRIES):
try:
_genuine_zk_instance = KazooClient(
hosts=cluster.get_instance_ip("node") + ":2181",
timeout=timeout,
connection_retry=KazooRetry(max_tries=20),
)
_genuine_zk_instance.start()
return _genuine_zk_instance
except KazooTimeoutError:
if i == CONNECTION_RETRIES - 1:
raise
print(
"Failed to connect to ZK cluster because of timeout. Restarting cluster and trying again."
)
time.sleep(0.2)
restart_zookeeper()
def compare_stats(stat1, stat2, path, ignore_pzxid=False):
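    # Field-by-field comparison of two ZnodeStat structures; pzxid may be excluded
    # because ZooKeeper and Keeper can legitimately diverge on it (see callers).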
    assert stat1.czxid == stat2.czxid, (
        "path "
        + path
        + " czxids not equal for stats: "
        + str(stat1.czxid)
        + " != "
        + str(stat2.czxid)
    )
    assert stat1.mzxid == stat2.mzxid, (
        "path "
        + path
        + " mzxids not equal for stats: "
        + str(stat1.mzxid)
        + " != "
        + str(stat2.mzxid)
    )
assert stat1.version == stat2.version, (
"path "
+ path
+ " versions not equal for stats: "
+ str(stat1.version)
+ " != "
+ str(stat2.version)
)
assert stat1.cversion == stat2.cversion, (
"path "
+ path
+ " cversions not equal for stats: "
+ str(stat1.cversion)
+ " != "
+ str(stat2.cversion)
)
assert stat1.aversion == stat2.aversion, (
"path "
+ path
+ " aversions not equal for stats: "
+ str(stat1.aversion)
+ " != "
+ str(stat2.aversion)
)
assert stat1.ephemeralOwner == stat2.ephemeralOwner, (
"path "
+ path
+ " ephemeralOwners not equal for stats: "
+ str(stat1.ephemeralOwner)
+ " != "
+ str(stat2.ephemeralOwner)
)
    assert stat1.dataLength == stat2.dataLength, (
        "path "
        + path
        + " dataLengths not equal for stats: "
        + str(stat1.dataLength)
        + " != "
        + str(stat2.dataLength)
    )
assert stat1.numChildren == stat2.numChildren, (
"path "
+ path
+ " numChildren not equal for stats: "
+ str(stat1.numChildren)
+ " != "
+ str(stat2.numChildren)
)
if not ignore_pzxid:
assert stat1.pzxid == stat2.pzxid, (
"path "
+ path
+ " pzxid not equal for stats: "
+ str(stat1.pzxid)
+ " != "
+ str(stat2.pzxid)
)
def compare_states(zk1, zk2, path="/", exclude_paths=[]):
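    # Recursively compare the trees served by ZooKeeper (zk1) and Keeper (zk2).
    # At the root the only allowed difference is Keeper's internal "keeper" node.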
data1, stat1 = zk1.get(path)
data2, stat2 = zk2.get(path)
print("Left Stat", stat1)
print("Right Stat", stat2)
assert data1 == data2, "Data not equal on path " + str(path)
    # "/" and "/zookeeper" have peculiar stats on both sides, so skip comparing them
    if path not in ("/", "/zookeeper") and path not in exclude_paths:
compare_stats(stat1, stat2, path)
first_children = list(sorted(zk1.get_children(path)))
second_children = list(sorted(zk2.get_children(path)))
print("Got children left", first_children)
print("Got children rigth", second_children)
if path == "/":
assert set(first_children) ^ set(second_children) == set(["keeper"])
else:
        assert first_children == second_children, (
            "Children are not equal on path " + path
        )
    for child in first_children:
        if path != "/" or child != "keeper":
            print("Checking child", os.path.join(path, child))
            compare_states(zk1, zk2, os.path.join(path, child), exclude_paths)


@pytest.mark.parametrize(("create_snapshots"), [True, False])
def test_smoke(started_cluster, create_snapshots):
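    # Smoke test: create a single node in ZooKeeper, convert its data with
    # clickhouse keeper-converter, and check that Keeper serves an identical tree.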
restart_and_clear_zookeeper()
genuine_connection = get_genuine_zk()
genuine_connection.create("/test", b"data")
assert genuine_connection.get("/test")[0] == b"data"
copy_zookeeper_data(create_snapshots)
genuine_connection = get_genuine_zk()
fake_connection = get_fake_zk()
compare_states(genuine_connection, fake_connection)
genuine_connection.stop()
genuine_connection.close()
fake_connection.stop()
fake_connection.close()


def get_bytes(s):
return s.encode()


def assert_ephemeral_disappear(connection, path):
for _ in range(200):
if not connection.exists(path):
break
time.sleep(0.1)
else:
raise Exception("ZK refuse to remove ephemeral nodes")


@pytest.mark.parametrize(("create_snapshots"), [True, False])
def test_simple_crud_requests(started_cluster, create_snapshots):
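    # Cover plain creates, sets, deletes, deep paths, sequential and ephemeral nodes,
    # then verify the converted Keeper state matches ZooKeeper, including the
    # sequential-node counters.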
restart_and_clear_zookeeper()
genuine_connection = get_genuine_zk(timeout=5)
for i in range(100):
genuine_connection.create("/test_create" + str(i), get_bytes("data" + str(i)))
# some set queries
for i in range(10):
for j in range(i + 1):
genuine_connection.set("/test_create" + str(i), get_bytes("value" + str(j)))
for i in range(10, 20):
genuine_connection.delete("/test_create" + str(i))
path = "/test_create_deep"
for i in range(10):
genuine_connection.create(path, get_bytes("data" + str(i)))
path = os.path.join(path, str(i))
genuine_connection.create("/test_sequential", b"")
for i in range(10):
genuine_connection.create(
"/test_sequential/" + "a" * i + "-",
get_bytes("dataX" + str(i)),
sequence=True,
)
genuine_connection.create("/test_ephemeral", b"")
for i in range(10):
genuine_connection.create(
"/test_ephemeral/" + str(i), get_bytes("dataX" + str(i)), ephemeral=True
)
copy_zookeeper_data(create_snapshots)
genuine_connection.stop()
genuine_connection.close()
genuine_connection = get_genuine_zk(timeout=5)
fake_connection = get_fake_zk(timeout=5)
for conn in [genuine_connection, fake_connection]:
assert_ephemeral_disappear(conn, "/test_ephemeral/0")
    # After receiving the close request, ZooKeeper updates the pzxid of the ephemeral
    # nodes' parent. Keeper never sees that request (the snapshot was taken before it),
    # so it does not.
compare_states(
genuine_connection, fake_connection, exclude_paths=["/test_ephemeral"]
)
eph1, stat1 = fake_connection.get("/test_ephemeral")
eph2, stat2 = genuine_connection.get("/test_ephemeral")
assert eph1 == eph2
compare_stats(stat1, stat2, "/test_ephemeral", ignore_pzxid=True)
    # in particular, ensure that the sequential node counters match
genuine_connection.create(
"/test_sequential/" + "a" * 10 + "-", get_bytes("dataX" + str(i)), sequence=True
)
fake_connection.create(
"/test_sequential/" + "a" * 10 + "-", get_bytes("dataX" + str(i)), sequence=True
)
first_children = list(sorted(genuine_connection.get_children("/test_sequential")))
second_children = list(sorted(fake_connection.get_children("/test_sequential")))
    assert (
        first_children == second_children
    ), "Children are not equal on path /test_sequential"
genuine_connection.stop()
genuine_connection.close()
fake_connection.stop()
fake_connection.close()


@pytest.mark.parametrize(("create_snapshots"), [True, False])
def test_multi_and_failed_requests(started_cluster, create_snapshots):
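    # Run multi-op transactions, including ones that must fail and roll back, then
    # verify the converted Keeper state matches what ZooKeeper actually committed.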
restart_and_clear_zookeeper()
genuine_connection = get_genuine_zk(timeout=5)
genuine_connection.create("/test_multitransactions")
for i in range(10):
t = genuine_connection.transaction()
t.create("/test_multitransactions/freddy" + str(i), get_bytes("data" + str(i)))
t.create(
"/test_multitransactions/fred" + str(i),
get_bytes("value" + str(i)),
ephemeral=True,
)
t.create(
"/test_multitransactions/smith" + str(i),
get_bytes("entity" + str(i)),
sequence=True,
)
t.set_data("/test_multitransactions", get_bytes("somedata" + str(i)))
t.commit()
with pytest.raises(Exception):
genuine_connection.set(
"/test_multitransactions/freddy0", get_bytes("mustfail" + str(i)), version=1
)
t = genuine_connection.transaction()
t.create("/test_bad_transaction", get_bytes("data" + str(1)))
t.check("/test_multitransactions", version=32)
t.create("/test_bad_transaction1", get_bytes("data" + str(2)))
    # the check against version 32 fails, so the whole transaction must be rolled back
t.commit()
assert genuine_connection.exists("/test_bad_transaction") is None
assert genuine_connection.exists("/test_bad_transaction1") is None
t = genuine_connection.transaction()
t.create("/test_bad_transaction2", get_bytes("data" + str(1)))
t.delete("/test_multitransactions/freddy0", version=5)
    # deleting with a mismatched version fails, so the whole transaction must be rolled back
t.commit()
assert genuine_connection.exists("/test_bad_transaction2") is None
assert genuine_connection.exists("/test_multitransactions/freddy0") is not None
copy_zookeeper_data(create_snapshots)
genuine_connection.stop()
genuine_connection.close()
genuine_connection = get_genuine_zk(timeout=5)
fake_connection = get_fake_zk(timeout=5)
for conn in [genuine_connection, fake_connection]:
assert_ephemeral_disappear(conn, "/test_multitransactions/fred0")
    # After receiving the close request, ZooKeeper updates the pzxid of the ephemeral
    # nodes' parent. Keeper never sees that request (the snapshot was taken before it),
    # so it does not.
compare_states(
genuine_connection, fake_connection, exclude_paths=["/test_multitransactions"]
)
eph1, stat1 = fake_connection.get("/test_multitransactions")
eph2, stat2 = genuine_connection.get("/test_multitransactions")
assert eph1 == eph2
compare_stats(stat1, stat2, "/test_multitransactions", ignore_pzxid=True)
genuine_connection.stop()
genuine_connection.close()
fake_connection.stop()
fake_connection.close()


@pytest.mark.parametrize(("create_snapshots"), [True, False])
def test_acls(started_cluster, create_snapshots):
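    # Create nodes protected by digest ACLs, then verify the ACLs (scheme, ids and
    # permission bits) survive conversion into Keeper.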
restart_and_clear_zookeeper()
genuine_connection = get_genuine_zk()
genuine_connection.add_auth("digest", "user1:password1")
genuine_connection.add_auth("digest", "user2:password2")
genuine_connection.add_auth("digest", "user3:password3")
genuine_connection.create(
"/test_multi_all_acl", b"data", acl=[make_acl("auth", "", all=True)]
)
other_connection = get_genuine_zk()
other_connection.add_auth("digest", "user1:password1")
other_connection.set("/test_multi_all_acl", b"X")
assert other_connection.get("/test_multi_all_acl")[0] == b"X"
yet_other_auth_connection = get_genuine_zk()
yet_other_auth_connection.add_auth("digest", "user2:password2")
yet_other_auth_connection.set("/test_multi_all_acl", b"Y")
genuine_connection.add_auth("digest", "user3:password3")
    # just to check that a non-default ACL mask is serialized and deserialized correctly
genuine_connection.set_acls(
"/test_multi_all_acl",
acls=[
make_acl(
"auth", "", read=True, write=False, create=True, delete=True, admin=True
)
],
)
no_auth_connection = get_genuine_zk()
with pytest.raises(Exception):
no_auth_connection.set("/test_multi_all_acl", b"Z")
copy_zookeeper_data(create_snapshots)
genuine_connection = get_genuine_zk()
genuine_connection.add_auth("digest", "user1:password1")
genuine_connection.add_auth("digest", "user2:password2")
genuine_connection.add_auth("digest", "user3:password3")
fake_connection = get_fake_zk()
fake_connection.add_auth("digest", "user1:password1")
fake_connection.add_auth("digest", "user2:password2")
fake_connection.add_auth("digest", "user3:password3")
compare_states(genuine_connection, fake_connection)
for connection in [genuine_connection, fake_connection]:
acls, stat = connection.get_acls("/test_multi_all_acl")
assert stat.aversion == 1
assert len(acls) == 3
for acl in acls:
assert acl.acl_list == ["READ", "CREATE", "DELETE", "ADMIN"]
assert acl.id.scheme == "digest"
assert acl.perms == 29
assert acl.id.id in (
"user1:XDkd2dsEuhc9ImU3q8pa8UOdtpI=",
"user2:lo/iTtNMP+gEZlpUNaCqLYO3i5U=",
"user3:wr5Y0kEs9nFX3bKrTMKxrlcFeWo=",
)
genuine_connection.stop()
genuine_connection.close()
fake_connection.stop()
fake_connection.close()