Merge pull request #70982 from ClickHouse/fix-test_keeper_broken_logs

Fix `test_keeper_broken_logs`
Antonio Andelic 2024-10-24 07:16:39 +00:00 committed by GitHub
commit e2821c5e8b

@@ -1,4 +1,5 @@
import time
from multiprocessing.dummy import Pool
import pytest
@@ -52,15 +53,34 @@ def get_fake_zk(nodename, timeout=30.0):
    return _fake_zk_instance


def start_clickhouse(node):
    node.start_clickhouse()


def clean_start():
    nodes = [node1, node2, node3]
    for node in nodes:
        node.stop_clickhouse()

    p = Pool(3)
    waiters = []
    for node in nodes:
        node.exec_in_container(["rm", "-rf", "/var/lib/clickhouse/coordination/log"])
        node.exec_in_container(
            ["rm", "-rf", "/var/lib/clickhouse/coordination/snapshots"]
        )
        waiters.append(p.apply_async(start_clickhouse, (node,)))

    for waiter in waiters:
        waiter.wait()


def test_single_node_broken_log(started_cluster):
    clean_start()
    try:
        wait_nodes()
        node1_conn = get_fake_zk("node1")

        # Cleanup
        if node1_conn.exists("/test_broken_log") != None:
            node1_conn.delete("/test_broken_log")

        node1_conn.create("/test_broken_log")
        for _ in range(10):
            node1_conn.create(f"/test_broken_log/node", b"somedata1", sequence=True)
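The new clean_start helper above stops every node, wipes its coordination log and snapshot directories, and then restarts all three servers concurrently through a multiprocessing.dummy thread pool. A minimal standalone sketch of that restart pattern follows; the FakeNode class and its one-second startup delay are illustrative stand-ins, not part of the test:

```python
import time
from multiprocessing.dummy import Pool  # thread-backed pool with the multiprocessing.Pool API


class FakeNode:
    """Illustrative stand-in for a ClickHouse integration-test node handle."""

    def __init__(self, name):
        self.name = name

    def start_clickhouse(self):
        time.sleep(1)  # pretend the server takes a second to come up
        print(f"{self.name} started")


def start_clickhouse(node):
    node.start_clickhouse()


nodes = [FakeNode(f"node{i}") for i in range(1, 4)]

pool = Pool(3)  # one worker thread per node, so the three restarts overlap
waiters = [pool.apply_async(start_clickhouse, (node,)) for node in nodes]
for waiter in waiters:
    waiter.wait()  # block until every node has finished starting
```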
@@ -110,10 +130,12 @@ def test_single_node_broken_log(started_cluster):
        verify_nodes(node3_conn)
        assert node3_conn.get("/test_broken_log_final_node")[0] == b"somedata1"
-        assert (
-            node1.exec_in_container(["ls", "/var/lib/clickhouse/coordination/log"])
-            == "changelog_1_100000.bin\nchangelog_14_100013.bin\n"
-        )
+        node1_logs = (
+            node1.exec_in_container(["ls", "/var/lib/clickhouse/coordination/log"])
+            .strip()
+            .split("\n")
+        )
+        assert len(node1_logs) == 2 and node1_logs[0] == "changelog_1_100000.bin"

        assert (
            node2.exec_in_container(["ls", "/var/lib/clickhouse/coordination/log"])
            == "changelog_1_100000.bin\n"