ClickHouse/tests/integration/test_intersecting_parts/test.py


import logging
import pytest
from helpers.cluster import ClickHouseCluster
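# A single instance backed by ZooKeeper: ReplicatedMergeTree (used below) needs the
# ZooKeeper provided by with_zookeeper=True.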
cluster = ClickHouseCluster(__file__)
node = cluster.add_instance("node", with_zookeeper=True)


@pytest.fixture(scope="module")
def started_cluster():
    try:
        cluster.start()
        yield cluster
    finally:
        cluster.shutdown()


# This test constructs intersecting parts intentionally. It is not an elegant test.
# TODO(hanfei): write a test which selects part 1_1 merging with part 2_2 and a drop range.
def test_intersect_parts_when_restart(started_cluster):
    node.query(
"""
CREATE TABLE data (
key Int
)
ENGINE = ReplicatedMergeTree('/ch/tables/default/data', 'node')
ORDER BY key;
"""
    )
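    # Stop the background cleanup thread so it does not interfere with the part
    # directories the test manipulates on disk below. Each INSERT then creates its
    # own part: all_0_0_0 (key 1) through all_3_3_0 (key 4).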
node.query("system stop cleanup data")
node.query("INSERT INTO data values (1)")
node.query("INSERT INTO data values (2)")
node.query("INSERT INTO data values (3)")
node.query("INSERT INTO data values (4)")
node.query("ALTER TABLE data DROP PART 'all_1_1_0'")
node.query("ALTER TABLE data DROP PART 'all_2_2_0'")
node.query("OPTIMIZE TABLE data FINAL")
    part_path = node.query(
        "SELECT path FROM system.parts WHERE table = 'data' and name = 'all_0_3_1'"
    ).strip()
    assert len(part_path) != 0
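    # Detach the table so its on-disk part directories can be modified while the
    # server is not using them.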
node.query("detach table data")
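    # Copy all_0_3_1 to a directory named all_1_2_3: it claims block range 1..2, which
    # lies inside all_0_3_1's range 0..3, so the two parts intersect on disk.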
    new_path = part_path[:-6] + "1_2_3"
    node.exec_in_container(
        [
            "bash",
            "-c",
"cp -r {p} {p1}".format(p=part_path, p1=new_path),
],
privileged=True,
)
    # mock an empty part by overwriting count.txt (the part's row count) with 0
    node.exec_in_container(
        [
            "bash",
            "-c",
"echo -n 0 > {p1}/count.txt".format(p1=new_path),
],
privileged=True,
)
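    # On attach, the server has to resolve the intersecting parts. The crafted all_1_2_3
    # is empty, so only the rows of all_0_3_1 (keys 1 and 4) are expected to survive,
    # giving sum(key) == 5.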
node.query("attach table data")
data_size = node.query("SELECT sum(key) FROM data").strip()
    assert data_size == "5"