2020-02-21 19:01:43 +00:00
|
|
|
import pytest
|
|
|
|
|
|
|
|
from helpers.cluster import ClickHouseCluster
|
|
|
|
|
|
|
|
# Shared cluster object for this test module; instances are registered below.
cluster = ClickHouseCluster(__file__)
|
2022-03-22 16:39:58 +00:00
|
|
|
# First shard; also the node the test queries are issued from.
node1 = cluster.add_instance("node1")
|
|
|
|
# Second shard; holds the upper half of the inserted key range.
node2 = cluster.add_instance("node2")
|
2020-02-21 19:01:43 +00:00
|
|
|
|
2020-09-16 04:26:10 +00:00
|
|
|
|
2020-02-21 19:01:43 +00:00
|
|
|
@pytest.fixture(scope="module")
def start_cluster():
    """Bring up the two-node cluster and populate the test table.

    Creates ``da_memory_efficient_shard`` on each node (same schema,
    partitioned by ``B % 2``) and loads 100000 rows per node with disjoint
    key ranges (node1: 0..99999, node2: 100000..199999). Yields the running
    cluster and guarantees shutdown afterwards.
    """
    # Per-node insert statements; node2's keys are offset so the two shards
    # hold disjoint A values.
    seed_statements = {
        node1: "insert into da_memory_efficient_shard select number, number from numbers(100000);",
        node2: "insert into da_memory_efficient_shard select number + 100000, number from numbers(100000);",
    }
    try:
        cluster.start()
        for instance, insert_stmt in seed_statements.items():
            instance.query(
                "create table da_memory_efficient_shard(A Int64, B Int64) Engine=MergeTree order by A partition by B % 2;"
            )
            instance.query(insert_stmt)
        yield cluster
    finally:
        # Always tear the containers down, even if setup failed part-way.
        cluster.shutdown()
|
|
|
|
|
|
|
|
|
|
|
|
def test_remote(start_cluster):
    """Check distributed aggregation over remote() with and without the
    memory-efficient merge path.

    Both settings values must produce identical results: a global
    uniqExact sum of 200000, and 100000 distinct keys per host.

    NOTE(fix): each ``node.query()`` call runs in its own client session,
    so a standalone ``SET ...`` statement did not affect the following
    SELECTs — the original test silently ran every query with default
    settings. The settings are now attached to each query via the
    ``SETTINGS`` clause so both code paths are actually exercised.
    """
    # Force the memory-efficient two-level merge path with tiny thresholds.
    mem_efficient = (
        " settings distributed_aggregation_memory_efficient = 1,"
        " group_by_two_level_threshold = 1, group_by_two_level_threshold_bytes = 1"
    )
    # Explicitly disable the memory-efficient path for the control runs.
    plain = " settings distributed_aggregation_memory_efficient = 0"

    sum_query = (
        "select sum(a) from (SELECT B, uniqExact(A) a FROM "
        "remote('node{1,2}', default.da_memory_efficient_shard) GROUP BY B)"
    )
    per_host_query = (
        "SELECT fullHostName() AS h, uniqExact(A) AS a FROM "
        "remote('node{1,2}', default.da_memory_efficient_shard) GROUP BY h ORDER BY h"
    )

    # Global distinct count: 200000 rows with unique A across both shards.
    assert node1.query(sum_query + mem_efficient) == "200000\n"
    assert node1.query(sum_query + plain) == "200000\n"

    # Per-host breakdown: 100000 distinct A values on each node.
    assert node1.query(per_host_query + mem_efficient) == "node1\t100000\nnode2\t100000\n"
    assert node1.query(per_host_query + plain) == "node1\t100000\nnode2\t100000\n"
|