Yatsishin Ilya 2021-05-27 10:58:12 +03:00
parent ae60a3dd3a
commit 55dd173535
2 changed files with 10 additions and 13 deletions

View File

@@ -121,7 +121,6 @@
"test_insert_into_distributed/test.py::test_prefer_localhost_replica",
"test_insert_into_distributed/test.py::test_reconnect",
"test_insert_into_distributed/test.py::test_table_function",
"test_insert_into_distributed_through_materialized_view/test.py::test_inserts_batching SKIPPED",
"test_insert_into_distributed_through_materialized_view/test.py::test_inserts_local",
"test_insert_into_distributed_through_materialized_view/test.py::test_reconnect",
"test_keeper_multinode_blocade_leader/test.py::test_blocade_leader",
@@ -192,8 +191,6 @@
"test_polymorphic_parts/test.py::test_in_memory_wal_rotate",
"test_polymorphic_parts/test.py::test_polymorphic_parts_basics[first_node0-second_node0]",
"test_polymorphic_parts/test.py::test_polymorphic_parts_basics[first_node1-second_node1]",
"test_polymorphic_parts/test.py::test_polymorphic_parts_diff_versions_2 SKIPPED",
"test_polymorphic_parts/test.py::test_polymorphic_parts_diff_versions SKIPPED",
"test_polymorphic_parts/test.py::test_polymorphic_parts_index",
"test_polymorphic_parts/test.py::test_polymorphic_parts_non_adaptive",
"test_quorum_inserts_parallel/test.py::test_parallel_quorum_actually_parallel",

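The entries removed above end in a stray " SKIPPED" suffix, presumably pasted in from test-runner output; matched verbatim against pytest node IDs, such entries could never hit a real test. A minimal sketch of how a skip list like this can be applied at collection time — not ClickHouse's actual runner; the file name "parallel_skip.json" and the conftest hook wiring are assumptions:

# conftest.py -- hypothetical wiring, for illustration only.
import json

import pytest

# Assumed file name; the diff does not show where this list lives.
with open("parallel_skip.json") as f:
    PARALLEL_SKIP = set(json.load(f))


def pytest_collection_modifyitems(config, items):
    for item in items:
        # Exact node-ID match: an entry carrying a trailing " SKIPPED"
        # suffix can never equal item.nodeid, so it is dead weight.
        if item.nodeid in PARALLEL_SKIP:
            item.add_marker(pytest.mark.skip(reason="listed in parallel skip list"))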
View File

@@ -8,7 +8,7 @@ from pyhdfs import HdfsClient
@pytest.fixture(scope="module")
-def cluster():
+def started_cluster():
    try:
        cluster = ClickHouseCluster(__file__)
        cluster.add_instance("node",
@@ -18,7 +18,7 @@ def cluster():
        cluster.start()
        logging.info("Cluster started")
-        fs = HdfsClient(hosts='localhost')
+        fs = HdfsClient(hosts=cluster.hdfs_ip)
        fs.mkdirs('/clickhouse')
        yield cluster
@@ -26,8 +26,8 @@ def cluster():
        cluster.shutdown()

-def assert_objects_count(cluster, objects_count, path='data/'):
-    fs = HdfsClient(hosts='localhost')
+def assert_objects_count(started_cluster, objects_count, path='data/'):
+    fs = HdfsClient(hosts=started_cluster.hdfs_ip)
    hdfs_objects = fs.listdir('/clickhouse')
    assert objects_count == len(hdfs_objects)
@@ -35,25 +35,25 @@ def assert_objects_count(cluster, objects_count, path='data/'):
@pytest.mark.parametrize(
    "log_engine,files_overhead,files_overhead_per_insert",
    [("TinyLog", 1, 1), ("Log", 2, 1), ("StripeLog", 1, 2)])
-def test_log_family_hdfs(cluster, log_engine, files_overhead, files_overhead_per_insert):
-    node = cluster.instances["node"]
+def test_log_family_hdfs(started_cluster, log_engine, files_overhead, files_overhead_per_insert):
+    node = started_cluster.instances["node"]

    node.query("CREATE TABLE hdfs_test (id UInt64) ENGINE={} SETTINGS disk = 'hdfs'".format(log_engine))

    node.query("INSERT INTO hdfs_test SELECT number FROM numbers(5)")
    assert node.query("SELECT * FROM hdfs_test") == "0\n1\n2\n3\n4\n"
-    assert_objects_count(cluster, files_overhead_per_insert + files_overhead)
+    assert_objects_count(started_cluster, files_overhead_per_insert + files_overhead)

    node.query("INSERT INTO hdfs_test SELECT number + 5 FROM numbers(3)")
    assert node.query("SELECT * FROM hdfs_test order by id") == "0\n1\n2\n3\n4\n5\n6\n7\n"
-    assert_objects_count(cluster, files_overhead_per_insert * 2 + files_overhead)
+    assert_objects_count(started_cluster, files_overhead_per_insert * 2 + files_overhead)

    node.query("INSERT INTO hdfs_test SELECT number + 8 FROM numbers(1)")
    assert node.query("SELECT * FROM hdfs_test order by id") == "0\n1\n2\n3\n4\n5\n6\n7\n8\n"
-    assert_objects_count(cluster, files_overhead_per_insert * 3 + files_overhead)
+    assert_objects_count(started_cluster, files_overhead_per_insert * 3 + files_overhead)

    node.query("TRUNCATE TABLE hdfs_test")
-    assert_objects_count(cluster, 0)
+    assert_objects_count(started_cluster, 0)

    node.query("DROP TABLE hdfs_test")