Added integration test for s3 table function

Diego Nieto (lesandie) 2022-08-27 18:33:04 +02:00
parent 7e183675e5
commit 9a07489151
3 changed files with 109 additions and 0 deletions

@@ -0,0 +1 @@
#!/usr/bin/env python3

@@ -0,0 +1,12 @@
<?xml version="1.0"?>
<!-- Uses named collections (ClickHouse 22.4+); see the sketch after this file. -->
<clickhouse>
    <named_collections>
        <nc_s3>
            <url>http://minio1:9001/root/data/</url>
            <access_key_id>minio</access_key_id>
            <secret_access_key>minio123</secret_access_key>
        </nc_s3>
    </named_collections>
</clickhouse>

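For context (not part of the commit): the nc_s3 collection above bundles the MinIO endpoint and credentials, so queries only need to pass the per-file arguments. A rough, illustrative sketch of the same call written without the named collection is shown below; it assumes the positional s3(url, access_key_id, secret_access_key, format, structure, compression_method) form and the node instance defined in the test module that follows.

# Illustrative sketch only: the same s3 call with endpoint and credentials
# passed inline instead of being taken from the nc_s3 named collection.
def equivalent_query_without_named_collection(node):
    return node.query(
        """
        SELECT count(*) FROM s3(
            'http://minio1:9001/root/data/test_file.tsv.gz',
            'minio', 'minio123',
            'TSV', 'number UInt64', 'gz'
        )
        """
    )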

@@ -0,0 +1,96 @@
import logging
import pytest

from helpers.cluster import ClickHouseCluster
from helpers.network import PartitionManager
from helpers.client import QueryRuntimeException

cluster = ClickHouseCluster(__file__)
node = cluster.add_instance(
    "node",
    main_configs=[
        "configs/config.d/minio.xml",
    ],
    with_minio=True,
)
settings = {
    # Single S3 connection and single insert thread.
    "s3_max_connections": "1",
    "max_insert_threads": "1",
    # Overwrite the target object on each INSERT instead of erroring if it exists.
    "s3_truncate_on_insert": "1",
    # Minimum multipart upload part size: 32 MiB.
    "s3_min_upload_part_size": "33554432",
}


@pytest.fixture(scope="module")
def started_cluster():
    try:
        logging.info("Starting cluster...")
        cluster.start()
        logging.info("Cluster started")

        yield cluster
    finally:
        logging.info("Stopping cluster")
        cluster.shutdown()
        logging.info("Cluster stopped")


def test_s3_table_functions(started_cluster):
    """
    Simple test to check s3 table function functionality.
    """
    node.query(
        """
            INSERT INTO FUNCTION s3
                (
                    nc_s3,
                    filename = 'test_file.tsv.gz',
                    format = 'TSV',
                    structure = 'number UInt64',
                    compression_method = 'gz'
                )
            SELECT * FROM numbers(1000000)
        """,
        settings=settings,
    )

    assert (
        node.query(
            """
            SELECT count(*) FROM s3
                (
                    nc_s3,
                    filename = 'test_file.tsv.gz',
                    format = 'TSV',
                    structure = 'number UInt64',
                    compression_method = 'gz'
                );
            """
        )
        == "1000000\n"
    )


def test_s3_table_functions_timeouts(started_cluster):
    """
    Inject a 1200 ms network delay so the INSERT into S3 times out;
    the test passes when QueryRuntimeException is raised.
    """
    with PartitionManager() as pm:
        # Add a 1200 ms delay to the node's network traffic.
        pm.add_network_delay(node, 1200)

        with pytest.raises(QueryRuntimeException):
            node.query(
                """
                INSERT INTO FUNCTION s3
                    (
                        nc_s3,
                        filename = 'test_file.tsv.gz',
                        format = 'TSV',
                        structure = 'number UInt64',
                        compression_method = 'gz'
                    )
                SELECT * FROM numbers(1000000)
                """,
                settings=settings,
            )