improvements

This commit is contained in:
Yatsishin Ilya 2021-05-25 16:40:22 +03:00
parent 893cd47bd2
commit e9ccf906c4
4 changed files with 4 additions and 6 deletions

View File

@@ -7,9 +7,6 @@ cluster = ClickHouseCluster(__file__)
node1 = cluster.add_instance('node1', node1 = cluster.add_instance('node1',
main_configs=['configs/logs_config.xml']) main_configs=['configs/logs_config.xml'])
node2 = cluster.add_instance('node2',
main_configs=['configs/logs_config.xml'])
@pytest.fixture(scope="module") @pytest.fixture(scope="module")
def started_cluster(): def started_cluster():

View File

@@ -1,5 +1,6 @@
import time import time
import pytest import pytest
import logging
from helpers.cluster import ClickHouseCluster from helpers.cluster import ClickHouseCluster
from helpers.test_tools import assert_eq_with_retry from helpers.test_tools import assert_eq_with_retry

View File

@@ -7,7 +7,7 @@
default_realm = TEST.CLICKHOUSE.TECH default_realm = TEST.CLICKHOUSE.TECH
dns_lookup_realm = false dns_lookup_realm = false
dns_lookup_kdc = false dns_lookup_kdc = false
ticket_lifetime = 15s ticket_lifetime = 5s
forwardable = true forwardable = true
default_tgs_enctypes = des3-hmac-sha1 default_tgs_enctypes = des3-hmac-sha1
default_tkt_enctypes = des3-hmac-sha1 default_tkt_enctypes = des3-hmac-sha1

View File

@@ -53,7 +53,7 @@ def test_write_storage_not_expired(started_cluster):
node1.query("create table SimpleHDFSStorageNotExpired (id UInt32, name String, weight Float64) ENGINE = HDFS('hdfs://kerberizedhdfs1:9010/simple_storage_not_expired', 'TSV')") node1.query("create table SimpleHDFSStorageNotExpired (id UInt32, name String, weight Float64) ENGINE = HDFS('hdfs://kerberizedhdfs1:9010/simple_storage_not_expired', 'TSV')")
time.sleep(45) # wait for ticket expiration time.sleep(15) # wait for ticket expiration
node1.query("insert into SimpleHDFSStorageNotExpired values (1, 'Mark', 72.53)") node1.query("insert into SimpleHDFSStorageNotExpired values (1, 'Mark', 72.53)")
api_read = hdfs_api.read_data("/simple_storage_not_expired") api_read = hdfs_api.read_data("/simple_storage_not_expired")
@@ -83,7 +83,7 @@ def test_read_table_expired(started_cluster):
hdfs_api.write_data("/simple_table_function_relogin", data) hdfs_api.write_data("/simple_table_function_relogin", data)
started_cluster.pause_container('hdfskerberos') started_cluster.pause_container('hdfskerberos')
time.sleep(45) time.sleep(15)
try: try:
select_read = node1.query("select * from hdfs('hdfs://reloginuser&kerberizedhdfs1:9010/simple_table_function', 'TSV', 'id UInt64, text String, number Float64')") select_read = node1.query("select * from hdfs('hdfs://reloginuser&kerberizedhdfs1:9010/simple_table_function', 'TSV', 'id UInt64, text String, number Float64')")