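# Integration tests for ClickHouse against a kerberized HDFS cluster:
# the HDFS table engine and the hdfs() table function under Kerberos auth.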
import time

import pytest
from helpers.cluster import ClickHouseCluster

cluster = ClickHouseCluster(__file__)
node1 = cluster.add_instance('node1', with_kerberized_hdfs=True, user_configs=[], main_configs=['configs/log_conf.xml', 'configs/hdfs.xml'])
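
# Module-scoped fixture: start the cluster once for the whole test module and
# always shut it down, even if startup fails.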
@pytest.fixture(scope="module")
def started_cluster():
    try:
        cluster.start()
        yield cluster
    except Exception as ex:
        print(ex)
        raise
    finally:
        cluster.shutdown()
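
# Round-trip check: write TSV data through the HDFS API, then read it back
# both via the API and via the hdfs() table function.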
def test_read_table(started_cluster):
    data = "1\tSerialize\t555.222\n2\tData\t777.333\n"
    started_cluster.hdfs_api.write_data("/simple_table_function", data)

    api_read = started_cluster.hdfs_api.read_data("/simple_table_function")
    assert api_read == data

    select_read = node1.query("select * from hdfs('hdfs://kerberizedhdfs1:9010/simple_table_function', 'TSV', 'id UInt64, text String, number Float64')")
    assert select_read == data
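
# Create an HDFS-engine table, insert through ClickHouse, and verify the data
# both through the HDFS API and through SELECT.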
def test_read_write_storage(started_cluster):
    node1.query("create table SimpleHDFSStorage2 (id UInt32, name String, weight Float64) ENGINE = HDFS('hdfs://kerberizedhdfs1:9010/simple_storage1', 'TSV')")
    node1.query("insert into SimpleHDFSStorage2 values (1, 'Mark', 72.53)")

    api_read = started_cluster.hdfs_api.read_data("/simple_storage1")
    assert api_read == "1\tMark\t72.53\n"

    select_read = node1.query("select * from SimpleHDFSStorage2")
    assert select_read == "1\tMark\t72.53\n"
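
# The Kerberos ticket in this setup is short-lived (it expires within the
# 45-second sleep below); the write afterwards must still succeed.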
def test_write_storage_not_expired(started_cluster):
    node1.query("create table SimpleHDFSStorageNotExpired (id UInt32, name String, weight Float64) ENGINE = HDFS('hdfs://kerberizedhdfs1:9010/simple_storage_not_expired', 'TSV')")

    time.sleep(45)  # wait for ticket expiration
    node1.query("insert into SimpleHDFSStorageNotExpired values (1, 'Mark', 72.53)")

    api_read = started_cluster.hdfs_api.read_data("/simple_storage_not_expired")
    assert api_read == "1\tMark\t72.53\n"
    select_read = node1.query("select * from SimpleHDFSStorageNotExpired")
    assert select_read == "1\tMark\t72.53\n"
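
# Access the same HDFS under two principals: the default user reads a path
# written as specuser and vice versa. There are no asserts on the results;
# the queries completing without a Kerberos error is the check here.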
def test_two_users(started_cluster):
    node1.query("create table HDFSStorOne (id UInt32, name String, weight Float64) ENGINE = HDFS('hdfs://kerberizedhdfs1:9010/storage_user_one', 'TSV')")
    node1.query("insert into HDFSStorOne values (1, 'Real', 86.00)")

    node1.query("create table HDFSStorTwo (id UInt32, name String, weight Float64) ENGINE = HDFS('hdfs://suser@kerberizedhdfs1:9010/user/specuser/storage_user_two', 'TSV')")
    node1.query("insert into HDFSStorTwo values (1, 'Ideal', 74.00)")

    select_read_1 = node1.query("select * from hdfs('hdfs://kerberizedhdfs1:9010/user/specuser/storage_user_two', 'TSV', 'id UInt64, text String, number Float64')")
    select_read_2 = node1.query("select * from hdfs('hdfs://suser@kerberizedhdfs1:9010/storage_user_one', 'TSV', 'id UInt64, text String, number Float64')")
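
# Pause the KDC container and let the ticket expire: a subsequent read must
# fail with a kinit error until the KDC is unpaused.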
def test_read_table_expired(started_cluster):
    data = "1\tSerialize\t555.222\n2\tData\t777.333\n"
    started_cluster.hdfs_api.write_data("/simple_table_function_relogin", data)

    started_cluster.pause_container('hdfskerberos')
    time.sleep(45)

    try:
        select_read = node1.query("select * from hdfs('hdfs://reloginuser&kerberizedhdfs1:9010/simple_table_function', 'TSV', 'id UInt64, text String, number Float64')")
        assert False, "Exception has to be thrown"
    except Exception as ex:
        assert "DB::Exception: kinit failure:" in str(ex)

    started_cluster.unpause_container('hdfskerberos')
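
# specuser has no write access to this path, so the insert must be rejected
# with an HDFS permission error.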
def test_prohibited(started_cluster):
    node1.query("create table HDFSStorTwoProhibited (id UInt32, name String, weight Float64) ENGINE = HDFS('hdfs://suser@kerberizedhdfs1:9010/storage_user_two_prohibited', 'TSV')")

    try:
        node1.query("insert into HDFSStorTwoProhibited values (1, 'SomeOne', 74.00)")
        assert False, "Exception has to be thrown"
    except Exception as ex:
        assert "Unable to open HDFS file: /storage_user_two_prohibited error: Permission denied: user=specuser, access=WRITE" in str(ex)
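
# Per the expected error below, hadoop.security.kerberos.ticket.cache.path
# cannot be configured per user; the server must reject the attempt.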
def test_cache_path(started_cluster):
    node1.query("create table HDFSStorCachePath (id UInt32, name String, weight Float64) ENGINE = HDFS('hdfs://dedicatedcachepath@kerberizedhdfs1:9010/storage_dedicated_cache_path', 'TSV')")

    try:
        node1.query("insert into HDFSStorCachePath values (1, 'FatMark', 92.53)")
        assert False, "Exception has to be thrown"
    except Exception as ex:
        assert "DB::Exception: hadoop.security.kerberos.ticket.cache.path cannot be set per user" in str(ex)
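
# Manual debugging entry point: start the cluster and keep it alive until a
# key is pressed.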
if __name__ == '__main__':
    cluster.start()
    input("Cluster created, press any key to destroy...")
    cluster.shutdown()