import time
import pytest
import os
import subprocess

from helpers.cluster import ClickHouseCluster, is_arm

if is_arm():
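    # The kerberized HDFS containers are not run on ARM hosts, so skip the whole module there.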
    pytestmark = pytest.mark.skip

cluster = ClickHouseCluster(__file__)
node1 = cluster.add_instance(
    "node1",
    with_kerberized_hdfs=True,
    user_configs=[],
    main_configs=["configs/hdfs.xml"],
)


@pytest.fixture(scope="module")
def started_cluster():
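    """Module-scoped fixture: start the ClickHouse cluster with kerberized HDFS and tear it down afterwards."""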
    try:
        cluster.start()

        yield cluster

    except Exception as ex:
        print(ex)
        raise ex
    finally:
        cluster.shutdown()


def test_read_table(started_cluster):
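    # Write a file through the HDFS API and read it back both via the API and via the hdfs() table function.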
    hdfs_api = started_cluster.hdfs_api

    data = "1\tSerialize\t555.222\n2\tData\t777.333\n"
    hdfs_api.write_data("/simple_table_function", data)

    api_read = hdfs_api.read_data("/simple_table_function")
    assert api_read == data

    select_read = node1.query(
        "select * from hdfs('hdfs://kerberizedhdfs1:9010/simple_table_function', 'TSV', 'id UInt64, text String, number Float64')"
    )
    assert select_read == data


def test_read_write_storage(started_cluster):
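    # Insert through an HDFS table engine and check the data both via the HDFS API and via SELECT from the table.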
    hdfs_api = started_cluster.hdfs_api

    node1.query(
        "create table SimpleHDFSStorage2 (id UInt32, name String, weight Float64) ENGINE = HDFS('hdfs://kerberizedhdfs1:9010/simple_storage1', 'TSV')"
    )
    node1.query("insert into SimpleHDFSStorage2 values (1, 'Mark', 72.53)")

    api_read = hdfs_api.read_data("/simple_storage1")
    assert api_read == "1\tMark\t72.53\n"

    select_read = node1.query("select * from SimpleHDFSStorage2")
    assert select_read == "1\tMark\t72.53\n"


def test_write_storage_not_expired(started_cluster):
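    # Sleep past the ticket lifetime, then insert: the write is still expected to succeed.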
    hdfs_api = started_cluster.hdfs_api

    node1.query(
        "create table SimpleHDFSStorageNotExpired (id UInt32, name String, weight Float64) ENGINE = HDFS('hdfs://kerberizedhdfs1:9010/simple_storage_not_expired', 'TSV')"
    )

    time.sleep(15)  # wait for ticket expiration
    node1.query("insert into SimpleHDFSStorageNotExpired values (1, 'Mark', 72.53)")

    api_read = hdfs_api.read_data("/simple_storage_not_expired")
    assert api_read == "1\tMark\t72.53\n"

    select_read = node1.query("select * from SimpleHDFSStorageNotExpired")
    assert select_read == "1\tMark\t72.53\n"


def test_two_users(started_cluster):
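    # Access HDFS both as the default user and as 'specuser' (suser@); the final reads only need to succeed, their results are not checked.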
    hdfs_api = started_cluster.hdfs_api

    node1.query(
        "create table HDFSStorOne (id UInt32, name String, weight Float64) ENGINE = HDFS('hdfs://kerberizedhdfs1:9010/storage_user_one', 'TSV')"
    )
    node1.query("insert into HDFSStorOne values (1, 'Real', 86.00)")

    node1.query(
        "create table HDFSStorTwo (id UInt32, name String, weight Float64) ENGINE = HDFS('hdfs://suser@kerberizedhdfs1:9010/user/specuser/storage_user_two', 'TSV')"
    )
    node1.query("insert into HDFSStorTwo values (1, 'Ideal', 74.00)")

    select_read_1 = node1.query(
        "select * from hdfs('hdfs://kerberizedhdfs1:9010/user/specuser/storage_user_two', 'TSV', 'id UInt64, text String, number Float64')"
    )

    select_read_2 = node1.query(
        "select * from hdfs('hdfs://suser@kerberizedhdfs1:9010/storage_user_one', 'TSV', 'id UInt64, text String, number Float64')"
    )


def test_read_table_expired(started_cluster):
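    # With the 'hdfskerberos' container paused the ticket cannot be refreshed, so the read is expected to fail with a KerberosInit error.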
    hdfs_api = started_cluster.hdfs_api

    data = "1\tSerialize\t555.222\n2\tData\t777.333\n"
    hdfs_api.write_data("/simple_table_function_relogin", data)

    started_cluster.pause_container("hdfskerberos")
    time.sleep(15)

    try:
        select_read = node1.query(
            "select * from hdfs('hdfs://reloginuser&kerberizedhdfs1:9010/simple_table_function', 'TSV', 'id UInt64, text String, number Float64')"
        )
        assert False, "Exception has to be thrown"
    except Exception as ex:
        assert "DB::Exception: KerberosInit failure:" in str(ex)

    started_cluster.unpause_container("hdfskerberos")


def test_prohibited(started_cluster):
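    # 'specuser' has no write access to /storage_user_two_prohibited, so the insert must fail with a permission error.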
    node1.query(
        "create table HDFSStorTwoProhibited (id UInt32, name String, weight Float64) ENGINE = HDFS('hdfs://suser@kerberizedhdfs1:9010/storage_user_two_prohibited', 'TSV')"
    )
    try:
        node1.query("insert into HDFSStorTwoProhibited values (1, 'SomeOne', 74.00)")
        assert False, "Exception has to be thrown"
    except Exception as ex:
        assert (
            "Unable to open HDFS file: /storage_user_two_prohibited error: Permission denied: user=specuser, access=WRITE"
            in str(ex)
        )


def test_cache_path(started_cluster):
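    # Setting hadoop.security.kerberos.ticket.cache.path per user is rejected, so this insert must fail.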
    node1.query(
        "create table HDFSStorCachePath (id UInt32, name String, weight Float64) ENGINE = HDFS('hdfs://dedicatedcachepath@kerberizedhdfs1:9010/storage_dedicated_cache_path', 'TSV')"
    )
    try:
        node1.query("insert into HDFSStorCachePath values (1, 'FatMark', 92.53)")
        assert False, "Exception has to be thrown"
    except Exception as ex:
        assert (
            "DB::Exception: hadoop.security.kerberos.ticket.cache.path cannot be set per user"
            in str(ex)
        )


if __name__ == "__main__":
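    # Manual debugging entry point: start the cluster and keep it up until a key is pressed.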
    cluster.start()
    input("Cluster created, press any key to destroy...")
    cluster.shutdown()