From fc0a1af2141199af5315cf7453bda16dc089d45a Mon Sep 17 00:00:00 2001
From: Ilya Golshtein
Date: Thu, 19 Nov 2020 00:08:17 +0300
Subject: [PATCH] minor doc changes, cleanup, krb5-user as a recommended package

---
 contrib/libhdfs3-cmake/CMakeLists.txt                  | 5 -----
 debian/control                                         | 2 +-
 docs/en/engines/table-engines/integrations/hdfs.md     | 4 +++-
 tests/integration/README.md                            | 3 ++-
 tests/integration/test_storage_kerberized_hdfs/test.py | 4 ++--
 5 files changed, 8 insertions(+), 10 deletions(-)

diff --git a/contrib/libhdfs3-cmake/CMakeLists.txt b/contrib/libhdfs3-cmake/CMakeLists.txt
index 49b35d09431..60f4376bdea 100644
--- a/contrib/libhdfs3-cmake/CMakeLists.txt
+++ b/contrib/libhdfs3-cmake/CMakeLists.txt
@@ -33,11 +33,6 @@ set(CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/CMake" ${CMAKE_MODULE_PATH})
 include(Platform)
 include(Options)
 
-# # prefer shared libraries
-# if (WITH_KERBEROS)
-#     find_package(KERBEROS REQUIRED)
-# endif()
-
 # source
 set(PROTO_FILES
     #${HDFS3_SOURCE_DIR}/proto/encryption.proto

diff --git a/debian/control b/debian/control
index 9b34e982698..809b5bb5d2a 100644
--- a/debian/control
+++ b/debian/control
@@ -40,7 +40,7 @@ Description: Common files for ClickHouse
 Package: clickhouse-server
 Architecture: all
 Depends: ${shlibs:Depends}, ${misc:Depends}, clickhouse-common-static (= ${binary:Version}), adduser
-Recommends: libcap2-bin
+Recommends: libcap2-bin, krb5-user
 Replaces: clickhouse-server-common, clickhouse-server-base
 Provides: clickhouse-server-common
 Description: Server binary for ClickHouse

diff --git a/docs/en/engines/table-engines/integrations/hdfs.md b/docs/en/engines/table-engines/integrations/hdfs.md
index e85982e2b85..e77cfe0a113 100644
--- a/docs/en/engines/table-engines/integrations/hdfs.md
+++ b/docs/en/engines/table-engines/integrations/hdfs.md
@@ -174,7 +174,7 @@ Similar to GraphiteMergeTree, the HDFS engine supports extended configuration us
 | dfs\_domain\_socket\_path | "" |
 
-See [HDFS Configuration Reference ](https://hawq.apache.org/docs/userguide/2.3.0.0-incubating/reference/HDFSConfigurationParameterReference.html) for details.
+The [HDFS Configuration Reference](https://hawq.apache.org/docs/userguide/2.3.0.0-incubating/reference/HDFSConfigurationParameterReference.html) may explain some of these parameters.
 
 #### ClickHouse extras {#clickhouse-extras}
 
@@ -195,6 +195,8 @@ Note that due to libhdfs3 limitations only old-fashioned approach is supported,
 datanode communications are not secured by SASL (HADOOP\_SECURE\_DN\_USER is a reliable indicator of such
 security approach). Use tests/integration/test\_storage\_kerberized\_hdfs/hdfs_configs/bootstrap.sh for reference.
 
+The `kinit` tool and Kerberos configuration files are required.
+
 ## Virtual Columns {#virtual-columns}
 
 - `_path` — Path to the file.
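Reviewer note on the hdfs.md hunk above: the new sentence says the `kinit` tool and Kerberos configuration files are required. A minimal sketch of what that means in practice, written in the style of the kerberized integration test touched below; the keytab path and principal/realm here are illustrative assumptions, not values taken from this patch:

```python
import subprocess

# Illustrative values only -- substitute your own keytab and principal.
KEYTAB = "/etc/krb5.keytab"                  # assumed path
PRINCIPAL = "specuser@TEST.CLICKHOUSE.TECH"  # assumed realm

# `kinit -kt` obtains a Kerberos ticket non-interactively from a keytab;
# the HDFS engine can then authenticate using the resulting ticket cache.
subprocess.check_call(["kinit", "-kt", KEYTAB, PRINCIPAL])
```

This is also the point of the debian/control change: on Debian/Ubuntu the krb5-user package is what ships `kinit`, hence recommending it alongside the server.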
diff --git a/tests/integration/README.md b/tests/integration/README.md
index cea1bd6f893..cdfb6b1a70a 100644
--- a/tests/integration/README.md
+++ b/tests/integration/README.md
@@ -38,7 +38,8 @@ sudo -H pip install \
     pytest-timeout \
     redis \
     tzlocal \
-    urllib3
+    urllib3 \
+    requests-kerberos
 ```
 
 (highly not recommended) If you really want to use OS packages on modern debian/ubuntu instead of "pip": `sudo apt install -y docker docker-compose python3-pytest python3-dicttoxml python3-docker python3-pymysql python3-pymongo python3-tzlocal python3-kazoo python3-psycopg2 kafka-python python3-pytest-timeout python3-minio`

diff --git a/tests/integration/test_storage_kerberized_hdfs/test.py b/tests/integration/test_storage_kerberized_hdfs/test.py
index b5b330f7c78..a2a2a4ef88e 100644
--- a/tests/integration/test_storage_kerberized_hdfs/test.py
+++ b/tests/integration/test_storage_kerberized_hdfs/test.py
@@ -59,10 +59,10 @@ def test_write_storage_not_expired(started_cluster):
 
 def test_two_users(started_cluster):
     node1.query("create table HDFSStorOne (id UInt32, name String, weight Float64) ENGINE = HDFS('hdfs://kerberizedhdfs1:9000/storage_user_one', 'TSV')")
-    node1.query("insert into HDFSStorOne values (1, 'IlyaReal', 86.00)")
+    node1.query("insert into HDFSStorOne values (1, 'Real', 86.00)")
 
     node1.query("create table HDFSStorTwo (id UInt32, name String, weight Float64) ENGINE = HDFS('hdfs://suser@kerberizedhdfs1:9000/user/specuser/storage_user_two', 'TSV')")
-    node1.query("insert into HDFSStorTwo values (1, 'IlyaIdeal', 74.00)")
+    node1.query("insert into HDFSStorTwo values (1, 'Ideal', 74.00)")
 
     select_read_1 = node1.query("select * from hdfs('hdfs://kerberizedhdfs1:9000/user/specuser/storage_user_two', 'TSV', 'id UInt64, text String, number Float64')")
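Reviewer note on the README hunk above: `requests-kerberos` joins the pip list, presumably so the kerberized tests can make SPNEGO-authenticated HTTP calls to the Hadoop side. A minimal sketch of such a call, assuming a WebHDFS endpoint on the `kerberizedhdfs1` node from test.py; the URL, port, and operation are illustrative assumptions, not taken from this patch:

```python
import requests
from requests_kerberos import HTTPKerberosAuth  # the package added to the pip list above

# Hypothetical WebHDFS directory listing, authenticated via the local
# Kerberos ticket cache (a prior `kinit` must have succeeded).
resp = requests.get(
    "http://kerberizedhdfs1:50070/webhdfs/v1/?op=LISTSTATUS",
    auth=HTTPKerberosAuth(),
)
resp.raise_for_status()
print(resp.json())
```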