doc minor changes, cleanup, krb5-user as a recommended package
commit fc0a1af214 (parent 7e97814859)
@@ -33,11 +33,6 @@ set(CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/CMake" ${CMAKE_MODULE_PATH})
 include(Platform)
 include(Options)
 
-# # prefer shared libraries
-# if (WITH_KERBEROS)
-# find_package(KERBEROS REQUIRED)
-# endif()
-
 # source
 set(PROTO_FILES
 #${HDFS3_SOURCE_DIR}/proto/encryption.proto
debian/control (vendored): 2 changes
@@ -40,7 +40,7 @@ Description: Common files for ClickHouse
 Package: clickhouse-server
 Architecture: all
 Depends: ${shlibs:Depends}, ${misc:Depends}, clickhouse-common-static (= ${binary:Version}), adduser
-Recommends: libcap2-bin
+Recommends: libcap2-bin, krb5-user
 Replaces: clickhouse-server-common, clickhouse-server-base
 Provides: clickhouse-server-common
 Description: Server binary for ClickHouse
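
The krb5-user package recommended here supplies the kinit client that a kerberized HDFS setup needs on the server host. A minimal sketch, in Python and with a hypothetical principal and keytab path, of obtaining a ticket non-interactively before ClickHouse touches kerberized HDFS:

```python
# Minimal sketch: acquire a Kerberos ticket with the kinit binary shipped by krb5-user.
# The principal and keytab path are placeholders, not values from this change.
import shutil
import subprocess


def ensure_ticket(principal: str = "clickhouse@EXAMPLE.COM",
                  keytab: str = "/etc/clickhouse-server/clickhouse.keytab") -> None:
    """Run kinit non-interactively; fail clearly if krb5-user is not installed."""
    if shutil.which("kinit") is None:
        raise RuntimeError("kinit not found; install the recommended krb5-user package")
    subprocess.run(["kinit", "-kt", keytab, principal], check=True)


if __name__ == "__main__":
    ensure_ticket()
```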
@@ -174,7 +174,7 @@ Similar to GraphiteMergeTree, the HDFS engine supports extended configuration us
 | dfs\_domain\_socket\_path | "" |
 
 
-See [HDFS Configuration Reference ](https://hawq.apache.org/docs/userguide/2.3.0.0-incubating/reference/HDFSConfigurationParameterReference.html) for details.
+[HDFS Configuration Reference ](https://hawq.apache.org/docs/userguide/2.3.0.0-incubating/reference/HDFSConfigurationParameterReference.html) might explain some parameters.
 
 
 #### ClickHouse extras {#clickhouse-extras}
@@ -195,6 +195,8 @@ Note that due to libhdfs3 limitations only old-fashioned approach is supported,
 datanode communications are not secured by SASL (HADOOP\_SECURE\_DN\_USER is a reliable indicator of such
 security approach). Use tests/integration/test\_storage\_kerberized\_hdfs/hdfs_configs/bootstrap.sh for reference.
 
+kinit tool and configuration files are required.
+
 ## Virtual Columns {#virtual-columns}
 
 - `_path` — Path to the file.
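
With a ticket in place, the principal's short name goes into the URI authority, as in the integration test further below. A short sketch, assuming the clickhouse-driver Python package and a locally reachable server; the HDFS host and the suser principal are taken from that test, the ClickHouse host is a placeholder:

```python
# Sketch: create and read a kerberized HDFS table through clickhouse-driver.
# Assumes a valid Kerberos ticket (kinit) and a server that can reach kerberizedhdfs1.
from clickhouse_driver import Client

client = Client(host="localhost")  # placeholder ClickHouse host
client.execute(
    "CREATE TABLE hdfs_kerb (id UInt32, name String, weight Float64) "
    "ENGINE = HDFS('hdfs://suser@kerberizedhdfs1:9000/user/specuser/storage_user_two', 'TSV')"
)
print(client.execute("SELECT count() FROM hdfs_kerb"))
```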
@@ -38,7 +38,8 @@ sudo -H pip install \
 pytest-timeout \
 redis \
 tzlocal \
-urllib3
+urllib3 \
+requests-kerberos
 ```
 
 (highly not recommended) If you really want to use OS packages on modern debian/ubuntu instead of "pip": `sudo apt install -y docker docker-compose python3-pytest python3-dicttoxml python3-docker python3-pymysql python3-pymongo python3-tzlocal python3-kazoo python3-psycopg2 kafka-python python3-pytest-timeout python3-minio`
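
requests-kerberos is what gives the test harness SPNEGO (negotiate) authentication for HTTP endpoints such as WebHDFS. A sketch of typical usage, assuming a valid ticket from kinit; the URL and port are placeholders:

```python
# Sketch: SPNEGO-authenticated HTTP request with requests-kerberos.
# The WebHDFS endpoint below is a placeholder; a kinit ticket is assumed to exist.
import requests
from requests_kerberos import HTTPKerberosAuth, OPTIONAL

resp = requests.get(
    "http://kerberizedhdfs1:9870/webhdfs/v1/?op=LISTSTATUS",  # placeholder URL
    auth=HTTPKerberosAuth(mutual_authentication=OPTIONAL),
    timeout=10,
)
print(resp.status_code)
```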
@@ -59,10 +59,10 @@ def test_write_storage_not_expired(started_cluster):
 
 def test_two_users(started_cluster):
     node1.query("create table HDFSStorOne (id UInt32, name String, weight Float64) ENGINE = HDFS('hdfs://kerberizedhdfs1:9000/storage_user_one', 'TSV')")
-    node1.query("insert into HDFSStorOne values (1, 'IlyaReal', 86.00)")
+    node1.query("insert into HDFSStorOne values (1, 'Real', 86.00)")
 
     node1.query("create table HDFSStorTwo (id UInt32, name String, weight Float64) ENGINE = HDFS('hdfs://suser@kerberizedhdfs1:9000/user/specuser/storage_user_two', 'TSV')")
-    node1.query("insert into HDFSStorTwo values (1, 'IlyaIdeal', 74.00)")
+    node1.query("insert into HDFSStorTwo values (1, 'Ideal', 74.00)")
 
     select_read_1 = node1.query("select * from hdfs('hdfs://kerberizedhdfs1:9000/user/specuser/storage_user_two', 'TSV', 'id UInt64, text String, number Float64')")
 
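
Once the integration environment from tests/integration/README.md is prepared, the adjusted test can be run on its own. A sketch, assuming pytest is invoked from the repository root:

```python
# Sketch: run only the kerberized HDFS test; assumes docker and the pip deps above.
import sys

import pytest

sys.exit(pytest.main([
    "-s",
    "tests/integration/test_storage_kerberized_hdfs/test.py",
    "-k", "test_two_users",
]))
```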