Style fixes per code review, documentation improvements, and a parameter-consistency check

This commit is contained in:
Ilya Golshtein 2020-12-11 00:52:05 +03:00
parent 815856bc3d
commit 9701d6a2d5
3 changed files with 25 additions and 12 deletions

View File

@ -179,9 +179,10 @@ Similar to GraphiteMergeTree, the HDFS engine supports extended configuration us
#### ClickHouse extras {#clickhouse-extras}
hadoop\_kerberos\_keytab
hadoop\_kerberos\_principal
hadoop\_kerberos\_kinit\_command
| **parameter** | **default value** |
|hadoop\_kerberos\_keytab | "" |
|hadoop\_kerberos\_principal | "" |
|hadoop\_kerberos\_kinit\_command | kinit |
#### Limitations {#limitations}
@ -195,7 +196,7 @@ Note that due to libhdfs3 limitations only old-fashioned approach is supported,
datanode communications are not secured by SASL (HADOOP\_SECURE\_DN\_USER is a reliable indicator of such
security approach). Use tests/integration/test\_storage\_kerberized\_hdfs/hdfs_configs/bootstrap.sh for reference.
kinit tool and configuration files are required.
If hadoop\_kerberos\_keytab, hadoop\_kerberos\_principal or hadoop\_kerberos\_kinit\_command is specified, kinit will be invoked. hadoop\_kerberos\_keytab and hadoop\_kerberos\_principal are mandatory in this case. kinit tool and krb5 configuration files are required.
## Virtual Columns {#virtual-columns}

View File

@ -15,6 +15,7 @@ namespace ErrorCodes
extern const int BAD_ARGUMENTS;
extern const int NETWORK_ERROR;
extern const int EXCESSIVE_ELEMENT_IN_CONFIG;
extern const int NO_ELEMENTS_IN_CONFIG;
}
const String HDFSBuilderWrapper::CONFIG_PREFIX = "hdfs";
@ -32,13 +33,13 @@ void HDFSBuilderWrapper::loadFromConfig(const Poco::Util::AbstractConfiguration
String key_name;
if (key == "hadoop_kerberos_keytab")
{
needKinit = true;
need_kinit = true;
hadoop_kerberos_keytab = config.getString(key_path);
continue;
}
else if (key == "hadoop_kerberos_principal")
{
needKinit = true;
need_kinit = true;
hadoop_kerberos_principal = config.getString(key_path);
#if USE_INTERNAL_HDFS3_LIBRARY
@ -49,7 +50,7 @@ void HDFSBuilderWrapper::loadFromConfig(const Poco::Util::AbstractConfiguration
}
else if (key == "hadoop_kerberos_kinit_command")
{
needKinit = true;
need_kinit = true;
hadoop_kerberos_kinit_command = config.getString(key_path);
continue;
}
@ -74,12 +75,21 @@ void HDFSBuilderWrapper::loadFromConfig(const Poco::Util::AbstractConfiguration
String HDFSBuilderWrapper::getKinitCmd()
{
if (hadoop_kerberos_keytab.empty() || hadoop_kerberos_principal.empty())
{
throw Exception("Not enough parameters to run kinit",
ErrorCodes::EXCESSIVE_ELEMENT_IN_CONFIG);
}
WriteBufferFromOwnString ss;
String cache_name = hadoop_security_kerberos_ticket_cache_path.empty() ?
String() :
(String(" -c \"") + hadoop_security_kerberos_ticket_cache_path + "\"");
// command to run looks like
// kinit -R -t /keytab_dir/clickhouse.keytab -k somebody@TEST.CLICKHOUSE.TECH || ..
ss << hadoop_kerberos_kinit_command << cache_name <<
" -R -t \"" << hadoop_kerberos_keytab << "\" -k " << hadoop_kerberos_principal <<
"|| " << hadoop_kerberos_kinit_command << cache_name << " -t \"" <<
@ -158,7 +168,7 @@ HDFSBuilderWrapper createHDFSBuilder(const String & uri_str, const Poco::Util::A
}
}
if (builder.needKinit)
if (builder.need_kinit)
{
builder.runKinit();
}

View File

@ -67,16 +67,18 @@ class HDFSBuilderWrapper
return config_stor.emplace_back(std::make_pair(k, v));
}
bool need_kinit{false};
static const String CONFIG_PREFIX;
private:
void loadFromConfig(const Poco::Util::AbstractConfiguration & config, const String & config_path, bool isUser = false);
String getKinitCmd();
bool needKinit{false};
void runKinit();
static const String CONFIG_PREFIX;
public:
hdfsBuilder *