mirror of https://github.com/ClickHouse/ClickHouse.git (synced 2024-12-11 17:02:25 +00:00)

rename some symboles

commit 046a2ba51c
parent f02d769343
@@ -184,7 +184,7 @@ std::unique_ptr<IMergeTreeDataPart::MinMaxIndex> HiveOrcFile::buildMinMaxIndex(c
 }


-void HiveOrcFile::loadMinMaxIndex()
+void HiveOrcFile::loadFileMinMaxIndex()
 {
     if (!reader)
     {
@@ -193,7 +193,7 @@ void HiveOrcFile::loadMinMaxIndex()
     }

     auto statistics = reader->GetRawORCReader()->getStatistics();
-    minmax_idx = buildMinMaxIndex(statistics.get());
+    file_minmax_idx = buildMinMaxIndex(statistics.get());
 }

 bool HiveOrcFile::useSplitMinMaxIndex() const
@@ -202,7 +202,7 @@ bool HiveOrcFile::useSplitMinMaxIndex() const
 }


-void HiveOrcFile::loadSubMinMaxIndex()
+void HiveOrcFile::loadSplitMinMaxIndex()
 {
     if (!reader)
     {
@@ -218,11 +218,11 @@ void HiveOrcFile::loadSubMinMaxIndex()
             fmt::format("orc file:{} has different strip num {} and strip statistics num {}", path, stripe_num, stripe_stats_num),
             ErrorCodes::BAD_ARGUMENTS);

-    sub_minmax_idxes.resize(stripe_num);
+    split_minmax_idxes.resize(stripe_num);
     for (size_t i = 0; i < stripe_num; ++i)
     {
         auto stripe_stats = raw_reader->getStripeStatistics(i);
-        sub_minmax_idxes[i] = buildMinMaxIndex(stripe_stats.get());
+        split_minmax_idxes[i] = buildMinMaxIndex(stripe_stats.get());
     }
 }

@@ -239,7 +239,7 @@ void HiveParquetFile::prepareReader()
     THROW_ARROW_NOT_OK(parquet::arrow::OpenFile(asArrowFile(*in, format_settings, is_stopped), arrow::default_memory_pool(), &reader));
 }

-void HiveParquetFile::loadSubMinMaxIndex()
+void HiveParquetFile::loadSplitMinMaxIndex()
 {
     if (!reader)
         prepareReader();
@@ -256,12 +256,12 @@ void HiveParquetFile::loadSubMinMaxIndex()
     }


-    sub_minmax_idxes.resize(num_row_groups);
+    split_minmax_idxes.resize(num_row_groups);
     for (size_t i = 0; i < num_row_groups; ++i)
     {
         auto row_group_meta = meta->RowGroup(i);
-        sub_minmax_idxes[i] = std::make_shared<IMergeTreeDataPart::MinMaxIndex>();
-        sub_minmax_idxes[i]->hyperrectangle.resize(num_cols);
+        split_minmax_idxes[i] = std::make_shared<IMergeTreeDataPart::MinMaxIndex>();
+        split_minmax_idxes[i]->hyperrectangle.resize(num_cols);

         size_t j = 0;
         auto it = index_names_and_types.begin();
@@ -284,31 +284,31 @@ void HiveParquetFile::loadSubMinMaxIndex()

             if (auto bool_stats = std::dynamic_pointer_cast<parquet::BoolStatistics>(stats))
             {
-                sub_minmax_idxes[i]->hyperrectangle[j] = createRangeFromParquetStatistics<UInt8>(bool_stats);
+                split_minmax_idxes[i]->hyperrectangle[j] = createRangeFromParquetStatistics<UInt8>(bool_stats);
             }
             else if (auto int32_stats = std::dynamic_pointer_cast<parquet::Int32Statistics>(stats))
             {
-                sub_minmax_idxes[i]->hyperrectangle[j] = createRangeFromParquetStatistics<Int32>(int32_stats);
+                split_minmax_idxes[i]->hyperrectangle[j] = createRangeFromParquetStatistics<Int32>(int32_stats);
             }
             else if (auto int64_stats = std::dynamic_pointer_cast<parquet::Int64Statistics>(stats))
             {
-                sub_minmax_idxes[i]->hyperrectangle[j] = createRangeFromParquetStatistics<Int64>(int64_stats);
+                split_minmax_idxes[i]->hyperrectangle[j] = createRangeFromParquetStatistics<Int64>(int64_stats);
             }
             else if (auto float_stats = std::dynamic_pointer_cast<parquet::FloatStatistics>(stats))
             {
-                sub_minmax_idxes[i]->hyperrectangle[j] = createRangeFromParquetStatistics<Float64>(float_stats);
+                split_minmax_idxes[i]->hyperrectangle[j] = createRangeFromParquetStatistics<Float64>(float_stats);
             }
             else if (auto double_stats = std::dynamic_pointer_cast<parquet::FloatStatistics>(stats))
             {
-                sub_minmax_idxes[i]->hyperrectangle[j] = createRangeFromParquetStatistics<Float64>(double_stats);
+                split_minmax_idxes[i]->hyperrectangle[j] = createRangeFromParquetStatistics<Float64>(double_stats);
             }
             else if (auto string_stats = std::dynamic_pointer_cast<parquet::ByteArrayStatistics>(stats))
             {
-                sub_minmax_idxes[i]->hyperrectangle[j] = createRangeFromParquetStatistics(string_stats);
+                split_minmax_idxes[i]->hyperrectangle[j] = createRangeFromParquetStatistics(string_stats);
             }
             /// Other types are not supported for minmax index, skip
         }
-        sub_minmax_idxes[i]->initialized = true;
+        split_minmax_idxes[i]->initialized = true;
     }
 }

@@ -102,8 +102,8 @@ public:
     size_t getSize() const { return size; }
     const FieldVector & getPartitionValues() const { return partition_values; }
     const String & getNamenodeUrl() { return namenode_url; }
-    MinMaxIndexPtr getMinMaxIndex() const { return minmax_idx; }
-    const std::vector<MinMaxIndexPtr> & getSubMinMaxIndexes() const { return sub_minmax_idxes; }
+    MinMaxIndexPtr getMinMaxIndex() const { return file_minmax_idx; }
+    const std::vector<MinMaxIndexPtr> & getSubMinMaxIndexes() const { return split_minmax_idxes; }

     const std::unordered_set<int> & getSkipSplits() const { return skip_splits; }
     void setSkipSplits(const std::unordered_set<int> & skip_splits_) { skip_splits = skip_splits_; }
@@ -125,17 +125,17 @@ public:

     virtual bool useFileMinMaxIndex() const { return false; }

-    virtual void loadMinMaxIndex()
+    virtual void loadFileMinMaxIndex()
     {
-        throw Exception("Method loadMinMaxIndex is not supported by hive file:" + getFormatName(), ErrorCodes::NOT_IMPLEMENTED);
+        throw Exception("Method loadFileMinMaxIndex is not supported by hive file:" + getFormatName(), ErrorCodes::NOT_IMPLEMENTED);
     }

     /// If hive query could use contains sub-file level minmax index?
     virtual bool useSplitMinMaxIndex() const { return false; }

-    virtual void loadSubMinMaxIndex()
+    virtual void loadSplitMinMaxIndex()
     {
-        throw Exception("Method loadSubMinMaxIndex is not supported by hive file:" + getFormatName(), ErrorCodes::NOT_IMPLEMENTED);
+        throw Exception("Method loadSplitMinMaxIndex is not supported by hive file:" + getFormatName(), ErrorCodes::NOT_IMPLEMENTED);
     }

 protected:
@@ -145,8 +145,8 @@ protected:
     UInt64 last_modify_time;
     size_t size;
     NamesAndTypesList index_names_and_types;
-    MinMaxIndexPtr minmax_idx;
-    std::vector<MinMaxIndexPtr> sub_minmax_idxes;
+    MinMaxIndexPtr file_minmax_idx;
+    std::vector<MinMaxIndexPtr> split_minmax_idxes;
     /// Skip splits for this file after applying minmax index (if any)
     std::unordered_set<int> skip_splits;
     std::shared_ptr<HiveSettings> storage_settings;
@@ -192,10 +192,10 @@ public:

     FileFormat getFormat() const override { return FileFormat::ORC; }
     bool useFileMinMaxIndex() const override;
-    void loadMinMaxIndex() override;
+    void loadFileMinMaxIndex() override;

     bool useSplitMinMaxIndex() const override;
-    void loadSubMinMaxIndex() override;
+    void loadSplitMinMaxIndex() override;

 private:
     static Range buildRange(const orc::ColumnStatistics * col_stats);
@@ -228,7 +228,7 @@ public:
     FileFormat getFormat() const override { return FileFormat::PARQUET; }

     bool useSplitMinMaxIndex() const override;
-    void loadSubMinMaxIndex() override;
+    void loadSplitMinMaxIndex() override;

 private:
     void prepareReader();

@@ -545,7 +545,7 @@ HiveFilePtr StorageHive::createHiveFileIfNeeded(
     const KeyCondition hivefile_key_condition(query_info, getContext(), hivefile_name_types.getNames(), hivefile_minmax_idx_expr);
     if (hive_file->useFileMinMaxIndex())
     {
-        hive_file->loadMinMaxIndex();
+        hive_file->loadFileMinMaxIndex();
        if (!hivefile_key_condition.checkInHyperrectangle(hive_file->getMinMaxIndex()->hyperrectangle, hivefile_name_types.getTypes())
                 .can_be_true)
        {
@@ -559,7 +559,7 @@ HiveFilePtr StorageHive::createHiveFileIfNeeded(
     if (hive_file->useSplitMinMaxIndex())
     {
         std::unordered_set<int> skip_splits;
-        hive_file->loadSubMinMaxIndex();
+        hive_file->loadSplitMinMaxIndex();
         const auto & sub_minmax_idxes = hive_file->getSubMinMaxIndexes();
         for (size_t i = 0; i < sub_minmax_idxes.size(); ++i)
         {
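For context, here is a minimal stand-alone sketch of how a caller is expected to drive the renamed interface, modeled loosely on the StorageHive::createHiveFileIfNeeded hunks above. The HiveFileStub class, the simplified MinMaxIndex struct, and the canBeTrue predicate are placeholders for this illustration, not the actual ClickHouse types or the KeyCondition API.

#include <iostream>
#include <memory>
#include <unordered_set>
#include <vector>

// Simplified stand-in for the min/max index of one file or one split.
struct MinMaxIndex
{
    int min = 0;
    int max = 0;
    bool initialized = false;
};
using MinMaxIndexPtr = std::shared_ptr<MinMaxIndex>;

// Mirrors the renamed virtuals: file-level index (loadFileMinMaxIndex /
// file_minmax_idx) and per-split indexes (loadSplitMinMaxIndex / split_minmax_idxes).
class HiveFileStub
{
public:
    bool useFileMinMaxIndex() const { return true; }
    bool useSplitMinMaxIndex() const { return true; }

    void loadFileMinMaxIndex()
    {
        file_minmax_idx = std::make_shared<MinMaxIndex>(MinMaxIndex{0, 100, true});
    }

    void loadSplitMinMaxIndex()
    {
        split_minmax_idxes = {
            std::make_shared<MinMaxIndex>(MinMaxIndex{0, 10, true}),
            std::make_shared<MinMaxIndex>(MinMaxIndex{50, 100, true}),
        };
    }

    MinMaxIndexPtr getMinMaxIndex() const { return file_minmax_idx; }
    const std::vector<MinMaxIndexPtr> & getSubMinMaxIndexes() const { return split_minmax_idxes; }
    void setSkipSplits(const std::unordered_set<int> & skip_splits_) { skip_splits = skip_splits_; }

private:
    MinMaxIndexPtr file_minmax_idx;
    std::vector<MinMaxIndexPtr> split_minmax_idxes;
    std::unordered_set<int> skip_splits;
};

// Placeholder for the KeyCondition check: keep a file/split only if its
// [min, max] range can intersect the queried range [lo, hi].
static bool canBeTrue(const MinMaxIndex & idx, int lo, int hi)
{
    return idx.initialized && idx.max >= lo && idx.min <= hi;
}

int main()
{
    const int query_lo = 40, query_hi = 60;
    HiveFileStub hive_file;

    // File-level pruning: same shape as the first StorageHive hunk.
    if (hive_file.useFileMinMaxIndex())
    {
        hive_file.loadFileMinMaxIndex();
        if (!canBeTrue(*hive_file.getMinMaxIndex(), query_lo, query_hi))
        {
            std::cout << "whole file skipped\n";
            return 0;
        }
    }

    // Split-level pruning: same shape as the second StorageHive hunk.
    if (hive_file.useSplitMinMaxIndex())
    {
        std::unordered_set<int> skip_splits;
        hive_file.loadSplitMinMaxIndex();
        const auto & split_idxes = hive_file.getSubMinMaxIndexes();
        for (size_t i = 0; i < split_idxes.size(); ++i)
        {
            if (!canBeTrue(*split_idxes[i], query_lo, query_hi))
                skip_splits.insert(static_cast<int>(i));
        }
        hive_file.setSkipSplits(skip_splits);
        std::cout << skip_splits.size() << " split(s) skipped\n";
    }
    return 0;
}

The two-level flow is why the rename helps: "file" vs "split" makes it clear which index prunes the whole file and which prunes individual ORC stripes or Parquet row groups.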