Merge pull request #41230 from kssenii/minor-change

minor change in MergeTreeDataPartCompact
This commit is contained in:
Kseniia Sumarokova 2022-09-13 12:23:18 +02:00 committed by GitHub
commit 651d850652
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
2 changed files with 5 additions and 5 deletions

View File

@@ -93,7 +93,7 @@ void MergeTreeDataPartCompact::calculateEachColumnSizes(ColumnSizeByName & /*eac
 void MergeTreeDataPartCompact::loadIndexGranularityImpl(
     MergeTreeIndexGranularity & index_granularity_, const MergeTreeIndexGranularityInfo & index_granularity_info_,
-    const NamesAndTypesList & columns_, const DataPartStoragePtr & data_part_storage_)
+    size_t columns_count, const DataPartStoragePtr & data_part_storage_)
 {
     if (!index_granularity_info_.is_adaptive)
         throw Exception("MergeTreeDataPartCompact cannot be created with non-adaptive granulary.", ErrorCodes::NOT_IMPLEMENTED);
@@ -111,13 +111,13 @@ void MergeTreeDataPartCompact::loadIndexGranularityImpl(
     while (!buffer->eof())
     {
         /// Skip offsets for columns
-        buffer->seek(columns_.size() * sizeof(MarkInCompressedFile), SEEK_CUR);
+        buffer->seek(columns_count * sizeof(MarkInCompressedFile), SEEK_CUR);
         size_t granularity;
         readIntBinary(granularity, *buffer);
         index_granularity_.appendMark(granularity);
     }

-    if (index_granularity_.getMarksCount() * index_granularity_info_.getMarkSizeInBytes(columns_.size()) != marks_file_size)
+    if (index_granularity_.getMarksCount() * index_granularity_info_.getMarkSizeInBytes(columns_count) != marks_file_size)
         throw Exception("Cannot read all marks from file " + marks_file_path, ErrorCodes::CANNOT_READ_ALL_DATA);

     index_granularity_.setInitialized();
@@ -128,7 +128,7 @@ void MergeTreeDataPartCompact::loadIndexGranularity()
     if (columns.empty())
         throw Exception("No columns in part " + name, ErrorCodes::NO_FILE_IN_DATA_PART);

-    loadIndexGranularityImpl(index_granularity, index_granularity_info, columns, data_part_storage);
+    loadIndexGranularityImpl(index_granularity, index_granularity_info, columns.size(), data_part_storage);
 }

 bool MergeTreeDataPartCompact::hasColumnFiles(const NameAndTypePair & column) const

View File

@@ -68,7 +68,7 @@ public:
 protected:
     static void loadIndexGranularityImpl(
         MergeTreeIndexGranularity & index_granularity_, const MergeTreeIndexGranularityInfo & index_granularity_info_,
-        const NamesAndTypesList & columns_, const DataPartStoragePtr & data_part_storage_);
+        size_t columns_count, const DataPartStoragePtr & data_part_storage_);

 private:
     void checkConsistency(bool require_part_metadata) const override;