Little better [#CLICKHOUSE-3231].
This commit is contained in:
parent 731b4b4728
commit 164ad05ae1
@@ -268,9 +268,7 @@ MergeTreeData::MutableDataPartPtr Fetcher::fetchPartImpl(
     new_data_part->info = MergeTreePartInfo::fromPartName(part_name);
     MergeTreePartInfo::parseMinMaxDatesFromPartName(part_name, new_data_part->min_date, new_data_part->max_date);
     new_data_part->modification_time = time(nullptr);
-    new_data_part->loadColumns(true);
-    new_data_part->loadChecksums(true);
-    new_data_part->loadIndex();
+    new_data_part->loadColumnsChecksumsIndex(true, false);
     new_data_part->is_sharded = false;
     new_data_part->checksums.checkEqual(checksums, false);
 
@@ -302,7 +302,7 @@ void MergeTreeData::loadDataParts(bool skip_sanity_checks)
     Poco::DirectoryIterator end;
     for (Poco::DirectoryIterator it(full_path); it != end; ++it)
     {
-        /// Skip temporary directories older than one day.
+        /// Skip temporary directories.
         if (startsWith(it.name(), "tmp"))
             continue;
 
@@ -326,10 +326,7 @@ void MergeTreeData::loadDataParts(bool skip_sanity_checks)
 
     try
     {
-        part->loadColumns(require_part_metadata);
-        part->loadChecksums(require_part_metadata);
-        part->loadIndex();
-        part->checkNotBroken(require_part_metadata);
+        part->loadColumnsChecksumsIndex(require_part_metadata, true);
     }
     catch (const Exception & e)
     {
@@ -1544,11 +1541,7 @@ MergeTreeData::MutableDataPartPtr MergeTreeData::loadPartAndFixMetadata(const St
     if (Poco::File(full_part_path + "columns.txt").exists())
         Poco::File(full_part_path + "columns.txt").remove();
 
-    part->loadColumns(false);
-    part->loadChecksums(false);
-    part->loadIndex();
-    part->checkNotBroken(false);
-
+    part->loadColumnsChecksumsIndex(false, true);
     part->modification_time = Poco::File(full_part_path).getLastModified().epochTime();
 
     /// If the checksums file is not present, calculate the checksums and write them to disk.
@@ -529,6 +529,17 @@ void MergeTreeDataPart::renameAddPrefix(bool to_detached, const String & prefix)
     renameTo(dst_name());
 }
 
+
+void MergeTreeDataPart::loadColumnsChecksumsIndex(bool require_columns_checksums, bool check_consistency)
+{
+    loadColumns(require_columns_checksums);
+    loadChecksums(require_columns_checksums);
+    loadIndex();
+    if (check_consistency)
+        checkConsistency(require_columns_checksums);
+}
+
+
 void MergeTreeDataPart::loadIndex()
 {
     /// Size - in number of marks.
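As a reading aid, here is a minimal standalone C++ sketch (not the commit's own code) of the control flow the new loadColumnsChecksumsIndex entry point gives callers: the three load steps always run, and the consistency check (the renamed checkNotBroken) is optional. The PartSketch type and its empty loader stubs are hypothetical stand-ins for MergeTreeDataPart and its now-private loaders.

// Hypothetical stand-in for MergeTreeDataPart, mirroring the consolidated
// loading sequence added in this commit. Loader bodies are empty stubs.
struct PartSketch
{
    void loadColumnsChecksumsIndex(bool require_columns_checksums, bool check_consistency)
    {
        loadColumns(require_columns_checksums);    // columns.txt (or derived from column files)
        loadChecksums(require_columns_checksums);  // checksums.txt, if present
        loadIndex();                               // primary key index
        if (check_consistency)
            checkConsistency(require_columns_checksums);
    }

private:
    void loadColumns(bool) {}
    void loadChecksums(bool) {}
    void loadIndex() {}
    void checkConsistency(bool) {}
};

int main()
{
    PartSketch part;
    // Fetch/send paths in this diff pass (true, false); loadDataParts and
    // loadPartAndFixMetadata pass check_consistency = true instead of the
    // former explicit checkNotBroken() call.
    part.loadColumnsChecksumsIndex(/*require_columns_checksums=*/true, /*check_consistency=*/false);
    part.loadColumnsChecksumsIndex(/*require_columns_checksums=*/false, /*check_consistency=*/true);
    return 0;
}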
@@ -627,7 +638,7 @@ void MergeTreeDataPart::loadColumns(bool require)
     columns.readText(file);
 }
 
-void MergeTreeDataPart::checkNotBroken(bool require_part_metadata)
+void MergeTreeDataPart::checkConsistency(bool require_part_metadata)
 {
     String path = getFullPath();
 
@@ -180,19 +180,12 @@ struct MergeTreeDataPart
     /// Renames a part by appending a prefix to the name. To_detached - also moved to the detached directory.
     void renameAddPrefix(bool to_detached, const String & prefix) const;
 
-    /// Loads index file. Also calculates this->size if size=0
-    void loadIndex();
-
-    /// If checksums.txt exists, reads files' checksums (and sizes) from it
-    void loadChecksums(bool require);
-
     /// Populates columns_to_size map (compressed size).
     void accumulateColumnSizes(ColumnToSize & column_to_size) const;
 
-    /// Reads columns names and types from columns.txt
-    void loadColumns(bool require);
-
-    void checkNotBroken(bool require_part_metadata);
+    /// Initialize columns (from columns.txt if exists, or create from column files if not).
+    /// Load checksums from checksums.txt if exists. Load index if required.
+    void loadColumnsChecksumsIndex(bool require_columns_checksums, bool check_consistency);
 
     /// Checks that .bin and .mrk files exist
     bool hasColumnFiles(const String & column) const;
@@ -200,6 +193,18 @@ struct MergeTreeDataPart
     /// For data in RAM ('index')
     size_t getIndexSizeInBytes() const;
     size_t getIndexSizeInAllocatedBytes() const;
+
+private:
+    /// Reads columns names and types from columns.txt
+    void loadColumns(bool require);
+
+    /// If checksums.txt exists, reads files' checksums (and sizes) from it
+    void loadChecksums(bool require);
+
+    /// Loads index file. Also calculates this->size if size=0
+    void loadIndex();
+
+    void checkConsistency(bool require_part_metadata);
 };
 
 }
@@ -107,9 +107,7 @@ void Service::processQuery(const Poco::Net::HTMLForm & params, ReadBuffer & body
     data_part->info = MergeTreePartInfo::fromPartName(part_name);
     MergeTreePartInfo::parseMinMaxDatesFromPartName(part_name, data_part->min_date, data_part->max_date);
     data_part->modification_time = time(nullptr);
-    data_part->loadColumns(true);
-    data_part->loadChecksums(true);
-    data_part->loadIndex();
+    data_part->loadColumnsChecksumsIndex(true, false);
     data_part->is_sharded = false;
     data_part->checksums.checkEqual(checksums, false);
 