Merge pull request #10068 from ClickHouse/fix_check_table_with_index

Fix bug with uncompressed checksums in CHECK TABLE query
alexey-milovidov 2020-04-08 05:37:18 +03:00 committed by GitHub
commit dd333460d6
3 changed files with 44 additions and 14 deletions
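
Summary of the change: checkDataPart() verifies every file of a data part against checksums.txt. Previously, any file without an already-computed checksum (i.e. anything other than column data) was hashed as raw bytes, but some files, such as secondary index files, are stored compressed, and checksums.txt records both their compressed and uncompressed checksums, so CHECK TABLE could report spurious mismatches. The fix looks each file up in checksums.txt first: a non-zero uncompressed_size means the file is compressed and both checksums are computed via the checksum_compressed_file lambda; otherwise the raw-byte hash is kept. A regression test (query plus expected output) is added below.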


@ -63,6 +63,7 @@ IMergeTreeDataPart::Checksums checkDataPart(
     /// Real checksums based on contents of data. Must correspond to checksums.txt. If not - it means the data is broken.
     IMergeTreeDataPart::Checksums checksums_data;
+    /// This function calculates checksum for both compressed and decompressed contents of compressed file.
     auto checksum_compressed_file = [](const DiskPtr & disk_, const String & file_path)
     {
         auto file_buf = disk_->readFile(file_path);
@ -78,6 +79,7 @@ IMergeTreeDataPart::Checksums checkDataPart(
         };
     };
+    /// First calculate checksums for columns data
     if (part_type == MergeTreeDataPartType::COMPACT)
     {
         const auto & file_name = MergeTreeDataPartCompact::DATA_FILE_NAME_WITH_EXTENSION;
@ -99,20 +101,7 @@ IMergeTreeDataPart::Checksums checkDataPart(
         throw Exception("Unknown type in part " + path, ErrorCodes::UNKNOWN_PART_TYPE);
     }
-    for (auto it = disk->iterateDirectory(path); it->isValid(); it->next())
-    {
-        const String & file_name = it->name();
-        auto checksum_it = checksums_data.files.find(file_name);
-        if (checksum_it == checksums_data.files.end() && file_name != "checksums.txt" && file_name != "columns.txt")
-        {
-            auto file_buf = disk->readFile(it->path());
-            HashingReadBuffer hashing_buf(*file_buf);
-            hashing_buf.tryIgnore(std::numeric_limits<size_t>::max());
-            checksums_data.files[file_name] = IMergeTreeDataPart::Checksums::Checksum(hashing_buf.count(), hashing_buf.getHash());
-        }
-    }
-    /// Checksums from file checksums.txt. May be absent. If present, they are subsequently compared with the actual data checksums.
+    /// Checksums from the rest of the files listed in checksums.txt. May be absent. If present, they are subsequently compared with the actual data checksums.
     IMergeTreeDataPart::Checksums checksums_txt;
     if (require_checksums || disk->exists(path + "checksums.txt"))
@ -122,6 +111,31 @@ IMergeTreeDataPart::Checksums checkDataPart(
         assertEOF(*buf);
     }
+    const auto & checksum_files_txt = checksums_txt.files;
+    for (auto it = disk->iterateDirectory(path); it->isValid(); it->next())
+    {
+        const String & file_name = it->name();
+        auto checksum_it = checksums_data.files.find(file_name);
+        /// Skip files that we already calculated. Also skip metadata files that are not checksummed.
+        if (checksum_it == checksums_data.files.end() && file_name != "checksums.txt" && file_name != "columns.txt")
+        {
+            auto txt_checksum_it = checksum_files_txt.find(file_name);
+            if (txt_checksum_it == checksum_files_txt.end() || txt_checksum_it->second.uncompressed_size == 0)
+            {
+                /// The file is not compressed.
+                auto file_buf = disk->readFile(it->path());
+                HashingReadBuffer hashing_buf(*file_buf);
+                hashing_buf.tryIgnore(std::numeric_limits<size_t>::max());
+                checksums_data.files[file_name] = IMergeTreeDataPart::Checksums::Checksum(hashing_buf.count(), hashing_buf.getHash());
+            }
+            else /// If we have both compressed and uncompressed checksums in checksums.txt, then calculate them both
+            {
+                checksums_data.files[file_name] = checksum_compressed_file(disk, it->path());
+            }
+        }
+    }
     if (is_cancelled())
         return {};
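
The core of the fix is the per-file decision in the new loop. Below is a minimal standalone sketch of that decision using plain std types rather than ClickHouse's; the names (Checksum, ChecksumsByFile, should_checksum_as_compressed) are illustrative, not the actual API.

#include <cstdint>
#include <map>
#include <string>

/// Simplified stand-in for IMergeTreeDataPart::Checksums::Checksum.
struct Checksum
{
    uint64_t file_size = 0;
    uint64_t uncompressed_size = 0;  /// zero when the file is stored uncompressed
};

using ChecksumsByFile = std::map<std::string, Checksum>;

/// Returns true when checksums.txt records a non-zero uncompressed size for
/// the file, i.e. the file is stored compressed and must be checksummed as
/// both compressed and decompressed streams instead of a raw byte hash.
/// Before this fix, every non-column file was hashed as raw bytes, so
/// compressed files such as secondary index files failed CHECK TABLE.
bool should_checksum_as_compressed(const ChecksumsByFile & checksums_txt,
                                   const std::string & file_name)
{
    auto it = checksums_txt.find(file_name);
    return it != checksums_txt.end() && it->second.uncompressed_size != 0;
}

In the real code this predicate corresponds to the txt_checksum_it branch above, which picks between a HashingReadBuffer over the raw file and the checksum_compressed_file lambda.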


@ -0,0 +1 @@
all_1_1_0 1
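
This one-line file is the test's expected output (a .reference file): the part name all_1_1_0 followed by 1, indicating the check passed for that part.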


@ -0,0 +1,15 @@
SET check_query_single_value_result = 'false';
DROP TABLE IF EXISTS check_table_with_indices;
CREATE TABLE check_table_with_indices (
id UInt64,
data String,
INDEX a (id) type minmax GRANULARITY 3
) ENGINE = MergeTree() ORDER BY id;
INSERT INTO check_table_with_indices VALUES (0, 'test'), (1, 'test2');
CHECK TABLE check_table_with_indices;
DROP TABLE check_table_with_indices;
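
With check_query_single_value_result = 'false', CHECK TABLE returns one row per data part (matching the .reference file above) rather than a single aggregated value. Before this fix, this query would presumably flag the part as broken: the minmax index file is stored compressed, but was hashed as raw bytes, so its computed checksum could not match the compressed/uncompressed pair recorded in checksums.txt.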