Fix build warnings

This commit is contained in:
alesapin 2019-03-27 19:23:38 +03:00
parent 22a75196ab
commit 5528edea93
8 changed files with 21 additions and 31 deletions

View File

@ -297,12 +297,12 @@ public:
/// Is the stride in rows between marks non-fixed (adaptive)?
bool is_adaptive;
/// Approximate bytes size of one granule
size_t index_granularity_bytes;
/// Fixed size in rows of one granule if index_granularity_bytes is zero
size_t fixed_index_granularity;
/// Approximate bytes size of one granule
size_t index_granularity_bytes;
IndexGranularityInfo(const MergeTreeSettings & settings);
String getMarksFilePath(const String & column_path) const

View File

@ -98,12 +98,13 @@ public:
size_t last_mark = 0;
DelayedStream stream;
MergeTreeReader * merge_tree_reader = nullptr;
const IndexGranularity * index_granularity = nullptr;
size_t current_mark_index_granularity = 0;
DelayedStream stream;
void checkNotFinished() const;
void checkEnoughSpaceInCurrentGranule(size_t num_rows) const;
size_t readRows(Block & block, size_t num_rows);

View File

@ -39,7 +39,7 @@ MergeTreeReader::MergeTreeReader(const String & path,
size_t aio_threshold, size_t max_read_buffer_size, const ValueSizeMap & avg_value_size_hints,
const ReadBufferFromFileBase::ProfileCallback & profile_callback,
clockid_t clock_type)
: avg_value_size_hints(avg_value_size_hints), path(path), data_part(data_part), columns(columns)
: data_part(data_part), avg_value_size_hints(avg_value_size_hints), path(path), columns(columns)
, uncompressed_cache(uncompressed_cache), mark_cache(mark_cache), save_marks_in_cache(save_marks_in_cache), storage(storage)
, all_mark_ranges(all_mark_ranges), aio_threshold(aio_threshold), max_read_buffer_size(max_read_buffer_size)
{

View File

@ -36,15 +36,16 @@ private:
std::string path_prefix;
std::string data_file_extension;
std::string marks_file_extension;
size_t marks_count;
size_t one_mark_bytes_size;
MarkCache * mark_cache;
bool save_marks_in_cache;
MarkCache::MappedPtr marks;
std::string marks_file_extension;
size_t one_mark_bytes_size;
std::unique_ptr<CachedCompressedReadBuffer> cached_buffer;
std::unique_ptr<CompressedReadBufferFromFile> non_cached_buffer;
};

View File

@ -137,10 +137,10 @@ void fillIndexGranularityImpl(
}
if (index_granularity_for_block == 0) /// very rare case when index granularity bytes is less than a single row
index_granularity_for_block = 1;
//std::cerr << "GRANULARITY SIZE IN ROWS:"<< index_granularity_for_block << std::endl;
for (size_t current_row = index_offset; current_row < rows_in_block; current_row += index_granularity_for_block)
index_granularity.appendMark(index_granularity_for_block);
}
void IMergedBlockOutputStream::fillIndexGranularity(const Block & block)

View File

@ -39,10 +39,10 @@ class Stream
public:
String base_name;
String bin_file_extension;
String mrk_file_extension;
String bin_file_path;
String mrk_file_path;
private:
String marks_file_extension;
const IndexGranularity & index_granularity;
ReadBufferFromFile file_buf;
HashingReadBuffer compressed_hashing_buf;
@ -59,10 +59,10 @@ public:
Stream(const String & path, const String & base_name, const String & bin_file_extension_, const String & mrk_file_extension_, const IndexGranularity & index_granularity_)
:
base_name(base_name),
bin_file_path(path + base_name + bin_file_extension_),
mrk_file_path(path + base_name + mrk_file_extension_),
bin_file_extension(bin_file_extension_),
marks_file_extension(mrk_file_extension_),
mrk_file_extension(mrk_file_extension_),
bin_file_path(path + base_name + bin_file_extension),
mrk_file_path(path + base_name + mrk_file_extension),
index_granularity(index_granularity_),
file_buf(bin_file_path),
compressed_hashing_buf(file_buf),
@ -79,7 +79,7 @@ public:
readIntBinary(mrk_mark.offset_in_decompressed_block, mrk_hashing_buf);
size_t mrk_rows;
//std::cerr << "File path:" << mrk_file_path << std::endl;
if (marks_file_extension == ".mrk2")
if (mrk_file_extension == ".mrk2")
readIntBinary(mrk_rows, mrk_hashing_buf);
else
mrk_rows = index_granularity.getMarkRows(mark_position);
@ -148,7 +148,7 @@ public:
compressed_hashing_buf.count(), compressed_hashing_buf.getHash(),
uncompressed_hashing_buf.count(), uncompressed_hashing_buf.getHash());
checksums.files[base_name + marks_file_extension] = MergeTreeData::DataPart::Checksums::Checksum(
checksums.files[base_name + mrk_file_extension] = MergeTreeData::DataPart::Checksums::Checksum(
mrk_hashing_buf.count(), mrk_hashing_buf.getHash());
}
};
@ -159,8 +159,7 @@ public:
MergeTreeData::DataPart::Checksums checkDataPart(
const String & full_path,
const IndexGranularity & adaptive_index_granularity,
const size_t fixed_granularity,
const String marks_file_extension,
const String & mrk_file_extension,
bool require_checksums,
const DataTypes & primary_key_data_types,
const MergeTreeIndices & indices,
@ -271,7 +270,7 @@ MergeTreeData::DataPart::Checksums checkDataPart(
/// Read and check skip indices.
for (const auto & index : indices)
{
Stream stream(path, index->getFileName(), ".idx", marks_file_extension, adaptive_index_granularity);
Stream stream(path, index->getFileName(), ".idx", mrk_file_extension, adaptive_index_granularity);
size_t mark_num = 0;
while (!stream.uncompressed_hashing_buf.eof())
@ -328,7 +327,7 @@ MergeTreeData::DataPart::Checksums checkDataPart(
name_type.type->enumerateStreams([&](const IDataType::SubstreamPath & substream_path)
{
String file_name = IDataType::getFileNameForStream(name_type.name, substream_path);
auto & stream = streams.try_emplace(file_name, path, file_name, ".bin", marks_file_extension, adaptive_index_granularity).first->second;
auto & stream = streams.try_emplace(file_name, path, file_name, ".bin", mrk_file_extension, adaptive_index_granularity).first->second;
try
{
if (!stream.mrk_hashing_buf.eof())
@ -419,15 +418,6 @@ MergeTreeData::DataPart::Checksums checkDataPart(
}
}
for (auto & [fname, checksum] : checksums_data.files)
{
std::cerr << "FILE NAME IN DATA:" << fname << std::endl;
}
for (auto & [fname, checksum] : checksums_txt.files)
{
std::cerr << "FILE NAME IN TXT:" << fname << std::endl;
}
if (require_checksums || !checksums_txt.files.empty())
checksums_txt.checkEqual(checksums_data, true);
@ -444,7 +434,6 @@ MergeTreeData::DataPart::Checksums checkDataPart(
return checkDataPart(
data_part->getFullPath(),
data_part->index_granularity,
data_part->storage.index_granularity_info.fixed_index_granularity,
data_part->storage.index_granularity_info.marks_file_extension,
require_checksums,
primary_key_data_types,

View File

@ -22,8 +22,7 @@ MergeTreeData::DataPart::Checksums checkDataPart(
MergeTreeData::DataPart::Checksums checkDataPart(
const String & full_path,
const IndexGranularity & index_granularity,
const size_t fixed_granularity,
const String marks_file_extension,
const String & marks_file_extension,
bool require_checksums,
const DataTypes & primary_key_data_types, /// Check the primary key. If it is not necessary, pass an empty array.
const MergeTreeIndices & indices = {}, /// Check skip indices

View File

@ -76,7 +76,7 @@ int main(int argc, char ** argv)
auto marks_file_extension = "." + mrk_file_path.getExtension();
bool require_checksums = parse<bool>(argv[2]);
checkDataPart(full_path, adaptive_granularity, fixed_granularity, marks_file_extension, require_checksums, {});
checkDataPart(full_path, adaptive_granularity, marks_file_extension, require_checksums, {});
}
catch (...)
{