clang-tidy, part 14

commit d3aa0e8ed8
parent 7334c13de9
@@ -22,7 +22,7 @@ struct GetStringTestRecord
     std::string result;
 };

-TEST(JSON_Suite, SimpleTest)
+TEST(JSONSuite, SimpleTest)
 {
     std::vector<GetStringTestRecord> test_data =
     {
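The rename from JSON_Suite to JSONSuite drops the underscore from the test suite name: GoogleTest's TEST macro joins the suite and test names with underscores into one generated class name (here JSONSuite_SimpleTest_Test), so names containing their own underscores are discouraged. A minimal standalone sketch of the renamed test, assuming GoogleTest is available; the assertion body is illustrative, not taken from this file:

#include <gtest/gtest.h>
#include <string>

// The generated test class is named <Suite>_<Test>_Test, so the suite and test
// names themselves should not contain underscores.
TEST(JSONSuite, SimpleTest)
{
    EXPECT_EQ(std::string("ok").size(), 2u);
}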
@@ -272,9 +272,9 @@ namespace Hashes
        };
        size_t res = 0;

-       for (size_t i = 0; i < 8; ++i)
+       for (auto & rand : random)
        {
-           res ^= random[i][UInt8(x)];
+           res ^= rand[UInt8(x)];
            x >>= 8;
        }

@@ -68,26 +68,26 @@ void run()

     const std::vector<std::function<bool()>> tests =
     {
-        std::bind(test1, std::ref(filename)),
-        std::bind(test2, std::ref(filename), std::ref(buf)),
-        std::bind(test3, std::ref(filename), std::ref(buf)),
-        std::bind(test4, std::ref(filename), std::ref(buf)),
-        std::bind(test5, std::ref(filename), std::ref(buf)),
-        std::bind(test6, std::ref(filename), std::ref(buf)),
-        std::bind(test7, std::ref(filename), std::ref(buf)),
-        std::bind(test8, std::ref(filename), std::ref(buf)),
-        std::bind(test9, std::ref(filename), std::ref(buf)),
-        std::bind(test10, std::ref(filename), std::ref(buf)),
-        std::bind(test11, std::ref(filename)),
-        std::bind(test12, std::ref(filename), std::ref(buf)),
-        std::bind(test13, std::ref(filename2), std::ref(buf2)),
-        std::bind(test14, std::ref(filename), std::ref(buf)),
-        std::bind(test15, std::ref(filename3), std::ref(buf3)),
-        std::bind(test16, std::ref(filename3), std::ref(buf3)),
-        std::bind(test17, std::ref(filename4), std::ref(buf4)),
-        std::bind(test18, std::ref(filename5), std::ref(buf5)),
-        std::bind(test19, std::ref(filename), std::ref(buf)),
-        std::bind(test20, std::ref(filename), std::ref(buf))
+        [&]{ return test1(filename); },
+        [&]{ return test2(filename, buf); },
+        [&]{ return test3(filename, buf); },
+        [&]{ return test4(filename, buf); },
+        [&]{ return test5(filename, buf); },
+        [&]{ return test6(filename, buf); },
+        [&]{ return test7(filename, buf); },
+        [&]{ return test8(filename, buf); },
+        [&]{ return test9(filename, buf); },
+        [&]{ return test10(filename, buf); },
+        [&]{ return test11(filename); },
+        [&]{ return test12(filename, buf); },
+        [&]{ return test13(filename2, buf2); },
+        [&]{ return test14(filename, buf); },
+        [&]{ return test15(filename3, buf3); },
+        [&]{ return test16(filename3, buf3); },
+        [&]{ return test17(filename4, buf4); },
+        [&]{ return test18(filename5, buf5); },
+        [&]{ return test19(filename, buf); },
+        [&]{ return test20(filename, buf); }
     };

     unsigned int num = 0;
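The hunk above swaps std::bind for capture-by-reference lambdas, in line with clang-tidy's modernize-avoid-bind check. A minimal standalone sketch of the same rewrite; check_file, filename and buf here are illustrative stand-ins, not the test functions from this file:

#include <functional>
#include <iostream>
#include <string>

// Stand-in for one of the test functions: takes shared state by reference.
static bool check_file(const std::string & name, std::string & buf)
{
    buf = "read from " + name;
    return !buf.empty();
}

int main()
{
    std::string filename = "example.bin";
    std::string buf;

    // Old style: std::bind needs std::ref to avoid copying the arguments.
    std::function<bool()> bound = std::bind(check_file, std::ref(filename), std::ref(buf));

    // New style: a capture-by-reference lambda is shorter and easier to read.
    std::function<bool()> lambda = [&]{ return check_file(filename, buf); };

    std::cout << bound() << ' ' << lambda() << '\n';
    return 0;
}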
@@ -96,9 +96,9 @@ void MergeTreeDataPartTTLInfos::write(WriteBuffer & out) const
     {
         if (!columns_ttl.empty())
             writeString(",", out);
-        writeString("\"table\":{\"min\":", out);
+        writeString(R"("table":{"min":)", out);
         writeIntText(table_ttl.min, out);
-        writeString(",\"max\":", out);
+        writeString(R"(,"max":)", out);
         writeIntText(table_ttl.max, out);
         writeString("}", out);
     }
@@ -106,17 +106,17 @@ void MergeTreeDataPartTTLInfos::write(WriteBuffer & out) const
     {
         if (!columns_ttl.empty() || table_ttl.min)
             writeString(",", out);
-        writeString("\"moves\":[", out);
+        writeString(R"("moves":[)", out);
         for (auto it = moves_ttl.begin(); it != moves_ttl.end(); ++it)
         {
             if (it != moves_ttl.begin())
                 writeString(",", out);

-            writeString("{\"expression\":", out);
+            writeString(R"({"expression":)", out);
             writeString(doubleQuoteString(it->first), out);
-            writeString(",\"min\":", out);
+            writeString(R"(,"min":)", out);
             writeIntText(it->second.min, out);
-            writeString(",\"max\":", out);
+            writeString(R"(,"max":)", out);
             writeIntText(it->second.max, out);
             writeString("}", out);
         }
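Both hunks in this file rewrite escaped JSON fragments as raw string literals (clang-tidy's modernize-raw-string-literal), which removes the backslash noise without changing the emitted bytes. A tiny sketch of the equivalence, using only the standard library:

#include <cassert>
#include <string>

int main()
{
    // The two literals denote exactly the same characters; the raw form simply
    // avoids escaping every inner quote.
    std::string escaped = "\"table\":{\"min\":";
    std::string raw = R"("table":{"min":)";
    assert(escaped == raw);
    return 0;
}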
@@ -295,12 +295,12 @@ void MergeTreeDataPartWriterWide::finishDataSerialization(IMergeTreeDataPart::Ch
         }
     }

-    for (auto it = column_streams.begin(); it != column_streams.end(); ++it)
+    for (auto & stream : column_streams)
     {
-        it->second->finalize();
+        stream.second->finalize();
         if (sync)
-            it->second->sync();
-        it->second->addToChecksums(checksums);
+            stream.second->sync();
+        stream.second->addToChecksums(checksums);
     }

     column_streams.clear();
@@ -109,18 +109,17 @@ size_t MergeTreeDataSelectExecutor::getApproximateTotalRowsToRead(
     /// We will find out how many rows we would have read without sampling.
     LOG_DEBUG(log, "Preliminary index scan with condition: " << key_condition.toString());

-    for (size_t i = 0; i < parts.size(); ++i)
+    for (const auto & part : parts)
     {
-        const MergeTreeData::DataPartPtr & part = parts[i];
         MarkRanges ranges = markRangesFromPKRange(part, key_condition, settings);

         /** In order to get a lower bound on the number of rows that match the condition on PK,
           * consider only guaranteed full marks.
           * That is, do not take into account the first and last marks, which may be incomplete.
           */
-        for (size_t j = 0; j < ranges.size(); ++j)
-            if (ranges[j].end - ranges[j].begin > 2)
-                rows_count += part->index_granularity.getRowsCountInRange({ranges[j].begin + 1, ranges[j].end - 1});
+        for (const auto & range : ranges)
+            if (range.end - range.begin > 2)
+                rows_count += part->index_granularity.getRowsCountInRange({range.begin + 1, range.end - 1});

     }

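This hunk converts index-based loops into range-based for loops (clang-tidy's modernize-loop-convert), which also makes the hand-written parts[i] alias unnecessary. A self-contained sketch of the pattern; Part and Range are illustrative stand-ins, not the ClickHouse types:

#include <cstddef>
#include <iostream>
#include <vector>

struct Range { size_t begin; size_t end; };
struct Part  { std::vector<Range> ranges; };

int main()
{
    std::vector<Part> parts(2);
    parts[0].ranges = {{0, 10}, {10, 11}};
    parts[1].ranges = {{0, 3}};

    // Old style: index into the vectors and alias the current element by hand.
    size_t rows_old = 0;
    for (size_t i = 0; i < parts.size(); ++i)
    {
        const Part & part = parts[i];
        for (size_t j = 0; j < part.ranges.size(); ++j)
            if (part.ranges[j].end - part.ranges[j].begin > 2)
                rows_old += part.ranges[j].end - part.ranges[j].begin;
    }

    // New style: range-based loops remove both the indices and the alias.
    size_t rows_new = 0;
    for (const auto & part : parts)
        for (const auto & range : part.ranges)
            if (range.end - range.begin > 2)
                rows_new += range.end - range.begin;

    std::cout << rows_old << ' ' << rows_new << '\n';  // prints "13 13"
    return 0;
}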
@@ -794,10 +793,8 @@ Pipes MergeTreeDataSelectExecutor::spreadMarkRangesAmongStreams(
     {
         /// Sequential query execution.

-        for (size_t part_index = 0; part_index < parts.size(); ++part_index)
+        for (const auto & part : parts)
         {
-            RangesInDataPart & part = parts[part_index];
-
             auto source = std::make_shared<MergeTreeSelectProcessor>(
                 data, part.data_part, max_block_size, settings.preferred_block_size_bytes,
                 settings.preferred_max_column_in_block_size_bytes, column_names, part.ranges, use_uncompressed_cache,
@@ -1025,13 +1022,13 @@ Pipes MergeTreeDataSelectExecutor::spreadMarkRangesAmongStreamsFinal(
     const auto data_settings = data.getSettings();
     size_t sum_marks = 0;
     size_t adaptive_parts = 0;
-    for (size_t i = 0; i < parts.size(); ++i)
+    for (const auto & part : parts)
     {
-        for (size_t j = 0; j < parts[i].ranges.size(); ++j)
-            sum_marks += parts[i].ranges[j].end - parts[i].ranges[j].begin;
+        for (const auto & range : part.ranges)
+            sum_marks += range.end - range.begin;

-        if (parts[i].data_part->index_granularity_info.is_adaptive)
-            adaptive_parts++;
+        if (part.data_part->index_granularity_info.is_adaptive)
+            ++adaptive_parts;
     }

     size_t index_granularity_bytes = 0;
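Besides the loop conversion, adaptive_parts++ becomes ++adaptive_parts. For a size_t the two forms compile to the same code; the pre-increment spelling is simply the consistent choice because post-increment on iterator-like types has to return a copy of the old value. A small sketch of the convention:

#include <list>

int main()
{
    std::list<int> values = {1, 2, 3};

    int count = 0;
    // ++it avoids materialising a copy of the iterator; ++count is equivalent
    // to count++ here and just follows the same convention.
    for (auto it = values.begin(); it != values.end(); ++it)
        ++count;

    return count == 3 ? 0 : 1;
}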
@@ -1049,10 +1046,8 @@ Pipes MergeTreeDataSelectExecutor::spreadMarkRangesAmongStreamsFinal(

     Pipes pipes;

-    for (size_t part_index = 0; part_index < parts.size(); ++part_index)
+    for (const auto & part : parts)
     {
-        RangesInDataPart & part = parts[part_index];
-
         auto source_processor = std::make_shared<MergeTreeSelectProcessor>(
             data, part.data_part, max_block_size, settings.preferred_block_size_bytes,
             settings.preferred_max_column_in_block_size_bytes, column_names, part.ranges, use_uncompressed_cache,
|
@ -34,7 +34,7 @@ StoragePtr TableFunctionGenerateRandom::executeImpl(const ASTPtr & ast_function,
|
|||||||
|
|
||||||
ASTs & args = args_func.at(0)->children;
|
ASTs & args = args_func.at(0)->children;
|
||||||
|
|
||||||
if (args.size() < 1)
|
if (args.empty())
|
||||||
throw Exception("Table function '" + getName() + "' requires at least one argument: "
|
throw Exception("Table function '" + getName() + "' requires at least one argument: "
|
||||||
" structure, [random_seed, max_string_length, max_array_length].",
|
" structure, [random_seed, max_string_length, max_array_length].",
|
||||||
ErrorCodes::NUMBER_OF_ARGUMENTS_DOESNT_MATCH);
|
ErrorCodes::NUMBER_OF_ARGUMENTS_DOESNT_MATCH);
|
||||||
|
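args.size() < 1 becomes args.empty(), matching clang-tidy's readability-container-size-empty check. A standalone sketch of the same guard; requireArguments and std::runtime_error stand in for the real function and the ClickHouse Exception class:

#include <stdexcept>
#include <string>
#include <vector>

void requireArguments(const std::vector<std::string> & args)
{
    // empty() states the intent directly and also works for containers whose
    // size() is not O(1); the old spelling was: if (args.size() < 1)
    if (args.empty())
        throw std::runtime_error("at least one argument is required");
}

int main()
{
    requireArguments({"structure"});
    return 0;
}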