clang-tidy, part 14

commit d3aa0e8ed8
parent 7334c13de9
@@ -22,7 +22,7 @@ struct GetStringTestRecord
     std::string result;
 };

-TEST(JSON_Suite, SimpleTest)
+TEST(JSONSuite, SimpleTest)
 {
     std::vector<GetStringTestRecord> test_data =
     {
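Side note on the rename above (my reading; the commit itself gives no rationale): Google Test pastes the suite and test names into a single generated class name, so underscores can make two different TEST() pairs collide, which is why the gtest documentation discourages underscores in these names. A minimal sketch with a placeholder body:

    // TEST(JSON_Suite, Simple) and TEST(JSON, Suite_Simple) would both generate a class
    // named JSON_Suite_Simple_Test, so underscore-free suite names such as JSONSuite are safer.
    #include <gtest/gtest.h>

    TEST(JSONSuite, SimpleTest)
    {
        EXPECT_EQ(1 + 1, 2);   // placeholder assertion, not the commit's test body
    }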
@@ -272,9 +272,9 @@ namespace Hashes
        };
        size_t res = 0;

-       for (size_t i = 0; i < 8; ++i)
+       for (auto & rand : random)
        {
-           res ^= random[i][UInt8(x)];
+           res ^= rand[UInt8(x)];
            x >>= 8;
        }

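The hunk above is the kind of rewrite clang-tidy's modernize-loop-convert check suggests: an index loop over a fixed-size table becomes a range-based for, so the element type and bounds come from the array itself. A standalone sketch of the same pattern (table and function names are stand-ins, not the code touched by the commit):

    #include <cstdint>

    static uint64_t table[8][256] = {};   // stand-in for the per-byte randomization table

    uint64_t hashBytes(uint64_t x)
    {
        uint64_t res = 0;
        for (auto & row : table)          // was: for (size_t i = 0; i < 8; ++i)
        {
            res ^= row[uint8_t(x)];       // was: table[i][uint8_t(x)]
            x >>= 8;
        }
        return res;
    }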
@@ -68,26 +68,26 @@ void run()

     const std::vector<std::function<bool()>> tests =
     {
-        std::bind(test1, std::ref(filename)),
-        std::bind(test2, std::ref(filename), std::ref(buf)),
-        std::bind(test3, std::ref(filename), std::ref(buf)),
-        std::bind(test4, std::ref(filename), std::ref(buf)),
-        std::bind(test5, std::ref(filename), std::ref(buf)),
-        std::bind(test6, std::ref(filename), std::ref(buf)),
-        std::bind(test7, std::ref(filename), std::ref(buf)),
-        std::bind(test8, std::ref(filename), std::ref(buf)),
-        std::bind(test9, std::ref(filename), std::ref(buf)),
-        std::bind(test10, std::ref(filename), std::ref(buf)),
-        std::bind(test11, std::ref(filename)),
-        std::bind(test12, std::ref(filename), std::ref(buf)),
-        std::bind(test13, std::ref(filename2), std::ref(buf2)),
-        std::bind(test14, std::ref(filename), std::ref(buf)),
-        std::bind(test15, std::ref(filename3), std::ref(buf3)),
-        std::bind(test16, std::ref(filename3), std::ref(buf3)),
-        std::bind(test17, std::ref(filename4), std::ref(buf4)),
-        std::bind(test18, std::ref(filename5), std::ref(buf5)),
-        std::bind(test19, std::ref(filename), std::ref(buf)),
-        std::bind(test20, std::ref(filename), std::ref(buf))
+        [&]{ return test1(filename); },
+        [&]{ return test2(filename, buf); },
+        [&]{ return test3(filename, buf); },
+        [&]{ return test4(filename, buf); },
+        [&]{ return test5(filename, buf); },
+        [&]{ return test6(filename, buf); },
+        [&]{ return test7(filename, buf); },
+        [&]{ return test8(filename, buf); },
+        [&]{ return test9(filename, buf); },
+        [&]{ return test10(filename, buf); },
+        [&]{ return test11(filename); },
+        [&]{ return test12(filename, buf); },
+        [&]{ return test13(filename2, buf2); },
+        [&]{ return test14(filename, buf); },
+        [&]{ return test15(filename3, buf3); },
+        [&]{ return test16(filename3, buf3); },
+        [&]{ return test17(filename4, buf4); },
+        [&]{ return test18(filename5, buf5); },
+        [&]{ return test19(filename, buf); },
+        [&]{ return test20(filename, buf); }
     };

     unsigned int num = 0;
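Replacing std::bind with capture-by-reference lambdas is what clang-tidy's modernize-avoid-bind check suggests: the std::ref wrappers disappear, the call stays visible to the compiler, and the intent is easier to read. A reduced, self-contained sketch of the same rewrite (check1/check2 are illustrative stand-ins for the test functions):

    #include <functional>
    #include <iostream>
    #include <string>
    #include <vector>

    static bool check1(std::string & name) { return !name.empty(); }
    static bool check2(std::string & name, std::vector<char> & buf) { return buf.size() >= name.size(); }

    int main()
    {
        std::string filename = "test.bin";
        std::vector<char> buf(16);

        // Before: std::bind(check2, std::ref(filename), std::ref(buf)); std::ref is needed to
        // pass by reference and the bound signature is erased. After: a short lambda.
        const std::vector<std::function<bool()>> tests =
        {
            [&]{ return check1(filename); },
            [&]{ return check2(filename, buf); },
        };

        for (const auto & test : tests)
            std::cout << (test() ? "ok" : "fail") << '\n';
    }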
@@ -96,9 +96,9 @@ void MergeTreeDataPartTTLInfos::write(WriteBuffer & out) const
     {
         if (!columns_ttl.empty())
             writeString(",", out);
-        writeString("\"table\":{\"min\":", out);
+        writeString(R"("table":{"min":)", out);
         writeIntText(table_ttl.min, out);
-        writeString(",\"max\":", out);
+        writeString(R"(,"max":)", out);
         writeIntText(table_ttl.max, out);
         writeString("}", out);
     }
@@ -106,17 +106,17 @@ void MergeTreeDataPartTTLInfos::write(WriteBuffer & out) const
     {
         if (!columns_ttl.empty() || table_ttl.min)
             writeString(",", out);
-        writeString("\"moves\":[", out);
+        writeString(R"("moves":[)", out);
         for (auto it = moves_ttl.begin(); it != moves_ttl.end(); ++it)
         {
             if (it != moves_ttl.begin())
                 writeString(",", out);

-            writeString("{\"expression\":", out);
+            writeString(R"({"expression":)", out);
             writeString(doubleQuoteString(it->first), out);
-            writeString(",\"min\":", out);
+            writeString(R"(,"min":)", out);
             writeIntText(it->second.min, out);
-            writeString(",\"max\":", out);
+            writeString(R"(,"max":)", out);
             writeIntText(it->second.max, out);
             writeString("}", out);
         }
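Both hunks in this file swap escaped-quote literals for raw string literals, the transformation modernize-raw-string-literal proposes when a literal is dominated by backslash escapes; the emitted JSON fragments are unchanged, only the source spelling differs. A small sketch demonstrating the equivalence:

    #include <cassert>
    #include <string>

    int main()
    {
        std::string escaped = "\"table\":{\"min\":";   // old spelling
        std::string raw = R"("table":{"min":)";        // new spelling
        assert(escaped == raw);                        // byte-for-byte identical contents
    }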
@@ -295,12 +295,12 @@ void MergeTreeDataPartWriterWide::finishDataSerialization(IMergeTreeDataPart::Ch
         }
     }

-    for (auto it = column_streams.begin(); it != column_streams.end(); ++it)
+    for (auto & stream : column_streams)
     {
-        it->second->finalize();
+        stream.second->finalize();
         if (sync)
-            it->second->sync();
-        it->second->addToChecksums(checksums);
+            stream.second->sync();
+        stream.second->addToChecksums(checksums);
     }

     column_streams.clear();
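Here the explicit iterator loop over column_streams (a map from column name to stream) becomes a range-for over its key/value pairs, so it->second simply turns into stream.second. A sketch with a stand-in stream type (SimpleStream is illustrative, not the commit's class); a structured binding, for (auto & [name, stream] : column_streams), would be a further readability step:

    #include <iostream>
    #include <map>
    #include <memory>
    #include <string>

    struct SimpleStream
    {
        void finalize() { std::cout << "finalize\n"; }
        void sync() { std::cout << "sync\n"; }
    };

    int main()
    {
        std::map<std::string, std::unique_ptr<SimpleStream>> column_streams;
        column_streams.emplace("x", std::make_unique<SimpleStream>());

        bool sync_requested = true;

        // was: for (auto it = column_streams.begin(); it != column_streams.end(); ++it)
        for (auto & stream : column_streams)
        {
            stream.second->finalize();
            if (sync_requested)
                stream.second->sync();
        }

        column_streams.clear();
    }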
@@ -109,18 +109,17 @@ size_t MergeTreeDataSelectExecutor::getApproximateTotalRowsToRead(
     /// We will find out how many rows we would have read without sampling.
     LOG_DEBUG(log, "Preliminary index scan with condition: " << key_condition.toString());

-    for (size_t i = 0; i < parts.size(); ++i)
+    for (const auto & part : parts)
     {
-        const MergeTreeData::DataPartPtr & part = parts[i];
         MarkRanges ranges = markRangesFromPKRange(part, key_condition, settings);

         /** In order to get a lower bound on the number of rows that match the condition on PK,
           * consider only guaranteed full marks.
           * That is, do not take into account the first and last marks, which may be incomplete.
           */
-        for (size_t j = 0; j < ranges.size(); ++j)
-            if (ranges[j].end - ranges[j].begin > 2)
-                rows_count += part->index_granularity.getRowsCountInRange({ranges[j].begin + 1, ranges[j].end - 1});
+        for (const auto & range : ranges)
+            if (range.end - range.begin > 2)
+                rows_count += part->index_granularity.getRowsCountInRange({range.begin + 1, range.end - 1});

     }

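The comment in the hunk above explains the lower-bound logic: within each mark range only the interior marks are guaranteed to fully match the PK condition, so the first and last mark are dropped and ranges spanning at most two marks contribute nothing. A worked example under an assumed fixed granularity (the real code asks part->index_granularity, which may be adaptive):

    #include <cstddef>
    #include <iostream>

    struct MarkRange { size_t begin; size_t end; };   // half-open [begin, end), as in MergeTree

    int main()
    {
        const size_t rows_per_mark = 8192;            // assumption for illustration only
        const MarkRange ranges[] = {{10, 15}, {20, 22}};

        size_t rows_lower_bound = 0;
        for (const auto & range : ranges)
            if (range.end - range.begin > 2)          // ranges of <= 2 marks have no guaranteed interior
                rows_lower_bound += ((range.end - 1) - (range.begin + 1)) * rows_per_mark;

        // {10, 15} covers 5 marks; its interior [11, 14) is 3 marks -> 3 * 8192 = 24576 rows.
        std::cout << rows_lower_bound << '\n';
    }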
@@ -794,10 +793,8 @@ Pipes MergeTreeDataSelectExecutor::spreadMarkRangesAmongStreams(
     {
         /// Sequential query execution.

-        for (size_t part_index = 0; part_index < parts.size(); ++part_index)
+        for (const auto & part : parts)
         {
-            RangesInDataPart & part = parts[part_index];
-
             auto source = std::make_shared<MergeTreeSelectProcessor>(
                 data, part.data_part, max_block_size, settings.preferred_block_size_bytes,
                 settings.preferred_max_column_in_block_size_bytes, column_names, part.ranges, use_uncompressed_cache,
@@ -1025,13 +1022,13 @@ Pipes MergeTreeDataSelectExecutor::spreadMarkRangesAmongStreamsFinal(
     const auto data_settings = data.getSettings();
     size_t sum_marks = 0;
     size_t adaptive_parts = 0;
-    for (size_t i = 0; i < parts.size(); ++i)
+    for (const auto & part : parts)
     {
-        for (size_t j = 0; j < parts[i].ranges.size(); ++j)
-            sum_marks += parts[i].ranges[j].end - parts[i].ranges[j].begin;
+        for (const auto & range : part.ranges)
+            sum_marks += range.end - range.begin;

-        if (parts[i].data_part->index_granularity_info.is_adaptive)
-            adaptive_parts++;
+        if (part.data_part->index_granularity_info.is_adaptive)
+            ++adaptive_parts;
     }

     size_t index_granularity_bytes = 0;
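Besides the loop conversion, adaptive_parts++ becomes ++adaptive_parts. For an integer the two compile to the same thing; prefix increment is just the habit worth keeping because postfix on class types (iterators in particular) returns a copy of the old value. A tiny sketch of that distinction:

    #include <cstddef>
    #include <list>

    int main()
    {
        size_t adaptive_parts = 0;
        ++adaptive_parts;            // identical codegen to adaptive_parts++ for an integer

        std::list<int> values = {1, 2, 3};
        auto it = values.begin();
        ++it;                        // advances in place
        it++;                        // also advances, but constructs and discards a copy of the iterator
        (void)it;
    }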
@@ -1049,10 +1046,8 @@ Pipes MergeTreeDataSelectExecutor::spreadMarkRangesAmongStreamsFinal(

     Pipes pipes;

-    for (size_t part_index = 0; part_index < parts.size(); ++part_index)
+    for (const auto & part : parts)
     {
-        RangesInDataPart & part = parts[part_index];
-
         auto source_processor = std::make_shared<MergeTreeSelectProcessor>(
             data, part.data_part, max_block_size, settings.preferred_block_size_bytes,
             settings.preferred_max_column_in_block_size_bytes, column_names, part.ranges, use_uncompressed_cache,
@@ -34,7 +34,7 @@ StoragePtr TableFunctionGenerateRandom::executeImpl(const ASTPtr & ast_function,

     ASTs & args = args_func.at(0)->children;

-    if (args.size() < 1)
+    if (args.empty())
         throw Exception("Table function '" + getName() + "' requires at least one argument: "
             " structure, [random_seed, max_string_length, max_array_length].",
             ErrorCodes::NUMBER_OF_ARGUMENTS_DOESNT_MATCH);
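args.size() < 1 can only hold when the container has no elements, so args.empty() states the same condition directly; this is the rewrite clang-tidy's readability-container-size-empty check performs. A self-contained sketch (requireArgs is an illustrative stand-in, not the table function's API):

    #include <stdexcept>
    #include <string>
    #include <vector>

    void requireArgs(const std::vector<std::string> & args)
    {
        if (args.empty())   // was: if (args.size() < 1)
            throw std::invalid_argument("at least one argument is required");
    }

    int main()
    {
        requireArgs({"structure"});   // fine; an empty argument list would throw
    }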