Mirror of https://github.com/ClickHouse/ClickHouse.git (synced 2024-11-22 15:42:02 +00:00)

Merge pull request #9718 from ClickHouse/clang-tidy-3

clang-tidy, part 3

This commit is contained in: c98bf95003
.clang-tidy (32 changed lines):
@@ -57,6 +57,38 @@ Checks: '-*,
    bugprone-inaccurate-erase,
    bugprone-incorrect-roundings,
    bugprone-infinite-loop,
    bugprone-integer-division,
    bugprone-macro-parentheses,
    bugprone-macro-repeated-side-effects,
    bugprone-misplaced-operator-in-strlen-in-alloc,
    bugprone-misplaced-pointer-artithmetic-in-alloc,
    bugprone-misplaced-widening-cast,
    bugprone-move-forwarding-reference,
    bugprone-multiple-statement-macro,
    bugprone-parent-virtual-call,
    bugprone-posix-return,
    bugprone-reserved-identifier,
    bugprone-signed-char-misuse,
    bugprone-sizeof-container,
    bugprone-sizeof-expression,
    bugprone-string-constructor,
    bugprone-string-integer-assignment,
    bugprone-string-literal-with-embedded-nul,
    bugprone-suspicious-enum-usage,
    bugprone-suspicious-include,
    bugprone-suspicious-memset-usage,
    bugprone-suspicious-missing-comma,
    bugprone-suspicious-string-compare,
    bugprone-swapped-arguments,
    bugprone-terminating-continue,
    bugprone-throw-keyword-missing,
    bugprone-too-small-loop-variable,
    bugprone-undefined-memory-manipulation,
    bugprone-unhandled-self-assignment,
    bugprone-unused-raii,
    bugprone-unused-return-value,
    bugprone-use-after-move,
    bugprone-virtual-near-miss,

    boost-use-to-string,
'
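The hunks that follow fall into two groups: genuine fixes for findings of the newly enabled checks, and // NOLINT or /* NOLINT */ markers on lines where the flagged pattern is intentional (for example, deliberate use-after-move in tests). As a minimal, standalone sketch of how clang-tidy suppression comments behave (this file is illustrative and not part of the commit):

// nolint_demo.cpp - illustrative only, not from the commit.
#include <string>
#include <utility>
#include <vector>

int main()
{
    std::vector<std::string> src{"a", "b"};
    std::vector<std::string> dst = std::move(src);

    /// Intentional read of a moved-from object, as a test might do.
    /// Without the trailing comment, bugprone-use-after-move would flag this line;
    /// naming the check keeps all other diagnostics active on it.
    bool src_is_empty = src.empty(); // NOLINT(bugprone-use-after-move)

    return src_is_empty && dst.size() == 2 ? 0 : 1;
}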
@@ -677,7 +677,7 @@ public:
         if (!histogram.total)
             continue;

-        double average = histogram.total / histogram.buckets.size();
+        double average = double(histogram.total) / histogram.buckets.size();

         UInt64 new_total = 0;
         for (auto & bucket : histogram.buckets)
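This is the pattern bugprone-integer-division targets: both operands are integers, so the division truncates before the result is ever widened to double; casting one operand first keeps the fraction. A standalone sketch, not taken from the commit:

// integer_division_demo.cpp - illustrative only.
#include <cstdint>
#include <iostream>

int main()
{
    std::uint64_t total = 7;
    std::size_t buckets = 2;

    double truncated = total / buckets;      // integer division happens first: 3.0
    double exact = double(total) / buckets;  // floating-point division: 3.5

    std::cout << truncated << ' ' << exact << '\n';
    return 0;
}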
@@ -111,7 +111,7 @@ void ODBCBridge::defineOptions(Poco::Util::OptionSet & options)
             .binding("help")
             .callback(Poco::Util::OptionCallback<Me>(this, &Me::handleHelp)));

-    ServerApplication::defineOptions(options); /// Don't need complex BaseDaemon's .xml config
+    ServerApplication::defineOptions(options); // NOLINT Don't need complex BaseDaemon's .xml config
 }

 void ODBCBridge::initialize(Application & self)

@@ -138,7 +138,7 @@ void ODBCBridge::initialize(Application & self)
     initializeTerminationAndSignalProcessing();

-    ServerApplication::initialize(self);
+    ServerApplication::initialize(self); // NOLINT
 }

 void ODBCBridge::uninitialize()

@@ -175,7 +175,7 @@ int Server::run()
         std::cout << DBMS_NAME << " server version " << VERSION_STRING << VERSION_OFFICIAL << "." << std::endl;
         return 0;
     }
-    return Application::run();
+    return Application::run(); // NOLINT
 }

 void Server::initialize(Poco::Util::Application & self)
@@ -75,6 +75,9 @@ public:
     Node & operator =(const Node & src)
     {
+        if (this == &src)
+            return *this;
+
         node_name = src.node_name;
         level = src.level;
         inherited_access = src.inherited_access;

@@ -135,6 +135,9 @@ struct QuotaContext::Impl
 QuotaContext::Interval & QuotaContext::Interval::operator =(const Interval & src)
 {
+    if (this == &src)
+        return *this;
+
     randomize_interval = src.randomize_interval;
     duration = src.duration;
     end_of_interval.store(src.end_of_interval.load());
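Both hunks above follow the usual remedy for bugprone-unhandled-self-assignment: a copy-assignment operator that assigns members one by one first checks whether source and destination are the same object. A minimal sketch with a hypothetical class, not from the commit:

// self_assignment_demo.cpp - illustrative only.
#include <string>

struct Node
{
    std::string name;
    int level = 0;

    Node & operator=(const Node & src)
    {
        if (this == &src)   // guard against self-assignment
            return *this;

        name = src.name;
        level = src.level;
        return *this;
    }
};

int main()
{
    Node a;
    a.name = "root";

    Node & same = a;
    a = same;           // safe: the guard returns immediately
    return a.level;
}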
@@ -56,7 +56,7 @@ AggregateFunctionPtr createAggregateFunctionSimpleLinearRegression(
     FOR_LEASTSQR_TYPES_2(M, Float64)
 #define DISPATCH(T1, T2) \
     if (which_x.idx == TypeIndex::T1 && which_y.idx == TypeIndex::T2) \
-        return std::make_shared<AggregateFunctionSimpleLinearRegression<T1, T2>>( \
+        return std::make_shared<AggregateFunctionSimpleLinearRegression<T1, T2>>(/* NOLINT */ \
             arguments, \
             params \
         );

@@ -501,7 +501,7 @@ MutableColumns ColumnAggregateFunction::scatter(IColumn::ColumnIndex num_columns
     size_t num_rows = size();

     {
-        size_t reserve_size = num_rows / num_columns * 1.1; /// 1.1 is just a guess. Better to use n-sigma rule.
+        size_t reserve_size = double(num_rows) / num_columns * 1.1; /// 1.1 is just a guess. Better to use n-sigma rule.

         if (reserve_size > 1)
             for (auto & column : columns)
@@ -118,7 +118,7 @@ void ColumnVector<T>::getPermutation(bool reverse, size_t limit, int nan_directi
     if (s >= 256 && s <= std::numeric_limits<UInt32>::max())
     {
         PaddedPODArray<ValueWithIndex<T>> pairs(s);
-        for (UInt32 i = 0; i < s; ++i)
+        for (UInt32 i = 0; i < UInt32(s); ++i)
             pairs[i] = {data[i], i};

         RadixSort<RadixSortTraits<T>>::executeLSD(pairs.data(), s);

@@ -168,7 +168,7 @@ void collectSymbolsFromProgramHeaders(dl_phdr_info * info,
     const ElfW(Sym) * elf_sym = reinterpret_cast<const ElfW(Sym) *>(correct_address(info->dlpi_addr, it->d_un.d_ptr));

     /* Iterate over the symbol table */
-    for (ElfW(Word) sym_index = 0; sym_index < sym_cnt; ++sym_index)
+    for (ElfW(Word) sym_index = 0; sym_index < ElfW(Word)(sym_cnt); ++sym_index)
     {
         /// We are not interested in empty symbols.
         if (!elf_sym[sym_index].st_size)

@@ -225,7 +225,7 @@ void ThreadPoolImpl<Thread>::worker(typename std::list<Thread>::iterator thread_
     {
         std::unique_lock lock(mutex);
         if (!first_exception)
-            first_exception = std::current_exception();
+            first_exception = std::current_exception(); // NOLINT
         shutdown = true;
         --scheduled_jobs;
     }
@@ -43,7 +43,7 @@ int main(int argc, char ** argv)
     Arr arr2 = std::move(arr);

-    std::cerr << arr.size() << ", " << arr2.size() << std::endl;
+    std::cerr << arr.size() << ", " << arr2.size() << std::endl; // NOLINT

     for (auto & elem : arr2)
         std::cerr << elem << std::endl;

@@ -182,7 +182,7 @@ int main(int argc, char ** argv)
     }

     arr2 = std::move(arr1);
-    arr1.resize(n);
+    arr1.resize(n); // NOLINT

     std::cerr
         << "arr1.size(): " << arr1.size() << ", arr2.size(): " << arr2.size() << std::endl

@@ -409,7 +409,7 @@ static void test3()
     Array arr2{std::move(arr)};

-    ASSERT_CHECK((arr.empty()), res);
+    ASSERT_CHECK((arr.empty()), res); // NOLINT

     ASSERT_CHECK((arr2.size() == 3), res);
     ASSERT_CHECK((arr2[0] == 1), res);

@@ -428,7 +428,7 @@ static void test3()
     Array arr2{std::move(arr)};

-    ASSERT_CHECK((arr.empty()), res);
+    ASSERT_CHECK((arr.empty()), res); // NOLINT

     ASSERT_CHECK((arr2.size() == 5), res);
     ASSERT_CHECK((arr2[0] == 1), res);
|
@ -465,7 +465,7 @@ void SettingURI::deserialize(ReadBuffer & buf, SettingsBinaryFormat)
|
||||
case static_cast<UnderlyingType>(EnumType::NAME): return IO_NAME;
|
||||
|
||||
#define IMPLEMENT_SETTING_ENUM_FROM_STRING_HELPER_(NAME, IO_NAME) \
|
||||
if (s == IO_NAME) \
|
||||
if (s == (IO_NAME)) \
|
||||
{ \
|
||||
set(EnumType::NAME); \
|
||||
return; \
|
||||
@ -474,7 +474,7 @@ void SettingURI::deserialize(ReadBuffer & buf, SettingsBinaryFormat)
|
||||
#define IMPLEMENT_SETTING_ENUM_CONCAT_NAMES_HELPER_(NAME, IO_NAME) \
|
||||
if (!all_io_names.empty()) \
|
||||
all_io_names += ", "; \
|
||||
all_io_names += String("'") + IO_NAME + "'";
|
||||
all_io_names += String("'") + (IO_NAME) + "'";
|
||||
|
||||
|
||||
#define LOAD_BALANCING_LIST_OF_NAMES(M) \
|
||||
|
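These two hunks, and many of the ones below, apply the standard bugprone-macro-parentheses fix: macro parameters used inside expressions are wrapped in parentheses so that operator precedence at the expansion site cannot change the meaning. A standalone sketch with a hypothetical macro, not from the commit:

// macro_parentheses_demo.cpp - illustrative only.
#include <iostream>

#define SCALED_BAD(x) x * 10        // unparenthesized parameter
#define SCALED_GOOD(x) ((x) * 10)   // what the check asks for

int main()
{
    std::cout << SCALED_BAD(1 + 2) << '\n';   // expands to 1 + 2 * 10 == 21
    std::cout << SCALED_GOOD(1 + 2) << '\n';  // expands to ((1 + 2) * 10) == 30
    return 0;
}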
@@ -447,8 +447,8 @@ CacheDictionary::Attribute CacheDictionary::createAttributeWithType(const Attrib
     {
 #define DISPATCH(TYPE) \
     case AttributeUnderlyingType::ut##TYPE: \
-        attr.null_values = TYPE(null_value.get<NearestFieldType<TYPE>>()); \
-        attr.arrays = std::make_unique<ContainerType<TYPE>>(size); \
+        attr.null_values = TYPE(null_value.get<NearestFieldType<TYPE>>()); /* NOLINT */ \
+        attr.arrays = std::make_unique<ContainerType<TYPE>>(size); /* NOLINT */ \
         bytes_allocated += size * sizeof(TYPE); \
         break;
         DISPATCH(UInt8)

@@ -11,8 +11,8 @@ ComplexKeyCacheDictionary::createAttributeWithType(const AttributeUnderlyingType
     {
 #define DISPATCH(TYPE) \
     case AttributeUnderlyingType::ut##TYPE: \
-        attr.null_values = TYPE(null_value.get<NearestFieldType<TYPE>>()); \
-        attr.arrays = std::make_unique<ContainerType<TYPE>>(size); \
+        attr.null_values = TYPE(null_value.get<NearestFieldType<TYPE>>()); /* NOLINT */ \
+        attr.arrays = std::make_unique<ContainerType<TYPE>>(size); /* NOLINT */ \
         bytes_allocated += size * sizeof(TYPE); \
         break;
         DISPATCH(UInt8)

@@ -446,7 +446,7 @@ void HashedDictionary::addAttributeSize(const Attribute & attribute)
     /** TODO: more accurate calculation */
     bytes_allocated += sizeof(CollectionType<T>);
     bytes_allocated += bucket_count;
-    bytes_allocated += map_ref->size() * sizeof(Key) * sizeof(T);
+    bytes_allocated += map_ref->size() * (sizeof(Key) + sizeof(T));
 }
 }
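The HashedDictionary change looks like a genuine bugprone-sizeof-expression finding: multiplying sizeof(Key) by sizeof(T) does not approximate the memory of a key/value pair, adding them does. A standalone sketch of the accounting difference, not from the commit:

// sizeof_accounting_demo.cpp - illustrative only.
#include <cstdint>
#include <iostream>

int main()
{
    using Key = std::uint64_t;    // 8 bytes
    using Value = std::uint32_t;  // 4 bytes
    std::size_t element_count = 1000;

    std::size_t wrong = element_count * sizeof(Key) * sizeof(Value);   // 32000: the sizes get multiplied
    std::size_t right = element_count * (sizeof(Key) + sizeof(Value)); // 12000: per-element size is the sum

    std::cout << wrong << ' ' << right << '\n';
    return 0;
}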
@@ -183,7 +183,7 @@ namespace DB
         /// Do not store more than max_block_size values for one request.
         if (primary_with_secondary.size() == max_block_size + 1)
         {
-            hkeys.add(std::move(primary_with_secondary));
+            hkeys.add(primary_with_secondary);
             primary_with_secondary.clear();
             primary_with_secondary.addRedisType(key);
         }
@@ -273,30 +273,35 @@ UInt64 ProtobufReader::SimpleReader::continueReadingVarint(UInt64 first_byte)
     char c;

 #define PROTOBUF_READER_READ_VARINT_BYTE(byteNo) \
-    in.readStrict(c); \
-    ++cursor; \
-    if constexpr (byteNo < 10) \
-    { \
-        result |= static_cast<UInt64>(static_cast<UInt8>(c)) << (7 * (byteNo - 1)); \
-        if (likely(!(c & 0x80))) \
-            return result; \
-    } \
-    else \
-    { \
-        if (likely(c == 1)) \
-            return result; \
-    } \
-    if constexpr (byteNo < 9) \
-        result &= ~(static_cast<UInt64>(0x80) << (7 * (byteNo - 1)));
-    PROTOBUF_READER_READ_VARINT_BYTE(2)
-    PROTOBUF_READER_READ_VARINT_BYTE(3)
-    PROTOBUF_READER_READ_VARINT_BYTE(4)
-    PROTOBUF_READER_READ_VARINT_BYTE(5)
-    PROTOBUF_READER_READ_VARINT_BYTE(6)
-    PROTOBUF_READER_READ_VARINT_BYTE(7)
-    PROTOBUF_READER_READ_VARINT_BYTE(8)
-    PROTOBUF_READER_READ_VARINT_BYTE(9)
-    PROTOBUF_READER_READ_VARINT_BYTE(10)
+    do \
+    { \
+        in.readStrict(c); \
+        ++cursor; \
+        if constexpr ((byteNo) < 10) \
+        { \
+            result |= static_cast<UInt64>(static_cast<UInt8>(c)) << (7 * ((byteNo) - 1)); \
+            if (likely(!(c & 0x80))) \
+                return result; \
+        } \
+        else \
+        { \
+            if (likely(c == 1)) \
+                return result; \
+        } \
+        if constexpr ((byteNo) < 9) \
+            result &= ~(static_cast<UInt64>(0x80) << (7 * ((byteNo) - 1))); \
+    } while (false)
+
+    PROTOBUF_READER_READ_VARINT_BYTE(2);
+    PROTOBUF_READER_READ_VARINT_BYTE(3);
+    PROTOBUF_READER_READ_VARINT_BYTE(4);
+    PROTOBUF_READER_READ_VARINT_BYTE(5);
+    PROTOBUF_READER_READ_VARINT_BYTE(6);
+    PROTOBUF_READER_READ_VARINT_BYTE(7);
+    PROTOBUF_READER_READ_VARINT_BYTE(8);
+    PROTOBUF_READER_READ_VARINT_BYTE(9);
+    PROTOBUF_READER_READ_VARINT_BYTE(10);

 #undef PROTOBUF_READER_READ_VARINT_BYTE

     throwUnknownFormat();

@@ -307,28 +312,32 @@ void ProtobufReader::SimpleReader::ignoreVarint()
     char c;

 #define PROTOBUF_READER_IGNORE_VARINT_BYTE(byteNo) \
-    in.readStrict(c); \
-    ++cursor; \
-    if constexpr (byteNo < 10) \
-    { \
-        if (likely(!(c & 0x80))) \
-            return; \
-    } \
-    else \
-    { \
-        if (likely(c == 1)) \
-            return; \
-    }
-    PROTOBUF_READER_IGNORE_VARINT_BYTE(1)
-    PROTOBUF_READER_IGNORE_VARINT_BYTE(2)
-    PROTOBUF_READER_IGNORE_VARINT_BYTE(3)
-    PROTOBUF_READER_IGNORE_VARINT_BYTE(4)
-    PROTOBUF_READER_IGNORE_VARINT_BYTE(5)
-    PROTOBUF_READER_IGNORE_VARINT_BYTE(6)
-    PROTOBUF_READER_IGNORE_VARINT_BYTE(7)
-    PROTOBUF_READER_IGNORE_VARINT_BYTE(8)
-    PROTOBUF_READER_IGNORE_VARINT_BYTE(9)
-    PROTOBUF_READER_IGNORE_VARINT_BYTE(10)
+    do \
+    { \
+        in.readStrict(c); \
+        ++cursor; \
+        if constexpr ((byteNo) < 10) \
+        { \
+            if (likely(!(c & 0x80))) \
+                return; \
+        } \
+        else \
+        { \
+            if (likely(c == 1)) \
+                return; \
+        } \
+    } while (false)
+
+    PROTOBUF_READER_IGNORE_VARINT_BYTE(1);
+    PROTOBUF_READER_IGNORE_VARINT_BYTE(2);
+    PROTOBUF_READER_IGNORE_VARINT_BYTE(3);
+    PROTOBUF_READER_IGNORE_VARINT_BYTE(4);
+    PROTOBUF_READER_IGNORE_VARINT_BYTE(5);
+    PROTOBUF_READER_IGNORE_VARINT_BYTE(6);
+    PROTOBUF_READER_IGNORE_VARINT_BYTE(7);
+    PROTOBUF_READER_IGNORE_VARINT_BYTE(8);
+    PROTOBUF_READER_IGNORE_VARINT_BYTE(9);
+    PROTOBUF_READER_IGNORE_VARINT_BYTE(10);

 #undef PROTOBUF_READER_IGNORE_VARINT_BYTE

     throwUnknownFormat();
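Both varint macros are rewrapped in the do { ... } while (false) idiom and their expansion sites gain semicolons, presumably to satisfy checks such as bugprone-multiple-statement-macro: the macro then expands to a single statement and stays safe under if/else. A standalone sketch with a hypothetical macro, not from the commit:

// do_while_macro_demo.cpp - illustrative only.
#include <iostream>

// Without the do/while wrapper, only the first statement of the macro
// would be governed by the `if` at the expansion site below.
#define LOG_AND_COUNT(msg) \
    do \
    { \
        std::cout << (msg) << '\n'; \
        ++counter; \
    } while (false)

int main()
{
    int counter = 0;
    bool verbose = false;

    if (verbose)
        LOG_AND_COUNT("details");   // expands to a single statement; counter stays 0
    else
        std::cout << "quiet\n";

    return counter;
}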
@@ -846,7 +855,7 @@ private:
     std::unique_ptr<ProtobufReader::IConverter> ProtobufReader::createConverter<field_type_id>( \
         const google::protobuf::FieldDescriptor * field) \
     { \
-        return std::make_unique<ConverterFromNumber<field_type_id, field_type>>(simple_reader, field); \
+        return std::make_unique<ConverterFromNumber<field_type_id, field_type>>(simple_reader, field); /* NOLINT */ \
     }
     PROTOBUF_READER_CREATE_CONVERTER_SPECIALIZATION_FOR_NUMBERS(google::protobuf::FieldDescriptor::TYPE_INT32, Int64);
     PROTOBUF_READER_CREATE_CONVERTER_SPECIALIZATION_FOR_NUMBERS(google::protobuf::FieldDescriptor::TYPE_SINT32, Int64);
@@ -17,9 +17,7 @@ struct CRCBase
         {
             T c = i;
             for (size_t j = 0; j < 8; ++j)
-            {
                 c = c & 1 ? polynomial ^ (c >> 1) : c >> 1;
-            }
             tab[i] = c;
         }
     }

@@ -34,13 +32,9 @@ struct CRCImpl
     {
         static CRCBase<ReturnType> base(polynomial);

-        T i, crc;
-
-        crc = 0;
-        for (i = 0; i < size; i++)
-        {
-            crc = base.tab[(crc ^ buf[i]) & 0xff] ^ (crc >> 8);
-        }
+        T crc = 0;
+        for (size_t i = 0; i < size; i++)
+            crc = base.tab[(crc ^ buf[i]) & 0xff] ^ (crc >> 8);
         return crc;
     }
 };

@@ -63,10 +57,12 @@ struct CRC32ZLIBImpl
     static constexpr auto name = "CRC32";

     static UInt32 make_crc(const unsigned char *buf, size_t size)
-    { return crc32_z(0L, buf, size); }
+    {
+        return crc32_z(0L, buf, size);
+    }
 };

 } // \anonymous
 }

 namespace DB
 {
@@ -132,17 +132,17 @@ inline std::tuple<Encoded, Encoded> split(const Encoded & combined, uint8_t prec
     lat.fill(0);
     lon.fill(0);

-    uint8_t i = 0;
+    size_t i = 0;
     for (; i < precision * BITS_PER_SYMBOL - 1; i += 2)
     {
         // longitude is even bits
-        lon[i/2] = combined[i];
-        lat[i/2] = combined[i + 1];
+        lon[i / 2] = combined[i];
+        lat[i / 2] = combined[i + 1];
     }
     // precision is even, read the last bit as lat.
     if (precision & 0x1)
     {
-        lon[i/2] = combined[precision * BITS_PER_SYMBOL - 1];
+        lon[i / 2] = combined[precision * BITS_PER_SYMBOL - 1];
     }

     return std::tie(lon, lat);

@@ -152,7 +152,7 @@ inline void base32Encode(const Encoded & binary, uint8_t precision, char * out)
 {
     extern const char geohash_base32_encode_lookup_table[32];

-    for (uint8_t i = 0; i < precision * BITS_PER_SYMBOL; i += BITS_PER_SYMBOL)
+    for (size_t i = 0; i < precision * BITS_PER_SYMBOL; i += BITS_PER_SYMBOL)
     {
         uint8_t v = binary[i];
         v <<= 1;
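The geohash loops switch their counters from uint8_t to size_t, the usual fix for bugprone-too-small-loop-variable: if the bound can exceed the counter's range, the counter wraps and the loop never terminates. A standalone sketch, not from the commit:

// small_loop_variable_demo.cpp - illustrative only.
#include <cstdint>
#include <iostream>

int main()
{
    std::size_t limit = 300;   // larger than the 255 a uint8_t can hold

    // With `uint8_t i` as the counter, i would wrap from 255 back to 0 and
    // never reach `limit`; clang-tidy flags exactly that pattern.
    std::size_t sum = 0;
    for (std::size_t i = 0; i < limit; ++i)
        sum += i;

    std::cout << sum << '\n';
    return 0;
}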
@@ -214,7 +214,7 @@ void FunctionArrayUniq::executeMethodImpl(
         for (ColumnArray::Offset j = prev_off; j < off; ++j)
         {
             if constexpr (has_null_map)
-            {
+            { // NOLINT
                 if ((*null_map)[j])
                 {
                     found_null = true;

@@ -79,14 +79,14 @@ private:
     const char * char_end = char_data + size;

     if constexpr (mode::trim_left)
-    {
+    { // NOLINT
         const char * found = find_first_not_symbols<' '>(char_data, char_end);
         size_t num_chars = found - char_data;
         char_data += num_chars;
     }

     if constexpr (mode::trim_right)
-    {
+    { // NOLINT
         const char * found = find_last_not_symbols_or_null<' '>(char_data, char_end);
         if (found)
             char_end = found + 1;
@@ -68,7 +68,7 @@ inline void readDecimalNumber(T & res, const char * src)
 template <typename T>
 inline void readDecimalNumber(T & res, size_t num_digits, const char * src)
 {
-#define READ_DECIMAL_NUMBER(N) res *= common::exp10_i32(N); readDecimalNumber<N>(res, src); src += N; num_digits -= N; break
+#define READ_DECIMAL_NUMBER(N) do { res *= common::exp10_i32(N); readDecimalNumber<N>(res, src); src += (N); num_digits -= (N); } while (false)

     while (num_digits)
     {

@@ -77,7 +77,7 @@ inline void readDecimalNumber(T & res, size_t num_digits, const char * src)
             case 3: READ_DECIMAL_NUMBER(3); break;
             case 2: READ_DECIMAL_NUMBER(2); break;
             case 1: READ_DECIMAL_NUMBER(1); break;
-            default: READ_DECIMAL_NUMBER(4);
+            default: READ_DECIMAL_NUMBER(4); break;
         }
     }
 #undef DECIMAL_NUMBER_CASE
@@ -82,8 +82,8 @@ void AggregatedDataVariants::convertToTwoLevel()
     {
 #define M(NAME) \
         case Type::NAME: \
-            NAME ## _two_level = std::make_unique<decltype(NAME ## _two_level)::element_type>(*NAME); \
-            NAME.reset(); \
+            NAME ## _two_level = std::make_unique<decltype(NAME ## _two_level)::element_type>(*(NAME)); \
+            (NAME).reset(); \
             type = Type::NAME ## _two_level; \
             break;

@@ -102,7 +102,7 @@ static QueryDescriptors extractQueriesExceptMeAndCheckAccess(const Block & proce
         res.emplace_back(std::move(query_id), std::move(query_user), i, false);
     }

-    if (res.empty() && !query_user.empty())
+    if (res.empty() && !query_user.empty()) // NOLINT
         throw Exception("User " + my_client.current_user + " attempts to kill query created by " + query_user, ErrorCodes::ACCESS_DENIED);

     return res;
@@ -23,7 +23,7 @@ void SetVariantsTemplate<Variant>::init(Type type_)
         case Type::EMPTY: break;

 #define M(NAME) \
-        case Type::NAME: NAME = std::make_unique<typename decltype(NAME)::element_type>(); break;
+        case Type::NAME: (NAME) = std::make_unique<typename decltype(NAME)::element_type>(); break;
         APPLY_FOR_SET_VARIANTS(M)
 #undef M
     }

@@ -37,7 +37,7 @@ size_t SetVariantsTemplate<Variant>::getTotalRowCount() const
         case Type::EMPTY: return 0;

 #define M(NAME) \
-        case Type::NAME: return NAME->data.size();
+        case Type::NAME: return (NAME)->data.size();
         APPLY_FOR_SET_VARIANTS(M)
 #undef M
     }

@@ -53,7 +53,7 @@ size_t SetVariantsTemplate<Variant>::getTotalByteCount() const
         case Type::EMPTY: return 0;

 #define M(NAME) \
-        case Type::NAME: return NAME->data.getBufferSizeInBytes();
+        case Type::NAME: return (NAME)->data.getBufferSizeInBytes();
         APPLY_FOR_SET_VARIANTS(M)
 #undef M
     }
@@ -54,16 +54,16 @@ struct STRUCT : public StringRef {}; \
 namespace ZeroTraits \
 { \
     template <> \
-    inline bool check<STRUCT>(STRUCT x) { return 0 == x.size; } \
+    inline bool check<STRUCT>(STRUCT x) { return 0 == x.size; } /* NOLINT */ \
 \
     template <> \
-    inline void set<STRUCT>(STRUCT & x) { x.size = 0; } \
+    inline void set<STRUCT>(STRUCT & x) { x.size = 0; } /* NOLINT */ \
 } \
 \
 template <> \
 struct DefaultHash<STRUCT> \
 { \
-    size_t operator() (STRUCT x) const \
+    size_t operator() (STRUCT x) const /* NOLINT */ \
     { \
         return CityHash_v1_0_2::CityHash64(x.data, x.size); \
     } \

@@ -57,10 +57,10 @@ struct STRUCT : public StringRef {}; \
 namespace ZeroTraits \
 { \
     template <> \
-    inline bool check<STRUCT>(STRUCT x) { return nullptr == x.data; } \
+    inline bool check<STRUCT>(STRUCT x) { return nullptr == x.data; } /* NOLINT */ \
 \
     template <> \
-    inline void set<STRUCT>(STRUCT & x) { x.data = nullptr; } \
+    inline void set<STRUCT>(STRUCT & x) { x.data = nullptr; } /* NOLINT */ \
 } \
 \
 template <> \
@@ -11,7 +11,7 @@ do \
 { \
     if (member) \
     { \
-        res->member = member->clone(); \
+        res->member = (member)->clone(); \
         res->children.push_back(res->member); \
     } \
 } \

@@ -50,7 +50,7 @@ bool ParserKeyword::parseImpl(Pos & pos, ASTPtr & /*node*/, Expected & expected)
     if (word_length != pos->size())
         return false;

-    if (strncasecmp(pos->begin, current_word, word_length))
+    if (0 != strncasecmp(pos->begin, current_word, word_length))
         return false;

     ++pos;
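The ParserKeyword change addresses the bugprone-suspicious-string-compare pattern: strncasecmp returns an ordering, so using the raw value as a condition means "if the strings differ", which is easy to misread; comparing explicitly against 0 states the intent. A standalone sketch, assuming a POSIX platform where strncasecmp is declared in <strings.h>; it is not taken from the commit:

// string_compare_demo.cpp - illustrative only.
#include <strings.h>
#include <iostream>

int main()
{
    const char * a = "SELECT";
    const char * b = "select";

    // Implicit truthiness: a non-zero return means "different", which is the
    // opposite of what a quick reader might assume the condition says.
    if (strncasecmp(a, b, 6))
        std::cout << "differ\n";

    // Explicit comparison, as in the commit: the intent is unambiguous.
    if (0 == strncasecmp(a, b, 6))
        std::cout << "equal (case-insensitive)\n";

    return 0;
}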
@@ -65,7 +65,7 @@ ForkProcessor::Status ForkProcessor::prepare()
     {
         ++num_processed_outputs;
         if (num_processed_outputs == num_active_outputs)
-            output.push(std::move(data)); /// Can push because no full or unneeded outputs.
+            output.push(std::move(data)); // NOLINT Can push because no full or unneeded outputs.
         else
             output.push(data.clone());
     }

@@ -431,7 +431,7 @@ void registerStorageKafka(StorageFactory & factory)
     // Check arguments and settings
 #define CHECK_KAFKA_STORAGE_ARGUMENT(ARG_NUM, PAR_NAME) \
     /* One of the four required arguments is not specified */ \
-    if (args_count < ARG_NUM && ARG_NUM <= 4 && \
+    if (args_count < (ARG_NUM) && (ARG_NUM) <= 4 && \
         !kafka_settings.PAR_NAME.changed) \
     { \
         throw Exception( \

@@ -442,7 +442,7 @@ void registerStorageKafka(StorageFactory & factory)
     /* The same argument is given in two places */ \
     if (has_settings && \
         kafka_settings.PAR_NAME.changed && \
-        args_count >= ARG_NUM) \
+        args_count >= (ARG_NUM)) \
     { \
         throw Exception( \
             "The argument №" #ARG_NUM " of storage Kafka " \
@@ -323,7 +323,7 @@ ASTPtr StorageLiveView::getInnerBlocksQuery()
         /// Rewrite inner query with right aliases for JOIN.
         /// It cannot be done in constructor or startup() because InterpreterSelectQuery may access table,
         /// which is not loaded yet during server startup, so we do it lazily
-        InterpreterSelectQuery(inner_blocks_query, *live_view_context, SelectQueryOptions().modify().analyze());
+        InterpreterSelectQuery(inner_blocks_query, *live_view_context, SelectQueryOptions().modify().analyze()); // NOLINT
         auto table_id = getStorageID();
         extractDependentTable(inner_blocks_query, global_context, table_id.table_name, inner_subquery);
     }

@@ -516,7 +516,7 @@ void KeyCondition::traverseAST(const ASTPtr & node, const Context & context, Blo
          * - in this case `n - 1` elements are added (where `n` is the number of arguments).
          */
         if (i != 0 || element.function == RPNElement::FUNCTION_NOT)
-            rpn.emplace_back(std::move(element));
+            rpn.emplace_back(element);
     }

     return;

@@ -1091,8 +1091,10 @@ Pipes MergeTreeDataSelectExecutor::spreadMarkRangesAmongStreamsFinal(
     {
         auto merged_processor =
             std::make_shared<MergingSortedTransform>(header, pipes.size(), sort_description, max_block_size);
-        pipes.emplace_back(std::move(pipes), std::move(merged_processor));
-        break;
+        Pipe pipe(std::move(pipes), std::move(merged_processor));
+        pipes = Pipes();
+        pipes.emplace_back(std::move(pipe));
+        return pipes;
     }

     case MergeTreeData::MergingParams::Collapsing:
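The MergeTreeDataSelectExecutor hunk is a genuine bugprone-use-after-move finding: the old line moved from pipes while calling emplace_back on that same container. The fix builds the merged pipe first and then repopulates a fresh Pipes. A standalone sketch of the general hazard with hypothetical types, not from the commit:

// use_after_move_demo.cpp - illustrative only.
#include <string>
#include <utility>
#include <vector>

// Pretend "merge" consumes a whole set of inputs and yields one output.
static std::string merge(std::vector<std::string> inputs)
{
    std::string out;
    for (const auto & s : inputs)
        out += s;
    return out;
}

int main()
{
    std::vector<std::string> parts{"a", "b", "c"};

    // Risky shape flagged by clang-tidy: reusing `parts` in the same
    // expression that moves from it.
    //   parts.emplace_back(merge(std::move(parts)));   // don't do this
    //
    // Safer shape, mirroring the commit: build the result, reset the
    // container, then store the result.
    std::string merged = merge(std::move(parts));
    parts = std::vector<std::string>();
    parts.emplace_back(std::move(merged));

    return static_cast<int>(parts.size()) - 1; // 0 on success
}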
@@ -70,7 +70,7 @@ void MergeTreeSettings::loadFromQuery(ASTStorage & storage_def)
     if (std::find_if(changes.begin(), changes.end(), \
         [](const SettingChange & c) { return c.name == #NAME; }) \
         == changes.end()) \
-        changes.push_back(SettingChange{#NAME, NAME.value});
+        changes.push_back(SettingChange{#NAME, (NAME).value});

     APPLY_FOR_IMMUTABLE_MERGE_TREE_SETTINGS(ADD_IF_ABSENT)
 #undef ADD_IF_ABSENT

@@ -35,11 +35,13 @@ StorageInMemoryMetadata::StorageInMemoryMetadata(const StorageInMemoryMetadata &
 StorageInMemoryMetadata & StorageInMemoryMetadata::operator=(const StorageInMemoryMetadata & other)
 {
+    if (this == &other)
+        return *this;
+
     columns = other.columns;
     indices = other.indices;
     constraints = other.constraints;

     if (other.partition_by_ast)
         partition_by_ast = other.partition_by_ast->clone();
     else