From eea47a65d2edbfe28fa516b7b9cd1743d167a400 Mon Sep 17 00:00:00 2001
From: Alexey Milovidov
Date: Mon, 3 May 2021 01:42:01 +0300
Subject: [PATCH] PVS-Studio

---
 programs/client/QueryFuzzer.h |  2 +-
 src/Common/ColumnsHashing.h   |  8 ++++++--
 src/Common/HashTable/Hash.h   | 24 +++++++-----------------
 src/Common/SpaceSaving.h      | 24 ++++++++++++------------
 src/Core/Field.h              |  4 ++--
 src/Functions/IFunction.cpp   |  2 +-
 src/Interpreters/Aggregator.h | 12 ++++++------
 7 files changed, 35 insertions(+), 41 deletions(-)

diff --git a/programs/client/QueryFuzzer.h b/programs/client/QueryFuzzer.h
index 38714205967..9ef66db1873 100644
--- a/programs/client/QueryFuzzer.h
+++ b/programs/client/QueryFuzzer.h
@@ -50,7 +50,7 @@ struct QueryFuzzer
 
     // Some debug fields for detecting problematic ASTs with loops.
     // These are reset for each fuzzMain call.
     std::unordered_set<const void *> debug_visited_nodes;
-    ASTPtr * debug_top_ast;
+    ASTPtr * debug_top_ast = nullptr;
 
     // This is the only function you have to call -- it will modify the passed
diff --git a/src/Common/ColumnsHashing.h b/src/Common/ColumnsHashing.h
index b7173b25ce5..ada783d982c 100644
--- a/src/Common/ColumnsHashing.h
+++ b/src/Common/ColumnsHashing.h
@@ -1,6 +1,5 @@
 #pragma once
-
 #include
 #include
 #include
 
@@ -15,6 +14,8 @@
 #include
 #include
 
+#include <cassert>
+
 namespace DB
 {
 
@@ -594,8 +595,11 @@ struct HashMethodKeysFixed
             return prepared_keys[row];
 
 #if defined(__SSSE3__) && !defined(MEMORY_SANITIZER)
-        if constexpr (!has_low_cardinality && !has_nullable_keys && sizeof(Key) <= 16)
+        if constexpr (sizeof(Key) <= 16)
+        {
+            assert(!has_low_cardinality && !has_nullable_keys);
             return packFixedShuffle<Key>(columns_data.get(), keys_size, key_sizes.data(), row, masks.get());
+        }
 #endif
         return packFixed<Key>(row, keys_size, Base::getActualColumns(), key_sizes);
     }
diff --git a/src/Common/HashTable/Hash.h b/src/Common/HashTable/Hash.h
index 0abe96497bd..8e876c6b994 100644
--- a/src/Common/HashTable/Hash.h
+++ b/src/Common/HashTable/Hash.h
@@ -94,40 +94,30 @@ inline UInt32 updateWeakHash32(const DB::UInt8 * pos, size_t size, DB::UInt32 up
         DB::UInt64 value = 0;
         auto * value_ptr = reinterpret_cast<DB::UInt8 *>(&value);
 
-        typedef __attribute__((__aligned__(1))) uint16_t uint16_unaligned_t;
-        typedef __attribute__((__aligned__(1))) uint32_t uint32_unaligned_t;
-
-        /// Adopted code from FastMemcpy.h (memcpy_tiny)
         switch (size)
         {
             case 0:
                 break;
             case 1:
-                value_ptr[0] = pos[0];
+                __builtin_memcpy(value_ptr, pos, 1);
                 break;
             case 2:
-                *reinterpret_cast<uint16_unaligned_t *>(value_ptr) = *reinterpret_cast<const uint16_unaligned_t *>(pos);
+                __builtin_memcpy(value_ptr, pos, 2);
                 break;
             case 3:
-                *reinterpret_cast<uint16_unaligned_t *>(value_ptr) = *reinterpret_cast<const uint16_unaligned_t *>(pos);
-                value_ptr[2] = pos[2];
+                __builtin_memcpy(value_ptr, pos, 3);
                 break;
             case 4:
-                *reinterpret_cast<uint32_unaligned_t *>(value_ptr) = *reinterpret_cast<const uint32_unaligned_t *>(pos);
+                __builtin_memcpy(value_ptr, pos, 4);
                 break;
             case 5:
-                *reinterpret_cast<uint32_unaligned_t *>(value_ptr) = *reinterpret_cast<const uint32_unaligned_t *>(pos);
-                value_ptr[4] = pos[4];
+                __builtin_memcpy(value_ptr, pos, 5);
                 break;
             case 6:
-                *reinterpret_cast<uint32_unaligned_t *>(value_ptr) = *reinterpret_cast<const uint32_unaligned_t *>(pos);
-                *reinterpret_cast<uint16_unaligned_t *>(value_ptr + 4) =
-                    *reinterpret_cast<const uint16_unaligned_t *>(pos + 4);
+                __builtin_memcpy(value_ptr, pos, 6);
                 break;
             case 7:
-                *reinterpret_cast<uint32_unaligned_t *>(value_ptr) = *reinterpret_cast<const uint32_unaligned_t *>(pos);
-                *reinterpret_cast<uint32_unaligned_t *>(value_ptr + 3) =
-                    *reinterpret_cast<const uint32_unaligned_t *>(pos + 3);
+                __builtin_memcpy(value_ptr, pos, 7);
                 break;
             default:
                 __builtin_unreachable();
diff --git a/src/Common/SpaceSaving.h b/src/Common/SpaceSaving.h
index 185b4aa90ae..390afaac81f 100644
--- a/src/Common/SpaceSaving.h
+++ b/src/Common/SpaceSaving.h
@@ -148,7 +148,7 @@ public:
         // Increase weight of a key that already exists
         auto hash = counter_map.hash(key);
 
-        if (auto counter = findCounter(key, hash); counter)
+        if (auto * counter = findCounter(key, hash); counter)
         {
             counter->count += increment;
             counter->error += error;
@@ -159,12 +159,12 @@ public:
         // Key doesn't exist, but can fit in the top K
         if (unlikely(size() < capacity()))
         {
-            auto c = new Counter(arena.emplace(key), increment, error, hash);
+            auto * c = new Counter(arena.emplace(key), increment, error, hash);
             push(c);
             return;
         }
 
-        auto min = counter_list.back();
+        auto * min = counter_list.back();
         // The key doesn't exist and cannot fit in the current top K, but
         // the new key has a bigger weight and is virtually more present
         // compared to the element who is less present on the set. This part
@@ -218,7 +218,7 @@ public:
          */
         if (m2 > 0)
        {
-            for (auto counter : counter_list)
+            for (auto * counter : counter_list)
             {
                 counter->count += m2;
                 counter->error += m2;
@@ -226,10 +226,10 @@ public:
         }
 
         // The list is sorted in descending order, we have to scan in reverse
-        for (auto counter : boost::adaptors::reverse(rhs.counter_list))
+        for (auto * counter : boost::adaptors::reverse(rhs.counter_list))
         {
             size_t hash = counter_map.hash(counter->key);
-            if (auto current = findCounter(counter->key, hash))
+            if (auto * current = findCounter(counter->key, hash))
             {
                 // Subtract m2 previously added, guaranteed not negative
                 current->count += (counter->count - m2);
@@ -262,7 +262,7 @@ public:
     std::vector<Counter> topK(size_t k) const
     {
         std::vector<Counter> res;
-        for (auto counter : counter_list)
+        for (auto * counter : counter_list)
         {
             res.push_back(*counter);
             if (res.size() == k)
@@ -274,7 +274,7 @@ public:
     void write(WriteBuffer & wb) const
     {
         writeVarUInt(size(), wb);
-        for (auto counter : counter_list)
+        for (auto * counter : counter_list)
             counter->write(wb);
 
         writeVarUInt(alpha_map.size(), wb);
@@ -290,7 +290,7 @@ public:
 
         for (size_t i = 0; i < count; ++i)
         {
-            auto counter = new Counter();
+            auto * counter = new Counter();
             counter->read(rb);
             counter->hash = counter_map.hash(counter->key);
             push(counter);
@@ -325,7 +325,7 @@ protected:
     {
         while (counter->slot > 0)
         {
-            auto next = counter_list[counter->slot - 1];
+            auto * next = counter_list[counter->slot - 1];
             if (*counter > *next)
             {
                 std::swap(next->slot, counter->slot);
@@ -339,7 +339,7 @@ private:
     void destroyElements()
     {
-        for (auto counter : counter_list)
+        for (auto * counter : counter_list)
         {
             arena.free(counter->key);
             delete counter;
         }
@@ -376,7 +376,7 @@ private:
     {
         removed_keys = 0;
         counter_map.clear();
-        for (auto counter : counter_list)
+        for (auto * counter : counter_list)
             counter_map[counter->key] = counter;
     }
 
diff --git a/src/Core/Field.h b/src/Core/Field.h
index 5c4c2e165ad..2c3ca1310e4 100644
--- a/src/Core/Field.h
+++ b/src/Core/Field.h
@@ -310,7 +310,7 @@ public:
     template <typename T, typename Z = void *>
     using enable_if_not_field_or_stringlike_t = std::enable_if_t<!std::is_same_v<std::decay_t<T>, Field> && !std::is_same_v<NearestFieldType<std::decay_t<T>>, String>, Z>;
 
-    Field()
+    Field() //-V730
         : which(Types::Null)
     {
     }
@@ -851,7 +851,7 @@ decltype(auto) castToNearestFieldType(T && x)
 }
 
 template <typename T>
-Field::Field(T && rhs, enable_if_not_field_or_stringlike_t<T>)
+Field::Field(T && rhs, enable_if_not_field_or_stringlike_t<T>) //-V730
 {
     auto && val = castToNearestFieldType(std::forward<T>(rhs));
     createConcrete(std::forward<decltype(val)>(val));
 }
diff --git a/src/Functions/IFunction.cpp b/src/Functions/IFunction.cpp
index 43b344bebc4..5bbe6eb523e 100644
--- a/src/Functions/IFunction.cpp
+++ b/src/Functions/IFunction.cpp
@@ -137,7 +137,7 @@ ColumnPtr wrapInNullable(const ColumnPtr & src, const ColumnsWithTypeAndName & a
         if (const auto * nullable = checkAndGetColumn<ColumnNullable>(*elem.column))
         {
             const ColumnPtr & null_map_column = nullable->getNullMapColumnPtr();
-            if (!result_null_map_column)
+            if (!result_null_map_column) //-V1051
             {
                 result_null_map_column = null_map_column;
             }
diff --git a/src/Interpreters/Aggregator.h b/src/Interpreters/Aggregator.h
index 1b5ed75b0d0..31eefe03b3c 100644
--- a/src/Interpreters/Aggregator.h
+++ b/src/Interpreters/Aggregator.h
@@ -1043,12 +1043,12 @@ private:
      */
     struct AggregateFunctionInstruction
     {
-        const IAggregateFunction * that;
-        size_t state_offset;
-        const IColumn ** arguments;
-        const IAggregateFunction * batch_that;
-        const IColumn ** batch_arguments;
-        const UInt64 * offsets = nullptr;
+        const IAggregateFunction * that{};
+        size_t state_offset{};
+        const IColumn ** arguments{};
+        const IAggregateFunction * batch_that{};
+        const IColumn ** batch_arguments{};
+        const UInt64 * offsets{};
     };
 
     using AggregateFunctionInstructions = std::vector<AggregateFunctionInstruction>;
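
Note (not part of the patch): the Hash.h hunk replaces manual type-punning loads through aligned(1) typedefs with fixed-size __builtin_memcpy calls. A minimal sketch of the same pattern, with an illustrative helper name and assuming GCC or Clang at -O1 or higher (where a constant-size memcpy folds into a single unaligned load, and std::memcpy is treated the same as __builtin_memcpy):

    #include <cstdint>
    #include <cstring>

    // Copying a known, small byte count into a local integer is well-defined
    // for any source alignment. The optimizer emits the same single load as
    // the old reinterpret_cast code, but without the undefined behaviour
    // that the analyzer flags.
    inline uint32_t loadUnaligned32(const unsigned char * p)
    {
        uint32_t value;
        std::memcpy(&value, p, sizeof(value));
        return value;
    }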
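Note (not part of the patch): the trailing //-V730 and //-V1051 markers in the Field.h and IFunction.cpp hunks are PVS-Studio inline suppressions; the comment names the diagnostic to mute on that one line. A hedged sketch with made-up code, assuming only the documented comment syntax:

    struct Example
    {
        int a;
        int b;

        // V730 ("not all members are initialized in the constructor") would
        // fire here because `b` is intentionally left untouched; the trailing
        // marker tells the analyzer this is deliberate.
        explicit Example(int a_) : a(a_) {} //-V730
    };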
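Note (not part of the patch): the QueryFuzzer.h and Aggregator.h hunks fix the same class of warning in two equivalent ways, an explicit `= nullptr` and empty default member initializers `{}`. A small sketch with hypothetical field names showing why `{}` is enough:

    #include <cstddef>

    struct Instruction
    {
        const void * func{};   // value-initialized to nullptr
        size_t offset{};       // value-initialized to 0
        const int ** args{};   // value-initialized to nullptr
    };

    // `Instruction instr;` now yields deterministic nulls and zeros instead
    // of indeterminate values, so a missed assignment fails predictably and
    // the uninitialized-member warning goes away.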