PVS-Studio

This commit is contained in:
Alexey Milovidov 2021-05-03 01:42:01 +03:00
parent 5fa3701540
commit eea47a65d2
7 changed files with 35 additions and 41 deletions

View File

@@ -50,7 +50,7 @@ struct QueryFuzzer
// Some debug fields for detecting problematic ASTs with loops.
// These are reset for each fuzzMain call.
std::unordered_set<const IAST *> debug_visited_nodes;
ASTPtr * debug_top_ast;
ASTPtr * debug_top_ast = nullptr;
// This is the only function you have to call -- it will modify the passed

View File

@@ -1,6 +1,5 @@
#pragma once
#include <Common/HashTable/HashTable.h>
#include <Common/HashTable/HashTableKeyHolder.h>
#include <Common/ColumnsHashingImpl.h>
@@ -15,6 +14,8 @@
#include <Core/Defines.h>
#include <memory>
#include <cassert>
namespace DB
{
@@ -594,8 +595,11 @@ struct HashMethodKeysFixed
return prepared_keys[row];
#if defined(__SSSE3__) && !defined(MEMORY_SANITIZER)
if constexpr (!has_low_cardinality && !has_nullable_keys && sizeof(Key) <= 16)
if constexpr (sizeof(Key) <= 16)
{
assert(!has_low_cardinality && !has_nullable_keys);
return packFixedShuffle<Key>(columns_data.get(), keys_size, key_sizes.data(), row, masks.get());
}
#endif
return packFixed<Key>(row, keys_size, Base::getActualColumns(), key_sizes);
}

View File

@@ -94,40 +94,30 @@ inline UInt32 updateWeakHash32(const DB::UInt8 * pos, size_t size, DB::UInt32 up
DB::UInt64 value = 0;
auto * value_ptr = reinterpret_cast<unsigned char *>(&value);
typedef __attribute__((__aligned__(1))) uint16_t uint16_unaligned_t;
typedef __attribute__((__aligned__(1))) uint32_t uint32_unaligned_t;
/// Adopted code from FastMemcpy.h (memcpy_tiny)
switch (size)
{
case 0:
break;
case 1:
value_ptr[0] = pos[0];
__builtin_memcpy(value_ptr, pos, 1);
break;
case 2:
*reinterpret_cast<uint16_t *>(value_ptr) = *reinterpret_cast<const uint16_unaligned_t *>(pos);
__builtin_memcpy(value_ptr, pos, 2);
break;
case 3:
*reinterpret_cast<uint16_t *>(value_ptr) = *reinterpret_cast<const uint16_unaligned_t *>(pos);
value_ptr[2] = pos[2];
__builtin_memcpy(value_ptr, pos, 3);
break;
case 4:
*reinterpret_cast<uint32_t *>(value_ptr) = *reinterpret_cast<const uint32_unaligned_t *>(pos);
__builtin_memcpy(value_ptr, pos, 4);
break;
case 5:
*reinterpret_cast<uint32_t *>(value_ptr) = *reinterpret_cast<const uint32_unaligned_t *>(pos);
value_ptr[4] = pos[4];
__builtin_memcpy(value_ptr, pos, 5);
break;
case 6:
*reinterpret_cast<uint32_t *>(value_ptr) = *reinterpret_cast<const uint32_unaligned_t *>(pos);
*reinterpret_cast<uint16_unaligned_t *>(value_ptr + 4) =
*reinterpret_cast<const uint16_unaligned_t *>(pos + 4);
__builtin_memcpy(value_ptr, pos, 6);
break;
case 7:
*reinterpret_cast<uint32_t *>(value_ptr) = *reinterpret_cast<const uint32_unaligned_t *>(pos);
*reinterpret_cast<uint32_unaligned_t *>(value_ptr + 3) =
*reinterpret_cast<const uint32_unaligned_t *>(pos + 3);
__builtin_memcpy(value_ptr, pos, 7);
break;
default:
__builtin_unreachable();

View File

@@ -148,7 +148,7 @@ public:
// Increase weight of a key that already exists
auto hash = counter_map.hash(key);
if (auto counter = findCounter(key, hash); counter)
if (auto * counter = findCounter(key, hash); counter)
{
counter->count += increment;
counter->error += error;
@@ -159,12 +159,12 @@ public:
// Key doesn't exist, but can fit in the top K
if (unlikely(size() < capacity()))
{
auto c = new Counter(arena.emplace(key), increment, error, hash);
auto * c = new Counter(arena.emplace(key), increment, error, hash);
push(c);
return;
}
auto min = counter_list.back();
auto * min = counter_list.back();
// The key doesn't exist and cannot fit in the current top K, but
// the new key has a bigger weight and is virtually more present
// compared to the element who is less present on the set. This part
@@ -218,7 +218,7 @@ public:
*/
if (m2 > 0)
{
for (auto counter : counter_list)
for (auto * counter : counter_list)
{
counter->count += m2;
counter->error += m2;
@@ -226,10 +226,10 @@ public:
}
// The list is sorted in descending order, we have to scan in reverse
for (auto counter : boost::adaptors::reverse(rhs.counter_list))
for (auto * counter : boost::adaptors::reverse(rhs.counter_list))
{
size_t hash = counter_map.hash(counter->key);
if (auto current = findCounter(counter->key, hash))
if (auto * current = findCounter(counter->key, hash))
{
// Subtract m2 previously added, guaranteed not negative
current->count += (counter->count - m2);
@@ -262,7 +262,7 @@ public:
std::vector<Counter> topK(size_t k) const
{
std::vector<Counter> res;
for (auto counter : counter_list)
for (auto * counter : counter_list)
{
res.push_back(*counter);
if (res.size() == k)
@@ -274,7 +274,7 @@ public:
void write(WriteBuffer & wb) const
{
writeVarUInt(size(), wb);
for (auto counter : counter_list)
for (auto * counter : counter_list)
counter->write(wb);
writeVarUInt(alpha_map.size(), wb);
@@ -290,7 +290,7 @@ public:
for (size_t i = 0; i < count; ++i)
{
auto counter = new Counter();
auto * counter = new Counter();
counter->read(rb);
counter->hash = counter_map.hash(counter->key);
push(counter);
@@ -325,7 +325,7 @@ protected:
{
while (counter->slot > 0)
{
auto next = counter_list[counter->slot - 1];
auto * next = counter_list[counter->slot - 1];
if (*counter > *next)
{
std::swap(next->slot, counter->slot);
@@ -339,7 +339,7 @@ protected:
private:
void destroyElements()
{
for (auto counter : counter_list)
for (auto * counter : counter_list)
{
arena.free(counter->key);
delete counter;
@@ -376,7 +376,7 @@ private:
{
removed_keys = 0;
counter_map.clear();
for (auto counter : counter_list)
for (auto * counter : counter_list)
counter_map[counter->key] = counter;
}

View File

@@ -310,7 +310,7 @@ public:
template <typename T, typename Z = void *>
using enable_if_not_field_or_stringlike_t = std::enable_if_t<!std::is_same_v<std::decay_t<T>, Field> && !std::is_same_v<NearestFieldType<std::decay_t<T>>, String>, Z>;
Field()
Field() //-V730
: which(Types::Null)
{
}
@@ -851,7 +851,7 @@ decltype(auto) castToNearestFieldType(T && x)
}
template <typename T>
Field::Field(T && rhs, enable_if_not_field_or_stringlike_t<T>)
Field::Field(T && rhs, enable_if_not_field_or_stringlike_t<T>) //-V730
{
auto && val = castToNearestFieldType(std::forward<T>(rhs));
createConcrete(std::forward<decltype(val)>(val));

View File

@@ -137,7 +137,7 @@ ColumnPtr wrapInNullable(const ColumnPtr & src, const ColumnsWithTypeAndName & a
if (const auto * nullable = checkAndGetColumn<ColumnNullable>(*elem.column))
{
const ColumnPtr & null_map_column = nullable->getNullMapColumnPtr();
if (!result_null_map_column)
if (!result_null_map_column) //-V1051
{
result_null_map_column = null_map_column;
}

View File

@@ -1043,12 +1043,12 @@ private:
*/
struct AggregateFunctionInstruction
{
const IAggregateFunction * that;
size_t state_offset;
const IColumn ** arguments;
const IAggregateFunction * batch_that;
const IColumn ** batch_arguments;
const UInt64 * offsets = nullptr;
const IAggregateFunction * that{};
size_t state_offset{};
const IColumn ** arguments{};
const IAggregateFunction * batch_that{};
const IColumn ** batch_arguments{};
const UInt64 * offsets{};
};
using AggregateFunctionInstructions = std::vector<AggregateFunctionInstruction>;