From 7521fd2dcb2050d9837e19b6374dfa0a11f5106e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Ra=C3=BAl=20Mar=C3=ADn?=
Date: Thu, 19 Sep 2024 18:17:04 +0200
Subject: [PATCH] Remove unused buggy code

---
 src/AggregateFunctions/UniquesHashSet.h       | 45 -------
 src/Common/CombinedCardinalityEstimator.h     | 42 -----
 src/Common/HashTable/FixedHashSet.h           |  7 ---
 src/Common/HashTable/HashSet.h                | 20 -----
 src/Common/HyperLogLogCounter.h               | 12 ---
 .../HyperLogLogWithSmallSetOptimization.h     | 18 ----
 6 files changed, 144 deletions(-)

diff --git a/src/AggregateFunctions/UniquesHashSet.h b/src/AggregateFunctions/UniquesHashSet.h
index d5241547711..8c05ab36265 100644
--- a/src/AggregateFunctions/UniquesHashSet.h
+++ b/src/AggregateFunctions/UniquesHashSet.h
@@ -447,51 +447,6 @@ public:
         }
     }
 
-    void readAndMerge(DB::ReadBuffer & rb)
-    {
-        UInt8 rhs_skip_degree = 0;
-        DB::readBinaryLittleEndian(rhs_skip_degree, rb);
-
-        if (rhs_skip_degree > skip_degree)
-        {
-            skip_degree = rhs_skip_degree;
-            rehash();
-        }
-
-        size_t rhs_size = 0;
-        DB::readVarUInt(rhs_size, rb);
-
-        if (rhs_size > UNIQUES_HASH_MAX_SIZE)
-            throw Poco::Exception("Cannot read UniquesHashSet: too large size_degree.");
-
-        if ((1ULL << size_degree) < rhs_size)
-        {
-            UInt8 new_size_degree = std::max(UNIQUES_HASH_SET_INITIAL_SIZE_DEGREE, static_cast<int>(log2(rhs_size - 1)) + 2);
-            resize(new_size_degree);
-        }
-
-        if (rhs_size <= 1)
-        {
-            for (size_t i = 0; i < rhs_size; ++i)
-            {
-                HashValue x = 0;
-                DB::readBinaryLittleEndian(x, rb);
-                insertHash(x);
-            }
-        }
-        else
-        {
-            auto hs = std::make_unique<HashValue[]>(rhs_size);
-            rb.readStrict(reinterpret_cast<char *>(hs.get()), rhs_size * sizeof(HashValue));
-
-            for (size_t i = 0; i < rhs_size; ++i)
-            {
-                DB::transformEndianness<std::endian::native, std::endian::little>(hs[i]);
-                insertHash(hs[i]);
-            }
-        }
-    }
-
     static void skip(DB::ReadBuffer & rb)
     {
         size_t size = 0;
diff --git a/src/Common/CombinedCardinalityEstimator.h b/src/Common/CombinedCardinalityEstimator.h
index 132f00de8eb..2c3e1e07745 100644
--- a/src/Common/CombinedCardinalityEstimator.h
+++ b/src/Common/CombinedCardinalityEstimator.h
@@ -177,48 +177,6 @@ public:
         }
     }
 
-    void readAndMerge(DB::ReadBuffer & in)
-    {
-        auto container_type = getContainerType();
-
-        /// If readAndMerge is called with an empty state, just deserialize
-        /// the state is specified as a parameter.
-        if ((container_type == details::ContainerType::SMALL) && small.empty())
-        {
-            read(in);
-            return;
-        }
-
-        UInt8 v;
-        readBinary(v, in);
-        auto rhs_container_type = static_cast<details::ContainerType>(v);
-
-        auto max_container_type = details::max(container_type, rhs_container_type);
-
-        if (container_type != max_container_type)
-        {
-            if (max_container_type == details::ContainerType::MEDIUM)
-                toMedium();
-            else if (max_container_type == details::ContainerType::LARGE)
-                toLarge();
-        }
-
-        if (rhs_container_type == details::ContainerType::SMALL)
-        {
-            typename Small::Reader reader(in);
-            while (reader.next())
-                insert(reader.get());
-        }
-        else if (rhs_container_type == details::ContainerType::MEDIUM)
-        {
-            typename Medium::Reader reader(in);
-            while (reader.next())
-                insert(reader.get());
-        }
-        else if (rhs_container_type == details::ContainerType::LARGE)
-            getContainer<Large>().readAndMerge(in);
-    }
-
     void write(DB::WriteBuffer & out) const
     {
         auto container_type = getContainerType();
diff --git a/src/Common/HashTable/FixedHashSet.h b/src/Common/HashTable/FixedHashSet.h
index e764038e6c3..5b314b1f0a6 100644
--- a/src/Common/HashTable/FixedHashSet.h
+++ b/src/Common/HashTable/FixedHashSet.h
@@ -16,11 +16,4 @@ public:
             if (Base::buf[i].isZero(*this) && !rhs.buf[i].isZero(*this))
                 new (&Base::buf[i]) Cell(rhs.buf[i]);
     }
-
-    /// NOTE: Currently this method isn't used. When it does, the ReadBuffer should
-    /// contain the Key explicitly.
-    // void readAndMerge(DB::ReadBuffer & rb)
-    // {
-
-    // }
 };
diff --git a/src/Common/HashTable/HashSet.h b/src/Common/HashTable/HashSet.h
index c25bfb14d9c..9b7445d20c6 100644
--- a/src/Common/HashTable/HashSet.h
+++ b/src/Common/HashTable/HashSet.h
@@ -55,26 +55,6 @@ public:
             if (!rhs.buf[i].isZero(*this))
                 this->insert(rhs.buf[i].getValue());
     }
-
-
-    void readAndMerge(DB::ReadBuffer & rb)
-    {
-        Cell::State::read(rb);
-
-        size_t new_size = 0;
-        DB::readVarUInt(new_size, rb);
-        if (new_size > 100'000'000'000)
-            throw DB::Exception(DB::ErrorCodes::TOO_LARGE_ARRAY_SIZE, "The size of serialized hash table is suspiciously large: {}", new_size);
-
-        this->resize(new_size);
-
-        for (size_t i = 0; i < new_size; ++i)
-        {
-            Cell x;
-            x.read(rb);
-            this->insert(x.getValue());
-        }
-    }
 };
 
 
diff --git a/src/Common/HyperLogLogCounter.h b/src/Common/HyperLogLogCounter.h
index 9b2b33dc918..b3e509c782d 100644
--- a/src/Common/HyperLogLogCounter.h
+++ b/src/Common/HyperLogLogCounter.h
@@ -353,18 +353,6 @@ public:
         }
     }
 
-    void readAndMerge(DB::ReadBuffer & in)
-    {
-        typename RankStore::Reader reader(in);
-        while (reader.next())
-        {
-            const auto & data = reader.get();
-            update(data.first, data.second);
-        }
-
-        in.ignore(sizeof(DenominatorCalculatorType) + sizeof(ZerosCounterType));
-    }
-
     static void skip(DB::ReadBuffer & in)
     {
         in.ignore(sizeof(RankStore) + sizeof(DenominatorCalculatorType) + sizeof(ZerosCounterType));
diff --git a/src/Common/HyperLogLogWithSmallSetOptimization.h b/src/Common/HyperLogLogWithSmallSetOptimization.h
index 1d2408186de..1748f32cd95 100644
--- a/src/Common/HyperLogLogWithSmallSetOptimization.h
+++ b/src/Common/HyperLogLogWithSmallSetOptimization.h
@@ -113,24 +113,6 @@ public:
             small.read(in);
     }
 
-    void readAndMerge(DB::ReadBuffer & in)
-    {
-        bool is_rhs_large;
-        readBinary(is_rhs_large, in);
-
-        if (!isLarge() && is_rhs_large)
-            toLarge();
-
-        if (!is_rhs_large)
-        {
-            typename Small::Reader reader(in);
-            while (reader.next())
-                insert(reader.get());
-        }
-        else
-            large->readAndMerge(in);
-    }
-
     void write(DB::WriteBuffer & out) const
     {
         writeBinary(isLarge(), out);