Alternative way

Alexey Milovidov 2024-02-16 01:23:00 +01:00
parent 66248d4814
commit 8b7a11cc2c
5 changed files with 7 additions and 7 deletions
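
The commit replaces reserve with reserve_exact at the call sites where bulk deserialization already knows how many elements it is about to append (the current size plus limit). The sketch below only illustrates the idea, assuming that reserve rounds the requested capacity up to the next power of two for amortized growth while reserve_exact allocates just what was asked for; the toy container, the rounding rule, and the numbers are illustrative and are not ClickHouse's PODArray implementation.

// Minimal sketch (not PODArray): contrast a power-of-two growth policy with an
// exact one. When the final element count is known up front, rounding the
// allocation up can waste close to half of the buffer.
#include <cstddef>
#include <cstdio>

static size_t round_up_to_power_of_two(size_t n)
{
    size_t p = 1;
    while (p < n)
        p <<= 1;
    return p;
}

struct GrowthDemo
{
    size_t capacity = 0;

    void reserve(size_t n)        /// amortized: round capacity up to a power of two
    {
        if (n > capacity)
            capacity = round_up_to_power_of_two(n);
    }

    void reserve_exact(size_t n)  /// exact: allocate precisely what was requested
    {
        if (n > capacity)
            capacity = n;
    }
};

int main()
{
    const size_t limit = 650000;  /// rows the caller is about to deserialize

    GrowthDemo amortized;
    GrowthDemo exact;
    amortized.reserve(limit);
    exact.reserve_exact(limit);

    std::printf("reserve:       %zu\n", amortized.capacity);  /// 1048576
    std::printf("reserve_exact: %zu\n", exact.capacity);      /// 650000
}

In the changed call sites the target size is already computed exactly, which is presumably why the exact variant is preferred there.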

@@ -79,7 +79,7 @@ void SerializationAggregateFunction::deserializeBinaryBulk(IColumn & column, Rea
     Arena & arena = real_column.createOrGetArena();
     real_column.set(function, version);
-    vec.reserve(vec.size() + limit);
+    vec.reserve_exact(vec.size() + limit);
     size_t size_of_state = function->sizeOfData();
     size_t align_of_state = function->alignOfData();

@@ -66,7 +66,7 @@ size_t deserializeOffsets(IColumn::Offsets & offsets,
     }
     /// Just try to guess number of offsets.
-    offsets.reserve(offsets.size()
+    offsets.reserve_exact(offsets.size()
         + static_cast<size_t>(limit * (1.0 - ColumnSparse::DEFAULT_RATIO_FOR_SPARSE_SERIALIZATION)));
     bool first = true;

@@ -235,7 +235,7 @@ void SerializationString::deserializeBinaryBulk(IColumn & column, ReadBuffer & i
     {
         try
         {
-            data.reserve(size_to_reserve);
+            data.reserve_exact(size_to_reserve);
         }
         catch (Exception & e)
         {
@@ -247,7 +247,7 @@ void SerializationString::deserializeBinaryBulk(IColumn & column, ReadBuffer & i
         }
     }
-    offsets.reserve(offsets.size() + limit);
+    offsets.reserve_exact(offsets.size() + limit);
     if (avg_chars_size >= 64)
         deserializeBinarySSE2<4>(data, offsets, istr, limit);

@@ -320,7 +320,7 @@ void SerializationVariant::deserializeBinaryBulkWithMultipleStreams(
     else
     {
         auto & offsets = col.getOffsets();
-        offsets.reserve(offsets.size() + limit);
+        offsets.reserve_exact(offsets.size() + limit);
         std::vector<size_t> variant_offsets;
         variant_offsets.reserve(variants.size());
         for (size_t i = 0; i != variants.size(); ++i)

@@ -129,7 +129,7 @@ void SerializationVariantElement::deserializeBinaryBulkWithMultipleStreams(
     /// Otherwise we should iterate through discriminators to fill null map.
     else
     {
-        null_map.reserve(null_map.size() + limit);
+        null_map.reserve_exact(null_map.size() + limit);
         for (size_t i = discriminators_offset; i != discriminators_data.size(); ++i)
             null_map.push_back(discriminators_data[i] != variant_discriminator);
     }
@@ -241,7 +241,7 @@ ColumnPtr SerializationVariantElement::VariantSubcolumnCreator::create(const DB:
     /// In general case we should iterate through discriminators and create null-map for our variant.
     NullMap null_map;
-    null_map.reserve(local_discriminators->size());
+    null_map.reserve_exact(local_discriminators->size());
     const auto & local_discriminators_data = assert_cast<const ColumnVariant::ColumnDiscriminators &>(*local_discriminators).getData();
     for (auto local_discr : local_discriminators_data)
         null_map.push_back(local_discr != local_variant_discriminator);