Renamed methods for bulk binary serialization [#METR-2944].

Alexey Milovidov 2017-01-03 01:47:28 +03:00
parent 3fbb597694
commit 701ca28c85
28 changed files with 73 additions and 70 deletions
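In short, the bulk (whole-column) overloads of serializeBinary / deserializeBinary are split off under their own names. A minimal before/after sketch of the affected IDataType methods, assembled from the hunks below:

// Before: the bulk variants overload the same name as single-value serialization.
virtual void serializeBinary(const IColumn & column, WriteBuffer & ostr, size_t offset = 0, size_t limit = 0) const = 0;
virtual void deserializeBinary(IColumn & column, ReadBuffer & istr, size_t limit, double avg_value_size_hint) const = 0;

// After: the bulk variants get distinct names (the defaulted arguments are dropped along the way).
virtual void serializeBinaryBulk(const IColumn & column, WriteBuffer & ostr, size_t offset, size_t limit) const = 0;
virtual void deserializeBinaryBulk(IColumn & column, ReadBuffer & istr, size_t limit, double avg_value_size_hint) const = 0;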

View File

@@ -46,8 +46,8 @@ public:
void serializeBinary(const IColumn & column, size_t row_num, WriteBuffer & ostr) const override;
void deserializeBinary(IColumn & column, ReadBuffer & istr) const override;
void serializeBinary(const IColumn & column, WriteBuffer & ostr, size_t offset = 0, size_t limit = 0) const override;
void deserializeBinary(IColumn & column, ReadBuffer & istr, size_t limit, double avg_value_size_hint) const override;
void serializeBinaryBulk(const IColumn & column, WriteBuffer & ostr, size_t offset, size_t limit) const override;
void deserializeBinaryBulk(IColumn & column, ReadBuffer & istr, size_t limit, double avg_value_size_hint) const override;
void serializeText(const IColumn & column, size_t row_num, WriteBuffer & ostr) const override;
void serializeTextEscaped(const IColumn & column, size_t row_num, WriteBuffer & ostr) const override;
void deserializeTextEscaped(IColumn & column, ReadBuffer & istr) const override;

View File

@@ -62,15 +62,15 @@ public:
*/
/** Write only the values, without sizes. The caller must also write the offsets somewhere. */
void serializeBinary(const IColumn & column, WriteBuffer & ostr, size_t offset = 0, size_t limit = 0) const override;
void serializeBinaryBulk(const IColumn & column, WriteBuffer & ostr, size_t offset, size_t limit) const override;
/** Read only the values, without sizes.
* All sizes must already have been read into the column beforehand.
*/
void deserializeBinary(IColumn & column, ReadBuffer & istr, size_t limit, double avg_value_size_hint) const override;
void deserializeBinaryBulk(IColumn & column, ReadBuffer & istr, size_t limit, double avg_value_size_hint) const override;
/** Write the sizes. */
void serializeOffsets(const IColumn & column, WriteBuffer & ostr, size_t offset = 0, size_t limit = 0) const;
void serializeOffsets(const IColumn & column, WriteBuffer & ostr, size_t offset, size_t limit) const;
/** Read the sizes. Call this method before reading the values. */
void deserializeOffsets(IColumn & column, ReadBuffer & istr, size_t limit) const;
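A minimal usage sketch of the ordering these comments prescribe (illustrative only; arr_type, istr and rows are assumed to exist in the caller):

// Read the sizes first, then the values; deserializeBinaryBulk derives how
// many nested values to read from the offsets already present in the column.
ColumnPtr column = arr_type.createColumn();
arr_type.deserializeOffsets(*column, istr, rows);
arr_type.deserializeBinaryBulk(*column, istr, rows, 0);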

View File

@@ -99,8 +99,8 @@ public:
void serializeTextCSV(const IColumn & column, size_t row_num, WriteBuffer & ostr) const override;
void deserializeTextCSV(IColumn & column, ReadBuffer & istr, const char delimiter) const override;
void serializeBinary(const IColumn & column, WriteBuffer & ostr, const size_t offset = 0, size_t limit = 0) const override;
void deserializeBinary(IColumn & column, ReadBuffer & istr, const size_t limit, const double avg_value_size_hint) const override;
void serializeBinaryBulk(const IColumn & column, WriteBuffer & ostr, const size_t offset, size_t limit) const override;
void deserializeBinaryBulk(IColumn & column, ReadBuffer & istr, const size_t limit, const double avg_value_size_hint) const override;
size_t getSizeOfField() const override { return sizeof(FieldType); }

View File

@@ -41,8 +41,8 @@ public:
void serializeBinary(const IColumn & column, size_t row_num, WriteBuffer & ostr) const override;
void deserializeBinary(IColumn & column, ReadBuffer & istr) const override;
void serializeBinary(const IColumn & column, WriteBuffer & ostr, size_t offset = 0, size_t limit = 0) const override;
void deserializeBinary(IColumn & column, ReadBuffer & istr, size_t limit, double avg_value_size_hint) const override;
void serializeBinaryBulk(const IColumn & column, WriteBuffer & ostr, size_t offset, size_t limit) const override;
void deserializeBinaryBulk(IColumn & column, ReadBuffer & istr, size_t limit, double avg_value_size_hint) const override;
void serializeText(const IColumn & column, size_t row_num, WriteBuffer & ostr) const override;

View File

@@ -36,8 +36,8 @@ public:
return std::make_shared<DataTypeNull>();
}
void serializeBinary(const IColumn & column, WriteBuffer & ostr, size_t offset = 0, size_t limit = 0) const override;
void deserializeBinary(IColumn & column, ReadBuffer & istr, size_t limit, double avg_value_size_hint) const override;
void serializeBinaryBulk(const IColumn & column, WriteBuffer & ostr, size_t offset, size_t limit) const override;
void deserializeBinaryBulk(IColumn & column, ReadBuffer & istr, size_t limit, double avg_value_size_hint) const override;
ColumnPtr createColumn() const override
{

View File

@@ -22,8 +22,8 @@ public:
DataTypePtr clone() const override { return std::make_shared<DataTypeNullable>(nested_data_type->clone()); }
/// Bulk serialization and deserialization process only the nested columns. You should process the null byte map separately.
void serializeBinary(const IColumn & column, WriteBuffer & ostr, size_t offset = 0, size_t limit = 0) const override;
void deserializeBinary(IColumn & column, ReadBuffer & istr, size_t limit, double avg_value_size_hint) const override;
void serializeBinaryBulk(const IColumn & column, WriteBuffer & ostr, size_t offset, size_t limit) const override;
void deserializeBinaryBulk(IColumn & column, ReadBuffer & istr, size_t limit, double avg_value_size_hint) const override;
void serializeBinary(const Field & field, WriteBuffer & ostr) const override { nested_data_type->serializeBinary(field, ostr); }
void deserializeBinary(Field & field, ReadBuffer & istr) const override { nested_data_type->deserializeBinary(field, istr); }
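What "separately" means in practice is visible in the Native stream hunks later in this diff; schematically, for the write side (names are illustrative):

// Caller-side pattern for a Nullable column: the null byte map is written
// explicitly as a UInt8 column, then the Nullable type bulk-serializes the
// nested values (it simply forwards to the nested type).
const ColumnNullable & nullable_col = static_cast<const ColumnNullable &>(column);
DataTypeUInt8{}.serializeBinaryBulk(*nullable_col.getNullMapColumn(), ostr, offset, limit);
nullable_type.serializeBinaryBulk(nullable_col, ostr, offset, limit);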

View File

@@ -28,8 +28,8 @@ public:
void serializeBinary(const IColumn & column, size_t row_num, WriteBuffer & ostr) const override;
void deserializeBinary(IColumn & column, ReadBuffer & istr) const override;
void serializeBinary(const IColumn & column, WriteBuffer & ostr, size_t offset = 0, size_t limit = 0) const override;
void deserializeBinary(IColumn & column, ReadBuffer & istr, size_t limit, double avg_value_size_hint) const override;
void serializeBinaryBulk(const IColumn & column, WriteBuffer & ostr, size_t offset, size_t limit) const override;
void deserializeBinaryBulk(IColumn & column, ReadBuffer & istr, size_t limit, double avg_value_size_hint) const override;
void serializeText(const IColumn & column, size_t row_num, WriteBuffer & ostr) const override;

View File

@@ -39,13 +39,13 @@ public:
void serializeTextCSV(const IColumn & column, size_t row_num, WriteBuffer & ostr) const override;
void deserializeTextCSV(IColumn & column, ReadBuffer & istr, const char delimiter) const override;
void serializeBinary(const IColumn & column, WriteBuffer & ostr, size_t offset = 0, size_t limit = 0) const override;
void serializeBinaryBulk(const IColumn & column, WriteBuffer & ostr, size_t offset, size_t limit) const override;
/** limit must be exactly equal to the number of serialized values.
* Precisely because of this (the inability to read back a smaller piece of the written data), Tuples cannot be used to store data in tables.
* (They can, however, be used to transfer data over the network in the Native format.)
*/
void deserializeBinary(IColumn & column, ReadBuffer & istr, size_t limit, double avg_value_size_hint) const override;
void deserializeBinaryBulk(IColumn & column, ReadBuffer & istr, size_t limit, double avg_value_size_hint) const override;
ColumnPtr createColumn() const override;
ColumnPtr createConstColumn(size_t size, const Field & field) const override;
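The exact-limit requirement follows from the layout that DataTypeTuple::serializeBinaryBulk produces (see the .cpp hunk below): the elements are written one after another into the same stream, so element boundaries can only be recovered by consuming exactly as many values as were written. Schematically:

// Stream layout for Tuple(T1, T2) with N serialized rows:
//   [ N values of T1 ][ N values of T2 ]
// Reading fewer than N values of T1 leaves the stream position inside the
// T1 block, so the start of the T2 block cannot be located.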

View File

@@ -53,14 +53,18 @@ public:
* offset + limit may be greater than the column size
* - in that case, the column is serialized up to its end.
*/
virtual void serializeBinary(const IColumn & column, WriteBuffer & ostr, size_t offset = 0, size_t limit = 0) const = 0;
virtual void serializeBinaryBulk(const IColumn & column, WriteBuffer & ostr, size_t offset, size_t limit) const = 0;
/** Read no more than limit values and append them to the end of the column.
* avg_value_size_hint - if non-zero, it may be used to avoid reallocations while reading a string column.
*/
virtual void deserializeBinary(IColumn & column, ReadBuffer & istr, size_t limit, double avg_value_size_hint) const = 0;
virtual void deserializeBinaryBulk(IColumn & column, ReadBuffer & istr, size_t limit, double avg_value_size_hint) const = 0;
/// Serialization of individual values.
/** Serialization/deserialization of individual values.
* For complex data types (like arrays) it may differ from the bulk serialization/deserialization.
* For example, if you serialize a single array, it will be represented as its size and values in a single contiguous stream,
* but if you serialize a column of arrays in bulk, the sizes and the values will be written to separate streams.
*/
/// For binary serialization there are two options. One of them works with Field.
virtual void serializeBinary(const Field & field, WriteBuffer & ostr) const = 0;
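To make the contrast concrete, a schematic sketch of the two representations for an Array value (layout only; the bulk streams correspond to serializeOffsets and the nested type's serializeBinaryBulk in the Array hunks):

// Single value, serializeBinary(Field): one contiguous stream.
//   [ size ][ v0 ][ v1 ] ... [ v(size-1) ]
// Bulk, serializeBinaryBulk on a column of arrays: two separate streams.
//   offsets stream: [ off0 ][ off1 ] ...
//   values  stream: [ v0 ][ v1 ][ v2 ] ...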

View File

@@ -25,10 +25,10 @@ public:
void serializeBinary(const IColumn & column, size_t row_num, WriteBuffer & ostr) const override { throwNoSerialization(); }
void deserializeBinary(IColumn & column, ReadBuffer & istr) const override { throwNoSerialization(); }
void serializeBinary(const IColumn & column, WriteBuffer & ostr,
size_t offset = 0, size_t limit = 0) const override { throwNoSerialization(); }
void serializeBinaryBulk(const IColumn & column, WriteBuffer & ostr,
size_t offset, size_t limit) const override { throwNoSerialization(); }
void deserializeBinary(IColumn & column, ReadBuffer & istr, size_t limit, double avg_value_size_hint) const override { throwNoSerialization(); }
void deserializeBinaryBulk(IColumn & column, ReadBuffer & istr, size_t limit, double avg_value_size_hint) const override { throwNoSerialization(); }
void serializeText(const IColumn & column, size_t row_num, WriteBuffer & ostr) const override { throwNoSerialization(); }

View File

@@ -57,7 +57,7 @@ public:
static_cast<ColumnType &>(column).getData().push_back(x);
}
void serializeBinary(const IColumn & column, WriteBuffer & ostr, size_t offset = 0, size_t limit = 0) const override
void serializeBinaryBulk(const IColumn & column, WriteBuffer & ostr, size_t offset, size_t limit) const override
{
const typename ColumnType::Container_t & x = typeid_cast<const ColumnType &>(column).getData();
@@ -69,7 +69,7 @@ public:
ostr.write(reinterpret_cast<const char *>(&x[offset]), sizeof(typename ColumnType::value_type) * limit);
}
void deserializeBinary(IColumn & column, ReadBuffer & istr, size_t limit, double avg_value_size_hint) const override
void deserializeBinaryBulk(IColumn & column, ReadBuffer & istr, size_t limit, double avg_value_size_hint) const override
{
typename ColumnType::Container_t & x = typeid_cast<ColumnType &>(column).getData();
size_t initial_size = x.size();
@@ -114,12 +114,12 @@ public:
throw Exception{"Method deserializeBinary is not supported for DataTypeVoid", ErrorCodes::NOT_IMPLEMENTED};
}
void serializeBinary(const IColumn & column, WriteBuffer & ostr, size_t offset = 0, size_t limit = 0) const override
void serializeBinaryBulk(const IColumn & column, WriteBuffer & ostr, size_t offset, size_t limit) const override
{
throw Exception{"Method serializeBinary is not supported for DataTypeVoid", ErrorCodes::NOT_IMPLEMENTED};
}
void deserializeBinary(IColumn & column, ReadBuffer & istr, size_t limit, double avg_value_size_hint) const override
void deserializeBinaryBulk(IColumn & column, ReadBuffer & istr, size_t limit, double avg_value_size_hint) const override
{
throw Exception{"Method deserializeBinary is not supported for DataTypeVoid", ErrorCodes::NOT_IMPLEMENTED};
}

View File

@@ -55,7 +55,7 @@ void NativeBlockInputStream::readData(const IDataType & type, IColumn & column,
IColumn & nested_col = *nullable_col.getNestedColumn();
IColumn & null_map = *nullable_col.getNullMapColumn();
DataTypeUInt8{}.deserializeBinary(null_map, istr, rows, 0);
DataTypeUInt8{}.deserializeBinaryBulk(null_map, istr, rows, 0);
readData(nested_type, nested_col, istr, rows);
@@ -66,7 +66,7 @@ void NativeBlockInputStream::readData(const IDataType & type, IColumn & column,
/** For arrays, the offsets must be deserialized first, and then the values.
*/
IColumn & offsets_column = *typeid_cast<ColumnArray &>(column).getOffsetsColumn();
type_arr->getOffsetsType()->deserializeBinary(offsets_column, istr, rows, 0);
type_arr->getOffsetsType()->deserializeBinaryBulk(offsets_column, istr, rows, 0);
if (offsets_column.size() != rows)
throw Exception("Cannot read all data in NativeBlockInputStream.", ErrorCodes::CANNOT_READ_ALL_DATA);
@@ -79,7 +79,7 @@ void NativeBlockInputStream::readData(const IDataType & type, IColumn & column,
typeid_cast<const ColumnArray &>(column).getOffsets()[rows - 1]);
}
else
type.deserializeBinary(column, istr, rows, 0); /// TODO Use avg_value_size_hint.
type.deserializeBinaryBulk(column, istr, rows, 0); /// TODO Use avg_value_size_hint.
if (column.size() != rows)
throw Exception("Cannot read all data in NativeBlockInputStream.", ErrorCodes::CANNOT_READ_ALL_DATA);

View File

@@ -62,7 +62,7 @@ void NativeBlockOutputStream::writeData(const IDataType & type, const ColumnPtr
const ColumnPtr & nested_col = nullable_col.getNestedColumn();
const IColumn & null_map = *nullable_col.getNullMapColumn();
DataTypeUInt8{}.serializeBinary(null_map, ostr, offset, limit);
DataTypeUInt8{}.serializeBinaryBulk(null_map, ostr, offset, limit);
writeData(nested_type, nested_col, ostr, offset, limit);
}
@@ -71,7 +71,7 @@ void NativeBlockOutputStream::writeData(const IDataType & type, const ColumnPtr
/** For arrays, the offsets must be serialized first, and then the values.
*/
const ColumnArray & column_array = typeid_cast<const ColumnArray &>(*full_column);
type_arr->getOffsetsType()->serializeBinary(*column_array.getOffsetsColumn(), ostr, offset, limit);
type_arr->getOffsetsType()->serializeBinaryBulk(*column_array.getOffsetsColumn(), ostr, offset, limit);
if (!typeid_cast<const ColumnArray &>(*full_column).getData().empty())
{
@@ -111,7 +111,7 @@ void NativeBlockOutputStream::writeData(const IDataType & type, const ColumnPtr
}
}
else
type.serializeBinary(*full_column, ostr, offset, limit);
type.serializeBinaryBulk(*full_column, ostr, offset, limit);
}

View File

@@ -80,7 +80,7 @@ void DataTypeAggregateFunction::deserializeBinary(IColumn & column, ReadBuffer &
column_concrete.getData().push_back(place);
}
void DataTypeAggregateFunction::serializeBinary(const IColumn & column, WriteBuffer & ostr, size_t offset, size_t limit) const
void DataTypeAggregateFunction::serializeBinaryBulk(const IColumn & column, WriteBuffer & ostr, size_t offset, size_t limit) const
{
const ColumnAggregateFunction & real_column = typeid_cast<const ColumnAggregateFunction &>(column);
const ColumnAggregateFunction::Container_t & vec = real_column.getData();
@@ -95,7 +95,7 @@ void DataTypeAggregateFunction::serializeBinary(const IColumn & column, WriteBuf
function->serialize(*it, ostr);
}
void DataTypeAggregateFunction::deserializeBinary(IColumn & column, ReadBuffer & istr, size_t limit, double avg_value_size_hint) const
void DataTypeAggregateFunction::deserializeBinaryBulk(IColumn & column, ReadBuffer & istr, size_t limit, double avg_value_size_hint) const
{
ColumnAggregateFunction & real_column = typeid_cast<ColumnAggregateFunction &>(column);
ColumnAggregateFunction::Container_t & vec = real_column.getData();

View File

@@ -97,7 +97,7 @@ void DataTypeArray::deserializeBinary(IColumn & column, ReadBuffer & istr) const
}
void DataTypeArray::serializeBinary(const IColumn & column, WriteBuffer & ostr, size_t offset, size_t limit) const
void DataTypeArray::serializeBinaryBulk(const IColumn & column, WriteBuffer & ostr, size_t offset, size_t limit) const
{
const ColumnArray & column_array = typeid_cast<const ColumnArray &>(column);
const ColumnArray::Offsets_t & offsets = column_array.getOffsets();
@@ -121,11 +121,11 @@ void DataTypeArray::serializeBinary(const IColumn & column, WriteBuffer & ostr,
: 0;
if (limit == 0 || nested_limit)
nested->serializeBinary(column_array.getData(), ostr, nested_offset, nested_limit);
nested->serializeBinaryBulk(column_array.getData(), ostr, nested_offset, nested_limit);
}
void DataTypeArray::deserializeBinary(IColumn & column, ReadBuffer & istr, size_t limit, double avg_value_size_hint) const
void DataTypeArray::deserializeBinaryBulk(IColumn & column, ReadBuffer & istr, size_t limit, double avg_value_size_hint) const
{
ColumnArray & column_array = typeid_cast<ColumnArray &>(column);
ColumnArray::Offsets_t & offsets = column_array.getOffsets();
@@ -136,7 +136,7 @@ void DataTypeArray::deserializeBinary(IColumn & column, ReadBuffer & istr, size_
if (last_offset < nested_column.size())
throw Exception("Nested column longer than last offset", ErrorCodes::LOGICAL_ERROR);
size_t nested_limit = last_offset - nested_column.size();
nested->deserializeBinary(nested_column, istr, nested_limit, 0);
nested->deserializeBinaryBulk(nested_column, istr, nested_limit, 0);
if (column_array.getData().size() != last_offset)
throw Exception("Cannot read all array values", ErrorCodes::CANNOT_READ_ALL_DATA);

View File

@@ -205,7 +205,7 @@ void DataTypeEnum<Type>::deserializeTextCSV(IColumn & column, ReadBuffer & istr,
}
template <typename Type>
void DataTypeEnum<Type>::serializeBinary(
void DataTypeEnum<Type>::serializeBinaryBulk(
const IColumn & column, WriteBuffer & ostr, const size_t offset, size_t limit) const
{
const auto & x = typeid_cast<const ColumnType &>(column).getData();
@@ -218,7 +218,7 @@ void DataTypeEnum<Type>::serializeBinary(
}
template <typename Type>
void DataTypeEnum<Type>::deserializeBinary(
void DataTypeEnum<Type>::deserializeBinaryBulk(
IColumn & column, ReadBuffer & istr, const size_t limit, const double avg_value_size_hint) const
{
auto & x = typeid_cast<ColumnType &>(column).getData();

View File

@@ -69,7 +69,7 @@ void DataTypeFixedString::deserializeBinary(IColumn & column, ReadBuffer & istr)
}
void DataTypeFixedString::serializeBinary(const IColumn & column, WriteBuffer & ostr, size_t offset, size_t limit) const
void DataTypeFixedString::serializeBinaryBulk(const IColumn & column, WriteBuffer & ostr, size_t offset, size_t limit) const
{
const ColumnFixedString::Chars_t & data = typeid_cast<const ColumnFixedString &>(column).getChars();
@@ -82,7 +82,7 @@ void DataTypeFixedString::serializeBinary(const IColumn & column, WriteBuffer &
}
void DataTypeFixedString::deserializeBinary(IColumn & column, ReadBuffer & istr, size_t limit, double avg_value_size_hint) const
void DataTypeFixedString::deserializeBinaryBulk(IColumn & column, ReadBuffer & istr, size_t limit, double avg_value_size_hint) const
{
ColumnFixedString::Chars_t & data = typeid_cast<ColumnFixedString &>(column).getChars();

View File

@@ -3,7 +3,7 @@
namespace DB
{
void DataTypeNull::serializeBinary(const IColumn & column, WriteBuffer & ostr, size_t offset, size_t limit) const
void DataTypeNull::serializeBinaryBulk(const IColumn & column, WriteBuffer & ostr, size_t offset, size_t limit) const
{
size_t size = column.size();
@@ -15,7 +15,7 @@ void DataTypeNull::serializeBinary(const IColumn & column, WriteBuffer & ostr, s
writeBinary(x, ostr);
}
void DataTypeNull::deserializeBinary(IColumn & column, ReadBuffer & istr, size_t limit, double avg_value_size_hint) const
void DataTypeNull::deserializeBinaryBulk(IColumn & column, ReadBuffer & istr, size_t limit, double avg_value_size_hint) const
{
ColumnNull & null_col = static_cast<ColumnNull &>(column);

View File

@@ -16,16 +16,16 @@ DataTypeNullable::DataTypeNullable(DataTypePtr nested_data_type_)
{
}
void DataTypeNullable::serializeBinary(const IColumn & column, WriteBuffer & ostr, size_t offset, size_t limit) const
void DataTypeNullable::serializeBinaryBulk(const IColumn & column, WriteBuffer & ostr, size_t offset, size_t limit) const
{
const ColumnNullable & col = static_cast<const ColumnNullable &>(column);
nested_data_type->serializeBinary(*col.getNestedColumn(), ostr, offset, limit);
nested_data_type->serializeBinaryBulk(*col.getNestedColumn(), ostr, offset, limit);
}
void DataTypeNullable::deserializeBinary(IColumn & column, ReadBuffer & istr, size_t limit, double avg_value_size_hint) const
void DataTypeNullable::deserializeBinaryBulk(IColumn & column, ReadBuffer & istr, size_t limit, double avg_value_size_hint) const
{
ColumnNullable & col = static_cast<ColumnNullable &>(column);
nested_data_type->deserializeBinary(*col.getNestedColumn(), istr, limit, avg_value_size_hint);
nested_data_type->deserializeBinaryBulk(*col.getNestedColumn(), istr, limit, avg_value_size_hint);
}

View File

@@ -74,7 +74,7 @@ void DataTypeString::deserializeBinary(IColumn & column, ReadBuffer & istr) cons
}
void DataTypeString::serializeBinary(const IColumn & column, WriteBuffer & ostr, size_t offset, size_t limit) const
void DataTypeString::serializeBinaryBulk(const IColumn & column, WriteBuffer & ostr, size_t offset, size_t limit) const
{
const ColumnString & column_string = typeid_cast<const ColumnString &>(column);
const ColumnString::Chars_t & data = column_string.getChars();
@@ -167,7 +167,7 @@ static NO_INLINE void deserializeBinarySSE2(ColumnString::Chars_t & data, Column
}
void DataTypeString::deserializeBinary(IColumn & column, ReadBuffer & istr, size_t limit, double avg_value_size_hint) const
void DataTypeString::deserializeBinaryBulk(IColumn & column, ReadBuffer & istr, size_t limit, double avg_value_size_hint) const
{
ColumnString & column_string = typeid_cast<ColumnString &>(column);
ColumnString::Chars_t & data = column_string.getChars();

View File

@@ -216,14 +216,14 @@ void DataTypeTuple::deserializeTextCSV(IColumn & column, ReadBuffer & istr, cons
});
}
void DataTypeTuple::serializeBinary(const IColumn & column, WriteBuffer & ostr, size_t offset, size_t limit) const
void DataTypeTuple::serializeBinaryBulk(const IColumn & column, WriteBuffer & ostr, size_t offset, size_t limit) const
{
const ColumnTuple & real_column = static_cast<const ColumnTuple &>(column);
for (size_t i = 0, size = elems.size(); i < size; ++i)
NativeBlockOutputStream::writeData(*elems[i], real_column.getData().safeGetByPosition(i).column, ostr, offset, limit);
}
void DataTypeTuple::deserializeBinary(IColumn & column, ReadBuffer & istr, size_t limit, double avg_value_size_hint) const
void DataTypeTuple::deserializeBinaryBulk(IColumn & column, ReadBuffer & istr, size_t limit, double avg_value_size_hint) const
{
ColumnTuple & real_column = static_cast<ColumnTuple &>(column);
for (size_t i = 0, size = elems.size(); i < size; ++i)

View File

@@ -39,7 +39,7 @@
WriteBufferFromOStream out_buf(ostr);
stopwatch.restart();
data_type.serializeBinary(*column, out_buf);
data_type.serializeBinaryBulk(*column, out_buf);
stopwatch.stop();
std::cout << "Writing, elapsed: " << stopwatch.elapsedSeconds() << std::endl;
@@ -52,7 +52,7 @@ try
ReadBufferFromIStream in_buf(istr);
stopwatch.restart();
data_type.deserializeBinary(*column, in_buf, n, 0);
data_type.deserializeBinaryBulk(*column, in_buf, n, 0);
stopwatch.stop();
std::cout << "Reading, elapsed: " << stopwatch.elapsedSeconds() << std::endl;

View File

@@ -27,7 +27,7 @@ int main(int argc, char ** argv)
WriteBufferFromOStream out_buf(ostr);
stopwatch.restart();
data_type.serializeBinary(*column, out_buf);
data_type.serializeBinaryBulk(*column, out_buf);
stopwatch.stop();
std::cout << "Elapsed: " << stopwatch.elapsedSeconds() << std::endl;

View File

@@ -72,7 +72,7 @@ public:
size_t read(size_t rows)
{
ColumnPtr column = type->createColumn();
type->deserializeBinary(*column, uncompressed_hashing_buf, rows, 0);
type->deserializeBinaryBulk(*column, uncompressed_hashing_buf, rows, 0);
return column->size();
}

View File

@@ -417,7 +417,7 @@ void MergeTreeReader::readData(const String & name, const IDataType & type, ICol
Stream & stream = *(streams.at(filename));
stream.seekToMark(from_mark);
IColumn & col8 = *(nullable_col.getNullMapColumn());
DataTypeUInt8{}.deserializeBinary(col8, *stream.data_buffer, max_rows_to_read, 0);
DataTypeUInt8{}.deserializeBinaryBulk(col8, *stream.data_buffer, max_rows_to_read, 0);
/// Then read data.
readData(name, nested_type, nested_col, from_mark, max_rows_to_read, level, read_offsets);
@@ -483,7 +483,7 @@ void MergeTreeReader::readData(const String & name, const IDataType & type, ICol
double & avg_value_size_hint = avg_value_size_hints[name];
stream.seekToMark(from_mark);
type.deserializeBinary(column, *stream.data_buffer, max_rows_to_read, avg_value_size_hint);
type.deserializeBinaryBulk(column, *stream.data_buffer, max_rows_to_read, avg_value_size_hint);
/// Compute the hint for the average value size.
size_t column_size = column.size();
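For context, avg_value_size_hint is the feedback loop mentioned in IDataType.h: after each read, the reader re-estimates the average value size from the column it just read, so the next deserializeBinaryBulk call can pre-reserve its buffers. A sketch of the update (hypothetical smoothing; the exact formula lives in the surrounding MergeTreeReader code, not shown in this hunk):

// After reading: refresh the per-column hint from the observed sizes.
// (Illustrative only - the real code applies damping to the estimate.)
if (column_size)
    avg_value_size_hint = static_cast<double>(column.byteSize()) / column_size;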

View File

@@ -170,7 +170,7 @@ void IMergedBlockOutputStream::writeDataImpl(
writeIntBinary(stream.compressed.offset(), stream.marks);
}
DataTypeUInt8{}.serializeBinary(*(nullable_col.getNullMapColumn()), stream.compressed);
DataTypeUInt8{}.serializeBinaryBulk(*(nullable_col.getNullMapColumn()), stream.compressed);
/// So that instead of marks pointing to the end of a compressed block, there are marks pointing to the beginning of the next one.
stream.compressed.nextIfAtEnd();
@@ -253,7 +253,7 @@ void IMergedBlockOutputStream::writeDataImpl(
writeIntBinary(stream.compressed.offset(), stream.marks);
}
type.serializeBinary(column, stream.compressed, prev_mark, limit);
type.serializeBinaryBulk(column, stream.compressed, prev_mark, limit);
/// So that instead of marks pointing to the end of a compressed block, there are marks pointing to the beginning of the next one.
stream.compressed.nextIfAtEnd();

View File

@@ -359,7 +359,7 @@ void LogBlockInputStream::readData(const String & name, const IDataType & type,
ColumnNullable & nullable_col = static_cast<ColumnNullable &>(column);
IColumn & nested_col = *nullable_col.getNestedColumn();
DataTypeUInt8{}.deserializeBinary(*nullable_col.getNullMapColumn(),
DataTypeUInt8{}.deserializeBinaryBulk(*nullable_col.getNullMapColumn(),
streams[name + DBMS_STORAGE_LOG_DATA_BINARY_NULL_MAP_EXTENSION]->compressed, max_rows_to_read, 0);
/// Then read data.
readData(name, nested_type, nested_col, max_rows_to_read, level, read_offsets);
@@ -384,7 +384,7 @@ void LogBlockInputStream::readData(const String & name, const IDataType & type,
level + 1);
}
else
type.deserializeBinary(column, streams[name]->compressed, max_rows_to_read, 0); /// TODO Use avg_value_size_hint.
type.deserializeBinaryBulk(column, streams[name]->compressed, max_rows_to_read, 0); /// TODO Use avg_value_size_hint.
}
@@ -490,8 +490,7 @@ void LogBlockOutputStream::writeData(const String & name, const IDataType & type
out_null_marks.emplace_back(storage.files[filename].column_index, mark);
DataTypeUInt8{}.serializeBinary(*nullable_col.getNullMapColumn(),
streams[filename]->compressed);
DataTypeUInt8{}.serializeBinaryBulk(*nullable_col.getNullMapColumn(), streams[filename]->compressed);
streams[filename]->compressed.next();
/// Then write data.
@@ -527,7 +526,7 @@ void LogBlockOutputStream::writeData(const String & name, const IDataType & type
out_marks.push_back(std::make_pair(storage.files[name].column_index, mark));
type.serializeBinary(column, streams[name]->compressed);
type.serializeBinaryBulk(column, streams[name]->compressed);
streams[name]->compressed.next();
}
}

View File

@@ -300,7 +300,7 @@ void TinyLogBlockInputStream::readData(const String & name, const IDataType & ty
IColumn & nested_col = *nullable_col.getNestedColumn();
/// First read from the null map.
DataTypeUInt8{}.deserializeBinary(*nullable_col.getNullMapColumn(),
DataTypeUInt8{}.deserializeBinaryBulk(*nullable_col.getNullMapColumn(),
streams[name + DBMS_STORAGE_LOG_DATA_BINARY_NULL_MAP_EXTENSION]->compressed, limit, 0);
/// Then read data.
@@ -328,7 +328,7 @@ void TinyLogBlockInputStream::readData(const String & name, const IDataType & ty
}
}
else
type.deserializeBinary(column, streams[name]->compressed, limit, 0); /// TODO Use avg_value_size_hint.
type.deserializeBinaryBulk(column, streams[name]->compressed, limit, 0); /// TODO Use avg_value_size_hint.
}
@@ -372,7 +372,7 @@ void TinyLogBlockOutputStream::writeData(const String & name, const IDataType &
const ColumnNullable & nullable_col = static_cast<const ColumnNullable &>(column);
const IColumn & nested_col = *nullable_col.getNestedColumn();
DataTypeUInt8{}.serializeBinary(*nullable_col.getNullMapColumn(),
DataTypeUInt8{}.serializeBinaryBulk(*nullable_col.getNullMapColumn(),
streams[name + DBMS_STORAGE_LOG_DATA_BINARY_NULL_MAP_EXTENSION]->compressed);
/// Then write data.
@@ -394,7 +394,7 @@ void TinyLogBlockOutputStream::writeData(const String & name, const IDataType &
writeData(name, *type_arr->getNestedType(), typeid_cast<const ColumnArray &>(column).getData(), offset_columns, level + 1);
}
else
type.serializeBinary(column, streams[name]->compressed);
type.serializeBinaryBulk(column, streams[name]->compressed);
}