Mirror of https://github.com/ClickHouse/ClickHouse.git, synced 2024-12-13 09:52:38 +00:00
serializations: fix mutations
This commit is contained in:
parent 76613a5dd1, commit a06f2fed9a
@@ -66,11 +66,6 @@ SerializationInfoPtr SerializationInfoBuilder::build()
 
 SerializationInfoPtr SerializationInfoBuilder::buildFrom(const SerializationInfo & other)
 {
-    if (info->number_of_rows && info->number_of_rows != other.number_of_rows)
-        throw Exception(ErrorCodes::LOGICAL_ERROR,
-            "Cannot update SerializationInfo with {} rows by SerializationInfo with {} rows",
-            info->number_of_rows, other.number_of_rows);
-
     for (auto & [name, column_info] : other.columns)
     {
         auto it = info->columns.find(name);
@@ -1009,7 +1009,7 @@ void IMergeTreeDataPart::loadUUID()
     }
 }
 
-void IMergeTreeDataPart::loadSerializationInfo()
+void IMergeTreeDataPart::loadSerializationInfo() const
 {
     String path = getFullRelativePath() + SERIALIZATION_FILE_NAME;
     if (volume->getDisk()->exists(path))
@@ -462,7 +462,7 @@ private:
     /// Loads ttl infos in json format from file ttl.txt. If file doesn't exists assigns ttl infos with all zeros
     void loadTTLInfos();
 
-    void loadSerializationInfo();
+    void loadSerializationInfo() const;
 
     void loadPartitionAndMinMaxIndex();
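A note on the const qualifier added to loadSerializationInfo() in the two hunks above: in C++ a member function can be called through a reference or pointer to a const object only if it is const-qualified, and any member it writes must be declared mutable. The sketch below is a minimal, hypothetical illustration of that rule (Part, Info and loadInfo are stand-ins, not the actual ClickHouse types); it mirrors why a loader that runs against a read-only source part, as during a mutation, needs the const qualifier.

    #include <memory>
    #include <string>

    struct Info { std::string text; };

    // Hypothetical, simplified stand-in for a data part (not the real IMergeTreeDataPart).
    struct Part
    {
        // 'mutable' lets a const-qualified loader cache its result on a logically const part.
        mutable std::shared_ptr<Info> info;

        void loadInfo() const    // without 'const' the call below would not compile
        {
            info = std::make_shared<Info>(Info{"loaded from disk"});
        }
    };

    void processSourcePart(const Part & source_part)
    {
        source_part.loadInfo();  // OK only because loadInfo() is const-qualified
    }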
@@ -1253,6 +1253,7 @@ MergeTreeData::MutableDataPartPtr MergeTreeDataMergerMutator::mutatePartToTempor
     new_data_part->uuid = future_part.uuid;
     new_data_part->is_temp = true;
     new_data_part->ttl_infos = source_part->ttl_infos;
+    new_data_part->serialization_info = source_part->serialization_info;
 
     /// It shouldn't be changed by mutation.
     new_data_part->index_granularity_info = source_part->index_granularity_info;
@@ -155,8 +155,9 @@ size_t MergeTreeReaderCompact::readRows(size_t from_mark, bool continue_reading,
         if (!column_positions[i])
            continue;
 
+        auto column_from_part = getColumnFromPart(*column_it);
         if (res_columns[i] == nullptr)
-            res_columns[i] = getColumnFromPart(*column_it).type->createColumn(*serializations[column_it->name]);
+            res_columns[i] = column_from_part.type->createColumn(*serializations.at(column_from_part.name));
     }
 
     while (read_rows < max_rows_to_read)
@@ -234,7 +235,7 @@ void MergeTreeReaderCompact::readData(
         auto name_in_storage = name_and_type.getNameInStorage();
         auto type_in_storage = name_and_type.getTypeInStorage();
 
-        const auto & serialization = serializations[name_in_storage];
+        const auto & serialization = serializations.at(name_in_storage);
         ColumnPtr temp_column = type_in_storage->createColumn(*serialization);
 
         serialization->deserializeBinaryBulkStatePrefix(deserialize_settings, state);
@@ -243,7 +244,7 @@ void MergeTreeReaderCompact::readData(
     }
     else
     {
-        const auto & serialization = serializations[name];
+        const auto & serialization = serializations.at(name);
         serialization->deserializeBinaryBulkStatePrefix(deserialize_settings, state);
         serialization->deserializeBinaryBulkWithMultipleStreams(column, rows_to_read, deserialize_settings, state, nullptr);
     }
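The replacement of serializations[name] with serializations.at(name) in the three hunks above reflects a general property of the standard associative containers: operator[] is non-const and silently inserts a default-constructed value for a missing key, while at() is const-qualified and throws std::out_of_range instead. A minimal sketch, assuming a std::map keyed by column name (the real container type in MergeTreeReaderCompact is not shown in this diff):

    #include <map>
    #include <memory>
    #include <string>

    struct Serialization {};
    using SerializationPtr = std::shared_ptr<Serialization>;
    using SerializationByName = std::map<std::string, SerializationPtr>;  // illustrative type

    SerializationPtr getSerialization(const SerializationByName & serializations, const std::string & name)
    {
        // serializations[name] would not compile on a const map, and on a non-const map
        // it would insert an empty entry for an unknown column; at() does neither.
        return serializations.at(name);
    }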
@@ -100,9 +100,11 @@ IMergeTreeDataPart::Checksums checkDataPart(
     };
 
     auto serialization_info = std::make_shared<SerializationInfo>();
+    auto serialization_path = path + IMergeTreeDataPart::SERIALIZATION_FILE_NAME;
 
     if (disk->exists(serialization_path))
     {
-        auto serialization_file = disk->readFile(path + IMergeTreeDataPart::SERIALIZATION_FILE_NAME);
+        auto serialization_file = disk->readFile(serialization_path);
         serialization_info->readText(*serialization_file);
     }
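The last hunk makes the standalone part check read the serialization file through the precomputed path and only when the file is present. The pattern is a plain existence guard; the sketch below shows it with the standard <filesystem> and <fstream> facilities and a literal file name standing in for SERIALIZATION_FILE_NAME, rather than the IDisk interface used in the diff:

    #include <filesystem>
    #include <fstream>
    #include <sstream>
    #include <string>

    // Read the part's serialization metadata only if the file is actually there;
    // parts written without it simply yield an empty (default) result.
    std::string readSerializationInfoIfPresent(const std::filesystem::path & part_dir)
    {
        const auto serialization_path = part_dir / "serialization.json";  // assumed file name
        if (!std::filesystem::exists(serialization_path))
            return {};

        std::ifstream in(serialization_path);
        std::ostringstream buf;
        buf << in.rdbuf();
        return buf.str();
    }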