#include "MergeTreeDataPartWide.h"

#include <optional>

#include <IO/ReadHelpers.h>
#include <IO/WriteHelpers.h>
#include <Compression/CompressedReadBuffer.h>
#include <Compression/CompressedWriteBuffer.h>
#include <IO/ReadBufferFromString.h>
#include <IO/WriteBufferFromString.h>
#include <IO/ReadBufferFromFile.h>
#include <IO/HashingWriteBuffer.h>
#include <Core/Defines.h>
#include <Common/SipHash.h>
#include <Common/escapeForFileName.h>
#include <Common/StringUtils/StringUtils.h>
#include <Common/localBackup.h>
#include <Compression/CompressionInfo.h>
#include <Storages/MergeTree/MergeTreeData.h>
#include <Poco/File.h>
#include <Poco/Path.h>
#include <Poco/DirectoryIterator.h>
#include <common/logger_useful.h>
#include <common/JSON.h>

#include <Storages/MergeTree/MergeTreeReaderWide.h>
#include <Storages/MergeTree/IMergeTreeReader.h>
#include <Storages/MergeTree/MergeTreeDataPartWriterWide.h>
#include <Storages/MergeTree/IMergeTreeDataPartWriter.h>

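/// A part in the Wide format stores each column in its own files: a <stream>.bin
/// file with compressed data plus a <stream>.mrk/<stream>.mrk2 file with marks
/// pointing into it, one pair per substream (see IDataType::enumerateStreams).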
namespace DB
{

namespace ErrorCodes
{
    extern const int FILE_DOESNT_EXIST;
    extern const int NO_FILE_IN_DATA_PART;
    extern const int EXPECTED_END_OF_FILE;
    extern const int CORRUPTED_DATA;
    extern const int NOT_FOUND_EXPECTED_DATA_PART;
    extern const int BAD_SIZE_OF_FILE_IN_DATA_PART;
    extern const int BAD_TTL_FILE;
    extern const int CANNOT_UNLINK;
    extern const int CANNOT_READ_ALL_DATA;
    extern const int LOGICAL_ERROR;
}

// static ReadBufferFromFile openForReading(const String & path)
// {
//     return ReadBufferFromFile(path, std::min(static_cast<Poco::File::FileSize>(DBMS_DEFAULT_BUFFER_SIZE), Poco::File(path).getSize()));
// }

MergeTreeDataPartWide::MergeTreeDataPartWide(
    MergeTreeData & storage_,
    const String & name_,
    const DiskPtr & disk_,
    const std::optional<String> & relative_path_)
    : IMergeTreeDataPart(storage_, name_, disk_, relative_path_)
{
}

MergeTreeDataPartWide::MergeTreeDataPartWide(
    const MergeTreeData & storage_,
    const String & name_,
    const MergeTreePartInfo & info_,
    const DiskPtr & disk_,
    const std::optional<String> & relative_path_)
    : IMergeTreeDataPart(storage_, name_, info_, disk_, relative_path_)
{
}

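/// getReader() and getWriter() below are the format-specific factories: the rest
/// of the MergeTree code works through IMergeTreeReader/IMergeTreeDataPartWriter
/// and does not depend on the Wide layout directly.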
IMergeTreeDataPart::MergeTreeReaderPtr MergeTreeDataPartWide::getReader(
    const NamesAndTypesList & columns_to_read,
    const MarkRanges & mark_ranges,
    UncompressedCache * uncompressed_cache,
    MarkCache * mark_cache,
    const MergeTreeReaderSettings & reader_settings,
    const ValueSizeMap & avg_value_size_hints,
    const ReadBufferFromFileBase::ProfileCallback & profile_callback) const
{
    return std::make_unique<MergeTreeReaderWide>(
        shared_from_this(), columns_to_read, uncompressed_cache,
        mark_cache, mark_ranges, reader_settings, avg_value_size_hints, profile_callback);
}

IMergeTreeDataPart::MergeTreeWriterPtr MergeTreeDataPartWide::getWriter(
    const NamesAndTypesList & columns_list,
    const std::vector<MergeTreeIndexPtr> & indices_to_recalc,
    const CompressionCodecPtr & default_codec,
    const MergeTreeWriterSettings & writer_settings,
    const MergeTreeIndexGranularity & computed_index_granularity) const
{
    return std::make_unique<MergeTreeDataPartWriterWide>(
        getFullPath(), storage, columns_list, indices_to_recalc,
        index_granularity_info.marks_file_extension,
        default_codec, writer_settings, computed_index_granularity);
}

/// Takes into account the fact that several columns can share their .size substreams.
/// When calculating totals, these should be counted only once.
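/// For example, for a column "n" of type Nested(x UInt8, y UInt8) the streams are
/// "n.x", "n.y" and the shared "n.size0"; without the processed_substreams set,
/// n.size0.bin would be counted towards both n.x and n.y.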
ColumnSize MergeTreeDataPartWide::getColumnSizeImpl(
    const String & column_name, const IDataType & type, std::unordered_set<String> * processed_substreams) const
{
    ColumnSize size;
    if (checksums.empty())
        return size;

    type.enumerateStreams([&](const IDataType::SubstreamPath & substream_path)
    {
        String file_name = IDataType::getFileNameForStream(column_name, substream_path);

        if (processed_substreams && !processed_substreams->insert(file_name).second)
            return;

        auto bin_checksum = checksums.files.find(file_name + ".bin");
        if (bin_checksum != checksums.files.end())
        {
            size.data_compressed += bin_checksum->second.file_size;
            size.data_uncompressed += bin_checksum->second.uncompressed_size;
        }

        auto mrk_checksum = checksums.files.find(file_name + index_granularity_info.marks_file_extension);
        if (mrk_checksum != checksums.files.end())
            size.marks += mrk_checksum->second.file_size;
    }, {});

    return size;
}

/** Returns the name of a column with minimum compressed size (as returned by getColumnSize()).
  * If no checksums are present, returns the name of the first physically existing column.
  */
String MergeTreeDataPartWide::getColumnNameWithMinumumCompressedSize() const
{
    const auto & storage_columns = storage.getColumns().getAllPhysical();
    const std::string * minimum_size_column = nullptr;
    UInt64 minimum_size = std::numeric_limits<UInt64>::max();

    for (const auto & column : storage_columns)
    {
        if (!hasColumnFiles(column.name, *column.type))
            continue;

        const auto size = getColumnSizeImpl(column.name, *column.type, nullptr).data_compressed;
        if (size < minimum_size)
        {
            minimum_size = size;
            minimum_size_column = &column.name;
        }
    }

    if (!minimum_size_column)
        throw Exception("Could not find a column of minimum size in MergeTree, part " + getFullPath(), ErrorCodes::LOGICAL_ERROR);

    return *minimum_size_column;
}

ColumnSize MergeTreeDataPartWide::getTotalColumnsSize() const
{
    ColumnSize totals;
    std::unordered_set<String> processed_substreams;
    for (const NameAndTypePair & column : columns)
    {
        ColumnSize size = getColumnSizeImpl(column.name, *column.type, &processed_substreams);
        totals.add(size);
    }
    return totals;
}

ColumnSize MergeTreeDataPartWide::getColumnSize(const String & column_name, const IDataType & type) const
{
    return getColumnSizeImpl(column_name, type, nullptr);
}

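/// Mark file layout, as parsed below: each mark is a pair of UInt64 values
/// (offset_in_compressed_file, offset_in_decompressed_block); with adaptive
/// granularity (.mrk2) a third UInt64 follows with the number of rows in the granule.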
void MergeTreeDataPartWide::loadIndexGranularity()
{
    String full_path = getFullPath();
    index_granularity_info.changeGranularityIfRequired(full_path);

    if (columns.empty())
        throw Exception("No columns in part " + name, ErrorCodes::NO_FILE_IN_DATA_PART);

    /// We can use any column: all columns of a part share the same number of marks.
    std::string marks_file_path = index_granularity_info.getMarksFilePath(full_path + escapeForFileName(columns.front().name));
    if (!Poco::File(marks_file_path).exists())
        throw Exception("Marks file '" + marks_file_path + "' doesn't exist", ErrorCodes::NO_FILE_IN_DATA_PART);

    size_t marks_file_size = Poco::File(marks_file_path).getSize();

    if (!index_granularity_info.is_adaptive)
    {
        size_t marks_count = marks_file_size / index_granularity_info.mark_size_in_bytes;
        index_granularity.resizeWithFixedGranularity(marks_count, index_granularity_info.fixed_index_granularity); /// all the same
    }
    else
    {
        ReadBufferFromFile buffer(marks_file_path, marks_file_size, -1);
        while (!buffer.eof())
        {
            buffer.seek(sizeof(size_t) * 2, SEEK_CUR); /// skip offset_in_compressed_file and offset_in_decompressed_block
            size_t granularity;
            readIntBinary(granularity, buffer);
            index_granularity.appendMark(granularity);
        }

        if (index_granularity.getMarksCount() * index_granularity_info.mark_size_in_bytes != marks_file_size)
            throw Exception("Cannot read all marks from file " + marks_file_path, ErrorCodes::CANNOT_READ_ALL_DATA);
    }

    index_granularity.setInitialized();
}

MergeTreeDataPartWide::~MergeTreeDataPartWide()
{
    removeIfNeeded();
}

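/// Adds the on-disk size of every physical column's .bin files to column_to_size.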
void MergeTreeDataPartWide::accumulateColumnSizes(ColumnToSize & column_to_size) const
{
    std::shared_lock<std::shared_mutex> part_lock(columns_lock);

    for (const NameAndTypePair & name_type : storage.getColumns().getAllPhysical())
    {
        IDataType::SubstreamPath path;
        name_type.type->enumerateStreams([&](const IDataType::SubstreamPath & substream_path)
        {
            Poco::File bin_file(getFullPath() + IDataType::getFileNameForStream(name_type.name, substream_path) + ".bin");
            if (bin_file.exists())
                column_to_size[name_type.name] += bin_file.getSize();
        }, path);
    }
}

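/// Two checking modes: if checksums.txt was loaded, verify that every expected
/// file has a checksum entry and that file sizes match it; otherwise inspect
/// the files on disk directly.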
void MergeTreeDataPartWide::checkConsistency(bool require_part_metadata) const
{
    String path = getFullPath();

    if (!checksums.empty())
    {
        if (!storage.primary_key_columns.empty() && !checksums.files.count("primary.idx"))
            throw Exception("No checksum for primary.idx", ErrorCodes::NO_FILE_IN_DATA_PART);

        if (require_part_metadata)
        {
            for (const NameAndTypePair & name_type : columns)
            {
                IDataType::SubstreamPath stream_path;
                name_type.type->enumerateStreams([&](const IDataType::SubstreamPath & substream_path)
                {
                    String file_name = IDataType::getFileNameForStream(name_type.name, substream_path);
                    String mrk_file_name = file_name + index_granularity_info.marks_file_extension;
                    String bin_file_name = file_name + ".bin";
                    if (!checksums.files.count(mrk_file_name))
                        throw Exception("No " + mrk_file_name + " file checksum for column " + name_type.name + " in part " + path,
                            ErrorCodes::NO_FILE_IN_DATA_PART);
                    if (!checksums.files.count(bin_file_name))
                        throw Exception("No " + bin_file_name + " file checksum for column " + name_type.name + " in part " + path,
                            ErrorCodes::NO_FILE_IN_DATA_PART);
                }, stream_path);
            }
        }

        if (storage.format_version >= MERGE_TREE_DATA_MIN_FORMAT_VERSION_WITH_CUSTOM_PARTITIONING)
        {
            if (!checksums.files.count("count.txt"))
                throw Exception("No checksum for count.txt", ErrorCodes::NO_FILE_IN_DATA_PART);

            if (storage.partition_key_expr && !checksums.files.count("partition.dat"))
                throw Exception("No checksum for partition.dat", ErrorCodes::NO_FILE_IN_DATA_PART);

            if (!isEmpty())
            {
                for (const String & col_name : storage.minmax_idx_columns)
                {
                    if (!checksums.files.count("minmax_" + escapeForFileName(col_name) + ".idx"))
                        throw Exception("No minmax idx file checksum for column " + col_name, ErrorCodes::NO_FILE_IN_DATA_PART);
                }
            }
        }

        checksums.checkSizes(path);
    }
    else
    {
        auto check_file_not_empty = [&path](const String & file_path)
        {
            Poco::File file(file_path);
            if (!file.exists() || file.getSize() == 0)
                throw Exception("Part " + path + " is broken: " + file_path + " is empty", ErrorCodes::BAD_SIZE_OF_FILE_IN_DATA_PART);
            return file.getSize();
        };

        /// Check that the primary key index is not empty.
        if (!storage.primary_key_columns.empty())
            check_file_not_empty(path + "primary.idx");

        if (storage.format_version >= MERGE_TREE_DATA_MIN_FORMAT_VERSION_WITH_CUSTOM_PARTITIONING)
        {
            check_file_not_empty(path + "count.txt");

            if (storage.partition_key_expr)
                check_file_not_empty(path + "partition.dat");

            for (const String & col_name : storage.minmax_idx_columns)
                check_file_not_empty(path + "minmax_" + escapeForFileName(col_name) + ".idx");
        }

        /// Check that all marks are nonempty and have the same size.
        std::optional<UInt64> marks_size;
        for (const NameAndTypePair & name_type : columns)
        {
            name_type.type->enumerateStreams([&](const IDataType::SubstreamPath & substream_path)
            {
                Poco::File file(path + IDataType::getFileNameForStream(name_type.name, substream_path) + index_granularity_info.marks_file_extension);

                /// A missing file is OK when a new column was added.
                if (file.exists())
                {
                    UInt64 file_size = file.getSize();

                    if (!file_size)
                        throw Exception("Part " + path + " is broken: " + file.path() + " is empty.",
                            ErrorCodes::BAD_SIZE_OF_FILE_IN_DATA_PART);

                    if (!marks_size)
                        marks_size = file_size;
                    else if (file_size != *marks_size)
                        throw Exception("Part " + path + " is broken: marks have different sizes.",
                            ErrorCodes::BAD_SIZE_OF_FILE_IN_DATA_PART);
                }
            });
        }
    }
}

bool MergeTreeDataPartWide::hasColumnFiles(const String & column_name, const IDataType & type) const
{
    bool res = true;

    type.enumerateStreams([&](const IDataType::SubstreamPath & substream_path)
    {
        String file_name = IDataType::getFileNameForStream(column_name, substream_path);

        auto bin_checksum = checksums.files.find(file_name + ".bin");
        auto mrk_checksum = checksums.files.find(file_name + index_granularity_info.marks_file_extension);

        if (bin_checksum == checksums.files.end() || mrk_checksum == checksums.files.end())
            res = false;
    }, {});

    return res;
}

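/// Builds a map from old file names to new ones for an ALTER; an empty target
/// name means the file should be removed.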
NameToNameMap MergeTreeDataPartWide::createRenameMapForAlter(
    AlterAnalysisResult & analysis_result,
    const NamesAndTypesList & old_columns) const
{
    const auto & part_mrk_file_extension = index_granularity_info.marks_file_extension;
    NameToNameMap rename_map;

    for (const auto & index_name : analysis_result.removed_indices)
    {
        rename_map["skp_idx_" + index_name + ".idx"] = "";
        rename_map["skp_idx_" + index_name + part_mrk_file_extension] = "";
    }

    /// Collect counts for shared streams of different columns. As an example, Nested columns have a shared stream with array sizes.
    std::map<String, size_t> stream_counts;
    for (const NameAndTypePair & column : old_columns)
    {
        column.type->enumerateStreams([&](const IDataType::SubstreamPath & substream_path)
        {
            ++stream_counts[IDataType::getFileNameForStream(column.name, substream_path)];
        }, {});
    }

    for (const auto & column : analysis_result.removed_columns)
    {
        if (hasColumnFiles(column.name, *column.type))
        {
            column.type->enumerateStreams([&](const IDataType::SubstreamPath & substream_path)
            {
                String file_name = IDataType::getFileNameForStream(column.name, substream_path);

                /// Delete files if they are no longer shared with another column.
                if (--stream_counts[file_name] == 0)
                {
                    rename_map[file_name + ".bin"] = "";
                    rename_map[file_name + part_mrk_file_extension] = "";
                }
            }, {});
        }
    }

    if (!analysis_result.conversions.empty())
    {
        /// Give proper names for temporary columns with conversion results.
        NamesWithAliases projection;
        projection.reserve(analysis_result.conversions.size());
        for (const auto & source_and_expression : analysis_result.conversions)
        {
            /// Column name for temporary filenames before renaming. NOTE: this is unnecessarily tricky.
            const auto & source_name = source_and_expression.first;
            String temporary_column_name = source_name + " converting";

            projection.emplace_back(source_and_expression.second, temporary_column_name);

            /// After conversion, we need to rename temporary files back to the original names.
            analysis_result.new_types.at(source_name)->enumerateStreams(
                [&](const IDataType::SubstreamPath & substream_path)
                {
                    /// Skip array sizes, because they cannot be modified in ALTER.
                    if (!substream_path.empty() && substream_path.back().type == IDataType::Substream::ArraySizes)
                        return;

                    String original_file_name = IDataType::getFileNameForStream(source_name, substream_path);
                    String temporary_file_name = IDataType::getFileNameForStream(temporary_column_name, substream_path);

                    rename_map[temporary_file_name + ".bin"] = original_file_name + ".bin";
                    rename_map[temporary_file_name + part_mrk_file_extension] = original_file_name + part_mrk_file_extension;
                }, {});
        }

        analysis_result.expression->add(ExpressionAction::project(projection));
    }

    return rename_map;
}

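/// The file of the first enumerated substream is used as the representative
/// file name for the column.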
String MergeTreeDataPartWide::getFileNameForColumn(const NameAndTypePair & column) const
{
    String filename;
    column.type->enumerateStreams([&](const IDataType::SubstreamPath & substream_path)
    {
        if (filename.empty())
            filename = IDataType::getFileNameForStream(column.name, substream_path);
    });
    return filename;
}

}