mirror of https://github.com/ClickHouse/ClickHouse.git
synced 2024-11-21 15:12:02 +00:00

clang-tidy check performance-move-const-arg fix

commit b1a956c5f1 (parent 1f5837359e)
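For context: the clang-tidy check `performance-move-const-arg` warns when `std::move` is applied to a const object, or when the result of `std::move` is handed to a destination that cannot actually move it (for example, a parameter taken by const reference); the "move" then silently degrades into a copy and only obscures intent. A minimal illustration of the pattern, not taken from this patch:

```cpp
#include <string>
#include <utility>
#include <vector>

void store(std::vector<std::string> & dest, const std::string & name)
{
    // std::move on a const lvalue yields `const std::string &&`, which binds to the
    // copy constructor, so this still copies -- clang-tidy flags it.
    dest.push_back(std::move(name));

    // Equivalent and clearer: pass the value and let the copy be explicit.
    dest.push_back(name);
}
```

The hunks below either drop such no-op `std::move` calls, make the source non-const so the move is real, or mark an intentional move with `/// NOLINT`.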
@@ -34,7 +34,7 @@ Checks: '-*,
     performance-trivially-destructible,
     performance-unnecessary-copy-initialization,
     performance-noexcept-move-constructor,
-    # performance-move-const-arg,
+    performance-move-const-arg,

     readability-avoid-const-params-in-decls,
     readability-const-return-type,
@@ -26,3 +26,27 @@ void insertAtEnd(std::vector<T> & dest, std::vector<T> && src)
     dest.insert(dest.end(), std::make_move_iterator(src.begin()), std::make_move_iterator(src.end()));
     src.clear();
 }
+
+template <typename Container>
+void insertAtEnd(Container & dest, const Container & src)
+{
+    if (src.empty())
+        return;
+
+    dest.insert(dest.end(), src.begin(), src.end());
+}
+
+template <typename Container>
+void insertAtEnd(Container & dest, Container && src)
+{
+    if (src.empty())
+        return;
+    if (dest.empty())
+    {
+        dest.swap(src);
+        return;
+    }
+
+    dest.insert(dest.end(), std::make_move_iterator(src.begin()), std::make_move_iterator(src.end()));
+    src.clear();
+}
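The two overloads added above generalize `insertAtEnd` beyond `std::vector`: the const-lvalue overload copies the source, the rvalue overload moves its elements (or simply swaps when the destination is empty). A usage sketch, illustrative only:

```cpp
#include <base/insertAtEnd.h>  // the header extended by the hunk above

#include <string>
#include <utility>
#include <vector>

void example()
{
    std::vector<std::string> dest = {"a"};
    std::vector<std::string> extra = {"b", "c"};

    insertAtEnd(dest, extra);             // copies; `extra` keeps its elements
    insertAtEnd(dest, std::move(extra));  // moves; `extra` is cleared afterwards
}
```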
@@ -165,7 +165,7 @@ std::variant<int, ConnectionEstablisher::TryResult> ConnectionEstablisherAsync::
     fiber = std::move(fiber).resume();

     if (exception)
-        std::rethrow_exception(std::move(exception));
+        std::rethrow_exception(exception);

     if (connection_establisher.isFinished())
     {
@@ -363,7 +363,7 @@ bool HedgedConnections::resumePacketReceiver(const HedgedConnections::ReplicaLoc
     else if (std::holds_alternative<std::exception_ptr>(res))
     {
         finishProcessReplica(replica_state, true);
-        std::rethrow_exception(std::move(std::get<std::exception_ptr>(res)));
+        std::rethrow_exception(std::get<std::exception_ptr>(res));
     }

     return false;
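`std::rethrow_exception` takes its `std::exception_ptr` by value and never returns, and copying an `exception_ptr` is only a reference-count bump, so wrapping the argument in `std::move` buys nothing — and when the source is const, as with the `std::variant` alternative above, it is a plain copy anyway. A small sketch, illustrative only:

```cpp
#include <exception>

void rethrowIfAny(const std::exception_ptr & stored)
{
    if (stored)
        std::rethrow_exception(stored);  // cheap copy; std::move(stored) would copy anyway
}
```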
@@ -161,11 +161,11 @@ void LocalConnection::sendData(const Block & block, const String &, bool)

     if (state->pushing_async_executor)
     {
-        state->pushing_async_executor->push(std::move(block));
+        state->pushing_async_executor->push(block);
     }
     else if (state->pushing_executor)
     {
-        state->pushing_executor->push(std::move(block));
+        state->pushing_executor->push(block);
     }
 }

@@ -541,7 +541,7 @@ ColumnPtr ColumnNullable::compress() const
     size_t byte_size = nested_column->byteSize() + null_map->byteSize();

     return ColumnCompressed::create(size(), byte_size,
-        [nested_column = std::move(nested_column), null_map = std::move(null_map)]
+        [nested_column = std::move(nested_compressed), null_map = std::move(null_map_compressed)]
         {
             return ColumnNullable::create(nested_column->decompress(), null_map->decompress());
         });
@@ -330,7 +330,7 @@ ColumnPtr ColumnSparse::filter(const Filter & filt, ssize_t) const
     }

     auto res_values = values->filter(values_filter, values_result_size_hint);
-    return this->create(std::move(res_values), std::move(res_offsets), res_offset);
+    return this->create(res_values, std::move(res_offsets), res_offset);
 }

 void ColumnSparse::expand(const Filter & mask, bool inverted)
@@ -100,6 +100,11 @@ public:
         return emplace(x);
     }

+    [[nodiscard]] bool push(T && x)
+    {
+        return emplace(std::move(x));
+    }
+
     /// Returns false if queue is finished
     template <typename... Args>
     [[nodiscard]] bool emplace(Args &&... args)
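The new rvalue `push` overload above forwards to `emplace`, so a caller holding a temporary can hand it over without a copy. A stripped-down stand-in showing the delegation pattern (not the real `ConcurrentBoundedQueue`):

```cpp
#include <optional>
#include <utility>

template <typename T>
class QueueSketch
{
public:
    [[nodiscard]] bool push(const T & x) { return emplace(x); }        // copy in
    [[nodiscard]] bool push(T && x) { return emplace(std::move(x)); }  // move in

    template <typename... Args>
    [[nodiscard]] bool emplace(Args &&... args)
    {
        slot.emplace(std::forward<Args>(args)...);  // stand-in for the real queue storage
        return true;
    }

private:
    std::optional<T> slot;
};
```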
@@ -316,7 +316,7 @@ CodecTestSequence operator+(CodecTestSequence && left, const CodecTestSequence &

 std::vector<CodecTestSequence> operator+(const std::vector<CodecTestSequence> & left, const std::vector<CodecTestSequence> & right)
 {
-    std::vector<CodecTestSequence> result(std::move(left));
+    std::vector<CodecTestSequence> result(left);
     std::move(std::begin(right), std::end(right), std::back_inserter(result));

     return result;
@@ -317,7 +317,7 @@ struct KeeperStorageCreateRequestProcessor final : public KeeperStorageRequestPr
         created_node.data = request.data;
         created_node.is_sequental = request.is_sequential;

-        auto [map_key, _] = container.insert(path_created, std::move(created_node));
+        auto [map_key, _] = container.insert(path_created, created_node);
         /// Take child path from key owned by map.
         auto child_path = getBaseName(map_key->getKey());

@@ -122,6 +122,11 @@ Block::Block(const ColumnsWithTypeAndName & data_) : data{data_}
     initializeIndexByName();
 }

+Block::Block(ColumnsWithTypeAndName && data_) : data{std::move(data_)}
+{
+    initializeIndexByName();
+}
+

 void Block::initializeIndexByName()
 {
@@ -37,6 +37,7 @@ public:
     Block() = default;
     Block(std::initializer_list<ColumnWithTypeAndName> il);
     Block(const ColumnsWithTypeAndName & data_);
+    Block(ColumnsWithTypeAndName && data_);

     /// insert the column at the specified position
     void insert(size_t position, ColumnWithTypeAndName elem);
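With the `Block(ColumnsWithTypeAndName &&)` overload declared above, a caller that has already assembled the column list can transfer it instead of copying the per-column shared pointers. A hedged sketch:

```cpp
#include <Core/Block.h>

#include <utility>

DB::Block makeBlock(DB::ColumnsWithTypeAndName columns)
{
    // Previously only Block(const ColumnsWithTypeAndName &) existed, so this always copied;
    // the new rvalue overload lets the vector be moved into the Block.
    return DB::Block(std::move(columns));
}
```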
@@ -89,7 +89,7 @@ Poco::JSON::Object SerializationInfoTuple::toJSON() const
     for (const auto & elem : elems)
         subcolumns.add(elem->toJSON());

-    object.set("subcolumns", std::move(subcolumns));
+    object.set("subcolumns", subcolumns);
     return object;
 }

@@ -37,7 +37,7 @@ public:
 };

 DatabaseAtomic::DatabaseAtomic(String name_, String metadata_path_, UUID uuid, const String & logger_name, ContextPtr context_)
-    : DatabaseOrdinary(name_, std::move(metadata_path_), "store/", logger_name, context_)
+    : DatabaseOrdinary(name_, metadata_path_, "store/", logger_name, context_)
     , path_to_table_symlinks(fs::path(getContext()->getPath()) / "data" / escapeForFileName(name_) / "")
     , path_to_metadata_symlink(fs::path(getContext()->getPath()) / "metadata" / escapeForFileName(name_))
     , db_uuid(uuid)
@@ -61,7 +61,7 @@ DatabaseMySQL::DatabaseMySQL(
     , database_engine_define(database_engine_define_->clone())
     , database_name_in_mysql(database_name_in_mysql_)
     , database_settings(std::move(settings_))
-    , mysql_pool(std::move(pool))
+    , mysql_pool(std::move(pool)) /// NOLINT
 {
     try
     {
@@ -253,7 +253,7 @@ void MaterializeMetadata::transaction(const MySQLReplication::Position & positio
         out.close();
     }

-    commitMetadata(std::move(fun), persistent_tmp_path, persistent_path);
+    commitMetadata(fun, persistent_tmp_path, persistent_path);
 }

 MaterializeMetadata::MaterializeMetadata(const String & path_, const Settings & settings_) : persistent_path(path_), settings(settings_)
@@ -159,7 +159,7 @@ MaterializedMySQLSyncThread::MaterializedMySQLSyncThread(
     , log(&Poco::Logger::get("MaterializedMySQLSyncThread"))
     , database_name(database_name_)
     , mysql_database_name(mysql_database_name_)
-    , pool(std::move(pool_))
+    , pool(std::move(pool_)) /// NOLINT
     , client(std::move(client_))
     , settings(settings_)
 {
@@ -494,7 +494,7 @@ Pipe CacheDictionary<dictionary_key_type>::read(const Names & column_names, size
     {
         auto keys = cache_storage_ptr->getCachedSimpleKeys();
         auto keys_column = getColumnFromPODArray(std::move(keys));
-        key_columns = {ColumnWithTypeAndName(std::move(keys_column), std::make_shared<DataTypeUInt64>(), dict_struct.id->name)};
+        key_columns = {ColumnWithTypeAndName(keys_column, std::make_shared<DataTypeUInt64>(), dict_struct.id->name)};
     }
     else
     {
@@ -52,7 +52,7 @@ Block blockForKeys(

         auto filtered_column = source_column->filter(filter, requested_rows.size());

-        block.insert({std::move(filtered_column), (*dict_struct.key)[i].type, (*dict_struct.key)[i].name});
+        block.insert({filtered_column, (*dict_struct.key)[i].type, (*dict_struct.key)[i].name});
     }

     return block;
@@ -147,7 +147,7 @@ ColumnPtr FlatDictionary::getColumn(
     callOnDictionaryAttributeType(attribute.type, type_call);

     if (attribute.is_nullable_set)
-        result = ColumnNullable::create(std::move(result), std::move(col_null_map_to));
+        result = ColumnNullable::create(result, std::move(col_null_map_to));

     return result;
 }
@@ -572,7 +572,7 @@ Pipe FlatDictionary::read(const Names & column_names, size_t max_block_size, siz
         keys.push_back(key_index);

     auto keys_column = getColumnFromPODArray(std::move(keys));
-    ColumnsWithTypeAndName key_columns = {ColumnWithTypeAndName(std::move(keys_column), std::make_shared<DataTypeUInt64>(), dict_struct.id->name)};
+    ColumnsWithTypeAndName key_columns = {ColumnWithTypeAndName(keys_column, std::make_shared<DataTypeUInt64>(), dict_struct.id->name)};

     std::shared_ptr<const IDictionary> dictionary = shared_from_this();
     auto coordinator = DictionarySourceCoordinator::create(dictionary, column_names, std::move(key_columns), max_block_size);
@@ -578,7 +578,7 @@ ColumnPtr HashedArrayDictionary<dictionary_key_type>::getAttributeColumn(
     callOnDictionaryAttributeType(attribute.type, type_call);

     if (is_attribute_nullable)
-        result = ColumnNullable::create(std::move(result), std::move(col_null_map_to));
+        result = ColumnNullable::create(result, std::move(col_null_map_to));

     return result;
 }
@@ -159,7 +159,7 @@ ColumnPtr HashedDictionary<dictionary_key_type, sparse>::getColumn(
     callOnDictionaryAttributeType(attribute.type, type_call);

     if (is_attribute_nullable)
-        result = ColumnNullable::create(std::move(result), std::move(col_null_map_to));
+        result = ColumnNullable::create(result, std::move(col_null_map_to));

     return result;
 }
@@ -61,7 +61,7 @@ void IPolygonDictionary::convertKeyColumns(Columns & key_columns, DataTypes & ke

         auto & key_column_to_cast = key_columns[key_type_index];
         ColumnWithTypeAndName column_to_cast = {key_column_to_cast, key_type, ""};
-        auto casted_column = castColumnAccurate(std::move(column_to_cast), float_64_type);
+        auto casted_column = castColumnAccurate(column_to_cast, float_64_type);
         key_column_to_cast = std::move(casted_column);
         key_type = float_64_type;
     }
@@ -198,7 +198,7 @@ ColumnPtr RangeHashedDictionary<dictionary_key_type>::getColumn(
     callOnDictionaryAttributeType(attribute.type, type_call);

     if (is_attribute_nullable)
-        result = ColumnNullable::create(std::move(result), std::move(col_null_map_to));
+        result = ColumnNullable::create(result, std::move(col_null_map_to));

     return result;
 }
@@ -298,7 +298,7 @@ ColumnPtr RangeHashedDictionary<dictionary_key_type>::getColumnInternal(
     callOnDictionaryAttributeType(attribute.type, type_call);

     if (is_attribute_nullable)
-        result = ColumnNullable::create(std::move(result), std::move(col_null_map_to));
+        result = ColumnNullable::create(result, std::move(col_null_map_to));

     return result;
 }
@@ -136,9 +136,9 @@ namespace DB

         RedisArray keys;
         auto key_type = storageTypeToKeyType(configuration.storage_type);
-        for (const auto & key : all_keys)
+        for (auto && key : all_keys)
             if (key_type == connection->client->execute<String>(RedisCommand("TYPE").addRedisType(key)))
-                keys.addRedisType(std::move(key));
+                keys.addRedisType(key);

         if (configuration.storage_type == RedisStorageType::HASH_MAP)
         {
@@ -165,10 +165,10 @@ namespace DB
                 }

                 if (primary_with_secondary.size() > 1)
-                    hkeys.add(std::move(primary_with_secondary));
+                    hkeys.add(primary_with_secondary);
             }

-            keys = std::move(hkeys);
+            keys = hkeys;
         }

         return Pipe(std::make_shared<RedisSource>(
@@ -278,9 +278,10 @@ OutputFormatPtr FormatFactory::getOutputFormatParallelIfPossible(
     if (settings.output_format_parallel_formatting && getCreators(name).supports_parallel_formatting
         && !settings.output_format_json_array_of_rows)
     {
-        auto formatter_creator = [output_getter, sample, callback, format_settings]
-            (WriteBuffer & output) -> OutputFormatPtr
-            { return output_getter(output, sample, {std::move(callback)}, format_settings);};
+        auto formatter_creator = [output_getter, sample, callback, format_settings] (WriteBuffer & output) -> OutputFormatPtr
+        {
+            return output_getter(output, sample, {callback}, format_settings);
+        };

         ParallelFormattingOutputFormat::Params builder{buf, sample, formatter_creator, settings.max_threads};

@@ -379,8 +379,7 @@ private:
         if (!max_key_column_type->equals(*input.key_series_type))
         {
             ColumnWithTypeAndName column_to_cast = {max_key_column, max_key_column_type, ""};
-            auto casted_column = castColumnAccurate(std::move(column_to_cast), input.key_series_type);
-            max_key_column = std::move(casted_column);
+            max_key_column = castColumnAccurate(column_to_cast, input.key_series_type);
         }
     }

@@ -99,7 +99,7 @@ public:
     {
         const ColumnWithTypeAndName & column_to_cast = arguments[0];
         auto non_const_column_to_cast = column_to_cast.column->convertToFullColumnIfConst();
-        ColumnWithTypeAndName column_to_cast_non_const { std::move(non_const_column_to_cast), column_to_cast.type, column_to_cast.name };
+        ColumnWithTypeAndName column_to_cast_non_const { non_const_column_to_cast, column_to_cast.type, column_to_cast.name };

        auto cast_result = castColumnAccurateOrNull(column_to_cast_non_const, return_type);

@@ -1027,7 +1027,7 @@ public:

     ColumnPtr executeImpl(const ColumnsWithTypeAndName & args, const DataTypePtr & result_type, size_t input_rows_count) const override
     {
-        ColumnsWithTypeAndName arguments = std::move(args);
+        ColumnsWithTypeAndName arguments = args;
         executeShortCircuitArguments(arguments);
         ColumnPtr res;
         if ( (res = executeForConstAndNullableCondition(arguments, result_type, input_rows_count))
@@ -117,7 +117,7 @@ public:

     ColumnPtr executeImpl(const ColumnsWithTypeAndName & args, const DataTypePtr & result_type, size_t input_rows_count) const override
     {
-        ColumnsWithTypeAndName arguments = std::move(args);
+        ColumnsWithTypeAndName arguments = args;
         executeShortCircuitArguments(arguments);
         /** We will gather values from columns in branches to result column,
           * depending on values of conditions.
@@ -152,7 +152,7 @@ public:
         for (const auto & arg : arguments)
             arg_types.push_back(arg.type);

-        return std::make_unique<FunctionBaseNow64>(nowSubsecond(scale), std::move(arg_types), std::move(result_type));
+        return std::make_unique<FunctionBaseNow64>(nowSubsecond(scale), std::move(arg_types), result_type);
     }
 };

@@ -61,7 +61,7 @@ public:
         auto func_if = FunctionFactory::instance().get("if", context)->build(if_columns);
         auto if_res = func_if->execute(if_columns, result_type, input_rows_count);

-        return makeNullable(std::move(if_res));
+        return makeNullable(if_res);
     }
 };

@@ -386,7 +386,7 @@ void WriteBufferFromS3::waitForReadyBackGroundTasks()
     while (!upload_object_tasks.empty() && upload_object_tasks.front().is_finised)
     {
         auto & task = upload_object_tasks.front();
-        auto exception = std::move(task.exception);
+        auto exception = task.exception;
         auto tag = std::move(task.tag);
         upload_object_tasks.pop_front();

@@ -11,7 +11,7 @@
 #include <Access/AccessControl.h>
 #include <base/range.h>
 #include <base/sort.h>
-#include <boost/range/algorithm_ext/push_back.hpp>
+#include <base/insertAtEnd.h>


 namespace DB
@@ -76,11 +76,11 @@ ASTs InterpreterShowAccessQuery::getCreateAndGrantQueries() const
     {
         create_queries.push_back(InterpreterShowCreateAccessEntityQuery::getCreateQuery(*entity, access_control));
         if (entity->isTypeOf(AccessEntityType::USER) || entity->isTypeOf(AccessEntityType::ROLE))
-            boost::range::push_back(grant_queries, InterpreterShowGrantsQuery::getGrantQueries(*entity, access_control));
+            insertAtEnd(grant_queries, InterpreterShowGrantsQuery::getGrantQueries(*entity, access_control));
     }

     ASTs result = std::move(create_queries);
-    boost::range::push_back(result, std::move(grant_queries));
+    insertAtEnd(result, std::move(grant_queries));
     return result;
 }

@@ -517,7 +517,7 @@ Block ActionsDAG::updateHeader(Block header) const
         {
             auto & list = it->second;
             pos_to_remove.insert(pos);
-            node_to_column[inputs[list.front()]] = std::move(col);
+            node_to_column[inputs[list.front()]] = col;
             list.pop_front();
         }
     }
@@ -590,7 +590,7 @@ Block ActionsDAG::updateHeader(Block header) const
     for (auto & col : result_columns)
         res.insert(std::move(col));

-    for (const auto & item : header)
+    for (auto && item : header)
         res.insert(std::move(item));

     return res;
@@ -651,8 +651,8 @@ NameSet ActionsDAG::foldActionsByProjection(
         {
             /// Projection folding.
             node->type = ActionsDAG::ActionType::INPUT;
-            node->result_type = std::move(column_with_type_name->type);
-            node->result_name = std::move(column_with_type_name->name);
+            node->result_type = column_with_type_name->type;
+            node->result_name = column_with_type_name->name;
             node->children.clear();
             inputs.push_back(node);
         }
@@ -724,7 +724,7 @@ void ActionsDAG::addAliases(const NamesWithAliases & aliases)
         Node node;
         node.type = ActionType::ALIAS;
         node.result_type = child->result_type;
-        node.result_name = std::move(item.second);
+        node.result_name = item.second;
         node.column = child->column;
         node.children.emplace_back(child);

@@ -771,7 +771,7 @@ void ActionsDAG::project(const NamesWithAliases & projection)
         Node node;
         node.type = ActionType::ALIAS;
         node.result_type = child->result_type;
-        node.result_name = std::move(item.second);
+        node.result_name = item.second;
         node.column = child->column;
         node.children.emplace_back(child);

@@ -120,7 +120,7 @@ static Block createBlockFromCollection(const Collection & collection, const Data

         if (i == tuple_size)
             for (i = 0; i < tuple_size; ++i)
-                columns[i]->insert(std::move(tuple_values[i]));
+                columns[i]->insert(tuple_values[i]);
     }
 }

@@ -165,9 +165,9 @@ void AsynchronousInsertQueue::scheduleDataProcessingJob(const InsertQuery & key,
 {
     /// Wrap 'unique_ptr' with 'shared_ptr' to make this
     /// lambda copyable and allow to save it to the thread pool.
-    pool.scheduleOrThrowOnError([=, data = std::make_shared<InsertDataPtr>(std::move(data))]
+    pool.scheduleOrThrowOnError([key, global_context, data = std::make_shared<InsertDataPtr>(std::move(data))]() mutable
     {
-        processData(std::move(key), std::move(*data), std::move(global_context));
+        processData(key, std::move(*data), std::move(global_context));
     });
 }

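The lambda above parks a move-only `InsertDataPtr` inside a `shared_ptr` so the callable stays copyable for the thread pool, and is now `mutable` so the payload can be moved out when the job runs; the explicit `[key, global_context, ...]` capture list replaces the old `[=]`. A generic sketch of the same pattern using plain `std::function`, not the ClickHouse thread-pool API:

```cpp
#include <functional>
#include <memory>
#include <utility>
#include <vector>

struct Payload { std::vector<int> rows; };

void schedule(std::vector<std::function<void()>> & jobs, std::unique_ptr<Payload> data)
{
    // std::function requires a copyable callable, so the move-only unique_ptr is parked
    // inside a shared_ptr; `mutable` lets the body move the payload back out when it runs.
    jobs.emplace_back([data = std::make_shared<std::unique_ptr<Payload>>(std::move(data))]() mutable
    {
        std::unique_ptr<Payload> owned = std::move(*data);
        // ... process *owned ...
    });
}
```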
@@ -748,7 +748,7 @@ void ExpressionActions::execute(Block & block, size_t & num_rows, bool dry_run)
         if (execution_context.columns[pos].column)
             res.insert(execution_context.columns[pos]);

-    for (const auto & item : block)
+    for (auto && item : block)
         res.insert(std::move(item));

     block.swap(res);
@@ -1367,7 +1367,7 @@ bool SelectQueryExpressionAnalyzer::appendLimitBy(ExpressionActionsChain & chain

         auto child_name = child->getColumnName();
         if (!aggregated_names.count(child_name))
-            step.addRequiredOutput(std::move(child_name));
+            step.addRequiredOutput(child_name);
     }

     return true;
@@ -56,7 +56,7 @@ namespace
         static_assert(std::is_same_v<ReturnType, ExternalLoader::Loadables>);
         ExternalLoader::Loadables objects;
         objects.reserve(results.size());
-        for (const auto & result : results)
+        for (auto && result : results)
         {
             if (auto object = std::move(result.object))
                 objects.push_back(std::move(object));
@@ -139,7 +139,7 @@ ExternalLoader::LoadablePtr ExternalUserDefinedExecutableFunctionsLoader::create

     UserDefinedExecutableFunctionConfiguration function_configuration
     {
-        .name = std::move(name),
+        .name = name,
         .command = std::move(command_value),
         .command_arguments = std::move(command_arguments),
         .arguments = std::move(arguments),
@@ -197,7 +197,7 @@ Chain InterpreterInsertQuery::buildChain(
     std::atomic_uint64_t * elapsed_counter_ms)
 {
     auto sample = getSampleBlock(columns, table, metadata_snapshot);
-    return buildChainImpl(table, metadata_snapshot, std::move(sample) , thread_status, elapsed_counter_ms);
+    return buildChainImpl(table, metadata_snapshot, sample, thread_status, elapsed_counter_ms);
 }

 Chain InterpreterInsertQuery::buildChainImpl(
@@ -133,7 +133,7 @@ public:
         , process_list(process_list_)
         , processes_to_stop(std::move(processes_to_stop_))
         , processes_block(std::move(processes_block_))
-        , res_sample_block(std::move(res_sample_block_))
+        , res_sample_block(res_sample_block_)
     {
         addTotalRowsApprox(processes_to_stop.size());
     }
@@ -549,7 +549,7 @@ InterpreterSelectQuery::InterpreterSelectQuery(

         /// Reuse already built sets for multiple passes of analysis
         subquery_for_sets = std::move(query_analyzer->getSubqueriesForSets());
-        prepared_sets = query_info.sets.empty() ? std::move(query_analyzer->getPreparedSets()) : std::move(query_info.sets);
+        prepared_sets = query_info.sets.empty() ? query_analyzer->getPreparedSets() : query_info.sets;

         /// Do not try move conditions to PREWHERE for the second time.
         /// Otherwise, we won't be able to fallback from inefficient PREWHERE to WHERE later.
@@ -576,7 +576,7 @@ std::shared_ptr<ASTExpressionList> subqueryExpressionList(
     needed_columns[table_pos].fillExpressionList(*expression_list);

     for (const auto & expr : alias_pushdown[table_pos])
-        expression_list->children.emplace_back(std::move(expr));
+        expression_list->children.emplace_back(expr);

     return expression_list;
 }
@@ -77,7 +77,7 @@ SessionLogElement::SessionLogElement(const UUID & auth_id_, Type type_)

 NamesAndTypesList SessionLogElement::getNamesAndTypes()
 {
-    const auto event_type = std::make_shared<DataTypeEnum8>(
+    auto event_type = std::make_shared<DataTypeEnum8>(
         DataTypeEnum8::Values
         {
             {"LoginFailure", static_cast<Int8>(SESSION_LOGIN_FAILURE)},
@@ -86,7 +86,7 @@ NamesAndTypesList SessionLogElement::getNamesAndTypes()
         });

 #define AUTH_TYPE_NAME_AND_VALUE(v) std::make_pair(AuthenticationTypeInfo::get(v).raw_name, static_cast<Int8>(v))
-    const auto identified_with_column = std::make_shared<DataTypeEnum8>(
+    auto identified_with_column = std::make_shared<DataTypeEnum8>(
         DataTypeEnum8::Values
         {
             AUTH_TYPE_NAME_AND_VALUE(AuthType::NO_PASSWORD),
@@ -98,7 +98,7 @@ NamesAndTypesList SessionLogElement::getNamesAndTypes()
         });
 #undef AUTH_TYPE_NAME_AND_VALUE

-    const auto interface_type_column = std::make_shared<DataTypeEnum8>(
+    auto interface_type_column = std::make_shared<DataTypeEnum8>(
         DataTypeEnum8::Values
         {
             {"TCP", static_cast<Int8>(Interface::TCP)},
@@ -108,9 +108,9 @@ NamesAndTypesList SessionLogElement::getNamesAndTypes()
             {"PostgreSQL", static_cast<Int8>(Interface::POSTGRESQL)}
         });

-    const auto lc_string_datatype = std::make_shared<DataTypeLowCardinality>(std::make_shared<DataTypeString>());
+    auto lc_string_datatype = std::make_shared<DataTypeLowCardinality>(std::make_shared<DataTypeString>());

-    const auto settings_type_column = std::make_shared<DataTypeArray>(
+    auto settings_type_column = std::make_shared<DataTypeArray>(
         std::make_shared<DataTypeTuple>(
             DataTypes({
                 // setting name
@@ -63,7 +63,7 @@ ActionsDAGPtr addMissingDefaults(
         {
             const auto & nested_type = array_type->getNestedType();
             ColumnPtr nested_column = nested_type->createColumnConstWithDefaultValue(0);
-            const auto & constant = actions->addColumn({std::move(nested_column), nested_type, column.name});
+            const auto & constant = actions->addColumn({nested_column, nested_type, column.name});

             auto & group = nested_groups[offsets_name];
             group[0] = &constant;
@@ -76,7 +76,7 @@ ActionsDAGPtr addMissingDefaults(
          * it can be full (or the interpreter may decide that it is constant everywhere).
          */
         auto new_column = column.type->createColumnConstWithDefaultValue(0);
-        const auto * col = &actions->addColumn({std::move(new_column), column.type, column.name});
+        const auto * col = &actions->addColumn({new_column, column.type, column.name});
         index.push_back(&actions->materializeNode(*col));
     }

@@ -781,8 +781,8 @@ static std::tuple<ASTPtr, BlockIO> executeQueryImpl(

             element.memory_usage = info.peak_memory_usage > 0 ? info.peak_memory_usage : 0;

-            element.thread_ids = std::move(info.thread_ids);
-            element.profile_counters = std::move(info.profile_counters);
+            element.thread_ids = info.thread_ids;
+            element.profile_counters = info.profile_counters;

             /// We need to refresh the access info since dependent views might have added extra information, either during
             /// creation of the view (PushingToViewsBlockOutputStream) or while executing its internal SELECT
@@ -57,7 +57,7 @@ bool removeJoin(ASTSelectQuery & select, TreeRewriterResult & rewriter_result, C
     const size_t left_table_pos = 0;
     /// Test each argument of `and` function and select ones related to only left table
     std::shared_ptr<ASTFunction> new_conj = makeASTFunction("and");
-    for (const auto & node : collectConjunctions(where))
+    for (auto && node : collectConjunctions(where))
     {
         if (membership_collector.getIdentsMembership(node) == left_table_pos)
             new_conj->arguments->children.push_back(std::move(node));
@@ -35,7 +35,7 @@ struct PullingAsyncPipelineExecutor::Data
         if (has_exception)
         {
             has_exception = false;
-            std::rethrow_exception(std::move(exception));
+            std::rethrow_exception(exception);
         }
     }
 };
@@ -90,7 +90,7 @@ struct PushingAsyncPipelineExecutor::Data
         if (has_exception)
        {
             has_exception = false;
-            std::rethrow_exception(std::move(exception));
+            std::rethrow_exception(exception);
         }
     }
 };
@@ -114,7 +114,7 @@ static std::shared_ptr<arrow::ipc::RecordBatchFileReader> createFileReader(ReadB
     if (is_stopped)
         return nullptr;

-    auto file_reader_status = arrow::ipc::RecordBatchFileReader::Open(std::move(arrow_file));
+    auto file_reader_status = arrow::ipc::RecordBatchFileReader::Open(arrow_file);
     if (!file_reader_status.ok())
         throw Exception(ErrorCodes::UNKNOWN_EXCEPTION,
             "Error while opening a table: {}", file_reader_status.status().ToString());
@@ -241,7 +241,7 @@ static ColumnWithTypeAndName readColumnWithDecimalDataImpl(std::shared_ptr<arrow
             column_data.emplace_back(chunk.IsNull(value_i) ? DecimalType(0) : *reinterpret_cast<const DecimalType *>(chunk.Value(value_i))); // TODO: copy column
         }
     }
-    return {std::move(internal_column), std::move(internal_type), column_name};
+    return {std::move(internal_column), internal_type, column_name};
 }

 template <typename DecimalArray>
@@ -337,7 +337,7 @@ static ColumnWithTypeAndName readColumnFromArrowColumn(
         auto nested_column = readColumnFromArrowColumn(arrow_column, column_name, format_name, true, dictionary_values, read_ints_as_dates);
         auto nullmap_column = readByteMapFromArrowColumn(arrow_column);
         auto nullable_type = std::make_shared<DataTypeNullable>(std::move(nested_column.type));
-        auto nullable_column = ColumnNullable::create(std::move(nested_column.column), std::move(nullmap_column));
+        auto nullable_column = ColumnNullable::create(nested_column.column, nullmap_column);
         return {std::move(nullable_column), std::move(nullable_type), column_name};
     }

@@ -384,7 +384,7 @@ static ColumnWithTypeAndName readColumnFromArrowColumn(

         const auto * tuple_column = assert_cast<const ColumnTuple *>(nested_column.column.get());
         const auto * tuple_type = assert_cast<const DataTypeTuple *>(nested_column.type.get());
-        auto map_column = ColumnMap::create(std::move(tuple_column->getColumnPtr(0)), std::move(tuple_column->getColumnPtr(1)), std::move(offsets_column));
+        auto map_column = ColumnMap::create(tuple_column->getColumnPtr(0), tuple_column->getColumnPtr(1), offsets_column);
         auto map_type = std::make_shared<DataTypeMap>(tuple_type->getElements()[0], tuple_type->getElements()[1]);
         return {std::move(map_column), std::move(map_type), column_name};
     }
@@ -393,7 +393,7 @@ static ColumnWithTypeAndName readColumnFromArrowColumn(
         auto arrow_nested_column = getNestedArrowColumn(arrow_column);
         auto nested_column = readColumnFromArrowColumn(arrow_nested_column, column_name, format_name, false, dictionary_values, read_ints_as_dates);
         auto offsets_column = readOffsetsFromArrowListColumn(arrow_column);
-        auto array_column = ColumnArray::create(std::move(nested_column.column), std::move(offsets_column));
+        auto array_column = ColumnArray::create(nested_column.column, offsets_column);
         auto array_type = std::make_shared<DataTypeArray>(nested_column.type);
         return {std::move(array_column), std::move(array_type), column_name};
     }
@@ -458,7 +458,7 @@ static ColumnWithTypeAndName readColumnFromArrowColumn(

         auto arrow_indexes_column = std::make_shared<arrow::ChunkedArray>(indexes_array);
         auto indexes_column = readColumnWithIndexesData(arrow_indexes_column);
-        auto lc_column = ColumnLowCardinality::create(dict_values->column, std::move(indexes_column));
+        auto lc_column = ColumnLowCardinality::create(dict_values->column, indexes_column);
         auto lc_type = std::make_shared<DataTypeLowCardinality>(dict_values->type);
         return {std::move(lc_column), std::move(lc_type), column_name};
     }
@@ -15,9 +15,9 @@ namespace ErrorCodes

 BinaryRowInputFormat::BinaryRowInputFormat(ReadBuffer & in_, Block header, Params params_, bool with_names_, bool with_types_, const FormatSettings & format_settings_)
     : RowInputFormatWithNamesAndTypes(
-        std::move(header),
+        header,
         in_,
-        std::move(params_),
+        params_,
         with_names_,
         with_types_,
         format_settings_,
@@ -658,7 +658,7 @@ namespace DB
             auto nested_arrow_type = getArrowType(nested_types[i], tuple_column->getColumnPtr(i), name, format_name, out_is_column_nullable);
             nested_fields.push_back(std::make_shared<arrow::Field>(name, nested_arrow_type, *out_is_column_nullable));
         }
-        return arrow::struct_(std::move(nested_fields));
+        return arrow::struct_(nested_fields);
     }

     if (column_type->lowCardinality())
@@ -169,7 +169,7 @@ static std::optional<capnp::DynamicValue::Reader> convertToDynamicValue(
             auto value_builder = initStructFieldBuilder(nested_column, row_num, struct_builder, value_field);
             auto value = convertToDynamicValue(nested_column, nullable_type->getNestedType(), row_num, value_builder, enum_comparing_mode, temporary_text_data_storage);
             if (value)
-                struct_builder.set(value_field, std::move(*value));
+                struct_builder.set(value_field, *value);
         }
     }
     else
@@ -184,7 +184,7 @@
                 = initStructFieldBuilder(nested_columns[pos], row_num, struct_builder, nested_struct_schema.getFieldByName(name));
             auto value = convertToDynamicValue(nested_columns[pos], nested_types[pos], row_num, field_builder, enum_comparing_mode, temporary_text_data_storage);
             if (value)
-                struct_builder.set(name, std::move(*value));
+                struct_builder.set(name, *value);
         }
     }
     return std::nullopt;
@@ -215,7 +215,7 @@

             auto value = convertToDynamicValue(nested_column, nested_type, offset + i, value_builder, enum_comparing_mode, temporary_text_data_storage);
             if (value)
-                list_builder.set(i, std::move(*value));
+                list_builder.set(i, *value);
         }
         return std::nullopt;
     }
@@ -27,7 +27,7 @@ JSONCompactEachRowRowInputFormat::JSONCompactEachRowRowInputFormat(
     : RowInputFormatWithNamesAndTypes(
         header_,
         in_,
-        std::move(params_),
+        params_,
         with_names_,
         with_types_,
         format_settings_,
@@ -111,7 +111,7 @@ static void getFileReaderAndSchema(
     if (is_stopped)
         return;

-    auto result = arrow::adapters::orc::ORCFileReader::Open(std::move(arrow_file), arrow::default_memory_pool());
+    auto result = arrow::adapters::orc::ORCFileReader::Open(arrow_file, arrow::default_memory_pool());
     if (!result.ok())
         throw Exception(result.status().ToString(), ErrorCodes::BAD_ARGUMENTS);
     file_reader = std::move(result).ValueOrDie();
@@ -457,7 +457,7 @@ static void postprocessChunk(
         {
             const auto & from_type = desc.nested_type;
             const auto & to_type = desc.real_type;
-            res_columns[desc.column_numbers[0]] = recursiveTypeConversion(std::move(column), from_type, to_type);
+            res_columns[desc.column_numbers[0]] = recursiveTypeConversion(column, from_type, to_type);
         }
         else
             res_columns[desc.column_numbers[0]] = std::move(column);
@@ -138,7 +138,7 @@ void addCreatingSetsStep(

         auto creating_set = std::make_unique<CreatingSetStep>(
             plan->getCurrentDataStream(),
-            std::move(description),
+            description,
             std::move(set),
             limits,
             context);
@@ -982,7 +982,7 @@ ReadFromMergeTree::AnalysisResult ReadFromMergeTree::getAnalysisResult() const
 {
     auto result_ptr = analyzed_result_ptr ? analyzed_result_ptr : selectRangesToRead(prepared_parts);
     if (std::holds_alternative<std::exception_ptr>(result_ptr->result))
-        std::rethrow_exception(std::move(std::get<std::exception_ptr>(result_ptr->result)));
+        std::rethrow_exception(std::get<std::exception_ptr>(result_ptr->result));

     return std::get<ReadFromMergeTree::AnalysisResult>(result_ptr->result);
 }
@@ -1326,7 +1326,7 @@ bool MergeTreeDataSelectAnalysisResult::error() const
 size_t MergeTreeDataSelectAnalysisResult::marks() const
 {
     if (std::holds_alternative<std::exception_ptr>(result))
-        std::rethrow_exception(std::move(std::get<std::exception_ptr>(result)));
+        std::rethrow_exception(std::get<std::exception_ptr>(result));

     const auto & index_stats = std::get<ReadFromMergeTree::AnalysisResult>(result).index_stats;
     if (index_stats.empty())
@@ -24,7 +24,7 @@ void DistinctSortedTransform::transform(Chunk & chunk)
     if (column_ptrs.empty())
         return;

-    const ColumnRawPtrs clearing_hint_columns(getClearingColumns(chunk, column_ptrs));
+    ColumnRawPtrs clearing_hint_columns(getClearingColumns(chunk, column_ptrs));

     if (data.type == ClearableSetVariants::Type::EMPTY)
         data.init(ClearableSetVariants::chooseMethod(column_ptrs, key_sizes));
@@ -138,7 +138,7 @@ void ExceptionKeepingTransform::work()
         {
             stage = Stage::Exception;
             ready_output = true;
-            data.exception = std::move(exception);
+            data.exception = exception;
             onException();
         }
     }
@@ -152,7 +152,7 @@ void ExceptionKeepingTransform::work()
         {
             stage = Stage::Exception;
             ready_output = true;
-            data.exception = std::move(exception);
+            data.exception = exception;
             onException();
         }
         else
@@ -166,7 +166,7 @@ void ExceptionKeepingTransform::work()
         {
             stage = Stage::Exception;
             ready_output = true;
-            data.exception = std::move(exception);
+            data.exception = exception;
             onException();
         }
         else
@@ -188,7 +188,7 @@ void ExceptionKeepingTransform::work()
         {
             stage = Stage::Exception;
             ready_output = true;
-            data.exception = std::move(exception);
+            data.exception = exception;
             onException();
         }
     }
@@ -206,7 +206,7 @@ WindowTransform::WindowTransform(const Block & input_header_,
     {
         column = std::move(column)->convertToFullColumnIfConst();
     }
-    input_header.setColumns(std::move(input_columns));
+    input_header.setColumns(input_columns);

     // Initialize window function workspaces.
     workspaces.reserve(functions.size());
@@ -695,7 +695,7 @@ IProcessor::Status FinalizingViewsTransform::prepare()
         return Status::Ready;

     if (any_exception)
-        output.pushException(std::move(any_exception));
+        output.pushException(any_exception);

     output.finish();
     return Status::Finished;
@@ -708,7 +708,7 @@ static std::exception_ptr addStorageToException(std::exception_ptr ptr, const St
 {
     try
     {
-        std::rethrow_exception(std::move(ptr));
+        std::rethrow_exception(ptr);
     }
     catch (DB::Exception & exception)
     {
@@ -736,7 +736,7 @@ void FinalizingViewsTransform::work()
             if (!any_exception)
                 any_exception = status.exception;

-            view.setException(addStorageToException(std::move(status.exception), view.table_id));
+            view.setException(addStorageToException(status.exception, view.table_id));
         }
         else
         {
@@ -1122,7 +1122,7 @@ std::string PredefinedQueryHandler::getQuery(HTTPServerRequest & request, HTMLFo
 
 HTTPRequestHandlerFactoryPtr createDynamicHandlerFactory(IServer & server, const std::string & config_prefix)
 {
-const auto & query_param_name = server.config().getString(config_prefix + ".handler.query_param_name", "query");
+auto query_param_name = server.config().getString(config_prefix + ".handler.query_param_name", "query");
 auto factory = std::make_shared<HandlingRuleHTTPHandlerFactory<DynamicQueryHandler>>(server, std::move(query_param_name));
 
 factory->addFiltersFromConfig(server.config(), config_prefix);
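This hunk goes the other way: the std::move on the following line is kept, and the binding is loosened from const auto & to auto so that the move can actually take effect, because moving from a const reference always falls back to a copy. A sketch of the difference, with invented helper names standing in for the config call:

#include <string>
#include <utility>

std::string loadSetting()                 /// stands in for reading a config value
{
    return std::string(1024, 'x');
}

void consume(std::string value)           /// takes ownership of its argument
{
    (void)value;
}

void copiesDespiteMove()
{
    const auto & name = loadSetting();    /// const reference to the returned temporary
    consume(std::move(name));             /// std::move on const: a full copy is made
}

void actuallyMoves()
{
    auto name = loadSetting();            /// owned, non-const local
    consume(std::move(name));             /// the buffer is transferred, no copy
}

int main()
{
    copiesDespiteMove();
    actuallyMoves();
    return 0;
}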
@@ -117,7 +117,7 @@ void ColumnDescription::readText(ReadBuffer & buf)
 ParserColumnDeclaration column_parser(/* require type */ true);
 ASTPtr ast = parseQuery(column_parser, "x T " + modifiers, "column parser", 0, DBMS_DEFAULT_MAX_PARSER_DEPTH);
 
-if (const auto * col_ast = ast->as<ASTColumnDeclaration>())
+if (auto * col_ast = ast->as<ASTColumnDeclaration>())
 {
 if (col_ast->default_expression)
 {
@@ -309,7 +309,7 @@ void ColumnsDescription::flattenNested()
 continue;
 }
 
-ColumnDescription column = std::move(*it);
+ColumnDescription column = *it;
 removeSubcolumns(column.name);
 it = columns.get<0>().erase(it);
 
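ColumnsDescription apparently stores its columns in a boost::multi_index container (note the columns.get<0>()), and such containers, like std::set, expose their elements only through const references, so std::move(*it) was already a copy; writing the copy explicitly just states what happens. The same effect reproduced with a standard container:

#include <cassert>
#include <set>
#include <string>
#include <utility>

int main()
{
    std::set<std::string> columns{"id", "name", "payload"};

    auto it = columns.find("payload");
    assert(it != columns.end());

    /// *it is `const std::string &`: set elements are immutable because they act as keys.
    /// std::move therefore selects the copy constructor, not the move constructor.
    std::string extracted = std::move(*it);

    assert(!it->empty());      /// the element inside the set is untouched
    columns.erase(it);         /// remove it explicitly afterwards, as the diff does

    assert(extracted == "payload");
    return 0;
}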
@@ -198,6 +198,20 @@ ConstraintsDescription & ConstraintsDescription::operator=(const ConstraintsDesc
 return *this;
 }
 
+ConstraintsDescription::ConstraintsDescription(ConstraintsDescription && other) noexcept
+: constraints(std::move(other.constraints))
+{
+update();
+}
+
+ConstraintsDescription & ConstraintsDescription::operator=(ConstraintsDescription && other) noexcept
+{
+constraints = std::move(other.constraints);
+update();
+
+return *this;
+}
+
 void ConstraintsDescription::update()
 {
 if (constraints.empty())
@@ -18,6 +18,9 @@ public:
 ConstraintsDescription(const ConstraintsDescription & other);
 ConstraintsDescription & operator=(const ConstraintsDescription & other);
 
+ConstraintsDescription(ConstraintsDescription && other) noexcept;
+ConstraintsDescription & operator=(ConstraintsDescription && other) noexcept;
+
 bool empty() const { return constraints.empty(); }
 String toString() const;
 
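ConstraintsDescription already declares copy operations, which suppresses the compiler-generated move operations, so any "move" of the class was quietly copying. The commit therefore adds explicit move operations, marks them noexcept (which clang-tidy's performance-noexcept-move-constructor check also asks for, and which lets containers such as std::vector relocate elements by moving), and re-runs update() in the body, presumably to refresh state derived from the constraints. A condensed sketch of the same idiom with an invented class:

#include <cstddef>
#include <string>
#include <utility>
#include <vector>

/// Illustrative stand-in: user-declared copy operations, a cached value derived from
/// the data, and explicit noexcept move operations that refresh that cache.
class Description
{
public:
    Description() = default;

    Description(const Description & other) : items(other.items) { update(); }
    Description & operator=(const Description & other)
    {
        items = other.items;
        update();
        return *this;
    }

    Description(Description && other) noexcept : items(std::move(other.items)) { update(); }
    Description & operator=(Description && other) noexcept
    {
        items = std::move(other.items);
        update();
        return *this;
    }

private:
    void update()
    {
        total_size = 0;
        for (const auto & item : items)
            total_size += item.size();
    }

    std::vector<std::string> items;
    std::size_t total_size = 0;   /// derived from `items`, recomputed after copy or move
};

int main()
{
    Description a;
    Description b = std::move(a);   /// uses the explicit noexcept move constructor
    b = Description{};              /// uses the explicit noexcept move assignment
    return 0;
}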
@@ -3663,7 +3663,7 @@ RestoreDataTasks MergeTreeData::restoreDataPartsFromBackup(const BackupPtr & bac
 Strings part_names = backup->listFiles(data_path_in_backup);
 for (const String & part_name : part_names)
 {
-const auto part_info = MergeTreePartInfo::tryParsePartName(part_name, format_version);
+auto part_info = MergeTreePartInfo::tryParsePartName(part_name, format_version);
 
 if (!part_info)
 continue;
@@ -4745,7 +4745,7 @@ std::optional<ProjectionCandidate> MergeTreeData::getQueryProcessingStageWithAgg
 query_options,
 /* prepared_sets_= */ query_info.sets);
 const auto & analysis_result = select.getAnalysisResult();
-query_info.sets = std::move(select.getQueryAnalyzer()->getPreparedSets());
+query_info.sets = select.getQueryAnalyzer()->getPreparedSets();
 
 bool can_use_aggregate_projection = true;
 /// If the first stage of the query pipeline is more complex than Aggregating - Expression - Filter - ReadFromStorage,
@@ -231,8 +231,10 @@ void MergeTreeDataPartChecksums::addFile(const String & file_name, UInt64 file_s
 
 void MergeTreeDataPartChecksums::add(MergeTreeDataPartChecksums && rhs_checksums)
 {
-for (auto & checksum : rhs_checksums.files)
-files[std::move(checksum.first)] = std::move(checksum.second);
+for (auto && checksum : rhs_checksums.files)
+{
+files[checksum.first] = std::move(checksum.second);
+}
 
 rhs_checksums.files.clear();
 }
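The key of a map element is const, so std::move(checksum.first) could never move anything; the rewritten loop copies the key and moves only the mapped value, which is where the potential saving is. A standalone version of the same merge pattern, using std::map with invented types:

#include <cassert>
#include <map>
#include <string>
#include <utility>
#include <vector>

using Checksums = std::map<std::string, std::vector<char>>;

/// Merge `rhs` into `dest`: keys are copied (they are const inside the map),
/// values are moved, and the source is cleared afterwards.
void add(Checksums & dest, Checksums && rhs)
{
    for (auto && entry : rhs)
    {
        dest[entry.first] = std::move(entry.second);
    }
    rhs.clear();
}

int main()
{
    Checksums merged;
    Checksums incoming{{"data.bin", {1, 2, 3}}, {"data.mrk", {4, 5}}};

    add(merged, std::move(incoming));

    assert(merged.size() == 2);
    assert(merged["data.bin"].size() == 3);
    return 0;
}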
@@ -707,7 +707,7 @@ MergeTreeRangeReader::ReadResult MergeTreeRangeReader::read(size_t max_rows, Mar
 {
 auto old_columns = block_before_prewhere.getColumns();
 filterColumns(old_columns, read_result.getFilterOriginal()->getData());
-block_before_prewhere.setColumns(std::move(old_columns));
+block_before_prewhere.setColumns(old_columns);
 }
 
 for (auto & column : block_before_prewhere)
@@ -26,14 +26,14 @@ MergeTreeReaderCompact::MergeTreeReaderCompact(
 const ReadBufferFromFileBase::ProfileCallback & profile_callback_,
 clockid_t clock_type_)
 : IMergeTreeReader(
-std::move(data_part_),
-std::move(columns_),
+data_part_,
+columns_,
 metadata_snapshot_,
 uncompressed_cache_,
 mark_cache_,
-std::move(mark_ranges_),
-std::move(settings_),
-std::move(avg_value_size_hints_))
+mark_ranges_,
+settings_,
+avg_value_size_hints_)
 , marks_loader(
 data_part->volume->getDisk(),
 mark_cache,
@@ -20,9 +20,15 @@ MergeTreeReaderInMemory::MergeTreeReaderInMemory(
 const StorageMetadataPtr & metadata_snapshot_,
 MarkRanges mark_ranges_,
 MergeTreeReaderSettings settings_)
-: IMergeTreeReader(data_part_, std::move(columns_), metadata_snapshot_,
-nullptr, nullptr, std::move(mark_ranges_),
-std::move(settings_), {})
+: IMergeTreeReader(
+data_part_,
+columns_,
+metadata_snapshot_,
+nullptr,
+nullptr,
+mark_ranges_,
+settings_,
+{})
 , part_in_memory(std::move(data_part_))
 {
 for (const auto & name_and_type : columns)
@@ -36,14 +36,14 @@ MergeTreeReaderWide::MergeTreeReaderWide(
 const ReadBufferFromFileBase::ProfileCallback & profile_callback_,
 clockid_t clock_type_)
 : IMergeTreeReader(
-std::move(data_part_),
-std::move(columns_),
+data_part_,
+columns_,
 metadata_snapshot_,
 uncompressed_cache_,
-std::move(mark_cache_),
-std::move(mark_ranges_),
-std::move(settings_),
-std::move(avg_value_size_hints_))
+mark_cache_,
+mark_ranges_,
+settings_,
+avg_value_size_hints_)
 {
 try
 {
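In these reader constructors the arguments forwarded to IMergeTreeReader lose their std::move, which suggests the base class takes them in a way that cannot benefit from a move (for example by const reference), so the moves were disguised copies; tellingly, the one argument that is genuinely consumed, data_part_ moved into part_in_memory, keeps its std::move and is moved only after the base class has been initialised. A reduced sketch of that forwarding pattern with invented types:

#include <memory>
#include <string>
#include <utility>
#include <vector>

struct Part
{
    std::string name;
};
using PartPtr = std::shared_ptr<const Part>;

/// Stand-in for the base reader: it takes its handles by const reference.
class ReaderBase
{
public:
    ReaderBase(const PartPtr & part_, const std::vector<std::string> & columns_)
        : part(part_), columns(columns_)
    {
    }

protected:
    PartPtr part;
    std::vector<std::string> columns;
};

/// Stand-in for a derived reader: the base receives the parameters directly
/// (std::move would still end in a copy, because they bind to const reference
/// parameters), and only the member that really takes ownership moves its
/// argument -- after the base class has already been constructed.
class InMemoryReader : public ReaderBase
{
public:
    InMemoryReader(PartPtr part_, std::vector<std::string> columns_)
        : ReaderBase(part_, columns_)
        , owned_part(std::move(part_))   /// safe: base classes are initialised first
    {
    }

private:
    PartPtr owned_part;
};

int main()
{
    InMemoryReader reader(std::make_shared<const Part>(Part{"all_1_1_0"}), {"id", "value"});
    (void)reader;
    return 0;
}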
@@ -295,7 +295,7 @@ void StorageDictionary::alter(const AlterCommands & params, ContextPtr alter_con
 }
 
 std::lock_guard<std::mutex> lock(dictionary_config_mutex);
-configuration->setString("dictionary.comment", std::move(new_comment));
+configuration->setString("dictionary.comment", new_comment);
 }
 
 void registerStorageDictionary(StorageFactory & factory)
@@ -172,7 +172,7 @@ ColumnPtr fillColumnWithRandomData(
 
 auto data_column = fillColumnWithRandomData(nested_type, offset, max_array_length, max_string_length, rng, context);
 
-return ColumnArray::create(std::move(data_column), std::move(offsets_column));
+return ColumnArray::create(data_column, std::move(offsets_column));
 }
 
 case TypeIndex::Tuple:
@@ -198,7 +198,7 @@ ColumnPtr fillColumnWithRandomData(
 for (UInt64 i = 0; i < limit; ++i)
 null_map[i] = rng() % 16 == 0; /// No real motivation for this.
 
-return ColumnNullable::create(std::move(nested_column), std::move(null_map_column));
+return ColumnNullable::create(nested_column, std::move(null_map_column));
 }
 
 case TypeIndex::UInt8:
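Besides moves of const objects, this check can also report a std::move whose result only ever binds to a const reference parameter, which may be what happens with the first argument of ColumnArray::create and ColumnNullable::create here: the callee copies regardless, so the std::move is dead weight. The diagnosed shape, reproduced with plain standard types:

#include <string>
#include <utility>
#include <vector>

/// The parameter is a const reference: the callee can only copy from it.
std::vector<std::string> makeWrapper(const std::vector<std::string> & data)
{
    return data;   /// a copy, whether or not the caller wrote std::move
}

int main()
{
    std::vector<std::string> rows{"a", "b", "c"};

    /// The std::move changes nothing: the rvalue binds to the const reference and is
    /// copied anyway (newer clang-tidy releases expose this case via the check's
    /// CheckMoveToConstRef option).
    auto wrapped = makeWrapper(std::move(rows));

    /// `rows` is therefore still intact after the call.
    return rows.size() == 3 && wrapped.size() == 3 ? 0 : 1;
}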
@@ -395,7 +395,7 @@ protected:
 for (const auto & elem : block_to_fill)
 columns.emplace_back(fillColumnWithRandomData(elem.type, block_size, max_array_length, max_string_length, rng, context));
 
-columns = Nested::flatten(block_to_fill.cloneWithColumns(std::move(columns))).getColumns();
+columns = Nested::flatten(block_to_fill.cloneWithColumns(columns)).getColumns();
 return {std::move(columns), block_size};
 }
 
@@ -55,6 +55,9 @@ struct StorageInMemoryMetadata
 StorageInMemoryMetadata(const StorageInMemoryMetadata & other);
 StorageInMemoryMetadata & operator=(const StorageInMemoryMetadata & other);
 
+StorageInMemoryMetadata(StorageInMemoryMetadata && other) = default;
+StorageInMemoryMetadata & operator=(StorageInMemoryMetadata && other) = default;
+
 /// NOTE: Thread unsafe part. You should modify same StorageInMemoryMetadata
 /// structure from different threads. It should be used as MultiVersion
 /// object. See example in IStorage.
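StorageInMemoryMetadata gets the lighter variant of the ConstraintsDescription fix: its user-declared copy constructor and copy assignment suppress the implicit move operations, so every "move" of the struct was a copy; declaring the move operations = default restores real moves without writing any bodies. A compact demonstration of that language rule, with a made-up type:

#include <string>
#include <utility>

struct Metadata
{
    Metadata() = default;

    /// User-declared copy operations suppress the implicitly generated move operations,
    /// so without the lines below `Metadata b = std::move(a);` would copy.
    Metadata(const Metadata & other) = default;
    Metadata & operator=(const Metadata & other) = default;

    /// Explicitly defaulting the move operations restores cheap moves.
    Metadata(Metadata && other) = default;
    Metadata & operator=(Metadata && other) = default;

    std::string comment;
};

int main()
{
    Metadata a;
    a.comment.assign(1 << 20, 'x');

    Metadata b = std::move(a);   /// a genuine move: the large buffer is transferred
    return b.comment.size() == (1u << 20) ? 0 : 1;
}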
@@ -171,7 +171,7 @@ Chunk LogSource::generate()
 }
 
 if (!column->empty())
-res.insert(ColumnWithTypeAndName(std::move(column), name_type.type, name_type.name));
+res.insert(ColumnWithTypeAndName(column, name_type.type, name_type.name));
 }
 
 if (res)
@@ -730,7 +730,7 @@ void StorageMerge::convertingSourceStream(
 for (const auto & alias : aliases)
 {
 pipe_columns.emplace_back(NameAndTypePair(alias.name, alias.type));
-ASTPtr expr = std::move(alias.expression);
+ASTPtr expr = alias.expression;
 auto syntax_result = TreeRewriter(local_context).analyze(expr, pipe_columns);
 auto expression_analyzer = ExpressionAnalyzer{alias.expression, syntax_result, local_context};
 
@@ -178,7 +178,7 @@ public:
 {
 /// Avoid Excessive copy when block is small enough
 if (block.rows() <= max_rows)
-return Blocks{std::move(block)};
+return {block};
 
 const size_t split_block_size = ceil(block.rows() * 1.0 / max_rows);
 Blocks split_blocks(split_block_size);
@@ -649,7 +649,7 @@ StorageURLWithFailover::StorageURLWithFailover(
 Poco::URI poco_uri(uri_option);
 context_->getRemoteHostFilter().checkURL(poco_uri);
 LOG_DEBUG(&Poco::Logger::get("StorageURLDistributed"), "Adding URL option: {}", uri_option);
-uri_options.emplace_back(std::move(uri_option));
+uri_options.emplace_back(uri_option);
 }
 }
 
@@ -14,7 +14,7 @@
 #include <Interpreters/Context.h>
 #include <Parsers/Access/ASTRolesOrUsersSet.h>
 #include <base/range.h>
-#include <boost/range/algorithm_ext/push_back.hpp>
+#include <base/insertAtEnd.h>
 
 
 namespace DB
@@ -43,7 +43,8 @@ NamesAndTypesList StorageSystemRowPolicies::getNamesAndTypes()
 {"apply_to_except", std::make_shared<DataTypeArray>(std::make_shared<DataTypeString>())}
 };
 
-boost::range::push_back(names_and_types, std::move(extra_names_and_types));
+insertAtEnd(names_and_types, extra_names_and_types);
+
 return names_and_types;
 }
 
@@ -509,8 +509,8 @@ protected:
 loading_dependencies_tables.reserve(info.dependencies.size());
 for (auto && dependency : info.dependencies)
 {
-loading_dependencies_databases.push_back(std::move(dependency.database));
-loading_dependencies_tables.push_back(std::move(dependency.table));
+loading_dependencies_databases.push_back(dependency.database);
+loading_dependencies_tables.push_back(dependency.table);
 }
 
 Array loading_dependent_databases;
@@ -519,8 +519,8 @@ protected:
 loading_dependent_tables.reserve(info.dependencies.size());
 for (auto && dependent : info.dependent_database_objects)
 {
-loading_dependent_databases.push_back(std::move(dependent.database));
-loading_dependent_tables.push_back(std::move(dependent.table));
+loading_dependent_databases.push_back(dependent.database);
+loading_dependent_tables.push_back(dependent.table);
 }
 
 if (columns_mask[src_index++])
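Although the loops use auto &&, iterating a const range makes the reference collapse to a const lvalue reference, and std::move of a member of a const element is once again only a copy; the fix simply spells the copy out. A self-contained reproduction of that deduction:

#include <string>
#include <type_traits>
#include <vector>

struct Dependency
{
    std::string database;
    std::string table;
};

int main()
{
    const std::vector<Dependency> dependencies{{"db", "t1"}, {"db", "t2"}};

    std::vector<std::string> databases;
    databases.reserve(dependencies.size());

    for (auto && dependency : dependencies)
    {
        /// Over a const range, `auto &&` deduces to `const Dependency &`.
        static_assert(std::is_same_v<decltype(dependency), const Dependency &>);

        /// Hence std::move(dependency.database) would be `const std::string &&`,
        /// which copies anyway -- so the copy is written as a copy.
        databases.push_back(dependency.database);
    }

    return databases.size() == 2 ? 0 : 1;
}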
@@ -78,7 +78,7 @@ StoragePtr TableFunctionExecutable::executeImpl(const ASTPtr & /*ast_function*/,
 auto global_context = context->getGlobalContext();
 ExecutableSettings settings;
 settings.script_name = script_name;
-settings.script_arguments = std::move(arguments);
+settings.script_arguments = arguments;
 
 auto storage = StorageExecutable::create(storage_id, format, settings, input_queries, getActualTableStructure(context), ConstraintsDescription{});
 storage->startup();