mirror of
https://github.com/ClickHouse/ClickHouse.git
synced 2024-11-27 01:51:59 +00:00
Merge pull request #2029 from yandex/refactor-table-declaration
Refactor columns desription
This commit is contained in:
commit
2a90880602
@ -173,7 +173,7 @@ ASTPtr createASTIdentifierForColumnInTable(const String & column, const CollectT
|
||||
void createASTsForAllColumnsInTable(const CollectTables::TableInfo & table, ASTs & res)
|
||||
{
|
||||
if (table.storage)
|
||||
for (const auto & name : table.storage->getColumnNamesList())
|
||||
for (const auto & name : table.storage->getColumns().getNamesOfPhysical())
|
||||
res.emplace_back(createASTIdentifierForColumnInTable(name, table));
|
||||
else
|
||||
for (size_t i = 0, size = table.structure_of_subquery.columns(); i < size; ++i)
|
||||
@ -315,7 +315,7 @@ void processIdentifier(
|
||||
}
|
||||
else if (table->storage)
|
||||
{
|
||||
info.data_type = table->storage->getDataTypeByName(column_name);
|
||||
info.data_type = table->storage->getColumn(column_name).type;
|
||||
}
|
||||
else
|
||||
throw Exception("Logical error: no storage and no structure of subquery is specified for table", ErrorCodes::LOGICAL_ERROR);
|
||||
|
@ -72,7 +72,7 @@ void ExecuteTableFunctions::dump(WriteBuffer & out) const
|
||||
{
|
||||
writeString(table.second->getName(), out);
|
||||
writeCString("\n\n", out);
|
||||
writeString(table.second->getColumnsList().toString(), out);
|
||||
writeString(table.second->getColumns().getAllPhysical().toString(), out);
|
||||
writeCString("\n", out);
|
||||
}
|
||||
}
|
||||
|
@ -198,7 +198,7 @@ public:
|
||||
|
||||
/// Create table
|
||||
NamesAndTypesList columns = sample_block.getNamesAndTypesList();
|
||||
StoragePtr storage = StorageMemory::create(data.second, columns, NamesAndTypesList{}, NamesAndTypesList{}, ColumnDefaults{});
|
||||
StoragePtr storage = StorageMemory::create(data.second, ColumnsDescription{columns});
|
||||
storage->startup();
|
||||
context.addExternalTable(data.second, storage);
|
||||
BlockOutputStreamPtr output = storage->write(ASTPtr(), context.getSettingsRef());
|
||||
|
@ -138,4 +138,14 @@ NamesAndTypesList NamesAndTypesList::addTypes(const Names & names) const
|
||||
return res;
|
||||
}
|
||||
|
||||
bool NamesAndTypesList::contains(const String & name) const
|
||||
{
|
||||
for (const NameAndTypePair & column : *this)
|
||||
{
|
||||
if (column.name == name)
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -69,6 +69,8 @@ public:
|
||||
|
||||
/// Unlike `filter`, returns columns in the order in which they go in `names`.
|
||||
NamesAndTypesList addTypes(const Names & names) const;
|
||||
|
||||
bool contains(const String & name) const;
|
||||
};
|
||||
|
||||
}
|
||||
|
@ -44,7 +44,7 @@ std::ostream & operator<<(std::ostream & stream, const IDataType & what)
|
||||
std::ostream & operator<<(std::ostream & stream, const IStorage & what)
|
||||
{
|
||||
stream << "IStorage(name = " << what.getName() << ", tableName = " << what.getTableName() << ") {"
|
||||
<< what.getColumnsList().toString()
|
||||
<< what.getColumns().getAllPhysical().toString()
|
||||
<< "}";
|
||||
// isRemote supportsSampling supportsFinal supportsPrewhere
|
||||
return stream;
|
||||
|
@ -138,7 +138,7 @@ void RemoteBlockInputStream::sendExternalTables()
|
||||
{
|
||||
StoragePtr cur = table.second;
|
||||
QueryProcessingStage::Enum stage = QueryProcessingStage::Complete;
|
||||
BlockInputStreams input = cur->read(cur->getColumnNamesList(), {}, context,
|
||||
BlockInputStreams input = cur->read(cur->getColumns().getNamesOfPhysical(), {}, context,
|
||||
stage, DEFAULT_BLOCK_SIZE, 1);
|
||||
if (input.size() == 0)
|
||||
res.push_back(std::make_pair(std::make_shared<OneBlockInputStream>(cur->getSampleBlock()), table.first));
|
||||
|
@ -104,8 +104,8 @@ int main(int, char **)
|
||||
|
||||
/// create an object of an existing hit log table
|
||||
|
||||
StoragePtr table = StorageLog::create("./", "HitLog", names_and_types_list,
|
||||
NamesAndTypesList{}, NamesAndTypesList{}, ColumnDefaults{}, DEFAULT_MAX_COMPRESS_BLOCK_SIZE);
|
||||
StoragePtr table = StorageLog::create(
|
||||
"./", "HitLog", ColumnsDescription{names_and_types_list}, DEFAULT_MAX_COMPRESS_BLOCK_SIZE);
|
||||
table->startup();
|
||||
|
||||
/// read from it, apply the expression, filter, and write in tsv form to the console
|
||||
|
@ -95,8 +95,8 @@ try
|
||||
|
||||
/// create an object of an existing hit log table
|
||||
|
||||
StoragePtr table = StorageLog::create("./", "HitLog", names_and_types_list,
|
||||
NamesAndTypesList{}, NamesAndTypesList{}, ColumnDefaults{}, DEFAULT_MAX_COMPRESS_BLOCK_SIZE);
|
||||
StoragePtr table = StorageLog::create(
|
||||
"./", "HitLog", ColumnsDescription{names_and_types_list}, DEFAULT_MAX_COMPRESS_BLOCK_SIZE);
|
||||
table->startup();
|
||||
|
||||
/// read from it
|
||||
|
@ -107,8 +107,8 @@ try
|
||||
|
||||
/// create an object of an existing hit log table
|
||||
|
||||
StoragePtr table = StorageLog::create("./", "HitLog", names_and_types_list,
|
||||
NamesAndTypesList{}, NamesAndTypesList{}, ColumnDefaults{}, DEFAULT_MAX_COMPRESS_BLOCK_SIZE);
|
||||
StoragePtr table = StorageLog::create(
|
||||
"./", "HitLog", ColumnsDescription{names_and_types_list}, DEFAULT_MAX_COMPRESS_BLOCK_SIZE);
|
||||
table->startup();
|
||||
|
||||
/// read from it, sort it, and write it in tsv form to the console
|
||||
|
@ -41,8 +41,7 @@ Tables DatabaseDictionary::loadTables()
|
||||
{
|
||||
const DictionaryStructure & dictionary_structure = dict_ptr->getStructure();
|
||||
auto columns = StorageDictionary::getNamesAndTypes(dictionary_structure);
|
||||
tables[name] = StorageDictionary::create(name,
|
||||
columns, NamesAndTypesList{}, NamesAndTypesList{}, ColumnDefaults{}, dictionary_structure, name);
|
||||
tables[name] = StorageDictionary::create(name, ColumnsDescription{columns}, dictionary_structure, name);
|
||||
}
|
||||
}
|
||||
|
||||
@ -76,8 +75,7 @@ StoragePtr DatabaseDictionary::tryGetTable(
|
||||
{
|
||||
const DictionaryStructure & dictionary_structure = dict_ptr->getStructure();
|
||||
auto columns = StorageDictionary::getNamesAndTypes(dictionary_structure);
|
||||
return StorageDictionary::create(table_name,
|
||||
columns, NamesAndTypesList{}, NamesAndTypesList{}, ColumnDefaults{}, dictionary_structure, table_name);
|
||||
return StorageDictionary::create(table_name, ColumnsDescription{columns}, dictionary_structure, table_name);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -142,10 +140,7 @@ void DatabaseDictionary::renameTable(
|
||||
void DatabaseDictionary::alterTable(
|
||||
const Context &,
|
||||
const String &,
|
||||
const NamesAndTypesList &,
|
||||
const NamesAndTypesList &,
|
||||
const NamesAndTypesList &,
|
||||
const ColumnDefaults &,
|
||||
const ColumnsDescription &,
|
||||
const ASTModifier &)
|
||||
{
|
||||
throw Exception("DatabaseDictionary: alterTable() is not supported", ErrorCodes::NOT_IMPLEMENTED);
|
||||
|
@ -79,10 +79,7 @@ public:
|
||||
void alterTable(
|
||||
const Context & context,
|
||||
const String & name,
|
||||
const NamesAndTypesList & columns,
|
||||
const NamesAndTypesList & materialized_columns,
|
||||
const NamesAndTypesList & alias_columns,
|
||||
const ColumnDefaults & column_defaults,
|
||||
const ColumnsDescription & columns,
|
||||
const ASTModifier & engine_modifier) override;
|
||||
|
||||
time_t getTableMetadataModificationTime(
|
||||
|
@ -105,10 +105,7 @@ void DatabaseMemory::renameTable(
|
||||
void DatabaseMemory::alterTable(
|
||||
const Context &,
|
||||
const String &,
|
||||
const NamesAndTypesList &,
|
||||
const NamesAndTypesList &,
|
||||
const NamesAndTypesList &,
|
||||
const ColumnDefaults &,
|
||||
const ColumnsDescription &,
|
||||
const ASTModifier &)
|
||||
{
|
||||
throw Exception("DatabaseMemory: alterTable() is not supported", ErrorCodes::NOT_IMPLEMENTED);
|
||||
|
@ -70,10 +70,7 @@ public:
|
||||
void alterTable(
|
||||
const Context & context,
|
||||
const String & name,
|
||||
const NamesAndTypesList & columns,
|
||||
const NamesAndTypesList & materialized_columns,
|
||||
const NamesAndTypesList & alias_columns,
|
||||
const ColumnDefaults & column_defaults,
|
||||
const ColumnsDescription & columns,
|
||||
const ASTModifier & engine_modifier) override;
|
||||
|
||||
time_t getTableMetadataModificationTime(
|
||||
|
@ -462,10 +462,7 @@ void DatabaseOrdinary::drop()
|
||||
void DatabaseOrdinary::alterTable(
|
||||
const Context & context,
|
||||
const String & name,
|
||||
const NamesAndTypesList & columns,
|
||||
const NamesAndTypesList & materialized_columns,
|
||||
const NamesAndTypesList & alias_columns,
|
||||
const ColumnDefaults & column_defaults,
|
||||
const ColumnsDescription & columns,
|
||||
const ASTModifier & storage_modifier)
|
||||
{
|
||||
/// Read the definition of the table and replace the necessary parts with new ones.
|
||||
@ -486,7 +483,7 @@ void DatabaseOrdinary::alterTable(
|
||||
|
||||
ASTCreateQuery & ast_create_query = typeid_cast<ASTCreateQuery &>(*ast);
|
||||
|
||||
ASTPtr new_columns = InterpreterCreateQuery::formatColumns(columns, materialized_columns, alias_columns, column_defaults);
|
||||
ASTPtr new_columns = InterpreterCreateQuery::formatColumns(columns);
|
||||
ast_create_query.replace(ast_create_query.columns, new_columns);
|
||||
|
||||
if (storage_modifier)
|
||||
|
@ -45,10 +45,7 @@ public:
|
||||
void alterTable(
|
||||
const Context & context,
|
||||
const String & name,
|
||||
const NamesAndTypesList & columns,
|
||||
const NamesAndTypesList & materialized_columns,
|
||||
const NamesAndTypesList & alias_columns,
|
||||
const ColumnDefaults & column_defaults,
|
||||
const ColumnsDescription & columns,
|
||||
const ASTModifier & engine_modifier) override;
|
||||
|
||||
time_t getTableMetadataModificationTime(
|
||||
|
@ -67,7 +67,7 @@ std::pair<String, StoragePtr> createTableFromDefinition(
|
||||
if (!ast_create_query.columns)
|
||||
throw Exception("Missing definition of columns.", ErrorCodes::EMPTY_LIST_OF_COLUMNS_PASSED);
|
||||
|
||||
InterpreterCreateQuery::ColumnsInfo columns_info = InterpreterCreateQuery::getColumnsInfo(*ast_create_query.columns, context);
|
||||
ColumnsDescription columns = InterpreterCreateQuery::getColumnsDescription(*ast_create_query.columns, context);
|
||||
|
||||
return
|
||||
{
|
||||
@ -75,7 +75,7 @@ std::pair<String, StoragePtr> createTableFromDefinition(
|
||||
StorageFactory::instance().get(
|
||||
ast_create_query,
|
||||
database_data_path, ast_create_query.table, database_name, context, context.getGlobalContext(),
|
||||
columns_info.columns, columns_info.materialized_columns, columns_info.alias_columns, columns_info.column_defaults,
|
||||
columns,
|
||||
true, has_force_restore_data_flag)
|
||||
};
|
||||
}
|
||||
|
@ -2,7 +2,7 @@
|
||||
|
||||
#include <Core/Types.h>
|
||||
#include <Core/NamesAndTypes.h>
|
||||
#include <Storages/ColumnDefault.h>
|
||||
#include <Storages/ColumnsDescription.h>
|
||||
#include <ctime>
|
||||
#include <memory>
|
||||
#include <functional>
|
||||
@ -113,10 +113,7 @@ public:
|
||||
virtual void alterTable(
|
||||
const Context & context,
|
||||
const String & name,
|
||||
const NamesAndTypesList & columns,
|
||||
const NamesAndTypesList & materialized_columns,
|
||||
const NamesAndTypesList & alias_columns,
|
||||
const ColumnDefaults & column_defaults,
|
||||
const ColumnsDescription & columns,
|
||||
const ASTModifier & engine_modifier) = 0;
|
||||
|
||||
/// Returns time of table's metadata change, 0 if there is no corresponding metadata file.
|
||||
|
@ -1750,9 +1750,8 @@ void FunctionHasColumnInTable::executeImpl(Block & block, const ColumnNumbers &
|
||||
{
|
||||
std::vector<std::vector<String>> host_names = {{ host_name }};
|
||||
auto cluster = std::make_shared<Cluster>(global_context.getSettings(), host_names, !user_name.empty() ? user_name : "default", password, global_context.getTCPPort(), false);
|
||||
auto names_and_types_list = getStructureOfRemoteTable(*cluster, database_name, table_name, global_context);
|
||||
const auto & names = names_and_types_list.getNames();
|
||||
has_column = std::find(names.begin(), names.end(), column_name) != names.end();
|
||||
auto remote_columns = getStructureOfRemoteTable(*cluster, database_name, table_name, global_context);
|
||||
has_column = remote_columns.hasPhysical(column_name);
|
||||
}
|
||||
|
||||
block.getByPosition(result).column = DataTypeUInt8().createColumnConst(block.rows(), UInt64(has_column));
|
||||
|
@ -190,7 +190,7 @@ ExpressionAnalyzer::ExpressionAnalyzer(
|
||||
}
|
||||
|
||||
if (storage && source_columns.empty())
|
||||
source_columns = storage->getSampleBlock().getNamesAndTypesList();
|
||||
source_columns = storage->getColumns().getAllPhysical();
|
||||
else
|
||||
removeDuplicateColumns(source_columns);
|
||||
|
||||
@ -712,7 +712,7 @@ static std::shared_ptr<InterpreterSelectWithUnionQuery> interpretSubquery(
|
||||
/// get columns list for target table
|
||||
auto database_table = getDatabaseAndTableNameFromIdentifier(*table);
|
||||
const auto & storage = context.getTable(database_table.first, database_table.second);
|
||||
const auto & columns = storage->getColumnsListNonMaterialized();
|
||||
const auto & columns = storage->getColumns().ordinary;
|
||||
select_expression_list->children.reserve(columns.size());
|
||||
|
||||
/// manually substitute column names in place of asterisk
|
||||
@ -826,7 +826,7 @@ void ExpressionAnalyzer::addExternalStorage(ASTPtr & subquery_or_table_name_or_t
|
||||
Block sample = interpreter->getSampleBlock();
|
||||
NamesAndTypesList columns = sample.getNamesAndTypesList();
|
||||
|
||||
StoragePtr external_storage = StorageMemory::create(external_table_name, columns, NamesAndTypesList{}, NamesAndTypesList{}, ColumnDefaults{});
|
||||
StoragePtr external_storage = StorageMemory::create(external_table_name, ColumnsDescription{columns});
|
||||
external_storage->startup();
|
||||
|
||||
/** We replace the subquery with the name of the temporary table.
|
||||
@ -1050,7 +1050,7 @@ void ExpressionAnalyzer::normalizeTreeImpl(
|
||||
if (storage)
|
||||
{
|
||||
/// If we select from a table, get only not MATERIALIZED, not ALIAS columns.
|
||||
for (const auto & name_type : storage->getColumnsListNonMaterialized())
|
||||
for (const auto & name_type : storage->getColumns().ordinary)
|
||||
all_columns.emplace_back(std::make_shared<ASTIdentifier>(name_type.name));
|
||||
}
|
||||
else
|
||||
@ -1147,7 +1147,8 @@ void ExpressionAnalyzer::addAliasColumns()
|
||||
if (!storage)
|
||||
return;
|
||||
|
||||
source_columns.insert(std::end(source_columns), std::begin(storage->alias_columns), std::end(storage->alias_columns));
|
||||
const auto & aliases = storage->getColumns().aliases;
|
||||
source_columns.insert(std::end(source_columns), std::begin(aliases), std::end(aliases));
|
||||
}
|
||||
|
||||
|
||||
|
@ -110,7 +110,7 @@ void InterpreterAlterQuery::parseAlter(
|
||||
}
|
||||
if (ast_col_decl.default_expression)
|
||||
{
|
||||
command.default_type = columnDefaultTypeFromString(ast_col_decl.default_specifier);
|
||||
command.default_kind = columnDefaultKindFromString(ast_col_decl.default_specifier);
|
||||
command.default_expression = ast_col_decl.default_expression;
|
||||
}
|
||||
|
||||
@ -157,7 +157,7 @@ void InterpreterAlterQuery::parseAlter(
|
||||
|
||||
if (ast_col_decl.default_expression)
|
||||
{
|
||||
command.default_type = columnDefaultTypeFromString(ast_col_decl.default_specifier);
|
||||
command.default_kind = columnDefaultKindFromString(ast_col_decl.default_specifier);
|
||||
command.default_expression = ast_col_decl.default_expression;
|
||||
}
|
||||
|
||||
@ -200,7 +200,7 @@ void InterpreterAlterQuery::PartitionCommands::validate(const IStorage * table)
|
||||
{
|
||||
String column_name = command.column_name.safeGet<String>();
|
||||
|
||||
if (!table->hasRealColumn(column_name))
|
||||
if (!table->getColumns().hasPhysical(column_name))
|
||||
{
|
||||
throw Exception("Wrong column name. Cannot find column " + column_name + " to clear it from partition",
|
||||
DB::ErrorCodes::ILLEGAL_COLUMN);
|
||||
|
@ -246,7 +246,7 @@ static ColumnsAndDefaults parseColumns(const ASTExpressionList & column_list_ast
|
||||
explicit_type = block.getByName(column_name).type;
|
||||
|
||||
defaults.emplace(column_name, ColumnDefault{
|
||||
columnDefaultTypeFromString(col_decl_ptr->default_specifier),
|
||||
columnDefaultKindFromString(col_decl_ptr->default_specifier),
|
||||
col_decl_ptr->default_expression
|
||||
});
|
||||
}
|
||||
@ -256,7 +256,7 @@ static ColumnsAndDefaults parseColumns(const ASTExpressionList & column_list_ast
|
||||
}
|
||||
|
||||
|
||||
static NamesAndTypesList removeAndReturnColumns(ColumnsAndDefaults & columns_and_defaults, const ColumnDefaultType type)
|
||||
static NamesAndTypesList removeAndReturnColumns(ColumnsAndDefaults & columns_and_defaults, const ColumnDefaultKind kind)
|
||||
{
|
||||
auto & columns = columns_and_defaults.first;
|
||||
auto & defaults = columns_and_defaults.second;
|
||||
@ -266,7 +266,7 @@ static NamesAndTypesList removeAndReturnColumns(ColumnsAndDefaults & columns_and
|
||||
for (auto it = std::begin(columns); it != std::end(columns);)
|
||||
{
|
||||
const auto jt = defaults.find(it->name);
|
||||
if (jt != std::end(defaults) && jt->second.type == type)
|
||||
if (jt != std::end(defaults) && jt->second.kind == kind)
|
||||
{
|
||||
removed.push_back(*it);
|
||||
it = columns.erase(it);
|
||||
@ -301,15 +301,11 @@ ASTPtr InterpreterCreateQuery::formatColumns(const NamesAndTypesList & columns)
|
||||
return columns_list;
|
||||
}
|
||||
|
||||
ASTPtr InterpreterCreateQuery::formatColumns(
|
||||
const NamesAndTypesList & columns,
|
||||
const NamesAndTypesList & materialized_columns,
|
||||
const NamesAndTypesList & alias_columns,
|
||||
const ColumnDefaults & column_defaults)
|
||||
ASTPtr InterpreterCreateQuery::formatColumns(const ColumnsDescription & columns)
|
||||
{
|
||||
auto columns_list = std::make_shared<ASTExpressionList>();
|
||||
|
||||
for (const auto & column : boost::join(columns, boost::join(materialized_columns, alias_columns)))
|
||||
for (const auto & column : columns.getAll())
|
||||
{
|
||||
const auto column_declaration = std::make_shared<ASTColumnDeclaration>();
|
||||
ASTPtr column_declaration_ptr{column_declaration};
|
||||
@ -324,10 +320,10 @@ ASTPtr InterpreterCreateQuery::formatColumns(
|
||||
column_declaration->type = parseQuery(storage_p, pos, end, "data type");
|
||||
column_declaration->type->owned_string = type_name;
|
||||
|
||||
const auto it = column_defaults.find(column.name);
|
||||
if (it != std::end(column_defaults))
|
||||
const auto it = columns.defaults.find(column.name);
|
||||
if (it != std::end(columns.defaults))
|
||||
{
|
||||
column_declaration->default_specifier = toString(it->second.type);
|
||||
column_declaration->default_specifier = toString(it->second.kind);
|
||||
column_declaration->default_expression = it->second.expression->clone();
|
||||
}
|
||||
|
||||
@ -338,49 +334,46 @@ ASTPtr InterpreterCreateQuery::formatColumns(
|
||||
}
|
||||
|
||||
|
||||
InterpreterCreateQuery::ColumnsInfo InterpreterCreateQuery::getColumnsInfo(const ASTExpressionList & columns, const Context & context)
|
||||
ColumnsDescription InterpreterCreateQuery::getColumnsDescription(const ASTExpressionList & columns, const Context & context)
|
||||
{
|
||||
ColumnsInfo res;
|
||||
ColumnsDescription res;
|
||||
|
||||
auto && columns_and_defaults = parseColumns(columns, context);
|
||||
res.materialized_columns = removeAndReturnColumns(columns_and_defaults, ColumnDefaultType::Materialized);
|
||||
res.alias_columns = removeAndReturnColumns(columns_and_defaults, ColumnDefaultType::Alias);
|
||||
res.columns = std::move(columns_and_defaults.first);
|
||||
res.column_defaults = std::move(columns_and_defaults.second);
|
||||
res.materialized = removeAndReturnColumns(columns_and_defaults, ColumnDefaultKind::Materialized);
|
||||
res.aliases = removeAndReturnColumns(columns_and_defaults, ColumnDefaultKind::Alias);
|
||||
res.ordinary = std::move(columns_and_defaults.first);
|
||||
res.defaults = std::move(columns_and_defaults.second);
|
||||
|
||||
if (res.columns.size() + res.materialized_columns.size() == 0)
|
||||
if (res.ordinary.size() + res.materialized.size() == 0)
|
||||
throw Exception{"Cannot CREATE table without physical columns", ErrorCodes::EMPTY_LIST_OF_COLUMNS_PASSED};
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
|
||||
InterpreterCreateQuery::ColumnsInfo InterpreterCreateQuery::setColumns(
|
||||
ColumnsDescription InterpreterCreateQuery::setColumns(
|
||||
ASTCreateQuery & create, const Block & as_select_sample, const StoragePtr & as_storage) const
|
||||
{
|
||||
ColumnsInfo res;
|
||||
ColumnsDescription res;
|
||||
|
||||
if (create.columns)
|
||||
{
|
||||
res = getColumnsInfo(*create.columns, context);
|
||||
res = getColumnsDescription(*create.columns, context);
|
||||
}
|
||||
else if (!create.as_table.empty())
|
||||
{
|
||||
res.columns = as_storage->getColumnsListNonMaterialized();
|
||||
res.materialized_columns = as_storage->materialized_columns;
|
||||
res.alias_columns = as_storage->alias_columns;
|
||||
res.column_defaults = as_storage->column_defaults;
|
||||
res = as_storage->getColumns();
|
||||
}
|
||||
else if (create.select)
|
||||
{
|
||||
for (size_t i = 0; i < as_select_sample.columns(); ++i)
|
||||
res.columns.emplace_back(as_select_sample.safeGetByPosition(i).name, as_select_sample.safeGetByPosition(i).type);
|
||||
res.ordinary.emplace_back(as_select_sample.safeGetByPosition(i).name, as_select_sample.safeGetByPosition(i).type);
|
||||
}
|
||||
else
|
||||
throw Exception("Incorrect CREATE query: required list of column descriptions or AS section or SELECT.", ErrorCodes::INCORRECT_QUERY);
|
||||
|
||||
/// Even if query has list of columns, canonicalize it (unfold Nested columns).
|
||||
ASTPtr new_columns = formatColumns(res.columns, res.materialized_columns, res.alias_columns, res.column_defaults);
|
||||
ASTPtr new_columns = formatColumns(res);
|
||||
if (create.columns)
|
||||
create.replace(create.columns, new_columns);
|
||||
else
|
||||
@ -394,11 +387,11 @@ InterpreterCreateQuery::ColumnsInfo InterpreterCreateQuery::setColumns(
|
||||
throw Exception("Column " + backQuoteIfNeed(column_name_and_type.name) + " already exists", ErrorCodes::DUPLICATE_COLUMN);
|
||||
};
|
||||
|
||||
for (const auto & elem : res.columns)
|
||||
for (const auto & elem : res.ordinary)
|
||||
check_column_already_exists(elem);
|
||||
for (const auto & elem : res.materialized_columns)
|
||||
for (const auto & elem : res.materialized)
|
||||
check_column_already_exists(elem);
|
||||
for (const auto & elem : res.alias_columns)
|
||||
for (const auto & elem : res.aliases)
|
||||
check_column_already_exists(elem);
|
||||
|
||||
return res;
|
||||
@ -489,7 +482,7 @@ BlockIO InterpreterCreateQuery::createTable(ASTCreateQuery & create)
|
||||
}
|
||||
|
||||
/// Set and retrieve list of columns.
|
||||
ColumnsInfo columns = setColumns(create, as_select_sample, as_storage);
|
||||
ColumnsDescription columns = setColumns(create, as_select_sample, as_storage);
|
||||
|
||||
/// Set the table engine if it was not specified explicitly.
|
||||
setEngine(create);
|
||||
@ -530,10 +523,7 @@ BlockIO InterpreterCreateQuery::createTable(ASTCreateQuery & create)
|
||||
database_name,
|
||||
context,
|
||||
context.getGlobalContext(),
|
||||
columns.columns,
|
||||
columns.materialized_columns,
|
||||
columns.alias_columns,
|
||||
columns.column_defaults,
|
||||
columns,
|
||||
create.attach,
|
||||
false);
|
||||
|
||||
|
@ -1,7 +1,7 @@
|
||||
#pragma once
|
||||
|
||||
#include <Interpreters/IInterpreter.h>
|
||||
#include <Storages/ColumnDefault.h>
|
||||
#include <Storages/ColumnsDescription.h>
|
||||
|
||||
|
||||
class ThreadPool;
|
||||
@ -27,11 +27,7 @@ public:
|
||||
|
||||
/// List of columns and their types in AST.
|
||||
static ASTPtr formatColumns(const NamesAndTypesList & columns);
|
||||
static ASTPtr formatColumns(
|
||||
const NamesAndTypesList & columns,
|
||||
const NamesAndTypesList & materialized_columns,
|
||||
const NamesAndTypesList & alias_columns,
|
||||
const ColumnDefaults & column_defaults);
|
||||
static ASTPtr formatColumns(const ColumnsDescription & columns);
|
||||
|
||||
void setDatabaseLoadingThreadpool(ThreadPool & thread_pool_)
|
||||
{
|
||||
@ -48,23 +44,15 @@ public:
|
||||
internal = internal_;
|
||||
}
|
||||
|
||||
struct ColumnsInfo
|
||||
{
|
||||
NamesAndTypesList columns;
|
||||
NamesAndTypesList materialized_columns;
|
||||
NamesAndTypesList alias_columns;
|
||||
ColumnDefaults column_defaults;
|
||||
};
|
||||
|
||||
/// Obtain information about columns, their types and default values, for case when columns in CREATE query is specified explicitly.
|
||||
static ColumnsInfo getColumnsInfo(const ASTExpressionList & columns, const Context & context);
|
||||
static ColumnsDescription getColumnsDescription(const ASTExpressionList & columns, const Context & context);
|
||||
|
||||
private:
|
||||
BlockIO createDatabase(ASTCreateQuery & create);
|
||||
BlockIO createTable(ASTCreateQuery & create);
|
||||
|
||||
/// Calculate list of columns of table and return it.
|
||||
ColumnsInfo setColumns(ASTCreateQuery & create, const Block & as_select_sample, const StoragePtr & as_storage) const;
|
||||
ColumnsDescription setColumns(ASTCreateQuery & create, const Block & as_select_sample, const StoragePtr & as_storage) const;
|
||||
void setEngine(ASTCreateQuery & create) const;
|
||||
void checkAccess(const ASTCreateQuery & create);
|
||||
|
||||
|
@ -99,9 +99,8 @@ BlockInputStreamPtr InterpreterDescribeQuery::executeImpl()
|
||||
}
|
||||
|
||||
auto table_lock = table->lockStructure(false, __PRETTY_FUNCTION__);
|
||||
columns = table->getColumnsList();
|
||||
columns.insert(std::end(columns), std::begin(table->alias_columns), std::end(table->alias_columns));
|
||||
column_defaults = table->column_defaults;
|
||||
columns = table->getColumns().getAll();
|
||||
column_defaults = table->getColumns().defaults;
|
||||
}
|
||||
|
||||
Block sample_block = getSampleBlock();
|
||||
@ -120,7 +119,7 @@ BlockInputStreamPtr InterpreterDescribeQuery::executeImpl()
|
||||
}
|
||||
else
|
||||
{
|
||||
res_columns[2]->insert(toString(it->second.type));
|
||||
res_columns[2]->insert(toString(it->second.kind));
|
||||
res_columns[3]->insert(queryToString(it->second.expression));
|
||||
}
|
||||
}
|
||||
|
@ -95,7 +95,7 @@ BlockIO InterpreterInsertQuery::execute()
|
||||
|
||||
auto table_lock = table->lockStructure(true, __PRETTY_FUNCTION__);
|
||||
|
||||
NamesAndTypesList required_columns = table->getColumnsList();
|
||||
NamesAndTypesList required_columns = table->getColumns().getAllPhysical();
|
||||
|
||||
/// We create a pipeline of several streams, into which we will write data.
|
||||
BlockOutputStreamPtr out;
|
||||
@ -103,7 +103,7 @@ BlockIO InterpreterInsertQuery::execute()
|
||||
out = std::make_shared<PushingToViewsBlockOutputStream>(query.database, query.table, table, context, query_ptr, query.no_destination);
|
||||
|
||||
out = std::make_shared<AddingDefaultBlockOutputStream>(
|
||||
out, getSampleBlock(query, table), required_columns, table->column_defaults, context);
|
||||
out, getSampleBlock(query, table), required_columns, table->getColumns().defaults, context);
|
||||
|
||||
/// Do not squash blocks if it is a sync INSERT into Distributed, since it lead to double bufferization on client and server side.
|
||||
/// Client-side bufferization might cause excessive timeouts (especially in case of big blocks).
|
||||
@ -136,7 +136,7 @@ BlockIO InterpreterInsertQuery::execute()
|
||||
if (!allow_materialized)
|
||||
{
|
||||
Block in_header = res.in->getHeader();
|
||||
for (const auto & name_type : table->materialized_columns)
|
||||
for (const auto & name_type : table->getColumns().materialized)
|
||||
if (in_header.has(name_type.name))
|
||||
throw Exception("Cannot insert column " + name_type.name + ", because it is MATERIALIZED column.", ErrorCodes::ILLEGAL_COLUMN);
|
||||
}
|
||||
|
@ -516,12 +516,13 @@ QueryProcessingStage::Enum InterpreterSelectQuery::executeFetchColumns(Pipeline
|
||||
/// Are ALIAS columns required for query execution?
|
||||
auto alias_columns_required = false;
|
||||
|
||||
if (storage && !storage->alias_columns.empty())
|
||||
if (storage && !storage->getColumns().aliases.empty())
|
||||
{
|
||||
const auto & column_defaults = storage->getColumns().defaults;
|
||||
for (const auto & column : required_columns)
|
||||
{
|
||||
const auto default_it = storage->column_defaults.find(column);
|
||||
if (default_it != std::end(storage->column_defaults) && default_it->second.type == ColumnDefaultType::Alias)
|
||||
const auto default_it = column_defaults.find(column);
|
||||
if (default_it != std::end(column_defaults) && default_it->second.kind == ColumnDefaultKind::Alias)
|
||||
{
|
||||
alias_columns_required = true;
|
||||
break;
|
||||
@ -535,8 +536,8 @@ QueryProcessingStage::Enum InterpreterSelectQuery::executeFetchColumns(Pipeline
|
||||
|
||||
for (const auto & column : required_columns)
|
||||
{
|
||||
const auto default_it = storage->column_defaults.find(column);
|
||||
if (default_it != std::end(storage->column_defaults) && default_it->second.type == ColumnDefaultType::Alias)
|
||||
const auto default_it = column_defaults.find(column);
|
||||
if (default_it != std::end(column_defaults) && default_it->second.kind == ColumnDefaultKind::Alias)
|
||||
required_columns_expr_list->children.emplace_back(setAlias(default_it->second.expression->clone(), column));
|
||||
else
|
||||
required_columns_expr_list->children.emplace_back(std::make_shared<ASTIdentifier>(column));
|
||||
|
@ -38,7 +38,6 @@ public:
|
||||
std::string getRemoteTableName() const { return remote_table; }
|
||||
|
||||
std::string getTableName() const override { return ""; }
|
||||
const DB::NamesAndTypesList & getColumnsListImpl() const override { return names_and_types; }
|
||||
|
||||
protected:
|
||||
StorageDistributedFake(const std::string & remote_database_, const std::string & remote_table_, size_t shard_count_)
|
||||
@ -50,7 +49,6 @@ private:
|
||||
const std::string remote_database;
|
||||
const std::string remote_table;
|
||||
size_t shard_count;
|
||||
DB::NamesAndTypesList names_and_types;
|
||||
};
|
||||
|
||||
|
||||
|
@ -1120,8 +1120,8 @@ protected:
|
||||
|
||||
if (!column.default_specifier.empty())
|
||||
{
|
||||
ColumnDefaultType type = columnDefaultTypeFromString(column.default_specifier);
|
||||
if (type == ColumnDefaultType::Materialized || type == ColumnDefaultType::Alias)
|
||||
ColumnDefaultKind kind = columnDefaultKindFromString(column.default_specifier);
|
||||
if (kind == ColumnDefaultKind::Materialized || kind == ColumnDefaultKind::Alias)
|
||||
continue;
|
||||
}
|
||||
|
||||
|
@ -630,7 +630,8 @@ bool TCPHandler::receiveData()
|
||||
if (!(storage = query_context.tryGetExternalTable(external_table_name)))
|
||||
{
|
||||
NamesAndTypesList columns = block.getNamesAndTypesList();
|
||||
storage = StorageMemory::create(external_table_name, columns, NamesAndTypesList{}, NamesAndTypesList{}, ColumnDefaults{});
|
||||
storage = StorageMemory::create(external_table_name,
|
||||
ColumnsDescription{columns, NamesAndTypesList{}, NamesAndTypesList{}, ColumnDefaults{}});
|
||||
storage->startup();
|
||||
query_context.addExternalTable(external_table_name, storage);
|
||||
}
|
||||
|
@ -21,21 +21,11 @@ namespace ErrorCodes
|
||||
}
|
||||
|
||||
|
||||
void AlterCommand::apply(
|
||||
NamesAndTypesList & columns, NamesAndTypesList & materialized_columns, NamesAndTypesList & alias_columns,
|
||||
ColumnDefaults & column_defaults) const
|
||||
void AlterCommand::apply(ColumnsDescription & columns_description) const
|
||||
{
|
||||
if (type == ADD_COLUMN)
|
||||
{
|
||||
const auto exists_in = [this] (const NamesAndTypesList & columns)
|
||||
{
|
||||
return columns.end() != std::find_if(columns.begin(), columns.end(),
|
||||
std::bind(namesEqual, std::cref(column_name), std::placeholders::_1));
|
||||
};
|
||||
|
||||
if (exists_in(columns) ||
|
||||
exists_in(materialized_columns) ||
|
||||
exists_in(alias_columns))
|
||||
if (columns_description.getAll().contains(column_name))
|
||||
{
|
||||
throw Exception{
|
||||
"Cannot add column " + column_name + ": column with this name already exists",
|
||||
@ -67,25 +57,25 @@ void AlterCommand::apply(
|
||||
columns.emplace(insert_it, column_name, data_type);
|
||||
};
|
||||
|
||||
if (default_type == ColumnDefaultType::Default)
|
||||
add_column(columns);
|
||||
else if (default_type == ColumnDefaultType::Materialized)
|
||||
add_column(materialized_columns);
|
||||
else if (default_type == ColumnDefaultType::Alias)
|
||||
add_column(alias_columns);
|
||||
if (default_kind == ColumnDefaultKind::Default)
|
||||
add_column(columns_description.ordinary);
|
||||
else if (default_kind == ColumnDefaultKind::Materialized)
|
||||
add_column(columns_description.materialized);
|
||||
else if (default_kind == ColumnDefaultKind::Alias)
|
||||
add_column(columns_description.aliases);
|
||||
else
|
||||
throw Exception{"Unknown ColumnDefaultType value", ErrorCodes::LOGICAL_ERROR};
|
||||
throw Exception{"Unknown ColumnDefaultKind value", ErrorCodes::LOGICAL_ERROR};
|
||||
|
||||
if (default_expression)
|
||||
column_defaults.emplace(column_name, ColumnDefault{default_type, default_expression});
|
||||
columns_description.defaults.emplace(column_name, ColumnDefault{default_kind, default_expression});
|
||||
|
||||
/// Slow, because each time a list is copied
|
||||
columns = Nested::flatten(columns);
|
||||
columns_description.ordinary = Nested::flatten(columns_description.ordinary);
|
||||
}
|
||||
else if (type == DROP_COLUMN)
|
||||
{
|
||||
/// look for a column in list and remove it if present, also removing corresponding entry from column_defaults
|
||||
const auto remove_column = [&column_defaults, this] (NamesAndTypesList & columns)
|
||||
const auto remove_column = [&columns_description, this] (NamesAndTypesList & columns)
|
||||
{
|
||||
auto removed = false;
|
||||
NamesAndTypesList::iterator column_it;
|
||||
@ -95,15 +85,15 @@ void AlterCommand::apply(
|
||||
{
|
||||
removed = true;
|
||||
column_it = columns.erase(column_it);
|
||||
column_defaults.erase(column_name);
|
||||
columns_description.defaults.erase(column_name);
|
||||
}
|
||||
|
||||
return removed;
|
||||
};
|
||||
|
||||
if (!remove_column(columns) &&
|
||||
!remove_column(materialized_columns) &&
|
||||
!remove_column(alias_columns))
|
||||
if (!remove_column(columns_description.ordinary) &&
|
||||
!remove_column(columns_description.materialized) &&
|
||||
!remove_column(columns_description.aliases))
|
||||
{
|
||||
throw Exception("Wrong column name. Cannot find column " + column_name + " to drop",
|
||||
ErrorCodes::ILLEGAL_COLUMN);
|
||||
@ -111,14 +101,15 @@ void AlterCommand::apply(
|
||||
}
|
||||
else if (type == MODIFY_COLUMN)
|
||||
{
|
||||
const auto default_it = column_defaults.find(column_name);
|
||||
const auto had_default_expr = default_it != std::end(column_defaults);
|
||||
const auto old_default_type = had_default_expr ? default_it->second.type : ColumnDefaultType{};
|
||||
const auto default_it = columns_description.defaults.find(column_name);
|
||||
const auto had_default_expr = default_it != std::end(columns_description.defaults);
|
||||
const auto old_default_kind = had_default_expr ? default_it->second.kind : ColumnDefaultKind{};
|
||||
|
||||
/// target column list
|
||||
auto & new_columns = default_type == ColumnDefaultType::Default ?
|
||||
columns : default_type == ColumnDefaultType::Materialized ?
|
||||
materialized_columns : alias_columns;
|
||||
auto & new_columns =
|
||||
default_kind == ColumnDefaultKind::Default ? columns_description.ordinary
|
||||
: default_kind == ColumnDefaultKind::Materialized ? columns_description.materialized
|
||||
: columns_description.aliases;
|
||||
|
||||
/// find column or throw exception
|
||||
const auto find_column = [this] (NamesAndTypesList & columns)
|
||||
@ -133,12 +124,13 @@ void AlterCommand::apply(
|
||||
};
|
||||
|
||||
/// if default types differ, remove column from the old list, then add to the new list
|
||||
if (default_type != old_default_type)
|
||||
if (default_kind != old_default_kind)
|
||||
{
|
||||
/// source column list
|
||||
auto & old_columns = old_default_type == ColumnDefaultType::Default ?
|
||||
columns : old_default_type == ColumnDefaultType::Materialized ?
|
||||
materialized_columns : alias_columns;
|
||||
auto & old_columns =
|
||||
old_default_kind == ColumnDefaultKind::Default ? columns_description.ordinary
|
||||
: old_default_kind == ColumnDefaultKind::Materialized ? columns_description.materialized
|
||||
: columns_description.aliases;
|
||||
|
||||
const auto old_column_it = find_column(old_columns);
|
||||
new_columns.emplace_back(*old_column_it);
|
||||
@ -146,7 +138,7 @@ void AlterCommand::apply(
|
||||
|
||||
/// do not forget to change the default type of old column
|
||||
if (had_default_expr)
|
||||
column_defaults[column_name].type = default_type;
|
||||
columns_description.defaults[column_name].kind = default_kind;
|
||||
}
|
||||
|
||||
/// find column in one of three column lists
|
||||
@ -155,13 +147,13 @@ void AlterCommand::apply(
|
||||
|
||||
if (!default_expression && had_default_expr)
|
||||
/// new column has no default expression, remove it from column_defaults along with it's type
|
||||
column_defaults.erase(column_name);
|
||||
columns_description.defaults.erase(column_name);
|
||||
else if (default_expression && !had_default_expr)
|
||||
/// new column has a default expression while the old one had not, add it it column_defaults
|
||||
column_defaults.emplace(column_name, ColumnDefault{default_type, default_expression});
|
||||
columns_description.defaults.emplace(column_name, ColumnDefault{default_kind, default_expression});
|
||||
else if (had_default_expr)
|
||||
/// both old and new columns have default expression, update it
|
||||
column_defaults[column_name].expression = default_expression;
|
||||
columns_description.defaults[column_name].expression = default_expression;
|
||||
}
|
||||
else if (type == MODIFY_PRIMARY_KEY)
|
||||
{
|
||||
@ -173,30 +165,20 @@ void AlterCommand::apply(
|
||||
}
|
||||
|
||||
|
||||
void AlterCommands::apply(NamesAndTypesList & columns,
|
||||
NamesAndTypesList & materialized_columns,
|
||||
NamesAndTypesList & alias_columns,
|
||||
ColumnDefaults & column_defaults) const
|
||||
void AlterCommands::apply(ColumnsDescription & columns_description) const
|
||||
{
|
||||
auto new_columns = columns;
|
||||
auto new_materialized_columns = materialized_columns;
|
||||
auto new_alias_columns = alias_columns;
|
||||
auto new_column_defaults = column_defaults;
|
||||
auto new_columns_description = columns_description;
|
||||
|
||||
for (const AlterCommand & command : *this)
|
||||
command.apply(new_columns, new_materialized_columns, new_alias_columns, new_column_defaults);
|
||||
command.apply(new_columns_description);
|
||||
|
||||
columns = std::move(new_columns);
|
||||
materialized_columns = std::move(new_materialized_columns);
|
||||
alias_columns = std::move(new_alias_columns);
|
||||
column_defaults = std::move(new_column_defaults);
|
||||
columns_description = std::move(new_columns_description);
|
||||
}
|
||||
|
||||
void AlterCommands::validate(IStorage * table, const Context & context)
|
||||
{
|
||||
auto columns = table->getColumnsList();
|
||||
columns.insert(std::end(columns), std::begin(table->alias_columns), std::end(table->alias_columns));
|
||||
auto defaults = table->column_defaults;
|
||||
auto all_columns = table->getColumns().getAll();
|
||||
auto defaults = table->getColumns().defaults;
|
||||
|
||||
std::vector<std::pair<NameAndTypePair, AlterCommand *>> defaulted_columns{};
|
||||
|
||||
@ -208,12 +190,12 @@ void AlterCommands::validate(IStorage * table, const Context & context)
|
||||
if (command.type == AlterCommand::ADD_COLUMN || command.type == AlterCommand::MODIFY_COLUMN)
|
||||
{
|
||||
const auto & column_name = command.column_name;
|
||||
const auto column_it = std::find_if(std::begin(columns), std::end(columns),
|
||||
const auto column_it = std::find_if(std::begin(all_columns), std::end(all_columns),
|
||||
std::bind(AlterCommand::namesEqual, std::cref(command.column_name), std::placeholders::_1));
|
||||
|
||||
if (command.type == AlterCommand::ADD_COLUMN)
|
||||
{
|
||||
if (std::end(columns) != column_it)
|
||||
if (std::end(all_columns) != column_it)
|
||||
throw Exception{
|
||||
"Cannot add column " + column_name + ": column with this name already exists",
|
||||
ErrorCodes::ILLEGAL_COLUMN};
|
||||
@ -221,17 +203,17 @@ void AlterCommands::validate(IStorage * table, const Context & context)
|
||||
else if (command.type == AlterCommand::MODIFY_COLUMN)
|
||||
{
|
||||
|
||||
if (std::end(columns) == column_it)
|
||||
if (std::end(all_columns) == column_it)
|
||||
throw Exception{
|
||||
"Wrong column name. Cannot find column " + column_name + " to modify",
|
||||
ErrorCodes::ILLEGAL_COLUMN};
|
||||
|
||||
columns.erase(column_it);
|
||||
all_columns.erase(column_it);
|
||||
defaults.erase(column_name);
|
||||
}
|
||||
|
||||
/// we're creating dummy DataTypeUInt8 in order to prevent the NullPointerException in ExpressionActions
|
||||
columns.emplace_back(column_name, command.data_type ? command.data_type : std::make_shared<DataTypeUInt8>());
|
||||
all_columns.emplace_back(column_name, command.data_type ? command.data_type : std::make_shared<DataTypeUInt8>());
|
||||
|
||||
if (command.default_expression)
|
||||
{
|
||||
@ -265,7 +247,7 @@ void AlterCommands::validate(IStorage * table, const Context & context)
|
||||
for (const auto & default_column : defaults)
|
||||
{
|
||||
const auto & default_expression = default_column.second.expression;
|
||||
const auto actions = ExpressionAnalyzer{default_expression, context, {}, columns}.getActions(true);
|
||||
const auto actions = ExpressionAnalyzer{default_expression, context, {}, all_columns}.getActions(true);
|
||||
const auto required_columns = actions->getRequiredColumns();
|
||||
|
||||
if (required_columns.end() != std::find(required_columns.begin(), required_columns.end(), command.column_name))
|
||||
@ -275,12 +257,12 @@ void AlterCommands::validate(IStorage * table, const Context & context)
|
||||
}
|
||||
|
||||
auto found = false;
|
||||
for (auto it = std::begin(columns); it != std::end(columns);)
|
||||
for (auto it = std::begin(all_columns); it != std::end(all_columns);)
|
||||
{
|
||||
if (AlterCommand::namesEqual(command.column_name, *it))
|
||||
{
|
||||
found = true;
|
||||
it = columns.erase(it);
|
||||
it = all_columns.erase(it);
|
||||
}
|
||||
else
|
||||
++it;
|
||||
@ -305,7 +287,7 @@ void AlterCommands::validate(IStorage * table, const Context & context)
|
||||
for (const auto & col_def : defaults)
|
||||
{
|
||||
const auto & column_name = col_def.first;
|
||||
const auto column_it = std::find_if(columns.begin(), columns.end(), [&] (const NameAndTypePair & name_type)
|
||||
const auto column_it = std::find_if(all_columns.begin(), all_columns.end(), [&] (const NameAndTypePair & name_type)
|
||||
{ return AlterCommand::namesEqual(column_name, name_type); });
|
||||
|
||||
const auto tmp_column_name = column_name + "_tmp";
|
||||
@ -321,7 +303,7 @@ void AlterCommands::validate(IStorage * table, const Context & context)
|
||||
defaulted_columns.emplace_back(NameAndTypePair{column_name, column_type_ptr}, nullptr);
|
||||
}
|
||||
|
||||
const auto actions = ExpressionAnalyzer{default_expr_list, context, {}, columns}.getActions(true);
|
||||
const auto actions = ExpressionAnalyzer{default_expr_list, context, {}, all_columns}.getActions(true);
|
||||
const auto block = actions->getSampleBlock();
|
||||
|
||||
/// set deduced types, modify default expression if necessary
|
||||
@ -351,7 +333,7 @@ void AlterCommands::validate(IStorage * table, const Context & context)
|
||||
/// add a new alter command to modify existing column
|
||||
this->emplace_back(AlterCommand{
|
||||
AlterCommand::MODIFY_COLUMN, column_name, explicit_type,
|
||||
default_it->second.type, default_it->second.expression
|
||||
default_it->second.kind, default_it->second.expression
|
||||
});
|
||||
|
||||
command_ptr = &this->back();
|
||||
|
@ -1,7 +1,7 @@
|
||||
#pragma once
|
||||
|
||||
#include <Core/NamesAndTypes.h>
|
||||
#include <Storages/ColumnDefault.h>
|
||||
#include <Storages/ColumnsDescription.h>
|
||||
|
||||
namespace DB
|
||||
{
|
||||
@ -27,7 +27,7 @@ struct AlterCommand
|
||||
/// For ADD and MODIFY, a new column type.
|
||||
DataTypePtr data_type;
|
||||
|
||||
ColumnDefaultType default_type{};
|
||||
ColumnDefaultKind default_kind{};
|
||||
ASTPtr default_expression{};
|
||||
|
||||
/// For ADD - after which column to add a new one. If an empty string, add to the end. To add to the beginning now it is impossible.
|
||||
@ -43,16 +43,13 @@ struct AlterCommand
|
||||
return (name_with_dot == name_type.name.substr(0, name_without_dot.length() + 1) || name_without_dot == name_type.name);
|
||||
}
|
||||
|
||||
void apply(NamesAndTypesList & columns,
|
||||
NamesAndTypesList & materialized_columns,
|
||||
NamesAndTypesList & alias_columns,
|
||||
ColumnDefaults & column_defaults) const;
|
||||
void apply(ColumnsDescription & columns_description) const;
|
||||
|
||||
AlterCommand() = default;
|
||||
AlterCommand(const Type type, const String & column_name, const DataTypePtr & data_type,
|
||||
const ColumnDefaultType default_type, const ASTPtr & default_expression,
|
||||
const ColumnDefaultKind default_kind, const ASTPtr & default_expression,
|
||||
const String & after_column = String{})
|
||||
: type{type}, column_name{column_name}, data_type{data_type}, default_type{default_type},
|
||||
: type{type}, column_name{column_name}, data_type{data_type}, default_kind{default_kind},
|
||||
default_expression{default_expression}, after_column{after_column}
|
||||
{}
|
||||
};
|
||||
@ -63,10 +60,7 @@ class Context;
|
||||
class AlterCommands : public std::vector<AlterCommand>
|
||||
{
|
||||
public:
|
||||
void apply(NamesAndTypesList & columns,
|
||||
NamesAndTypesList & materialized_columns,
|
||||
NamesAndTypesList & alias_columns,
|
||||
ColumnDefaults & column_defaults) const;
|
||||
void apply(ColumnsDescription & columns_description) const;
|
||||
|
||||
void validate(IStorage * table, const Context & context);
|
||||
};
|
||||
|
@ -6,12 +6,12 @@ namespace DB
|
||||
{
|
||||
|
||||
|
||||
ColumnDefaultType columnDefaultTypeFromString(const std::string & str)
|
||||
ColumnDefaultKind columnDefaultKindFromString(const std::string & str)
|
||||
{
|
||||
static const std::unordered_map<std::string, ColumnDefaultType> map{
|
||||
{ "DEFAULT", ColumnDefaultType::Default },
|
||||
{ "MATERIALIZED", ColumnDefaultType::Materialized },
|
||||
{ "ALIAS", ColumnDefaultType::Alias }
|
||||
static const std::unordered_map<std::string, ColumnDefaultKind> map{
|
||||
{ "DEFAULT", ColumnDefaultKind::Default },
|
||||
{ "MATERIALIZED", ColumnDefaultKind::Materialized },
|
||||
{ "ALIAS", ColumnDefaultKind::Alias }
|
||||
};
|
||||
|
||||
const auto it = map.find(str);
|
||||
@ -19,22 +19,22 @@ ColumnDefaultType columnDefaultTypeFromString(const std::string & str)
|
||||
}
|
||||
|
||||
|
||||
std::string toString(const ColumnDefaultType type)
|
||||
std::string toString(const ColumnDefaultKind kind)
|
||||
{
|
||||
static const std::unordered_map<ColumnDefaultType, std::string> map{
|
||||
{ ColumnDefaultType::Default, "DEFAULT" },
|
||||
{ ColumnDefaultType::Materialized, "MATERIALIZED" },
|
||||
{ ColumnDefaultType::Alias, "ALIAS" }
|
||||
static const std::unordered_map<ColumnDefaultKind, std::string> map{
|
||||
{ ColumnDefaultKind::Default, "DEFAULT" },
|
||||
{ ColumnDefaultKind::Materialized, "MATERIALIZED" },
|
||||
{ ColumnDefaultKind::Alias, "ALIAS" }
|
||||
};
|
||||
|
||||
const auto it = map.find(type);
|
||||
return it != std::end(map) ? it->second : throw Exception{"Invalid ColumnDefaultType"};
|
||||
const auto it = map.find(kind);
|
||||
return it != std::end(map) ? it->second : throw Exception{"Invalid ColumnDefaultKind"};
|
||||
}
|
||||
|
||||
|
||||
bool operator==(const ColumnDefault & lhs, const ColumnDefault & rhs)
|
||||
{
|
||||
return lhs.type == rhs.type && queryToString(lhs.expression) == queryToString(rhs.expression);
|
||||
return lhs.kind == rhs.kind && queryToString(lhs.expression) == queryToString(rhs.expression);
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -9,7 +9,7 @@
|
||||
namespace DB
|
||||
{
|
||||
|
||||
enum class ColumnDefaultType
|
||||
enum class ColumnDefaultKind
|
||||
{
|
||||
Default,
|
||||
Materialized,
|
||||
@ -17,13 +17,13 @@ enum class ColumnDefaultType
|
||||
};
|
||||
|
||||
|
||||
ColumnDefaultType columnDefaultTypeFromString(const std::string & str);
|
||||
std::string toString(const ColumnDefaultType type);
|
||||
ColumnDefaultKind columnDefaultKindFromString(const std::string & str);
|
||||
std::string toString(const ColumnDefaultKind type);
|
||||
|
||||
|
||||
struct ColumnDefault
|
||||
{
|
||||
ColumnDefaultType type;
|
||||
ColumnDefaultKind kind;
|
||||
ASTPtr expression;
|
||||
};
|
||||
|
||||
|
@ -1,3 +1,4 @@
|
||||
#include <Storages/ColumnsDescription.h>
|
||||
#include <Parsers/ExpressionListParsers.h>
|
||||
#include <Parsers/parseQuery.h>
|
||||
#include <Parsers/queryToString.h>
|
||||
@ -7,8 +8,13 @@
|
||||
#include <IO/ReadHelpers.h>
|
||||
#include <IO/WriteBufferFromString.h>
|
||||
#include <IO/ReadBufferFromString.h>
|
||||
#include <Storages/ColumnsDescription.h>
|
||||
#include <DataTypes/DataTypeFactory.h>
|
||||
#include <Common/Exception.h>
|
||||
|
||||
#include <ext/collection_cast.h>
|
||||
#include <ext/map.h>
|
||||
|
||||
#include <boost/range/join.hpp>
|
||||
|
||||
|
||||
namespace DB
|
||||
@ -16,17 +22,53 @@ namespace DB
|
||||
|
||||
namespace ErrorCodes
|
||||
{
|
||||
extern const int NO_SUCH_COLUMN_IN_TABLE;
|
||||
extern const int CANNOT_PARSE_TEXT;
|
||||
}
|
||||
|
||||
|
||||
template <bool store>
|
||||
String ColumnsDescription<store>::toString() const
|
||||
NamesAndTypesList ColumnsDescription::getAllPhysical() const
|
||||
{
|
||||
return ext::collection_cast<NamesAndTypesList>(boost::join(ordinary, materialized));
|
||||
}
|
||||
|
||||
|
||||
NamesAndTypesList ColumnsDescription::getAll() const
|
||||
{
|
||||
return ext::collection_cast<NamesAndTypesList>(boost::join(ordinary, boost::join(materialized, aliases)));
|
||||
}
|
||||
|
||||
|
||||
Names ColumnsDescription::getNamesOfPhysical() const
|
||||
{
|
||||
return ext::map<Names>(boost::join(ordinary, materialized), [] (const auto & it) { return it.name; });
|
||||
}
|
||||
|
||||
|
||||
NameAndTypePair ColumnsDescription::getPhysical(const String & column_name) const
|
||||
{
|
||||
for (auto & it : boost::join(ordinary, materialized))
|
||||
if (it.name == column_name)
|
||||
return it;
|
||||
throw Exception("There is no column " + column_name + " in table.", ErrorCodes::NO_SUCH_COLUMN_IN_TABLE);
|
||||
}
|
||||
|
||||
|
||||
bool ColumnsDescription::hasPhysical(const String & column_name) const
|
||||
{
|
||||
for (auto & it : boost::join(ordinary, materialized))
|
||||
if (it.name == column_name)
|
||||
return true;
|
||||
return false;
|
||||
}
|
||||
|
||||
|
||||
String ColumnsDescription::toString() const
|
||||
{
|
||||
WriteBufferFromOwnString buf;
|
||||
|
||||
writeString("columns format version: 1\n", buf);
|
||||
writeText(columns.size() + materialized.size() + alias.size(), buf);
|
||||
writeText(ordinary.size() + materialized.size() + aliases.size(), buf);
|
||||
writeString(" columns:\n", buf);
|
||||
|
||||
const auto write_columns = [this, &buf] (const NamesAndTypesList & columns)
|
||||
@ -46,23 +88,22 @@ String ColumnsDescription<store>::toString() const
|
||||
else
|
||||
writeChar('\t', buf);
|
||||
|
||||
writeString(DB::toString(it->second.type), buf);
|
||||
writeString(DB::toString(it->second.kind), buf);
|
||||
writeChar('\t', buf);
|
||||
writeString(queryToString(it->second.expression), buf);
|
||||
writeChar('\n', buf);
|
||||
}
|
||||
};
|
||||
|
||||
write_columns(columns);
|
||||
write_columns(ordinary);
|
||||
write_columns(materialized);
|
||||
write_columns(alias);
|
||||
write_columns(aliases);
|
||||
|
||||
return buf.str();
|
||||
}
|
||||
|
||||
|
||||
template <>
|
||||
ColumnsDescription<true> ColumnsDescription<true>::parse(const String & str)
|
||||
ColumnsDescription ColumnsDescription::parse(const String & str)
|
||||
{
|
||||
ReadBufferFromString buf{str};
|
||||
|
||||
@ -74,7 +115,7 @@ ColumnsDescription<true> ColumnsDescription<true>::parse(const String & str)
|
||||
ParserExpression expr_parser;
|
||||
const DataTypeFactory & data_type_factory = DataTypeFactory::instance();
|
||||
|
||||
ColumnsDescription<true> result{};
|
||||
ColumnsDescription result;
|
||||
for (size_t i = 0; i < count; ++i)
|
||||
{
|
||||
String column_name;
|
||||
@ -88,14 +129,14 @@ ColumnsDescription<true> ColumnsDescription<true>::parse(const String & str)
|
||||
{
|
||||
assertChar('\n', buf);
|
||||
|
||||
result.columns.emplace_back(column_name, std::move(type));
|
||||
result.ordinary.emplace_back(column_name, std::move(type));
|
||||
continue;
|
||||
}
|
||||
assertChar('\t', buf);
|
||||
|
||||
String default_type_str;
|
||||
readString(default_type_str, buf);
|
||||
const auto default_type = columnDefaultTypeFromString(default_type_str);
|
||||
String default_kind_str;
|
||||
readString(default_kind_str, buf);
|
||||
const auto default_kind = columnDefaultKindFromString(default_kind_str);
|
||||
assertChar('\t', buf);
|
||||
|
||||
String default_expr_str;
|
||||
@ -106,14 +147,14 @@ ColumnsDescription<true> ColumnsDescription<true>::parse(const String & str)
|
||||
const auto end = begin + default_expr_str.size();
|
||||
ASTPtr default_expr = parseQuery(expr_parser, begin, end, "default expression");
|
||||
|
||||
if (ColumnDefaultType::Default == default_type)
|
||||
result.columns.emplace_back(column_name, std::move(type));
|
||||
else if (ColumnDefaultType::Materialized == default_type)
|
||||
if (ColumnDefaultKind::Default == default_kind)
|
||||
result.ordinary.emplace_back(column_name, std::move(type));
|
||||
else if (ColumnDefaultKind::Materialized == default_kind)
|
||||
result.materialized.emplace_back(column_name, std::move(type));
|
||||
else if (ColumnDefaultType::Alias == default_type)
|
||||
result.alias.emplace_back(column_name, std::move(type));
|
||||
else if (ColumnDefaultKind::Alias == default_kind)
|
||||
result.aliases.emplace_back(column_name, std::move(type));
|
||||
|
||||
result.defaults.emplace(column_name, ColumnDefault{default_type, default_expr});
|
||||
result.defaults.emplace(column_name, ColumnDefault{default_kind, default_expr});
|
||||
}
|
||||
|
||||
assertEOF(buf);
|
||||
@ -121,8 +162,4 @@ ColumnsDescription<true> ColumnsDescription<true>::parse(const String & str)
|
||||
return result;
|
||||
}
|
||||
|
||||
|
||||
template struct ColumnsDescription<false>;
|
||||
template struct ColumnsDescription<true>;
|
||||
|
||||
}
|
||||
|
@ -1,28 +1,62 @@
#pragma once

#include <Storages/ColumnDefault.h>
#include <Core/NamesAndTypes.h>
#include <Core/Names.h>
#include <Storages/ColumnDefault.h>
#include <Core/Block.h>


namespace DB
{


template <bool store>
struct ColumnsDescription
{
template <typename T>
using by_value_or_cref = std::conditional_t<store, T, const T &>;
NamesAndTypesList ordinary;
NamesAndTypesList materialized;
NamesAndTypesList aliases;
ColumnDefaults defaults;

ColumnsDescription() = default;

ColumnsDescription(
NamesAndTypesList ordinary_,
NamesAndTypesList materialized_,
NamesAndTypesList aliases_,
ColumnDefaults defaults_)
: ordinary(std::move(ordinary_))
, materialized(std::move(materialized_))
, aliases(std::move(aliases_))
, defaults(std::move(defaults_))
{}

explicit ColumnsDescription(NamesAndTypesList ordinary_) : ordinary(std::move(ordinary_)) {}

bool operator==(const ColumnsDescription & other) const
{
return ordinary == other.ordinary
&& materialized == other.materialized
&& aliases == other.aliases
&& defaults == other.defaults;
}

bool operator!=(const ColumnsDescription & other) const { return !(*this == other); }

/// ordinary + materialized.
NamesAndTypesList getAllPhysical() const;

/// ordinary + materialized + aliases.
NamesAndTypesList getAll() const;

Names getNamesOfPhysical() const;

NameAndTypePair getPhysical(const String & column_name) const;

bool hasPhysical(const String & column_name) const;

by_value_or_cref<NamesAndTypesList> columns;
by_value_or_cref<NamesAndTypesList> materialized;
by_value_or_cref<NamesAndTypesList> alias;
by_value_or_cref<ColumnDefaults> defaults;

String toString() const;

static ColumnsDescription parse(const String & str);
};


}
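A minimal usage sketch of the de-templated ColumnsDescription above (illustration only, not part of the diff; the column names and types are invented):

#include <Storages/ColumnsDescription.h>
#include <DataTypes/DataTypesNumber.h>
#include <DataTypes/DataTypeString.h>
#include <memory>

using namespace DB;

ColumnsDescription makeExampleDescription()
{
    ColumnsDescription description;
    description.ordinary.emplace_back("id", std::make_shared<DataTypeUInt64>());         /// visible to SELECT *
    description.materialized.emplace_back("id_str", std::make_shared<DataTypeString>()); /// stored but hidden from SELECT *
    return description;
}

bool describesId(const ColumnsDescription & description)
{
    /// getAllPhysical() is ordinary + materialized; ALIAS columns are excluded.
    return description.hasPhysical("id");
}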
|
@ -468,7 +468,7 @@ bool StorageDistributedDirectoryMonitor::maybeMarkAsBroken(const std::string & f

std::string StorageDistributedDirectoryMonitor::getLoggerName() const
{
return storage.name + '.' + storage.getName() + ".DirectoryMonitor";
return storage.table_name + '.' + storage.getName() + ".DirectoryMonitor";
}

}
|
@ -83,6 +83,10 @@ public:
/// The main name of the table type (for example, StorageMergeTree).
virtual std::string getName() const = 0;

/** The name of the table.
*/
virtual std::string getTableName() const = 0;

/** Returns true if the storage receives data from a remote server or servers. */
virtual bool isRemote() const { return false; }
|
@ -1,13 +1,12 @@
#include <unordered_set>
#include <Storages/ITableDeclaration.h>
#include <Common/Exception.h>

#include <boost/range/join.hpp>
#include <sparsehash/dense_hash_map>
#include <sparsehash/dense_hash_set>
#include <Storages/ITableDeclaration.h>
#include <Parsers/ASTIdentifier.h>
#include <Parsers/ASTNameTypePair.h>
#include <Interpreters/Context.h>
#include <ext/map.h>
#include <ext/identity.h>
#include <ext/collection_cast.h>

#include <unordered_set>
#include <sstream>


namespace DB
@ -25,77 +24,22 @@ namespace ErrorCodes
}


NamesAndTypesList ITableDeclaration::getColumnsList() const
void ITableDeclaration::setColumns(ColumnsDescription columns_)
{
return ext::collection_cast<NamesAndTypesList>(getColumnsListRange());
if (columns_.ordinary.empty())
throw Exception("Empty list of columns passed", ErrorCodes::EMPTY_LIST_OF_COLUMNS_PASSED);
columns = std::move(columns_);
}


ITableDeclaration::ColumnsListRange ITableDeclaration::getColumnsListRange() const
{
return boost::join(getColumnsListImpl(), materialized_columns);
}


bool ITableDeclaration::hasRealColumn(const String & column_name) const
{
for (auto & it : getColumnsListRange())
if (it.name == column_name)
return true;
return false;
}


Names ITableDeclaration::getColumnNamesList() const
{
return ext::map<Names>(getColumnsListRange(), [] (const auto & it) { return it.name; });
}


NameAndTypePair ITableDeclaration::getRealColumn(const String & column_name) const
{
for (auto & it : getColumnsListRange())
if (it.name == column_name)
return it;
throw Exception("There is no column " + column_name + " in table.", ErrorCodes::NO_SUCH_COLUMN_IN_TABLE);
}

NameAndTypePair ITableDeclaration::getMaterializedColumn(const String & column_name) const
{
for (auto & column : materialized_columns)
if (column.name == column_name)
return column;

throw Exception("There is no column " + column_name + " in table.", ErrorCodes::NO_SUCH_COLUMN_IN_TABLE);
}

bool ITableDeclaration::hasMaterializedColumn(const String & column_name) const
{
for (auto & column : materialized_columns)
if (column.name == column_name)
return true;

return false;
}

bool ITableDeclaration::hasColumn(const String & column_name) const
{
return hasRealColumn(column_name); /// By default, we assume that there are no virtual columns in the storage.
return getColumns().hasPhysical(column_name); /// By default, we assume that there are no virtual columns in the storage.
}

NameAndTypePair ITableDeclaration::getColumn(const String & column_name) const
{
return getRealColumn(column_name); /// By default, we assume that there are no virtual columns in the storage.
}


const DataTypePtr ITableDeclaration::getDataTypeByName(const String & column_name) const
{
for (const auto & column : getColumnsListRange())
if (column.name == column_name)
return column.type;

throw Exception("There is no column " + column_name + " in table.", ErrorCodes::NO_SUCH_COLUMN_IN_TABLE);
return getColumns().getPhysical(column_name); /// By default, we assume that there are no virtual columns in the storage.
}


@ -103,7 +47,7 @@ Block ITableDeclaration::getSampleBlock() const
{
Block res;

for (const auto & col : getColumnsListRange())
for (const auto & col : boost::join(getColumns().ordinary, getColumns().materialized))
res.insert({ col.type->createColumn(), col.type, col.name });

return res;
@ -114,7 +58,7 @@ Block ITableDeclaration::getSampleBlockNonMaterialized() const
{
Block res;

for (const auto & col : getColumnsListNonMaterialized())
for (const auto & col : getColumns().ordinary)
res.insert({ col.type->createColumn(), col.type, col.name });

return res;
@ -138,7 +82,7 @@ Block ITableDeclaration::getSampleBlockForColumns(const Names & column_names) co
static std::string listOfColumns(const NamesAndTypesList & available_columns)
{
std::stringstream s;
for (NamesAndTypesList::const_iterator it = available_columns.begin(); it != available_columns.end(); ++it)
for (auto it = available_columns.begin(); it != available_columns.end(); ++it)
{
if (it != available_columns.begin())
s << ", ";
@ -175,7 +119,7 @@ static NamesAndTypesMap getColumnsMap(const Args &... args)

void ITableDeclaration::check(const Names & column_names) const
{
const NamesAndTypesList & available_columns = getColumnsList();
const NamesAndTypesList & available_columns = getColumns().getAllPhysical();

if (column_names.empty())
throw Exception("Empty list of columns queried. There are columns: " + listOfColumns(available_columns),
@ -201,16 +145,16 @@ void ITableDeclaration::check(const Names & column_names) const
}


void ITableDeclaration::check(const NamesAndTypesList & columns) const
void ITableDeclaration::check(const NamesAndTypesList & provided_columns) const
{
const NamesAndTypesList & available_columns = getColumnsList();
const NamesAndTypesList & available_columns = getColumns().getAllPhysical();
const auto columns_map = getColumnsMap(available_columns);

using UniqueStrings = google::dense_hash_set<StringRef, StringRefHash>;
UniqueStrings unique_names;
unique_names.set_empty_key(StringRef());

for (const NameAndTypePair & column : columns)
for (const NameAndTypePair & column : provided_columns)
{
NamesAndTypesMap::const_iterator it = columns_map.find(column.name);
if (columns_map.end() == it)
@ -229,11 +173,11 @@ void ITableDeclaration::check(const NamesAndTypesList & columns) const
}


void ITableDeclaration::check(const NamesAndTypesList & columns, const Names & column_names) const
void ITableDeclaration::check(const NamesAndTypesList & provided_columns, const Names & column_names) const
{
const NamesAndTypesList & available_columns = getColumnsList();
const NamesAndTypesList & available_columns = getColumns().getAllPhysical();
const auto available_columns_map = getColumnsMap(available_columns);
const NamesAndTypesMap & provided_columns_map = getColumnsMap(columns);
const NamesAndTypesMap & provided_columns_map = getColumnsMap(provided_columns);

if (column_names.empty())
throw Exception("Empty list of columns queried. There are columns: " + listOfColumns(available_columns),
@ -268,7 +212,7 @@ void ITableDeclaration::check(const NamesAndTypesList & columns, const Names & c

void ITableDeclaration::check(const Block & block, bool need_all) const
{
const NamesAndTypesList & available_columns = getColumnsList();
const NamesAndTypesList & available_columns = getColumns().getAllPhysical();
const auto columns_map = getColumnsMap(available_columns);

using NameSet = std::unordered_set<String>;
@ -304,15 +248,10 @@ void ITableDeclaration::check(const Block & block, bool need_all) const
}
}

ITableDeclaration::ITableDeclaration(const NamesAndTypesList & columns, const NamesAndTypesList & materialized_columns,
const NamesAndTypesList & alias_columns, const ColumnDefaults & column_defaults)
: columns{columns},
materialized_columns{materialized_columns},
alias_columns{alias_columns},
column_defaults{column_defaults}

ITableDeclaration::ITableDeclaration(ColumnsDescription columns_)
{
if (columns.empty())
throw Exception("Empty list of columns passed to storage constructor", ErrorCodes::EMPTY_LIST_OF_COLUMNS_PASSED);
setColumns(std::move(columns_));
}

}
|
@ -1,62 +1,25 @@
#pragma once

#include <Core/Names.h>
#include <Core/NamesAndTypes.h>
#include <Common/Exception.h>
#include <Core/Block.h>
#include <Storages/ColumnDefault.h>

#include <boost/range/iterator_range.hpp>
#include <boost/range/join.hpp>
#include <Storages/ColumnsDescription.h>


namespace DB
{

class Context;

/** Description of the table.
* Do not thread safe. See IStorage::lockStructure ().
* Is not thread safe. See IStorage::lockStructure ().
*/
class ITableDeclaration
{
public:
/** The name of the table.
*/
virtual std::string getTableName() const = 0;
virtual const ColumnsDescription & getColumns() const { return columns; }
virtual void setColumns(ColumnsDescription columns_);

/** Get a list of names and table column types, only non-virtual.
*/
NamesAndTypesList getColumnsList() const;
const NamesAndTypesList & getColumnsListNonMaterialized() const { return getColumnsListImpl(); }

/** Get a list of column table names, only non-virtual.
*/
virtual Names getColumnNamesList() const;

/** Get a description of the real (non-virtual) column by its name.
*/
virtual NameAndTypePair getRealColumn(const String & column_name) const;

/** Is there a real (non-virtual) column with that name.
*/
virtual bool hasRealColumn(const String & column_name) const;

NameAndTypePair getMaterializedColumn(const String & column_name) const;
bool hasMaterializedColumn(const String & column_name) const;

/** Get a description of any column by its name.
*/
/// NOTE: These methods should include virtual columns, but should NOT include ALIAS columns
/// (they are treated separately).
virtual NameAndTypePair getColumn(const String & column_name) const;

/** Is there a column with that name.
*/
virtual bool hasColumn(const String & column_name) const;

const DataTypePtr getDataTypeByName(const String & column_name) const;

/** The same, but in the form of a block-sample.
*/
Block getSampleBlock() const;
Block getSampleBlockNonMaterialized() const;
Block getSampleBlockForColumns(const Names & column_names) const;
@ -81,29 +44,12 @@ public:
void check(const Block & block, bool need_all = false) const;


ITableDeclaration() = default;
explicit ITableDeclaration(ColumnsDescription columns_);
virtual ~ITableDeclaration() = default;

ITableDeclaration() = default;
ITableDeclaration(
const NamesAndTypesList & columns,
const NamesAndTypesList & materialized_columns,
const NamesAndTypesList & alias_columns,
const ColumnDefaults & column_defaults);

NamesAndTypesList columns;
NamesAndTypesList materialized_columns;
NamesAndTypesList alias_columns;
ColumnDefaults column_defaults;

private:
virtual const NamesAndTypesList & getColumnsListImpl() const
{
return columns;
}

using ColumnsListRange = boost::range::joined_range<const NamesAndTypesList, const NamesAndTypesList>;
/// Returns a lazily joined range of table's ordinary and materialized columns, without unnecessary copying
ColumnsListRange getColumnsListRange() const;
ColumnsDescription columns;
};

}
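Sketch of how callers migrate to the trimmed interface above (hypothetical example, not from the commit; `table` stands for any storage deriving from ITableDeclaration): the per-kind getters are replaced by a single ColumnsDescription accessor.

#include <Storages/IStorage.h>

using namespace DB;

/// Rough mapping of the removed accessors onto the new ones:
///   getColumnsList()        -> getColumns().getAllPhysical()
///   getColumnNamesList()    -> getColumns().getNamesOfPhysical()
///   getDataTypeByName(name) -> getColumns().getPhysical(name).type
///   hasRealColumn(name)     -> getColumns().hasPhysical(name)
size_t countPhysicalColumns(const IStorage & table)
{
    return table.getColumns().getAllPhysical().size();
}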
|
@ -1,4 +1,5 @@
#include <Storages/MergeTree/DataPartsExchange.h>
#include <Storages/IStorage.h>
#include <Common/CurrentMetrics.h>
#include <Common/NetException.h>
#include <Common/typeid_cast.h>
|
@ -141,8 +141,9 @@ try
if (!column_names.empty())
storage.check(data_part->columns, column_names);

pre_columns = storage.getColumnsList().addTypes(pre_column_names);
columns = storage.getColumnsList().addTypes(column_names);
const NamesAndTypesList & physical_columns = storage.getColumns().getAllPhysical();
pre_columns = physical_columns.addTypes(pre_column_names);
columns = physical_columns.addTypes(column_names);
}
else
{
|
@ -26,9 +26,9 @@ NameSet injectRequiredColumns(const MergeTreeData & storage, const MergeTreeData
continue;
}

const auto default_it = storage.column_defaults.find(column_name);
const auto default_it = storage.getColumns().defaults.find(column_name);
/// columns has no explicit default expression
if (default_it == std::end(storage.column_defaults))
if (default_it == std::end(storage.getColumns().defaults))
continue;

/// collect identifiers required for evaluation
|
@ -80,10 +80,7 @@ namespace ErrorCodes

MergeTreeData::MergeTreeData(
const String & database_, const String & table_,
const String & full_path_, const NamesAndTypesList & columns_,
const NamesAndTypesList & materialized_columns_,
const NamesAndTypesList & alias_columns_,
const ColumnDefaults & column_defaults_,
const String & full_path_, const ColumnsDescription & columns_,
Context & context_,
const ASTPtr & primary_expr_ast_,
const ASTPtr & secondary_sort_expr_ast_,
@ -95,7 +92,7 @@ MergeTreeData::MergeTreeData(
bool require_part_metadata_,
bool attach,
BrokenPartCallback broken_part_callback_)
: ITableDeclaration{columns_, materialized_columns_, alias_columns_, column_defaults_},
: ITableDeclaration{columns_},
context(context_),
sampling_expression(sampling_expression_),
index_granularity(settings_.index_granularity),
@ -113,7 +110,7 @@ MergeTreeData::MergeTreeData(
data_parts_by_state_and_info(data_parts_indexes.get<TagByStateAndInfo>())
{
/// NOTE: using the same columns list as is read when performing actual merges.
merging_params.check(getColumnsList());
merging_params.check(getColumns().getAllPhysical());

if (!primary_expr_ast)
throw Exception("Primary key cannot be empty", ErrorCodes::BAD_ARGUMENTS);
@ -225,11 +222,11 @@ void MergeTreeData::initPrimaryKey()
primary_sort_descr.clear();
addSortDescription(primary_sort_descr, primary_expr_ast);

primary_expr = ExpressionAnalyzer(primary_expr_ast, context, nullptr, getColumnsList()).getActions(false);
primary_expr = ExpressionAnalyzer(primary_expr_ast, context, nullptr, getColumns().getAllPhysical()).getActions(false);

{
ExpressionActionsPtr projected_expr =
ExpressionAnalyzer(primary_expr_ast, context, nullptr, getColumnsList()).getActions(true);
ExpressionAnalyzer(primary_expr_ast, context, nullptr, getColumns().getAllPhysical()).getActions(true);
primary_key_sample = projected_expr->getSampleBlock();
}

@ -244,10 +241,10 @@ void MergeTreeData::initPrimaryKey()
if (secondary_sort_expr_ast)
{
addSortDescription(sort_descr, secondary_sort_expr_ast);
secondary_sort_expr = ExpressionAnalyzer(secondary_sort_expr_ast, context, nullptr, getColumnsList()).getActions(false);
secondary_sort_expr = ExpressionAnalyzer(secondary_sort_expr_ast, context, nullptr, getColumns().getAllPhysical()).getActions(false);

ExpressionActionsPtr projected_expr =
ExpressionAnalyzer(secondary_sort_expr_ast, context, nullptr, getColumnsList()).getActions(true);
ExpressionAnalyzer(secondary_sort_expr_ast, context, nullptr, getColumns().getAllPhysical()).getActions(true);
auto secondary_key_sample = projected_expr->getSampleBlock();

checkKeyExpression(*secondary_sort_expr, secondary_key_sample, "Secondary");
@ -260,7 +257,7 @@ void MergeTreeData::initPartitionKey()
if (!partition_expr_ast || partition_expr_ast->children.empty())
return;

partition_expr = ExpressionAnalyzer(partition_expr_ast, context, nullptr, getColumnsList()).getActions(false);
partition_expr = ExpressionAnalyzer(partition_expr_ast, context, nullptr, getColumns().getAllPhysical()).getActions(false);
for (const ASTPtr & ast : partition_expr_ast->children)
{
String col_name = ast->getColumnName();
@ -864,11 +861,8 @@ bool isMetadataOnlyConversion(const IDataType * from, const IDataType * to)
void MergeTreeData::checkAlter(const AlterCommands & commands)
{
/// Check that needed transformations can be applied to the list of columns without considering type conversions.
auto new_columns = columns;
auto new_materialized_columns = materialized_columns;
auto new_alias_columns = alias_columns;
auto new_column_defaults = column_defaults;
commands.apply(new_columns, new_materialized_columns, new_alias_columns, new_column_defaults);
auto new_columns = getColumns();
commands.apply(new_columns);

/// Set of columns that shouldn't be altered.
NameSet columns_alter_forbidden;
@ -909,7 +903,7 @@ void MergeTreeData::checkAlter(const AlterCommands & commands)
columns_alter_forbidden.insert(merging_params.sign_column);

std::map<String, const IDataType *> old_types;
for (const auto & column : columns)
for (const auto & column : getColumns().getAllPhysical())
old_types.emplace(column.name, column.type.get());

for (const AlterCommand & command : commands)
@ -937,11 +931,7 @@ void MergeTreeData::checkAlter(const AlterCommands & commands)
NameToNameMap unused_map;
bool unused_bool;

/// augment plain columns with materialized columns for convert expression creation
new_columns.insert(std::end(new_columns),
std::begin(new_materialized_columns), std::end(new_materialized_columns));

createConvertExpression(nullptr, getColumnsList(), new_columns, unused_expression, unused_map, unused_bool);
createConvertExpression(nullptr, getColumns().getAllPhysical(), new_columns.getAllPhysical(), unused_expression, unused_map, unused_bool);
}

void MergeTreeData::createConvertExpression(const DataPartPtr & part, const NamesAndTypesList & old_columns, const NamesAndTypesList & new_columns,
@ -1845,7 +1835,7 @@ void MergeTreeData::addPartContributionToColumnSizes(const DataPartPtr & part)
const auto & files = part->checksums.files;

/// TODO This method doesn't take into account columns with multiple files.
for (const auto & column : getColumnsList())
for (const auto & column : getColumns().getAllPhysical())
{
const auto escaped_name = escapeForFileName(column.name);
const auto bin_file_name = escaped_name + ".bin";
@ -1878,7 +1868,7 @@ void MergeTreeData::removePartContributionToColumnSizes(const DataPartPtr & part
const auto & files = part->checksums.files;

/// TODO This method doesn't take into account columns with multiple files.
for (const auto & column : columns)
for (const auto & column : getColumns().getAllPhysical())
{
const auto escaped_name = escapeForFileName(column.name);
const auto bin_file_name = escaped_name + ".bin";
|
@ -4,7 +4,8 @@
#include <Common/SimpleIncrement.h>
#include <Interpreters/Context.h>
#include <Interpreters/ExpressionActions.h>
#include <Storages/IStorage.h>
#include <Storages/ITableDeclaration.h>
#include <Storages/AlterCommands.h>
#include <Storages/MergeTree/MergeTreePartInfo.h>
#include <Storages/MergeTree/MergeTreeSettings.h>
#include <IO/ReadBufferFromString.h>
@ -270,10 +271,8 @@ public:
/// require_part_metadata - should checksums.txt and columns.txt exist in the part directory.
/// attach - whether the existing table is attached or the new table is created.
MergeTreeData(const String & database_, const String & table_,
const String & full_path_, const NamesAndTypesList & columns_,
const NamesAndTypesList & materialized_columns_,
const NamesAndTypesList & alias_columns_,
const ColumnDefaults & column_defaults_,
const String & full_path_,
const ColumnsDescription & columns_,
Context & context_,
const ASTPtr & primary_expr_ast_,
const ASTPtr & secondary_sort_expr_ast_,
@ -305,8 +304,6 @@ public:

Int64 getMaxDataPartIndex();

const NamesAndTypesList & getColumnsListImpl() const override { return columns; }

NameAndTypePair getColumn(const String & column_name) const override
{
if (column_name == "_part")
@ -316,12 +313,12 @@ public:
if (column_name == "_sample_factor")
return NameAndTypePair("_sample_factor", std::make_shared<DataTypeFloat64>());

return ITableDeclaration::getColumn(column_name);
return getColumns().getPhysical(column_name);
}

bool hasColumn(const String & column_name) const override
{
return ITableDeclaration::hasColumn(column_name)
return getColumns().hasPhysical(column_name)
|| column_name == "_part"
|| column_name == "_part_index"
|| column_name == "_sample_factor";
@ -329,7 +326,7 @@ public:

String getDatabaseName() const { return database_name; }

String getTableName() const override { return table_name; }
String getTableName() const { return table_name; }

String getFullPath() const { return full_path; }

@ -431,9 +428,6 @@ public:
const ASTPtr & new_primary_key,
bool skip_sanity_checks);

/// Must be called with locked lockStructureForAlter().
void setColumnsList(const NamesAndTypesList & new_columns) { columns = new_columns; }

/// Should be called if part data is suspected to be corrupted.
void reportBrokenPart(const String & name)
{
@ -507,6 +501,7 @@ public:
/// For ATTACH/DETACH/DROP PARTITION.
String getPartitionIDFromQuery(const ASTPtr & partition, const Context & context);


MergeTreeDataFormatVersion format_version;

Context & context;
|
@ -536,8 +536,8 @@ MergeTreeData::MutableDataPartPtr MergeTreeDataMerger::mergePartsToTemporaryPart
for (const MergeTreeData::DataPartPtr & part : parts)
part->accumulateColumnSizes(merged_column_to_size);

Names all_column_names = data.getColumnNamesList();
NamesAndTypesList all_columns = data.getColumnsList();
Names all_column_names = data.getColumns().getNamesOfPhysical();
NamesAndTypesList all_columns = data.getColumns().getAllPhysical();
const SortDescription sort_desc = data.getSortDescription();

NamesAndTypesList gathering_columns, merging_columns;
|
@ -418,7 +418,7 @@ UInt64 MergeTreeDataPart::getColumnMrkSize(const String & name) const
*/
String MergeTreeDataPart::getColumnNameWithMinumumCompressedSize() const
{
const auto & columns = storage.getColumnsList();
const auto & columns = storage.getColumns().getAllPhysical();
const std::string * minimum_size_column = nullptr;
UInt64 minimum_size = std::numeric_limits<UInt64>::max();

@ -774,7 +774,7 @@ void MergeTreeDataPart::accumulateColumnSizes(ColumnToSize & column_to_size) con
{
std::shared_lock<std::shared_mutex> part_lock(columns_lock);

for (const NameAndTypePair & name_type : storage.columns)
for (const NameAndTypePair & name_type : storage.getColumns().getAllPhysical())
{
name_type.type->enumerateStreams([&](const IDataType::SubstreamPath & substream_path)
{
@ -794,7 +794,7 @@ void MergeTreeDataPart::loadColumns(bool require)
throw Exception("No columns.txt in part " + name, ErrorCodes::NO_FILE_IN_DATA_PART);

/// If there is no file with a list of columns, write it down.
for (const NameAndTypePair & column : storage.getColumnsList())
for (const NameAndTypePair & column : storage.getColumns().getAllPhysical())
if (Poco::File(getFullPath() + escapeForFileName(column.name) + ".bin").exists())
columns.push_back(column);
|
@ -175,7 +175,7 @@ BlockInputStreams MergeTreeDataSelectExecutor::read(
}
}

NamesAndTypesList available_real_columns = data.getColumnsList();
NamesAndTypesList available_real_columns = data.getColumns().getAllPhysical();

NamesAndTypesList available_real_and_virtual_columns = available_real_columns;
for (const auto & name : virt_column_names)
@ -857,7 +857,7 @@ void MergeTreeDataSelectExecutor::createPositiveSignCondition(
arguments->children.push_back(sign);
arguments->children.push_back(one);

out_expression = ExpressionAnalyzer(function, context, {}, data.getColumnsList()).getActions(false);
out_expression = ExpressionAnalyzer(function, context, {}, data.getColumns().getAllPhysical()).getActions(false);
out_column = function->getColumnName();
}
|
@ -1,5 +1,7 @@
#pragma once

#include <Core/QueryProcessingStage.h>
#include <Storages/SelectQueryInfo.h>
#include <Storages/MergeTree/MergeTreeData.h>
#include <Storages/MergeTree/RangesInDataPart.h>
|
@ -205,7 +205,7 @@ MergeTreeData::MutableDataPartPtr MergeTreeDataWriter::writeTempPart(BlockWithPa
/// either default lz4 or compression method with zero thresholds on absolute and relative part size.
auto compression_settings = data.context.chooseCompressionSettings(0, 0);

NamesAndTypesList columns = data.getColumnsList().filter(block.getNames());
NamesAndTypesList columns = data.getColumns().getAllPhysical().filter(block.getNames());
MergedBlockOutputStream out(data, new_data_part->getFullPath(), columns, compression_settings);

out.writePrefix();
|
@ -238,8 +238,9 @@ std::vector<size_t> MergeTreeReadPool::fillPerPartInfo(
if (!required_column_names.empty())
data.check(part.data_part->columns, required_column_names);

per_part_pre_columns.push_back(data.getColumnsList().addTypes(required_pre_column_names));
per_part_columns.push_back(data.getColumnsList().addTypes(required_column_names));
const NamesAndTypesList & physical_columns = data.getColumns().getAllPhysical();
per_part_pre_columns.push_back(physical_columns.addTypes(required_pre_column_names));
per_part_columns.push_back(physical_columns.addTypes(required_column_names));
}
else
{
|
@ -463,7 +463,7 @@ void MergeTreeReader::fillMissingColumns(Block & res, const Names & ordered_name
if (!has_column)
{
should_sort = true;
if (storage.column_defaults.count(requested_column.name) != 0)
if (storage.getColumns().defaults.count(requested_column.name) != 0)
{
should_evaluate_defaults = true;
continue;
@ -498,7 +498,7 @@ void MergeTreeReader::fillMissingColumns(Block & res, const Names & ordered_name

/// evaluate defaulted columns if necessary
if (should_evaluate_defaults)
evaluateMissingDefaults(res, columns, storage.column_defaults, storage.context);
evaluateMissingDefaults(res, columns, storage.getColumns().defaults, storage.context);

/// sort columns to ensure consistent order among all blocks
if (should_sort)
|
@ -41,9 +41,9 @@ MergeTreeWhereOptimizer::MergeTreeWhereOptimizer(
Logger * log)
: primary_key_columns{ext::map<std::unordered_set>(data.getPrimarySortDescription(),
[] (const SortColumnDescription & col) { return col.column_name; })},
table_columns{ext::map<std::unordered_set>(data.getColumnsList(),
table_columns{ext::map<std::unordered_set>(data.getColumns().getAllPhysical(),
[] (const NameAndTypePair & col) { return col.name; })},
block_with_constants{PKCondition::getBlockWithConstants(query_info.query, context, data.getColumnsList())},
block_with_constants{PKCondition::getBlockWithConstants(query_info.query, context, data.getColumns().getAllPhysical())},
prepared_sets(query_info.sets),
log{log}
{
|
@ -56,12 +56,7 @@ void ReplicatedMergeTreeAlterThread::run()

zkutil::Stat stat;
const String columns_str = zookeeper->get(storage.zookeeper_path + "/columns", &stat, wakeup_event);
auto columns_desc = ColumnsDescription<true>::parse(columns_str);

auto & columns = columns_desc.columns;
auto & materialized_columns = columns_desc.materialized;
auto & alias_columns = columns_desc.alias;
auto & column_defaults = columns_desc.defaults;
auto columns_in_zk = ColumnsDescription::parse(columns_str);

bool changed_version = (stat.version != storage.columns_version);

@ -92,42 +87,13 @@ void ReplicatedMergeTreeAlterThread::run()

auto table_lock = storage.lockStructureForAlter(__PRETTY_FUNCTION__);

const auto columns_changed = columns != storage.data.getColumnsListNonMaterialized();
const auto materialized_columns_changed = materialized_columns != storage.data.materialized_columns;
const auto alias_columns_changed = alias_columns != storage.data.alias_columns;
const auto column_defaults_changed = column_defaults != storage.data.column_defaults;

if (columns_changed || materialized_columns_changed || alias_columns_changed ||
column_defaults_changed)
if (columns_in_zk != storage.getColumns())
{
LOG_INFO(log, "Columns list changed in ZooKeeper. Applying changes locally.");

storage.context.getDatabase(storage.database_name)->alterTable(
storage.context, storage.table_name,
columns, materialized_columns, alias_columns, column_defaults, {});

if (columns_changed)
{
storage.data.setColumnsList(columns);
}

if (materialized_columns_changed)
{
storage.materialized_columns = materialized_columns;
storage.data.materialized_columns = std::move(materialized_columns);
}

if (alias_columns_changed)
{
storage.alias_columns = alias_columns;
storage.data.alias_columns = std::move(alias_columns);
}

if (column_defaults_changed)
{
storage.column_defaults = column_defaults;
storage.data.column_defaults = std::move(column_defaults);
}
storage.context, storage.table_name, columns_in_zk, {});
storage.setColumns(std::move(columns_in_zk));

/// Reinitialize primary key because primary key column types might have changed.
storage.data.initPrimaryKey();
@ -158,7 +124,7 @@ void ReplicatedMergeTreeAlterThread::run()
if (!changed_version)
parts = storage.data.getDataParts();

const auto columns_plus_materialized = storage.data.getColumnsList();
const auto columns_for_parts = storage.getColumns().getAllPhysical();

for (const MergeTreeData::DataPartPtr & part : parts)
{
@ -166,7 +132,7 @@ void ReplicatedMergeTreeAlterThread::run()
/// TODO: You can skip checking for too large changes if ZooKeeper has, for example,
/// node /flags/force_alter.
auto transaction = storage.data.alterDataPart(
part, columns_plus_materialized, storage.data.primary_expr_ast, false);
part, columns_for_parts, storage.data.primary_expr_ast, false);

if (!transaction)
continue;
|
@ -628,14 +628,13 @@ static StoragePtr create(const StorageFactory::Arguments & args)
if (replicated)
return StorageReplicatedMergeTree::create(
zookeeper_path, replica_name, args.attach, args.data_path, args.database_name, args.table_name,
args.columns, args.materialized_columns, args.alias_columns, args.column_defaults,
args.columns,
args.context, primary_expr_list, secondary_sorting_expr_list, date_column_name, partition_expr_list,
sampling_expression, merging_params, storage_settings,
args.has_force_restore_data_flag);
else
return StorageMergeTree::create(
args.data_path, args.database_name, args.table_name,
args.columns, args.materialized_columns, args.alias_columns, args.column_defaults, args.attach,
args.data_path, args.database_name, args.table_name, args.columns, args.attach,
args.context, primary_expr_list, secondary_sorting_expr_list, date_column_name, partition_expr_list,
sampling_expression, merging_params, storage_settings,
args.has_force_restore_data_flag);
|
@ -48,14 +48,11 @@ namespace ErrorCodes
}


StorageBuffer::StorageBuffer(const std::string & name_, const NamesAndTypesList & columns_,
const NamesAndTypesList & materialized_columns_,
const NamesAndTypesList & alias_columns_,
const ColumnDefaults & column_defaults_,
StorageBuffer::StorageBuffer(const std::string & name_, const ColumnsDescription & columns_,
Context & context_,
size_t num_shards_, const Thresholds & min_thresholds_, const Thresholds & max_thresholds_,
const String & destination_database_, const String & destination_table_, bool allow_materialized_)
: IStorage{columns_, materialized_columns_, alias_columns_, column_defaults_},
: IStorage{columns_},
name(name_), context(context_),
num_shards(num_shards_), buffers(num_shards_),
min_thresholds(min_thresholds_), max_thresholds(max_thresholds_),
@ -598,11 +595,10 @@ void StorageBuffer::alter(const AlterCommands & params, const String & database_
/// So that no blocks of the old structure remain.
optimize({} /*query*/, {} /*partition_id*/, false /*final*/, false /*deduplicate*/, context);

params.apply(columns, materialized_columns, alias_columns, column_defaults);

context.getDatabase(database_name)->alterTable(
context, table_name,
columns, materialized_columns, alias_columns, column_defaults, {});
ColumnsDescription new_columns = getColumns();
params.apply(new_columns);
context.getDatabase(database_name)->alterTable(context, table_name, new_columns, {});
setColumns(std::move(new_columns));
}


@ -641,7 +637,6 @@ void registerStorageBuffer(StorageFactory & factory)

return StorageBuffer::create(
args.table_name, args.columns,
args.materialized_columns, args.alias_columns, args.column_defaults,
args.context,
num_buckets,
StorageBuffer::Thresholds{min_time, min_rows, min_bytes},
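The alter() body above follows the pattern now shared by the storages touched in this commit; a hedged sketch of that flow as a free function, with the parameters standing in for the members used in the hunk:

#include <Storages/IStorage.h>
#include <Storages/AlterCommands.h>
#include <Interpreters/Context.h>

using namespace DB;

/// Copy the current description, let AlterCommands rewrite the copy,
/// persist the new metadata, then publish it to the storage (sketch only).
void applyAlter(IStorage & storage, const AlterCommands & params, const Context & context,
                const String & database_name, const String & table_name)
{
    ColumnsDescription new_columns = storage.getColumns();
    params.apply(new_columns);
    context.getDatabase(database_name)->alterTable(context, table_name, new_columns, {});
    storage.setColumns(std::move(new_columns));
}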
|
@ -123,10 +123,7 @@ protected:
/** num_shards - the level of internal parallelism (the number of independent buffers)
* The buffer is flushed if all minimum thresholds or at least one of the maximum thresholds are exceeded.
*/
StorageBuffer(const std::string & name_, const NamesAndTypesList & columns_,
const NamesAndTypesList & materialized_columns_,
const NamesAndTypesList & alias_columns_,
const ColumnDefaults & column_defaults_,
StorageBuffer(const std::string & name_, const ColumnsDescription & columns_,
Context & context_,
size_t num_shards_, const Thresholds & min_thresholds_, const Thresholds & max_thresholds_,
const String & destination_database_, const String & destination_table_, bool allow_materialized_);
|
@ -220,7 +220,7 @@ void StorageCatBoostPool::parseColumnDescription()

void StorageCatBoostPool::createSampleBlockAndColumns()
{
columns.clear();
ColumnsDescription columns;
NamesAndTypesList cat_columns;
NamesAndTypesList num_columns;
sample_block.clear();
@ -239,19 +239,21 @@ void StorageCatBoostPool::createSampleBlockAndColumns()
else if (desc.column_type == DatasetColumnType::Num)
num_columns.emplace_back(desc.column_name, type);
else
materialized_columns.emplace_back(desc.column_name, type);
columns.materialized.emplace_back(desc.column_name, type);

if (!desc.alias.empty())
{
auto alias = std::make_shared<ASTIdentifier>(desc.column_name);
column_defaults[desc.alias] = {ColumnDefaultType::Alias, alias};
alias_columns.emplace_back(desc.alias, type);
columns.defaults[desc.alias] = {ColumnDefaultKind::Alias, alias};
columns.aliases.emplace_back(desc.alias, type);
}

sample_block.insert(ColumnWithTypeAndName(type, desc.column_name));
}
columns.insert(columns.end(), num_columns.begin(), num_columns.end());
columns.insert(columns.end(), cat_columns.begin(), cat_columns.end());
columns.ordinary.insert(columns.ordinary.end(), num_columns.begin(), num_columns.end());
columns.ordinary.insert(columns.ordinary.end(), cat_columns.begin(), cat_columns.end());

setColumns(columns);
}

BlockInputStreams StorageCatBoostPool::read(const Names & column_names,
|
@ -24,13 +24,10 @@ namespace ErrorCodes

StorageDictionary::StorageDictionary(
const String & table_name_,
const NamesAndTypesList & columns_,
const NamesAndTypesList & materialized_columns_,
const NamesAndTypesList & alias_columns_,
const ColumnDefaults & column_defaults_,
const ColumnsDescription & columns_,
const DictionaryStructure & dictionary_structure_,
const String & dictionary_name_)
: IStorage{columns_, materialized_columns_, alias_columns_, column_defaults_}, table_name(table_name_),
: IStorage{columns_}, table_name(table_name_),
dictionary_name(dictionary_name_),
logger(&Poco::Logger::get("StorageDictionary"))
{
@ -75,7 +72,7 @@ void StorageDictionary::checkNamesAndTypesCompatibleWithDictionary(const Diction
auto dictionary_names_and_types = getNamesAndTypes(dictionary_structure);
std::set<NameAndTypePair> namesAndTypesSet(dictionary_names_and_types.begin(), dictionary_names_and_types.end());

for (auto & column : columns)
for (auto & column : getColumns().ordinary)
{
if (namesAndTypesSet.find(column) == namesAndTypesSet.end())
{
@ -105,8 +102,7 @@ void registerStorageDictionary(StorageFactory & factory)
const DictionaryStructure & dictionary_structure = dictionary->getStructure();

return StorageDictionary::create(
args.table_name, args.columns, args.materialized_columns, args.alias_columns,
args.column_defaults, dictionary_structure, dictionary_name);
args.table_name, args.columns, dictionary_structure, dictionary_name);
});
}
|
@ -22,7 +22,6 @@ class StorageDictionary : public ext::shared_ptr_helper<StorageDictionary>, publ
public:
std::string getName() const override { return "Dictionary"; }
std::string getTableName() const override { return table_name; }
const NamesAndTypesList & getColumnsListImpl() const override { return columns; }
BlockInputStreams read(const Names & column_names,
const SelectQueryInfo & query_info,
const Context & context,
@ -62,10 +61,7 @@ private:

protected:
StorageDictionary(const String & table_name_,
const NamesAndTypesList & columns_,
const NamesAndTypesList & materialized_columns_,
const NamesAndTypesList & alias_columns_,
const ColumnDefaults & column_defaults_,
const ColumnsDescription & columns_,
const DictionaryStructure & dictionary_structure_,
const String & dictionary_name_);
};
|
@ -130,39 +130,35 @@ StorageDistributed::~StorageDistributed() = default;


StorageDistributed::StorageDistributed(
const std::string & name_,
const NamesAndTypesList & columns_,
const NamesAndTypesList & materialized_columns_,
const NamesAndTypesList & alias_columns_,
const ColumnDefaults & column_defaults_,
const std::string & table_name_,
const ColumnsDescription & columns_,
const String & remote_database_,
const String & remote_table_,
const String & cluster_name_,
const Context & context_,
const ASTPtr & sharding_key_,
const String & data_path_)
: IStorage{columns_, materialized_columns_, alias_columns_, column_defaults_},
name(name_),
: IStorage{columns_},
table_name(table_name_),
remote_database(remote_database_), remote_table(remote_table_),
context(context_), cluster_name(context.getMacros()->expand(cluster_name_)), has_sharding_key(sharding_key_),
sharding_key_expr(sharding_key_ ? ExpressionAnalyzer(sharding_key_, context, nullptr, columns).getActions(false) : nullptr),
sharding_key_expr(sharding_key_ ? ExpressionAnalyzer(sharding_key_, context, nullptr, getColumns().getAllPhysical()).getActions(false) : nullptr),
sharding_key_column_name(sharding_key_ ? sharding_key_->getColumnName() : String{}),
path(data_path_.empty() ? "" : (data_path_ + escapeForFileName(name) + '/'))
path(data_path_.empty() ? "" : (data_path_ + escapeForFileName(table_name) + '/'))
{
}


StoragePtr StorageDistributed::createWithOwnCluster(
const std::string & name_,
const NamesAndTypesList & columns_,
const ColumnsDescription & columns_,
const String & remote_database_,
const String & remote_table_,
ClusterPtr & owned_cluster_,
const Context & context_)
{
auto res = ext::shared_ptr_helper<StorageDistributed>::create(
name_, columns_, NamesAndTypesList(), NamesAndTypesList(), ColumnDefaults(),
remote_database_, remote_table_, String{}, context_, ASTPtr(), String());
name_, columns_, remote_database_, remote_table_, String{}, context_, ASTPtr(), String());

res->owned_cluster = owned_cluster_;

@ -236,11 +232,11 @@ void StorageDistributed::alter(const AlterCommands & params, const String & data
throw Exception("Storage engine " + getName() + " doesn't support primary key.", ErrorCodes::NOT_IMPLEMENTED);

auto lock = lockStructureForAlter(__PRETTY_FUNCTION__);
params.apply(columns, materialized_columns, alias_columns, column_defaults);

context.getDatabase(database_name)->alterTable(
context, table_name,
columns, materialized_columns, alias_columns, column_defaults, {});
ColumnsDescription new_columns = getColumns();
params.apply(new_columns);
context.getDatabase(database_name)->alterTable(context, table_name, new_columns, {});
setColumns(std::move(new_columns));
}


@ -291,13 +287,13 @@ NameAndTypePair StorageDistributed::getColumn(const String & column_name) const
if (const auto & type = VirtualColumnFactory::tryGetType(column_name))
return { column_name, type };

return getRealColumn(column_name);
return getColumns().getPhysical(column_name);
}


bool StorageDistributed::hasColumn(const String & column_name) const
{
return VirtualColumnFactory::hasColumn(column_name) || IStorage::hasColumn(column_name);
return VirtualColumnFactory::hasColumn(column_name) || getColumns().hasPhysical(column_name);
}

void StorageDistributed::createDirectoryMonitors()
@ -390,7 +386,7 @@ void registerStorageDistributed(StorageFactory & factory)
/// Check that sharding_key exists in the table and has numeric type.
if (sharding_key)
{
auto sharding_expr = ExpressionAnalyzer(sharding_key, args.context, nullptr, args.columns).getActions(true);
auto sharding_expr = ExpressionAnalyzer(sharding_key, args.context, nullptr, args.columns.getAllPhysical()).getActions(true);
const Block & block = sharding_expr->getSampleBlock();

if (block.columns() != 1)
@ -405,7 +401,6 @@ void registerStorageDistributed(StorageFactory & factory)

return StorageDistributed::create(
args.table_name, args.columns,
args.materialized_columns, args.alias_columns, args.column_defaults,
remote_database, remote_table, cluster_name,
args.context, sharding_key, args.data_path);
});
|
@ -34,20 +34,19 @@ public:
~StorageDistributed() override;

static StoragePtr createWithOwnCluster(
const std::string & name_, /// The name of the table.
const NamesAndTypesList & columns_, /// List of columns.
const std::string & table_name_,
const ColumnsDescription & columns_,
const String & remote_database_, /// database on remote servers.
const String & remote_table_, /// The name of the table on the remote servers.
ClusterPtr & owned_cluster_,
const Context & context_);

std::string getName() const override { return "Distributed"; }
std::string getTableName() const override { return name; }
std::string getTableName() const override { return table_name; }
bool supportsSampling() const override { return true; }
bool supportsFinal() const override { return true; }
bool supportsPrewhere() const override { return true; }

const NamesAndTypesList & getColumnsListImpl() const override { return columns; }
NameAndTypePair getColumn(const String & column_name) const override;
bool hasColumn(const String & column_name) const override;

@ -64,7 +63,7 @@ public:
BlockOutputStreamPtr write(const ASTPtr & query, const Settings & settings) override;

void drop() override {}
void rename(const String & /*new_path_to_db*/, const String & /*new_database_name*/, const String & new_table_name) override { name = new_table_name; }
void rename(const String & /*new_path_to_db*/, const String & /*new_database_name*/, const String & new_table_name) override { table_name = new_table_name; }
/// in the sub-tables, you need to manually add and delete columns
/// the structure of the sub-table is not checked
void alter(const AlterCommands & params, const String & database_name, const String & table_name, const Context & context) override;
@ -95,7 +94,7 @@ public:
ClusterPtr getCluster() const;


String name;
String table_name;
String remote_database;
String remote_table;

@ -132,10 +131,7 @@ public:
protected:
StorageDistributed(
const std::string & name_,
const NamesAndTypesList & columns_,
const NamesAndTypesList & materialized_columns_,
const NamesAndTypesList & alias_columns_,
const ColumnDefaults & column_defaults_,
const ColumnsDescription & columns_,
const String & remote_database_,
const String & remote_table_,
const String & cluster_name_,
|
@ -45,10 +45,7 @@ StoragePtr StorageFactory::get(
const String & database_name,
Context & local_context,
Context & context,
const NamesAndTypesList & columns,
const NamesAndTypesList & materialized_columns,
const NamesAndTypesList & alias_columns,
const ColumnDefaults & column_defaults,
const ColumnsDescription & columns,
bool attach,
bool has_force_restore_data_flag) const
{
@ -67,9 +64,7 @@ StoragePtr StorageFactory::get(
{
/// Check for some special types, that are not allowed to be stored in tables. Example: NULL data type.
/// Exception: any type is allowed in View, because plain (non-materialized) View does not store anything itself.
checkAllTypesAreAllowedInTable(columns);
checkAllTypesAreAllowedInTable(materialized_columns);
checkAllTypesAreAllowedInTable(alias_columns);
checkAllTypesAreAllowedInTable(columns.getAll());

if (query.is_materialized_view)
{
@ -130,9 +125,6 @@ StoragePtr StorageFactory::get(
.local_context = local_context,
.context = context,
.columns = columns,
.materialized_columns = materialized_columns,
.alias_columns = alias_columns,
.column_defaults = column_defaults,
.attach = attach,
.has_force_restore_data_flag = has_force_restore_data_flag
};
|
@ -14,7 +14,7 @@ class ASTStorage;


/** Allows to create a table by the name and parameters of the engine.
* In 'columns', 'materialized_columns', etc., Nested data structures must be flattened.
* In 'columns' Nested data structures must be flattened.
* You should subsequently call IStorage::startup method to work with table.
*/
class StorageFactory : public ext::singleton<StorageFactory>
@ -31,10 +31,7 @@ public:
const String & database_name;
Context & local_context;
Context & context;
const NamesAndTypesList & columns;
const NamesAndTypesList & materialized_columns;
const NamesAndTypesList & alias_columns;
const ColumnDefaults & column_defaults;
const ColumnsDescription & columns;
bool attach;
bool has_force_restore_data_flag;
};
@ -48,10 +45,7 @@ public:
const String & database_name,
Context & local_context,
Context & context,
const NamesAndTypesList & columns,
const NamesAndTypesList & materialized_columns,
const NamesAndTypesList & alias_columns,
const ColumnDefaults & column_defaults,
const ColumnsDescription & columns,
bool attach,
bool has_force_restore_data_flag) const;
|
@ -54,12 +54,9 @@ StorageFile::StorageFile(
const std::string & db_dir_path,
const std::string & table_name_,
const std::string & format_name_,
const NamesAndTypesList & columns_,
const NamesAndTypesList & materialized_columns_,
const NamesAndTypesList & alias_columns_,
const ColumnDefaults & column_defaults_,
const ColumnsDescription & columns_,
Context & context_)
: IStorage(columns_, materialized_columns_, alias_columns_, column_defaults_),
: IStorage(columns_),
table_name(table_name_), format_name(format_name_), context_global(context_), table_fd(table_fd_)
{
if (table_fd < 0) /// Will use file
@ -311,7 +308,6 @@ void registerStorageFile(StorageFactory & factory)
return StorageFile::create(
source_path, source_fd,
args.data_path, args.table_name, format_name, args.columns,
args.materialized_columns, args.alias_columns, args.column_defaults,
args.context);
});
}
|
@ -31,11 +31,6 @@ public:
return table_name;
}

const NamesAndTypesList & getColumnsListImpl() const override
{
return columns;
}

BlockInputStreams read(
const Names & column_names,
const SelectQueryInfo & query_info,
@ -61,7 +56,7 @@ protected:
/** there are three options (ordered by priority):
- use specified file descriptor if (fd >= 0)
- use specified table_path if it isn't empty
- create own tabale inside data/db/table/
- create own table inside data/db/table/
*/
StorageFile(
const std::string & table_path_,
@ -69,10 +64,7 @@ protected:
const std::string & db_dir_path,
const std::string & table_name_,
const std::string & format_name_,
const NamesAndTypesList & columns_,
const NamesAndTypesList & materialized_columns_,
const NamesAndTypesList & alias_columns_,
const ColumnDefaults & column_defaults_,
const ColumnsDescription & columns_,
Context & context_);

private:
|
@ -24,24 +24,12 @@ StorageJoin::StorageJoin(
const String & name_,
const Names & key_names_,
ASTTableJoin::Kind kind_, ASTTableJoin::Strictness strictness_,
const NamesAndTypesList & columns_,
const NamesAndTypesList & materialized_columns_,
const NamesAndTypesList & alias_columns_,
const ColumnDefaults & column_defaults_)
: StorageSetOrJoinBase{path_, name_, columns_, materialized_columns_, alias_columns_, column_defaults_},
const ColumnsDescription & columns_)
: StorageSetOrJoinBase{path_, name_, columns_},
key_names(key_names_), kind(kind_), strictness(strictness_)
{
/// Check that key exists in table definition.
const auto check_key_exists = [] (const NamesAndTypesList & columns, const String & key)
{
for (const auto & column : columns)
if (column.name == key)
return true;
return false;
};

for (const auto & key : key_names)
if (!check_key_exists(columns, key) && !check_key_exists(materialized_columns, key))
if (!getColumns().hasPhysical(key))
throw Exception{
"Key column (" + key + ") does not exist in table declaration.",
ErrorCodes::NO_SUCH_COLUMN_IN_TABLE};
@ -58,7 +46,7 @@ void StorageJoin::assertCompatible(ASTTableJoin::Kind kind_, ASTTableJoin::Stric
{
/// NOTE Could be more loose.
if (!(kind == kind_ && strictness == strictness_))
throw Exception("Table " + name + " has incompatible type of JOIN.", ErrorCodes::INCOMPATIBLE_TYPE_OF_JOIN);
throw Exception("Table " + table_name + " has incompatible type of JOIN.", ErrorCodes::INCOMPATIBLE_TYPE_OF_JOIN);
}


@ -123,7 +111,7 @@ void registerStorageJoin(StorageFactory & factory)
return StorageJoin::create(
args.data_path, args.table_name,
key_names, kind, strictness,
args.columns, args.materialized_columns, args.alias_columns, args.column_defaults);
args.columns);
});
}
|
@ -47,10 +47,7 @@ protected:
const String & name_,
const Names & key_names_,
ASTTableJoin::Kind kind_, ASTTableJoin::Strictness strictness_,
const NamesAndTypesList & columns_,
const NamesAndTypesList & materialized_columns_,
const NamesAndTypesList & alias_columns_,
const ColumnDefaults & column_defaults_);
const ColumnsDescription & columns_);
};

}
|
@ -218,13 +218,10 @@ StorageKafka::StorageKafka(
const std::string & table_name_,
const std::string & database_name_,
Context & context_,
const NamesAndTypesList & columns_,
const NamesAndTypesList & materialized_columns_,
const NamesAndTypesList & alias_columns_,
const ColumnDefaults & column_defaults_,
const ColumnsDescription & columns_,
const String & brokers_, const String & group_, const Names & topics_,
const String & format_name_, const String & schema_name_, size_t num_consumers_)
: IStorage{columns_, materialized_columns_, alias_columns_, column_defaults_},
: IStorage{columns_},
table_name(table_name_), database_name(database_name_), context(context_),
topics(topics_), brokers(brokers_), group(group_), format_name(format_name_), schema_name(schema_name_),
num_consumers(num_consumers_), log(&Logger::get("StorageKafka (" + table_name_ + ")")),
@ -591,7 +588,6 @@ void registerStorageKafka(StorageFactory & factory)

return StorageKafka::create(
args.table_name, args.database_name, args.context, args.columns,
args.materialized_columns, args.alias_columns, args.column_defaults,
brokers, group, topics, format, schema, num_consumers);
});
}

@ -102,10 +102,7 @@ protected:
const std::string & table_name_,
const std::string & database_name_,
Context & context_,
const NamesAndTypesList & columns_,
const NamesAndTypesList & materialized_columns_,
const NamesAndTypesList & alias_columns_,
const ColumnDefaults & column_defaults_,
const ColumnsDescription & columns_,
const String & brokers_, const String & group_, const Names & topics_,
const String & format_name_, const String & schema_name_, size_t num_consumers_);
};

@ -358,12 +358,9 @@ void LogBlockOutputStream::writeMarks(MarksForColumns && marks)
StorageLog::StorageLog(
const std::string & path_,
const std::string & name_,
const NamesAndTypesList & columns_,
const NamesAndTypesList & materialized_columns_,
const NamesAndTypesList & alias_columns_,
const ColumnDefaults & column_defaults_,
const ColumnsDescription & columns_,
size_t max_compress_block_size_)
: IStorage{columns_, materialized_columns_, alias_columns_, column_defaults_},
: IStorage{columns_},
path(path_), name(name_),
max_compress_block_size(max_compress_block_size_),
file_checker(path + escapeForFileName(name) + '/' + "sizes.json")
@ -374,7 +371,7 @@ StorageLog::StorageLog(
/// create files if they do not exist
Poco::File(path + escapeForFileName(name) + '/').createDirectories();

for (const auto & column : getColumnsList())
for (const auto & column : getColumns().getAllPhysical())
addFiles(column.name, *column.type);

marks_file = Poco::File(path + escapeForFileName(name) + '/' + DBMS_STORAGE_LOG_MARKS_FILE_NAME);
@ -468,8 +465,8 @@ void StorageLog::rename(const String & new_path_to_db, const String & /*new_data

const StorageLog::Marks & StorageLog::getMarksWithRealRowCount() const
{
const String & column_name = columns.front().name;
const IDataType & column_type = *columns.front().type;
const String & column_name = getColumns().ordinary.front().name;
const IDataType & column_type = *getColumns().ordinary.front().type;
String filename;

/** We take marks from first column.
@ -502,7 +499,7 @@ BlockInputStreams StorageLog::read(
processed_stage = QueryProcessingStage::FetchColumns;
loadMarks();

NamesAndTypesList columns = Nested::collect(getColumnsList().addTypes(column_names));
NamesAndTypesList all_columns = Nested::collect(getColumns().getAllPhysical().addTypes(column_names));

std::shared_lock<std::shared_mutex> lock(rwlock);

@ -526,7 +523,7 @@ BlockInputStreams StorageLog::read(

res.emplace_back(std::make_shared<LogBlockInputStream>(
max_block_size,
columns,
all_columns,
*this,
mark_begin,
rows_end - rows_begin,
@ -562,7 +559,6 @@ void registerStorageLog(StorageFactory & factory)

return StorageLog::create(
args.data_path, args.table_name, args.columns,
args.materialized_columns, args.alias_columns, args.column_defaults,
args.context.getSettings().max_compress_block_size);
});
}

@ -52,10 +52,7 @@ protected:
StorageLog(
const std::string & path_,
const std::string & name_,
const NamesAndTypesList & columns_,
const NamesAndTypesList & materialized_columns_,
const NamesAndTypesList & alias_columns_,
const ColumnDefaults & column_defaults_,
const ColumnsDescription & columns_,
size_t max_compress_block_size_);

private:

@ -64,12 +64,9 @@ StorageMaterializedView::StorageMaterializedView(
const String & database_name_,
Context & local_context,
const ASTCreateQuery & query,
const NamesAndTypesList & columns_,
const NamesAndTypesList & materialized_columns_,
const NamesAndTypesList & alias_columns_,
const ColumnDefaults & column_defaults_,
const ColumnsDescription & columns_,
bool attach_)
: IStorage{columns_, materialized_columns_, alias_columns_, column_defaults_}, table_name(table_name_),
: IStorage{columns_}, table_name(table_name_),
database_name(database_name_), global_context(local_context.getGlobalContext())
{
if (!query.select)
@ -213,8 +210,7 @@ void registerStorageMaterializedView(StorageFactory & factory)
/// Pass local_context here to convey setting for inner table
return StorageMaterializedView::create(
args.table_name, args.database_name, args.local_context, args.query,
args.columns, args.materialized_columns, args.alias_columns, args.column_defaults,
args.attach);
args.columns, args.attach);
});
}

@ -17,7 +17,6 @@ class StorageMaterializedView : public ext::shared_ptr_helper<StorageMaterialize
public:
std::string getName() const override { return "MaterializedView"; }
std::string getTableName() const override { return table_name; }
const NamesAndTypesList & getColumnsListImpl() const override { return columns; }
ASTPtr getInnerQuery() const { return inner_query->clone(); };
StoragePtr getTargetTable() const;

@ -62,10 +61,7 @@ protected:
const String & database_name_,
Context & local_context,
const ASTCreateQuery & query,
const NamesAndTypesList & columns_,
const NamesAndTypesList & materialized_columns_,
const NamesAndTypesList & alias_columns_,
const ColumnDefaults & column_defaults_,
const ColumnsDescription & columns_,
bool attach_);
};

@ -74,14 +74,8 @@ private:
};

StorageMemory::StorageMemory(
const std::string & name_,
const NamesAndTypesList & columns_,
const NamesAndTypesList & materialized_columns_,
const NamesAndTypesList & alias_columns_,
const ColumnDefaults & column_defaults_)
: IStorage{columns_, materialized_columns_, alias_columns_, column_defaults_},
name(name_)
StorageMemory::StorageMemory(String table_name_, ColumnsDescription columns_description_)
: IStorage{std::move(columns_description_)}, table_name(std::move(table_name_))
{
}

@ -144,7 +138,7 @@ void registerStorageMemory(StorageFactory & factory)
"Engine " + args.engine_name + " doesn't support any arguments (" + toString(args.engine_args.size()) + " given)",
ErrorCodes::NUMBER_OF_ARGUMENTS_DOESNT_MATCH);

return StorageMemory::create(args.table_name, args.columns, args.materialized_columns, args.alias_columns, args.column_defaults);
return StorageMemory::create(args.table_name, args.columns);
});
}

@ -24,7 +24,7 @@ friend class MemoryBlockOutputStream;

public:
std::string getName() const override { return "Memory"; }
std::string getTableName() const override { return name; }
std::string getTableName() const override { return table_name; }

size_t getSize() const { return data.size(); }

@ -39,10 +39,10 @@ public:
BlockOutputStreamPtr write(const ASTPtr & query, const Settings & settings) override;

void drop() override;
void rename(const String & /*new_path_to_db*/, const String & /*new_database_name*/, const String & new_table_name) override { name = new_table_name; }
void rename(const String & /*new_path_to_db*/, const String & /*new_database_name*/, const String & new_table_name) override { table_name = new_table_name; }

private:
String name;
String table_name;

/// The data itself. `list` - so that when inserted to the end, the existing iterators are not invalidated.
BlocksList data;
@ -50,12 +50,7 @@ private:
std::mutex mutex;

protected:
StorageMemory(
const std::string & name_,
const NamesAndTypesList & columns_,
const NamesAndTypesList & materialized_columns_,
const NamesAndTypesList & alias_columns_,
const ColumnDefaults & column_defaults_);
StorageMemory(String table_name_, ColumnsDescription columns_description_);
};

}

@ -37,14 +37,11 @@ namespace ErrorCodes

StorageMerge::StorageMerge(
const std::string & name_,
const NamesAndTypesList & columns_,
const NamesAndTypesList & materialized_columns_,
const NamesAndTypesList & alias_columns_,
const ColumnDefaults & column_defaults_,
const ColumnsDescription & columns_,
const String & source_database_,
const String & table_name_regexp_,
const Context & context_)
: IStorage{columns_, materialized_columns_, alias_columns_, column_defaults_},
: IStorage{columns_},
name(name_), source_database(source_database_),
table_name_regexp(table_name_regexp_), context(context_)
{
@ -184,7 +181,7 @@ BlockInputStreams StorageMerge::read(

/// If there are only virtual columns in query, you must request at least one other column.
if (real_column_names.size() == 0)
real_column_names.push_back(ExpressionActions::getSmallestColumn(table->getColumnsList()));
real_column_names.push_back(ExpressionActions::getSmallestColumn(table->getColumns().getAllPhysical()));

/// Substitute virtual column for its value when querying tables.
ASTPtr modified_query_ast = query->clone();
@ -336,11 +333,11 @@ void StorageMerge::alter(const AlterCommands & params, const String & database_n
throw Exception("Storage engine " + getName() + " doesn't support primary key.", ErrorCodes::NOT_IMPLEMENTED);

auto lock = lockStructureForAlter(__PRETTY_FUNCTION__);
params.apply(columns, materialized_columns, alias_columns, column_defaults);

context.getDatabase(database_name)->alterTable(
context, table_name,
columns, materialized_columns, alias_columns, column_defaults, {});
ColumnsDescription new_columns = getColumns();
params.apply(new_columns);
context.getDatabase(database_name)->alterTable(context, table_name, new_columns, {});
setColumns(new_columns);
}

@ -367,7 +364,6 @@ void registerStorageMerge(StorageFactory & factory)

return StorageMerge::create(
args.table_name, args.columns,
args.materialized_columns, args.alias_columns, args.column_defaults,
source_database, table_name_regexp, args.context);
});
}

@ -26,7 +26,6 @@ public:
bool supportsFinal() const override { return true; }
bool supportsIndexForIn() const override { return true; }

const NamesAndTypesList & getColumnsListImpl() const override { return columns; }
NameAndTypePair getColumn(const String & column_name) const override;
bool hasColumn(const String & column_name) const override;

@ -60,10 +59,7 @@ private:
protected:
StorageMerge(
const std::string & name_,
const NamesAndTypesList & columns_,
const NamesAndTypesList & materialized_columns_,
const NamesAndTypesList & alias_columns_,
const ColumnDefaults & column_defaults_,
const ColumnsDescription & columns_,
const String & source_database_,
const String & table_name_regexp_,
const Context & context_);

@ -35,10 +35,7 @@ StorageMergeTree::StorageMergeTree(
const String & path_,
const String & database_name_,
const String & table_name_,
const NamesAndTypesList & columns_,
const NamesAndTypesList & materialized_columns_,
const NamesAndTypesList & alias_columns_,
const ColumnDefaults & column_defaults_,
const ColumnsDescription & columns_,
bool attach,
Context & context_,
const ASTPtr & primary_expr_ast_,
@ -49,12 +46,10 @@ StorageMergeTree::StorageMergeTree(
const MergeTreeData::MergingParams & merging_params_,
const MergeTreeSettings & settings_,
bool has_force_restore_data_flag)
: IStorage{columns_, materialized_columns_, alias_columns_, column_defaults_},
path(path_), database_name(database_name_), table_name(table_name_), full_path(path + escapeForFileName(table_name) + '/'),
: path(path_), database_name(database_name_), table_name(table_name_), full_path(path + escapeForFileName(table_name) + '/'),
context(context_), background_pool(context_.getBackgroundPool()),
data(database_name, table_name,
full_path, columns_,
materialized_columns_, alias_columns_, column_defaults_,
context_, primary_expr_ast_, secondary_sorting_expr_list_, date_column_name, partition_expr_ast_,
sampling_expression_, merging_params_,
settings_, false, attach),
@ -157,16 +152,8 @@ void StorageMergeTree::alter(

data.checkAlter(params);

auto new_columns = data.getColumnsListNonMaterialized();
auto new_materialized_columns = data.materialized_columns;
auto new_alias_columns = data.alias_columns;
auto new_column_defaults = data.column_defaults;

params.apply(new_columns, new_materialized_columns, new_alias_columns, new_column_defaults);

auto columns_for_parts = new_columns;
columns_for_parts.insert(std::end(columns_for_parts),
std::begin(new_materialized_columns), std::end(new_materialized_columns));
auto new_columns = data.getColumns();
params.apply(new_columns);

std::vector<MergeTreeData::AlterDataPartTransactionPtr> transactions;

@ -187,6 +174,7 @@ void StorageMergeTree::alter(
throw Exception("MODIFY PRIMARY KEY only supported for tables without sampling key", ErrorCodes::BAD_ARGUMENTS);

auto parts = data.getDataParts({MergeTreeDataPartState::PreCommitted, MergeTreeDataPartState::Committed, MergeTreeDataPartState::Outdated});
auto columns_for_parts = new_columns.getAllPhysical();
for (const MergeTreeData::DataPartPtr & part : parts)
{
if (auto transaction = data.alterDataPart(part, columns_for_parts, new_primary_key_ast, false))
@ -212,19 +200,8 @@ void StorageMergeTree::alter(
};
}

context.getDatabase(database_name)->alterTable(
context, table_name,
new_columns, new_materialized_columns, new_alias_columns, new_column_defaults,
storage_modifier);

materialized_columns = new_materialized_columns;
alias_columns = new_alias_columns;
column_defaults = new_column_defaults;

data.setColumnsList(new_columns);
data.materialized_columns = std::move(new_materialized_columns);
data.alias_columns = std::move(new_alias_columns);
data.column_defaults = std::move(new_column_defaults);
context.getDatabase(database_name)->alterTable(context, table_name, new_columns, storage_modifier);
setColumns(std::move(new_columns));

if (primary_key_is_modified)
{
@ -438,17 +415,10 @@ void StorageMergeTree::clearColumnInPartition(const ASTPtr & partition, const Fi
alter_command.type = AlterCommand::DROP_COLUMN;
alter_command.column_name = get<String>(column_name);

auto new_columns = data.getColumnsListNonMaterialized();
auto new_materialized_columns = data.materialized_columns;
auto new_alias_columns = data.alias_columns;
auto new_column_defaults = data.column_defaults;

alter_command.apply(new_columns, new_materialized_columns, new_alias_columns, new_column_defaults);

auto columns_for_parts = new_columns;
columns_for_parts.insert(std::end(columns_for_parts),
std::begin(new_materialized_columns), std::end(new_materialized_columns));
auto new_columns = getColumns();
alter_command.apply(new_columns);

auto columns_for_parts = new_columns.getAllPhysical();
for (const auto & part : parts)
{
if (part->info.partition_id != partition_id)

@ -2,6 +2,7 @@

#include <ext/shared_ptr_helper.h>

#include <Storages/IStorage.h>
#include <Storages/MergeTree/MergeTreeData.h>
#include <Storages/MergeTree/MergeTreeDataSelectExecutor.h>
#include <Storages/MergeTree/MergeTreeDataWriter.h>
@ -35,7 +36,8 @@ public:
bool supportsFinal() const override { return data.supportsFinal(); }
bool supportsPrewhere() const override { return data.supportsPrewhere(); }

const NamesAndTypesList & getColumnsListImpl() const override { return data.getColumnsListNonMaterialized(); }
const ColumnsDescription & getColumns() const override { return data.getColumns(); }
void setColumns(ColumnsDescription columns_) override { return data.setColumns(std::move(columns_)); }

NameAndTypePair getColumn(const String & column_name) const override
{
@ -135,10 +137,7 @@ protected:
const String & path_,
const String & database_name_,
const String & table_name_,
const NamesAndTypesList & columns_,
const NamesAndTypesList & materialized_columns_,
const NamesAndTypesList & alias_columns_,
const ColumnDefaults & column_defaults_,
const ColumnsDescription & columns_,
bool attach,
Context & context_,
const ASTPtr & primary_expr_ast_,

@ -23,8 +23,8 @@ StorageMySQL::StorageMySQL(
mysqlxx::Pool && pool,
const std::string & remote_database_name,
const std::string & remote_table_name,
const NamesAndTypesList & columns_)
: IStorage{columns_, {}, {}, {}}
const ColumnsDescription & columns_)
: IStorage{columns_}
, name(name)
, remote_database_name(remote_database_name)
, remote_table_name(remote_table_name)
@ -43,7 +43,7 @@ BlockInputStreams StorageMySQL::read(
{
check(column_names);
processed_stage = QueryProcessingStage::FetchColumns;
String query = transformQueryForExternalDatabase(*query_info.query, columns, remote_database_name, remote_table_name, context);
String query = transformQueryForExternalDatabase(*query_info.query, getColumns().ordinary, remote_database_name, remote_table_name, context);

Block sample_block;
for (const String & name : column_names)

@ -24,7 +24,7 @@ public:
mysqlxx::Pool && pool,
const std::string & remote_database_name,
const std::string & remote_table_name,
const NamesAndTypesList & columns_);
const ColumnsDescription & columns);

std::string getName() const override { return "MySQL"; }
std::string getTableName() const override { return name; }

@ -25,8 +25,7 @@ void registerStorageNull(StorageFactory & factory)
"Engine " + args.engine_name + " doesn't support any arguments (" + toString(args.engine_args.size()) + " given)",
ErrorCodes::NUMBER_OF_ARGUMENTS_DOESNT_MATCH);

return StorageNull::create(args.table_name,
args.columns, args.materialized_columns, args.alias_columns, args.column_defaults);
return StorageNull::create(args.table_name, args.columns);
});
}

@ -34,11 +33,10 @@ void StorageNull::alter(const AlterCommands & params, const String & database_na
{
auto lock = lockStructureForAlter(__PRETTY_FUNCTION__);

params.apply(columns, materialized_columns, alias_columns, column_defaults);

context.getDatabase(database_name)->alterTable(
context, table_name,
columns, materialized_columns, alias_columns, column_defaults, {});
ColumnsDescription new_columns = getColumns();
params.apply(new_columns);
context.getDatabase(database_name)->alterTable(context, table_name, new_columns, {});
setColumns(std::move(new_columns));
}

}

@ -18,7 +18,7 @@ class StorageNull : public ext::shared_ptr_helper<StorageNull>, public IStorage
{
public:
std::string getName() const override { return "Null"; }
std::string getTableName() const override { return name; }
std::string getTableName() const override { return table_name; }

BlockInputStreams read(
const Names & column_names,
@ -38,22 +38,19 @@ public:

void rename(const String & /*new_path_to_db*/, const String & /*new_database_name*/, const String & new_table_name) override
{
name = new_table_name;
table_name = new_table_name;
}

void alter(const AlterCommands & params, const String & database_name, const String & table_name, const Context & context) override;

private:
String name;
String table_name;

protected:
StorageNull(
const std::string & name_,
const NamesAndTypesList & columns_,
const NamesAndTypesList & materialized_columns_,
const NamesAndTypesList & alias_columns_,
const ColumnDefaults & column_defaults_)
: IStorage{columns_, materialized_columns_, alias_columns_, column_defaults_}, name(name_) {}
StorageNull(String table_name_, ColumnsDescription columns_description_)
: IStorage{std::move(columns_description_)}, table_name(std::move(table_name_))
{
}
};

}

@ -21,8 +21,8 @@ StorageODBC::StorageODBC(
const std::string & connection_string,
const std::string & remote_database_name,
const std::string & remote_table_name,
const NamesAndTypesList & columns_)
: IStorage{columns_, {}, {}, {}}
const ColumnsDescription & columns_)
: IStorage{columns_}
, name(name)
, remote_database_name(remote_database_name)
, remote_table_name(remote_table_name)
@ -43,7 +43,8 @@ BlockInputStreams StorageODBC::read(
{
check(column_names);
processed_stage = QueryProcessingStage::FetchColumns;
String query = transformQueryForExternalDatabase(*query_info.query, columns, remote_database_name, remote_table_name, context);
String query = transformQueryForExternalDatabase(
*query_info.query, getColumns().ordinary, remote_database_name, remote_table_name, context);

Block sample_block;
for (const String & name : column_names)

@ -30,7 +30,7 @@ public:
const std::string & connection_string,
const std::string & remote_database_name,
const std::string & remote_table_name,
const NamesAndTypesList & columns_);
const ColumnsDescription & columns_);

std::string getName() const override { return "ODBC"; }
std::string getTableName() const override { return name; }

@ -174,10 +174,7 @@ StorageReplicatedMergeTree::StorageReplicatedMergeTree(
const String & replica_name_,
bool attach,
const String & path_, const String & database_name_, const String & name_,
const NamesAndTypesList & columns_,
const NamesAndTypesList & materialized_columns_,
const NamesAndTypesList & alias_columns_,
const ColumnDefaults & column_defaults_,
const ColumnsDescription & columns_,
Context & context_,
const ASTPtr & primary_expr_ast_,
const ASTPtr & secondary_sorting_expr_list_,
@ -187,14 +184,13 @@ StorageReplicatedMergeTree::StorageReplicatedMergeTree(
const MergeTreeData::MergingParams & merging_params_,
const MergeTreeSettings & settings_,
bool has_force_restore_data_flag)
: IStorage{columns_, materialized_columns_, alias_columns_, column_defaults_}, context(context_),
: context(context_),
current_zookeeper(context.getZooKeeper()), database_name(database_name_),
table_name(name_), full_path(path_ + escapeForFileName(table_name) + '/'),
zookeeper_path(context.getMacros()->expand(zookeeper_path_)),
replica_name(context.getMacros()->expand(replica_name_)),
data(database_name, table_name,
full_path, columns_,
materialized_columns_, alias_columns_, column_defaults_,
context_, primary_expr_ast_, secondary_sorting_expr_list_, date_column_name, partition_expr_ast_,
sampling_expression_, merging_params_,
settings_, true, attach,
@ -488,9 +484,7 @@ void StorageReplicatedMergeTree::createTableIfNotExists()
acl, zkutil::CreateMode::Persistent));
ops.emplace_back(std::make_unique<zkutil::Op::Create>(zookeeper_path + "/metadata", metadata,
acl, zkutil::CreateMode::Persistent));
ops.emplace_back(std::make_unique<zkutil::Op::Create>(zookeeper_path + "/columns", ColumnsDescription<false>{
data.getColumnsListNonMaterialized(), data.materialized_columns,
data.alias_columns, data.column_defaults}.toString(),
ops.emplace_back(std::make_unique<zkutil::Op::Create>(zookeeper_path + "/columns", getColumns().toString(),
acl, zkutil::CreateMode::Persistent));
ops.emplace_back(std::make_unique<zkutil::Op::Create>(zookeeper_path + "/log", "",
acl, zkutil::CreateMode::Persistent));
@ -524,39 +518,27 @@ void StorageReplicatedMergeTree::checkTableStructure(bool skip_sanity_checks, bo
TableMetadata(data).check(metadata_str);

zkutil::Stat stat;
auto columns_desc = ColumnsDescription<true>::parse(zookeeper->get(zookeeper_path + "/columns", &stat));

auto & columns = columns_desc.columns;
auto & materialized_columns = columns_desc.materialized;
auto & alias_columns = columns_desc.alias;
auto & column_defaults = columns_desc.defaults;
auto columns_from_zk = ColumnsDescription::parse(zookeeper->get(zookeeper_path + "/columns", &stat));
columns_version = stat.version;

if (columns != data.getColumnsListNonMaterialized() ||
materialized_columns != data.materialized_columns ||
alias_columns != data.alias_columns ||
column_defaults != data.column_defaults)
const ColumnsDescription & old_columns = getColumns();
if (columns_from_zk != old_columns)
{
if (allow_alter &&
(skip_sanity_checks ||
data.getColumnsListNonMaterialized().sizeOfDifference(columns) +
data.materialized_columns.sizeOfDifference(materialized_columns) <= 2))
old_columns.ordinary.sizeOfDifference(columns_from_zk.ordinary) +
old_columns.materialized.sizeOfDifference(columns_from_zk.materialized) <= 2))
{
LOG_WARNING(log, "Table structure in ZooKeeper is a little different from local table structure. Assuming ALTER.");

/// Without any locks, because table has not been created yet.
context.getDatabase(database_name)->alterTable(
context, table_name,
columns, materialized_columns, alias_columns, column_defaults, {});
context.getDatabase(database_name)->alterTable(context, table_name, columns_from_zk, {});

data.setColumnsList(columns);
data.materialized_columns = std::move(materialized_columns);
data.alias_columns = std::move(alias_columns);
data.column_defaults = std::move(column_defaults);
setColumns(std::move(columns_from_zk));
}
else
{
throw Exception("Table structure in ZooKeeper is too much different from local table structure.",
throw Exception("Table structure in ZooKeeper is too different from local table structure",
ErrorCodes::INCOMPATIBLE_COLUMNS);
}
}
@ -714,12 +696,7 @@ void StorageReplicatedMergeTree::createReplica()
LOG_DEBUG(log, "Copied " << source_queue.size() << " queue entries");
}

zookeeper->create(replica_path + "/columns", ColumnsDescription<false>{
data.getColumnsListNonMaterialized(),
data.materialized_columns,
data.alias_columns,
data.column_defaults
}.toString(), zkutil::CreateMode::Persistent);
zookeeper->create(replica_path + "/columns", getColumns().toString(), zkutil::CreateMode::Persistent);
}

@ -1529,19 +1506,12 @@ void StorageReplicatedMergeTree::executeClearColumnInPartition(const LogEntry &
alter_command.type = AlterCommand::DROP_COLUMN;
alter_command.column_name = entry.column_name;

auto new_columns = data.getColumnsListNonMaterialized();
auto new_materialized_columns = data.materialized_columns;
auto new_alias_columns = data.alias_columns;
auto new_column_defaults = data.column_defaults;

alter_command.apply(new_columns, new_materialized_columns, new_alias_columns, new_column_defaults);

auto columns_for_parts = new_columns;
columns_for_parts.insert(std::end(columns_for_parts),
std::begin(new_materialized_columns), std::end(new_materialized_columns));
auto new_columns = getColumns();
alter_command.apply(new_columns);

size_t modified_parts = 0;
auto parts = data.getDataParts();
auto columns_for_parts = new_columns.getAllPhysical();
for (const auto & part : parts)
{
if (!entry_part_info.contains(part->info))
@ -2573,15 +2543,10 @@ void StorageReplicatedMergeTree::alter(const AlterCommands & params,
if (param.type == AlterCommand::MODIFY_PRIMARY_KEY)
throw Exception("Modification of primary key is not supported for replicated tables", ErrorCodes::NOT_IMPLEMENTED);

NamesAndTypesList new_columns = data.getColumnsListNonMaterialized();
NamesAndTypesList new_materialized_columns = data.materialized_columns;
NamesAndTypesList new_alias_columns = data.alias_columns;
ColumnDefaults new_column_defaults = data.column_defaults;
params.apply(new_columns, new_materialized_columns, new_alias_columns, new_column_defaults);
ColumnsDescription new_columns = data.getColumns();
params.apply(new_columns);

new_columns_str = ColumnsDescription<false>{
new_columns, new_materialized_columns,
new_alias_columns, new_column_defaults}.toString();
new_columns_str = new_columns.toString();

/// Do ALTER.
getZooKeeper()->set(zookeeper_path + "/columns", new_columns_str, -1, &stat);

@ -84,7 +84,8 @@ public:
bool supportsPrewhere() const override { return data.supportsPrewhere(); }
bool supportsReplication() const override { return true; }

const NamesAndTypesList & getColumnsListImpl() const override { return data.getColumnsListNonMaterialized(); }
const ColumnsDescription & getColumns() const override { return data.getColumns(); }
void setColumns(ColumnsDescription columns_) override { return data.setColumns(std::move(columns_)); }

NameAndTypePair getColumn(const String & column_name) const override
{
@ -443,10 +444,7 @@ protected:
const String & replica_name_,
bool attach,
const String & path_, const String & database_name_, const String & name_,
const NamesAndTypesList & columns_,
const NamesAndTypesList & materialized_columns_,
const NamesAndTypesList & alias_columns_,
const ColumnDefaults & column_defaults_,
const ColumnsDescription & columns_,
Context & context_,
const ASTPtr & primary_expr_ast_,
const ASTPtr & secondary_sorting_expr_list_,

@ -88,18 +88,14 @@ BlockOutputStreamPtr StorageSetOrJoinBase::write(const ASTPtr & /*query*/, const

StorageSetOrJoinBase::StorageSetOrJoinBase(
const String & path_,
const String & name_,
const NamesAndTypesList & columns_,
const NamesAndTypesList & materialized_columns_,
const NamesAndTypesList & alias_columns_,
const ColumnDefaults & column_defaults_)
: IStorage{columns_, materialized_columns_, alias_columns_, column_defaults_},
name(name_)
const String & table_name_,
const ColumnsDescription & columns_)
: IStorage{columns_}, table_name(table_name_)
{
if (path_.empty())
throw Exception("Join and Set storages require data path", ErrorCodes::INCORRECT_FILE_NAME);

path = path_ + escapeForFileName(name_) + '/';
path = path_ + escapeForFileName(table_name_) + '/';
}

@ -107,11 +103,8 @@ StorageSetOrJoinBase::StorageSetOrJoinBase(
StorageSet::StorageSet(
const String & path_,
const String & name_,
const NamesAndTypesList & columns_,
const NamesAndTypesList & materialized_columns_,
const NamesAndTypesList & alias_columns_,
const ColumnDefaults & column_defaults_)
: StorageSetOrJoinBase{path_, name_, columns_, materialized_columns_, alias_columns_, column_defaults_},
const ColumnsDescription & columns_)
: StorageSetOrJoinBase{path_, name_, columns_},
set(std::make_shared<Set>(SizeLimits()))
{
restore();
@ -181,7 +174,7 @@ void StorageSetOrJoinBase::rename(const String & new_path_to_db, const String &
Poco::File(path).renameTo(new_path);

path = new_path + "/";
name = new_table_name;
table_name = new_table_name;
}

@ -194,9 +187,7 @@ void registerStorageSet(StorageFactory & factory)
"Engine " + args.engine_name + " doesn't support any arguments (" + toString(args.engine_args.size()) + " given)",
ErrorCodes::NUMBER_OF_ARGUMENTS_DOESNT_MATCH);

return StorageSet::create(
args.data_path, args.table_name, args.columns,
args.materialized_columns, args.alias_columns, args.column_defaults);
return StorageSet::create(args.data_path, args.table_name, args.columns);
});
}

@ -19,7 +19,7 @@ class StorageSetOrJoinBase : public IStorage
friend class SetOrJoinBlockOutputStream;

public:
String getTableName() const override { return name; }
String getTableName() const override { return table_name; }

void rename(const String & new_path_to_db, const String & new_database_name, const String & new_table_name) override;

@ -30,14 +30,11 @@ public:
protected:
StorageSetOrJoinBase(
const String & path_,
const String & name_,
const NamesAndTypesList & columns_,
const NamesAndTypesList & materialized_columns_,
const NamesAndTypesList & alias_columns_,
const ColumnDefaults & column_defaults_);
const String & table_name_,
const ColumnsDescription & columns_);

String path;
String name;
String table_name;

UInt64 increment = 0; /// For the backup file names.

@ -78,10 +75,7 @@ protected:
StorageSet(
const String & path_,
const String & name_,
const NamesAndTypesList & columns_,
const NamesAndTypesList & materialized_columns_,
const NamesAndTypesList & alias_columns_,
const ColumnDefaults & column_defaults_);
const ColumnsDescription & columns_);
};

}

@ -193,13 +193,10 @@ private:
StorageStripeLog::StorageStripeLog(
const std::string & path_,
const std::string & name_,
const NamesAndTypesList & columns_,
const NamesAndTypesList & materialized_columns_,
const NamesAndTypesList & alias_columns_,
const ColumnDefaults & column_defaults_,
const ColumnsDescription & columns_,
bool attach,
size_t max_compress_block_size_)
: IStorage{columns_, materialized_columns_, alias_columns_, column_defaults_},
: IStorage{columns_},
path(path_), name(name_),
max_compress_block_size(max_compress_block_size_),
file_checker(path + escapeForFileName(name) + '/' + "sizes.json"),
@ -301,7 +298,6 @@ void registerStorageStripeLog(StorageFactory & factory)

return StorageStripeLog::create(
args.data_path, args.table_name, args.columns,
args.materialized_columns, args.alias_columns, args.column_defaults,
args.attach, args.context.getSettings().max_compress_block_size);
});
}

@ -68,10 +68,7 @@ protected:
StorageStripeLog(
const std::string & path_,
const std::string & name_,
const NamesAndTypesList & columns_,
const NamesAndTypesList & materialized_columns_,
const NamesAndTypesList & alias_columns_,
const ColumnDefaults & column_defaults_,
const ColumnsDescription & columns_,
bool attach,
size_t max_compress_block_size_);
};

@ -276,13 +276,10 @@ void TinyLogBlockOutputStream::write(const Block & block)
StorageTinyLog::StorageTinyLog(
const std::string & path_,
const std::string & name_,
const NamesAndTypesList & columns_,
const NamesAndTypesList & materialized_columns_,
const NamesAndTypesList & alias_columns_,
const ColumnDefaults & column_defaults_,
const ColumnsDescription & columns_,
bool attach,
size_t max_compress_block_size_)
: IStorage{columns_, materialized_columns_, alias_columns_, column_defaults_},
: IStorage{columns_},
path(path_), name(name_),
max_compress_block_size(max_compress_block_size_),
file_checker(path + escapeForFileName(name) + '/' + "sizes.json"),
@ -299,7 +296,7 @@ StorageTinyLog::StorageTinyLog(
throwFromErrno("Cannot create directory " + full_path, ErrorCodes::CANNOT_CREATE_DIRECTORY);
}

for (const auto & col : getColumnsList())
for (const auto & col : getColumns().getAllPhysical())
addFiles(col.name, *col.type);
}

@ -351,7 +348,7 @@ BlockInputStreams StorageTinyLog::read(
check(column_names);
processed_stage = QueryProcessingStage::FetchColumns;
return BlockInputStreams(1, std::make_shared<TinyLogBlockInputStream>(
max_block_size, Nested::collect(getColumnsList().addTypes(column_names)), *this, context.getSettingsRef().max_read_buffer_size));
max_block_size, Nested::collect(getColumns().getAllPhysical().addTypes(column_names)), *this, context.getSettingsRef().max_read_buffer_size));
}

@ -379,7 +376,6 @@ void registerStorageTinyLog(StorageFactory & factory)

return StorageTinyLog::create(
args.data_path, args.table_name, args.columns,
args.materialized_columns, args.alias_columns, args.column_defaults,
args.attach, args.context.getSettings().max_compress_block_size);
});
}

@ -71,10 +71,7 @@ protected:
StorageTinyLog(
const std::string & path_,
const std::string & name_,
const NamesAndTypesList & columns_,
const NamesAndTypesList & materialized_columns_,
const NamesAndTypesList & alias_columns_,
const ColumnDefaults & column_defaults_,
const ColumnsDescription & columns_,
bool attach,
size_t max_compress_block_size_);
};

@ -19,8 +19,8 @@ namespace ErrorCodes
StorageView::StorageView(
const String & table_name_,
const ASTCreateQuery & query,
const NamesAndTypesList & columns_)
: IStorage{columns_, {}, {}, {}}, table_name(table_name_)
const ColumnsDescription & columns_)
: IStorage{columns_}, table_name(table_name_)
{
if (!query.select)
throw Exception("SELECT query is not specified for " + getName(), ErrorCodes::INCORRECT_QUERY);

@ -42,7 +42,7 @@ protected:
StorageView(
const String & table_name_,
const ASTCreateQuery & query,
const NamesAndTypesList & columns_);
const ColumnsDescription & columns_);
};

}

@ -16,10 +16,10 @@ StorageSystemAsynchronousMetrics::StorageSystemAsynchronousMetrics(const std::st
: name(name_),
async_metrics(async_metrics_)
{
columns = NamesAndTypesList{
setColumns(ColumnsDescription({
{"metric", std::make_shared<DataTypeString>()},
{"value", std::make_shared<DataTypeFloat64>()},
};
}));
}

@ -13,10 +13,10 @@ namespace DB
StorageSystemBuildOptions::StorageSystemBuildOptions(const std::string & name_)
: name(name_)
{
columns = NamesAndTypesList{
setColumns(ColumnsDescription({
{ "name", std::make_shared<DataTypeString>() },
{ "value", std::make_shared<DataTypeString>() },
};
}));
}

@ -14,7 +14,7 @@ namespace DB
StorageSystemClusters::StorageSystemClusters(const std::string & name_)
: name(name_)
{
columns = NamesAndTypesList{
setColumns(ColumnsDescription({
{ "cluster", std::make_shared<DataTypeString>() },
{ "shard_num", std::make_shared<DataTypeUInt32>() },
{ "shard_weight", std::make_shared<DataTypeUInt32>() },
@ -24,8 +24,8 @@ StorageSystemClusters::StorageSystemClusters(const std::string & name_)
{ "port", std::make_shared<DataTypeUInt16>() },
{ "is_local", std::make_shared<DataTypeUInt8>() },
{ "user", std::make_shared<DataTypeString>() },
{ "default_database", std::make_shared<DataTypeString>() }
};
{ "default_database", std::make_shared<DataTypeString>() },
}));
}

@ -18,7 +18,7 @@ namespace DB
StorageSystemColumns::StorageSystemColumns(const std::string & name_)
: name(name_)
{
columns = NamesAndTypesList{
setColumns(ColumnsDescription({
{ "database", std::make_shared<DataTypeString>() },
{ "table", std::make_shared<DataTypeString>() },
{ "name", std::make_shared<DataTypeString>() },
@ -28,7 +28,7 @@ StorageSystemColumns::StorageSystemColumns(const std::string & name_)
{ "data_compressed_bytes", std::make_shared<DataTypeUInt64>() },
{ "data_uncompressed_bytes", std::make_shared<DataTypeUInt64>() },
{ "marks_bytes", std::make_shared<DataTypeUInt64>() },
};
}));
}

@ -137,9 +137,8 @@ BlockInputStreams StorageSystemColumns::read(
throw;
}

columns = storage->getColumnsList();
columns.insert(std::end(columns), std::begin(storage->alias_columns), std::end(storage->alias_columns));
column_defaults = storage->column_defaults;
columns = storage->getColumns().getAll();
column_defaults = storage->getColumns().defaults;

/** Info about sizes of columns for tables of MergeTree family.
* NOTE: It is possible to add getter for this info to IStorage interface.
@ -172,7 +171,7 @@ BlockInputStreams StorageSystemColumns::read(
}
else
{
res_columns[i++]->insert(toString(it->second.type));
res_columns[i++]->insert(toString(it->second.kind));
res_columns[i++]->insert(queryToString(it->second.expression));
}
}

@ -13,12 +13,12 @@ namespace DB
StorageSystemDatabases::StorageSystemDatabases(const std::string & name_)
: name(name_)
{
columns = NamesAndTypesList{
setColumns(ColumnsDescription({
{"name", std::make_shared<DataTypeString>()},
{"engine", std::make_shared<DataTypeString>()},
{"data_path", std::make_shared<DataTypeString>()},
{"metadata_path", std::make_shared<DataTypeString>()},
};
}));
}
Some files were not shown because too many files have changed in this diff.