Mirror of https://github.com/ClickHouse/ClickHouse.git (synced 2024-11-10 09:32:06 +00:00)

Commit 84d64a6ed8: Merge remote-tracking branch 'origin/master' into pr-better-replicas-failover-2
@@ -108,6 +108,11 @@ public:
*/
QueryTreeNodePtr getColumnSourceOrNull() const;

+void setColumnSource(const QueryTreeNodePtr & source)
+{
+getSourceWeakPointer() = source;
+}
+
QueryTreeNodeType getNodeType() const override
{
return QueryTreeNodeType::COLUMN;
@@ -31,7 +31,7 @@ public:
virtual String getDescription() = 0;

/// Run pass over query tree
-virtual void run(QueryTreeNodePtr query_tree_node, ContextPtr context) = 0;
+virtual void run(QueryTreeNodePtr & query_tree_node, ContextPtr context) = 0;

};
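The hunks below repeat one mechanical change across the analyzer passes: run() now receives the query tree root by reference (QueryTreeNodePtr &) instead of by value. A minimal, self-contained sketch of why that matters (toy types, not the real ClickHouse interface): a pass must be able to replace the root node itself, not only mutate its children, and the replacement has to be visible to the caller.

// Sketch only: simplified Node/Pass hierarchy, not ClickHouse's actual classes.
#include <iostream>
#include <memory>
#include <string>

struct Node { virtual ~Node() = default; virtual std::string name() const = 0; };
using NodePtr = std::shared_ptr<Node>;

struct IdentifierNode : Node { std::string name() const override { return "identifier"; } };
struct ColumnNode : Node { std::string name() const override { return "column"; } };

struct IPass
{
    virtual ~IPass() = default;
    // By-reference parameter: the pass may swap out the caller's root pointer.
    virtual void run(NodePtr & node) = 0;
};

struct ResolvePass : IPass
{
    void run(NodePtr & node) override
    {
        if (node->name() == "identifier")
            node = std::make_shared<ColumnNode>(); // replacement visible to the caller
    }
};

int main()
{
    NodePtr root = std::make_shared<IdentifierNode>();
    ResolvePass{}.run(root);
    std::cout << root->name() << '\n'; // prints "column"
}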
@@ -194,7 +194,7 @@ private:

}

-void AggregateFunctionsArithmericOperationsPass::run(QueryTreeNodePtr query_tree_node, ContextPtr context)
+void AggregateFunctionsArithmericOperationsPass::run(QueryTreeNodePtr & query_tree_node, ContextPtr context)
{
AggregateFunctionsArithmericOperationsVisitor visitor(std::move(context));
visitor.visit(query_tree_node);
@@ -17,7 +17,7 @@ public:

String getDescription() override { return "Extract arithmeric operations from aggregate functions."; }

-void run(QueryTreeNodePtr query_tree_node, ContextPtr context) override;
+void run(QueryTreeNodePtr & query_tree_node, ContextPtr context) override;

};
@@ -92,7 +92,7 @@ public:

}

-void RewriteArrayExistsToHasPass::run(QueryTreeNodePtr query_tree_node, ContextPtr context)
+void RewriteArrayExistsToHasPass::run(QueryTreeNodePtr & query_tree_node, ContextPtr context)
{
RewriteArrayExistsToHasVisitor visitor(context);
visitor.visit(query_tree_node);
@@ -20,7 +20,7 @@ public:

String getDescription() override { return "Rewrite arrayExists(func, arr) functions to has(arr, elem) when logically equivalent"; }

-void run(QueryTreeNodePtr query_tree_node, ContextPtr context) override;
+void run(QueryTreeNodePtr & query_tree_node, ContextPtr context) override;
};

}
@@ -67,7 +67,7 @@ private:

}

-void AutoFinalOnQueryPass::run(QueryTreeNodePtr query_tree_node, ContextPtr context)
+void AutoFinalOnQueryPass::run(QueryTreeNodePtr & query_tree_node, ContextPtr context)
{
auto visitor = AutoFinalOnQueryPassVisitor(std::move(context));
visitor.visit(query_tree_node);
@@ -25,7 +25,7 @@ public:
return "Automatically applies final modifier to table expressions in queries if it is supported and if user level final setting is set";
}

-void run(QueryTreeNodePtr query_tree_node, ContextPtr context) override;
+void run(QueryTreeNodePtr & query_tree_node, ContextPtr context) override;
};

}
@@ -213,7 +213,7 @@ private:

}

-void ComparisonTupleEliminationPass::run(QueryTreeNodePtr query_tree_node, ContextPtr context)
+void ComparisonTupleEliminationPass::run(QueryTreeNodePtr & query_tree_node, ContextPtr context)
{
ComparisonTupleEliminationPassVisitor visitor(std::move(context));
visitor.visit(query_tree_node);
@@ -17,7 +17,7 @@ public:

String getDescription() override { return "Rewrite tuples comparison into equivalent comparison of tuples arguments"; }

-void run(QueryTreeNodePtr query_tree_node, ContextPtr context) override;
+void run(QueryTreeNodePtr & query_tree_node, ContextPtr context) override;

};
@@ -132,7 +132,7 @@ private:

}

-void ConvertOrLikeChainPass::run(QueryTreeNodePtr query_tree_node, ContextPtr context)
+void ConvertOrLikeChainPass::run(QueryTreeNodePtr & query_tree_node, ContextPtr context)
{
auto or_function_resolver = FunctionFactory::instance().get("or", context);
auto match_function_resolver = FunctionFactory::instance().get("multiMatchAny", context);
@@ -14,7 +14,7 @@ public:

String getDescription() override { return "Replaces all the 'or's with {i}like to multiMatchAny"; }

-void run(QueryTreeNodePtr query_tree_node, ContextPtr context) override;
+void run(QueryTreeNodePtr & query_tree_node, ContextPtr context) override;
};

}
@@ -718,7 +718,7 @@ public:

}

-void ConvertLogicalExpressionToCNFPass::run(QueryTreeNodePtr query_tree_node, ContextPtr context)
+void ConvertLogicalExpressionToCNFPass::run(QueryTreeNodePtr & query_tree_node, ContextPtr context)
{
const auto & settings = context->getSettingsRef();
if (!settings.convert_query_to_cnf)
@@ -12,7 +12,7 @@ public:

String getDescription() override { return "Convert logical expression to CNF and apply optimizations using constraints"; }

-void run(QueryTreeNodePtr query_tree_node, ContextPtr context) override;
+void run(QueryTreeNodePtr & query_tree_node, ContextPtr context) override;
};

}
@@ -87,7 +87,7 @@ public:

}

-void CountDistinctPass::run(QueryTreeNodePtr query_tree_node, ContextPtr context)
+void CountDistinctPass::run(QueryTreeNodePtr & query_tree_node, ContextPtr context)
{
CountDistinctVisitor visitor(std::move(context));
visitor.visit(query_tree_node);
@@ -20,7 +20,7 @@ public:
return "Optimize single countDistinct into count over subquery";
}

-void run(QueryTreeNodePtr query_tree_node, ContextPtr context) override;
+void run(QueryTreeNodePtr & query_tree_node, ContextPtr context) override;

};
@@ -264,7 +264,7 @@ private:

}

-void CrossToInnerJoinPass::run(QueryTreeNodePtr query_tree_node, ContextPtr context)
+void CrossToInnerJoinPass::run(QueryTreeNodePtr & query_tree_node, ContextPtr context)
{
CrossToInnerJoinVisitor visitor(std::move(context));
visitor.visit(query_tree_node);
@@ -22,7 +22,7 @@ public:
return "Replace CROSS JOIN with INNER JOIN";
}

-void run(QueryTreeNodePtr query_tree_node, ContextPtr context) override;
+void run(QueryTreeNodePtr & query_tree_node, ContextPtr context) override;
};

}
@@ -224,7 +224,7 @@ private:

}

-void FunctionToSubcolumnsPass::run(QueryTreeNodePtr query_tree_node, ContextPtr context)
+void FunctionToSubcolumnsPass::run(QueryTreeNodePtr & query_tree_node, ContextPtr context)
{
FunctionToSubcolumnsVisitor visitor(context);
visitor.visit(query_tree_node);
@@ -24,7 +24,7 @@ public:

String getDescription() override { return "Rewrite function to subcolumns, for example tupleElement(column, subcolumn) into column.subcolumn"; }

-void run(QueryTreeNodePtr query_tree_node, ContextPtr context) override;
+void run(QueryTreeNodePtr & query_tree_node, ContextPtr context) override;

};
@@ -256,7 +256,7 @@ void tryFuseQuantiles(QueryTreeNodePtr query_tree_node, ContextPtr context)

}

-void FuseFunctionsPass::run(QueryTreeNodePtr query_tree_node, ContextPtr context)
+void FuseFunctionsPass::run(QueryTreeNodePtr & query_tree_node, ContextPtr context)
{
tryFuseSumCountAvg(query_tree_node, context);
tryFuseQuantiles(query_tree_node, context);
@@ -20,7 +20,7 @@ public:

String getDescription() override { return "Replaces several calls of aggregate functions of the same family into one call"; }

-void run(QueryTreeNodePtr query_tree_node, ContextPtr context) override;
+void run(QueryTreeNodePtr & query_tree_node, ContextPtr context) override;
};

}
@@ -249,7 +249,7 @@ private:

}

-void GroupingFunctionsResolvePass::run(QueryTreeNodePtr query_tree_node, ContextPtr context)
+void GroupingFunctionsResolvePass::run(QueryTreeNodePtr & query_tree_node, ContextPtr context)
{
GroupingFunctionsResolveVisitor visitor(std::move(context));
visitor.visit(query_tree_node);
@@ -24,7 +24,7 @@ public:

String getDescription() override { return "Resolve GROUPING functions based on GROUP BY modifiers"; }

-void run(QueryTreeNodePtr query_tree_node, ContextPtr context) override;
+void run(QueryTreeNodePtr & query_tree_node, ContextPtr context) override;

};
@@ -73,7 +73,7 @@ private:

}

-void IfChainToMultiIfPass::run(QueryTreeNodePtr query_tree_node, ContextPtr context)
+void IfChainToMultiIfPass::run(QueryTreeNodePtr & query_tree_node, ContextPtr context)
{
auto multi_if_function_ptr = FunctionFactory::instance().get("multiIf", context);
IfChainToMultiIfPassVisitor visitor(std::move(multi_if_function_ptr), std::move(context));
@@ -18,7 +18,7 @@ public:

String getDescription() override { return "Optimize if chain to multiIf"; }

-void run(QueryTreeNodePtr query_tree_node, ContextPtr context) override;
+void run(QueryTreeNodePtr & query_tree_node, ContextPtr context) override;

};
@@ -57,7 +57,7 @@ public:

}

-void IfConstantConditionPass::run(QueryTreeNodePtr query_tree_node, ContextPtr context)
+void IfConstantConditionPass::run(QueryTreeNodePtr & query_tree_node, ContextPtr context)
{
IfConstantConditionVisitor visitor(std::move(context));
visitor.visit(query_tree_node);
@@ -21,7 +21,7 @@ public:

String getDescription() override { return "Optimize if, multiIf for constant condition."; }

-void run(QueryTreeNodePtr query_tree_node, ContextPtr context) override;
+void run(QueryTreeNodePtr & query_tree_node, ContextPtr context) override;

};
@@ -190,7 +190,7 @@ public:

}

-void IfTransformStringsToEnumPass::run(QueryTreeNodePtr query, ContextPtr context)
+void IfTransformStringsToEnumPass::run(QueryTreeNodePtr & query, ContextPtr context)
{
ConvertStringsToEnumVisitor visitor(std::move(context));
visitor.visit(query);
@@ -33,7 +33,7 @@ public:

String getDescription() override { return "Replaces string-type arguments in If and Transform to enum"; }

-void run(QueryTreeNodePtr query_tree_node, ContextPtr context) override;
+void run(QueryTreeNodePtr & query_tree_node, ContextPtr context) override;
};

}
@@ -554,7 +554,7 @@ private:
}
};

-void LogicalExpressionOptimizerPass::run(QueryTreeNodePtr query_tree_node, ContextPtr context)
+void LogicalExpressionOptimizerPass::run(QueryTreeNodePtr & query_tree_node, ContextPtr context)
{
LogicalExpressionOptimizerVisitor visitor(std::move(context));
visitor.visit(query_tree_node);
@@ -109,7 +109,7 @@ public:
"replace chains of equality functions inside an OR with a single IN operator";
}

-void run(QueryTreeNodePtr query_tree_node, ContextPtr context) override;
+void run(QueryTreeNodePtr & query_tree_node, ContextPtr context) override;
};

}
@@ -52,7 +52,7 @@ private:

}

-void MultiIfToIfPass::run(QueryTreeNodePtr query_tree_node, ContextPtr context)
+void MultiIfToIfPass::run(QueryTreeNodePtr & query_tree_node, ContextPtr context)
{
auto if_function_ptr = FunctionFactory::instance().get("if", context);
MultiIfToIfVisitor visitor(std::move(if_function_ptr), std::move(context));
@@ -17,7 +17,7 @@ public:

String getDescription() override { return "Optimize multiIf with single condition to if."; }

-void run(QueryTreeNodePtr query_tree_node, ContextPtr context) override;
+void run(QueryTreeNodePtr & query_tree_node, ContextPtr context) override;

};
@@ -64,7 +64,7 @@ private:

}

-void NormalizeCountVariantsPass::run(QueryTreeNodePtr query_tree_node, ContextPtr context)
+void NormalizeCountVariantsPass::run(QueryTreeNodePtr & query_tree_node, ContextPtr context)
{
NormalizeCountVariantsVisitor visitor(context);
visitor.visit(query_tree_node);
@@ -20,7 +20,7 @@ public:

String getDescription() override { return "Optimize count(literal), sum(1) into count()."; }

-void run(QueryTreeNodePtr query_tree_node, ContextPtr context) override;
+void run(QueryTreeNodePtr & query_tree_node, ContextPtr context) override;

};
@@ -215,7 +215,7 @@ private:

}

-void OptimizeDateOrDateTimeConverterWithPreimagePass::run(QueryTreeNodePtr query_tree_node, ContextPtr context)
+void OptimizeDateOrDateTimeConverterWithPreimagePass::run(QueryTreeNodePtr & query_tree_node, ContextPtr context)
{
OptimizeDateOrDateTimeConverterWithPreimageVisitor visitor(std::move(context));
visitor.visit(query_tree_node);
@@ -17,7 +17,7 @@ public:

String getDescription() override { return "Replace predicate having Date/DateTime converters with their preimages"; }

-void run(QueryTreeNodePtr query_tree_node, ContextPtr context) override;
+void run(QueryTreeNodePtr & query_tree_node, ContextPtr context) override;

};
@@ -130,7 +130,7 @@ private:
}
};

-void OptimizeGroupByFunctionKeysPass::run(QueryTreeNodePtr query_tree_node, ContextPtr context)
+void OptimizeGroupByFunctionKeysPass::run(QueryTreeNodePtr & query_tree_node, ContextPtr context)
{
OptimizeGroupByFunctionKeysVisitor visitor(std::move(context));
visitor.visit(query_tree_node);
@@ -16,7 +16,7 @@ public:

String getDescription() override { return "Eliminates functions of other keys in GROUP BY section."; }

-void run(QueryTreeNodePtr query_tree_node, ContextPtr context) override;
+void run(QueryTreeNodePtr & query_tree_node, ContextPtr context) override;
};

}
@@ -115,7 +115,7 @@ private:

}

-void OptimizeGroupByInjectiveFunctionsPass::run(QueryTreeNodePtr query_tree_node, ContextPtr context)
+void OptimizeGroupByInjectiveFunctionsPass::run(QueryTreeNodePtr & query_tree_node, ContextPtr context)
{
OptimizeGroupByInjectiveFunctionsVisitor visitor(std::move(context));
visitor.visit(query_tree_node);
@@ -14,7 +14,7 @@ public:

String getDescription() override { return "Replaces injective functions by it's arguments in GROUP BY section."; }

-void run(QueryTreeNodePtr query_tree_node, ContextPtr context) override;
+void run(QueryTreeNodePtr & query_tree_node, ContextPtr context) override;
};

}
@@ -124,7 +124,7 @@ private:

}

-void OptimizeRedundantFunctionsInOrderByPass::run(QueryTreeNodePtr query_tree_node, ContextPtr context)
+void OptimizeRedundantFunctionsInOrderByPass::run(QueryTreeNodePtr & query_tree_node, ContextPtr context)
{
OptimizeRedundantFunctionsInOrderByVisitor visitor(std::move(context));
visitor.visit(query_tree_node);
@@ -17,7 +17,7 @@ public:

String getDescription() override { return "If ORDER BY has argument x followed by f(x) transforms it to ORDER BY x."; }

-void run(QueryTreeNodePtr query_tree_node, ContextPtr context) override;
+void run(QueryTreeNodePtr & query_tree_node, ContextPtr context) override;
};

}
@@ -70,7 +70,7 @@ private:

}

-void OrderByLimitByDuplicateEliminationPass::run(QueryTreeNodePtr query_tree_node, ContextPtr)
+void OrderByLimitByDuplicateEliminationPass::run(QueryTreeNodePtr & query_tree_node, ContextPtr)
{
OrderByLimitByDuplicateEliminationVisitor visitor;
visitor.visit(query_tree_node);
@@ -20,7 +20,7 @@ public:

String getDescription() override { return "Remove duplicate columns from ORDER BY, LIMIT BY."; }

-void run(QueryTreeNodePtr query_tree_node, ContextPtr context) override;
+void run(QueryTreeNodePtr & query_tree_node, ContextPtr context) override;

};
@@ -50,7 +50,7 @@ public:

}

-void OrderByTupleEliminationPass::run(QueryTreeNodePtr query_tree_node, ContextPtr)
+void OrderByTupleEliminationPass::run(QueryTreeNodePtr & query_tree_node, ContextPtr)
{
OrderByTupleEliminationVisitor visitor;
visitor.visit(query_tree_node);
@@ -17,7 +17,7 @@ public:

String getDescription() override { return "Remove tuple from ORDER BY."; }

-void run(QueryTreeNodePtr query_tree_node, ContextPtr context) override;
+void run(QueryTreeNodePtr & query_tree_node, ContextPtr context) override;

};
@@ -79,6 +79,8 @@
#include <Analyzer/QueryTreeBuilder.h>
#include <Analyzer/IQueryTreeNode.h>
#include <Analyzer/Identifier.h>
#include <Poco/Logger.h>
#include <Common/logger_useful.h>

namespace ProfileEvents
{
@@ -1066,7 +1068,7 @@ private:
class QueryAnalyzer
{
public:
-void resolve(QueryTreeNodePtr node, const QueryTreeNodePtr & table_expression, ContextPtr context)
+void resolve(QueryTreeNodePtr & node, const QueryTreeNodePtr & table_expression, ContextPtr context)
{
IdentifierResolveScope scope(node, nullptr /*parent_scope*/);
@@ -7649,7 +7651,7 @@ QueryAnalysisPass::QueryAnalysisPass(QueryTreeNodePtr table_expression_)
: table_expression(std::move(table_expression_))
{}

-void QueryAnalysisPass::run(QueryTreeNodePtr query_tree_node, ContextPtr context)
+void QueryAnalysisPass::run(QueryTreeNodePtr & query_tree_node, ContextPtr context)
{
QueryAnalyzer analyzer;
analyzer.resolve(query_tree_node, table_expression, context);
@@ -89,7 +89,7 @@ public:
return "Resolve type for each query expression. Replace identifiers, matchers with query expressions. Perform constant folding. Evaluate scalar subqueries.";
}

-void run(QueryTreeNodePtr query_tree_node, ContextPtr context) override;
+void run(QueryTreeNodePtr & query_tree_node, ContextPtr context) override;

private:
QueryTreeNodePtr table_expression;
@@ -132,7 +132,7 @@ void updateUsedProjectionIndexes(const QueryTreeNodePtr & query_or_union_node, s

}

-void RemoveUnusedProjectionColumnsPass::run(QueryTreeNodePtr query_tree_node, ContextPtr context)
+void RemoveUnusedProjectionColumnsPass::run(QueryTreeNodePtr & query_tree_node, ContextPtr context)
{
std::vector<QueryTreeNodePtr> nodes_to_visit;
nodes_to_visit.push_back(query_tree_node);
@@ -17,7 +17,7 @@ public:

String getDescription() override { return "Remove unused projection columns in subqueries."; }

-void run(QueryTreeNodePtr query_tree_node, ContextPtr context) override;
+void run(QueryTreeNodePtr & query_tree_node, ContextPtr context) override;

};
@@ -109,7 +109,7 @@ private:
}

-void RewriteAggregateFunctionWithIfPass::run(QueryTreeNodePtr query_tree_node, ContextPtr context)
+void RewriteAggregateFunctionWithIfPass::run(QueryTreeNodePtr & query_tree_node, ContextPtr context)
{
RewriteAggregateFunctionWithIfVisitor visitor(context);
visitor.visit(query_tree_node);
@@ -20,7 +20,7 @@ public:
return "Rewrite aggregate functions with if expression as argument when logically equivalent";
}

-void run(QueryTreeNodePtr query_tree_node, ContextPtr context) override;
+void run(QueryTreeNodePtr & query_tree_node, ContextPtr context) override;

};
@@ -120,7 +120,7 @@ private:

}

-void RewriteSumFunctionWithSumAndCountPass::run(QueryTreeNodePtr query_tree_node, ContextPtr context)
+void RewriteSumFunctionWithSumAndCountPass::run(QueryTreeNodePtr & query_tree_node, ContextPtr context)
{
RewriteSumFunctionWithSumAndCountVisitor visitor(std::move(context));
visitor.visit(query_tree_node);
@@ -20,7 +20,7 @@ public:

String getDescription() override { return "Rewrite sum(column +/- literal) into sum(column) and literal * count(column)"; }

-void run(QueryTreeNodePtr query_tree_node, ContextPtr context) override;
+void run(QueryTreeNodePtr & query_tree_node, ContextPtr context) override;

};
@@ -58,7 +58,7 @@ public:

}

-void ShardNumColumnToFunctionPass::run(QueryTreeNodePtr query_tree_node, ContextPtr context)
+void ShardNumColumnToFunctionPass::run(QueryTreeNodePtr & query_tree_node, ContextPtr context)
{
ShardNumColumnToFunctionVisitor visitor(context);
visitor.visit(query_tree_node);
@@ -17,7 +17,7 @@ public:

String getDescription() override { return "Rewrite _shard_num column into shardNum() function"; }

-void run(QueryTreeNodePtr query_tree_node, ContextPtr context) override;
+void run(QueryTreeNodePtr & query_tree_node, ContextPtr context) override;

};
@@ -178,7 +178,7 @@ private:

}

-void SumIfToCountIfPass::run(QueryTreeNodePtr query_tree_node, ContextPtr context)
+void SumIfToCountIfPass::run(QueryTreeNodePtr & query_tree_node, ContextPtr context)
{
SumIfToCountIfVisitor visitor(context);
visitor.visit(query_tree_node);
@@ -23,7 +23,7 @@ public:

String getDescription() override { return "Rewrite sum(if) and sumIf into countIf"; }

-void run(QueryTreeNodePtr query_tree_node, ContextPtr context) override;
+void run(QueryTreeNodePtr & query_tree_node, ContextPtr context) override;

};
@@ -89,7 +89,7 @@ public:

}

-void UniqInjectiveFunctionsEliminationPass::run(QueryTreeNodePtr query_tree_node, ContextPtr context)
+void UniqInjectiveFunctionsEliminationPass::run(QueryTreeNodePtr & query_tree_node, ContextPtr context)
{
UniqInjectiveFunctionsEliminationVisitor visitor(std::move(context));
visitor.visit(query_tree_node);
@@ -17,7 +17,7 @@ public:

String getDescription() override { return "Remove injective functions from uniq functions arguments."; }

-void run(QueryTreeNodePtr query_tree_node, ContextPtr context) override;
+void run(QueryTreeNodePtr & query_tree_node, ContextPtr context) override;

};
@@ -185,7 +185,7 @@ public:
};

-void UniqToCountPass::run(QueryTreeNodePtr query_tree_node, ContextPtr context)
+void UniqToCountPass::run(QueryTreeNodePtr & query_tree_node, ContextPtr context)
{
UniqToCountVisitor visitor(context);
visitor.visit(query_tree_node);
@@ -24,7 +24,7 @@ public:
return "Rewrite uniq and its variants(except uniqUpTo) to count if subquery has distinct or group by clause.";
}

-void run(QueryTreeNodePtr query_tree_node, ContextPtr context) override;
+void run(QueryTreeNodePtr & query_tree_node, ContextPtr context) override;
};

}
@@ -215,6 +215,8 @@ class IColumn;
M(UInt64, merge_tree_max_rows_to_use_cache, (128 * 8192), "The maximum number of rows per request, to use the cache of uncompressed data. If the request is large, the cache is not used. (For large queries not to flush out the cache.)", 0) \
M(UInt64, merge_tree_max_bytes_to_use_cache, (192 * 10 * 1024 * 1024), "The maximum number of bytes per request, to use the cache of uncompressed data. If the request is large, the cache is not used. (For large queries not to flush out the cache.)", 0) \
M(Bool, do_not_merge_across_partitions_select_final, false, "Merge parts only in one partition in select final", 0) \
+M(Bool, split_parts_ranges_into_intersecting_and_non_intersecting_final, true, "Split parts ranges into intersecting and non intersecting during FINAL optimization", 0) \
+M(Bool, split_intersecting_parts_ranges_into_layers_final, true, "Split intersecting parts ranges into layers during FINAL optimization", 0) \
M(Bool, allow_experimental_inverted_index, false, "If it is set to true, allow to use experimental inverted index.", 0) \
\
M(UInt64, mysql_max_rows_to_insert, 65536, "The maximum number of rows in MySQL batch insertion of the MySQL storage engine", 0) \
@@ -855,7 +857,7 @@ class IColumn;
M(UInt64, grace_hash_join_max_buckets, 1024, "Limit on the number of grace hash join buckets", 0) \
M(Bool, optimize_distinct_in_order, true, "Enable DISTINCT optimization if some columns in DISTINCT form a prefix of sorting. For example, prefix of sorting key in merge tree or ORDER BY statement", 0) \
M(Bool, keeper_map_strict_mode, false, "Enforce additional checks during operations on KeeperMap. E.g. throw an exception on an insert for already existing key", 0) \
M(UInt64, extract_key_value_pairs_max_pairs_per_row, 1000, "Max number of pairs that can be produced by the `extractKeyValuePairs` function. Used as a safeguard against consuming too much memory.", 0) ALIAS(extract_kvp_max_pairs_per_row) \
M(UInt64, extract_kvp_max_pairs_per_row, 1000, "Max number pairs that can be produced by extractKeyValuePairs function. Used to safeguard against consuming too much memory.", 0) \
M(Timezone, session_timezone, "", "This setting can be removed in the future due to potential caveats. It is experimental and is not suitable for production usage. The default timezone for current session or query. The server default timezone if empty.", 0) \
M(Bool, allow_create_index_without_type, false, "Allow CREATE INDEX query without TYPE. Query will be ignored. Made for SQL compatibility tests.", 0) \
M(Bool, create_index_ignore_unique, false, "Ignore UNIQUE keyword in CREATE UNIQUE INDEX. Made for SQL compatibility tests.", 0) \
@@ -90,7 +90,9 @@ static std::map<ClickHouseVersion, SettingsChangesHistory::SettingsChanges> sett
{"async_insert_busy_timeout_min_ms", 50, 50, "The minimum value of the asynchronous insert timeout in milliseconds; it also serves as the initial value, which may be increased later by the adaptive algorithm"},
{"async_insert_busy_timeout_max_ms", 200, 200, "The minimum value of the asynchronous insert timeout in milliseconds; async_insert_busy_timeout_ms is aliased to async_insert_busy_timeout_max_ms"},
{"async_insert_busy_timeout_increase_rate", 0.2, 0.2, "The exponential growth rate at which the adaptive asynchronous insert timeout increases"},
-{"async_insert_busy_timeout_decrease_rate", 0.2, 0.2, "The exponential growth rate at which the adaptive asynchronous insert timeout decreases"}}},
+{"async_insert_busy_timeout_decrease_rate", 0.2, 0.2, "The exponential growth rate at which the adaptive asynchronous insert timeout decreases"},
+{"split_parts_ranges_into_intersecting_and_non_intersecting_final", true, true, "Allow to split parts ranges into intersecting and non intersecting during FINAL optimization"},
+{"split_intersecting_parts_ranges_into_layers_final", true, true, "Allow to split intersecting parts ranges into layers during FINAL optimization"}}},
{"24.1", {{"print_pretty_type_names", false, true, "Better user experience."},
{"input_format_json_read_bools_as_strings", false, true, "Allow to read bools as strings in JSON formats by default"},
{"output_format_arrow_use_signed_indexes_for_dictionary", false, true, "Use signed indexes type for Arrow dictionaries by default as it's recommended"},
@@ -111,7 +113,9 @@ static std::map<ClickHouseVersion, SettingsChangesHistory::SettingsChanges> sett
{"iceberg_engine_ignore_schema_evolution", false, false, "Allow to ignore schema evolution in Iceberg table engine"},
{"optimize_injective_functions_in_group_by", false, true, "Replace injective functions by it's arguments in GROUP BY section in analyzer"},
{"update_insert_deduplication_token_in_dependent_materialized_views", false, false, "Allow to update insert deduplication token with table identifier during insert in dependent materialized views"},
-{"azure_max_unexpected_write_error_retries", 4, 4, "The maximum number of retries in case of unexpected errors during Azure blob storage write"}}},
+{"azure_max_unexpected_write_error_retries", 4, 4, "The maximum number of retries in case of unexpected errors during Azure blob storage write"},
+{"split_parts_ranges_into_intersecting_and_non_intersecting_final", false, true, "Allow to split parts ranges into intersecting and non intersecting during FINAL optimization"},
+{"split_intersecting_parts_ranges_into_layers_final", true, true, "Allow to split intersecting parts ranges into layers during FINAL optimization"}}},
{"23.12", {{"allow_suspicious_ttl_expressions", true, false, "It is a new setting, and in previous versions the behavior was equivalent to allowing."},
{"input_format_parquet_allow_missing_columns", false, true, "Allow missing columns in Parquet files by default"},
{"input_format_orc_allow_missing_columns", false, true, "Allow missing columns in ORC files by default"},
@@ -43,11 +43,11 @@ class ExtractKeyValuePairs : public IFunction
builder.withQuotingCharacter(parsed_arguments.quoting_character.value());
}

bool is_number_of_pairs_unlimited = context->getSettingsRef().extract_key_value_pairs_max_pairs_per_row == 0;
bool is_number_of_pairs_unlimited = context->getSettingsRef().extract_kvp_max_pairs_per_row == 0;

if (!is_number_of_pairs_unlimited)
{
builder.withMaxNumberOfPairs(context->getSettingsRef().extract_key_value_pairs_max_pairs_per_row);
builder.withMaxNumberOfPairs(context->getSettingsRef().extract_kvp_max_pairs_per_row);
}

return builder.build();
@@ -1,5 +1,6 @@
#include <Columns/ColumnFixedString.h>
#include <Columns/ColumnString.h>
#include <DataTypes/DataTypeString.h>
#include <Functions/FunctionFactory.h>
#include <Functions/FunctionHelpers.h>
#include <Functions/GatherUtils/Algorithms.h>
@@ -188,7 +189,7 @@ namespace
arguments[2]->getName(),
getName());

-return arguments[0];
+return std::make_shared<DataTypeString>();
}

ColumnPtr executeImpl(const ColumnsWithTypeAndName & arguments, const DataTypePtr &, size_t input_rows_count) const override
@@ -495,8 +495,8 @@ PlannerActionsVisitorImpl::NodeNameAndNodeMinLevel PlannerActionsVisitorImpl::vi
return visitFunction(node);

throw Exception(ErrorCodes::UNSUPPORTED_METHOD,
-"Expected column, constant, function. Actual {}",
-node->formatASTForErrorMessage());
+"Expected column, constant, function. Actual {} with type: {}",
+node->formatASTForErrorMessage(), node_type);
}

PlannerActionsVisitorImpl::NodeNameAndNodeMinLevel PlannerActionsVisitorImpl::visitColumn(const QueryTreeNodePtr & node)
@@ -228,7 +228,7 @@ struct SplitPartsRangesResult
RangesInDataParts intersecting_parts_ranges;
};

-SplitPartsRangesResult splitPartsRanges(RangesInDataParts ranges_in_data_parts)
+SplitPartsRangesResult splitPartsRanges(RangesInDataParts ranges_in_data_parts, const LoggerPtr & logger)
{
/** Split ranges in data parts into intersecting ranges in data parts and non intersecting ranges in data parts.
*
@@ -483,10 +483,15 @@ SplitPartsRangesResult splitPartsRanges(RangesInDataParts ranges_in_data_parts)
intersecting_ranges_in_data_parts.end(),
[](const auto & lhs, const auto & rhs) { return lhs.part_index_in_query < rhs.part_index_in_query; });

+LOG_TEST(logger, "Non intersecting ranges in data parts {}", non_intersecting_ranges_in_data_parts.getDescriptions().describe());
+LOG_TEST(logger, "Intersecting ranges in data parts {}", intersecting_ranges_in_data_parts.getDescriptions().describe());

return {std::move(non_intersecting_ranges_in_data_parts), std::move(intersecting_ranges_in_data_parts)};
}

-std::pair<std::vector<RangesInDataParts>, std::vector<Values>> splitIntersectingPartsRangesIntoLayers(RangesInDataParts intersecting_ranges_in_data_parts, size_t max_layers)
+std::pair<std::vector<RangesInDataParts>, std::vector<Values>> splitIntersectingPartsRangesIntoLayers(RangesInDataParts intersecting_ranges_in_data_parts,
+size_t max_layers,
+const LoggerPtr & logger)
{
// We will advance the iterator pointing to the mark with the smallest PK value until
// there will be not less than rows_per_layer rows in the current layer (roughly speaking).
@@ -591,8 +596,18 @@ std::pair<std::vector<RangesInDataParts>, std::vector<Values>> splitIntersecting
result_layers.back() = std::move(current_layer_builder.getCurrentRangesInDataParts());
}

-for (auto & layer : result_layers)
+size_t result_layers_size = result_layers.size();
+LOG_TEST(logger, "Split intersecting ranges into {} layers", result_layers_size);
+
+for (size_t i = 0; i < result_layers_size; ++i)
{
+auto & layer = result_layers[i];
+
+LOG_TEST(logger, "Layer {} {} filter values in ({}, {}])",
+i,
+layer.getDescriptions().describe(),
+i ? ::toString(borders[i - 1]) : "-inf", i < borders.size() ? ::toString(borders[i]) : "+inf");
+
std::stable_sort(
layer.begin(),
layer.end(),
@@ -712,23 +727,32 @@ SplitPartsWithRangesByPrimaryKeyResult splitPartsWithRangesByPrimaryKey(
size_t max_layers,
ContextPtr context,
ReadingInOrderStepGetter && in_order_reading_step_getter,
-bool force_process_all_ranges)
+bool split_parts_ranges_into_intersecting_and_non_intersecting_final,
+bool split_intersecting_parts_ranges_into_layers)
{
if (max_layers <= 1)
throw Exception(ErrorCodes::LOGICAL_ERROR, "max_layer should be greater than 1");

+auto logger = getLogger("PartsSplitter");
+
SplitPartsWithRangesByPrimaryKeyResult result;

RangesInDataParts intersecting_parts_ranges = std::move(parts);

-if (!force_process_all_ranges)
+if (split_parts_ranges_into_intersecting_and_non_intersecting_final)
{
-SplitPartsRangesResult split_result = splitPartsRanges(intersecting_parts_ranges);
+SplitPartsRangesResult split_result = splitPartsRanges(intersecting_parts_ranges, logger);
result.non_intersecting_parts_ranges = std::move(split_result.non_intersecting_parts_ranges);
intersecting_parts_ranges = std::move(split_result.intersecting_parts_ranges);
}

-auto && [layers, borders] = splitIntersectingPartsRangesIntoLayers(intersecting_parts_ranges, max_layers);
+if (!split_intersecting_parts_ranges_into_layers)
+{
+result.merging_pipes.emplace_back(in_order_reading_step_getter(intersecting_parts_ranges));
+return result;
+}
+
+auto && [layers, borders] = splitIntersectingPartsRangesIntoLayers(intersecting_parts_ranges, max_layers, logger);
auto filters = buildFilters(primary_key, borders);
result.merging_pipes.resize(layers.size());

@@ -34,5 +34,6 @@ SplitPartsWithRangesByPrimaryKeyResult splitPartsWithRangesByPrimaryKey(
size_t max_layers,
ContextPtr context,
ReadingInOrderStepGetter && in_order_reading_step_getter,
-bool force_process_all_ranges);
+bool split_parts_ranges_into_intersecting_and_non_intersecting,
+bool split_intersecting_parts_ranges_into_layers);
}
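For orientation, here is a control-flow sketch of the refactored splitPartsWithRangesByPrimaryKey using toy types and a made-up splitting criterion (the real signature and logic are in the hunks above): the single force_process_all_ranges flag is replaced by two independently controllable steps, and when the layering step is disabled the function now falls back to a single in-order merging pipe.

// Illustrative only: "ranges" are plain ints, the splitting criteria are placeholders.
#include <utility>
#include <vector>

struct Ranges { std::vector<int> data; };

struct SplitResult
{
    Ranges non_intersecting;            // ranges that can bypass FINAL merging
    std::vector<Ranges> merging_pipes;  // each entry is merged independently
};

SplitResult splitForFinal(Ranges parts, bool split_non_intersecting, bool split_into_layers)
{
    SplitResult result;

    // Step 1 (split_parts_ranges_into_intersecting_and_non_intersecting_final):
    // peel off ranges that cannot overlap by primary key (toy criterion here).
    if (split_non_intersecting)
    {
        Ranges intersecting;
        for (int r : parts.data)
            (r % 2 == 0 ? result.non_intersecting.data : intersecting.data).push_back(r);
        parts = std::move(intersecting);
    }

    // Step 2 (split_intersecting_parts_ranges_into_layers_final): if disabled,
    // everything left goes through one in-order merging pipe, mirroring the new early return.
    if (!split_into_layers)
    {
        result.merging_pipes.push_back(std::move(parts));
        return result;
    }

    // Toy stand-in for splitIntersectingPartsRangesIntoLayers(): one layer per range.
    for (int r : parts.data)
        result.merging_pipes.push_back(Ranges{std::vector<int>{r}});
    return result;
}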
@@ -1175,7 +1175,8 @@ Pipe ReadFromMergeTree::spreadMarkRangesAmongStreamsFinal(

/// Parts of non-zero level still may contain duplicate PK values to merge on FINAL if there's is_deleted column,
/// so we have to process all ranges. It would be more optimal to remove this flag and add an extra filtering step.
-bool force_process_all_ranges = !data.merging_params.is_deleted_column.empty();
+bool split_parts_ranges_into_intersecting_and_non_intersecting_final = settings.split_parts_ranges_into_intersecting_and_non_intersecting_final &&
+data.merging_params.is_deleted_column.empty();

SplitPartsWithRangesByPrimaryKeyResult split_ranges_result = splitPartsWithRangesByPrimaryKey(
metadata_for_reading->getPrimaryKey(),
@@ -1184,7 +1185,8 @@ Pipe ReadFromMergeTree::spreadMarkRangesAmongStreamsFinal(
num_streams,
context,
std::move(in_order_reading_step_getter),
-force_process_all_ranges);
+split_parts_ranges_into_intersecting_and_non_intersecting_final,
+settings.split_intersecting_parts_ranges_into_layers_final);

for (auto && non_intersecting_parts_range : split_ranges_result.non_intersecting_parts_ranges)
non_intersecting_parts_by_primary_key.push_back(std::move(non_intersecting_parts_range));
@@ -7,7 +7,7 @@ namespace DB
{

ReadFromPreparedSource::ReadFromPreparedSource(Pipe pipe_)
-: SourceStepWithFilter(DataStream{.header = pipe_.getHeader()})
+: ISourceStep(DataStream{.header = pipe_.getHeader()})
, pipe(std::move(pipe_))
{
}
@@ -35,11 +35,4 @@ ReadFromStorageStep::ReadFromStorageStep(
processor->setStorageLimits(query_info.storage_limits);
}

-void ReadFromStorageStep::applyFilters()
-{
-for (const auto & processor : pipe.getProcessors())
-if (auto * source = dynamic_cast<SourceWithKeyCondition *>(processor.get()))
-source->setKeyCondition(filter_nodes.nodes, context);
-}
-
}
@@ -2,7 +2,6 @@

#include <Interpreters/Context.h>
#include <Processors/QueryPlan/ISourceStep.h>
-#include <Processors/QueryPlan/SourceStepWithFilter.h>
#include <QueryPipeline/Pipe.h>
#include <Storages/SelectQueryInfo.h>

@@ -10,7 +9,7 @@ namespace DB
{

/// Create source from prepared pipe.
-class ReadFromPreparedSource : public SourceStepWithFilter
+class ReadFromPreparedSource : public ISourceStep
{
public:
explicit ReadFromPreparedSource(Pipe pipe_);
@@ -28,7 +27,6 @@ public:
ReadFromStorageStep(Pipe pipe_, String storage_name, ContextPtr context_, const SelectQueryInfo & query_info_);

String getName() const override { return "ReadFromStorage"; }
-void applyFilters() override;

private:
ContextPtr context;
@@ -773,7 +773,8 @@ QueryTreeNodePtr buildQueryTreeDistributed(SelectQueryInfo & query_info,
table_function_node->setTableExpressionModifiers(*table_expression_modifiers);

QueryAnalysisPass query_analysis_pass;
-query_analysis_pass.run(table_function_node, query_context);
+QueryTreeNodePtr node = table_function_node;
+query_analysis_pass.run(node, query_context);

replacement_table_expression = std::move(table_function_node);
}
@@ -1,6 +1,13 @@
#include <algorithm>
#include <functional>
#include <iterator>
#include <Analyzer/ConstantNode.h>
#include <Analyzer/ColumnNode.h>
#include <Analyzer/FunctionNode.h>
#include <Analyzer/IdentifierNode.h>
#include <Analyzer/InDepthQueryTreeVisitor.h>
#include <Analyzer/Passes/QueryAnalysisPass.h>
#include <Analyzer/QueryTreeBuilder.h>
#include <Analyzer/TableNode.h>
#include <Analyzer/Utils.h>
#include <Columns/ColumnSet.h>
@@ -25,6 +32,7 @@
#include <Parsers/ASTIdentifier.h>
#include <Parsers/ASTLiteral.h>
#include <Parsers/ASTSelectQuery.h>
#include <Planner/PlannerActionsVisitor.h>
#include <Planner/Utils.h>
#include <Processors/ConcatProcessor.h>
#include <Processors/QueryPlan/BuildQueryPipelineSettings.h>
@@ -40,6 +48,7 @@
#include <QueryPipeline/narrowPipe.h>
#include <Storages/AlterCommands.h>
#include <Storages/SelectQueryInfo.h>
#include <Storages/StorageDistributed.h>
#include <Storages/StorageFactory.h>
#include <Storages/StorageMerge.h>
#include <Storages/StorageView.h>
@@ -51,6 +60,8 @@
#include <Common/assert_cast.h>
#include <Common/checkStackSize.h>
#include <Common/typeid_cast.h>
#include <Core/NamesAndTypes.h>
#include <Functions/FunctionFactory.h>

namespace
{
@@ -78,13 +89,13 @@ namespace DB

namespace ErrorCodes
{
-extern const int LOGICAL_ERROR;
extern const int BAD_ARGUMENTS;
extern const int NOT_IMPLEMENTED;
extern const int NUMBER_OF_ARGUMENTS_DOESNT_MATCH;
extern const int SAMPLING_NOT_SUPPORTED;
extern const int ALTER_OF_COLUMN_IS_FORBIDDEN;
extern const int CANNOT_EXTRACT_TABLE_STRUCTURE;
+extern const int LOGICAL_ERROR;
}

StorageMerge::DatabaseNameOrRegexp::DatabaseNameOrRegexp(
@@ -379,7 +390,14 @@ void ReadFromMerge::initializePipeline(QueryPipelineBuilder & pipeline, const Bu
const auto storage_metadata_snapshot = storage->getInMemoryMetadataPtr();
const auto nested_storage_snaphsot = storage->getStorageSnapshot(storage_metadata_snapshot, context);

-auto modified_query_info = getModifiedQueryInfo(query_info, context, table, nested_storage_snaphsot);
+Names column_names_as_aliases;
+Aliases aliases;
+
+Names real_column_names = column_names;
+if (child_plan.row_policy_data_opt)
+child_plan.row_policy_data_opt->extendNames(real_column_names);
+
+auto modified_query_info = getModifiedQueryInfo(context, table, nested_storage_snaphsot, real_column_names, column_names_as_aliases, aliases);

auto source_pipeline = createSources(
child_plan.plan,
@@ -512,7 +530,6 @@ std::vector<ReadFromMerge::ChildPlan> ReadFromMerge::createChildrenPlans(SelectQ
auto storage_metadata_snapshot = storage->getInMemoryMetadataPtr();
auto nested_storage_snaphsot = storage->getStorageSnapshot(storage_metadata_snapshot, context);

-auto modified_query_info = getModifiedQueryInfo(query_info, context, table, nested_storage_snaphsot);
Names column_names_as_aliases;
Names real_column_names = column_names;

@@ -528,6 +545,8 @@ std::vector<ReadFromMerge::ChildPlan> ReadFromMerge::createChildrenPlans(SelectQ
row_policy_data_opt->extendNames(real_column_names);
}

+auto modified_query_info = getModifiedQueryInfo(context, table, nested_storage_snaphsot, real_column_names, column_names_as_aliases, aliases);
+
if (!context->getSettingsRef().allow_experimental_analyzer)
{
auto storage_columns = storage_metadata_snapshot->getColumns();
@@ -580,6 +599,10 @@ std::vector<ReadFromMerge::ChildPlan> ReadFromMerge::createChildrenPlans(SelectQ
column_names_as_aliases.push_back(ExpressionActions::getSmallestColumn(storage_metadata_snapshot->getColumns().getAllPhysical()).name);
}
}
+else
+{
+
+}

res.back().plan = createPlanForTable(
nested_storage_snaphsot,
@@ -596,10 +619,198 @@ std::vector<ReadFromMerge::ChildPlan> ReadFromMerge::createChildrenPlans(SelectQ
return res;
}

SelectQueryInfo ReadFromMerge::getModifiedQueryInfo(const SelectQueryInfo & query_info,
const ContextPtr & modified_context,
namespace
{

class ApplyAliasColumnExpressionsVisitor : public InDepthQueryTreeVisitor<ApplyAliasColumnExpressionsVisitor>
{
public:
explicit ApplyAliasColumnExpressionsVisitor(QueryTreeNodePtr replacement_table_expression_)
: replacement_table_expression(replacement_table_expression_)
{}

void visitImpl(QueryTreeNodePtr & node)
{
if (auto * column = node->as<ColumnNode>(); column != nullptr)
{
if (column->hasExpression())
{
node = column->getExpressionOrThrow();
node->setAlias(column->getColumnName());
}
else
column->setColumnSource(replacement_table_expression);
}
}
private:
QueryTreeNodePtr replacement_table_expression;
};

bool hasUnknownColumn(const QueryTreeNodePtr & node, QueryTreeNodePtr replacement_table_expression)
{
QueryTreeNodes stack = { node };
while (!stack.empty())
{
auto current = stack.back();
stack.pop_back();

switch (current->getNodeType())
{
case QueryTreeNodeType::CONSTANT:
break;
case QueryTreeNodeType::COLUMN:
{
auto * column_node = current->as<ColumnNode>();
auto source = column_node->getColumnSourceOrNull();
if (source != replacement_table_expression)
return true;
break;
}
default:
{
for (const auto & child : current->getChildren())
{
if (child)
stack.push_back(child);
}
}
}
}
return false;
}

void replaceFilterExpression(
QueryTreeNodePtr & expression,
const QueryTreeNodePtr & replacement_table_expression,
const ContextPtr & context)
{
auto * function = expression->as<FunctionNode>();
if (!function)
return;

if (function->getFunctionName() != "and")
{
if (hasUnknownColumn(expression, replacement_table_expression))
expression = nullptr;
return;
}

QueryTreeNodes conjunctions;
QueryTreeNodes processing{ expression };

while (!processing.empty())
{
auto node = std::move(processing.back());
processing.pop_back();

if (auto * function_node = node->as<FunctionNode>())
{
if (function_node->getFunctionName() == "and")
std::copy(
function_node->getArguments().begin(),
function_node->getArguments().end(),
std::back_inserter(processing)
);
else
conjunctions.push_back(node);
}
else
{
conjunctions.push_back(node);
}
}

std::swap(processing, conjunctions);

for (const auto & node : processing)
{
if (!hasUnknownColumn(node, replacement_table_expression))
conjunctions.push_back(node);
}

if (conjunctions.empty())
{
expression = {};
return;
}
if (conjunctions.size() == 1)
{
expression = conjunctions[0];
return;
}

function->getArguments().getNodes() = std::move(conjunctions);

const auto function_impl = FunctionFactory::instance().get("and", context);
function->resolveAsFunction(function_impl->build(function->getArgumentColumns()));
}

QueryTreeNodePtr replaceTableExpressionAndRemoveJoin(
QueryTreeNodePtr query,
QueryTreeNodePtr original_table_expression,
QueryTreeNodePtr replacement_table_expression,
const ContextPtr & context,
const Names & required_column_names)
{
auto * query_node = query->as<QueryNode>();
auto join_tree_type = query_node->getJoinTree()->getNodeType();
auto modified_query = query_node->cloneAndReplace(original_table_expression, replacement_table_expression);

if (join_tree_type == QueryTreeNodeType::TABLE || join_tree_type == QueryTreeNodeType::TABLE_FUNCTION)
return modified_query;

auto * modified_query_node = modified_query->as<QueryNode>();

modified_query = modified_query->cloneAndReplace(modified_query_node->getJoinTree(), replacement_table_expression);
modified_query_node = modified_query->as<QueryNode>();

query_node = modified_query->as<QueryNode>();

if (query_node->hasPrewhere())
replaceFilterExpression(query_node->getPrewhere(), replacement_table_expression, context);
if (query_node->hasWhere())
replaceFilterExpression(query_node->getWhere(), replacement_table_expression, context);

query_node->getGroupBy().getNodes().clear();
query_node->getHaving() = {};
query_node->getOrderBy().getNodes().clear();

auto & projection = modified_query_node->getProjection().getNodes();
projection.clear();
NamesAndTypes projection_columns;

for (auto const & column_name : required_column_names)
{
QueryTreeNodePtr fake_node = std::make_shared<IdentifierNode>(Identifier{column_name});

QueryAnalysisPass query_analysis_pass(original_table_expression);
query_analysis_pass.run(fake_node, context);

auto * resolved_column = fake_node->as<ColumnNode>();
if (!resolved_column)
throw Exception(ErrorCodes::LOGICAL_ERROR, "Required column '{}' is not resolved", column_name);
auto fake_column = resolved_column->getColumn();

ApplyAliasColumnExpressionsVisitor visitor(replacement_table_expression);
visitor.visit(fake_node);

projection.push_back(fake_node);
projection_columns.push_back(fake_column);
}

query_node->resolveProjectionColumns(std::move(projection_columns));

return modified_query;
}

}

SelectQueryInfo ReadFromMerge::getModifiedQueryInfo(const ContextPtr & modified_context,
const StorageWithLockAndName & storage_with_lock_and_name,
const StorageSnapshotPtr & storage_snapshot)
const StorageSnapshotPtr & storage_snapshot,
Names required_column_names,
Names & column_names_as_aliases,
Aliases & aliases) const
{
const auto & [database_name, storage, storage_lock, table_name] = storage_with_lock_and_name;
const StorageID current_storage_id = storage->getStorageID();
@@ -612,8 +823,7 @@ SelectQueryInfo ReadFromMerge::getModifiedQueryInfo(const SelectQueryInfo & quer
if (query_info.table_expression_modifiers)
replacement_table_expression->setTableExpressionModifiers(*query_info.table_expression_modifiers);

-modified_query_info.query_tree = modified_query_info.query_tree->cloneAndReplace(modified_query_info.table_expression,
-replacement_table_expression);
+modified_query_info.query_tree = replaceTableExpressionAndRemoveJoin(modified_query_info.query_tree, modified_query_info.table_expression, replacement_table_expression, modified_context, required_column_names);
modified_query_info.table_expression = replacement_table_expression;
modified_query_info.planner_context->getOrCreateTableExpressionData(replacement_table_expression);

@@ -624,10 +834,65 @@ SelectQueryInfo ReadFromMerge::getModifiedQueryInfo(const SelectQueryInfo & quer
std::unordered_map<std::string, QueryTreeNodePtr> column_name_to_node;

if (!storage_snapshot->tryGetColumn(get_column_options, "_table"))
column_name_to_node.emplace("_table", std::make_shared<ConstantNode>(current_storage_id.table_name));
{
auto table_name_node = std::make_shared<ConstantNode>(current_storage_id.table_name);
table_name_node->setAlias("_table");
column_name_to_node.emplace("_table", table_name_node);
}

if (!storage_snapshot->tryGetColumn(get_column_options, "_database"))
column_name_to_node.emplace("_database", std::make_shared<ConstantNode>(current_storage_id.database_name));
{
auto database_name_node = std::make_shared<ConstantNode>(current_storage_id.database_name);
database_name_node->setAlias("_database");
column_name_to_node.emplace("_database", database_name_node);
}

auto storage_columns = storage_snapshot->metadata->getColumns();

bool with_aliases = /* common_processed_stage == QueryProcessingStage::FetchColumns && */ !storage_columns.getAliases().empty();
if (with_aliases)
{
auto filter_actions_dag = std::make_shared<ActionsDAG>();
for (const auto & column : required_column_names)
{
const auto column_default = storage_columns.getDefault(column);
bool is_alias = column_default && column_default->kind == ColumnDefaultKind::Alias;

QueryTreeNodePtr column_node;

if (is_alias)
{
QueryTreeNodePtr fake_node = std::make_shared<IdentifierNode>(Identifier{column});

QueryAnalysisPass query_analysis_pass(modified_query_info.table_expression);
query_analysis_pass.run(fake_node, modified_context);

auto * resolved_column = fake_node->as<ColumnNode>();

column_node = fake_node;
ApplyAliasColumnExpressionsVisitor visitor(replacement_table_expression);
visitor.visit(column_node);

if (!resolved_column || !resolved_column->getExpression())
throw Exception(ErrorCodes::LOGICAL_ERROR, "Alias column is not resolved");

column_name_to_node.emplace(column, column_node);
aliases.push_back({ .name = column, .type = resolved_column->getResultType(), .expression = column_node->toAST() });
}
else
{
column_node = std::make_shared<ColumnNode>(NameAndTypePair{column, storage_columns.getColumn(get_column_options, column).type }, modified_query_info.table_expression);
}

PlannerActionsVisitor actions_visitor(modified_query_info.planner_context, false /*use_column_identifier_as_action_node_name*/);
actions_visitor.visit(filter_actions_dag, column_node);
}
column_names_as_aliases = filter_actions_dag->getRequiredColumnsNames();
if (column_names_as_aliases.empty())
column_names_as_aliases.push_back(ExpressionActions::getSmallestColumn(storage_snapshot->metadata->getColumns().getAllPhysical()).name);
}

if (!column_name_to_node.empty())
{
@@ -756,7 +1021,7 @@ QueryPipelineBuilderPtr ReadFromMerge::createSources(

/// Subordinary tables could have different but convertible types, like numeric types of different width.
/// We must return streams with structure equals to structure of Merge table.
-convertAndFilterSourceStream(header, storage_snapshot->metadata, aliases, row_policy_data_opt, modified_context, *builder, processed_stage);
+convertAndFilterSourceStream(header, modified_query_info, storage_snapshot, aliases, row_policy_data_opt, modified_context, *builder, processed_stage);
}

return builder;
@ -1107,38 +1372,73 @@ void StorageMerge::alter(

void ReadFromMerge::convertAndFilterSourceStream(
const Block & header,
const StorageMetadataPtr & metadata_snapshot,
SelectQueryInfo & modified_query_info,
const StorageSnapshotPtr & snapshot,
const Aliases & aliases,
const RowPolicyDataOpt & row_policy_data_opt,
ContextPtr local_context,
ContextMutablePtr local_context,
QueryPipelineBuilder & builder,
QueryProcessingStage::Enum processed_stage)
{
Block before_block_header = builder.getHeader();

auto storage_sample_block = metadata_snapshot->getSampleBlock();
auto storage_sample_block = snapshot->metadata->getSampleBlock();
auto pipe_columns = builder.getHeader().getNamesAndTypesList();

for (const auto & alias : aliases)
if (local_context->getSettingsRef().allow_experimental_analyzer)
{
pipe_columns.emplace_back(NameAndTypePair(alias.name, alias.type));
ASTPtr expr = alias.expression;
auto syntax_result = TreeRewriter(local_context).analyze(expr, pipe_columns);
auto expression_analyzer = ExpressionAnalyzer{alias.expression, syntax_result, local_context};

auto dag = std::make_shared<ActionsDAG>(pipe_columns);
auto actions_dag = expression_analyzer.getActionsDAG(true, false);
auto actions = std::make_shared<ExpressionActions>(actions_dag, ExpressionActionsSettings::fromContext(local_context, CompileExpressions::yes));

builder.addSimpleTransform([&](const Block & stream_header)
for (const auto & alias : aliases)
{
return std::make_shared<ExpressionTransform>(stream_header, actions);
});
pipe_columns.emplace_back(NameAndTypePair(alias.name, alias.type));

auto actions_dag = std::make_shared<ActionsDAG>(pipe_columns);

QueryTreeNodePtr query_tree = buildQueryTree(alias.expression, local_context);
query_tree->setAlias(alias.name);

QueryAnalysisPass query_analysis_pass(modified_query_info.table_expression);
query_analysis_pass.run(query_tree, local_context);

PlannerActionsVisitor actions_visitor(modified_query_info.planner_context, false /*use_column_identifier_as_action_node_name*/);
const auto & nodes = actions_visitor.visit(actions_dag, query_tree);

if (nodes.size() != 1)
throw Exception(ErrorCodes::LOGICAL_ERROR, "Expected to have 1 output but got {}", nodes.size());

actions_dag->addOrReplaceInOutputs(actions_dag->addAlias(*nodes.front(), alias.name));

auto actions = std::make_shared<ExpressionActions>(actions_dag, ExpressionActionsSettings::fromContext(local_context, CompileExpressions::yes));

builder.addSimpleTransform([&](const Block & stream_header)
{
return std::make_shared<ExpressionTransform>(stream_header, actions);
});
}
}
else
{
for (const auto & alias : aliases)
{
pipe_columns.emplace_back(NameAndTypePair(alias.name, alias.type));
ASTPtr expr = alias.expression;
auto syntax_result = TreeRewriter(local_context).analyze(expr, pipe_columns);
auto expression_analyzer = ExpressionAnalyzer{alias.expression, syntax_result, local_context};

auto dag = std::make_shared<ActionsDAG>(pipe_columns);
auto actions_dag = expression_analyzer.getActionsDAG(true, false);
auto actions = std::make_shared<ExpressionActions>(actions_dag, ExpressionActionsSettings::fromContext(local_context, CompileExpressions::yes));

builder.addSimpleTransform([&](const Block & stream_header)
{
return std::make_shared<ExpressionTransform>(stream_header, actions);
});
}
}

ActionsDAG::MatchColumnsMode convert_actions_match_columns_mode = ActionsDAG::MatchColumnsMode::Name;

if (local_context->getSettingsRef().allow_experimental_analyzer && processed_stage != QueryProcessingStage::FetchColumns)
if (local_context->getSettingsRef().allow_experimental_analyzer
&& (processed_stage != QueryProcessingStage::FetchColumns || dynamic_cast<const StorageDistributed *>(&snapshot->storage) != nullptr))
convert_actions_match_columns_mode = ActionsDAG::MatchColumnsMode::Position;

if (row_policy_data_opt)
@ -189,6 +189,13 @@ private:

using Aliases = std::vector<AliasData>;

SelectQueryInfo getModifiedQueryInfo(const ContextPtr & modified_context,
const StorageWithLockAndName & storage_with_lock_and_name,
const StorageSnapshotPtr & storage_snapshot,
Names required_column_names,
Names & column_names_as_aliases,
Aliases & aliases) const;

/// An object of this helper class is created
/// when processing a Merge table data source (subordinary table)
/// that has row policies
@ -261,17 +268,13 @@ private:
ContextMutablePtr modified_context,
bool concat_streams = false) const;

static SelectQueryInfo getModifiedQueryInfo(const SelectQueryInfo & query_info,
const ContextPtr & modified_context,
const StorageWithLockAndName & storage_with_lock_and_name,
const StorageSnapshotPtr & storage_snapshot);

static void convertAndFilterSourceStream(
const Block & header,
const StorageMetadataPtr & metadata_snapshot,
SelectQueryInfo & modified_query_info,
const StorageSnapshotPtr & snapshot,
const Aliases & aliases,
const RowPolicyDataOpt & row_policy_data_opt,
ContextPtr context,
ContextMutablePtr context,
QueryPipelineBuilder & builder,
QueryProcessingStage::Enum processed_stage);

@ -4,10 +4,8 @@
01062_pm_all_join_with_block_continuation
01083_expressions_in_engine_arguments
01155_rename_move_materialized_view
01214_test_storage_merge_aliases_with_where
01244_optimize_distributed_group_by_sharding_key
01268_shard_avgweighted
01560_merge_distributed_join
01584_distributed_buffer_cannot_find_column
01624_soft_constraints
01656_test_query_log_factories_info
@ -16,13 +14,11 @@
01747_join_view_filter_dictionary
01761_cast_to_enum_nullable
01925_join_materialized_columns
01925_test_storage_merge_aliases
01952_optimize_distributed_group_by_sharding_key
02174_cte_scalar_cache_mv
02354_annoy
02428_parameterized_view
02493_inconsistent_hex_and_binary_number
02575_merge_prewhere_different_default_kind
02725_agg_projection_resprect_PK
02763_row_policy_storage_merge_alias
02818_parameterized_view_with_cte_multiple_usage

@ -293,7 +293,7 @@ SELECT
{'age':'31','last_key':'last_value','name':'neymar','nationality':'brazil','team':'psg'}
-- { echoOn }

SET extract_key_value_pairs_max_pairs_per_row = 2;
SET extract_kvp_max_pairs_per_row = 2;
-- Should be allowed because it no longer exceeds the max number of pairs
-- expected output: {'key1':'value1','key2':'value2'}
WITH
@ -307,7 +307,7 @@ WITH
SELECT
x;
{'key1':'value1','key2':'value2'}
SET extract_key_value_pairs_max_pairs_per_row = 0;
SET extract_kvp_max_pairs_per_row = 0;
-- Should be allowed because max pairs per row is set to 0 (unlimited)
-- expected output: {'key1':'value1','key2':'value2'}
WITH
@ -415,7 +415,7 @@ SELECT
x; -- {serverError NUMBER_OF_ARGUMENTS_DOESNT_MATCH}

-- Should fail because it exceeds the max number of pairs
SET extract_key_value_pairs_max_pairs_per_row = 1;
SET extract_kvp_max_pairs_per_row = 1;
WITH
extractKeyValuePairs('key1:value1,key2:value2') AS s_map,
CAST(
@ -429,7 +429,7 @@ SELECT

-- { echoOn }

SET extract_key_value_pairs_max_pairs_per_row = 2;
SET extract_kvp_max_pairs_per_row = 2;
-- Should be allowed because it no longer exceeds the max number of pairs
-- expected output: {'key1':'value1','key2':'value2'}
WITH
@ -443,7 +443,7 @@ WITH
SELECT
x;

SET extract_key_value_pairs_max_pairs_per_row = 0;
SET extract_kvp_max_pairs_per_row = 0;
-- Should be allowed because max pairs per row is set to 0 (unlimited)
-- expected output: {'key1':'value1','key2':'value2'}
WITH
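The hunks above only swap one name of the pair-limit setting for the other; the behaviour they assert is unchanged. An illustrative recap using only values and expectations that already appear in this test (the setting name and expected map are taken from the lines above):

SET extract_kvp_max_pairs_per_row = 2;
SELECT extractKeyValuePairs('key1:value1,key2:value2');
-- expected, per the comments in the test: {'key1':'value1','key2':'value2'}
-- with the limit set to 1 the same input is expected to fail, and 0 means unlimited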
124 tests/queries/0_stateless/02986_leftpad_fixedstring.reference Normal file
@ -0,0 +1,124 @@

a String

a String
1 1 1
61 1 1 1
6162 1 1 1
616263 1 1 1
61626333 1 1 1
6162633334 1 1 1
616263333433 1 1 1
61626333343332 1 1 1
6162633334333234 1 1 1
206162633334333234 1 1 1
20206162633334333234 1 1 1
2020206162633334333234 1 1 1
202020206162633334333234 1 1 1
20202020206162633334333234 1 1 1
2020202020206162633334333234 1 1 1
202020202020206162633334333234 1 1 1
20202020202020206162633334333234 1 1 1
2020202020202020206162633334333234 1 1 1
202020202020202020206162633334333234 1 1 1
20202020202020202020206162633334333234 1 1 1
1 1 1
61 1 1 1
6162 1 1 1
616263 1 1 1
61626333 1 1 1
6162633334 1 1 1
616263333433 1 1 1
61626333343332 1 1 1
6162633334333234 1 1 1
616263333433323420 1 1 1
61626333343332342020 1 1 1
6162633334333234202020 1 1 1
616263333433323420202020 1 1 1
61626333343332342020202020 1 1 1
6162633334333234202020202020 1 1 1
616263333433323420202020202020 1 1 1
61626333343332342020202020202020 1 1 1
6162633334333234202020202020202020 1 1 1
616263333433323420202020202020202020 1 1 1
61626333343332342020202020202020202020 1 1 1
1
61 1
6162 1
616263 1
61626333 1
6162633334 1
616263333433 1
61626333343332 1
6162633334333234 1
F09F87AA6162633334333234 1
F09F87AAF09F87B86162633334333234 1
F09F87AAF09F87B8F09F87AA6162633334333234 1
F09F87AAF09F87B8F09F87AAF09F87B86162633334333234 1
F09F87AAF09F87B8F09F87AAF09F87B8F09F87AA6162633334333234 1
F09F87AAF09F87B8F09F87AAF09F87B8F09F87AAF09F87B86162633334333234 1
F09F87AAF09F87B8F09F87AAF09F87B8F09F87AAF09F87B8F09F87AA6162633334333234 1
F09F87AAF09F87B8F09F87AAF09F87B8F09F87AAF09F87B8F09F87AAF09F87B86162633334333234 1
F09F87AAF09F87B8F09F87AAF09F87B8F09F87AAF09F87B8F09F87AAF09F87B8F09F87AA6162633334333234 1
F09F87AAF09F87B8F09F87AAF09F87B8F09F87AAF09F87B8F09F87AAF09F87B8F09F87AAF09F87B86162633334333234 1
F09F87AAF09F87B8F09F87AAF09F87B8F09F87AAF09F87B8F09F87AAF09F87B8F09F87AAF09F87B8F09F87AA6162633334333234 1
1
61 1
6162 1
616263 1
61626333 1
6162633334 1
616263333433 1
61626333343332 1
6162633334333234 1
6162633334333234F09F87AA 1
6162633334333234F09F87AAF09F87B8 1
6162633334333234F09F87AAF09F87B8F09F87AA 1
6162633334333234F09F87AAF09F87B8F09F87AAF09F87B8 1
6162633334333234F09F87AAF09F87B8F09F87AAF09F87B8F09F87AA 1
6162633334333234F09F87AAF09F87B8F09F87AAF09F87B8F09F87AAF09F87B8 1
6162633334333234F09F87AAF09F87B8F09F87AAF09F87B8F09F87AAF09F87B8F09F87AA 1
6162633334333234F09F87AAF09F87B8F09F87AAF09F87B8F09F87AAF09F87B8F09F87AAF09F87B8 1
6162633334333234F09F87AAF09F87B8F09F87AAF09F87B8F09F87AAF09F87B8F09F87AAF09F87B8F09F87AA 1
6162633334333234F09F87AAF09F87B8F09F87AAF09F87B8F09F87AAF09F87B8F09F87AAF09F87B8F09F87AAF09F87B8 1
6162633334333234F09F87AAF09F87B8F09F87AAF09F87B8F09F87AAF09F87B8F09F87AAF09F87B8F09F87AAF09F87B8F09F87AA 1
1
F09F87AA 1
F09F87AAF09F87B8 1
C391F09F87AAF09F87B8 1
C391C391F09F87AAF09F87B8 1
C391C391C391F09F87AAF09F87B8 1
C391C391C391C391F09F87AAF09F87B8 1
C391C391C391C391C391F09F87AAF09F87B8 1
C391C391C391C391C391C391F09F87AAF09F87B8 1
C391C391C391C391C391C391C391F09F87AAF09F87B8 1
C391C391C391C391C391C391C391C391F09F87AAF09F87B8 1
C391C391C391C391C391C391C391C391C391F09F87AAF09F87B8 1
C391C391C391C391C391C391C391C391C391C391F09F87AAF09F87B8 1
C391C391C391C391C391C391C391C391C391C391C391F09F87AAF09F87B8 1
C391C391C391C391C391C391C391C391C391C391C391C391F09F87AAF09F87B8 1
C391C391C391C391C391C391C391C391C391C391C391C391C391F09F87AAF09F87B8 1
C391C391C391C391C391C391C391C391C391C391C391C391C391C391F09F87AAF09F87B8 1
C391C391C391C391C391C391C391C391C391C391C391C391C391C391C391F09F87AAF09F87B8 1
C391C391C391C391C391C391C391C391C391C391C391C391C391C391C391C391F09F87AAF09F87B8 1
C391C391C391C391C391C391C391C391C391C391C391C391C391C391C391C391C391F09F87AAF09F87B8 1
1
F09F87AA 1
F09F87AAF09F87B8 1
F09F87AAF09F87B8C391 1
F09F87AAF09F87B8C391C391 1
F09F87AAF09F87B8C391C391C391 1
F09F87AAF09F87B8C391C391C391C391 1
F09F87AAF09F87B8C391C391C391C391C391 1
F09F87AAF09F87B8C391C391C391C391C391C391 1
F09F87AAF09F87B8C391C391C391C391C391C391C391 1
F09F87AAF09F87B8C391C391C391C391C391C391C391C391 1
F09F87AAF09F87B8C391C391C391C391C391C391C391C391C391 1
F09F87AAF09F87B8C391C391C391C391C391C391C391C391C391C391 1
F09F87AAF09F87B8C391C391C391C391C391C391C391C391C391C391C391 1
F09F87AAF09F87B8C391C391C391C391C391C391C391C391C391C391C391C391 1
F09F87AAF09F87B8C391C391C391C391C391C391C391C391C391C391C391C391C391 1
F09F87AAF09F87B8C391C391C391C391C391C391C391C391C391C391C391C391C391C391 1
F09F87AAF09F87B8C391C391C391C391C391C391C391C391C391C391C391C391C391C391C391 1
F09F87AAF09F87B8C391C391C391C391C391C391C391C391C391C391C391C391C391C391C391C391 1
F09F87AAF09F87B8C391C391C391C391C391C391C391C391C391C391C391C391C391C391C391C391C391 1
41 tests/queries/0_stateless/02986_leftpad_fixedstring.sql Normal file
@ -0,0 +1,41 @@
-- https://github.com/ClickHouse/ClickHouse/issues/59604
SELECT leftPad(toFixedString('abc', 3), 0), leftPad('abc', CAST('0', 'Int32'));
SELECT leftPad(toFixedString('abc343243424324', 15), 1) as a, toTypeName(a);

SELECT rightPad(toFixedString('abc', 3), 0), rightPad('abc', CAST('0', 'Int32'));
SELECT rightPad(toFixedString('abc343243424324', 15), 1) as a, toTypeName(a);

SELECT
hex(leftPad(toFixedString('abc34324' as s, 8), number)) as result,
hex(leftPad(s, number)) = result,
hex(leftPadUTF8(toFixedString(s, 8), number)) = result,
hex(leftPadUTF8(s, number)) = result
FROM numbers(20);

SELECT
hex(rightPad(toFixedString('abc34324' as s, 8), number)) as result,
hex(rightPad(s, number)) = result,
hex(rightPadUTF8(toFixedString(s, 8), number)) = result,
hex(rightPadUTF8(s, number)) = result
FROM numbers(20);

-- I'm not confident the behaviour should be like this. I'm only testing memory problems
SELECT
hex(leftPadUTF8(toFixedString('abc34324' as s, 8), number, '🇪🇸')) as result,
hex(leftPadUTF8(s, number, '🇪🇸')) = result
FROM numbers(20);

SELECT
hex(rightPadUTF8(toFixedString('abc34324' as s, 8), number, '🇪🇸')) as result,
hex(rightPadUTF8(s, number, '🇪🇸')) = result
FROM numbers(20);

SELECT
hex(leftPadUTF8(toFixedString('🇪🇸' as s, 8), number, 'Ñ')) as result,
hex(leftPadUTF8(s, number, 'Ñ')) = result
FROM numbers(20);

SELECT
hex(rightPadUTF8(toFixedString('🇪🇸' as s, 8), number, 'Ñ')) as result,
hex(rightPadUTF8(s, number, 'Ñ')) = result
FROM numbers(20);
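For readers skimming the new test: the reference file above records that padding a FixedString produces a plain String result, and the test's own comment says it mainly guards against memory problems with FixedString arguments (see the linked issue). One line lifted verbatim from the test, with its output as recorded in the reference file above:

SELECT leftPad(toFixedString('abc343243424324', 15), 1) as a, toTypeName(a);
-- output per the reference above: a String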