Merge remote-tracking branch 'upstream/master' into parallelize-file-cache-metadata-download
commit 170adaafca
@@ -1918,6 +1918,9 @@ void QueryAnalyzer::evaluateScalarSubqueryIfNeeded(QueryTreeNodePtr & node, Iden
     subquery_settings.max_result_rows = 1;
     subquery_settings.extremes = false;
     subquery_context->setSettings(subquery_settings);
+    /// When executing `INSERT INTO t WITH ... SELECT ...`, it may lead to an `Unknown columns`
+    /// exception with this setting enabled (https://github.com/ClickHouse/ClickHouse/issues/52494).
+    subquery_context->setSetting("use_structure_from_insertion_table_in_table_functions", false);

     auto options = SelectQueryOptions(QueryProcessingStage::Complete, scope.subquery_depth, true /*is_subquery*/);
     auto interpreter = std::make_unique<InterpreterSelectQueryAnalyzer>(node->toAST(), subquery_context, options);
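For context, the failure this guards against has roughly the following shape (a minimal sketch with hypothetical table names; the real reproducer is in issue 52494, and the regression test added below uses the same pattern):

    -- With use_structure_from_insertion_table_in_table_functions enabled, the
    -- scalar subquery in WITH could be analyzed against the insertion table's
    -- structure and fail with `Unknown columns`; the change above disables the
    -- setting while analyzing scalar subqueries.
    INSERT INTO dst
    WITH (SELECT max(x) FROM src) AS m
    SELECT m;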
@@ -58,6 +58,10 @@ bool ParserJSONPathMemberAccess::parseImpl(Pos & pos, ASTPtr & node, Expected &
         member_name = std::make_shared<ASTIdentifier>(String(last_begin, pos->end));
         ++pos;
     }
+    else if (!pos.isValid() && pos->type == TokenType::EndOfStream)
+    {
+        member_name = std::make_shared<ASTIdentifier>(String(last_begin, last_end));
+    }
     else
     {
         return false;
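The new branch accepts a member access whose token runs to the end of the stream, which is what happens when a key in dot notation lexes as a number. The tests added further down exercise exactly this; for example:

    -- From the tests in this commit: keys that start with a digit now work
    -- in dot notation and return the expected result.
    SELECT JSON_QUERY('{"1key":1}', '$.1key'); -- [1]
    SELECT JSON_QUERY('{"123":1}', '$.123');   -- [1]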
@@ -77,6 +77,10 @@ static auto getQueryInterpreter(const ASTSubquery & subquery, ExecuteScalarSubqu
     subquery_settings.max_result_rows = 1;
     subquery_settings.extremes = false;
     subquery_context->setSettings(subquery_settings);
+
+    /// When executing `INSERT INTO t WITH ... SELECT ...`, it may lead to an `Unknown columns`
+    /// exception with this setting enabled (https://github.com/ClickHouse/ClickHouse/issues/52494).
+    subquery_context->getQueryContext()->setSetting("use_structure_from_insertion_table_in_table_functions", false);
     if (!data.only_analyze && subquery_context->hasQueryContext())
     {
         /// Save current cached scalars in the context before analyzing the query
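This is the same guard as in QueryAnalyzer above, applied on the old analyzer's scalar-subquery path; here the setting is cleared on the query context. On builds without the fix, the likely workaround is to disable the setting for the whole query (an assumption, not part of this commit):

    -- Hedged workaround sketch: turn off structure inference from the
    -- insertion table for the session or query.
    SET use_structure_from_insertion_table_in_table_functions = 0;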
@@ -77,6 +77,10 @@ SELECT JSON_QUERY('{"array":[[0, 1, 2, 3, 4, 5], [0, -1, -2, -3, -4, -5]]}', '$.
 [0, 1, 4, 0, -1, -4]
+SELECT JSON_QUERY('{"1key":1}', '$.1key');
+[1]
+SELECT JSON_QUERY('{"123":1}', '$.123');
+[1]
 SELECT JSON_QUERY('{"123":1}', '$[123]');

 SELECT JSON_QUERY('{"hello":1}', '$[hello]');
 [1]
 SELECT JSON_QUERY('{"hello":1}', '$["hello"]');
@@ -43,6 +43,8 @@ SELECT JSON_QUERY( '{hello:{"world":"!"}}}', '$.hello'); -- invalid json => defa
 SELECT JSON_QUERY('', '$.hello');
 SELECT JSON_QUERY('{"array":[[0, 1, 2, 3, 4, 5], [0, -1, -2, -3, -4, -5]]}', '$.array[*][0 to 2, 4]');
+SELECT JSON_QUERY('{"1key":1}', '$.1key');
+SELECT JSON_QUERY('{"123":1}', '$.123');
 SELECT JSON_QUERY('{"123":1}', '$[123]');
 SELECT JSON_QUERY('{"hello":1}', '$[hello]');
 SELECT JSON_QUERY('{"hello":1}', '$["hello"]');
 SELECT JSON_QUERY('{"hello":1}', '$[\'hello\']');
@@ -0,0 +1,3 @@
+user127	1
+user405	1
+user902	1
tests/queries/0_stateless/02843_insertion_table_schema_infer.sh (new executable file, 16 lines)
@@ -0,0 +1,16 @@
+#!/usr/bin/env bash
+# Tags: no-parallel, no-fasttest
+
+set -e
+
+CUR_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
+# shellcheck source=../shell_config.sh
+. "$CUR_DIR"/../shell_config.sh
+
+DATA_DIR=$CUR_DIR/data_tsv
+
+$CLICKHOUSE_LOCAL --multiquery \
+"CREATE VIEW users AS SELECT * FROM file('$DATA_DIR/mock_data.tsv', TSVWithNamesAndTypes);
+CREATE TABLE users_output (name String, tag UInt64) ENGINE = Memory;
+INSERT INTO users_output WITH (SELECT groupUniqArrayArray(mapKeys(Tags)) FROM users) AS unique_tags SELECT UserName AS name, length(unique_tags) AS tag FROM users;
+SELECT * FROM users_output;"
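The test exercises the fix end to end: the INSERT carries a scalar subquery in its WITH clause while reading through a view over file(), the exact pattern that previously raised `Unknown columns`. Since every row in the mock data has the single map key 'test', length(unique_tags) is 1, matching the three-line reference file added above:

    user127 1
    user405 1
    user902 1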
@@ -10,3 +10,4 @@ set max_threads=1;
 select trimLeft(explain) from (explain pipeline SELECT DISTINCT id, v FROM t_sparse_distinct) where explain ilike '%DistinctSortedChunkTransform%';
 DistinctSortedChunkTransform
+SELECT DISTINCT id, v FROM t_sparse_distinct format Null;
 DROP TABLE t_sparse_distinct;
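The added SELECT actually executes the DISTINCT through DistinctSortedChunkTransform and discards the result with format Null, so the test now catches runtime failures in that transform rather than only checking that it appears in the pipeline. For orientation, a table hits this path when its columns are serialized sparsely; a hypothetical definition (the real one lives earlier in the test file) might look like:

    CREATE TABLE t_sparse_distinct (id UInt32, v String)
    ENGINE = MergeTree ORDER BY id
    SETTINGS ratio_of_defaults_for_sparse_serialization = 0.9;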
tests/queries/0_stateless/data_tsv/mock_data.tsv (new file, 5 lines)
@@ -0,0 +1,5 @@
+UserName	Age	Tags
+String	Int8	Map(String, UInt64)
+user127	20	{'test': 123}
+user405	43	{'test': 123}
+user902	43	{'test': 123}
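The first two rows are the names and types header that the TSVWithNamesAndTypes format expects, so reading the file back yields typed columns directly. A sketch of how the test's view sees this data (path relative to the test's DATA_DIR):

    SELECT UserName, Age, mapKeys(Tags)
    FROM file('data_tsv/mock_data.tsv', TSVWithNamesAndTypes);
    -- user127  20  ['test']   (likewise for user405 and user902)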