mirror of
https://github.com/ClickHouse/ClickHouse.git
synced 2024-11-21 15:12:02 +00:00
Fixed error with StorageMerge [#CLICKHOUSE-2].
This commit is contained in:
parent
222fb2c945
commit
02f7cc2d04
@ -192,7 +192,7 @@ BlockInputStreams StorageMerge::read(
|
||||
if (real_column_names.size() == 0)
|
||||
real_column_names.push_back(ExpressionActions::getSmallestColumn(table->getColumnsList()));
|
||||
|
||||
/// Substitute virtual column for its value. NOTE This looks terribly wrong.
|
||||
/// Substitute virtual column for its value when querying tables.
|
||||
ASTPtr modified_query_ast = query->clone();
|
||||
VirtualColumnUtils::rewriteEntityInAst(modified_query_ast, "_table", table->getTableName());
|
||||
|
||||
@ -220,6 +220,12 @@ BlockInputStreams StorageMerge::read(
|
||||
throw Exception("Source tables for Merge table are processing data up to different stages",
|
||||
ErrorCodes::INCOMPATIBLE_SOURCE_TABLES);
|
||||
|
||||
/// The table may return excessive columns if we query only its virtual column.
|
||||
/// We filter excessive columns. This is done only if query was not processed more than FetchColumns.
|
||||
if (processed_stage_in_source_table == QueryProcessingStage::FetchColumns)
|
||||
for (auto & stream : source_streams)
|
||||
stream = std::make_shared<FilterColumnsBlockInputStream>(stream, real_column_names, true);
|
||||
|
||||
/// Subordinary tables could have different but convertible types, like numeric types of different width.
|
||||
/// We must return streams with structure equals to structure of Merge table.
|
||||
for (auto & stream : source_streams)
|
||||
@ -249,6 +255,10 @@ BlockInputStreams StorageMerge::read(
|
||||
throw Exception("Source tables for Merge table are processing data up to different stages",
|
||||
ErrorCodes::INCOMPATIBLE_SOURCE_TABLES);
|
||||
|
||||
if (processed_stage_in_source_table == QueryProcessingStage::FetchColumns)
|
||||
for (auto & stream : streams)
|
||||
stream = std::make_shared<FilterColumnsBlockInputStream>(stream, real_column_names, true);
|
||||
|
||||
auto stream = streams.empty() ? std::make_shared<NullBlockInputStream>(header) : streams.front();
|
||||
if (!streams.empty())
|
||||
{
|
||||
|
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,18 @@
|
||||
-- Regression test for StorageMerge with sampled MergeTree tables
-- (virtual column _sample_factor read through the merge() table function).

-- Start from a clean slate; IF EXISTS makes the script re-runnable.
DROP TABLE IF EXISTS test.sample1;
DROP TABLE IF EXISTS test.sample2;
DROP TABLE IF EXISTS test.sample_merge;

-- Two structurally identical MergeTree tables with a sampling key
-- (old-style MergeTree syntax: date column, sampling expression,
-- primary key, index granularity).
CREATE TABLE test.sample1 (x UInt64, d Date DEFAULT today()) ENGINE = MergeTree(d, intHash64(x), intHash64(x), 10);
CREATE TABLE test.sample2 (x UInt64, d Date DEFAULT today()) ENGINE = MergeTree(d, intHash64(x), intHash64(x), 10);

-- Different row counts so the two source tables are distinguishable.
INSERT INTO test.sample1 (x) SELECT number AS x FROM system.numbers LIMIT 1000;
INSERT INTO test.sample2 (x) SELECT number AS x FROM system.numbers LIMIT 2000;

-- Merge table over both sample tables ('^sample\\d$' matches sample1/sample2).
-- NOTE(review): created but not queried below — presumably kept to exercise
-- table creation over sampled sources; confirm against the original test.
CREATE TABLE test.sample_merge AS test.sample1 ENGINE = Merge(test, '^sample\\d$');

-- Single thread for a deterministic stream order in the result.
SET max_threads = 1;
-- Query only the virtual column via the merge() table function; this is the
-- case the fix addresses (reading no real columns from the source tables).
SELECT _sample_factor FROM merge(test, '^sample\\d$');

-- Cleanup.
DROP TABLE test.sample1;
DROP TABLE test.sample2;
DROP TABLE test.sample_merge;
|
Loading…
Reference in New Issue
Block a user