This commit is contained in:
kssenii 2022-03-24 17:10:04 +01:00
parent 9282f3ff5e
commit e91e30aaa4
3 changed files with 26 additions and 1 deletion

View File

@@ -660,8 +660,14 @@ Pipe StorageS3::read(
     Block block_for_format;
     if (isColumnOriented())
     {
+        auto fetch_columns = column_names;
+        fetch_columns.erase(std::remove_if(fetch_columns.begin(), fetch_columns.end(),
+            [](const String & col) { return col == "_path" || col == "_file"; }), fetch_columns.end());
+        if (fetch_columns.empty())
+            fetch_columns.push_back(ExpressionActions::getSmallestColumn(storage_snapshot->metadata->getColumns().getAllPhysical()));
+
         columns_description = ColumnsDescription{
-            storage_snapshot->getSampleBlockForColumns(column_names).getNamesAndTypesList()};
+            storage_snapshot->getSampleBlockForColumns(fetch_columns).getNamesAndTypesList()};
         block_for_format = storage_snapshot->getSampleBlockForColumns(columns_description.getNamesOfPhysical());
     }
     else
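
Note on the pattern in this hunk: it is the standard C++ erase-remove idiom. The virtual columns _path and _file are stripped from the list of requested columns, and if the list ends up empty (the query selected only virtual columns), one physical column is added back so the format reader still produces a block with the right number of rows. Below is a minimal standalone sketch of the idea, with placeholder column names and a made-up fallback value rather than the actual ClickHouse API:

#include <algorithm>
#include <iostream>
#include <string>
#include <vector>

int main()
{
    /// Columns requested by the query; here only virtual columns are present.
    std::vector<std::string> fetch_columns = {"_path", "_file"};

    /// Erase-remove idiom: remove_if compacts the surviving elements to the front and
    /// returns the new logical end; erase(first, last) then drops the leftover tail.
    fetch_columns.erase(
        std::remove_if(fetch_columns.begin(), fetch_columns.end(),
            [](const std::string & col) { return col == "_path" || col == "_file"; }),
        fetch_columns.end());

    /// Fallback: if only virtual columns were requested, read one (hypothetical)
    /// physical column so the source still knows how many rows to emit.
    if (fetch_columns.empty())
        fetch_columns.push_back("some_physical_column");

    for (const auto & col : fetch_columns)
        std::cout << col << '\n';
}

Passing only the iterator returned by std::remove_if to erase would delete a single element and leave stale values at the tail, which is why the two-argument erase(first, last) form matters here.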

View File

@@ -554,6 +554,16 @@ def test_insert_select_schema_inference(started_cluster):
     assert int(result) == 1


+def test_virtual_column(started_cluster):
+    hdfs_api = started_cluster.hdfs_api
+    table_function = (
+        f"hdfs('hdfs://hdfs1:9000/parquet', 'Parquet', 'a Int32, b String')"
+    )
+    node1.query(f"insert into table function {table_function} SELECT 1, 'kek'")
+    result = node1.query(f"SELECT _path FROM {table_function}")
+    assert result.strip() == "parquet"
+
+
 if __name__ == "__main__":
     cluster.start()
     input("Cluster created, press any key to destroy...")

View File

@@ -1375,3 +1375,12 @@ def test_insert_select_schema_inference(started_cluster):
         f"select * from s3('http://{started_cluster.minio_host}:{started_cluster.minio_port}/{bucket}/test_insert_select.native')"
     )
     assert int(result) == 1
+
+
+def test_virtual_columns(started_cluster):
+    bucket = started_cluster.minio_bucket
+    instance = started_cluster.instances["dummy"]  # type: ClickHouseInstance
+    name = "test_table"
+    result = instance.query("SELECT _path FROM s3(s3_parquet, format='Parquet')")
+    assert result.strip() == "root/test_parquet"