From 328213f5d9d64e1afa1d792cb2d7b4184fa8edbb Mon Sep 17 00:00:00 2001
From: Vladimir Chebotarev
Date: Mon, 31 May 2021 11:46:28 +0300
Subject: [PATCH] Fixes.

---
 src/Storages/StorageS3.cpp                |  6 ++++--
 tests/integration/test_storage_s3/test.py | 19 +++++++------------
 2 files changed, 11 insertions(+), 14 deletions(-)

diff --git a/src/Storages/StorageS3.cpp b/src/Storages/StorageS3.cpp
index 3e6876fad77..67849c11e28 100644
--- a/src/Storages/StorageS3.cpp
+++ b/src/Storages/StorageS3.cpp
@@ -2,6 +2,8 @@
 
 #if USE_AWS_S3
 
+#include <Columns/ColumnString.h>
+
 #include
 #include
 #include
@@ -379,13 +381,13 @@ public:
         current_block_with_partition_by_expr.setColumns(columns);
         partition_by_expr->execute(current_block_with_partition_by_expr);
 
-        const auto & key_column = current_block_with_partition_by_expr.getByName(partition_by_column_name);
+        const auto * key_column = checkAndGetColumn<ColumnString>(current_block_with_partition_by_expr.getByName(partition_by_column_name).column.get());
 
         std::unordered_map<String, size_t> sub_chunks_indices;
         IColumn::Selector selector;
         for (size_t row = 0; row < chunk.getNumRows(); ++row)
         {
-            auto & value = (*key_column.column)[row].get<String>();
+            auto value = key_column->getDataAt(row);
             auto [it, inserted] = sub_chunks_indices.emplace(value, sub_chunks_indices.size());
             selector.push_back(it->second);
         }
diff --git a/tests/integration/test_storage_s3/test.py b/tests/integration/test_storage_s3/test.py
index d80e92d7882..85873c746fa 100644
--- a/tests/integration/test_storage_s3/test.py
+++ b/tests/integration/test_storage_s3/test.py
@@ -147,25 +147,20 @@ def test_put(started_cluster, maybe_auth, positive, compression):
 
 
 def test_distributed_put(cluster):
-    bucket = cluster.minio_bucket if not maybe_auth else cluster.minio_restricted_bucket
+    bucket = cluster.minio_bucket
     instance = cluster.instances["dummy"]  # type: ClickHouseInstance
     table_format = "column1 UInt32, column2 UInt32, column3 UInt32"
     values = "(1, 2, 3), (3, 2, 1), (78, 43, 45)"
     values_csv = "1,2,3\n3,2,1\n78,43,45\n"
     filename = "test_{_partition_id}.csv"
     put_query = f"""insert into table function s3('http://{cluster.minio_host}:{cluster.minio_port}/{bucket}/{filename}',
-        'CSV', '{table_format}') PARTITION BY column3 values {values}"""
+        'CSV', '{table_format}') PARTITION BY toString(column3) values {values}"""
 
-    try:
-        run_query(instance, put_query)
-    except helpers.client.QueryRuntimeException:
-        if positive:
-            raise
-    else:
-        assert positive
-    assert "1,2,3\n" == get_s3_file_content(cluster, bucket, "test_3.csv")
-    assert "3,2,1\n" == get_s3_file_content(cluster, bucket, "test_1.csv")
-    assert "78,43,45\n" == get_s3_file_content(cluster, bucket, "test_45.csv")
+    run_query(instance, put_query)
+
+    assert "1,2,3\n" == get_s3_file_content(cluster, bucket, "test_3.csv")
+    assert "3,2,1\n" == get_s3_file_content(cluster, bucket, "test_1.csv")
+    assert "78,43,45\n" == get_s3_file_content(cluster, bucket, "test_45.csv")
 
 
 @pytest.mark.parametrize("special", [