diff --git a/src/Storages/ObjectStorage/DataLakes/DataLakeConfiguration.h b/src/Storages/ObjectStorage/DataLakes/DataLakeConfiguration.h
index 69968dff942..866ef24aa91 100644
--- a/src/Storages/ObjectStorage/DataLakes/DataLakeConfiguration.h
+++ b/src/Storages/ObjectStorage/DataLakes/DataLakeConfiguration.h
@@ -84,15 +84,15 @@ private:
 };
 
 #if USE_AVRO
-#    if USE_AWS_S3
+#if USE_AWS_S3
 using StorageS3IcebergConfiguration = DataLakeConfiguration<StorageS3Configuration, IcebergMetadata>;
 #    endif
 
-#    if USE_AZURE_BLOB_STORAGE
+#if USE_AZURE_BLOB_STORAGE
 using StorageAzureIcebergConfiguration = DataLakeConfiguration<StorageAzureConfiguration, IcebergMetadata>;
 #    endif
 
-#    if USE_HDFS
+#if USE_HDFS
 using StorageHDFSIcebergConfiguration = DataLakeConfiguration<StorageHDFSConfiguration, IcebergMetadata>;
 #    endif
 
@@ -100,7 +100,7 @@
 using StorageLocalIcebergConfiguration = DataLakeConfiguration<StorageLocalConfiguration, IcebergMetadata>;
-#    endif
+#endif
 #endif
diff --git a/src/Storages/ObjectStorage/registerStorageObjectStorage.cpp b/src/Storages/ObjectStorage/registerStorageObjectStorage.cpp
index b0122de3bf7..cb1826b2976 100644
--- a/src/Storages/ObjectStorage/registerStorageObjectStorage.cpp
+++ b/src/Storages/ObjectStorage/registerStorageObjectStorage.cpp
@@ -11,8 +11,6 @@
 namespace DB
 {
 
-#if USE_AWS_S3 || USE_AZURE_BLOB_STORAGE || USE_HDFS
-
 namespace ErrorCodes
 {
     extern const int BAD_ARGUMENTS;
@@ -65,8 +63,6 @@ static std::shared_ptr<StorageObjectStorage> createStorageObjectStorage(
         partition_by);
 }
 
-#endif
-
 #if USE_AZURE_BLOB_STORAGE
 void registerStorageAzure(StorageFactory & factory)
 {
@@ -236,10 +232,10 @@ void registerStorageIceberg(StorageFactory & factory)
 #endif
 
-#if USE_AWS_S3
 #if USE_PARQUET
 void registerStorageDeltaLake(StorageFactory & factory)
 {
+#if USE_AWS_S3
     factory.registerStorage(
         "DeltaLake",
         [&](const StorageFactory::Arguments & args)
         {
@@ -254,11 +250,13 @@ void registerStorageDeltaLake(StorageFactory & factory)
             .supports_schema_inference = true,
             .source_access_type = AccessType::S3,
         });
+#endif
 }
 #endif
 
 void registerStorageHudi(StorageFactory & factory)
 {
+#if USE_AWS_S3
     factory.registerStorage(
         "Hudi",
         [&](const StorageFactory::Arguments & args)
@@ -273,6 +271,6 @@ void registerStorageHudi(StorageFactory & factory)
             .supports_schema_inference = true,
             .source_access_type = AccessType::S3,
         });
-}
 #endif
 }
+}
diff --git a/src/Storages/registerStorages.cpp b/src/Storages/registerStorages.cpp
index cfd406ccbe2..4eb90955a6c 100644
--- a/src/Storages/registerStorages.cpp
+++ b/src/Storages/registerStorages.cpp
@@ -41,10 +41,11 @@ void registerStorageS3Queue(StorageFactory & factory);
 #if USE_PARQUET
 void registerStorageDeltaLake(StorageFactory & factory);
 #endif
+#endif
+
 #if USE_AVRO
 void registerStorageIceberg(StorageFactory & factory);
 #endif
-#endif
 
 #if USE_AZURE_BLOB_STORAGE
 void registerStorageAzureQueue(StorageFactory & factory);
diff --git a/src/TableFunctions/TableFunctionObjectStorage.cpp b/src/TableFunctions/TableFunctionObjectStorage.cpp
index 509ef92e8b2..66c90b15c0b 100644
--- a/src/TableFunctions/TableFunctionObjectStorage.cpp
+++ b/src/TableFunctions/TableFunctionObjectStorage.cpp
@@ -228,7 +228,7 @@ template class TableFunctionObjectStorage<LocalDefinition, StorageLocalConfiguration>;
 void registerTableFunctionIceberg(TableFunctionFactory & factory)
 {
-#    if USE_AWS_S3
+#if USE_AWS_S3
     factory.registerFunction<TableFunctionIceberg>(
         {.documentation
          = {.description = R"(The table function can be used to read the Iceberg table stored on S3 object store.
 Alias to icebergS3)",
@@ -242,23 +242,23 @@ void registerTableFunctionIceberg(TableFunctionFactory & factory)
            .categories{"DataLake"}},
         .allow_readonly = false});
-#    endif
-#    if USE_AZURE_BLOB_STORAGE
+#endif
+#if USE_AZURE_BLOB_STORAGE
     factory.registerFunction<TableFunctionIcebergAzure>(
         {.documentation
          = {.description = R"(The table function can be used to read the Iceberg table stored on Azure object store.)",
            .examples{{"icebergAzure", "SELECT * FROM icebergAzure(url, access_key_id, secret_access_key)", ""}},
            .categories{"DataLake"}},
         .allow_readonly = false});
-#    endif
-#    if USE_HDFS
+#endif
+#if USE_HDFS
     factory.registerFunction<TableFunctionIcebergHDFS>(
         {.documentation
          = {.description = R"(The table function can be used to read the Iceberg table stored on HDFS virtual filesystem.)",
            .examples{{"icebergHDFS", "SELECT * FROM icebergHDFS(url)", ""}},
            .categories{"DataLake"}},
         .allow_readonly = false});
-#    endif
+#endif
 
     factory.registerFunction<TableFunctionIcebergLocal>(
         {.documentation
          = {.description = R"(The table function can be used to read the Iceberg table stored locally.)",
@@ -268,29 +268,31 @@ void registerTableFunctionIceberg(TableFunctionFactory & factory)
 }
 #endif
 
-#if USE_AWS_S3
-#    if USE_PARQUET
+#if USE_PARQUET
 void registerTableFunctionDeltaLake(TableFunctionFactory & factory)
 {
+#if USE_AWS_S3
     factory.registerFunction<TableFunctionDeltaLake>(
         {.documentation
          = {.description = R"(The table function can be used to read the DeltaLake table stored on object store.)",
            .examples{{"deltaLake", "SELECT * FROM deltaLake(url, access_key_id, secret_access_key)", ""}},
            .categories{"DataLake"}},
         .allow_readonly = false});
+#endif
 }
-#    endif
+#endif
 
 void registerTableFunctionHudi(TableFunctionFactory & factory)
 {
+#if USE_AWS_S3
     factory.registerFunction<TableFunctionHudi>(
         {.documentation
          = {.description = R"(The table function can be used to read the Hudi table stored on object store.)",
            .examples{{"hudi", "SELECT * FROM hudi(url, access_key_id, secret_access_key)", ""}},
            .categories{"DataLake"}},
         .allow_readonly = false});
-}
 #endif
+}
 
 void registerDataLakeTableFunctions(TableFunctionFactory & factory)
 {
@@ -298,11 +300,9 @@ void registerDataLakeTableFunctions(TableFunctionFactory & factory)
 #if USE_AVRO
     registerTableFunctionIceberg(factory);
 #endif
-#if USE_AWS_S3
-#    if USE_PARQUET
+#if USE_PARQUET
     registerTableFunctionDeltaLake(factory);
-#    endif
-    registerTableFunctionHudi(factory);
 #endif
+    registerTableFunctionHudi(factory);
 }
 }
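
The common thread across all four files is that the `#if USE_AWS_S3` guard moves from around each registration function to inside its body, so `registerStorageDeltaLake`, `registerStorageHudi`, and their table-function counterparts are always defined and callers no longer need a matching guard at the call site. Below is a minimal standalone sketch of that pattern, not ClickHouse code: `StorageFactory` here is a hypothetical stand-in, and the `USE_*` defines are placeholders for flags normally set by the build system.

```cpp
#include <functional>
#include <iostream>
#include <map>
#include <string>

// Placeholder build flags; in a real build these come from CMake.
#define USE_PARQUET 1
#define USE_AWS_S3 0

// Hypothetical stand-in for the real StorageFactory.
struct StorageFactory
{
    std::map<std::string, std::function<void()>> creators;
    void registerStorage(const std::string & name, std::function<void()> creator)
    {
        creators.emplace(name, std::move(creator));
    }
};

// After the change: the function is always defined, and only its body is
// guarded. A build without S3 gets a harmless no-op instead of a missing
// symbol, so the caller compiles and links unconditionally.
void registerStorageDeltaLake(StorageFactory & factory)
{
    (void)factory; // silences unused-parameter when the backend is disabled
#if USE_AWS_S3
    factory.registerStorage("DeltaLake", [] { /* construct the S3-backed storage */ });
#endif
}

int main()
{
    StorageFactory factory;
#if USE_PARQUET
    // No #if USE_AWS_S3 needed at the call site anymore.
    registerStorageDeltaLake(factory);
#endif
    std::cout << "registered storages: " << factory.creators.size() << '\n';
}
```

With the guard inside the body, the disabled configuration still links: the function exists but registers nothing. That is what lets `registerStorages.cpp` declare and call the registration functions under `#if USE_PARQUET` or `#if USE_AVRO` alone, without the enclosing `#if USE_AWS_S3` block the diff removes.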