AWS SDK integration rework.

This commit is contained in:
Pavel Kovalenko 2019-12-06 17:37:21 +03:00 committed by Pavel Kovalenko
parent a9e2327ec0
commit a9dfefd37f
18 changed files with 271 additions and 120 deletions

3
.gitmodules vendored
View File

@@ -119,3 +119,6 @@
[submodule "aws-checksums"]
path = contrib/aws-checksums
url = https://github.com/awslabs/aws-checksums.git
[submodule "contrib/curl"]
path = contrib/curl
url = https://github.com/curl/curl.git

View File

@@ -1,19 +1,26 @@
option (USE_AWS_S3 "Set to FALSE to use system libbrotli library instead of bundled" ${NOT_UNBUNDLED})
if(NOT OS_FREEBSD AND NOT APPLE)
option(ENABLE_S3 "Enable S3" ${ENABLE_LIBRARIES})
endif()
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/aws/aws-cpp-sdk-s3")
if (USE_AWS_S3)
if(ENABLE_S3)
option(USE_INTERNAL_AWS_S3_LIBRARY "Set to FALSE to use system S3 instead of bundled" ${NOT_UNBUNDLED})
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/aws/aws-cpp-sdk-s3")
message (WARNING "submodule contrib/aws is missing. to fix try run: \n git submodule update --init --recursive")
set (USE_AWS_S3 0)
set (MISSING_AWS_S3 1)
endif ()
set (MISSING_AWS_S3 1)
endif ()
if (USE_AWS_S3 AND NOT MISSING_AWS_S3)
set(AWS_S3_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws/aws-cpp-sdk-s3/include")
set(AWS_S3_CORE_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws/aws-cpp-sdk-core/include")
set(AWS_S3_LIBRARY aws_s3)
set(USE_INTERNAL_AWS_S3_LIBRARY 1)
set(USE_AWS_S3 1)
endif ()
if (USE_INTERNAL_AWS_S3_LIBRARY AND NOT MISSING_AWS_S3)
set(AWS_S3_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws/aws-cpp-sdk-s3/include")
set(AWS_S3_CORE_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws/aws-cpp-sdk-core/include")
set(AWS_S3_LIBRARY aws_s3)
set(USE_INTERNAL_AWS_S3_LIBRARY 1)
set(USE_AWS_S3 1)
else()
set(USE_INTERNAL_AWS_S3_LIBRARY 0)
set(USE_AWS_S3 0)
endif ()
endif()
message (STATUS "Using aws_s3=${USE_AWS_S3}: ${AWS_S3_INCLUDE_DIR} : ${AWS_S3_LIBRARY}")

View File

@@ -3,6 +3,11 @@ SET(AWS_CORE_LIBRARY_DIR ${ClickHouse_SOURCE_DIR}/contrib/aws/aws-cpp-sdk-core)
SET(AWS_CHECKSUMS_LIBRARY_DIR ${ClickHouse_SOURCE_DIR}/contrib/aws-checksums)
SET(AWS_COMMON_LIBRARY_DIR ${ClickHouse_SOURCE_DIR}/contrib/aws-c-common)
SET(AWS_EVENT_STREAM_LIBRARY_DIR ${ClickHouse_SOURCE_DIR}/contrib/aws-c-event-stream)
SET(CURL_LIBRARY_DIR ${ClickHouse_SOURCE_DIR}/contrib/curl)
OPTION(USE_AWS_MEMORY_MANAGEMENT "Aws memory management" OFF)
configure_file("${AWS_CORE_LIBRARY_DIR}/include/aws/core/SDKConfig.h.in"
"${CMAKE_CURRENT_BINARY_DIR}/include/aws/core/SDKConfig.h" @ONLY)
file(GLOB AWS_CORE_SOURCES
"${AWS_CORE_LIBRARY_DIR}/source/*.cpp"
@@ -53,15 +58,13 @@ file(GLOB AWS_COMMON_SOURCES
file(GLOB AWS_CHECKSUMS_SOURCES
"${AWS_CHECKSUMS_LIBRARY_DIR}/source/*.c"
# "${AWS_CHECKSUMS_LIBRARY_DIR}/source/intel/*.c"
# "${AWS_CHECKSUMS_LIBRARY_DIR}/source/arm/*.c"
# "${AWS_CHECKSUMS_LIBRARY_DIR}/source/visualc/*.c"
"${AWS_CHECKSUMS_LIBRARY_DIR}/source/intel/*.c"
"${AWS_CHECKSUMS_LIBRARY_DIR}/source/arm/*.c"
)
file(GLOB S3_UNIFIED_SRC
${AWS_EVENT_STREAM_SOURCES}
${AWS_COMMON_SOURCES}
${AWS_CHECKSUMS_SOURCES}
${AWS_S3_SOURCES}
${AWS_S3_MODEL_SOURCES}
${AWS_CORE_SOURCES}
@@ -70,31 +73,65 @@ file(GLOB S3_UNIFIED_SRC
set(S3_INCLUDES
"${CMAKE_CURRENT_SOURCE_DIR}/include/"
"${AWS_COMMON_LIBRARY_DIR}/include/"
"${AWS_CHECKSUMS_LIBRARY_DIR}/include/"
"${AWS_EVENT_STREAM_LIBRARY_DIR}/include/"
"${AWS_S3_LIBRARY_DIR}/include/"
"${AWS_CORE_LIBRARY_DIR}/include/"
"${CMAKE_CURRENT_BINARY_DIR}/include/"
)
include(ExternalProject)
ExternalProject_Add(
libcurl_ex
SOURCE_DIR "${CURL_LIBRARY_DIR}"
INSTALL_DIR ${CMAKE_BINARY_DIR}/install
CMAKE_ARGS
-DCMAKE_USE_OPENSSL=ON
-DBUILD_SHARED_LIBS=${BUILD_SHARED_LIBS}
-DHTTP_ONLY=ON
-DCMAKE_INSTALL_PREFIX=<INSTALL_DIR>
-DBUILD_CURL_TESTS=OFF
-DBUILD_TESTING=OFF
-DBUILD_CURL_EXE=OFF
-DCURL_ZLIB=OFF
-DOPENSSL_ROOT_DIR=${ClickHouse_SOURCE_DIR}/contrib/ssl
-DOPENSSL_INCLUDE_DIR=${OPENSSL_INCLUDE_DIR}
)
add_library(libcurl UNKNOWN IMPORTED)
if (BUILD_SHARED_LIBS)
set(CURL_LIB_SUFFIX so)
else()
set(CURL_LIB_SUFFIX a)
endif()
set_target_properties(
libcurl
PROPERTIES IMPORTED_LOCATION
${CMAKE_BINARY_DIR}/install/lib/libcurl.${CURL_LIB_SUFFIX}
)
target_link_libraries(libcurl INTERFACE OpenSSL::SSL OpenSSL::Crypto)
add_library(aws_s3_checksums ${AWS_CHECKSUMS_SOURCES})
target_include_directories(aws_s3_checksums PUBLIC "${AWS_CHECKSUMS_LIBRARY_DIR}/include/")
if(CMAKE_BUILD_TYPE STREQUAL "" OR CMAKE_BUILD_TYPE STREQUAL "Debug")
target_compile_definitions(aws_s3_checksums PRIVATE "-DDEBUG_BUILD")
endif()
set_target_properties(aws_s3_checksums PROPERTIES COMPILE_OPTIONS -fPIC)
set_target_properties(aws_s3_checksums PROPERTIES LINKER_LANGUAGE C)
set_property(TARGET aws_s3_checksums PROPERTY C_STANDARD 99)
add_library(aws_s3 ${S3_UNIFIED_SRC})
OPTION(USE_AWS_MEMORY_MANAGEMENT "Aws memory management" OFF)
configure_file("${AWS_CORE_LIBRARY_DIR}/include/aws/core/SDKConfig.h.in" "${AWS_CORE_LIBRARY_DIR}/include/aws/core/SDKConfig.h")
add_dependencies(aws_s3 libcurl_ex)
target_compile_definitions(aws_s3 PUBLIC -DENABLE_OPENSSL_ENCRYPTION)
target_compile_definitions(aws_s3 PUBLIC -DENABLE_CURL_CLIENT)
target_compile_definitions(aws_s3 PUBLIC "AWS_SDK_VERSION_MAJOR=1")
target_compile_definitions(aws_s3 PUBLIC "AWS_SDK_VERSION_MINOR=7")
target_compile_definitions(aws_s3 PUBLIC "AWS_SDK_VERSION_PATCH=231")
target_include_directories(aws_s3 PUBLIC ${S3_INCLUDES} "${CMAKE_BINARY_DIR}/install")
set(OPENSSL_USE_STATIC_LIBS TRUE)
find_package(OpenSSL REQUIRED)
set(CURL_LIBRARY "-lcurl")
add_definitions(-DCURL_STATICLIB)
find_package(CURL REQUIRED)
add_definitions(-DENABLE_OPENSSL_ENCRYPTION)
add_definitions(-DENABLE_CURL_CLIENT)
target_include_directories(aws_s3 PRIVATE ${S3_INCLUDES})
target_include_directories(aws_s3 PRIVATE ${CURL_INCLUDE_DIR})
target_link_libraries(aws_s3 OpenSSL::Crypto)
target_link_libraries(aws_s3 ${CURL_LIBRARIES})
target_link_libraries(aws_s3 PRIVATE aws_s3_checksums OpenSSL::SSL OpenSSL::Crypto libcurl)

1
contrib/curl vendored Submodule

@@ -0,0 +1 @@
Subproject commit 3b8bbbbd1609c638a3d3d0acb148a33dedb67be3

View File

@@ -6,6 +6,7 @@
#cmakedefine01 USE_SSL
#cmakedefine01 USE_POCO_NETSSL
#cmakedefine01 USE_HDFS
#cmakedefine01 USE_AWS_S3
#cmakedefine01 USE_CPUID
#cmakedefine01 USE_CPUINFO
#cmakedefine01 USE_BROTLI

View File

@@ -1,5 +1,8 @@
#include <IO/ReadBufferFromS3.h>
#include <Common/config.h>
#if USE_AWS_S3
#include <IO/ReadBufferFromS3.h>
#include <IO/ReadBufferFromIStream.h>
#include <common/logger_useful.h>
@@ -43,5 +46,6 @@ bool ReadBufferFromS3::nextImpl()
working_buffer = internal_buffer;
return true;
}
}
#endif

View File

@@ -1,5 +1,9 @@
#pragma once
#include <Common/config.h>
#if USE_AWS_S3
#include <memory>
#include <IO/ConnectionTimeouts.h>
@@ -32,3 +36,5 @@ public:
};
}
#endif

View File

@@ -1,3 +1,7 @@
#include <Common/config.h>
#if USE_AWS_S3
#include <IO/S3Common.h>
#include <IO/WriteHelpers.h>
#include <IO/WriteBufferFromString.h>
@@ -7,64 +11,87 @@
#include <aws/core/auth/AWSCredentialsProvider.h>
namespace DB
{
namespace DB {
namespace ErrorCodes
{
namespace ErrorCodes {
extern const int BAD_ARGUMENTS;
}
namespace S3 {
Aws::SDKOptions ClientFactory::aws_options;
static std::mutex aws_init_lock;
static Aws::SDKOptions aws_options;
static std::atomic<bool> aws_initialized(false);
static const std::regex S3_URL_REGEX(R"((https?://.*)/(.*)/(.*))");
static void initializeAwsAPI() {
std::lock_guard<std::mutex> lock(aws_init_lock);
if (!aws_initialized.load()) {
ClientFactory::ClientFactory() {
Aws::InitAPI(aws_options);
aws_initialized.store(true);
}
ClientFactory::~ClientFactory() {
Aws::ShutdownAPI(aws_options);
}
ClientFactory &ClientFactory::instance() {
static ClientFactory ret;
return ret;
}
std::shared_ptr<Aws::S3::S3Client>
ClientFactory::create(
const String & endpoint,
const String & access_key_id,
const String & secret_access_key
) {
Aws::Client::ClientConfiguration cfg;
if (!endpoint.empty())
cfg.endpointOverride = endpoint;
auto cred_provider = std::make_shared<Aws::Auth::SimpleAWSCredentialsProvider>(access_key_id,
secret_access_key);
return std::make_shared<Aws::S3::S3Client>(
std::move(cred_provider), // Credentials provider.
std::move(cfg), // Client configuration.
Aws::Client::AWSAuthV4Signer::PayloadSigningPolicy::Never, // Sign policy.
endpoint.empty() // Use virtual addressing only if endpoint is not specified.
);
}
URI::URI(Poco::URI & uri) {
static const std::regex BUCKET_KEY_PATTERN("([^/]+)/(.*)");
// s3://*
if (uri.getScheme() == "s3" || uri.getScheme() == "S3") {
bucket = uri.getAuthority();
if (bucket.empty())
throw Exception ("Invalid S3 URI: no bucket: " + uri.toString(), ErrorCodes::BAD_ARGUMENTS);
const auto & path = uri.getPath();
// s3://bucket or s3://bucket/
if (path.length() <= 1)
throw Exception ("Invalid S3 URI: no key: " + uri.toString(), ErrorCodes::BAD_ARGUMENTS);
key = path.substr(1);
return;
}
if (uri.getHost().empty())
throw Exception("Invalid S3 URI: no host: " + uri.toString(), ErrorCodes::BAD_ARGUMENTS);
endpoint = uri.getScheme() + "://" + uri.getAuthority();
// Parse bucket and key from path.
std::smatch match;
std::regex_search(uri.getPath(), match, BUCKET_KEY_PATTERN);
if (!match.empty()) {
bucket = match.str(1);
if (bucket.empty())
throw Exception ("Invalid S3 URI: no bucket: " + uri.toString(), ErrorCodes::BAD_ARGUMENTS);
key = match.str(2);
if (key.empty())
throw Exception ("Invalid S3 URI: no key: " + uri.toString(), ErrorCodes::BAD_ARGUMENTS);
}
else
throw Exception("Invalid S3 URI: no bucket or key: " + uri.toString(), ErrorCodes::BAD_ARGUMENTS);
}
}
std::shared_ptr<Aws::S3::S3Client> S3Helper::createS3Client(const String & endpoint_url,
const String & access_key_id,
const String & secret_access_key)
{
initializeAwsAPI();
Aws::Client::ClientConfiguration cfg;
cfg.endpointOverride = endpoint_url;
cfg.scheme = Aws::Http::Scheme::HTTP;
auto cred_provider = std::make_shared<Aws::Auth::SimpleAWSCredentialsProvider>(access_key_id, secret_access_key);
return std::make_shared<Aws::S3::S3Client>(
std::move(cred_provider),
std::move(cfg),
Aws::Client::AWSAuthV4Signer::PayloadSigningPolicy::Never,
false
);
}
S3Endpoint S3Helper::parseS3EndpointFromUrl(const String & url) {
std::smatch match;
if (std::regex_search(url, match, S3_URL_REGEX) && match.size() > 1) {
S3Endpoint endpoint;
endpoint.endpoint_url = match.str(1);
endpoint.bucket = match.str(2);
endpoint.key = match.str(3);
return endpoint;
}
else
throw Exception("Failed to parse S3 Storage URL. It should contain endpoint url, bucket and file. "
"Regex is (https?://.*)/(.*)/(.*)", ErrorCodes::BAD_ARGUMENTS);
}
}
#endif

View File

@@ -1,29 +1,48 @@
#pragma once
#include <Common/config.h>
#if USE_AWS_S3
#include <regex>
#include <Core/Types.h>
#include <Poco/Net/HTTPRequest.h>
#include <aws/s3/S3Client.h>
#include <boost/noncopyable.hpp>
#include <Poco/URI.h>
#include <aws/core/Aws.h>
namespace DB
namespace DB::S3
{
struct S3Endpoint {
String endpoint_url;
String bucket;
String key;
class ClientFactory {
public:
ClientFactory();
~ClientFactory();
static ClientFactory & instance();
std::shared_ptr<Aws::S3::S3Client> create(const String & endpoint,
const String & access_key_id,
const String & secret_access_key);
private:
static Aws::SDKOptions aws_options;
};
/**
* The following patterns are allowed:
* s3://bucket/key
* http(s)://endpoint/bucket/key
*/
struct URI {
// Custom endpoint if URI scheme is not S3.
String endpoint;
String bucket;
String key;
namespace S3Helper
{
S3Endpoint parseS3EndpointFromUrl(const String & url);
std::shared_ptr<Aws::S3::S3Client> createS3Client(const String & endpoint_url,
const String & access_key_id,
const String & secret_access_key);
}
explicit URI (Poco::URI & uri);
};
}
#endif

View File

@@ -1,5 +1,8 @@
#include <IO/WriteBufferFromS3.h>
#include <Common/config.h>
#if USE_AWS_S3
#include <IO/WriteBufferFromS3.h>
#include <IO/WriteHelpers.h>
#include <common/logger_useful.h>
@@ -159,3 +162,5 @@ void WriteBufferFromS3::complete()
}
}
#endif

View File

@@ -1,5 +1,9 @@
#pragma once
#include <Common/config.h>
#if USE_AWS_S3
#include <memory>
#include <vector>
#include <Core/Types.h>
@@ -54,3 +58,5 @@ private:
};
}
#endif

View File

@@ -1,3 +1,7 @@
#include <Common/config.h>
#if USE_AWS_S3
#include <IO/S3Common.h>
#include <Storages/StorageFactory.h>
#include <Storages/StorageS3.h>
@@ -129,7 +133,7 @@ namespace
}
StorageS3::StorageS3(const S3Endpoint & endpoint_,
StorageS3::StorageS3(const S3::URI & uri_,
const String & access_key_id_,
const String & secret_access_key_,
const std::string & database_name_,
@@ -141,14 +145,14 @@ StorageS3::StorageS3(const S3Endpoint & endpoint_,
Context & context_,
const String & compression_method_ = "")
: IStorage(columns_)
, endpoint(endpoint_)
, uri(uri_)
, context_global(context_)
, format_name(format_name_)
, database_name(database_name_)
, table_name(table_name_)
, min_upload_part_size(min_upload_part_size_)
, compression_method(compression_method_)
, client(S3Helper::createS3Client(endpoint_.endpoint_url, access_key_id_, secret_access_key_))
, client(S3::ClientFactory::instance().create(uri_.endpoint, access_key_id_, secret_access_key_))
{
setColumns(columns_);
setConstraints(constraints_);
@@ -169,10 +173,10 @@ BlockInputStreams StorageS3::read(
getHeaderBlock(column_names),
context,
max_block_size,
IStorage::chooseCompressionMethod(endpoint.endpoint_url, compression_method),
IStorage::chooseCompressionMethod(uri.endpoint, compression_method),
client,
endpoint.bucket,
endpoint.key);
uri.bucket,
uri.key);
auto column_defaults = getColumns().getDefaults();
if (column_defaults.empty())
@@ -190,8 +194,8 @@ BlockOutputStreamPtr StorageS3::write(const ASTPtr & /*query*/, const Context &
{
return std::make_shared<StorageS3BlockOutputStream>(
format_name, min_upload_part_size, getSampleBlock(), context_global,
IStorage::chooseCompressionMethod(endpoint.endpoint_url, compression_method),
client, endpoint.bucket, endpoint.key);
IStorage::chooseCompressionMethod(uri.endpoint, compression_method),
client, uri.bucket, uri.key);
}
void registerStorageS3(StorageFactory & factory)
@@ -208,7 +212,8 @@ void registerStorageS3(StorageFactory & factory)
engine_args[i] = evaluateConstantExpressionOrIdentifierAsLiteral(engine_args[i], args.local_context);
String url = engine_args[0]->as<ASTLiteral &>().value.safeGet<String>();
S3Endpoint endpoint = S3Helper::parseS3EndpointFromUrl(url);
Poco::URI uri (url);
S3::URI s3_uri (uri);
String format_name = engine_args[engine_args.size() - 1]->as<ASTLiteral &>().value.safeGet<String>();
@@ -228,8 +233,9 @@ void registerStorageS3(StorageFactory & factory)
else
compression_method = "auto";
return StorageS3::create(endpoint, access_key_id, secret_access_key, args.database_name, args.table_name, format_name, min_upload_part_size, args.columns, args.constraints, args.context);
return StorageS3::create(s3_uri, access_key_id, secret_access_key, args.database_name, args.table_name, format_name, min_upload_part_size, args.columns, args.constraints, args.context);
});
}
}
#endif

View File

@@ -1,11 +1,14 @@
#pragma once
#include <Common/config.h>
#if USE_AWS_S3
#include <Storages/IStorage.h>
#include <Poco/URI.h>
#include <common/logger_useful.h>
#include <ext/shared_ptr_helper.h>
#include <aws/s3/S3Client.h>
#include <aws/core/auth/AWSCredentialsProvider.h>
namespace DB
{
@@ -18,7 +21,7 @@ namespace DB
class StorageS3 : public ext::shared_ptr_helper<StorageS3>, public IStorage
{
public:
StorageS3(const S3Endpoint & endpoint,
StorageS3(const S3::URI & uri,
const String & access_key_id,
const String & secret_access_key,
const String & database_name_,
@@ -58,7 +61,7 @@ public:
void rename(const String & new_path_to_db, const String & new_database_name, const String & new_table_name, TableStructureWriteLockHolder &) override;
private:
S3Endpoint endpoint;
S3::URI uri;
const Context & context_global;
String format_name;
@@ -68,5 +71,5 @@ private:
String compression_method;
std::shared_ptr<Aws::S3::S3Client> client;
};
}
#endif

View File

@@ -19,7 +19,6 @@ void registerStorageDistributed(StorageFactory & factory);
void registerStorageMemory(StorageFactory & factory);
void registerStorageFile(StorageFactory & factory);
void registerStorageURL(StorageFactory & factory);
void registerStorageS3(StorageFactory & factory);
void registerStorageDictionary(StorageFactory & factory);
void registerStorageSet(StorageFactory & factory);
void registerStorageJoin(StorageFactory & factory);
@@ -27,6 +26,10 @@ void registerStorageView(StorageFactory & factory);
void registerStorageMaterializedView(StorageFactory & factory);
void registerStorageLiveView(StorageFactory & factory);
#if USE_AWS_S3
void registerStorageS3(StorageFactory & factory);
#endif
#if USE_HDFS
void registerStorageHDFS(StorageFactory & factory);
#endif
@@ -61,7 +64,6 @@ void registerStorages()
registerStorageMemory(factory);
registerStorageFile(factory);
registerStorageURL(factory);
registerStorageS3(factory);
registerStorageDictionary(factory);
registerStorageSet(factory);
registerStorageJoin(factory);
@@ -69,6 +71,10 @@ void registerStorages()
registerStorageMaterializedView(factory);
registerStorageLiveView(factory);
#if USE_AWS_S3
registerStorageS3(factory);
#endif
#if USE_HDFS
registerStorageHDFS(factory);
#endif

View File

@@ -1,3 +1,7 @@
#include <Common/config.h>
#if USE_AWS_S3
#include <IO/S3Common.h>
#include <Storages/StorageS3.h>
#include <Interpreters/evaluateConstantExpression.h>
@@ -76,9 +80,11 @@ StoragePtr TableFunctionS3::getStorage(
const std::string & table_name,
const String & compression_method) const
{
S3Endpoint endpoint = S3Helper::parseS3EndpointFromUrl(source);
Poco::URI uri (source);
S3::URI s3_uri (uri);
UInt64 min_upload_part_size = global_context.getSettingsRef().s3_min_upload_part_size;
return StorageS3::create(endpoint, access_key_id, secret_access_key, getDatabaseName(), table_name, format, min_upload_part_size, columns, ConstraintsDescription{}, global_context, compression_method);
return StorageS3::create(s3_uri, access_key_id, secret_access_key, getDatabaseName(), table_name, format, min_upload_part_size, columns, ConstraintsDescription{}, global_context, compression_method);
}
void registerTableFunctionS3(TableFunctionFactory & factory)
@@ -87,3 +93,5 @@ void registerTableFunctionS3(TableFunctionFactory & factory)
}
}
#endif

View File

@@ -1,5 +1,9 @@
#pragma once
#include <Common/config.h>
#if USE_AWS_S3
#include <TableFunctions/ITableFunction.h>
@@ -37,3 +41,5 @@ private:
};
}
#endif

View File

@@ -11,11 +11,14 @@ void registerTableFunctionMerge(TableFunctionFactory & factory);
void registerTableFunctionRemote(TableFunctionFactory & factory);
void registerTableFunctionNumbers(TableFunctionFactory & factory);
void registerTableFunctionFile(TableFunctionFactory & factory);
void registerTableFunctionS3(TableFunctionFactory & factory);
void registerTableFunctionURL(TableFunctionFactory & factory);
void registerTableFunctionValues(TableFunctionFactory & factory);
void registerTableFunctionInput(TableFunctionFactory & factory);
#if USE_AWS_S3
void registerTableFunctionS3(TableFunctionFactory & factory);
#endif
#if USE_HDFS
void registerTableFunctionHDFS(TableFunctionFactory & factory);
#endif
@@ -39,11 +42,14 @@ void registerTableFunctions()
registerTableFunctionRemote(factory);
registerTableFunctionNumbers(factory);
registerTableFunctionFile(factory);
registerTableFunctionS3(factory);
registerTableFunctionURL(factory);
registerTableFunctionValues(factory);
registerTableFunctionInput(factory);
#if USE_AWS_S3
registerTableFunctionS3(factory);
#endif
#if USE_HDFS
registerTableFunctionHDFS(factory);
#endif

View File

@@ -4,7 +4,7 @@ FROM ubuntu:18.04
RUN echo "deb [trusted=yes] http://apt.llvm.org/bionic/ llvm-toolchain-bionic-8 main" >> /etc/apt/sources.list
RUN apt-get update \
&& env DEBIAN_FRONTEND=noninteractive apt-get -y install tzdata python llvm-6.0 llvm-6.0-dev libreadline-dev libicu-dev bsdutils llvm-8 curl libcurl4 libcurl4-openssl-dev \
&& env DEBIAN_FRONTEND=noninteractive apt-get -y install tzdata python llvm-6.0 llvm-6.0-dev libreadline-dev libicu-dev bsdutils llvm-8 \
&& rm -rf \
/var/lib/apt/lists/* \
/var/cache/debconf \