Mirror of https://github.com/ClickHouse/ClickHouse.git (synced 2024-11-24 08:32:02 +00:00)

Commit e0a7f5939f: Merge branch 'master' into better_hashmap
@@ -547,6 +547,4 @@ add_subdirectory (programs)
add_subdirectory (tests)
add_subdirectory (utils)

include (cmake/print_include_directories.cmake)

include (cmake/sanitize_target_link_libraries.cmake)
@@ -134,7 +134,7 @@ else ()
set (COMPILER_FLAGS "${COMPILER_FLAGS} ${TEST_FLAG}")
endif ()

set (TEST_FLAG "-mavx512f -mavx512bw")
set (TEST_FLAG "-mavx512f -mavx512bw -mavx512vl")
set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0")
check_cxx_source_compiles("
#include <immintrin.h>
@@ -143,6 +143,8 @@ else ()
(void)a;
auto b = _mm512_add_epi16(__m512i(), __m512i());
(void)b;
auto c = _mm_cmp_epi8_mask(__m128i(), __m128i(), 0);
(void)c;
return 0;
}
" HAVE_AVX512)
@@ -181,7 +183,7 @@ else ()
set (X86_INTRINSICS_FLAGS "${X86_INTRINSICS_FLAGS} -mbmi")
endif ()
if (HAVE_AVX512)
set (X86_INTRINSICS_FLAGS "${X86_INTRINSICS_FLAGS} -mavx512f -mavx512bw -mprefer-vector-width=256")
set (X86_INTRINSICS_FLAGS "${X86_INTRINSICS_FLAGS} -mavx512f -mavx512bw -mavx512vl -mprefer-vector-width=256")
endif ()
endif ()
endif ()
@@ -1,31 +1,8 @@
option (USE_LIBCXX "Use libc++ and libc++abi instead of libstdc++" ON)

if (NOT USE_LIBCXX)
target_link_libraries(global-libs INTERFACE -l:libstdc++.a -l:libstdc++fs.a) # Always link these libraries as static
target_link_libraries(global-libs INTERFACE ${EXCEPTION_HANDLING_LIBRARY})
return()
endif()

set (CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -D_LIBCPP_DEBUG=0") # More checks in debug build.

if (NOT HAVE_LIBCXX AND NOT MISSING_INTERNAL_LIBCXX_LIBRARY)
set (LIBCXX_LIBRARY cxx)
set (LIBCXXABI_LIBRARY cxxabi)
add_subdirectory(contrib/libcxxabi-cmake)
add_subdirectory(contrib/libcxx-cmake)
add_subdirectory(contrib/libcxxabi-cmake)
add_subdirectory(contrib/libcxx-cmake)

# Exception handling library is embedded into libcxxabi.
# Exception handling library is embedded into libcxxabi.

set (HAVE_LIBCXX 1)
endif ()

if (HAVE_LIBCXX)
target_link_libraries(global-libs INTERFACE ${LIBCXX_LIBRARY} ${LIBCXXABI_LIBRARY} ${LIBCXXFS_LIBRARY})

message (STATUS "Using libcxx: ${LIBCXX_LIBRARY}")
message (STATUS "Using libcxxfs: ${LIBCXXFS_LIBRARY}")
message (STATUS "Using libcxxabi: ${LIBCXXABI_LIBRARY}")
else()
target_link_libraries(global-libs INTERFACE -l:libstdc++.a -l:libstdc++fs.a) # Always link these libraries as static
target_link_libraries(global-libs INTERFACE ${EXCEPTION_HANDLING_LIBRARY})
endif()
target_link_libraries(global-libs INTERFACE cxx cxxabi)
@@ -1,29 +0,0 @@

# TODO? Maybe recursive collect on all deps

get_property (dirs1 TARGET dbms PROPERTY INCLUDE_DIRECTORIES)
list(APPEND dirs ${dirs1})

get_property (dirs1 TARGET clickhouse_common_io PROPERTY INCLUDE_DIRECTORIES)
list(APPEND dirs ${dirs1})

get_property (dirs1 TARGET common PROPERTY INCLUDE_DIRECTORIES)
list(APPEND dirs ${dirs1})

get_property (dirs1 TARGET ch_contrib::cityhash PROPERTY INCLUDE_DIRECTORIES)
list(APPEND dirs ${dirs1})

get_property (dirs1 TARGET roaring PROPERTY INCLUDE_DIRECTORIES)
list(APPEND dirs ${dirs1})

if (TARGET ch_contrib::double_conversion)
get_property (dirs1 TARGET ch_contrib::double_conversion PROPERTY INCLUDE_DIRECTORIES)
list(APPEND dirs ${dirs1})
endif ()

list(REMOVE_DUPLICATES dirs)
file (WRITE ${CMAKE_CURRENT_BINARY_DIR}/include_directories.txt "")
foreach (dir ${dirs})
string (REPLACE "${ClickHouse_SOURCE_DIR}" "." dir "${dir}")
file (APPEND ${CMAKE_CURRENT_BINARY_DIR}/include_directories.txt "-I ${dir} ")
endforeach ()
@@ -55,11 +55,6 @@ if (COMPILER_CLANG)
no_warning(weak-template-vtables)
no_warning(weak-vtables)

# XXX: libstdc++ has some of these for 3way compare
if (NOT USE_LIBCXX)
no_warning(zero-as-null-pointer-constant)
endif()

# TODO Enable conversion, sign-conversion, double-promotion warnings.
else ()
add_warning(comma)
@@ -98,10 +93,7 @@ if (COMPILER_CLANG)
add_warning(tautological-bitwise-compare)

# XXX: libstdc++ has some of these for 3way compare
if (USE_LIBCXX)
add_warning(zero-as-null-pointer-constant)
endif()

endif ()
elseif (COMPILER_GCC)
# Add compiler options only to c++ compiler
@@ -183,11 +175,8 @@ elseif (COMPILER_GCC)
add_cxx_compile_options(-Wundef)
# Warn if vector operation is not implemented via SIMD capabilities of the architecture
add_cxx_compile_options(-Wvector-operation-performance)
# XXX: libstdc++ has some of these for 3way compare
if (USE_LIBCXX)
# Warn when a literal 0 is used as null pointer constant.
add_cxx_compile_options(-Wzero-as-null-pointer-constant)
endif()

if (CMAKE_CXX_COMPILER_VERSION VERSION_GREATER_EQUAL 10)
# XXX: gcc10 stuck with this option while compiling GatherUtils code
@@ -6,15 +6,17 @@ set(BUILD_TESTING OFF)
set(ABSL_PROPAGATE_CXX_STD ON)
add_subdirectory("${ABSL_ROOT_DIR}" "${ClickHouse_BINARY_DIR}/contrib/abseil-cpp")

add_library(abseil_swiss_tables INTERFACE)
add_library(_abseil_swiss_tables INTERFACE)

target_link_libraries(abseil_swiss_tables INTERFACE
target_link_libraries(_abseil_swiss_tables INTERFACE
absl::flat_hash_map
absl::flat_hash_set
)

get_target_property(FLAT_HASH_MAP_INCLUDE_DIR absl::flat_hash_map INTERFACE_INCLUDE_DIRECTORIES)
target_include_directories (abseil_swiss_tables SYSTEM BEFORE INTERFACE ${FLAT_HASH_MAP_INCLUDE_DIR})
target_include_directories (_abseil_swiss_tables SYSTEM BEFORE INTERFACE ${FLAT_HASH_MAP_INCLUDE_DIR})

get_target_property(FLAT_HASH_SET_INCLUDE_DIR absl::flat_hash_set INTERFACE_INCLUDE_DIRECTORIES)
target_include_directories (abseil_swiss_tables SYSTEM BEFORE INTERFACE ${FLAT_HASH_SET_INCLUDE_DIR})
target_include_directories (_abseil_swiss_tables SYSTEM BEFORE INTERFACE ${FLAT_HASH_SET_INCLUDE_DIR})

add_library(ch_contrib::abseil_swiss_tables ALIAS _abseil_swiss_tables)
@@ -91,30 +91,30 @@ set(S3_INCLUDES
"${CMAKE_CURRENT_BINARY_DIR}/include/"
)

add_library(aws_s3_checksums ${AWS_CHECKSUMS_SOURCES})
target_include_directories(aws_s3_checksums SYSTEM PUBLIC "${AWS_CHECKSUMS_LIBRARY_DIR}/include/")
add_library(_aws_s3_checksums ${AWS_CHECKSUMS_SOURCES})
target_include_directories(_aws_s3_checksums SYSTEM PUBLIC "${AWS_CHECKSUMS_LIBRARY_DIR}/include/")
if(CMAKE_BUILD_TYPE_UC STREQUAL "DEBUG")
target_compile_definitions(aws_s3_checksums PRIVATE "-DDEBUG_BUILD")
target_compile_definitions(_aws_s3_checksums PRIVATE "-DDEBUG_BUILD")
endif()
set_target_properties(aws_s3_checksums PROPERTIES LINKER_LANGUAGE C)
set_property(TARGET aws_s3_checksums PROPERTY C_STANDARD 99)
set_target_properties(_aws_s3_checksums PROPERTIES LINKER_LANGUAGE C)
set_property(TARGET _aws_s3_checksums PROPERTY C_STANDARD 99)

add_library(aws_s3 ${S3_UNIFIED_SRC})
add_library(_aws_s3 ${S3_UNIFIED_SRC})

target_compile_definitions(aws_s3 PUBLIC "AWS_SDK_VERSION_MAJOR=1")
target_compile_definitions(aws_s3 PUBLIC "AWS_SDK_VERSION_MINOR=7")
target_compile_definitions(aws_s3 PUBLIC "AWS_SDK_VERSION_PATCH=231")
target_include_directories(aws_s3 SYSTEM BEFORE PUBLIC ${S3_INCLUDES})
target_compile_definitions(_aws_s3 PUBLIC "AWS_SDK_VERSION_MAJOR=1")
target_compile_definitions(_aws_s3 PUBLIC "AWS_SDK_VERSION_MINOR=7")
target_compile_definitions(_aws_s3 PUBLIC "AWS_SDK_VERSION_PATCH=231")
target_include_directories(_aws_s3 SYSTEM BEFORE PUBLIC ${S3_INCLUDES})

if (TARGET OpenSSL::SSL)
target_compile_definitions(aws_s3 PUBLIC -DENABLE_OPENSSL_ENCRYPTION)
target_link_libraries(aws_s3 PRIVATE OpenSSL::Crypto OpenSSL::SSL)
target_compile_definitions(_aws_s3 PUBLIC -DENABLE_OPENSSL_ENCRYPTION)
target_link_libraries(_aws_s3 PRIVATE OpenSSL::Crypto OpenSSL::SSL)
endif()

target_link_libraries(aws_s3 PRIVATE aws_s3_checksums)
target_link_libraries(_aws_s3 PRIVATE _aws_s3_checksums)

# The library is large - avoid bloat.
target_compile_options (aws_s3 PRIVATE -g0)
target_compile_options (aws_s3_checksums PRIVATE -g0)
target_compile_options (_aws_s3 PRIVATE -g0)
target_compile_options (_aws_s3_checksums PRIVATE -g0)

add_library(ch_contrib::aws_s3 ALIAS aws_s3)
add_library(ch_contrib::aws_s3 ALIAS _aws_s3)
@@ -11,37 +11,37 @@ endif()

SET(LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/base64")

add_library(base64_scalar OBJECT "${LIBRARY_DIR}/turbob64c.c" "${LIBRARY_DIR}/turbob64d.c")
add_library(base64_ssse3 OBJECT "${LIBRARY_DIR}/turbob64sse.c") # This file also contains code for ARM NEON
add_library(_base64_scalar OBJECT "${LIBRARY_DIR}/turbob64c.c" "${LIBRARY_DIR}/turbob64d.c")
add_library(_base64_ssse3 OBJECT "${LIBRARY_DIR}/turbob64sse.c") # This file also contains code for ARM NEON

if (ARCH_AMD64)
add_library(base64_avx OBJECT "${LIBRARY_DIR}/turbob64sse.c") # This is not a mistake. One file is compiled twice.
add_library(base64_avx2 OBJECT "${LIBRARY_DIR}/turbob64avx2.c")
add_library(_base64_avx OBJECT "${LIBRARY_DIR}/turbob64sse.c") # This is not a mistake. One file is compiled twice.
add_library(_base64_avx2 OBJECT "${LIBRARY_DIR}/turbob64avx2.c")
endif ()

target_compile_options(base64_scalar PRIVATE -falign-loops)
target_compile_options(_base64_scalar PRIVATE -falign-loops)

if (ARCH_AMD64)
target_compile_options(base64_ssse3 PRIVATE -mno-avx -mno-avx2 -mssse3 -falign-loops)
target_compile_options(base64_avx PRIVATE -falign-loops -mavx)
target_compile_options(base64_avx2 PRIVATE -falign-loops -mavx2)
target_compile_options(_base64_ssse3 PRIVATE -mno-avx -mno-avx2 -mssse3 -falign-loops)
target_compile_options(_base64_avx PRIVATE -falign-loops -mavx)
target_compile_options(_base64_avx2 PRIVATE -falign-loops -mavx2)
else ()
target_compile_options(base64_ssse3 PRIVATE -falign-loops)
target_compile_options(_base64_ssse3 PRIVATE -falign-loops)
endif ()

if (ARCH_AMD64)
add_library(base64
$<TARGET_OBJECTS:base64_scalar>
$<TARGET_OBJECTS:base64_ssse3>
$<TARGET_OBJECTS:base64_avx>
$<TARGET_OBJECTS:base64_avx2>)
add_library(_base64
$<TARGET_OBJECTS:_base64_scalar>
$<TARGET_OBJECTS:_base64_ssse3>
$<TARGET_OBJECTS:_base64_avx>
$<TARGET_OBJECTS:_base64_avx2>)
else ()
add_library(base64
$<TARGET_OBJECTS:base64_scalar>
$<TARGET_OBJECTS:base64_ssse3>)
add_library(_base64
$<TARGET_OBJECTS:_base64_scalar>
$<TARGET_OBJECTS:_base64_ssse3>)
endif ()

target_include_directories(base64 SYSTEM PUBLIC ${LIBRARY_DIR})
target_include_directories(_base64 SYSTEM PUBLIC ${LIBRARY_DIR})

if (XCODE OR XCODE_VERSION)
# https://gitlab.kitware.com/cmake/cmake/issues/17457
@@ -50,7 +50,7 @@ if (XCODE OR XCODE_VERSION)
if (NOT EXISTS "${CMAKE_CURRENT_BINARY_DIR}/dummy.c")
file(WRITE "${CMAKE_CURRENT_BINARY_DIR}/dummy.c" "")
endif ()
target_sources(base64 PRIVATE "${CMAKE_CURRENT_BINARY_DIR}/dummy.c")
target_sources(_base64 PRIVATE "${CMAKE_CURRENT_BINARY_DIR}/dummy.c")
endif ()

add_library(ch_contrib::base64 ALIAS base64)
add_library(ch_contrib::base64 ALIAS _base64)
@@ -368,7 +368,7 @@ elseif(WIN32)
endif()

add_library(
crypto
_crypto

${CRYPTO_ARCH_SOURCES}
err_data.c
@@ -605,7 +605,7 @@ add_library(
)

add_library(
ssl
_ssl

"${BORINGSSL_SOURCE_DIR}/ssl/bio_ssl.cc"
"${BORINGSSL_SOURCE_DIR}/ssl/d1_both.cc"
@@ -672,21 +672,22 @@ add_executable(
"${BORINGSSL_SOURCE_DIR}/tool/transport_common.cc"
)

target_link_libraries(ssl crypto)
target_link_libraries(bssl ssl)
target_link_libraries(_ssl _crypto)
target_link_libraries(bssl _ssl)

if(NOT WIN32 AND NOT ANDROID)
target_link_libraries(crypto pthread)
target_link_libraries(_crypto pthread)
endif()

# NOTE: that ClickHouse does not support WIN32 anyway.
if(WIN32)
target_link_libraries(bssl ws2_32)
endif()

target_include_directories(crypto SYSTEM PUBLIC "${BORINGSSL_SOURCE_DIR}/include")
target_include_directories(ssl SYSTEM PUBLIC "${BORINGSSL_SOURCE_DIR}/include")
target_include_directories(_crypto SYSTEM PUBLIC "${BORINGSSL_SOURCE_DIR}/include")
target_include_directories(_ssl SYSTEM PUBLIC "${BORINGSSL_SOURCE_DIR}/include")

target_compile_options(crypto PRIVATE -Wno-gnu-anonymous-struct)
target_compile_options(_crypto PRIVATE -Wno-gnu-anonymous-struct)

add_library(OpenSSL::Crypto ALIAS crypto)
add_library(OpenSSL::SSL ALIAS ssl)
add_library(OpenSSL::Crypto ALIAS _crypto)
add_library(OpenSSL::SSL ALIAS _ssl)
@@ -36,8 +36,8 @@ set (KJ_SRCS
"${CAPNPROTO_SOURCE_DIR}/kj/parse/char.c++"
)

add_library(kj ${KJ_SRCS})
target_include_directories(kj SYSTEM PUBLIC ${CAPNPROTO_SOURCE_DIR})
add_library(_kj ${KJ_SRCS})
target_include_directories(_kj SYSTEM PUBLIC ${CAPNPROTO_SOURCE_DIR})

set (CAPNP_SRCS
"${CAPNPROTO_SOURCE_DIR}/capnp/c++.capnp.c++"
@@ -58,11 +58,11 @@ set (CAPNP_SRCS
"${CAPNPROTO_SOURCE_DIR}/capnp/stringify.c++"
)

add_library(capnp ${CAPNP_SRCS})
set_target_properties(capnp
add_library(_capnp ${CAPNP_SRCS})
set_target_properties(_capnp
PROPERTIES LINKER_LANGUAGE CXX
)
target_link_libraries(capnp PUBLIC kj)
target_link_libraries(_capnp PUBLIC _kj)

set (CAPNPC_SRCS
"${CAPNPROTO_SOURCE_DIR}/capnp/compiler/type-id.c++"
@@ -78,8 +78,8 @@ set (CAPNPC_SRCS
"${CAPNPROTO_SOURCE_DIR}/capnp/serialize-text.c++"
)

add_library(capnpc ${CAPNPC_SRCS})
target_link_libraries(capnpc PUBLIC capnp)
add_library(_capnpc ${CAPNPC_SRCS})
target_link_libraries(_capnpc PUBLIC _capnp)

# The library has substandard code
if (COMPILER_GCC)
@@ -89,8 +89,8 @@ elseif (COMPILER_CLANG)
set (CAPNP_PRIVATE_CXX_FLAGS -fno-char8_t)
endif ()

target_compile_options(kj PRIVATE ${SUPPRESS_WARNINGS} ${CAPNP_PRIVATE_CXX_FLAGS})
target_compile_options(capnp PRIVATE ${SUPPRESS_WARNINGS} ${CAPNP_PRIVATE_CXX_FLAGS})
target_compile_options(capnpc PRIVATE ${SUPPRESS_WARNINGS} ${CAPNP_PRIVATE_CXX_FLAGS})
target_compile_options(_kj PRIVATE ${SUPPRESS_WARNINGS} ${CAPNP_PRIVATE_CXX_FLAGS})
target_compile_options(_capnp PRIVATE ${SUPPRESS_WARNINGS} ${CAPNP_PRIVATE_CXX_FLAGS})
target_compile_options(_capnpc PRIVATE ${SUPPRESS_WARNINGS} ${CAPNP_PRIVATE_CXX_FLAGS})

add_library(ch_contrib::capnp ALIAS capnpc)
add_library(ch_contrib::capnp ALIAS _capnpc)
@@ -53,16 +53,16 @@ endif()
list(APPEND SOURCES ${CASS_SRC_DIR}/atomic/atomic_std.hpp)

add_library(curl_hostcheck OBJECT ${CASS_SRC_DIR}/third_party/curl/hostcheck.cpp)
add_library(hdr_histogram OBJECT ${CASS_SRC_DIR}/third_party/hdr_histogram/hdr_histogram.cpp)
add_library(http-parser OBJECT ${CASS_SRC_DIR}/third_party/http-parser/http_parser.c)
add_library(minizip OBJECT
add_library(_curl_hostcheck OBJECT ${CASS_SRC_DIR}/third_party/curl/hostcheck.cpp)
add_library(_hdr_histogram OBJECT ${CASS_SRC_DIR}/third_party/hdr_histogram/hdr_histogram.cpp)
add_library(_http-parser OBJECT ${CASS_SRC_DIR}/third_party/http-parser/http_parser.c)
add_library(_minizip OBJECT
${CASS_SRC_DIR}/third_party/minizip/ioapi.c
${CASS_SRC_DIR}/third_party/minizip/zip.c
${CASS_SRC_DIR}/third_party/minizip/unzip.c)

target_link_libraries(minizip ch_contrib::zlib)
target_compile_definitions(minizip PRIVATE "-Dz_crc_t=unsigned long")
target_link_libraries(_minizip ch_contrib::zlib)
target_compile_definitions(_minizip PRIVATE "-Dz_crc_t=unsigned long")

list(APPEND INCLUDE_DIRS
${CASS_SRC_DIR}/third_party/curl
@@ -121,10 +121,10 @@ configure_file(

add_library(_cassandra
${SOURCES}
$<TARGET_OBJECTS:curl_hostcheck>
$<TARGET_OBJECTS:hdr_histogram>
$<TARGET_OBJECTS:http-parser>
$<TARGET_OBJECTS:minizip>)
$<TARGET_OBJECTS:_curl_hostcheck>
$<TARGET_OBJECTS:_hdr_histogram>
$<TARGET_OBJECTS:_http-parser>
$<TARGET_OBJECTS:_minizip>)

target_link_libraries(_cassandra ch_contrib::zlib)
target_include_directories(_cassandra PRIVATE ${CMAKE_CURRENT_BINARY_DIR} ${INCLUDE_DIRS})
@@ -14,12 +14,12 @@ set (SRCS
"${LIBRARY_DIR}/src/zone_info_source.cc"
)

add_library (cctz ${SRCS})
target_include_directories (cctz PUBLIC "${LIBRARY_DIR}/include")
add_library (_cctz ${SRCS})
target_include_directories (_cctz PUBLIC "${LIBRARY_DIR}/include")

if (OS_FREEBSD)
# yes, need linux, because bsd check inside linux in time_zone_libc.cc:24
target_compile_definitions (cctz PRIVATE __USE_BSD linux _XOPEN_SOURCE=600)
target_compile_definitions (_cctz PRIVATE __USE_BSD linux _XOPEN_SOURCE=600)
endif ()

# Related to time_zones table:
@@ -57,7 +57,7 @@ clickhouse_embed_binaries(
RESOURCE_DIR "${TZDIR}"
RESOURCES ${TIMEZONE_RESOURCE_FILES}
)
add_dependencies(cctz tzdata)
target_link_libraries(cctz INTERFACE "-Wl,${WHOLE_ARCHIVE} $<TARGET_FILE:tzdata> -Wl,${NO_WHOLE_ARCHIVE}")
add_dependencies(_cctz tzdata)
target_link_libraries(_cctz INTERFACE "-Wl,${WHOLE_ARCHIVE} $<TARGET_FILE:tzdata> -Wl,${NO_WHOLE_ARCHIVE}")

add_library(ch_contrib::cctz ALIAS cctz)
add_library(ch_contrib::cctz ALIAS _cctz)
@@ -1,2 +1,3 @@
add_library(consistent-hashing consistent_hashing.cpp popcount.cpp)
target_include_directories(consistent-hashing SYSTEM PUBLIC ${CMAKE_CURRENT_SOURCE_DIR})
add_library(_consistent_hashing consistent_hashing.cpp popcount.cpp)
target_include_directories(_consistent_hashing SYSTEM PUBLIC ${CMAKE_CURRENT_SOURCE_DIR})
add_library(ch_contrib::consistent_hashing ALIAS _consistent_hashing)
@@ -19,15 +19,15 @@ set(SRCS
"${LIBRARY_DIR}/src/roaring_priority_queue.c"
"${LIBRARY_DIR}/src/roaring_array.c")

add_library(roaring ${SRCS})
add_library(_roaring ${SRCS})

target_include_directories(roaring PRIVATE "${LIBRARY_DIR}/include/roaring")
target_include_directories(roaring SYSTEM BEFORE PUBLIC "${LIBRARY_DIR}/include")
target_include_directories(roaring SYSTEM BEFORE PUBLIC "${LIBRARY_DIR}/cpp")
target_include_directories(_roaring PRIVATE "${LIBRARY_DIR}/include/roaring")
target_include_directories(_roaring SYSTEM BEFORE PUBLIC "${LIBRARY_DIR}/include")
target_include_directories(_roaring SYSTEM BEFORE PUBLIC "${LIBRARY_DIR}/cpp")

# We redirect malloc/free family of functions to different functions that will track memory in ClickHouse.
# Also note that we exploit implicit function declarations.
target_compile_definitions(roaring PRIVATE
target_compile_definitions(_roaring PRIVATE
-Dmalloc=clickhouse_malloc
-Dcalloc=clickhouse_calloc
-Drealloc=clickhouse_realloc
@@ -35,4 +35,6 @@ target_compile_definitions(roaring PRIVATE
-Dfree=clickhouse_free
-Dposix_memalign=clickhouse_posix_memalign)

target_link_libraries(roaring PUBLIC clickhouse_common_io)
target_link_libraries(_roaring PUBLIC clickhouse_common_io)

add_library(ch_contrib::roaring ALIAS _roaring)
@@ -147,35 +147,24 @@ set (SRCS
"${LIBRARY_DIR}/lib/vssh/libssh.c"
)

add_library (curl ${SRCS})
add_library (_curl ${SRCS})

target_compile_definitions (curl PRIVATE
target_compile_definitions (_curl PRIVATE
HAVE_CONFIG_H
BUILDING_LIBCURL
CURL_HIDDEN_SYMBOLS
libcurl_EXPORTS
OS="${CMAKE_SYSTEM_NAME}"
)
target_include_directories (curl SYSTEM PUBLIC
target_include_directories (_curl SYSTEM PUBLIC
"${LIBRARY_DIR}/include"
"${LIBRARY_DIR}/lib"
. # curl_config.h
)

target_link_libraries (curl PRIVATE ssl)
target_link_libraries (_curl PRIVATE OpenSSL::SSL)

# The library is large - avoid bloat (XXX: is it?)
target_compile_options (curl PRIVATE -g0)
target_compile_options (_curl PRIVATE -g0)

# find_package(CURL) compatibility for the following packages that uses
# find_package(CURL)/include(FindCURL):
# - sentry-native
set (CURL_FOUND ON CACHE BOOL "")
set (CURL_ROOT_DIR ${LIBRARY_DIR} CACHE PATH "")
set (CURL_INCLUDE_DIR "${LIBRARY_DIR}/include" CACHE PATH "")
set (CURL_INCLUDE_DIRS "${LIBRARY_DIR}/include" CACHE PATH "")
set (CURL_LIBRARY curl CACHE STRING "")
set (CURL_LIBRARIES ${CURL_LIBRARY} CACHE STRING "")
set (CURL_VERSION_STRING 7.67.0 CACHE STRING "")
# add_library (CURL::libcurl ALIAS ${CURL_LIBRARY})
add_library (ch_contrib::curl ALIAS ${CURL_LIBRARY})
add_library (ch_contrib::curl ALIAS _curl)
@@ -12,9 +12,9 @@ endif()

set(CYRUS_SASL_SOURCE_DIR "${ClickHouse_SOURCE_DIR}/contrib/cyrus-sasl")

add_library(sasl2)
add_library(_sasl2)

target_sources(sasl2 PRIVATE
target_sources(_sasl2 PRIVATE
"${CYRUS_SASL_SOURCE_DIR}/plugins/gssapi.c"
# "${CYRUS_SASL_SOURCE_DIR}/plugins/gssapiv2_init.c"
"${CYRUS_SASL_SOURCE_DIR}/common/plugin_common.c"
@@ -32,11 +32,11 @@ target_sources(sasl2 PRIVATE
"${CYRUS_SASL_SOURCE_DIR}/lib/checkpw.c"
)

target_include_directories(sasl2 PUBLIC
target_include_directories(_sasl2 PUBLIC
${CMAKE_CURRENT_BINARY_DIR}
)

target_include_directories(sasl2 PRIVATE
target_include_directories(_sasl2 PRIVATE
${CMAKE_CURRENT_SOURCE_DIR} # for config.h
"${CYRUS_SASL_SOURCE_DIR}/plugins"
${CYRUS_SASL_SOURCE_DIR}
@@ -50,7 +50,7 @@ target_include_directories(sasl2 PRIVATE
"${CYRUS_SASL_SOURCE_DIR}/tests"
)

target_compile_definitions(sasl2 PUBLIC
target_compile_definitions(_sasl2 PUBLIC
HAVE_CONFIG_H
# PLUGINDIR="/usr/local/lib/sasl2"
PLUGINDIR=""
@@ -76,6 +76,6 @@ file(COPY
DESTINATION ${CMAKE_CURRENT_BINARY_DIR}
)

target_link_libraries(sasl2 PUBLIC ch_contrib::krb5)
target_link_libraries(_sasl2 PUBLIC ch_contrib::krb5)

add_library(ch_contrib::sasl2 ALIAS sasl2)
add_library(ch_contrib::sasl2 ALIAS _sasl2)
@@ -1,5 +1,5 @@
set(LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/dragonbox")

add_library(dragonbox_to_chars "${LIBRARY_DIR}/source/dragonbox_to_chars.cpp")

target_include_directories(dragonbox_to_chars SYSTEM BEFORE PUBLIC "${LIBRARY_DIR}/include/")
add_library(_dragonbox_to_chars "${LIBRARY_DIR}/source/dragonbox_to_chars.cpp")
target_include_directories(_dragonbox_to_chars SYSTEM BEFORE PUBLIC "${LIBRARY_DIR}/include/")
add_library(ch_contrib::dragonbox_to_chars ALIAS _dragonbox_to_chars)
@@ -1,3 +1,3 @@
add_library(fast_float INTERFACE)
target_include_directories(fast_float SYSTEM BEFORE INTERFACE "${ClickHouse_SOURCE_DIR}/contrib/fast_float/include/")
add_library(ch_contrib::fast_float ALIAS fast_float)
add_library(_fast_float INTERFACE)
target_include_directories(_fast_float SYSTEM BEFORE INTERFACE "${ClickHouse_SOURCE_DIR}/contrib/fast_float/include/")
add_library(ch_contrib::fast_float ALIAS _fast_float)
@@ -25,8 +25,8 @@ endif()

set (SRCS ${SRCS} "${LIBRARY_DIR}/fastops/plain/ops_plain.cpp" "${LIBRARY_DIR}/fastops/core/avx_id.cpp" "${LIBRARY_DIR}/fastops/fastops.cpp")

add_library(fastops ${SRCS})
add_library(_fastops ${SRCS})

target_include_directories(fastops SYSTEM PUBLIC "${LIBRARY_DIR}")
target_include_directories(_fastops SYSTEM PUBLIC "${LIBRARY_DIR}")

add_library(ch_contrib::fastops ALIAS fastops)
add_library(ch_contrib::fastops ALIAS _fastops)
@@ -16,6 +16,6 @@ set (SRCS
../fmtlib/include/fmt/ranges.h
)

add_library(fmt ${SRCS})
target_include_directories(fmt SYSTEM PUBLIC ../fmtlib/include)
add_library(ch_contrib::fmt ALIAS fmt)
add_library(_fmt ${SRCS})
target_include_directories(_fmt SYSTEM PUBLIC ../fmtlib/include)
add_library(ch_contrib::fmt ALIAS _fmt)
@@ -30,12 +30,12 @@ set(SRCS

configure_file("${H3_SOURCE_DIR}/include/h3api.h.in" "${H3_BINARY_DIR}/include/h3api.h")

add_library(h3 ${SRCS})
target_include_directories(h3 SYSTEM PUBLIC "${H3_SOURCE_DIR}/include")
target_include_directories(h3 SYSTEM PUBLIC "${H3_BINARY_DIR}/include")
target_compile_definitions(h3 PRIVATE H3_HAVE_VLA)
add_library(_h3 ${SRCS})
target_include_directories(_h3 SYSTEM PUBLIC "${H3_SOURCE_DIR}/include")
target_include_directories(_h3 SYSTEM PUBLIC "${H3_BINARY_DIR}/include")
target_compile_definitions(_h3 PRIVATE H3_HAVE_VLA)
if(M_LIBRARY)
target_link_libraries(h3 PRIVATE ${M_LIBRARY})
target_link_libraries(_h3 PRIVATE ${M_LIBRARY})
endif()

add_library(ch_contrib::h3 ALIAS h3)
add_library(ch_contrib::h3 ALIAS _h3)
@@ -217,23 +217,23 @@ set (SRCS
"${LIBRARY_DIR}/src/util/ue2string.cpp"
)

add_library (hyperscan ${SRCS})
add_library (_hyperscan ${SRCS})

target_compile_options (hyperscan
target_compile_options (_hyperscan
PRIVATE -g0 # Library has too much debug information
-mno-avx -mno-avx2 # The library is using dynamic dispatch and is confused if AVX is enabled globally
-march=corei7 -O2 -fno-strict-aliasing -fno-omit-frame-pointer -fvisibility=hidden # The options from original build system
-fno-sanitize=undefined # Assume the library takes care of itself
)
target_include_directories (hyperscan
target_include_directories (_hyperscan
PRIVATE
common
"${LIBRARY_DIR}/include"
)
target_include_directories (hyperscan SYSTEM PUBLIC "${LIBRARY_DIR}/src")
target_include_directories (_hyperscan SYSTEM PUBLIC "${LIBRARY_DIR}/src")
if (ARCH_AMD64)
target_include_directories (hyperscan PRIVATE x86_64)
target_include_directories (_hyperscan PRIVATE x86_64)
endif ()
target_link_libraries (hyperscan PRIVATE boost::headers_only)
target_link_libraries (_hyperscan PRIVATE boost::headers_only)

add_library (ch_contrib::hyperscan ALIAS hyperscan)
add_library (ch_contrib::hyperscan ALIAS _hyperscan)
@@ -87,9 +87,9 @@ if (OS_DARWIN)
list(APPEND SRCS "${LIBRARY_DIR}/src/zone.c")
endif ()

add_library(jemalloc ${SRCS})
target_include_directories(jemalloc PRIVATE "${LIBRARY_DIR}/include")
target_include_directories(jemalloc SYSTEM PUBLIC include)
add_library(_jemalloc ${SRCS})
target_include_directories(_jemalloc PRIVATE "${LIBRARY_DIR}/include")
target_include_directories(_jemalloc SYSTEM PUBLIC include)

set (JEMALLOC_INCLUDE_PREFIX)
# OS_
@@ -117,24 +117,24 @@ endif ()

configure_file(${JEMALLOC_INCLUDE_PREFIX}/jemalloc/internal/jemalloc_internal_defs.h.in
${JEMALLOC_INCLUDE_PREFIX}/jemalloc/internal/jemalloc_internal_defs.h)
target_include_directories(jemalloc SYSTEM PRIVATE
target_include_directories(_jemalloc SYSTEM PRIVATE
"${CMAKE_CURRENT_BINARY_DIR}/${JEMALLOC_INCLUDE_PREFIX}/jemalloc/internal")

target_compile_definitions(jemalloc PRIVATE -DJEMALLOC_NO_PRIVATE_NAMESPACE)
target_compile_definitions(_jemalloc PRIVATE -DJEMALLOC_NO_PRIVATE_NAMESPACE)

if (CMAKE_BUILD_TYPE_UC STREQUAL "DEBUG")
target_compile_definitions(jemalloc PRIVATE -DJEMALLOC_DEBUG=1)
target_compile_definitions(_jemalloc PRIVATE -DJEMALLOC_DEBUG=1)
endif ()

target_compile_definitions(jemalloc PRIVATE -DJEMALLOC_PROF=1)
target_compile_definitions(_jemalloc PRIVATE -DJEMALLOC_PROF=1)

if (USE_UNWIND)
target_compile_definitions (jemalloc PRIVATE -DJEMALLOC_PROF_LIBUNWIND=1)
target_link_libraries (jemalloc PRIVATE unwind)
target_compile_definitions (_jemalloc PRIVATE -DJEMALLOC_PROF_LIBUNWIND=1)
target_link_libraries (_jemalloc PRIVATE unwind)
endif ()

target_compile_options(jemalloc PRIVATE -Wno-redundant-decls)
target_compile_options(_jemalloc PRIVATE -Wno-redundant-decls)
# for RTLD_NEXT
target_compile_options(jemalloc PRIVATE -D_GNU_SOURCE)
target_compile_options(_jemalloc PRIVATE -D_GNU_SOURCE)

add_library(ch_contrib::jemalloc ALIAS jemalloc)
add_library(ch_contrib::jemalloc ALIAS _jemalloc)
@@ -558,10 +558,10 @@ add_custom_target(
VERBATIM
)

add_library(krb5)
add_library(_krb5)

add_dependencies(
krb5
_krb5
ERRMAP_H
ERROR_MAP_H
KRB_5_H
@@ -579,7 +579,7 @@ if(CMAKE_SYSTEM_NAME MATCHES "Darwin")
list(APPEND ALL_SRCS "${CMAKE_CURRENT_BINARY_DIR}/include_private/kcmrpc.c")
endif()

target_sources(krb5 PRIVATE
target_sources(_krb5 PRIVATE
${ALL_SRCS}
)

@@ -651,12 +651,12 @@ add_custom_command(

target_include_directories(krb5 SYSTEM BEFORE PUBLIC
target_include_directories(_krb5 SYSTEM BEFORE PUBLIC
"${KRB5_SOURCE_DIR}/include"
"${CMAKE_CURRENT_BINARY_DIR}/include"
)

target_include_directories(krb5 PRIVATE
target_include_directories(_krb5 PRIVATE
"${CMAKE_CURRENT_BINARY_DIR}/include_private" # For autoconf.h and other generated headers.
${KRB5_SOURCE_DIR}
"${KRB5_SOURCE_DIR}/include"
@@ -678,7 +678,7 @@ target_include_directories(krb5 PRIVATE
"${KRB5_SOURCE_DIR}/lib/krb5/os"
)

target_compile_definitions(krb5 PRIVATE
target_compile_definitions(_krb5 PRIVATE
KRB5_PRIVATE
_GSS_STATIC_LINK=1
KRB5_DEPRECATED=1
@@ -688,6 +688,6 @@ target_compile_definitions(krb5 PRIVATE
LIBDIR="/usr/local/lib"
)

target_link_libraries(krb5 PRIVATE OpenSSL::Crypto OpenSSL::SSL)
target_link_libraries(_krb5 PRIVATE OpenSSL::Crypto OpenSSL::SSL)

add_library(ch_contrib::krb5 ALIAS krb5)
add_library(ch_contrib::krb5 ALIAS _krb5)
@@ -5,6 +5,6 @@ set(SRCS
"${LIBRARY_DIR}/src/RdrLemmatizer.cpp"
)

add_library(lemmagen STATIC ${SRCS})
target_include_directories(lemmagen SYSTEM PUBLIC "${LEMMAGEN_INCLUDE_DIR}")
add_library(ch_contrib::lemmagen ALIAS lemmagen)
add_library(_lemmagen STATIC ${SRCS})
target_include_directories(_lemmagen SYSTEM PUBLIC "${LEMMAGEN_INCLUDE_DIR}")
add_library(ch_contrib::lemmagen ALIAS _lemmagen)
@@ -23,12 +23,12 @@ set (SRCS
"${LIBRARY_DIR}/libcpuid/recog_intel.c"
)

add_library (cpuid ${SRCS})
add_library (_cpuid ${SRCS})

target_include_directories (cpuid SYSTEM PUBLIC "${LIBRARY_DIR}")
target_compile_definitions (cpuid PRIVATE VERSION="v0.4.1")
target_include_directories (_cpuid SYSTEM PUBLIC "${LIBRARY_DIR}")
target_compile_definitions (_cpuid PRIVATE VERSION="v0.4.1")
if (COMPILER_CLANG)
target_compile_options (cpuid PRIVATE -Wno-reserved-id-macro)
target_compile_options (_cpuid PRIVATE -Wno-reserved-id-macro)
endif ()

add_library(ch_contrib::cpuid ALIAS cpuid)
add_library(ch_contrib::cpuid ALIAS _cpuid)
@@ -1,2 +1,3 @@
add_library (libdivide INTERFACE)
target_include_directories (libdivide SYSTEM BEFORE INTERFACE .)
add_library (_libdivide INTERFACE)
target_include_directories (_libdivide SYSTEM BEFORE INTERFACE .)
add_library (ch_contrib::libdivide ALIAS _libdivide)
@@ -98,19 +98,19 @@ if (TARGET ch_contrib::krb5)
${SRC_DIR}/gssapi/server.c)
endif()

add_library(gsasl ${SRCS})
add_library(_gsasl ${SRCS})

target_include_directories(gsasl PUBLIC ${SRC_DIR})
target_include_directories(gsasl PUBLIC ${SRC_DIR}/gl)
target_include_directories(gsasl PUBLIC ${SRC_DIR}/src)
target_include_directories(gsasl PUBLIC ${SRC_DIR}/digest-md5)
target_include_directories(gsasl PUBLIC "${ClickHouse_SOURCE_DIR}/contrib/libgsasl-cmake/linux_x86_64/include")
target_include_directories(_gsasl PUBLIC ${SRC_DIR})
target_include_directories(_gsasl PUBLIC ${SRC_DIR}/gl)
target_include_directories(_gsasl PUBLIC ${SRC_DIR}/src)
target_include_directories(_gsasl PUBLIC ${SRC_DIR}/digest-md5)
target_include_directories(_gsasl PUBLIC "${ClickHouse_SOURCE_DIR}/contrib/libgsasl-cmake/linux_x86_64/include")

target_compile_definitions (gsasl PRIVATE HAVE_CONFIG_H=1)
target_compile_definitions(_gsasl PRIVATE HAVE_CONFIG_H=1)

if (TARGET ch_contrib::krb5)
target_link_libraries(gsasl PUBLIC ch_contrib::krb5)
target_compile_definitions (gsasl PRIVATE HAVE_GSSAPI_H=1 USE_GSSAPI=1)
target_link_libraries(_gsasl PUBLIC ch_contrib::krb5)
target_compile_definitions(_gsasl PRIVATE HAVE_GSSAPI_H=1 USE_GSSAPI=1)
endif()

add_library(ch_contrib::gsasl ALIAS gsasl)
add_library(ch_contrib::gsasl ALIAS _gsasl)
@@ -2,5 +2,6 @@ set (SRCS
src/metrohash64.cpp
src/metrohash128.cpp
)
add_library(metrohash ${SRCS})
target_include_directories(metrohash PUBLIC src)
add_library(_metrohash ${SRCS})
target_include_directories(_metrohash PUBLIC src)
add_library(ch_contrib::metrohash ALIAS _metrohash)
@@ -57,12 +57,12 @@ set(SRCS
"${LIBPQ_SOURCE_DIR}/port/explicit_bzero.c"
)

add_library(libpq ${SRCS})
add_library(_libpq ${SRCS})

target_include_directories (libpq SYSTEM PUBLIC ${LIBPQ_SOURCE_DIR})
target_include_directories (libpq SYSTEM PUBLIC "${LIBPQ_SOURCE_DIR}/include")
target_include_directories (libpq SYSTEM PRIVATE "${LIBPQ_SOURCE_DIR}/configs")
target_include_directories (_libpq SYSTEM PUBLIC ${LIBPQ_SOURCE_DIR})
target_include_directories (_libpq SYSTEM PUBLIC "${LIBPQ_SOURCE_DIR}/include")
target_include_directories (_libpq SYSTEM PRIVATE "${LIBPQ_SOURCE_DIR}/configs")

target_link_libraries (libpq PRIVATE ssl)
target_link_libraries (_libpq PRIVATE OpenSSL::SSL)

add_library(ch_contrib::libpq ALIAS libpq)
add_library(ch_contrib::libpq ALIAS _libpq)
@@ -70,9 +70,9 @@ set (HDRS
"${LIBRARY_DIR}/include/pqxx/zview.hxx"
)

add_library(libpqxx ${SRCS} ${HDRS})
add_library(_libpqxx ${SRCS} ${HDRS})

target_link_libraries(libpqxx PUBLIC ch_contrib::libpq)
target_include_directories (libpqxx SYSTEM BEFORE PUBLIC "${LIBRARY_DIR}/include")
target_link_libraries(_libpqxx PUBLIC ch_contrib::libpq)
target_include_directories (_libpqxx SYSTEM BEFORE PUBLIC "${LIBRARY_DIR}/include")

add_library(ch_contrib::libpqxx ALIAS libpqxx)
add_library(ch_contrib::libpqxx ALIAS _libpqxx)
@@ -27,6 +27,6 @@ FOREACH ( LINE ${_CONTENT} )
endforeach ()

# all the sources parsed. Now just add the lib
add_library ( stemmer STATIC ${_SOURCES} ${_HEADERS} )
target_include_directories (stemmer SYSTEM PUBLIC "${STEMMER_INCLUDE_DIR}")
add_library(ch_contrib::stemmer ALIAS stemmer)
add_library(_stemmer STATIC ${_SOURCES} ${_HEADERS} )
target_include_directories(_stemmer SYSTEM PUBLIC "${STEMMER_INCLUDE_DIR}")
add_library(ch_contrib::stemmer ALIAS _stemmer)
|
@ -50,14 +50,14 @@ set(SRCS
|
||||
"${LIBXML2_SOURCE_DIR}/schematron.c"
|
||||
"${LIBXML2_SOURCE_DIR}/xzlib.c"
|
||||
)
|
||||
add_library(libxml2 ${SRCS})
|
||||
add_library(_libxml2 ${SRCS})
|
||||
|
||||
target_link_libraries(libxml2 PRIVATE ch_contrib::zlib)
|
||||
target_link_libraries(_libxml2 PRIVATE ch_contrib::zlib)
|
||||
if(M_LIBRARY)
|
||||
target_link_libraries(libxml2 PRIVATE ${M_LIBRARY})
|
||||
target_link_libraries(_libxml2 PRIVATE ${M_LIBRARY})
|
||||
endif()
|
||||
|
||||
target_include_directories(libxml2 BEFORE PUBLIC "${CMAKE_CURRENT_SOURCE_DIR}/linux_x86_64/include")
|
||||
target_include_directories(libxml2 BEFORE PUBLIC "${LIBXML2_SOURCE_DIR}/include")
|
||||
target_include_directories(_libxml2 BEFORE PUBLIC "${CMAKE_CURRENT_SOURCE_DIR}/linux_x86_64/include")
|
||||
target_include_directories(_libxml2 BEFORE PUBLIC "${LIBXML2_SOURCE_DIR}/include")
|
||||
|
||||
add_library(ch_contrib::libxml2 ALIAS libxml2)
|
||||
add_library(ch_contrib::libxml2 ALIAS _libxml2)
|
||||
|
@@ -1,4 +1,4 @@
set (LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/magic_enum")
add_library (magic_enum INTERFACE)
target_include_directories(magic_enum SYSTEM INTERFACE ${LIBRARY_DIR}/include)
add_library(ch_contrib::magic_enum ALIAS magic_enum)
set(LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/magic_enum")
add_library(_magic_enum INTERFACE)
target_include_directories(_magic_enum SYSTEM INTERFACE ${LIBRARY_DIR}/include)
add_library(ch_contrib::magic_enum ALIAS _magic_enum)
@@ -239,12 +239,12 @@ endif()
set(LIBMARIADB_SOURCES ${LIBMARIADB_SOURCES} ${CC_SOURCE_DIR}/libmariadb/mariadb_async.c ${CC_SOURCE_DIR}/libmariadb/ma_context.c)

add_library(mariadbclient STATIC ${LIBMARIADB_SOURCES})
target_link_libraries(mariadbclient ${SYSTEM_LIBS})
add_library(_mariadbclient STATIC ${LIBMARIADB_SOURCES})
target_link_libraries(_mariadbclient ${SYSTEM_LIBS})

target_include_directories(mariadbclient PRIVATE ${CC_BINARY_DIR}/include-private)
target_include_directories(mariadbclient SYSTEM PUBLIC ${CC_BINARY_DIR}/include-public ${CC_SOURCE_DIR}/include ${CC_SOURCE_DIR}/libmariadb)
target_include_directories(_mariadbclient PRIVATE ${CC_BINARY_DIR}/include-private)
target_include_directories(_mariadbclient SYSTEM PUBLIC ${CC_BINARY_DIR}/include-public ${CC_SOURCE_DIR}/include ${CC_SOURCE_DIR}/libmariadb)

set_target_properties(mariadbclient PROPERTIES IMPORTED_INTERFACE_LINK_LIBRARIES "${SYSTEM_LIBS}")
set_target_properties(_mariadbclient PROPERTIES IMPORTED_INTERFACE_LINK_LIBRARIES "${SYSTEM_LIBS}")

add_library(ch_contrib::mariadbclient ALIAS mariadbclient)
add_library(ch_contrib::mariadbclient ALIAS _mariadbclient)
@@ -1,7 +1,8 @@
add_library(murmurhash
add_library(_murmurhash
src/MurmurHash2.cpp
src/MurmurHash3.cpp
include/MurmurHash2.h
include/MurmurHash3.h)

target_include_directories (murmurhash PUBLIC include)
target_include_directories(_murmurhash PUBLIC include)
add_library(ch_contrib::murmurhash ALIAS _murmurhash)
@@ -12,8 +12,7 @@ set (SRCS
"${LIBRARY_DIR}/nanodbc/nanodbc.cpp"
)

add_library(nanodbc ${SRCS})

target_link_libraries (nanodbc PUBLIC ch_contrib::unixodbc)
target_include_directories (nanodbc SYSTEM PUBLIC "${LIBRARY_DIR}/")
add_library(ch_contrib::nanodbc ALIAS nanodbc)
add_library(_nanodbc ${SRCS})
target_link_libraries(_nanodbc PUBLIC ch_contrib::unixodbc)
target_include_directories(_nanodbc SYSTEM PUBLIC "${LIBRARY_DIR}/")
add_library(ch_contrib::nanodbc ALIAS _nanodbc)
@@ -29,7 +29,7 @@ if (ENABLE_SSL)

target_compile_options (_poco_crypto PRIVATE -Wno-newline-eof)
target_include_directories (_poco_crypto SYSTEM PUBLIC "${LIBRARY_DIR}/Crypto/include")
target_link_libraries (_poco_crypto PUBLIC Poco::Foundation ssl crypto)
target_link_libraries (_poco_crypto PUBLIC Poco::Foundation OpenSSL::SSL OpenSSL::Crypto)

message (STATUS "Using Poco::Crypto")
else ()
@@ -63,13 +63,13 @@ set(libprotobuf_lite_files
${protobuf_source_dir}/src/google/protobuf/wire_format_lite.cc
)

add_library(libprotobuf-lite ${libprotobuf_lite_files})
target_link_libraries(libprotobuf-lite pthread)
add_library(_libprotobuf-lite ${libprotobuf_lite_files})
target_link_libraries(_libprotobuf-lite pthread)
if(${CMAKE_SYSTEM_NAME} STREQUAL "Android")
target_link_libraries(libprotobuf-lite log)
target_link_libraries(_libprotobuf-lite log)
endif()
target_include_directories(libprotobuf-lite SYSTEM PUBLIC ${protobuf_source_dir}/src)
add_library(protobuf::libprotobuf-lite ALIAS libprotobuf-lite)
target_include_directories(_libprotobuf-lite SYSTEM PUBLIC ${protobuf_source_dir}/src)
add_library(protobuf::libprotobuf-lite ALIAS _libprotobuf-lite)

set(libprotobuf_files
@@ -127,17 +127,17 @@ set(libprotobuf_files
${protobuf_source_dir}/src/google/protobuf/wrappers.pb.cc
)

add_library(libprotobuf ${libprotobuf_lite_files} ${libprotobuf_files})
add_library(_libprotobuf ${libprotobuf_lite_files} ${libprotobuf_files})
if (ENABLE_FUZZING)
target_compile_options(libprotobuf PRIVATE "-fsanitize-recover=all")
target_compile_options(_libprotobuf PRIVATE "-fsanitize-recover=all")
endif()
target_link_libraries(libprotobuf pthread)
target_link_libraries(libprotobuf ch_contrib::zlib)
target_link_libraries(_libprotobuf pthread)
target_link_libraries(_libprotobuf ch_contrib::zlib)
if(${CMAKE_SYSTEM_NAME} STREQUAL "Android")
target_link_libraries(libprotobuf log)
target_link_libraries(_libprotobuf log)
endif()
target_include_directories(libprotobuf SYSTEM PUBLIC ${protobuf_source_dir}/src)
add_library(protobuf::libprotobuf ALIAS libprotobuf)
target_include_directories(_libprotobuf SYSTEM PUBLIC ${protobuf_source_dir}/src)
add_library(protobuf::libprotobuf ALIAS _libprotobuf)

set(libprotoc_files
@@ -226,9 +226,9 @@ set(libprotoc_files
${protobuf_source_dir}/src/google/protobuf/compiler/zip_writer.cc
)

add_library(libprotoc ${libprotoc_files})
target_link_libraries(libprotoc libprotobuf)
add_library(protobuf::libprotoc ALIAS libprotoc)
add_library(_libprotoc ${libprotoc_files})
target_link_libraries(_libprotoc _libprotobuf)
add_library(protobuf::libprotoc ALIAS _libprotoc)

set(protoc_files ${protobuf_source_dir}/src/google/protobuf/compiler/main.cc)

@@ -236,7 +236,7 @@ if (CMAKE_HOST_SYSTEM_NAME STREQUAL CMAKE_SYSTEM_NAME
AND CMAKE_HOST_SYSTEM_PROCESSOR STREQUAL CMAKE_SYSTEM_PROCESSOR)

add_executable(protoc ${protoc_files})
target_link_libraries(protoc libprotoc libprotobuf pthread)
target_link_libraries(protoc _libprotoc _libprotobuf pthread)
add_executable(protobuf::protoc ALIAS protoc)

if (ENABLE_FUZZING)
@@ -319,11 +319,11 @@ endif ()
include("${ClickHouse_SOURCE_DIR}/contrib/protobuf-cmake/protobuf_generate.cmake")

add_library(_protobuf INTERFACE)
target_link_libraries(_protobuf INTERFACE libprotobuf)
target_link_libraries(_protobuf INTERFACE _libprotobuf)
target_include_directories(_protobuf INTERFACE "${Protobuf_INCLUDE_DIR}")
add_library(ch_contrib::protobuf ALIAS _protobuf)

add_library(_protoc INTERFACE)
target_link_libraries(_protoc INTERFACE libprotoc libprotobuf)
target_link_libraries(_protoc INTERFACE _libprotoc _libprotobuf)
target_include_directories(_protoc INTERFACE "${Protobuf_INCLUDE_DIR}")
add_library(ch_contrib::protoc ALIAS _protoc)
@@ -71,5 +71,7 @@ foreach (FILENAME mutex.h)
add_dependencies (re2_st transform_${FILENAME})
endforeach ()

# NOTE: you should not change name of library here, since it is used for PVS
# (see docker/test/pvs/Dockerfile), to generate required header (see above)
add_library(ch_contrib::re2 ALIAS re2)
add_library(ch_contrib::re2_st ALIAS re2_st)
@@ -20,11 +20,11 @@ set(SRCS
"${LIBRARY_DIR}/src/wcwidth.cpp"
)

add_library (replxx ${SRCS})
target_include_directories(replxx SYSTEM PUBLIC "${LIBRARY_DIR}/include")
add_library (_replxx ${SRCS})
target_include_directories(_replxx SYSTEM PUBLIC "${LIBRARY_DIR}/include")

if (COMPILER_CLANG)
target_compile_options(replxx PRIVATE -Wno-documentation)
target_compile_options(_replxx PRIVATE -Wno-documentation)
endif ()

add_library(ch_contrib::replxx ALIAS replxx)
add_library(ch_contrib::replxx ALIAS _replxx)
@@ -49,17 +49,16 @@ else()
list(APPEND SRCS ${SRC_DIR}/src/modulefinder/sentry_modulefinder_linux.c)
endif()

add_library(sentry ${SRCS})
add_library(sentry::sentry ALIAS sentry)
add_library(_sentry ${SRCS})

if(BUILD_SHARED_LIBS)
target_compile_definitions(sentry PRIVATE SENTRY_BUILD_SHARED)
target_compile_definitions(_sentry PRIVATE SENTRY_BUILD_SHARED)
else()
target_compile_definitions(sentry PUBLIC SENTRY_BUILD_STATIC)
target_compile_definitions(_sentry PUBLIC SENTRY_BUILD_STATIC)
endif()

target_link_libraries(sentry PRIVATE ch_contrib::curl pthread)
target_include_directories(sentry PUBLIC "${SRC_DIR}/include" PRIVATE "${SRC_DIR}/src")
target_compile_definitions(sentry PRIVATE SENTRY_WITH_INPROC_BACKEND SIZEOF_LONG=8)
target_link_libraries(_sentry PRIVATE ch_contrib::curl pthread)
target_include_directories(_sentry PUBLIC "${SRC_DIR}/include" PRIVATE "${SRC_DIR}/src")
target_compile_definitions(_sentry PRIVATE SENTRY_WITH_INPROC_BACKEND SIZEOF_LONG=8)

add_library(ch_contrib::sentry ALIAS sentry)
add_library(ch_contrib::sentry ALIAS _sentry)
@@ -9,6 +9,6 @@ set (LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/sqlite-amalgamation")

set(SRCS ${LIBRARY_DIR}/sqlite3.c)

add_library(sqlite ${SRCS})
target_include_directories(sqlite SYSTEM PUBLIC "${LIBRARY_DIR}")
add_library(ch_contrib::sqlite ALIAS sqlite)
add_library(_sqlite ${SRCS})
target_include_directories(_sqlite SYSTEM PUBLIC "${LIBRARY_DIR}")
add_library(ch_contrib::sqlite ALIAS _sqlite)
@@ -1,9 +0,0 @@
# The stats is a header-only library of probability density functions,
# cumulative distribution functions, quantile functions, and random sampling methods.
set(STATS_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/stats/include")
set(GCEM_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/gcem/include")

add_library(stats INTERFACE)

target_include_directories(stats SYSTEM INTERFACE ${STATS_INCLUDE_DIR})
target_include_directories(stats SYSTEM INTERFACE ${GCEM_INCLUDE_DIR})
@@ -29,9 +29,9 @@ set (SRCS_LTDL
"${LIBRARY_DIR}/libltdl/loaders/preopen.c"
)

add_library (ltdl ${SRCS_LTDL})
add_library (_ltdl ${SRCS_LTDL})

target_include_directories(ltdl
target_include_directories(_ltdl
SYSTEM
PRIVATE
linux_x86_64/libltdl
@@ -39,8 +39,8 @@ target_include_directories(ltdl
"${LIBRARY_DIR}/libltdl"
"${LIBRARY_DIR}/libltdl/libltdl"
)
target_compile_definitions(ltdl PRIVATE -DHAVE_CONFIG_H -DLTDL -DLTDLOPEN=libltdlc)
target_compile_options(ltdl PRIVATE -Wno-constant-logical-operand -Wno-unknown-warning-option -O2)
target_compile_definitions(_ltdl PRIVATE -DHAVE_CONFIG_H -DLTDL -DLTDLOPEN=libltdlc)
target_compile_options(_ltdl PRIVATE -Wno-constant-logical-operand -Wno-unknown-warning-option -O2)

# odbc

@@ -279,13 +279,13 @@ set (SRCS
"${LIBRARY_DIR}/odbcinst/SQLWritePrivateProfileString.c"
)

add_library (unixodbc ${SRCS})
add_library (_unixodbc ${SRCS})

target_link_libraries (unixodbc PRIVATE ltdl)
target_link_libraries (_unixodbc PRIVATE _ltdl)

# SYSTEM_FILE_PATH was changed to /etc

target_include_directories (unixodbc
target_include_directories (_unixodbc
SYSTEM
PRIVATE
linux_x86_64/private
@@ -293,8 +293,8 @@ target_include_directories (unixodbc
linux_x86_64
"${LIBRARY_DIR}/include"
)
target_compile_definitions (unixodbc PRIVATE -DHAVE_CONFIG_H)
target_compile_options (unixodbc
target_compile_definitions (_unixodbc PRIVATE -DHAVE_CONFIG_H)
target_compile_options (_unixodbc
PRIVATE
-Wno-dangling-else
-Wno-parentheses
@@ -304,4 +304,4 @@ target_compile_options (unixodbc
-O2
)

add_library (ch_contrib::unixodbc ALIAS unixodbc)
add_library (ch_contrib::unixodbc ALIAS _unixodbc)
@@ -6,7 +6,7 @@ set(SRCS
"${LIBRARY_DIR}/wnb/core/wordnet.cc"
)

add_library(wnb ${SRCS})
target_link_libraries(wnb PRIVATE boost::headers_only boost::graph)
target_include_directories(wnb SYSTEM PUBLIC "${LIBRARY_DIR}")
add_library(ch_contrib::wnb ALIAS wnb)
add_library(_wnb ${SRCS})
target_link_libraries(_wnb PRIVATE boost::headers_only boost::graph)
target_include_directories(_wnb SYSTEM PUBLIC "${LIBRARY_DIR}")
add_library(ch_contrib::wnb ALIAS _wnb)
@@ -39,9 +39,9 @@ set (SRCS
${LIBRARY_DIR}/src/scantag.cpp
)

add_library (yaml-cpp ${SRCS})
add_library (_yaml_cpp ${SRCS})

target_include_directories(yaml-cpp PRIVATE ${LIBRARY_DIR}/include/yaml-cpp)
target_include_directories(yaml-cpp SYSTEM BEFORE PUBLIC ${LIBRARY_DIR}/include)
target_include_directories(_yaml_cpp PRIVATE ${LIBRARY_DIR}/include/yaml-cpp)
target_include_directories(_yaml_cpp SYSTEM BEFORE PUBLIC ${LIBRARY_DIR}/include)

add_library (ch_contrib::yaml_cpp ALIAS yaml-cpp)
add_library (ch_contrib::yaml_cpp ALIAS _yaml_cpp)
@@ -131,7 +131,6 @@ set(ZLIB_SRCS
set(ZLIB_ALL_SRCS ${ZLIB_SRCS} ${ZLIB_ARCH_SRCS})

add_library(_zlib ${ZLIB_ALL_SRCS})
add_library(zlibstatic ALIAS _zlib)
add_library(ch_contrib::zlib ALIAS _zlib)

# https://github.com/zlib-ng/zlib-ng/pull/733
@@ -105,10 +105,13 @@ toc_title: Adopters
| <a href="https://www.mindsdb.com/" class="favicon">MindsDB</a> | Machine Learning | Main Product | — | — | [Official Website](https://www.mindsdb.com/blog/machine-learning-models-as-tables-in-ch) |
| <a href="https://mux.com/" class="favicon">MUX</a> | Online Video | Video Analytics | — | — | [Talk in English, August 2019](https://altinity.com/presentations/2019/8/13/how-clickhouse-became-the-default-analytics-database-for-mux/) |
| <a href="https://www.mgid.com/" class="favicon">MGID</a> | Ad network | Web-analytics | — | — | [Blog post in Russian, April 2020](http://gs-studio.com/news-about-it/32777----clickhouse---c) |
| <a href="https://mu.se/" class="favicon">Muse Group</a> | Music Software | Performance Monitoring | — | — | [Blog post in Russian, January 2021](https://habr.com/en/post/647079/) |
| <a href="https://www.netskope.com/" class="favicon">Netskope</a> | Network Security | — | — | — | [Job advertisement, March 2021](https://www.mendeley.com/careers/job/senior-software-developer-backend-developer-1346348) |
| <a href="https://niclabs.cl/" class="favicon">NIC Labs</a> | Network Monitoring | RaTA-DNS | — | — | [Blog post, March 2021](https://niclabs.cl/ratadns/2021/03/Clickhouse) |
| <a href="https://nlmk.com/en/" class="favicon">NLMK</a> | Steel | Monitoring | — | — | [Article in Russian, Jan 2022](https://habr.com/en/company/nlmk/blog/645943/) |
| <a href="https://getnoc.com/" class="favicon">NOC Project</a> | Network Monitoring | Analytics | Main Product | — | [Official Website](https://getnoc.com/features/big-data/) |
| <a href="https://www.noction.com" class="favicon">Noction</a> | Network Technology | Main Product | — | — | [Official Website](https://www.noction.com/news/irp-3-11-remote-triggered-blackholing-capability)
| <a href="https://www.ntop.org/" class="favicon">ntop</a> | Network Monitoring | Monitoring | — | — | [Official website, Jan 2022](https://www.ntop.org/ntop/historical-traffic-analysis-at-scale-using-clickhouse-with-ntopng/) |
| <a href="https://www.nuna.com/" class="favicon">Nuna Inc.</a> | Health Data Analytics | — | — | — | [Talk in English, July 2020](https://youtu.be/GMiXCMFDMow?t=170) |
| <a href="https://ok.ru" class="favicon">Ok.ru</a> | Social Network | — | 72 servers | 810 TB compressed, 50bn rows/day, 1.5 TB/day | [SmartData conference, October 2021](https://assets.ctfassets.net/oxjq45e8ilak/4JPHkbJenLgZhBGGyyonFP/57472ec6987003ec4078d0941740703b/____________________ClickHouse_______________________.pdf) |
| <a href="https://omnicomm.ru/" class="favicon">Omnicomm</a> | Transportation Monitoring | — | — | — | [Facebook post, October 2021](https://www.facebook.com/OmnicommTeam/posts/2824479777774500) |
@@ -190,5 +193,6 @@ toc_title: Adopters
| <a href="https://promo.croc.ru/digitalworker" class="favicon">Цифровой Рабочий</a> | Industrial IoT, Analytics | — | — | — | [Blog post in Russian, March 2021](https://habr.com/en/company/croc/blog/548018/) |
| <a href="https://shop.okraina.ru/" class="favicon">ООО «МПЗ Богородский»</a> | Agriculture | — | — | — | [Article in Russian, November 2020](https://cloud.yandex.ru/cases/okraina) |
| <a href="https://domclick.ru/" class="favicon">ДомКлик</a> | Real Estate | — | — | — | [Article in Russian, October 2021](https://habr.com/ru/company/domclick/blog/585936/) |
| <a href="https://magenta-technology.ru/sistema-upravleniya-marshrutami-inkassacii-as-strela/" class="favicon">АС "Стрела"</a> | Transportation | — | — | — | [Job posting, Jan 2022](https://vk.com/topic-111905078_35689124?post=3553) |

[Original article](https://clickhouse.com/docs/en/introduction/adopters/) <!--hide-->
@ -93,6 +93,8 @@ For [String](../../sql-reference/data-types/string.md) and [FixedString](../../s
|
||||
|
||||
Values of [Float](../../sql-reference/data-types/float.md) and [Decimal](../../sql-reference/data-types/decimal.md) types are encoded as their representation in memory. As we support little-endian architecture, they are encoded in little-endian. Zero leading/trailing bytes are not omitted.
|
||||
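For instance, the little-endian encoding can be seen by hex-encoding a `Float64` literal (an illustrative query, not from the original text; the expected value assumes a little-endian machine):

``` sql
SELECT hex(toFloat64(1)) AS hex_float64;
-- expected: 000000000000F03F, the bytes of 1.0 (0x3FF0000000000000) in little-endian order
```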
|
||||
Values of the [UUID](../data-types/uuid.md) type are encoded as a big-endian string.
|
||||
|
||||
**Arguments**
|
||||
|
||||
- `arg` — A value to convert to hexadecimal. Types: [String](../../sql-reference/data-types/string.md), [UInt](../../sql-reference/data-types/int-uint.md), [Float](../../sql-reference/data-types/float.md), [Decimal](../../sql-reference/data-types/decimal.md), [Date](../../sql-reference/data-types/date.md) or [DateTime](../../sql-reference/data-types/datetime.md).
|
||||
@ -147,6 +149,21 @@ Result:
|
||||
└──────────────────┘
|
||||
```
|
||||
|
||||
Query:
|
||||
|
||||
``` sql
|
||||
SELECT lower(hex(toUUID('61f0c404-5cb3-11e7-907b-a6006ad3dba0'))) as uuid_hex
|
||||
```
|
||||
|
||||
Result:
|
||||
|
||||
``` text
|
||||
┌─uuid_hex─────────────────────────┐
|
||||
│ 61f0c4045cb311e7907ba6006ad3dba0 │
|
||||
└──────────────────────────────────┘
|
||||
```
|
||||
|
||||
|
||||
## unhex {#unhexstr}
|
||||
|
||||
Performs the opposite operation of [hex](#hex). It interprets each pair of hexadecimal digits (in the argument) as a number and converts it to the byte represented by the number. The return value is a binary string (BLOB).
|
||||
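A minimal round-trip sketch (not part of the original text) showing that `unhex` inverts `hex` for string data:

``` sql
SELECT unhex(hex('ClickHouse')) AS round_trip;
-- returns the original string 'ClickHouse'
```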
@ -224,6 +241,8 @@ For [String](../../sql-reference/data-types/string.md) and [FixedString](../../s
|
||||
|
||||
Values of [Float](../../sql-reference/data-types/float.md) and [Decimal](../../sql-reference/data-types/decimal.md) types are encoded as their in-memory representation. Since little-endian architectures are supported, the encoding is little-endian. Leading and trailing zero bytes are not omitted.
|
||||
|
||||
Values of the [UUID](../data-types/uuid.md) type are encoded as a big-endian string.
|
||||
|
||||
**Arguments**
|
||||
|
||||
- `arg` — A value to convert to binary. [String](../../sql-reference/data-types/string.md), [FixedString](../../sql-reference/data-types/fixedstring.md), [UInt](../../sql-reference/data-types/int-uint.md), [Float](../../sql-reference/data-types/float.md), [Decimal](../../sql-reference/data-types/decimal.md), [Date](../../sql-reference/data-types/date.md), or [DateTime](../../sql-reference/data-types/datetime.md).
|
||||
@ -280,6 +299,21 @@ Result:
|
||||
└──────────────────────────────────────────────────────────────────┘
|
||||
```
|
||||
|
||||
Query:
|
||||
|
||||
``` sql
|
||||
SELECT bin(toUUID('61f0c404-5cb3-11e7-907b-a6006ad3dba0')) as bin_uuid
|
||||
```
|
||||
|
||||
Result:
|
||||
|
||||
``` text
|
||||
┌─bin_uuid─────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────┐
|
||||
│ 01100001111100001100010000000100010111001011001100010001111001111001000001111011101001100000000001101010110100111101101110100000 │
|
||||
└──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────┘
|
||||
```
|
||||
|
||||
|
||||
## unbin {#unbinstr}
|
||||
|
||||
Interprets each group of eight binary digits in the argument as a number and converts it to the byte represented by that number. The function performs the opposite operation to [bin](#bin).
|
||||
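A minimal round-trip sketch (not part of the original text) showing that `unbin` inverts `bin` for string data:

``` sql
SELECT unbin(bin('ClickHouse')) AS round_trip;
-- returns the original string 'ClickHouse'
```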
|
@ -156,6 +156,40 @@ Result:
|
||||
└─────────────┘
|
||||
```
|
||||
|
||||
## h3EdgeLengthKm {#h3edgelengthkm}
|
||||
|
||||
Calculates the average length of the [H3](#h3index) hexagon edge in kilometers.
|
||||
|
||||
**Syntax**
|
||||
|
||||
``` sql
|
||||
h3EdgeLengthKm(resolution)
|
||||
```
|
||||
|
||||
**Parameter**
|
||||
|
||||
- `resolution` — Index resolution. Type: [UInt8](../../../sql-reference/data-types/int-uint.md). Range: `[0, 15]`.
|
||||
|
||||
**Returned values**
|
||||
|
||||
- The average length of the [H3](#h3index) hexagon edge in kilometers. Type: [Float64](../../../sql-reference/data-types/float.md).
|
||||
|
||||
**Example**
|
||||
|
||||
Query:
|
||||
|
||||
``` sql
|
||||
SELECT h3EdgeLengthKm(15) AS edgeLengthKm;
|
||||
```
|
||||
|
||||
Result:
|
||||
|
||||
``` text
|
||||
┌─edgeLengthKm─┐
|
||||
│ 0.000509713 │
|
||||
└──────────────┘
|
||||
```
|
||||
|
||||
## geoToH3 {#geotoh3}
|
||||
|
||||
Returns the [H3](#h3index) point index of the point `(lon, lat)` at the specified resolution.
|
||||
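For example (an illustrative query; the same call appears in the localized documentation further down):

``` sql
SELECT geoToH3(37.79506683, 55.71290588, 15) AS h3Index;
```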
@ -849,4 +883,147 @@ Result:
|
||||
└────────────────────┘
|
||||
```
|
||||
|
||||
## h3ExactEdgeLengthM {#h3exactedgelengthm}
|
||||
|
||||
Returns the exact edge length of the unidirectional edge represented by the input h3 index in meters.
|
||||
|
||||
**Syntax**
|
||||
|
||||
``` sql
|
||||
h3ExactEdgeLengthM(index)
|
||||
```
|
||||
|
||||
**Parameter**
|
||||
|
||||
- `index` — Hexagon index number. Type: [UInt64](../../../sql-reference/data-types/int-uint.md).
|
||||
|
||||
**Returned value**
|
||||
|
||||
- Exact edge length in meters.
|
||||
|
||||
Type: [Float64](../../../sql-reference/data-types/float.md).
|
||||
|
||||
**Example**
|
||||
|
||||
Query:
|
||||
|
||||
``` sql
|
||||
SELECT h3ExactEdgeLengthM(1310277011704381439) AS exactEdgeLengthM;
|
||||
```
|
||||
|
||||
Result:
|
||||
|
||||
``` text
|
||||
┌───exactEdgeLengthM─┐
|
||||
│ 195449.63163407316 │
|
||||
└────────────────────┘
|
||||
```
|
||||
|
||||
## h3ExactEdgeLengthKm {#h3exactedgelengthkm}
|
||||
|
||||
Returns the exact edge length of the unidirectional edge represented by the input h3 index in kilometers.
|
||||
|
||||
**Syntax**
|
||||
|
||||
``` sql
|
||||
h3ExactEdgeLengthKm(index)
|
||||
```
|
||||
|
||||
**Parameter**
|
||||
|
||||
- `index` — Hexagon index number. Type: [UInt64](../../../sql-reference/data-types/int-uint.md).
|
||||
|
||||
**Returned value**
|
||||
|
||||
- Exact edge length in kilometers.
|
||||
|
||||
Type: [Float64](../../../sql-reference/data-types/float.md).
|
||||
|
||||
**Example**
|
||||
|
||||
Query:
|
||||
|
||||
``` sql
|
||||
SELECT h3ExactEdgeLengthKm(1310277011704381439) AS exactEdgeLengthKm;
|
||||
```
|
||||
|
||||
Result:
|
||||
|
||||
``` text
|
||||
┌──exactEdgeLengthKm─┐
|
||||
│ 195.44963163407317 │
|
||||
└────────────────────┘
|
||||
```
|
||||
|
||||
## h3ExactEdgeLengthRads {#h3exactedgelengthrads}
|
||||
|
||||
Returns the exact edge length of the unidirectional edge represented by the input h3 index in radians.
|
||||
|
||||
**Syntax**
|
||||
|
||||
``` sql
|
||||
h3ExactEdgeLengthRads(index)
|
||||
```
|
||||
|
||||
**Parameter**
|
||||
|
||||
- `index` — Hexagon index number. Type: [UInt64](../../../sql-reference/data-types/int-uint.md).
|
||||
|
||||
**Returned value**
|
||||
|
||||
- Exact edge length in radians.
|
||||
|
||||
Type: [Float64](../../../sql-reference/data-types/float.md).
|
||||
|
||||
**Example**
|
||||
|
||||
Query:
|
||||
|
||||
``` sql
|
||||
SELECT h3ExactEdgeLengthRads(1310277011704381439) AS exactEdgeLengthRads;
|
||||
```
|
||||
|
||||
Result:
|
||||
|
||||
``` text
|
||||
┌──exactEdgeLengthRads─┐
|
||||
│ 0.030677980118976447 │
|
||||
└──────────────────────┘
|
||||
```
|
||||
|
||||
## h3NumHexagons {#h3numhexagons}
|
||||
|
||||
Returns the number of unique H3 indices at the given resolution.
|
||||
|
||||
**Syntax**
|
||||
|
||||
``` sql
|
||||
h3NumHexagons(resolution)
|
||||
```
|
||||
|
||||
**Parameter**
|
||||
|
||||
- `resolution` — Index resolution. Range: `[0, 15]`. Type: [UInt8](../../../sql-reference/data-types/int-uint.md).
|
||||
|
||||
**Returned value**
|
||||
|
||||
- Number of H3 indices.
|
||||
|
||||
Type: [Int64](../../../sql-reference/data-types/int-uint.md).
|
||||
|
||||
**Example**
|
||||
|
||||
Query:
|
||||
|
||||
``` sql
|
||||
SELECT h3NumHexagons(3) AS numHexagons;
|
||||
```
|
||||
|
||||
Result:
|
||||
|
||||
``` text
|
||||
┌─numHexagons─┐
|
||||
│ 41162 │
|
||||
└─────────────┘
|
||||
```
|
||||
[Original article](https://clickhouse.com/docs/en/sql-reference/functions/geo/h3) <!--hide-->
|
||||
|
@ -477,3 +477,74 @@ Result:
|
||||
└──────────┘
|
||||
```
|
||||
|
||||
## degrees(x) {#degreesx}
|
||||
|
||||
Converts the input value in radians to degrees.
|
||||
|
||||
**Syntax**
|
||||
|
||||
``` sql
|
||||
degrees(x)
|
||||
```
|
||||
|
||||
**Arguments**
|
||||
|
||||
- `x` — Input in radians. [Float64](../../sql-reference/data-types/float.md#float32-float64).
|
||||
|
||||
**Returned value**
|
||||
|
||||
- Value in degrees.
|
||||
|
||||
Type: [Float64](../../sql-reference/data-types/float.md#float32-float64).
|
||||
|
||||
**Example**
|
||||
|
||||
Query:
|
||||
|
||||
``` sql
|
||||
SELECT degrees(3.141592653589793);
|
||||
```
|
||||
|
||||
Result:
|
||||
|
||||
``` text
|
||||
┌─degrees(3.141592653589793)─┐
|
||||
│ 180 │
|
||||
└────────────────────────────┘
|
||||
```
|
||||
|
||||
## radians(x) {#radiansx}
|
||||
|
||||
Converts the input value in degrees to radians.
|
||||
|
||||
**Syntax**
|
||||
|
||||
``` sql
|
||||
radians(x)
|
||||
```
|
||||
|
||||
**Arguments**
|
||||
|
||||
- `x` — Input in degrees. [Float64](../../sql-reference/data-types/float.md#float32-float64).
|
||||
|
||||
**Returned value**
|
||||
|
||||
- Value in radians.
|
||||
|
||||
Type: [Float64](../../sql-reference/data-types/float.md#float32-float64).
|
||||
|
||||
**Example**
|
||||
|
||||
Query:
|
||||
|
||||
``` sql
|
||||
SELECT radians(180);
|
||||
```
|
||||
|
||||
Result:
|
||||
|
||||
``` text
|
||||
┌──────radians(180)─┐
|
||||
│ 3.141592653589793 │
|
||||
└───────────────────┘
|
||||
```
|
||||
|
@ -195,7 +195,7 @@ SELECT geoToH3(37.79506683, 55.71290588, 15) AS h3Index;
|
||||
|
||||
## h3ToGeo {#h3togeo}
|
||||
|
||||
Возвращает географические координаты долготы и широты, соответствующие указанному [H3](#h3index)-индексу.
|
||||
Возвращает географические координаты долготы и широты центра шестигранника, соответствующие указанному [H3](#h3index)-индексу.
|
||||
|
||||
**Синтаксис**
|
||||
|
||||
|
@ -70,7 +70,7 @@ SELECT and(NULL, 1, 10, -2);
|
||||
**Синтаксис**
|
||||
|
||||
``` sql
|
||||
and(val1, val2...)
|
||||
or(val1, val2...)
|
||||
```
|
||||
|
||||
Чтобы вычислять функцию `or` по короткой схеме, используйте настройку [short_circuit_function_evaluation](../../operations/settings/settings.md#short-circuit-function-evaluation). Если настройка включена, то выражение `vali` вычисляется только для строк, где условие `((NOT val1) AND (NOT val2) AND ... AND (NOT val{i-1}))` верно. Например, при выполнении запроса `SELECT or(number = 0, intDiv(1, number) != 0) FROM numbers(10)` не будет сгенерировано исключение из-за деления на ноль.
|
||||
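Например, запрос из примера выше можно выполнить так:

``` sql
SELECT or(number = 0, intDiv(1, number) != 0) FROM numbers(10);
```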
|
@ -1 +0,0 @@
|
||||
../../../en/faq/use-cases/index.md
|
18
docs/zh/faq/use-cases/index.md
Normal file
@ -0,0 +1,18 @@
|
||||
---
|
||||
title: 关于ClickHouse使用案例的问题
|
||||
toc_hidden_folder: true
|
||||
toc_priority: 2
|
||||
toc_title: 使用案例
|
||||
---
|
||||
|
||||
# 关于ClickHouse使用案例的问题 {#questions-about-clickhouse-use-cases}
|
||||
|
||||
问题:
|
||||
|
||||
- [我能把 ClickHouse 当做时序数据库来使用吗?](../../faq/use-cases/time-series.md)
|
||||
- [我能把 ClickHouse 当做Key-value 键值存储来使用吗?](../../faq/use-cases/key-value.md)
|
||||
|
||||
!!! info "没找到您所需要的内容?"
|
||||
请查看[其他常见问题类别](../../faq/index.md)或浏览左侧边栏中的主要文档文章。
|
||||
|
||||
{## [原始文档](https://clickhouse.com/docs/en/faq/use-cases/) ##}
|
@ -1 +0,0 @@
|
||||
../../../en/faq/use-cases/key-value.md
|
16
docs/zh/faq/use-cases/key-value.md
Normal file
@ -0,0 +1,16 @@
|
||||
---
|
||||
title: 我能把 ClickHouse 当做Key-value 键值存储来使用吗?
|
||||
toc_hidden: true
|
||||
toc_priority: 101
|
||||
---
|
||||
# 我能把 ClickHouse 当做Key-value 键值存储来使用吗? {#can-i-use-clickhouse-as-a-key-value-storage}
|
||||
|
||||
简短的回答是 **不能** 。键值类工作负载在**不适合**{.text-danger}使用 ClickHouse 的场景列表中名列前茅。它毕竟是一个 [OLAP](../../faq/general/olap.md) 系统,而市面上已经有很多优秀的键值存储系统。
|
||||
|
||||
然而,可能在某些情况下,使用ClickHouse进行类似键值的查询仍然是有意义的。通常,是一些低预算的产品,主要的工作负载是分析性的,很适合ClickHouse,但也有一些次要的过程需要一个键值模式,请求吞吐量不是很高,没有严格的延迟要求。如果你有无限的预算,你会为这样的次要工作负载安装一个次要的键值数据库,但实际上,多维护一个存储系统(监控、备份等)会有额外的成本,这可能是值得避免的。
|
||||
|
||||
如果你决定违背建议,对ClickHouse运行一些类似键值的查询,这里有一些提示。
|
||||
|
||||
- ClickHouse中点查询昂贵的关键原因是其稀疏的主索引[MergeTree表引擎家族](../../engines/table-engines/mergetree-family/mergetree.md)。这个索引不能指向每一行具体的数据,相反,它指向每N行,系统必须从邻近的N行扫描到所需的行,沿途读取过多的数据。在一个键值场景中,通过`index_granularity`的设置来减少N的值可能是有用的。
|
||||
- ClickHouse将每一列保存在一组单独的文件中,所以要组装一个完整的行,它需要通过这些文件中的每一个。它们的数量随着列数的增加而线性增加,所以在键值场景中,可能值得避免使用许多列,并将所有的有效数据放在一个单一的`String`列中,并以某种序列化格式(如JSON、Protobuf或任何有效的格式)进行编码。
|
||||
- 还有一种方法,使用[Join](../../engines/table-engines/special/join.md)表引擎代替正常的`MergeTree`表和[joinGet](../../sql-reference/functions/other-functions.md#joinget) 函数来检索数据。它可以提供更好的查询性能,但可能有一些可用性和可靠性问题。下面是一个[使用实例](https://github.com/ClickHouse/ClickHouse/blob/master/tests/queries/0_stateless/00800_versatile_storage_join.sql#L49-L51)。
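下面是上文提到的 Join 引擎加 `joinGet` 方式的一个极简示例(表名、列名仅为演示用的假设):

``` sql
CREATE TABLE kv (k UInt64, v String) ENGINE = Join(ANY, LEFT, k);
INSERT INTO kv VALUES (1, 'value1');
SELECT joinGet('kv', 'v', toUInt64(1)) AS v; -- 返回 'value1'
```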
|
@ -1,5 +1,5 @@
|
||||
---
|
||||
machine_translated: true
|
||||
machine_translated: false
|
||||
machine_translated_rev: 72537a2d527c63c07aa5d2361a8829f3895cf2bd
|
||||
toc_priority: 63
|
||||
toc_title: "\u7528\u6237\u8BBE\u7F6E"
|
||||
@ -7,12 +7,12 @@ toc_title: "\u7528\u6237\u8BBE\u7F6E"
|
||||
|
||||
# 用户设置 {#user-settings}
|
||||
|
||||
该 `users` 一节 `user.xml` 配置文件包含用户设置。
|
||||
`user.xml` 中的 `users` 配置段包含了用户配置
|
||||
|
||||
!!! note "信息"
|
||||
!!! note "提示"
|
||||
ClickHouse还支持 [SQL驱动的工作流](../access-rights.md#access-control) 用于管理用户。 我们建议使用它。
|
||||
|
||||
的结构 `users` 科:
|
||||
`users` 配置段的结构:
|
||||
|
||||
``` xml
|
||||
<users>
|
||||
@ -43,21 +43,21 @@ toc_title: "\u7528\u6237\u8BBE\u7F6E"
|
||||
</users>
|
||||
```
|
||||
|
||||
### 用户名称/密码 {#user-namepassword}
|
||||
### user_name/password {#user-namepassword}
|
||||
|
||||
密码可以以明文或SHA256(十六进制格式)指定。
|
||||
|
||||
- 以明文形式分配密码 (**不推荐**),把它放在一个 `password` 元素。
|
||||
- 以明文形式分配密码 (**不推荐**),把它放在一个 `password` 配置段中。
|
||||
|
||||
例如, `<password>qwerty</password>`. 密码可以留空。
|
||||
|
||||
<a id="password_sha256_hex"></a>
|
||||
|
||||
- 要使用其SHA256散列分配密码,请将其放置在 `password_sha256_hex` 元素。
|
||||
- 要使用SHA256加密后的密码,请将其放置在 `password_sha256_hex` 配置段。
|
||||
|
||||
例如, `<password_sha256_hex>65e84be33532fb784c48129675f9eff3a682b27168c0ea744b2cf58ee02337c5</password_sha256_hex>`.
|
||||
|
||||
如何从shell生成密码的示例:
|
||||
从shell生成加密密码的示例:
|
||||
|
||||
PASSWORD=$(base64 < /dev/urandom | head -c8); echo "$PASSWORD"; echo -n "$PASSWORD" | sha256sum | tr -d '-'
|
||||
|
||||
@ -65,19 +65,19 @@ toc_title: "\u7528\u6237\u8BBE\u7F6E"
|
||||
|
||||
<a id="password_double_sha1_hex"></a>
|
||||
|
||||
- 为了与MySQL客户端兼容,密码可以在双SHA1哈希中指定。 放进去 `password_double_sha1_hex` 元素。
|
||||
- 为了与MySQL客户端兼容,密码可以设置为双SHA1哈希加密, 请将其放置在 `password_double_sha1_hex` 配置段。
|
||||
|
||||
例如, `<password_double_sha1_hex>08b4a0f1de6ad37da17359e592c8d74788a83eb0</password_double_sha1_hex>`.
|
||||
|
||||
如何从shell生成密码的示例:
|
||||
从shell生成密码的示例:
|
||||
|
||||
PASSWORD=$(base64 < /dev/urandom | head -c8); echo "$PASSWORD"; echo -n "$PASSWORD" | sha1sum | tr -d '-' | xxd -r -p | sha1sum | tr -d '-'
|
||||
|
||||
结果的第一行是密码。 第二行是相应的双SHA1哈希。
|
||||
|
||||
### 访问管理 {#access_management-user-setting}
|
||||
### access_management {#access_management-user-setting}
|
||||
|
||||
此设置启用禁用使用SQL驱动 [访问控制和帐户管理](../access-rights.md#access-control) 对于用户。
|
||||
此设置可为用户启用或禁用 SQL-driven [访问控制和帐户管理](../access-rights.md#access-control) 。
|
||||
|
||||
可能的值:
|
||||
|
||||
@ -86,42 +86,42 @@ toc_title: "\u7528\u6237\u8BBE\u7F6E"
|
||||
|
||||
默认值:0。
|
||||
|
||||
### 用户名称/网络 {#user-namenetworks}
|
||||
### user_name/networks {#user-namenetworks}
|
||||
|
||||
用户可以从中连接到ClickHouse服务器的网络列表。
|
||||
用户访问来源列表
|
||||
|
||||
列表中的每个元素都可以具有以下形式之一:
|
||||
|
||||
- `<ip>` — IP address or network mask.
|
||||
- `<ip>` — IP地址或网络掩码
|
||||
|
||||
例: `213.180.204.3`, `10.0.0.1/8`, `10.0.0.1/255.255.255.0`, `2a02:6b8::3`, `2a02:6b8::3/64`, `2a02:6b8::3/ffff:ffff:ffff:ffff::`.
|
||||
|
||||
- `<host>` — Hostname.
|
||||
- `<host>` — 域名
|
||||
|
||||
示例: `example01.host.ru`.
|
||||
|
||||
要检查访问,将执行DNS查询,并将所有返回的IP地址与对等地址进行比较。
|
||||
为检查访问,将执行DNS查询,并将所有返回的IP地址与对端地址进行比较。
|
||||
|
||||
- `<host_regexp>` — Regular expression for hostnames.
|
||||
- `<host_regexp>` — 域名的正则表达式.
|
||||
|
||||
示例, `^example\d\d-\d\d-\d\.host\.ru$`
|
||||
|
||||
要检查访问,a [DNS PTR查询](https://en.wikipedia.org/wiki/Reverse_DNS_lookup) 对对等体地址执行,然后应用指定的正则表达式。 然后,对PTR查询的结果执行另一个DNS查询,并将所有接收到的地址与对等地址进行比较。 我们强烈建议正则表达式以$结尾。
|
||||
为检查访问,[DNS PTR查询](https://en.wikipedia.org/wiki/Reverse_DNS_lookup) 对对端地址执行,然后应用指定的正则表达式。 然后,以PTR查询的结果执行另一个DNS查询,并将所有接收到的地址与对端地址进行比较. 我们强烈建议正则表达式以$结尾.
|
||||
|
||||
DNS请求的所有结果都将被缓存,直到服务器重新启动。
|
||||
|
||||
**例**
|
||||
|
||||
要从任何网络打开用户的访问权限,请指定:
|
||||
要开启任意来源网络的访问, 请指定:
|
||||
|
||||
``` xml
|
||||
<ip>::/0</ip>
|
||||
```
|
||||
|
||||
!!! warning "警告"
|
||||
从任何网络开放访问是不安全的,除非你有一个防火墙正确配置或服务器没有直接连接到互联网。
|
||||
从任何网络开放访问是不安全的,除非你有一个正确配置的防火墙, 或者服务器没有直接连接到互联网。
|
||||
|
||||
若要仅从本地主机打开访问权限,请指定:
|
||||
若要限定本机访问, 请指定:
|
||||
|
||||
``` xml
|
||||
<ip>::1</ip>
|
||||
@ -130,22 +130,21 @@ DNS请求的所有结果都将被缓存,直到服务器重新启动。
|
||||
|
||||
### user_name/profile {#user-nameprofile}
|
||||
|
||||
您可以为用户分配设置配置文件。 设置配置文件在单独的部分配置 `users.xml` 文件 有关详细信息,请参阅 [设置配置文件](settings-profiles.md).
|
||||
您可以为用户分配设置配置文件。 设置配置文件在`users.xml` 中有单独的配置段. 有关详细信息,请参阅 [设置配置文件](settings-profiles.md).
|
||||
|
||||
### 用户名称/配额 {#user-namequota}
|
||||
### user_name/quota {#user-namequota}
|
||||
|
||||
配额允许您在一段时间内跟踪或限制资源使用情况。 配额在配置 `quotas`
|
||||
一节 `users.xml` 配置文件。
|
||||
配额允许您在一段时间内跟踪或限制资源使用情况。 配额在`users.xml` 中的 `quotas` 配置段下.
|
||||
|
||||
您可以为用户分配配额。 有关配额配置的详细说明,请参阅 [配额](../quotas.md#quotas).
|
||||
|
||||
### 用户名/数据库 {#user-namedatabases}
|
||||
### user_name/databases {#user-namedatabases}
|
||||
|
||||
在本节中,您可以限制ClickHouse返回的行 `SELECT` 由当前用户进行的查询,从而实现基本的行级安全性。
|
||||
在本配置段中,您可以限制ClickHouse中由当前用户进行的 `SELECT` 查询所返回的行,从而实现基本的行级安全性。
|
||||
|
||||
**示例**
|
||||
|
||||
以下配置强制该用户 `user1` 只能看到的行 `table1` 作为结果 `SELECT` 查询,其中的值 `id` 场是1000。
|
||||
以下配置使用户 `user1` 通过SELECT查询只能得到table1中id为1000的行
|
||||
|
||||
``` xml
|
||||
<user1>
|
||||
@ -159,6 +158,6 @@ DNS请求的所有结果都将被缓存,直到服务器重新启动。
|
||||
</user1>
|
||||
```
|
||||
|
||||
该 `filter` 可以是导致任何表达式 [UInt8](../../sql-reference/data-types/int-uint.md)-键入值。 它通常包含比较和逻辑运算符。 从行 `database_name.table1` 其中,不会为此用户返回为0的筛选结果。 过滤是不兼容的 `PREWHERE` 操作和禁用 `WHERE→PREWHERE` 优化。
|
||||
该 `filter` 可以是[UInt8](../../sql-reference/data-types/int-uint.md)编码的任何表达式。 它通常包含比较和逻辑运算符, 当filter返回0时, database_name.table1 的该行结果将不会返回给用户.过滤不兼容 `PREWHERE` 操作并禁用 `WHERE→PREWHERE` 优化。
|
||||
|
||||
[原始文章](https://clickhouse.com/docs/en/operations/settings/settings_users/) <!--hide-->
|
||||
|
@ -37,6 +37,7 @@
|
||||
#include <Dictionaries/registerDictionaries.h>
|
||||
#include <Disks/registerDisks.h>
|
||||
#include <Formats/registerFormats.h>
|
||||
#include <Formats/FormatFactory.h>
|
||||
#include <boost/algorithm/string/replace.hpp>
|
||||
#include <boost/program_options/options_description.hpp>
|
||||
#include <base/argsToConfig.h>
|
||||
@ -319,9 +320,9 @@ std::string LocalServer::getInitialCreateTableQuery()
|
||||
|
||||
auto table_name = backQuoteIfNeed(config().getString("table-name", "table"));
|
||||
auto table_structure = config().getString("table-structure", "auto");
|
||||
auto data_format = backQuoteIfNeed(config().getString("table-data-format", "TSV"));
|
||||
|
||||
String table_file;
|
||||
String format_from_file_name;
|
||||
if (!config().has("table-file") || config().getString("table-file") == "-")
|
||||
{
|
||||
/// Use Unix tools stdin naming convention
|
||||
@ -330,9 +331,14 @@ std::string LocalServer::getInitialCreateTableQuery()
|
||||
else
|
||||
{
|
||||
/// Use regular file
|
||||
table_file = quoteString(config().getString("table-file"));
|
||||
auto file_name = config().getString("table-file");
|
||||
table_file = quoteString(file_name);
|
||||
format_from_file_name = FormatFactory::instance().getFormatFromFileName(file_name, false);
|
||||
}
|
||||
|
||||
auto data_format
|
||||
= backQuoteIfNeed(config().getString("table-data-format", format_from_file_name.empty() ? "TSV" : format_from_file_name));
|
||||
|
||||
if (table_structure == "auto")
|
||||
table_structure = "";
|
||||
else
|
||||
|
@ -253,15 +253,10 @@ if (TARGET ch_contrib::nuraft)
|
||||
endif()
|
||||
|
||||
set (DBMS_COMMON_LIBRARIES)
|
||||
# libgcc_s does not provide an implementation of an atomics library. Instead,
|
||||
# GCC’s libatomic library can be used to supply these when using libgcc_s.
|
||||
if ((NOT USE_LIBCXX) AND COMPILER_CLANG AND OS_LINUX)
|
||||
list (APPEND DBMS_COMMON_LIBRARIES atomic)
|
||||
endif()
|
||||
|
||||
if (MAKE_STATIC_LIBRARIES OR NOT SPLIT_SHARED_LIBRARIES)
|
||||
add_library (dbms STATIC ${dbms_headers} ${dbms_sources})
|
||||
target_link_libraries (dbms PRIVATE libdivide ${DBMS_COMMON_LIBRARIES})
|
||||
target_link_libraries (dbms PRIVATE ch_contrib::libdivide ${DBMS_COMMON_LIBRARIES})
|
||||
if (TARGET ch_contrib::jemalloc)
|
||||
target_link_libraries (dbms PRIVATE ch_contrib::jemalloc)
|
||||
endif()
|
||||
@ -269,7 +264,7 @@ if (MAKE_STATIC_LIBRARIES OR NOT SPLIT_SHARED_LIBRARIES)
|
||||
else()
|
||||
add_library (dbms SHARED ${dbms_headers} ${dbms_sources})
|
||||
target_link_libraries (dbms PUBLIC ${all_modules} ${DBMS_COMMON_LIBRARIES})
|
||||
target_link_libraries (clickhouse_interpreters PRIVATE libdivide)
|
||||
target_link_libraries (clickhouse_interpreters PRIVATE ch_contrib::libdivide)
|
||||
if (TARGET ch_contrib::jemalloc)
|
||||
target_link_libraries (clickhouse_interpreters PRIVATE ch_contrib::jemalloc)
|
||||
endif()
|
||||
@ -316,7 +311,7 @@ target_link_libraries (clickhouse_common_io
|
||||
PUBLIC
|
||||
common
|
||||
ch_contrib::double_conversion
|
||||
dragonbox_to_chars
|
||||
ch_contrib::dragonbox_to_chars
|
||||
)
|
||||
|
||||
# Use X86 AVX2/AVX512 instructions to accelerate filter operations
|
||||
@ -325,6 +320,7 @@ set_source_files_properties(
|
||||
Columns/ColumnsCommon.cpp
|
||||
Columns/ColumnVector.cpp
|
||||
Columns/ColumnDecimal.cpp
|
||||
Columns/ColumnString.cpp
|
||||
PROPERTIES COMPILE_FLAGS "${X86_INTRINSICS_FLAGS}")
|
||||
|
||||
target_link_libraries(clickhouse_common_io PUBLIC ch_contrib::re2_st)
|
||||
@ -352,7 +348,7 @@ dbms_target_link_libraries(PUBLIC abseil_swiss_tables)
|
||||
|
||||
# Make dbms depend on roaring instead of clickhouse_common_io so that roaring itself can depend on clickhouse_common_io
|
||||
# That way we we can redirect malloc/free functions avoiding circular dependencies
|
||||
dbms_target_link_libraries(PUBLIC roaring)
|
||||
dbms_target_link_libraries(PUBLIC ch_contrib::roaring)
|
||||
|
||||
if (TARGET ch_contrib::rdkafka)
|
||||
dbms_target_link_libraries(PRIVATE ch_contrib::rdkafka ch_contrib::cppkafka)
|
||||
@ -526,7 +522,7 @@ if (TARGET ch_contrib::rapidjson)
|
||||
dbms_target_link_libraries(PRIVATE ch_contrib::rapidjson)
|
||||
endif()
|
||||
|
||||
dbms_target_link_libraries(PUBLIC consistent-hashing)
|
||||
dbms_target_link_libraries(PUBLIC ch_contrib::consistent_hashing)
|
||||
|
||||
include ("${ClickHouse_SOURCE_DIR}/cmake/add_check.cmake")
|
||||
|
||||
|
@ -87,7 +87,7 @@ private:
|
||||
|
||||
public:
|
||||
const char * getFamilyName() const override { return TypeName<T>.data(); }
|
||||
TypeIndex getDataType() const override { return TypeId<T>; }
|
||||
TypeIndex getDataType() const override { return TypeToTypeIndex<T>; }
|
||||
|
||||
bool isNumeric() const override { return false; }
|
||||
bool canBeInsideNullable() const override { return true; }
|
||||
|
@ -238,7 +238,7 @@ public:
|
||||
}
|
||||
|
||||
const char * getFamilyName() const override { return TypeName<T>.data(); }
|
||||
TypeIndex getDataType() const override { return TypeId<T>; }
|
||||
TypeIndex getDataType() const override { return TypeToTypeIndex<T>; }
|
||||
|
||||
MutableColumnPtr cloneResized(size_t size) const override;
|
||||
|
||||
|
@ -123,6 +123,8 @@ public:
|
||||
else
|
||||
return static_cast<T>(x);
|
||||
}
|
||||
|
||||
T operator() (const bool & x) const { return T(x); }
|
||||
};
|
||||
|
||||
}
|
||||
|
@ -37,6 +37,7 @@ String FieldVisitorDump::operator() (const UInt256 & x) const { return formatQuo
|
||||
String FieldVisitorDump::operator() (const Int128 & x) const { return formatQuotedWithPrefix(x, "Int128_"); }
|
||||
String FieldVisitorDump::operator() (const Int256 & x) const { return formatQuotedWithPrefix(x, "Int256_"); }
|
||||
String FieldVisitorDump::operator() (const UUID & x) const { return formatQuotedWithPrefix(x, "UUID_"); }
|
||||
String FieldVisitorDump::operator() (const bool & x) const { return formatQuotedWithPrefix(x, "Bool_"); }
|
||||
|
||||
|
||||
String FieldVisitorDump::operator() (const String & x) const
|
||||
|
@ -27,6 +27,7 @@ public:
|
||||
String operator() (const DecimalField<Decimal128> & x) const;
|
||||
String operator() (const DecimalField<Decimal256> & x) const;
|
||||
String operator() (const AggregateFunctionStateData & x) const;
|
||||
String operator() (const bool & x) const;
|
||||
};
|
||||
|
||||
}
|
||||
|
@ -146,4 +146,11 @@ void FieldVisitorHash::operator() (const Int256 & x) const
|
||||
hash.update(x);
|
||||
}
|
||||
|
||||
void FieldVisitorHash::operator() (const bool & x) const
|
||||
{
|
||||
UInt8 type = Field::Types::Bool;
|
||||
hash.update(type);
|
||||
hash.update(x);
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -33,6 +33,7 @@ public:
|
||||
void operator() (const DecimalField<Decimal128> & x) const;
|
||||
void operator() (const DecimalField<Decimal256> & x) const;
|
||||
void operator() (const AggregateFunctionStateData & x) const;
|
||||
void operator() (const bool & x) const;
|
||||
};
|
||||
|
||||
}
|
||||
|
@ -33,5 +33,7 @@ bool FieldVisitorSum::operator() (AggregateFunctionStateData &) const
|
||||
throw Exception("Cannot sum AggregateFunctionStates", ErrorCodes::LOGICAL_ERROR);
|
||||
}
|
||||
|
||||
bool FieldVisitorSum::operator() (bool &) const { throw Exception("Cannot sum Bools", ErrorCodes::LOGICAL_ERROR); }
|
||||
|
||||
}
|
||||
|
||||
|
@ -27,6 +27,7 @@ public:
|
||||
bool operator() (Map &) const;
|
||||
bool operator() (UUID &) const;
|
||||
bool operator() (AggregateFunctionStateData &) const;
|
||||
bool operator() (bool &) const;
|
||||
|
||||
template <typename T>
|
||||
bool operator() (DecimalField<T> & x) const
|
||||
|
@ -51,7 +51,6 @@ static String formatFloat(const Float64 x)
|
||||
return { buffer, buffer + builder.position() };
|
||||
}
|
||||
|
||||
|
||||
String FieldVisitorToString::operator() (const Null & x) const { return x.isNegativeInfinity() ? "-Inf" : (x.isPositiveInfinity() ? "+Inf" : "NULL"); }
|
||||
String FieldVisitorToString::operator() (const UInt64 & x) const { return formatQuoted(x); }
|
||||
String FieldVisitorToString::operator() (const Int64 & x) const { return formatQuoted(x); }
|
||||
@ -67,6 +66,7 @@ String FieldVisitorToString::operator() (const UInt256 & x) const { return forma
|
||||
String FieldVisitorToString::operator() (const Int256 & x) const { return formatQuoted(x); }
|
||||
String FieldVisitorToString::operator() (const UUID & x) const { return formatQuoted(x); }
|
||||
String FieldVisitorToString::operator() (const AggregateFunctionStateData & x) const { return formatQuoted(x.data); }
|
||||
String FieldVisitorToString::operator() (const bool & x) const { return x ? "true" : "false"; }
|
||||
|
||||
String FieldVisitorToString::operator() (const Array & x) const
|
||||
{
|
||||
|
@ -27,6 +27,7 @@ public:
|
||||
String operator() (const DecimalField<Decimal128> & x) const;
|
||||
String operator() (const DecimalField<Decimal256> & x) const;
|
||||
String operator() (const AggregateFunctionStateData & x) const;
|
||||
String operator() (const bool & x) const;
|
||||
};
|
||||
|
||||
}
|
||||
|
@ -66,5 +66,10 @@ void FieldVisitorWriteBinary::operator() (const Map & x, WriteBuffer & buf) cons
|
||||
}
|
||||
}
|
||||
|
||||
void FieldVisitorWriteBinary::operator()(const bool & x, WriteBuffer & buf) const
|
||||
{
|
||||
writeBinary(UInt8(x), buf);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
@ -26,6 +26,7 @@ public:
|
||||
void operator() (const DecimalField<Decimal128> & x, WriteBuffer & buf) const;
|
||||
void operator() (const DecimalField<Decimal256> & x, WriteBuffer & buf) const;
|
||||
void operator() (const AggregateFunctionStateData & x, WriteBuffer & buf) const;
|
||||
void operator() (const bool & x, WriteBuffer & buf) const;
|
||||
};
|
||||
|
||||
}
|
||||
|
@ -32,6 +32,14 @@ public:
|
||||
return l == r;
|
||||
return false;
|
||||
}
|
||||
else if constexpr (std::is_same_v<T, bool>)
|
||||
{
|
||||
return operator()(UInt8(l), r);
|
||||
}
|
||||
else if constexpr (std::is_same_v<U, bool>)
|
||||
{
|
||||
return operator()(l, UInt8(r));
|
||||
}
|
||||
else
|
||||
{
|
||||
if constexpr (std::is_same_v<T, U>)
|
||||
@ -91,6 +99,14 @@ public:
|
||||
{
|
||||
return r.isPositiveInfinity();
|
||||
}
|
||||
else if constexpr (std::is_same_v<T, bool>)
|
||||
{
|
||||
return operator()(UInt8(l), r);
|
||||
}
|
||||
else if constexpr (std::is_same_v<U, bool>)
|
||||
{
|
||||
return operator()(l, UInt8(r));
|
||||
}
|
||||
else
|
||||
{
|
||||
if constexpr (std::is_same_v<T, U>)
|
||||
|
@ -3,7 +3,12 @@ include("${ClickHouse_SOURCE_DIR}/cmake/dbms_glob_sources.cmake")
|
||||
add_headers_and_sources(clickhouse_common_zookeeper .)
|
||||
|
||||
# for clickhouse server
|
||||
add_library(clickhouse_common_zookeeper ${clickhouse_common_zookeeper_headers} ${clickhouse_common_zookeeper_sources})
|
||||
#
|
||||
# NOTE: this library depends on Interpreters (DB::SystemLog<DB::ZooKeeperLogElement>::add),
|
||||
# and so it should be STATIC because otherwise:
|
||||
# - it will either fail to compile with -Wl,--unresolved-symbols=report-all
|
||||
# - or it will report errors at runtime
|
||||
add_library(clickhouse_common_zookeeper STATIC ${clickhouse_common_zookeeper_headers} ${clickhouse_common_zookeeper_sources})
|
||||
target_compile_definitions (clickhouse_common_zookeeper PRIVATE -DZOOKEEPER_LOG)
|
||||
target_link_libraries (clickhouse_common_zookeeper
|
||||
PUBLIC
|
||||
|
@ -41,10 +41,10 @@ add_executable (space_saving space_saving.cpp)
|
||||
target_link_libraries (space_saving PRIVATE clickhouse_common_io)
|
||||
|
||||
add_executable (integer_hash_tables_and_hashes integer_hash_tables_and_hashes.cpp)
|
||||
target_link_libraries (integer_hash_tables_and_hashes PRIVATE dbms abseil_swiss_tables ch_contrib::sparsehash)
|
||||
target_link_libraries (integer_hash_tables_and_hashes PRIVATE dbms ch_contrib::abseil_swiss_tables ch_contrib::sparsehash)
|
||||
|
||||
add_executable (integer_hash_tables_benchmark integer_hash_tables_benchmark.cpp)
|
||||
target_link_libraries (integer_hash_tables_benchmark PRIVATE dbms abseil_swiss_tables ch_contrib::sparsehash)
|
||||
target_link_libraries (integer_hash_tables_benchmark PRIVATE dbms ch_contrib::abseil_swiss_tables ch_contrib::sparsehash)
|
||||
|
||||
add_executable (cow_columns cow_columns.cpp)
|
||||
target_link_libraries (cow_columns PRIVATE clickhouse_common_io)
|
||||
@ -78,8 +78,10 @@ target_link_libraries (shell_command_inout PRIVATE clickhouse_common_io)
|
||||
add_executable (executable_udf executable_udf.cpp)
|
||||
target_link_libraries (executable_udf PRIVATE dbms)
|
||||
|
||||
add_executable (hive_metastore_client hive_metastore_client.cpp)
|
||||
target_link_libraries (hive_metastore_client PUBLIC ch_contrib::hivemetastore ch_contrib::thrift)
|
||||
if (ENABLE_HIVE)
|
||||
add_executable (hive_metastore_client hive_metastore_client.cpp)
|
||||
target_link_libraries (hive_metastore_client PUBLIC ch_contrib::hivemetastore ch_contrib::thrift)
|
||||
endif()
|
||||
|
||||
add_executable (interval_tree interval_tree.cpp)
|
||||
target_link_libraries (interval_tree PRIVATE dbms)
|
||||
|
@ -1,27 +1,80 @@
|
||||
#include "getNumberOfPhysicalCPUCores.h"
|
||||
|
||||
#include <Common/config.h>
|
||||
#if defined(OS_LINUX)
|
||||
# include <cmath>
|
||||
# include <fstream>
|
||||
#endif
|
||||
#if USE_CPUID
|
||||
# include <libcpuid/libcpuid.h>
|
||||
#endif
|
||||
|
||||
#include <thread>
|
||||
|
||||
#if defined(OS_LINUX)
|
||||
unsigned getCGroupLimitedCPUCores(unsigned default_cpu_count)
|
||||
{
|
||||
// Try to look at cgroups limit if it is available.
|
||||
auto read_from = [](const char * filename, int default_value) -> int {
|
||||
std::ifstream infile(filename);
|
||||
if (!infile.is_open())
|
||||
{
|
||||
return default_value;
|
||||
}
|
||||
int idata;
|
||||
if (infile >> idata)
|
||||
return idata;
|
||||
else
|
||||
return default_value;
|
||||
};
|
||||
|
||||
unsigned quota_count = default_cpu_count;
|
||||
// Return the number of microseconds per period that the process is guaranteed to run.
|
||||
// -1 for no quota
|
||||
int cgroup_quota = read_from("/sys/fs/cgroup/cpu/cpu.cfs_quota_us", -1);
|
||||
int cgroup_period = read_from("/sys/fs/cgroup/cpu/cpu.cfs_period_us", -1);
|
||||
if (cgroup_quota > -1 && cgroup_period > 0)
|
||||
{
|
||||
quota_count = ceil(static_cast<float>(cgroup_quota) / static_cast<float>(cgroup_period));
|
||||
}
|
||||
|
||||
// Share number, relative to a base of 1024 (for example, 2048 expresses two CPUs' worth of processing)
|
||||
// -1 for no share setup
|
||||
int cgroup_share = read_from("/sys/fs/cgroup/cpu/cpu.shares", -1);
|
||||
// Treat the default value of 1024 as "no shares configured"
|
||||
if (cgroup_share == 1024)
|
||||
cgroup_share = -1;
|
||||
|
||||
# define PER_CPU_SHARES 1024
|
||||
unsigned share_count = default_cpu_count;
|
||||
if (cgroup_share > -1)
|
||||
{
|
||||
share_count = ceil(static_cast<float>(cgroup_share) / static_cast<float>(PER_CPU_SHARES));
|
||||
}
|
||||
|
||||
return std::min(default_cpu_count, std::min(share_count, quota_count));
|
||||
}
|
||||
#endif // OS_LINUX
|
||||
|
||||
unsigned getNumberOfPhysicalCPUCores()
|
||||
{
|
||||
static const unsigned number = []
|
||||
static const unsigned number = [] {
|
||||
unsigned cpu_count = 0; // start with an invalid num
|
||||
#if USE_CPUID
|
||||
do
|
||||
{
|
||||
# if USE_CPUID
|
||||
cpu_raw_data_t raw_data;
|
||||
cpu_id_t data;
|
||||
|
||||
/// On Xen VMs, libcpuid returns wrong info (zero number of cores). Fallback to alternative method.
|
||||
/// Also, libcpuid does not support some CPUs like AMD Hygon C86 7151.
|
||||
if (0 != cpuid_get_raw_data(&raw_data) || 0 != cpu_identify(&raw_data, &data) || data.num_logical_cpus == 0)
|
||||
return std::thread::hardware_concurrency();
|
||||
{
|
||||
// Just fallback
|
||||
break;
|
||||
}
|
||||
|
||||
unsigned res = data.num_cores * data.total_logical_cpus / data.num_logical_cpus;
|
||||
cpu_count = data.num_cores * data.total_logical_cpus / data.num_logical_cpus;
|
||||
|
||||
/// Also, libcpuid gives strange result on Google Compute Engine VMs.
|
||||
/// Example:
|
||||
@ -29,14 +82,18 @@ unsigned getNumberOfPhysicalCPUCores()
|
||||
/// total_logical_cpus = 1, /// total number of logical cores on all sockets
|
||||
/// num_logical_cpus = 24. /// number of logical cores on current CPU socket
|
||||
/// It means two-way hyper-threading (24 / 12), yet, contradictorily, 'total_logical_cpus' == 1.
|
||||
|
||||
if (res != 0)
|
||||
return res;
|
||||
# endif
|
||||
} while (false);
|
||||
#endif
|
||||
|
||||
/// As a fallback (also for non-x86 architectures), assume there is no hyper-threading on the system.
|
||||
/// (Actually, only Aarch64 is supported).
|
||||
return std::thread::hardware_concurrency();
|
||||
if (cpu_count == 0)
|
||||
cpu_count = std::thread::hardware_concurrency();
|
||||
|
||||
#if defined(OS_LINUX)
|
||||
cpu_count = getCGroupLimitedCPUCores(cpu_count);
|
||||
#endif // OS_LINUX
|
||||
return cpu_count;
|
||||
}();
|
||||
return number;
|
||||
}
|
||||
|
@ -25,8 +25,240 @@ inline int cmp(T a, T b)
|
||||
/// We can process uninitialized memory in the functions below.
|
||||
/// Results don't depend on the values inside uninitialized memory but Memory Sanitizer cannot see it.
|
||||
/// Disable optimized functions if compile with Memory Sanitizer.
|
||||
#if defined(__AVX512BW__) && defined(__AVX512VL__) && !defined(MEMORY_SANITIZER)
|
||||
#include <immintrin.h>
|
||||
|
||||
#if defined(__SSE2__) && !defined(MEMORY_SANITIZER)
|
||||
|
||||
/** All functions work under the following assumptions:
|
||||
 * - it's possible to read up to 15 excess bytes after the end of the 'a' and 'b' regions;
|
||||
 * - memory regions are relatively small, so extra loop unrolling is not worthwhile.
|
||||
*/
|
||||
|
||||
/** Variant when memory regions may have different sizes.
|
||||
*/
|
||||
template <typename Char>
|
||||
inline int memcmpSmallAllowOverflow15(const Char * a, size_t a_size, const Char * b, size_t b_size)
|
||||
{
|
||||
size_t min_size = std::min(a_size, b_size);
|
||||
|
||||
for (size_t offset = 0; offset < min_size; offset += 16)
|
||||
{
|
||||
uint16_t mask = _mm_cmp_epi8_mask(
|
||||
_mm_loadu_si128(reinterpret_cast<const __m128i *>(a + offset)),
|
||||
_mm_loadu_si128(reinterpret_cast<const __m128i *>(b + offset)), _MM_CMPINT_NE);
|
||||
|
||||
if (mask)
|
||||
{
|
||||
offset += __builtin_ctz(mask);
|
||||
|
||||
if (offset >= min_size)
|
||||
break;
|
||||
|
||||
return detail::cmp(a[offset], b[offset]);
|
||||
}
|
||||
}
|
||||
|
||||
return detail::cmp(a_size, b_size);
|
||||
}
|
||||
|
||||
|
||||
/** Variant when memory regions may have different sizes.
|
||||
 * But compare the regions as if the smaller one were padded with zero bytes up to the size of the larger.
|
||||
 * This is needed so that toFixedString('abc', 5) = 'abc' holds,
|
||||
 * for compatibility with the SQL standard.
|
||||
*/
|
||||
template <typename Char>
|
||||
inline int memcmpSmallLikeZeroPaddedAllowOverflow15(const Char * a, size_t a_size, const Char * b, size_t b_size)
|
||||
{
|
||||
size_t min_size = std::min(a_size, b_size);
|
||||
|
||||
for (size_t offset = 0; offset < min_size; offset += 16)
|
||||
{
|
||||
uint16_t mask = _mm_cmp_epi8_mask(
|
||||
_mm_loadu_si128(reinterpret_cast<const __m128i *>(a + offset)),
|
||||
_mm_loadu_si128(reinterpret_cast<const __m128i *>(b + offset)), _MM_CMPINT_NE);
|
||||
|
||||
if (mask)
|
||||
{
|
||||
offset += __builtin_ctz(mask);
|
||||
|
||||
if (offset >= min_size)
|
||||
break;
|
||||
|
||||
return detail::cmp(a[offset], b[offset]);
|
||||
}
|
||||
}
|
||||
|
||||
/// The strings are equal up to min_size.
|
||||
/// If the rest of the larger string is zero bytes then the strings are considered equal.
|
||||
|
||||
size_t max_size;
|
||||
const Char * longest;
|
||||
int cmp;
|
||||
|
||||
if (a_size == b_size)
|
||||
{
|
||||
return 0;
|
||||
}
|
||||
else if (a_size > b_size)
|
||||
{
|
||||
max_size = a_size;
|
||||
longest = a;
|
||||
cmp = 1;
|
||||
}
|
||||
else
|
||||
{
|
||||
max_size = b_size;
|
||||
longest = b;
|
||||
cmp = -1;
|
||||
}
|
||||
|
||||
const __m128i zero16 = _mm_setzero_si128();
|
||||
|
||||
for (size_t offset = min_size; offset < max_size; offset += 16)
|
||||
{
|
||||
uint16_t mask = _mm_cmpneq_epi8_mask(
|
||||
_mm_loadu_si128(reinterpret_cast<const __m128i *>(longest + offset)),
|
||||
zero16);
|
||||
|
||||
if (mask)
|
||||
{
|
||||
offset += __builtin_ctz(mask);
|
||||
|
||||
if (offset >= max_size)
|
||||
return 0;
|
||||
return cmp;
|
||||
}
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
/** Variant when memory regions have same size.
|
||||
 * TODO Check whether the compiler can optimize the previous function when the caller passes identical sizes.
|
||||
*/
|
||||
template <typename Char>
|
||||
inline int memcmpSmallAllowOverflow15(const Char * a, const Char * b, size_t size)
|
||||
{
|
||||
for (size_t offset = 0; offset < size; offset += 16)
|
||||
{
|
||||
uint16_t mask = _mm_cmp_epi8_mask(
|
||||
_mm_loadu_si128(reinterpret_cast<const __m128i *>(a + offset)),
|
||||
_mm_loadu_si128(reinterpret_cast<const __m128i *>(b + offset)), _MM_CMPINT_NE);
|
||||
|
||||
if (mask)
|
||||
{
|
||||
offset += __builtin_ctz(mask);
|
||||
|
||||
if (offset >= size)
|
||||
return 0;
|
||||
|
||||
return detail::cmp(a[offset], b[offset]);
|
||||
}
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
/** Compare memory regions for equality.
|
||||
*/
|
||||
template <typename Char>
|
||||
inline bool memequalSmallAllowOverflow15(const Char * a, size_t a_size, const Char * b, size_t b_size)
|
||||
{
|
||||
if (a_size != b_size)
|
||||
return false;
|
||||
|
||||
for (size_t offset = 0; offset < a_size; offset += 16)
|
||||
{
|
||||
uint16_t mask = _mm_cmp_epi8_mask(
|
||||
_mm_loadu_si128(reinterpret_cast<const __m128i *>(a + offset)),
|
||||
_mm_loadu_si128(reinterpret_cast<const __m128i *>(b + offset)), _MM_CMPINT_NE);
|
||||
|
||||
if (mask)
|
||||
{
|
||||
offset += __builtin_ctz(mask);
|
||||
return offset >= a_size;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
/** Variant when the caller knows in advance that the size is a multiple of 16.
|
||||
*/
|
||||
template <typename Char>
|
||||
inline int memcmpSmallMultipleOf16(const Char * a, const Char * b, size_t size)
|
||||
{
|
||||
for (size_t offset = 0; offset < size; offset += 16)
|
||||
{
|
||||
uint16_t mask = _mm_cmp_epi8_mask(
|
||||
_mm_loadu_si128(reinterpret_cast<const __m128i *>(a + offset)),
|
||||
_mm_loadu_si128(reinterpret_cast<const __m128i *>(b + offset)), _MM_CMPINT_NE);
|
||||
|
||||
if (mask)
|
||||
{
|
||||
offset += __builtin_ctz(mask);
|
||||
return detail::cmp(a[offset], b[offset]);
|
||||
}
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
/** Variant when the size is 16 exactly.
|
||||
*/
|
||||
template <typename Char>
|
||||
inline int memcmp16(const Char * a, const Char * b)
|
||||
{
|
||||
uint16_t mask = _mm_cmp_epi8_mask(
|
||||
_mm_loadu_si128(reinterpret_cast<const __m128i *>(a)),
|
||||
_mm_loadu_si128(reinterpret_cast<const __m128i *>(b)), _MM_CMPINT_NE);
|
||||
|
||||
if (mask)
|
||||
{
|
||||
auto offset = __builtin_ctz(mask);
|
||||
return detail::cmp(a[offset], b[offset]);
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
/** Variant when the size is 16 exactly.
|
||||
*/
|
||||
inline bool memequal16(const void * a, const void * b)
|
||||
{
|
||||
return 0xFFFF == _mm_cmp_epi8_mask(
|
||||
_mm_loadu_si128(reinterpret_cast<const __m128i *>(a)),
|
||||
_mm_loadu_si128(reinterpret_cast<const __m128i *>(b)), _MM_CMPINT_EQ);
|
||||
}
|
||||
|
||||
|
||||
/** Compare memory region to zero */
|
||||
inline bool memoryIsZeroSmallAllowOverflow15(const void * data, size_t size)
|
||||
{
|
||||
const __m128i zero16 = _mm_setzero_si128();
|
||||
|
||||
for (size_t offset = 0; offset < size; offset += 16)
|
||||
{
|
||||
uint16_t mask = _mm_cmp_epi8_mask(zero16,
|
||||
_mm_loadu_si128(reinterpret_cast<const __m128i *>(reinterpret_cast<const char *>(data) + offset)), _MM_CMPINT_NE);
|
||||
|
||||
if (mask)
|
||||
{
|
||||
offset += __builtin_ctz(mask);
|
||||
return offset >= size;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
#elif defined(__SSE2__) && !defined(MEMORY_SANITIZER)
|
||||
#include <emmintrin.h>
|
||||
|
||||
|
||||
|
@ -106,6 +106,12 @@ inline Field getBinaryValue(UInt8 type, ReadBuffer & buf)
|
||||
readStringBinary(value.data, buf);
|
||||
return value;
|
||||
}
|
||||
case Field::Types::Bool:
|
||||
{
|
||||
UInt8 value;
|
||||
readBinary(value, buf);
|
||||
return bool(value);
|
||||
}
|
||||
}
|
||||
return Field();
|
||||
}
|
||||
@ -346,6 +352,13 @@ Field Field::restoreFromDump(const std::string_view & dump_)
|
||||
return str;
|
||||
}
|
||||
|
||||
prefix = std::string_view{"Bool_"};
|
||||
if (dump.starts_with(prefix))
|
||||
{
|
||||
bool value = parseFromString<bool>(dump.substr(prefix.length()));
|
||||
return value;
|
||||
}
|
||||
|
||||
prefix = std::string_view{"Array_["};
|
||||
if (dump.starts_with(prefix))
|
||||
{
|
||||
|
@ -282,6 +282,7 @@ public:
|
||||
Int256 = 25,
|
||||
Map = 26,
|
||||
UUID = 27,
|
||||
Bool = 28,
|
||||
};
|
||||
};
|
||||
|
||||
@ -323,7 +324,10 @@ public:
|
||||
template <typename T>
|
||||
Field(T && rhs, enable_if_not_field_or_bool_or_stringlike_t<T> = nullptr);
|
||||
|
||||
Field(bool rhs) : Field(castToNearestFieldType(rhs)) {}
|
||||
Field(bool rhs) : Field(castToNearestFieldType(rhs))
|
||||
{
|
||||
which = Types::Bool;
|
||||
}
|
||||
|
||||
/// Create a string inplace.
|
||||
Field(const std::string_view & str) { create(str.data(), str.size()); }
|
||||
@ -376,7 +380,12 @@ public:
|
||||
enable_if_not_field_or_bool_or_stringlike_t<T, Field> &
|
||||
operator=(T && rhs);
|
||||
|
||||
Field & operator= (bool rhs) { return *this = castToNearestFieldType(rhs); }
|
||||
Field & operator= (bool rhs)
|
||||
{
|
||||
*this = castToNearestFieldType(rhs);
|
||||
which = Types::Bool;
|
||||
return *this;
|
||||
}
|
||||
|
||||
Field & operator= (const std::string_view & str);
|
||||
Field & operator= (const String & str) { return *this = std::string_view{str}; }
|
||||
@ -450,6 +459,7 @@ public:
|
||||
switch (which)
|
||||
{
|
||||
case Types::Null: return false;
|
||||
case Types::Bool: [[fallthrough]];
|
||||
case Types::UInt64: return get<UInt64>() < rhs.get<UInt64>();
|
||||
case Types::UInt128: return get<UInt128>() < rhs.get<UInt128>();
|
||||
case Types::UInt256: return get<UInt256>() < rhs.get<UInt256>();
|
||||
@ -487,6 +497,7 @@ public:
|
||||
switch (which)
|
||||
{
|
||||
case Types::Null: return true;
|
||||
case Types::Bool: [[fallthrough]];
|
||||
case Types::UInt64: return get<UInt64>() <= rhs.get<UInt64>();
|
||||
case Types::UInt128: return get<UInt128>() <= rhs.get<UInt128>();
|
||||
case Types::UInt256: return get<UInt256>() <= rhs.get<UInt256>();
|
||||
@ -524,6 +535,7 @@ public:
|
||||
switch (which)
|
||||
{
|
||||
case Types::Null: return true;
|
||||
case Types::Bool: [[fallthrough]];
|
||||
case Types::UInt64: return get<UInt64>() == rhs.get<UInt64>();
|
||||
case Types::Int64: return get<Int64>() == rhs.get<Int64>();
|
||||
case Types::Float64:
|
||||
@ -580,6 +592,11 @@ public:
|
||||
case Types::Array: return f(field.template get<Array>());
|
||||
case Types::Tuple: return f(field.template get<Tuple>());
|
||||
case Types::Map: return f(field.template get<Map>());
|
||||
case Types::Bool:
|
||||
{
|
||||
bool value = bool(field.template get<UInt64>());
|
||||
return f(value);
|
||||
}
|
||||
case Types::Decimal32: return f(field.template get<DecimalField<Decimal32>>());
|
||||
case Types::Decimal64: return f(field.template get<DecimalField<Decimal64>>());
|
||||
case Types::Decimal128: return f(field.template get<DecimalField<Decimal128>>());
|
||||
@ -739,6 +756,7 @@ template <> struct Field::TypeToEnum<DecimalField<Decimal128>>{ static const Typ
|
||||
template <> struct Field::TypeToEnum<DecimalField<Decimal256>>{ static const Types::Which value = Types::Decimal256; };
|
||||
template <> struct Field::TypeToEnum<DecimalField<DateTime64>>{ static const Types::Which value = Types::Decimal64; };
|
||||
template <> struct Field::TypeToEnum<AggregateFunctionStateData>{ static const Types::Which value = Types::AggregateFunctionState; };
|
||||
template <> struct Field::TypeToEnum<bool>{ static const Types::Which value = Types::Bool; };
|
||||
|
||||
template <> struct Field::EnumToType<Field::Types::Null> { using Type = Null; };
|
||||
template <> struct Field::EnumToType<Field::Types::UInt64> { using Type = UInt64; };
|
||||
@ -758,6 +776,7 @@ template <> struct Field::EnumToType<Field::Types::Decimal64> { using Type = Dec
|
||||
template <> struct Field::EnumToType<Field::Types::Decimal128> { using Type = DecimalField<Decimal128>; };
|
||||
template <> struct Field::EnumToType<Field::Types::Decimal256> { using Type = DecimalField<Decimal256>; };
|
||||
template <> struct Field::EnumToType<Field::Types::AggregateFunctionState> { using Type = DecimalField<AggregateFunctionStateData>; };
|
||||
template <> struct Field::EnumToType<Field::Types::Bool> { using Type = UInt64; };
|
||||
|
||||
inline constexpr bool isInt64OrUInt64FieldType(Field::Types::Which t)
|
||||
{
|
||||
@ -765,6 +784,13 @@ inline constexpr bool isInt64OrUInt64FieldType(Field::Types::Which t)
|
||||
|| t == Field::Types::UInt64;
|
||||
}
|
||||
|
||||
inline constexpr bool isInt64OrUInt64orBoolFieldType(Field::Types::Which t)
|
||||
{
|
||||
return t == Field::Types::Int64
|
||||
|| t == Field::Types::UInt64
|
||||
|| t == Field::Types::Bool;
|
||||
}
|
||||
|
||||
// Field value getter with type checking in debug builds.
|
||||
template <typename T>
|
||||
NearestFieldType<std::decay_t<T>> & Field::get()
|
||||
@ -781,7 +807,7 @@ NearestFieldType<std::decay_t<T>> & Field::get()
|
||||
// Disregard signedness when converting between int64 types.
|
||||
constexpr Field::Types::Which target = TypeToEnum<StoredType>::value;
|
||||
if (target != which
|
||||
&& (!isInt64OrUInt64FieldType(target) || !isInt64OrUInt64FieldType(which)))
|
||||
&& (!isInt64OrUInt64orBoolFieldType(target) || !isInt64OrUInt64orBoolFieldType(which)))
|
||||
throw Exception(ErrorCodes::LOGICAL_ERROR,
|
||||
"Invalid Field get from type {} to type {}", which, target);
|
||||
#endif
|
||||
|
@ -10,12 +10,12 @@ namespace DB
|
||||
* Returns TypeIndex::Nothing if type was not present in TypeIndex;
|
||||
* Returns TypeIndex element otherwise.
|
||||
*
|
||||
* @example TypeId<UInt8> == TypeIndex::UInt8
|
||||
* @example TypeId<MySuperType> == TypeIndex::Nothing
|
||||
* @example TypeToTypeIndex<UInt8> == TypeIndex::UInt8
|
||||
* @example TypeToTypeIndex<MySuperType> == TypeIndex::Nothing
|
||||
*/
|
||||
template <class T> inline constexpr TypeIndex TypeId = TypeIndex::Nothing;
|
||||
template <class T> inline constexpr TypeIndex TypeToTypeIndex = TypeIndex::Nothing;
|
||||
|
||||
template <TypeIndex index> struct ReverseTypeIdT : std::false_type {};
|
||||
template <TypeIndex index> struct TypeIndexToTypeHelper : std::false_type {};
|
||||
|
||||
/**
|
||||
* Obtain real type from TypeIndex if possible.
|
||||
@ -23,14 +23,14 @@ template <TypeIndex index> struct ReverseTypeIdT : std::false_type {};
|
||||
* Returns a type alias if is corresponds to TypeIndex value.
|
||||
* Yields a compiler error otherwise.
|
||||
*
|
||||
* @example ReverseTypeId<TypeIndex::UInt8> == UInt8
|
||||
* @example TypeIndexToType<TypeIndex::UInt8> == UInt8
|
||||
*/
|
||||
template <TypeIndex index> using ReverseTypeId = typename ReverseTypeIdT<index>::T;
|
||||
template <TypeIndex index> constexpr bool HasReverseTypeId = ReverseTypeIdT<index>::value;
|
||||
template <TypeIndex index> using TypeIndexToType = typename TypeIndexToTypeHelper<index>::T;
|
||||
template <TypeIndex index> constexpr bool TypeIndexHasType = TypeIndexToTypeHelper<index>::value;
|
||||
|
||||
#define TYPEID_MAP(_A) \
|
||||
template <> inline constexpr TypeIndex TypeId<_A> = TypeIndex::_A; \
|
||||
template <> struct ReverseTypeIdT<TypeIndex::_A> : std::true_type { using T = _A; };
|
||||
template <> inline constexpr TypeIndex TypeToTypeIndex<_A> = TypeIndex::_A; \
|
||||
template <> struct TypeIndexToTypeHelper<TypeIndex::_A> : std::true_type { using T = _A; };
|
||||
|
||||
TYPEID_MAP(UInt8)
|
||||
TYPEID_MAP(UInt16)
|
||||
@ -58,4 +58,7 @@ TYPEID_MAP(String)
|
||||
|
||||
struct Array;
|
||||
TYPEID_MAP(Array)
|
||||
|
||||
#undef TYPEID_MAP
|
||||
|
||||
}
|
||||
|
@ -7,14 +7,14 @@ GTEST_TEST(Field, FromBool)
|
||||
{
|
||||
{
|
||||
Field f{false};
|
||||
ASSERT_EQ(f.getType(), Field::Types::UInt64);
|
||||
ASSERT_EQ(f.getType(), Field::Types::Bool);
|
||||
ASSERT_EQ(f.get<UInt64>(), 0);
|
||||
ASSERT_EQ(f.get<bool>(), false);
|
||||
}
|
||||
|
||||
{
|
||||
Field f{true};
|
||||
ASSERT_EQ(f.getType(), Field::Types::UInt64);
|
||||
ASSERT_EQ(f.getType(), Field::Types::Bool);
|
||||
ASSERT_EQ(f.get<UInt64>(), 1);
|
||||
ASSERT_EQ(f.get<bool>(), true);
|
||||
}
|
||||
@ -22,7 +22,7 @@ GTEST_TEST(Field, FromBool)
|
||||
{
|
||||
Field f;
|
||||
f = false;
|
||||
ASSERT_EQ(f.getType(), Field::Types::UInt64);
|
||||
ASSERT_EQ(f.getType(), Field::Types::Bool);
|
||||
ASSERT_EQ(f.get<UInt64>(), 0);
|
||||
ASSERT_EQ(f.get<bool>(), false);
|
||||
}
|
||||
@ -30,7 +30,7 @@ GTEST_TEST(Field, FromBool)
|
||||
{
|
||||
Field f;
|
||||
f = true;
|
||||
ASSERT_EQ(f.getType(), Field::Types::UInt64);
|
||||
ASSERT_EQ(f.getType(), Field::Types::Bool);
|
||||
ASSERT_EQ(f.get<UInt64>(), 1);
|
||||
ASSERT_EQ(f.get<bool>(), true);
|
||||
}
|
||||
|
@ -1,14 +1,15 @@
|
||||
#pragma once
|
||||
|
||||
#include <Columns/ColumnDecimal.h>
|
||||
#include <Core/DecimalFunctions.h>
|
||||
#include <DataTypes/IDataType.h>
|
||||
#include <DataTypes/DataTypesNumber.h>
|
||||
#include <Interpreters/Context_fwd.h>
|
||||
|
||||
#include <cmath>
|
||||
#include <type_traits>
|
||||
|
||||
#include <Core/TypeId.h>
|
||||
#include <Core/DecimalFunctions.h>
|
||||
#include <Columns/ColumnDecimal.h>
|
||||
#include <DataTypes/IDataType.h>
|
||||
#include <DataTypes/DataTypesNumber.h>
|
||||
#include <Interpreters/Context_fwd.h>
|
||||
|
||||
|
||||
namespace DB
|
||||
{
|
||||
@ -59,7 +60,7 @@ class DataTypeDecimalBase : public IDataType
|
||||
public:
|
||||
using FieldType = T;
|
||||
using ColumnType = ColumnDecimal<T>;
|
||||
static constexpr auto type_id = TypeId<T>;
|
||||
static constexpr auto type_id = TypeToTypeIndex<T>;
|
||||
|
||||
static constexpr bool is_parametric = true;
|
||||
|
||||
@ -75,7 +76,7 @@ public:
|
||||
throw Exception("Scale " + std::to_string(scale) + " is out of bounds", ErrorCodes::ARGUMENT_OUT_OF_BOUND);
|
||||
}
|
||||
|
||||
TypeIndex getTypeId() const override { return TypeId<T>; }
|
||||
TypeIndex getTypeId() const override { return TypeToTypeIndex<T>; }
|
||||
|
||||
Field getDefault() const override;
|
||||
MutableColumnPtr createColumn() const override;
|
||||
|
@ -1,5 +1,6 @@
|
||||
#pragma once
|
||||
|
||||
#include <Core/TypeId.h>
#include <DataTypes/IDataType.h>
#include <DataTypes/Serializations/SerializationNumber.h>

@@ -20,13 +21,13 @@ class DataTypeNumberBase : public IDataType
public:
static constexpr bool is_parametric = false;
static constexpr auto family_name = TypeName<T>;
static constexpr auto type_id = TypeId<T>;
static constexpr auto type_id = TypeToTypeIndex<T>;

using FieldType = T;
using ColumnType = ColumnVector<T>;

const char * getFamilyName() const override { return TypeName<T>.data(); }
TypeIndex getTypeId() const override { return TypeId<T>; }
TypeIndex getTypeId() const override { return TypeToTypeIndex<T>; }

Field getDefault() const override;

@@ -38,7 +38,7 @@ public:

const char * getFamilyName() const override { return family_name; }
std::string doGetName() const override;
TypeIndex getTypeId() const override { return TypeId<T>; }
TypeIndex getTypeId() const override { return TypeToTypeIndex<T>; }
bool canBePromoted() const override { return true; }
DataTypePtr promoteNumericType() const override;

@@ -152,4 +152,9 @@ DataTypePtr FieldToDataType::operator() (const AggregateFunctionStateData & x) c
return DataTypeFactory::instance().get(name);
}

DataTypePtr FieldToDataType::operator()(const bool &) const
{
return DataTypeFactory::instance().get("Bool");
}

}

@@ -38,6 +38,7 @@ public:
DataTypePtr operator() (const DecimalField<Decimal128> & x) const;
DataTypePtr operator() (const DecimalField<Decimal256> & x) const;
DataTypePtr operator() (const AggregateFunctionStateData & x) const;
DataTypePtr operator() (const bool & x) const;
};

}

@@ -1,14 +1,15 @@
#pragma once

#include <memory>
#include <Common/COW.h>
#include <boost/noncopyable.hpp>
#include <Core/Names.h>
#include <Core/TypeId.h>
#include <Common/COW.h>
#include <DataTypes/DataTypeCustom.h>
#include <DataTypes/Serializations/ISerialization.h>
#include <DataTypes/Serializations/SerializationInfo.h>


namespace DB
{

@@ -475,7 +476,7 @@ template <typename T, typename DataType>
inline bool isColumnedAsDecimalT(const DataType & data_type)
{
const WhichDataType which(data_type);
return (which.isDecimal() || which.isDateTime64()) && which.idx == TypeId<T>;
return (which.isDecimal() || which.isDateTime64()) && which.idx == TypeToTypeIndex<T>;
}

template <typename T>

@@ -158,6 +158,19 @@ void SerializationInfoByName::add(const SerializationInfoByName & other)
}
}

void SerializationInfoByName::replaceData(const SerializationInfoByName & other)
{
for (const auto & [name, new_info] : other)
{
auto & old_info = (*this)[name];

if (old_info)
old_info->replaceData(*new_info);
else
old_info = new_info->clone();
}
}

void SerializationInfoByName::writeJSON(WriteBuffer & out) const
{
Poco::JSON::Object object;

@@ -89,6 +89,11 @@ public:
void add(const Block & block);
void add(const SerializationInfoByName & other);

/// Takes data from @other, but keeps current serialization kinds.
/// If column exists in @other infos, but not in current infos,
/// it's cloned to current infos.
void replaceData(const SerializationInfoByName & other);

void writeJSON(WriteBuffer & out) const;
void readJSON(ReadBuffer & in);
};
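
A stripped-down sketch of the merge rule documented above: keep the current entry's serialization kind, take the data from the other side, and clone entries that exist only there. The Info type and its members below are illustrative stand-ins, not the real SerializationInfo API.

// Hedged sketch: a name -> info map where replaceData() keeps the existing
// entry's "kind" but takes its data, and clones entries missing on the left.
#include <cstddef>
#include <map>
#include <memory>
#include <string>

struct Info
{
    std::string kind;     /// preserved by replaceData
    size_t num_rows = 0;  /// "data" taken from the other side

    void replaceData(const Info & other) { num_rows = other.num_rows; }
    std::shared_ptr<Info> clone() const { return std::make_shared<Info>(*this); }
};

using InfoByName = std::map<std::string, std::shared_ptr<Info>>;

void replaceData(InfoByName & lhs, const InfoByName & rhs)
{
    for (const auto & [name, new_info] : rhs)
    {
        auto & old_info = lhs[name];
        if (old_info)
            old_info->replaceData(*new_info);  /// keep kind, take data
        else
            old_info = new_info->clone();      /// column only in rhs: clone it
    }
}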

@@ -33,9 +33,7 @@ target_link_libraries(clickhouse_dictionaries
string_utils
)

target_link_libraries(clickhouse_dictionaries
PUBLIC
abseil_swiss_tables)
target_link_libraries(clickhouse_dictionaries PUBLIC ch_contrib::abseil_swiss_tables)

if (TARGET ch_contrib::cassandra)
target_link_libraries(clickhouse_dictionaries PRIVATE ch_contrib::cassandra)

@@ -7,12 +7,14 @@

#include <Poco/Util/AbstractConfiguration.h>

#include <base/EnumReflection.h>

#include <Core/Field.h>
#include <Core/TypeId.h>
#include <IO/ReadBufferFromString.h>
#include <DataTypes/IDataType.h>
#include <Interpreters/IExternalLoadable.h>
#include <base/EnumReflection.h>
#include <Core/TypeId.h>


#if defined(__GNUC__)
/// GCC mistakenly warns about the names in enum class.

@@ -26,7 +28,7 @@ using TypeIndexUnderlying = magic_enum::underlying_type_t<TypeIndex>;
// We need to be able to map TypeIndex -> AttributeUnderlyingType and AttributeUnderlyingType -> real type
// The first can be done by defining AttributeUnderlyingType enum values to TypeIndex values and then performing
// a enum_cast.
// The second can be achieved by using ReverseTypeId
// The second can be achieved by using TypeIndexToType
#define map_item(__T) __T = static_cast<TypeIndexUnderlying>(TypeIndex::__T)

enum class AttributeUnderlyingType : TypeIndexUnderlying

@@ -73,7 +75,7 @@ template <AttributeUnderlyingType type>
struct DictionaryAttributeType
{
/// Converts @c type to it underlying type e.g. AttributeUnderlyingType::UInt8 -> UInt8
using AttributeType = ReverseTypeId<
using AttributeType = TypeIndexToType<
static_cast<TypeIndex>(
static_cast<TypeIndexUnderlying>(type))>;
};
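
The two-way mapping described in the comments above can be sketched in isolation. The enums, the toAttributeType() helper and the TypeIndexToTypeImpl trait below are toy stand-ins (the real code goes through magic_enum::enum_cast and TypeIndexToType); they only show the two directions of the mapping.

// Hedged sketch: AttributeUnderlyingType mirrors the numeric values of a
// (toy) TypeIndex, so TypeIndex -> AttributeUnderlyingType is a checked cast
// over the underlying integer, and the reverse direction goes through a trait
// that resolves an enum value to a concrete C++ type.
#include <cstdint>
#include <optional>

enum class TypeIndex : std::uint8_t { UInt8 = 1, UInt64 = 2, Float64 = 3, String = 4 };

/// Only a subset of TypeIndex is a valid attribute type.
enum class AttributeUnderlyingType : std::uint8_t
{
    UInt8 = static_cast<std::uint8_t>(TypeIndex::UInt8),
    UInt64 = static_cast<std::uint8_t>(TypeIndex::UInt64),
    Float64 = static_cast<std::uint8_t>(TypeIndex::Float64),
};

/// Direction 1: TypeIndex -> AttributeUnderlyingType (the role of enum_cast, with validation).
std::optional<AttributeUnderlyingType> toAttributeType(TypeIndex index)
{
    switch (index)
    {
        case TypeIndex::UInt8:
        case TypeIndex::UInt64:
        case TypeIndex::Float64:
            return static_cast<AttributeUnderlyingType>(index);
        default:
            return std::nullopt;  /// e.g. String has no attribute counterpart here
    }
}

/// Direction 2: enum value -> real type, the role played by TypeIndexToType.
template <TypeIndex index> struct TypeIndexToTypeImpl;
template <> struct TypeIndexToTypeImpl<TypeIndex::UInt8>   { using Type = std::uint8_t; };
template <> struct TypeIndexToTypeImpl<TypeIndex::UInt64>  { using Type = std::uint64_t; };
template <> struct TypeIndexToTypeImpl<TypeIndex::Float64> { using Type = double; };

template <AttributeUnderlyingType type>
using AttributeType = typename TypeIndexToTypeImpl<static_cast<TypeIndex>(type)>::Type;

static_assert(sizeof(AttributeType<AttributeUnderlyingType::UInt64>) == 8, "UInt64 attribute resolves to a 64-bit type");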

@@ -291,30 +291,23 @@ void FlatDictionary::blockToAttributes(const Block & block)

DictionaryKeysArenaHolder<DictionaryKeyType::Simple> arena_holder;
DictionaryKeysExtractor<DictionaryKeyType::Simple> keys_extractor({ keys_column }, arena_holder.getComplexKeyArena());
auto keys = keys_extractor.extractAllKeys();
size_t keys_size = keys_extractor.getKeysSize();

HashSet<UInt64> already_processed_keys;

size_t key_offset = 1;
static constexpr size_t key_offset = 1;
for (size_t attribute_index = 0; attribute_index < attributes.size(); ++attribute_index)
{
const IColumn & attribute_column = *block.safeGetByPosition(attribute_index + key_offset).column;
Attribute & attribute = attributes[attribute_index];

for (size_t i = 0; i < keys.size(); ++i)
for (size_t i = 0; i < keys_size; ++i)
{
auto key = keys[i];

if (already_processed_keys.find(key) != nullptr)
continue;

already_processed_keys.insert(key);
auto key = keys_extractor.extractCurrentKey();

setAttributeValue(attribute, key, attribute_column[i]);
++element_count;
keys_extractor.rollbackCurrentKey();
}

already_processed_keys.clear();
keys_extractor.reset();
}
}
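
For context on the rewritten loop above: the keys extractor is used as a cursor that is replayed once per attribute column, instead of materializing all keys up front and tracking an already-processed set. A minimal sketch of that interface for the simple-key case follows; the names mirror the diff, but the implementation is illustrative, not the real DictionaryKeysExtractor.

// Hedged sketch of a cursor-style key extractor. For plain UInt64 keys
// rollbackCurrentKey() has nothing to release; in the real extractor it
// rolls back arena memory allocated for serialized complex keys.
#include <cstddef>
#include <cstdint>
#include <vector>

class KeysExtractor
{
public:
    explicit KeysExtractor(std::vector<std::uint64_t> keys) : keys_(std::move(keys)) {}

    std::size_t getKeysSize() const { return keys_.size(); }

    /// Returns the key under the cursor and advances the cursor.
    std::uint64_t extractCurrentKey() { return keys_[pos_++]; }

    /// Frees any scratch state held for the last extracted key;
    /// the cursor stays where extractCurrentKey() left it.
    void rollbackCurrentKey() { /* nothing to release for plain integer keys */ }

    /// Rewinds the cursor so the same keys can be replayed for the next attribute column.
    void reset() { pos_ = 0; }

private:
    std::vector<std::uint64_t> keys_;
    std::size_t pos_ = 0;
};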

@@ -369,6 +362,12 @@ void FlatDictionary::loadData()
else
updateData();

element_count = 0;

size_t loaded_keys_size = loaded_keys.size();
for (size_t i = 0; i < loaded_keys_size; ++i)
element_count += loaded_keys[i];

if (configuration.require_nonempty && 0 == element_count)
throw Exception(ErrorCodes::DICTIONARY_IS_EMPTY, "{}: dictionary source is empty and 'require_nonempty' property is set.", getFullName());
}

@@ -520,15 +519,12 @@ void FlatDictionary::setAttributeValue(Attribute & attribute, const UInt64 key,

resize<ValueType>(attribute, key);

if (attribute.is_nullable_set)
{
if (value.isNull())
if (attribute.is_nullable_set && value.isNull())
{
attribute.is_nullable_set->insert(key);
loaded_keys[key] = true;
return;
}
}

setAttributeValueImpl<AttributeType>(attribute, key, value.get<AttributeType>());
};

@@ -3,26 +3,6 @@
#include "DictionaryStructure.h"
#include "registerDictionaries.h"

namespace DB
{

void registerDictionarySourceRedis(DictionarySourceFactory & factory)
{
auto create_table_source = [=](const DictionaryStructure & dict_struct,
const Poco::Util::AbstractConfiguration & config,
const String & config_prefix,
Block & sample_block,
ContextPtr /* global_context */,
const std::string & /* default_database */,
bool /* created_from_ddl */) -> DictionarySourcePtr {
return std::make_unique<RedisDictionarySource>(dict_struct, config, config_prefix + ".redis", sample_block);
};
factory.registerSource("redis", create_table_source);
}

}


#include <Poco/Redis/Array.h>
#include <Poco/Redis/Client.h>
#include <Poco/Redis/Command.h>

@@ -33,7 +13,6 @@ void registerDictionarySourceRedis(DictionarySourceFactory & factory)

#include "RedisSource.h"


namespace DB
{
namespace ErrorCodes

@@ -42,34 +21,64 @@ namespace DB
extern const int INVALID_CONFIG_PARAMETER;
extern const int INTERNAL_REDIS_ERROR;
extern const int LOGICAL_ERROR;
extern const int TIMEOUT_EXCEEDED;
}

static RedisStorageType parseStorageType(const String & storage_type_str)
{
if (storage_type_str == "hash_map")
return RedisStorageType::HASH_MAP;
else if (!storage_type_str.empty() && storage_type_str != "simple")
throw Exception(ErrorCodes::INVALID_CONFIG_PARAMETER, "Unknown storage type {} for Redis dictionary", storage_type_str);

static const size_t max_block_size = 8192;
return RedisStorageType::SIMPLE;
}

void registerDictionarySourceRedis(DictionarySourceFactory & factory)
{
auto create_table_source = [=](const DictionaryStructure & dict_struct,
const Poco::Util::AbstractConfiguration & config,
const String & config_prefix,
Block & sample_block,
ContextPtr /* global_context */,
const std::string & /* default_database */,
bool /* created_from_ddl */) -> DictionarySourcePtr {

auto redis_config_prefix = config_prefix + ".redis";
RedisDictionarySource::Configuration configuration =
{
.host = config.getString(redis_config_prefix + ".host"),
.port = static_cast<UInt16>(config.getUInt(redis_config_prefix + ".port")),
.db_index = config.getUInt(redis_config_prefix + ".db_index", 0),
.password = config.getString(redis_config_prefix + ".password", ""),
.storage_type = parseStorageType(config.getString(redis_config_prefix + ".storage_type", "")),
.pool_size = config.getUInt(redis_config_prefix + ".pool_size", 16),
};

return std::make_unique<RedisDictionarySource>(dict_struct, configuration, sample_block);
};

factory.registerSource("redis", create_table_source);
}

static constexpr size_t REDIS_MAX_BLOCK_SIZE = DEFAULT_BLOCK_SIZE;
static constexpr size_t REDIS_LOCK_ACQUIRE_TIMEOUT_MS = 5000;

RedisDictionarySource::RedisDictionarySource(
const DictionaryStructure & dict_struct_,
const String & host_,
UInt16 port_,
UInt8 db_index_,
const String & password_,
RedisStorageType storage_type_,
const Configuration & configuration_,
const Block & sample_block_)
: dict_struct{dict_struct_}
, host{host_}
, port{port_}
, db_index{db_index_}
, password{password_}
, storage_type{storage_type_}
, configuration(configuration_)
, pool(std::make_shared<Pool>(configuration.pool_size))
, sample_block{sample_block_}
, client{std::make_shared<Poco::Redis::Client>(host, port)}
{
if (dict_struct.attributes.size() != 1)
throw Exception(ErrorCodes::INVALID_CONFIG_PARAMETER,
"Invalid number of non key columns for Redis source: {}, expected 1",
DB::toString(dict_struct.attributes.size()));

if (storage_type == RedisStorageType::HASH_MAP)
if (configuration.storage_type == RedisStorageType::HASH_MAP)
{
if (!dict_struct.key)
throw Exception(ErrorCodes::INVALID_CONFIG_PARAMETER,

@@ -87,61 +96,13 @@ namespace DB
key.name,
key.type->getName());
}

if (!password.empty())
{
RedisCommand command("AUTH");
command << password;
String reply = client->execute<String>(command);
if (reply != "OK")
throw Exception(ErrorCodes::INTERNAL_REDIS_ERROR,
"Authentication failed with reason {}",
reply);
}

if (db_index != 0)
{
RedisCommand command("SELECT");
command << std::to_string(db_index);
String reply = client->execute<String>(command);
if (reply != "OK")
throw Exception(ErrorCodes::INTERNAL_REDIS_ERROR,
"Selecting database with index {} failed with reason {}",
DB::toString(db_index),
reply);
}
}


RedisDictionarySource::RedisDictionarySource(
const DictionaryStructure & dict_struct_,
const Poco::Util::AbstractConfiguration & config_,
const String & config_prefix_,
Block & sample_block_)
: RedisDictionarySource(
dict_struct_,
config_.getString(config_prefix_ + ".host"),
config_.getUInt(config_prefix_ + ".port"),
config_.getUInt(config_prefix_ + ".db_index", 0),
config_.getString(config_prefix_ + ".password",""),
parseStorageType(config_.getString(config_prefix_ + ".storage_type", "")),
sample_block_)
{
}


RedisDictionarySource::RedisDictionarySource(const RedisDictionarySource & other)
: RedisDictionarySource{other.dict_struct,
other.host,
other.port,
other.db_index,
other.password,
other.storage_type,
other.sample_block}
: RedisDictionarySource(other.dict_struct, other.configuration, other.sample_block)
{
}


RedisDictionarySource::~RedisDictionarySource() = default;

static String storageTypeToKeyType(RedisStorageType type)

@@ -161,24 +122,25 @@ namespace DB

Pipe RedisDictionarySource::loadAll()
{
if (!client->isConnected())
client->connect(host, port);
auto connection = getConnection();

RedisCommand command_for_keys("KEYS");
command_for_keys << "*";

/// Get only keys for specified storage type.
auto all_keys = client->execute<RedisArray>(command_for_keys);
auto all_keys = connection->client->execute<RedisArray>(command_for_keys);
if (all_keys.isNull())
return Pipe(std::make_shared<RedisSource>(client, RedisArray{}, storage_type, sample_block, max_block_size));
return Pipe(std::make_shared<RedisSource>(
std::move(connection), RedisArray{},
configuration.storage_type, sample_block, REDIS_MAX_BLOCK_SIZE));

RedisArray keys;
auto key_type = storageTypeToKeyType(storage_type);
auto key_type = storageTypeToKeyType(configuration.storage_type);
for (const auto & key : all_keys)
if (key_type == client->execute<String>(RedisCommand("TYPE").addRedisType(key)))
if (key_type == connection->client->execute<String>(RedisCommand("TYPE").addRedisType(key)))
keys.addRedisType(std::move(key));

if (storage_type == RedisStorageType::HASH_MAP)
if (configuration.storage_type == RedisStorageType::HASH_MAP)
{
RedisArray hkeys;
for (const auto & key : keys)

@@ -186,7 +148,7 @@ namespace DB
RedisCommand command_for_secondary_keys("HKEYS");
command_for_secondary_keys.addRedisType(key);

auto secondary_keys = client->execute<RedisArray>(command_for_secondary_keys);
auto secondary_keys = connection->client->execute<RedisArray>(command_for_secondary_keys);

RedisArray primary_with_secondary;
primary_with_secondary.addRedisType(key);

@@ -194,7 +156,7 @@ namespace DB
{
primary_with_secondary.addRedisType(secondary_key);
/// Do not store more than max_block_size values for one request.
if (primary_with_secondary.size() == max_block_size + 1)
if (primary_with_secondary.size() == REDIS_MAX_BLOCK_SIZE + 1)
{
hkeys.add(primary_with_secondary);
primary_with_secondary.clear();

@@ -209,16 +171,16 @@ namespace DB
keys = std::move(hkeys);
}

return Pipe(std::make_shared<RedisSource>(client, std::move(keys), storage_type, sample_block, max_block_size));
return Pipe(std::make_shared<RedisSource>(
std::move(connection), std::move(keys),
configuration.storage_type, sample_block, REDIS_MAX_BLOCK_SIZE));
}


Pipe RedisDictionarySource::loadIds(const std::vector<UInt64> & ids)
{
if (!client->isConnected())
client->connect(host, port);
auto connection = getConnection();

if (storage_type == RedisStorageType::HASH_MAP)
if (configuration.storage_type == RedisStorageType::HASH_MAP)
throw Exception(ErrorCodes::UNSUPPORTED_METHOD, "Cannot use loadIds with 'hash_map' storage type");

if (!dict_struct.id)

@@ -229,13 +191,14 @@ namespace DB
for (UInt64 id : ids)
keys << DB::toString(id);

return Pipe(std::make_shared<RedisSource>(client, std::move(keys), storage_type, sample_block, max_block_size));
return Pipe(std::make_shared<RedisSource>(
std::move(connection), std::move(keys),
configuration.storage_type, sample_block, REDIS_MAX_BLOCK_SIZE));
}

Pipe RedisDictionarySource::loadKeys(const Columns & key_columns, const std::vector<size_t> & requested_rows)
{
if (!client->isConnected())
client->connect(host, port);
auto connection = getConnection();

if (key_columns.size() != dict_struct.key->size())
throw Exception(ErrorCodes::LOGICAL_ERROR, "The size of key_columns does not equal to the size of dictionary key");

@@ -250,7 +213,7 @@ namespace DB
if (isInteger(type))
key << DB::toString(key_columns[i]->get64(row));
else if (isString(type))
key << get<String>((*key_columns[i])[row]);
key << get<const String &>((*key_columns[i])[row]);
else
throw Exception(ErrorCodes::LOGICAL_ERROR, "Unexpected type of key in Redis dictionary");
}

@@ -258,22 +221,65 @@ namespace DB
keys.add(key);
}

return Pipe(std::make_shared<RedisSource>(client, std::move(keys), storage_type, sample_block, max_block_size));
return Pipe(std::make_shared<RedisSource>(
std::move(connection), std::move(keys),
configuration.storage_type, sample_block, REDIS_MAX_BLOCK_SIZE));
}


String RedisDictionarySource::toString() const
{
return "Redis: " + host + ':' + DB::toString(port);
return "Redis: " + configuration.host + ':' + DB::toString(configuration.port);
}

RedisStorageType RedisDictionarySource::parseStorageType(const String & storage_type_str)
RedisDictionarySource::ConnectionPtr RedisDictionarySource::getConnection() const
{
if (storage_type_str == "hash_map")
return RedisStorageType::HASH_MAP;
else if (!storage_type_str.empty() && storage_type_str != "simple")
throw Exception(ErrorCodes::INVALID_CONFIG_PARAMETER, "Unknown storage type {} for Redis dictionary", storage_type_str);
ClientPtr client;
bool ok = pool->tryBorrowObject(client,
[] { return std::make_unique<Poco::Redis::Client>(); },
REDIS_LOCK_ACQUIRE_TIMEOUT_MS);

return RedisStorageType::SIMPLE;
if (!ok)
throw Exception(ErrorCodes::TIMEOUT_EXCEEDED,
"Could not get connection from pool, timeout exceeded {} seconds",
REDIS_LOCK_ACQUIRE_TIMEOUT_MS);

if (!client->isConnected())
{
try
{
client->connect(configuration.host, configuration.port);

if (!configuration.password.empty())
{
RedisCommand command("AUTH");
command << configuration.password;
String reply = client->execute<String>(command);
if (reply != "OK")
throw Exception(ErrorCodes::INTERNAL_REDIS_ERROR,
"Authentication failed with reason {}", reply);
}

if (configuration.db_index != 0)
{
RedisCommand command("SELECT");
command << std::to_string(configuration.db_index);
String reply = client->execute<String>(command);
if (reply != "OK")
throw Exception(ErrorCodes::INTERNAL_REDIS_ERROR,
"Selecting database with index {} failed with reason {}",
configuration.db_index, reply);
}
}
catch (...)
{
if (client->isConnected())
client->disconnect();

pool->returnObject(std::move(client));
throw;
}
}

return std::make_unique<Connection>(pool, std::move(client));
}
}
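
getConnection() above borrows a client from the pool, connects and authenticates it lazily, and hands it out wrapped in a Connection whose destructor returns the client, so early returns and exceptions in loadAll()/loadIds()/loadKeys() cannot leak a pooled connection. A standalone sketch of that borrow-and-return idea follows; the simplified pool and FakeClient are stand-ins for BorrowedObjectPool and Poco::Redis::Client.

// Hedged sketch of the borrow/return RAII pattern: the handle's destructor
// always gives the client back to the shared pool.
#include <memory>
#include <mutex>
#include <vector>

struct FakeClient { bool connected = false; };

class ClientPool
{
public:
    std::unique_ptr<FakeClient> borrow()
    {
        std::lock_guard lock(mutex_);
        if (free_.empty())
            return std::make_unique<FakeClient>();  /// pool grows lazily in this sketch
        auto client = std::move(free_.back());
        free_.pop_back();
        return client;
    }

    void giveBack(std::unique_ptr<FakeClient> client)
    {
        std::lock_guard lock(mutex_);
        free_.push_back(std::move(client));
    }

private:
    std::mutex mutex_;
    std::vector<std::unique_ptr<FakeClient>> free_;
};

struct Connection
{
    Connection(std::shared_ptr<ClientPool> pool_, std::unique_ptr<FakeClient> client_)
        : pool(std::move(pool_)), client(std::move(client_)) {}

    ~Connection() { pool->giveBack(std::move(client)); }  /// always returned to the pool

    std::shared_ptr<ClientPool> pool;
    std::unique_ptr<FakeClient> client;
};

/// Usage mirroring the sources above: the reader keeps the Connection alive
/// while it works and the destructor returns the client afterwards.
std::unique_ptr<Connection> getConnection(const std::shared_ptr<ClientPool> & pool)
{
    auto client = pool->borrow();
    if (!client->connected)
        client->connected = true;  /// real code connects, authenticates and selects a database here
    return std::make_unique<Connection>(pool, std::move(client));
}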

@@ -1,6 +1,7 @@
#pragma once

#include <Core/Block.h>
#include <base/BorrowedObjectPool.h>

#include "DictionaryStructure.h"
#include "IDictionarySource.h"

@@ -20,13 +21,13 @@ namespace Poco
}
}


namespace DB
{
namespace ErrorCodes
{
namespace ErrorCodes
{
extern const int NOT_IMPLEMENTED;
}
}

enum class RedisStorageType
{
SIMPLE,

@@ -36,24 +37,46 @@ namespace ErrorCodes

class RedisDictionarySource final : public IDictionarySource
{
RedisDictionarySource(
const DictionaryStructure & dict_struct,
const std::string & host,
UInt16 port,
UInt8 db_index,
const std::string & password,
RedisStorageType storage_type,
const Block & sample_block);

public:
using RedisArray = Poco::Redis::Array;
using RedisCommand = Poco::Redis::Command;

using ClientPtr = std::unique_ptr<Poco::Redis::Client>;
using Pool = BorrowedObjectPool<ClientPtr>;
using PoolPtr = std::shared_ptr<Pool>;

struct Configuration
{
const std::string host;
const UInt16 port;
const UInt32 db_index;
const std::string password;
const RedisStorageType storage_type;
const size_t pool_size;
};

struct Connection
{
Connection(PoolPtr pool_, ClientPtr client_)
: pool(std::move(pool_)), client(std::move(client_))
{
}

~Connection()
{
pool->returnObject(std::move(client));
}

PoolPtr pool;
ClientPtr client;
};

using ConnectionPtr = std::unique_ptr<Connection>;

RedisDictionarySource(
const DictionaryStructure & dict_struct,
const Poco::Util::AbstractConfiguration & config,
const std::string & config_prefix,
Block & sample_block);
const DictionaryStructure & dict_struct_,
const Configuration & configuration_,
const Block & sample_block_);

RedisDictionarySource(const RedisDictionarySource & other);

@@ -81,17 +104,12 @@ namespace ErrorCodes
std::string toString() const override;

private:
static RedisStorageType parseStorageType(const std::string& storage_type);
ConnectionPtr getConnection() const;

const DictionaryStructure dict_struct;
const std::string host;
const UInt16 port;
const UInt8 db_index;
const std::string password;
const RedisStorageType storage_type;
const Configuration configuration;

PoolPtr pool;
Block sample_block;

std::shared_ptr<Poco::Redis::Client> client;
};

}

@@ -30,20 +30,22 @@ namespace DB


RedisSource::RedisSource(
const std::shared_ptr<Poco::Redis::Client> & client_,
ConnectionPtr connection_,
const RedisArray & keys_,
const RedisStorageType & storage_type_,
const DB::Block & sample_block,
const size_t max_block_size_)
size_t max_block_size_)
: SourceWithProgress(sample_block)
, client(client_), keys(keys_), storage_type(storage_type_), max_block_size{max_block_size_}
, connection(std::move(connection_))
, keys(keys_)
, storage_type(storage_type_)
, max_block_size{max_block_size_}
{
description.init(sample_block);
}

RedisSource::~RedisSource() = default;


namespace
{
using ValueType = ExternalResultDescription::ValueType;

@@ -121,7 +123,6 @@ namespace DB
}
}


Chunk RedisSource::generate()
{
if (keys.isNull() || description.sample_block.rows() == 0 || cursor >= keys.size())

@@ -168,7 +169,7 @@ namespace DB
for (const auto & elem : keys_array)
command_for_values.addRedisType(elem);

auto values = client->execute<RedisArray>(command_for_values);
auto values = connection->client->execute<RedisArray>(command_for_values);

if (keys_array.size() != values.size() + 1) // 'HMGET' primary_key secondary_keys
throw Exception(ErrorCodes::NUMBER_OF_COLUMNS_DOESNT_MATCH,

@@ -199,7 +200,7 @@ namespace DB
for (size_t i = 0; i < need_values; ++i)
command_for_values.add(keys.get<RedisBulkString>(cursor + i));

auto values = client->execute<RedisArray>(command_for_values);
auto values = connection->client->execute<RedisArray>(command_for_values);
if (values.size() != need_values)
throw Exception(ErrorCodes::INTERNAL_REDIS_ERROR,
"Inconsistent sizes of keys and values in Redis request");

@@ -24,13 +24,14 @@ namespace DB
public:
using RedisArray = Poco::Redis::Array;
using RedisBulkString = Poco::Redis::BulkString;
using ConnectionPtr = RedisDictionarySource::ConnectionPtr;

RedisSource(
const std::shared_ptr<Poco::Redis::Client> & client_,
ConnectionPtr connection_,
const Poco::Redis::Array & keys_,
const RedisStorageType & storage_type_,
const Block & sample_block,
const size_t max_block_size);
size_t max_block_size);

~RedisSource() override;

@@ -39,7 +40,7 @@ namespace DB
private:
Chunk generate() override;

std::shared_ptr<Poco::Redis::Client> client;
ConnectionPtr connection;
Poco::Redis::Array keys;
RedisStorageType storage_type;
const size_t max_block_size;

@@ -8,6 +8,7 @@
#include <Poco/Net/HTTPRequest.h>
#include <Poco/Net/HTTPResponse.h>
#include <base/logger_useful.h>
#include <Common/DNSResolver.h>

namespace DB::ErrorCodes
{

@@ -44,13 +45,36 @@ Aws::Client::ClientConfigurationPerRequest ProxyResolverConfiguration::getConfig
Poco::Timespan(1000000), /// Send timeout.
Poco::Timespan(1000000) /// Receive timeout.
);
auto session = makeHTTPSession(endpoint, timeouts);

try
{
/// It should be just empty GET request.
Poco::Net::HTTPRequest request(Poco::Net::HTTPRequest::HTTP_GET, endpoint.getPath(), Poco::Net::HTTPRequest::HTTP_1_1);

const auto & host = endpoint.getHost();
auto resolved_hosts = DNSResolver::instance().resolveHostAll(host);

if (resolved_hosts.empty())
throw Exception(ErrorCodes::BAD_ARGUMENTS, "Proxy resolver cannot resolve host {}", host);

HTTPSessionPtr session;

for (size_t i = 0; i < resolved_hosts.size(); ++i)
{
auto resolved_endpoint = endpoint;
resolved_endpoint.setHost(resolved_hosts[i].toString());
session = makeHTTPSession(endpoint, timeouts, false);

try
{
session->sendRequest(request);
}
catch (...)
{
if (i + 1 == resolved_hosts.size())
throw;
}
}

Poco::Net::HTTPResponse response;
auto & response_body_stream = session->receiveResponse(response);
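
The hunk above resolves every address of the proxy endpoint and tries them in turn, letting the exception escape only when the last candidate also fails. The same pattern in a compact, generic form, with a callable standing in for the HTTP session machinery:

// Hedged sketch of "try every resolved address, rethrow only the last failure".
#include <cstddef>
#include <stdexcept>
#include <string>
#include <vector>

template <typename TryOne>
void tryEachHost(const std::vector<std::string> & hosts, TryOne && try_one)
{
    if (hosts.empty())
        throw std::runtime_error("no resolved hosts");

    for (size_t i = 0; i < hosts.size(); ++i)
    {
        try
        {
            try_one(hosts[i]);
            return;  /// first success wins in this sketch
        }
        catch (...)
        {
            if (i + 1 == hosts.size())
                throw;  /// out of candidates: surface the last error
        }
    }
}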

@@ -18,10 +18,10 @@ target_link_libraries(clickhouse_functions
clickhouse_dictionaries
clickhouse_dictionaries_embedded
clickhouse_parsers
consistent-hashing
ch_contrib::consistent_hashing
dbms
metrohash
murmurhash
ch_contrib::metrohash
ch_contrib::murmurhash

PRIVATE
ch_contrib::zlib

Some files were not shown because too many files have changed in this diff.