Mirror of https://github.com/ClickHouse/ClickHouse.git (synced 2024-11-10 09:32:06 +00:00)

Merge remote-tracking branch 'origin' into unbundled-packager

Commit: fa1875a313

3	.github/PULL_REQUEST_TEMPLATE.md (vendored)
@ -24,3 +24,6 @@ Detailed description / Documentation draft:

By adding documentation, you'll allow users to try your new feature immediately, not when someone else will have time to document it later. Documentation is necessary for all features that affect user experience in any way. You can add brief documentation draft above, or add documentation right into your patch as Markdown files in [docs](https://github.com/ClickHouse/ClickHouse/tree/master/docs) folder.

If you are doing this for the first time, it's recommended to read the lightweight [Contributing to ClickHouse Documentation](https://github.com/ClickHouse/ClickHouse/tree/master/docs/README.md) guide first.

Information about CI checks: https://clickhouse.tech/docs/en/development/continuous-integration/
@ -28,6 +28,16 @@ endforeach()

project(ClickHouse)

option(FAIL_ON_UNSUPPORTED_OPTIONS_COMBINATION
"Stop/Fail CMake configuration if some ENABLE_XXX option is defined (either ON or OFF) but is not possible to satisfy"
ON
)
if(FAIL_ON_UNSUPPORTED_OPTIONS_COMBINATION)
set(RECONFIGURE_MESSAGE_LEVEL FATAL_ERROR)
else()
set(RECONFIGURE_MESSAGE_LEVEL STATUS)
endif()

include (cmake/arch.cmake)
include (cmake/target.cmake)
include (cmake/tools.cmake)
@ -57,7 +67,7 @@ if(ENABLE_IPO)
message(STATUS "IPO/LTO is supported, enabling")
set(CMAKE_INTERPROCEDURAL_OPTIMIZATION TRUE)
else()
message(STATUS "IPO/LTO is not supported: <${IPO_NOT_SUPPORTED}>")
message (${RECONFIGURE_MESSAGE_LEVEL} "IPO/LTO is not supported: <${IPO_NOT_SUPPORTED}>")
endif()
else()
message(STATUS "IPO/LTO not enabled.")
@ -133,6 +143,8 @@ option (ENABLE_TESTS "Enables tests" ON)

if (OS_LINUX AND NOT UNBUNDLED AND MAKE_STATIC_LIBRARIES AND NOT SPLIT_SHARED_LIBRARIES AND CMAKE_VERSION VERSION_GREATER "3.9.0")
option (GLIBC_COMPATIBILITY "Set to TRUE to enable compatibility with older glibc libraries. Only for x86_64, Linux. Implies ENABLE_FASTMEMCPY." ON)
elseif(GLIBC_COMPATIBILITY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Glibc compatibility cannot be enabled in current configuration")
endif ()

if (NOT CMAKE_VERSION VERSION_GREATER "3.9.0")
@ -253,7 +265,9 @@ if (COMPILER_CLANG)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fdiagnostics-absolute-paths")
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fdiagnostics-absolute-paths")

if (NOT ENABLE_TESTS AND NOT SANITIZE)
option(ENABLE_THINLTO "Enable Thin LTO. Only applicable for clang. It's also suppressed when building with tests or sanitizers." ON)
endif()

# We cannot afford to use LTO when compiling unitests, and it's not enough
# to only supply -fno-lto at the final linking stage. So we disable it
@ -263,6 +277,8 @@ if (COMPILER_CLANG)
set (CMAKE_C_FLAGS_RELWITHDEBINFO "${CMAKE_C_FLAGS_RELWITHDEBINFO} -flto=thin")
set (CMAKE_CXX_FLAGS_RELWITHDEBINFO "${CMAKE_CXX_FLAGS_RELWITHDEBINFO} -flto=thin")
set (CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO "${CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO} -flto=thin")
elseif (ENABLE_THINLTO)
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot enable ThinLTO")
endif ()

# Always prefer llvm tools when using clang. For instance, we cannot use GNU ar when llvm LTO is enabled
@ -282,6 +298,8 @@ if (COMPILER_CLANG)
else ()
message(WARNING "Cannot find llvm-ranlib. System ranlib will be used instead. It does not work with ThinLTO.")
endif ()
elseif (ENABLE_THINLTO)
message (${RECONFIGURE_MESSAGE_LEVEL} "ThinLTO is only available with CLang")
endif ()

option (ENABLE_LIBRARIES "Enable all libraries (Global default switch)" ON)
@ -385,6 +403,7 @@ include (cmake/find/rdkafka.cmake)
include (cmake/find/amqpcpp.cmake)
include (cmake/find/capnp.cmake)
include (cmake/find/llvm.cmake)
include (cmake/find/termcap.cmake) # for external static llvm
include (cmake/find/opencl.cmake)
include (cmake/find/h3.cmake)
include (cmake/find/libxml2.cmake)
@ -393,21 +412,33 @@ include (cmake/find/protobuf.cmake)
include (cmake/find/grpc.cmake)
include (cmake/find/pdqsort.cmake)
include (cmake/find/hdfs3.cmake) # uses protobuf
include (cmake/find/poco.cmake)
include (cmake/find/curl.cmake)
include (cmake/find/s3.cmake)
include (cmake/find/base64.cmake)
include (cmake/find/parquet.cmake)
include (cmake/find/simdjson.cmake)
include (cmake/find/rapidjson.cmake)
include (cmake/find/fastops.cmake)
include (cmake/find/odbc.cmake)

if(NOT USE_INTERNAL_PARQUET_LIBRARY)
set (ENABLE_ORC OFF CACHE INTERNAL "")
endif()
include (cmake/find/orc.cmake)

include (cmake/find/avro.cmake)
include (cmake/find/msgpack.cmake)
include (cmake/find/cassandra.cmake)
include (cmake/find/sentry.cmake)
include (cmake/find/stats.cmake)

set (USE_INTERNAL_CITYHASH_LIBRARY ON CACHE INTERNAL "")
find_contrib_lib(cityhash)

find_contrib_lib(farmhash)

set (USE_INTERNAL_BTRIE_LIBRARY ON CACHE INTERNAL "")
find_contrib_lib(btrie)

if (ENABLE_TESTS)
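The new FAIL_ON_UNSUPPORTED_OPTIONS_COMBINATION option and the derived RECONFIGURE_MESSAGE_LEVEL variable are the mechanism the rest of this commit builds on: every cmake/find/*.cmake script below reports an unsatisfiable ENABLE_XXX request through ${RECONFIGURE_MESSAGE_LEVEL}, which is FATAL_ERROR by default and STATUS when the option is switched off. A minimal sketch of that reporting pattern, using a hypothetical ENABLE_FOO option that is not part of this diff:

# Hypothetical feature toggle following the pattern introduced above.
option (ENABLE_FOO "Enable foo" ${ENABLE_LIBRARIES})

if (ENABLE_FOO)
    find_package (Foo) # hypothetical system package
    if (Foo_FOUND)
        set (USE_FOO 1)
    else ()
        # With FAIL_ON_UNSUPPORTED_OPTIONS_COMBINATION=ON this aborts the configure step;
        # with it OFF the message is downgraded to STATUS and the feature is silently dropped.
        message (${RECONFIGURE_MESSAGE_LEVEL} "Can't enable foo")
    endif ()
endif ()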
@ -50,6 +50,14 @@ if (NOT USE_INTERNAL_MYSQL_LIBRARY AND OPENSSL_INCLUDE_DIR)
target_include_directories (mysqlxx SYSTEM PRIVATE ${OPENSSL_INCLUDE_DIR})
endif ()

if (NOT USE_INTERNAL_MYSQL_LIBRARY AND USE_STATIC_LIBRARIES)
message(WARNING "Statically linking with system mysql/mariadb only works "
"if mysql client libraries are built with same openssl version as "
"we are going to use now. It wouldn't work if GnuTLS is used. "
"Try -D\"USE_INTERNAL_MYSQL_LIBRARY\"=ON or -D\"ENABLE_MYSQL\"=OFF or "
"-D\"USE_STATIC_LIBRARIES\"=OFF")
endif ()

if (ENABLE_TESTS)
add_subdirectory (tests)
endif ()
@ -17,143 +17,417 @@
# specific language governing permissions and limitations
# under the License.

# - Find ARROW (arrow/api.h, libarrow.a, libarrow.so)
# - Find Arrow (arrow/api.h, libarrow.a, libarrow.so)
# This module defines
#  ARROW_FOUND, whether Arrow has been found
#  ARROW_FULL_SO_VERSION, full shared object version of found Arrow "100.0.0"
#  ARROW_IMPORT_LIB, path to libarrow's import library (Windows only)
#  ARROW_INCLUDE_DIR, directory containing headers
#  ARROW_LIBS, directory containing arrow libraries
#  ARROW_STATIC_LIB, path to libarrow.a
#  ARROW_LIBS, deprecated. Use ARROW_LIB_DIR instead
#  ARROW_LIB_DIR, directory containing Arrow libraries
#  ARROW_SHARED_IMP_LIB, deprecated. Use ARROW_IMPORT_LIB instead
#  ARROW_SHARED_LIB, path to libarrow's shared library
#  ARROW_SHARED_IMP_LIB, path to libarrow's import library (MSVC only)
#  ARROW_FOUND, whether arrow has been found
#  ARROW_SO_VERSION, shared object version of found Arrow such as "100"
#  ARROW_STATIC_LIB, path to libarrow.a
#  ARROW_VERSION, version of found Arrow
#  ARROW_VERSION_MAJOR, major version of found Arrow
#  ARROW_VERSION_MINOR, minor version of found Arrow
#  ARROW_VERSION_PATCH, patch version of found Arrow

if(DEFINED ARROW_FOUND)
return()
endif()

include(FindPkgConfig)
include(GNUInstallDirs)
include(FindPackageHandleStandardArgs)

if ("$ENV{ARROW_HOME}" STREQUAL "")
pkg_check_modules(ARROW arrow)
if (ARROW_FOUND)
pkg_get_variable(ARROW_SO_VERSION arrow so_version)
set(ARROW_ABI_VERSION ${ARROW_SO_VERSION})
message(STATUS "Arrow SO and ABI version: ${ARROW_SO_VERSION}")
pkg_get_variable(ARROW_FULL_SO_VERSION arrow full_so_version)
message(STATUS "Arrow full SO version: ${ARROW_FULL_SO_VERSION}")
if ("${ARROW_INCLUDE_DIRS}" STREQUAL "")
set(ARROW_INCLUDE_DIRS "/usr/${CMAKE_INSTALL_INCLUDEDIR}")
endif()
if ("${ARROW_LIBRARY_DIRS}" STREQUAL "")
set(ARROW_LIBRARY_DIRS "/usr/${CMAKE_INSTALL_LIBDIR}")
if (EXISTS "/etc/debian_version" AND CMAKE_LIBRARY_ARCHITECTURE)
set(ARROW_LIBRARY_DIRS
"${ARROW_LIBRARY_DIRS}/${CMAKE_LIBRARY_ARCHITECTURE}")
endif()
endif()
set(ARROW_INCLUDE_DIR ${ARROW_INCLUDE_DIRS})
set(ARROW_LIBS ${ARROW_LIBRARY_DIRS})
set(ARROW_SEARCH_LIB_PATH ${ARROW_LIBRARY_DIRS})
endif()
else()
set(ARROW_HOME "$ENV{ARROW_HOME}")

set(ARROW_SEARCH_HEADER_PATHS
${ARROW_HOME}/include
)

set(ARROW_SEARCH_LIB_PATH
${ARROW_HOME}/lib
)

find_path(ARROW_INCLUDE_DIR arrow/array.h PATHS
${ARROW_SEARCH_HEADER_PATHS}
# make sure we don't accidentally pick up a different version
NO_DEFAULT_PATH
)
set(ARROW_SEARCH_LIB_PATH_SUFFIXES)
if(CMAKE_LIBRARY_ARCHITECTURE)
list(APPEND ARROW_SEARCH_LIB_PATH_SUFFIXES "lib/${CMAKE_LIBRARY_ARCHITECTURE}")
endif()
list(APPEND ARROW_SEARCH_LIB_PATH_SUFFIXES
"lib64"
"lib32"
"lib"
"bin")
set(ARROW_CONFIG_SUFFIXES
"_RELEASE"
"_RELWITHDEBINFO"
"_MINSIZEREL"
"_DEBUG"
"")
if(CMAKE_BUILD_TYPE)
string(TOUPPER ${CMAKE_BUILD_TYPE} ARROW_CONFIG_SUFFIX_PREFERRED)
set(ARROW_CONFIG_SUFFIX_PREFERRED "_${ARROW_CONFIG_SUFFIX_PREFERRED}")
list(INSERT ARROW_CONFIG_SUFFIXES 0 "${ARROW_CONFIG_SUFFIX_PREFERRED}")
endif()

find_library(ARROW_LIB_PATH NAMES arrow
PATHS
${ARROW_SEARCH_LIB_PATH}
NO_DEFAULT_PATH)
get_filename_component(ARROW_LIBS ${ARROW_LIB_PATH} DIRECTORY)

find_library(ARROW_PYTHON_LIB_PATH NAMES arrow_python
PATHS
${ARROW_SEARCH_LIB_PATH}
NO_DEFAULT_PATH)
get_filename_component(ARROW_PYTHON_LIBS ${ARROW_PYTHON_LIB_PATH} DIRECTORY)

if (MSVC)
SET(CMAKE_FIND_LIBRARY_SUFFIXES ".lib" ".dll")

if (MSVC AND NOT DEFINED ARROW_MSVC_STATIC_LIB_SUFFIX)
if(NOT DEFINED ARROW_MSVC_STATIC_LIB_SUFFIX)
if(MSVC)
set(ARROW_MSVC_STATIC_LIB_SUFFIX "_static")
endif()

find_library(ARROW_SHARED_LIBRARIES NAMES arrow
PATHS ${ARROW_HOME} NO_DEFAULT_PATH
PATH_SUFFIXES "bin" )

find_library(ARROW_PYTHON_SHARED_LIBRARIES NAMES arrow_python
PATHS ${ARROW_HOME} NO_DEFAULT_PATH
PATH_SUFFIXES "bin" )
get_filename_component(ARROW_SHARED_LIBS ${ARROW_SHARED_LIBRARIES} PATH )
get_filename_component(ARROW_PYTHON_SHARED_LIBS ${ARROW_PYTHON_SHARED_LIBRARIES} PATH )
endif ()

if (ARROW_INCLUDE_DIR AND ARROW_LIBS)
set(ARROW_FOUND TRUE)
set(ARROW_LIB_NAME arrow)
set(ARROW_PYTHON_LIB_NAME arrow_python)
if (MSVC)
set(ARROW_STATIC_LIB ${ARROW_LIBS}/${ARROW_LIB_NAME}${ARROW_MSVC_STATIC_LIB_SUFFIX}${CMAKE_STATIC_LIBRARY_SUFFIX})
set(ARROW_PYTHON_STATIC_LIB ${ARROW_PYTHON_LIBS}/${ARROW_PYTHON_LIB_NAME}${ARROW_MSVC_STATIC_LIB_SUFFIX}${CMAKE_STATIC_LIBRARY_SUFFIX})
set(ARROW_SHARED_LIB ${ARROW_SHARED_LIBS}/${ARROW_LIB_NAME}${CMAKE_SHARED_LIBRARY_SUFFIX})
set(ARROW_PYTHON_SHARED_LIB ${ARROW_PYTHON_SHARED_LIBS}/${ARROW_PYTHON_LIB_NAME}${CMAKE_SHARED_LIBRARY_SUFFIX})
set(ARROW_SHARED_IMP_LIB ${ARROW_LIBS}/${ARROW_LIB_NAME}.lib)
set(ARROW_PYTHON_SHARED_IMP_LIB ${ARROW_PYTHON_LIBS}/${ARROW_PYTHON_LIB_NAME}.lib)
else()
set(ARROW_STATIC_LIB ${ARROW_LIBS}/lib${ARROW_LIB_NAME}.a)
set(ARROW_PYTHON_STATIC_LIB ${ARROW_LIBS}/lib${ARROW_PYTHON_LIB_NAME}.a)

set(ARROW_SHARED_LIB ${ARROW_LIBS}/lib${ARROW_LIB_NAME}${CMAKE_SHARED_LIBRARY_SUFFIX})
set(ARROW_PYTHON_SHARED_LIB ${ARROW_LIBS}/lib${ARROW_PYTHON_LIB_NAME}${CMAKE_SHARED_LIBRARY_SUFFIX})
set(ARROW_MSVC_STATIC_LIB_SUFFIX "")
endif()
endif()

if (ARROW_FOUND)
if (NOT Arrow_FIND_QUIETLY)
message(STATUS "Found the Arrow core library: ${ARROW_LIB_PATH}")
message(STATUS "Found the Arrow Python library: ${ARROW_PYTHON_LIB_PATH}")
endif ()
else ()
if (NOT Arrow_FIND_QUIETLY)
set(ARROW_ERR_MSG "Could not find the Arrow library. Looked for headers")
set(ARROW_ERR_MSG "${ARROW_ERR_MSG} in ${ARROW_SEARCH_HEADER_PATHS}, and for libs")
set(ARROW_ERR_MSG "${ARROW_ERR_MSG} in ${ARROW_SEARCH_LIB_PATH}")
if (Arrow_FIND_REQUIRED)
message(FATAL_ERROR "${ARROW_ERR_MSG}")
else (Arrow_FIND_REQUIRED)
message(STATUS "${ARROW_ERR_MSG}")
endif (Arrow_FIND_REQUIRED)
endif ()
set(ARROW_FOUND FALSE)
endif ()
# Internal function.
#
# Set shared library name for ${base_name} to ${output_variable}.
#
# Example:
#   arrow_build_shared_library_name(ARROW_SHARED_LIBRARY_NAME arrow)
#   # -> ARROW_SHARED_LIBRARY_NAME=libarrow.so on Linux
#   # -> ARROW_SHARED_LIBRARY_NAME=libarrow.dylib on macOS
#   # -> ARROW_SHARED_LIBRARY_NAME=arrow.dll with MSVC on Windows
#   # -> ARROW_SHARED_LIBRARY_NAME=libarrow.dll with MinGW on Windows
function(arrow_build_shared_library_name output_variable base_name)
set(${output_variable}
"${CMAKE_SHARED_LIBRARY_PREFIX}${base_name}${CMAKE_SHARED_LIBRARY_SUFFIX}"
PARENT_SCOPE)
endfunction()

if (MSVC)
mark_as_advanced(
ARROW_INCLUDE_DIR
ARROW_STATIC_LIB
ARROW_SHARED_LIB
ARROW_SHARED_IMP_LIB
ARROW_PYTHON_STATIC_LIB
ARROW_PYTHON_SHARED_LIB
ARROW_PYTHON_SHARED_IMP_LIB
)
else()
mark_as_advanced(
ARROW_INCLUDE_DIR
ARROW_STATIC_LIB
ARROW_SHARED_LIB
ARROW_PYTHON_STATIC_LIB
ARROW_PYTHON_SHARED_LIB
)
# Internal function.
#
# Set import library name for ${base_name} to ${output_variable}.
# This is useful only for MSVC build. Import library is used only
# with MSVC build.
#
# Example:
#   arrow_build_import_library_name(ARROW_IMPORT_LIBRARY_NAME arrow)
#   # -> ARROW_IMPORT_LIBRARY_NAME=arrow on Linux (meaningless)
#   # -> ARROW_IMPORT_LIBRARY_NAME=arrow on macOS (meaningless)
#   # -> ARROW_IMPORT_LIBRARY_NAME=arrow.lib with MSVC on Windows
#   # -> ARROW_IMPORT_LIBRARY_NAME=libarrow.dll.a with MinGW on Windows
function(arrow_build_import_library_name output_variable base_name)
set(${output_variable}
"${CMAKE_IMPORT_LIBRARY_PREFIX}${base_name}${CMAKE_IMPORT_LIBRARY_SUFFIX}"
PARENT_SCOPE)
endfunction()

# Internal function.
#
# Set static library name for ${base_name} to ${output_variable}.
#
# Example:
#   arrow_build_static_library_name(ARROW_STATIC_LIBRARY_NAME arrow)
#   # -> ARROW_STATIC_LIBRARY_NAME=libarrow.a on Linux
#   # -> ARROW_STATIC_LIBRARY_NAME=libarrow.a on macOS
#   # -> ARROW_STATIC_LIBRARY_NAME=arrow.lib with MSVC on Windows
#   # -> ARROW_STATIC_LIBRARY_NAME=libarrow.dll.a with MinGW on Windows
function(arrow_build_static_library_name output_variable base_name)
set(
${output_variable}
"${CMAKE_STATIC_LIBRARY_PREFIX}${base_name}${ARROW_MSVC_STATIC_LIB_SUFFIX}${CMAKE_STATIC_LIBRARY_SUFFIX}"
PARENT_SCOPE)
endfunction()

# Internal function.
#
# Set macro value for ${macro_name} in ${header_content} to ${output_variable}.
#
# Example:
#   arrow_extract_macro_value(version_major
#                             "ARROW_VERSION_MAJOR"
#                             "#define ARROW_VERSION_MAJOR 1.0.0")
#   # -> version_major=1.0.0
function(arrow_extract_macro_value output_variable macro_name header_content)
string(REGEX MATCH "#define +${macro_name} +[^\r\n]+" macro_definition
"${header_content}")
string(REGEX
REPLACE "^#define +${macro_name} +(.+)$" "\\1" macro_value "${macro_definition}")
set(${output_variable} "${macro_value}" PARENT_SCOPE)
endfunction()

# Internal macro only for arrow_find_package.
#
# Find package in HOME.
macro(arrow_find_package_home)
find_path(${prefix}_include_dir "${header_path}"
PATHS "${home}"
PATH_SUFFIXES "include"
NO_DEFAULT_PATH)
set(include_dir "${${prefix}_include_dir}")
set(${prefix}_INCLUDE_DIR "${include_dir}" PARENT_SCOPE)

if(MSVC)
set(CMAKE_SHARED_LIBRARY_SUFFIXES_ORIGINAL ${CMAKE_FIND_LIBRARY_SUFFIXES})
# .dll isn't found by find_library with MSVC because .dll isn't included in
# CMAKE_FIND_LIBRARY_SUFFIXES.
list(APPEND CMAKE_FIND_LIBRARY_SUFFIXES "${CMAKE_SHARED_LIBRARY_SUFFIX}")
endif()
find_library(${prefix}_shared_lib
NAMES "${shared_lib_name}"
PATHS "${home}"
PATH_SUFFIXES ${ARROW_SEARCH_LIB_PATH_SUFFIXES}
NO_DEFAULT_PATH)
if(MSVC)
set(CMAKE_SHARED_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES_ORIGINAL})
endif()
set(shared_lib "${${prefix}_shared_lib}")
set(${prefix}_SHARED_LIB "${shared_lib}" PARENT_SCOPE)
if(shared_lib)
add_library(${target_shared} SHARED IMPORTED)
set_target_properties(${target_shared} PROPERTIES IMPORTED_LOCATION "${shared_lib}")
if(include_dir)
set_target_properties(${target_shared}
PROPERTIES INTERFACE_INCLUDE_DIRECTORIES "${include_dir}")
endif()
find_library(${prefix}_import_lib
NAMES "${import_lib_name}"
PATHS "${home}"
PATH_SUFFIXES ${ARROW_SEARCH_LIB_PATH_SUFFIXES}
NO_DEFAULT_PATH)
set(import_lib "${${prefix}_import_lib}")
set(${prefix}_IMPORT_LIB "${import_lib}" PARENT_SCOPE)
if(import_lib)
set_target_properties(${target_shared} PROPERTIES IMPORTED_IMPLIB "${import_lib}")
endif()
endif()

find_library(${prefix}_static_lib
NAMES "${static_lib_name}"
PATHS "${home}"
PATH_SUFFIXES ${ARROW_SEARCH_LIB_PATH_SUFFIXES}
NO_DEFAULT_PATH)
set(static_lib "${${prefix}_static_lib}")
set(${prefix}_STATIC_LIB "${static_lib}" PARENT_SCOPE)
if(static_lib)
add_library(${target_static} STATIC IMPORTED)
set_target_properties(${target_static} PROPERTIES IMPORTED_LOCATION "${static_lib}")
if(include_dir)
set_target_properties(${target_static}
PROPERTIES INTERFACE_INCLUDE_DIRECTORIES "${include_dir}")
endif()
endif()
endmacro()

# Internal macro only for arrow_find_package.
#
# Find package by CMake package configuration.
macro(arrow_find_package_cmake_package_configuration)
find_package(${cmake_package_name} CONFIG)
if(${cmake_package_name}_FOUND)
set(${prefix}_USE_CMAKE_PACKAGE_CONFIG TRUE PARENT_SCOPE)
if(TARGET ${target_shared})
foreach(suffix ${ARROW_CONFIG_SUFFIXES})
get_target_property(shared_lib ${target_shared} IMPORTED_LOCATION${suffix})
if(shared_lib)
# Remove shared library version:
#   libarrow.so.100.0.0 -> libarrow.so
# Because ARROW_HOME and pkg-config approaches don't add
# shared library version.
string(REGEX
REPLACE "(${CMAKE_SHARED_LIBRARY_SUFFIX})[.0-9]+$" "\\1" shared_lib
"${shared_lib}")
set(${prefix}_SHARED_LIB "${shared_lib}" PARENT_SCOPE)
break()
endif()
endforeach()
endif()
if(TARGET ${target_static})
foreach(suffix ${ARROW_CONFIG_SUFFIXES})
get_target_property(static_lib ${target_static} IMPORTED_LOCATION${suffix})
if(static_lib)
set(${prefix}_STATIC_LIB "${static_lib}" PARENT_SCOPE)
break()
endif()
endforeach()
endif()
endif()
endmacro()

# Internal macro only for arrow_find_package.
#
# Find package by pkg-config.
macro(arrow_find_package_pkg_config)
pkg_check_modules(${prefix}_PC ${pkg_config_name})
if(${prefix}_PC_FOUND)
set(${prefix}_USE_PKG_CONFIG TRUE PARENT_SCOPE)

set(include_dir "${${prefix}_PC_INCLUDEDIR}")
set(lib_dir "${${prefix}_PC_LIBDIR}")
set(shared_lib_paths "${${prefix}_PC_LINK_LIBRARIES}")
# Use the first shared library path as the IMPORTED_LOCATION
# for ${target_shared}. This assumes that the first shared library
# path is the shared library path for this module.
list(GET shared_lib_paths 0 first_shared_lib_path)
# Use the rest shared library paths as the INTERFACE_LINK_LIBRARIES
# for ${target_shared}. This assumes that the rest shared library
# paths are dependency library paths for this module.
list(LENGTH shared_lib_paths n_shared_lib_paths)
if(n_shared_lib_paths LESS_EQUAL 1)
set(rest_shared_lib_paths)
else()
list(SUBLIST
shared_lib_paths
1
-1
rest_shared_lib_paths)
endif()

set(${prefix}_VERSION "${${prefix}_PC_VERSION}" PARENT_SCOPE)
set(${prefix}_INCLUDE_DIR "${include_dir}" PARENT_SCOPE)
set(${prefix}_SHARED_LIB "${first_shared_lib_path}" PARENT_SCOPE)

add_library(${target_shared} SHARED IMPORTED)
set_target_properties(${target_shared}
PROPERTIES INTERFACE_INCLUDE_DIRECTORIES
"${include_dir}"
INTERFACE_LINK_LIBRARIES
"${rest_shared_lib_paths}"
IMPORTED_LOCATION
"${first_shared_lib_path}")
get_target_property(shared_lib ${target_shared} IMPORTED_LOCATION)

find_library(${prefix}_static_lib
NAMES "${static_lib_name}"
PATHS "${lib_dir}"
NO_DEFAULT_PATH)
set(static_lib "${${prefix}_static_lib}")
set(${prefix}_STATIC_LIB "${static_lib}" PARENT_SCOPE)
if(static_lib)
add_library(${target_static} STATIC IMPORTED)
set_target_properties(${target_static}
PROPERTIES INTERFACE_INCLUDE_DIRECTORIES "${include_dir}"
IMPORTED_LOCATION "${static_lib}")
endif()
endif()
endmacro()

function(arrow_find_package
prefix
home
base_name
header_path
cmake_package_name
pkg_config_name)
arrow_build_shared_library_name(shared_lib_name ${base_name})
arrow_build_import_library_name(import_lib_name ${base_name})
arrow_build_static_library_name(static_lib_name ${base_name})

set(target_shared ${base_name}_shared)
set(target_static ${base_name}_static)

if(home)
arrow_find_package_home()
set(${prefix}_FIND_APPROACH "HOME: ${home}" PARENT_SCOPE)
else()
arrow_find_package_cmake_package_configuration()
if(${cmake_package_name}_FOUND)
set(${prefix}_FIND_APPROACH
"CMake package configuration: ${cmake_package_name}"
PARENT_SCOPE)
else()
arrow_find_package_pkg_config()
set(${prefix}_FIND_APPROACH "pkg-config: ${pkg_config_name}" PARENT_SCOPE)
endif()
endif()

if(NOT include_dir)
if(TARGET ${target_shared})
get_target_property(include_dir ${target_shared} INTERFACE_INCLUDE_DIRECTORIES)
elseif(TARGET ${target_static})
get_target_property(include_dir ${target_static} INTERFACE_INCLUDE_DIRECTORIES)
endif()
endif()
if(include_dir)
set(${prefix}_INCLUDE_DIR "${include_dir}" PARENT_SCOPE)
endif()

if(shared_lib)
get_filename_component(lib_dir "${shared_lib}" DIRECTORY)
elseif(static_lib)
get_filename_component(lib_dir "${static_lib}" DIRECTORY)
else()
set(lib_dir NOTFOUND)
endif()
set(${prefix}_LIB_DIR "${lib_dir}" PARENT_SCOPE)
# For backward compatibility
set(${prefix}_LIBS "${lib_dir}" PARENT_SCOPE)
endfunction()

if(NOT "$ENV{ARROW_HOME}" STREQUAL "")
file(TO_CMAKE_PATH "$ENV{ARROW_HOME}" ARROW_HOME)
endif()
arrow_find_package(ARROW
"${ARROW_HOME}"
arrow
arrow/api.h
Arrow
arrow)

if(ARROW_HOME)
if(ARROW_INCLUDE_DIR)
file(READ "${ARROW_INCLUDE_DIR}/arrow/util/config.h" ARROW_CONFIG_H_CONTENT)
arrow_extract_macro_value(ARROW_VERSION_MAJOR "ARROW_VERSION_MAJOR"
"${ARROW_CONFIG_H_CONTENT}")
arrow_extract_macro_value(ARROW_VERSION_MINOR "ARROW_VERSION_MINOR"
"${ARROW_CONFIG_H_CONTENT}")
arrow_extract_macro_value(ARROW_VERSION_PATCH "ARROW_VERSION_PATCH"
"${ARROW_CONFIG_H_CONTENT}")
if("${ARROW_VERSION_MAJOR}" STREQUAL ""
OR "${ARROW_VERSION_MINOR}" STREQUAL ""
OR "${ARROW_VERSION_PATCH}" STREQUAL "")
set(ARROW_VERSION "0.0.0")
else()
set(ARROW_VERSION
"${ARROW_VERSION_MAJOR}.${ARROW_VERSION_MINOR}.${ARROW_VERSION_PATCH}")
endif()

arrow_extract_macro_value(ARROW_SO_VERSION_QUOTED "ARROW_SO_VERSION"
"${ARROW_CONFIG_H_CONTENT}")
string(REGEX REPLACE "^\"(.+)\"$" "\\1" ARROW_SO_VERSION "${ARROW_SO_VERSION_QUOTED}")
arrow_extract_macro_value(ARROW_FULL_SO_VERSION_QUOTED "ARROW_FULL_SO_VERSION"
"${ARROW_CONFIG_H_CONTENT}")
string(REGEX
REPLACE "^\"(.+)\"$" "\\1" ARROW_FULL_SO_VERSION
"${ARROW_FULL_SO_VERSION_QUOTED}")
endif()
else()
if(ARROW_USE_CMAKE_PACKAGE_CONFIG)
find_package(Arrow CONFIG)
elseif(ARROW_USE_PKG_CONFIG)
pkg_get_variable(ARROW_SO_VERSION arrow so_version)
pkg_get_variable(ARROW_FULL_SO_VERSION arrow full_so_version)
endif()
endif()

set(ARROW_ABI_VERSION ${ARROW_SO_VERSION})

mark_as_advanced(ARROW_ABI_VERSION
ARROW_CONFIG_SUFFIXES
ARROW_FULL_SO_VERSION
ARROW_IMPORT_LIB
ARROW_INCLUDE_DIR
ARROW_LIBS
ARROW_LIB_DIR
ARROW_SEARCH_LIB_PATH_SUFFIXES
ARROW_SHARED_IMP_LIB
ARROW_SHARED_LIB
ARROW_SO_VERSION
ARROW_STATIC_LIB
ARROW_VERSION
ARROW_VERSION_MAJOR
ARROW_VERSION_MINOR
ARROW_VERSION_PATCH)

find_package_handle_standard_args(Arrow REQUIRED_VARS
# The first required variable is shown
# in the found message. So this list is
# not sorted alphabetically.
ARROW_INCLUDE_DIR
ARROW_LIB_DIR
ARROW_FULL_SO_VERSION
ARROW_SO_VERSION
VERSION_VAR
ARROW_VERSION)
set(ARROW_FOUND ${Arrow_FOUND})

if(Arrow_FOUND AND NOT Arrow_FIND_QUIETLY)
message(STATUS "Arrow version: ${ARROW_VERSION} (${ARROW_FIND_APPROACH})")
message(STATUS "Arrow SO and ABI version: ${ARROW_SO_VERSION}")
message(STATUS "Arrow full SO version: ${ARROW_FULL_SO_VERSION}")
message(STATUS "Found the Arrow core shared library: ${ARROW_SHARED_LIB}")
message(STATUS "Found the Arrow core import library: ${ARROW_IMPORT_LIB}")
message(STATUS "Found the Arrow core static library: ${ARROW_STATIC_LIB}")
endif()
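As a rough usage sketch (not part of the commit), a project consuming this rewritten Arrow find module would call find_package and then use either the path variables or the imported targets it creates; the target names arrow_shared and arrow_static follow from the ${base_name}_shared/_static naming inside arrow_find_package:

# Assumes Arrow is installed system-wide or ARROW_HOME points at an installation.
find_package (Arrow REQUIRED)

add_executable (arrow_demo main.cpp) # hypothetical consumer target
target_include_directories (arrow_demo PRIVATE ${ARROW_INCLUDE_DIR})

if (TARGET arrow_shared)
    target_link_libraries (arrow_demo PRIVATE arrow_shared)        # imported target created by the module
else ()
    target_link_libraries (arrow_demo PRIVATE ${ARROW_SHARED_LIB}) # plain library path fallback
endif ()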
@ -17,131 +17,116 @@
# specific language governing permissions and limitations
# under the License.

# - Find PARQUET (parquet/parquet.h, libparquet.a, libparquet.so)
# - Find Parquet (parquet/api/reader.h, libparquet.a, libparquet.so)
#
# This module requires Arrow from which it uses
# arrow_find_package()
#
# This module defines
#  PARQUET_FOUND, whether Parquet has been found
#  PARQUET_IMPORT_LIB, path to libparquet's import library (Windows only)
#  PARQUET_INCLUDE_DIR, directory containing headers
#  PARQUET_LIBS, directory containing parquet libraries
#  PARQUET_STATIC_LIB, path to libparquet.a
#  PARQUET_LIBS, deprecated. Use PARQUET_LIB_DIR instead
#  PARQUET_LIB_DIR, directory containing Parquet libraries
#  PARQUET_SHARED_IMP_LIB, deprecated. Use PARQUET_IMPORT_LIB instead
#  PARQUET_SHARED_LIB, path to libparquet's shared library
#  PARQUET_SHARED_IMP_LIB, path to libparquet's import library (MSVC only)
#  PARQUET_FOUND, whether parquet has been found
#  PARQUET_SO_VERSION, shared object version of found Parquet such as "100"
#  PARQUET_STATIC_LIB, path to libparquet.a

include(FindPkgConfig)
if(DEFINED PARQUET_FOUND)
return()
endif()

set(find_package_arguments)
if(${CMAKE_FIND_PACKAGE_NAME}_FIND_VERSION)
list(APPEND find_package_arguments "${${CMAKE_FIND_PACKAGE_NAME}_FIND_VERSION}")
endif()
if(${CMAKE_FIND_PACKAGE_NAME}_FIND_REQUIRED)
list(APPEND find_package_arguments REQUIRED)
endif()
if(${CMAKE_FIND_PACKAGE_NAME}_FIND_QUIETLY)
list(APPEND find_package_arguments QUIET)
endif()
find_package(Arrow ${find_package_arguments})

if(NOT "$ENV{PARQUET_HOME}" STREQUAL "")
set(PARQUET_HOME "$ENV{PARQUET_HOME}")
file(TO_CMAKE_PATH "$ENV{PARQUET_HOME}" PARQUET_HOME)
endif()

if (MSVC)
SET(CMAKE_FIND_LIBRARY_SUFFIXES ".lib" ".dll")
if((NOT PARQUET_HOME) AND ARROW_HOME)
set(PARQUET_HOME ${ARROW_HOME})
endif()

if (MSVC AND NOT DEFINED PARQUET_MSVC_STATIC_LIB_SUFFIX)
set(PARQUET_MSVC_STATIC_LIB_SUFFIX "_static")
endif()

find_library(PARQUET_SHARED_LIBRARIES NAMES parquet
PATHS ${PARQUET_HOME} NO_DEFAULT_PATH
PATH_SUFFIXES "bin" )

get_filename_component(PARQUET_SHARED_LIBS ${PARQUET_SHARED_LIBRARIES} PATH )
endif ()

if(PARQUET_HOME)
set(PARQUET_SEARCH_HEADER_PATHS
${PARQUET_HOME}/include
)
set(PARQUET_SEARCH_LIB_PATH
${PARQUET_HOME}/lib
)
find_path(PARQUET_INCLUDE_DIR parquet/api/reader.h PATHS
${PARQUET_SEARCH_HEADER_PATHS}
# make sure we don't accidentally pick up a different version
NO_DEFAULT_PATH
)
find_library(PARQUET_LIBRARIES NAMES parquet
PATHS ${PARQUET_HOME} NO_DEFAULT_PATH
PATH_SUFFIXES "lib")
get_filename_component(PARQUET_LIBS ${PARQUET_LIBRARIES} PATH )

# Try to autodiscover the Parquet ABI version
get_filename_component(PARQUET_LIB_REALPATH ${PARQUET_LIBRARIES} REALPATH)
get_filename_component(PARQUET_EXT_REALPATH ${PARQUET_LIB_REALPATH} EXT)
string(REGEX MATCH ".([0-9]+.[0-9]+.[0-9]+)" HAS_ABI_VERSION ${PARQUET_EXT_REALPATH})
if (HAS_ABI_VERSION)
if (APPLE)
string(REGEX REPLACE ".([0-9]+.[0-9]+.[0-9]+).dylib" "\\1" PARQUET_ABI_VERSION ${PARQUET_EXT_REALPATH})
if(ARROW_FOUND)
arrow_find_package(PARQUET
"${PARQUET_HOME}"
parquet
parquet/api/reader.h
Parquet
parquet)
if(PARQUET_HOME)
if(PARQUET_INCLUDE_DIR)
file(READ "${PARQUET_INCLUDE_DIR}/parquet/parquet_version.h"
PARQUET_VERSION_H_CONTENT)
arrow_extract_macro_value(PARQUET_VERSION_MAJOR "PARQUET_VERSION_MAJOR"
"${PARQUET_VERSION_H_CONTENT}")
arrow_extract_macro_value(PARQUET_VERSION_MINOR "PARQUET_VERSION_MINOR"
"${PARQUET_VERSION_H_CONTENT}")
arrow_extract_macro_value(PARQUET_VERSION_PATCH "PARQUET_VERSION_PATCH"
"${PARQUET_VERSION_H_CONTENT}")
if("${PARQUET_VERSION_MAJOR}" STREQUAL ""
OR "${PARQUET_VERSION_MINOR}" STREQUAL ""
OR "${PARQUET_VERSION_PATCH}" STREQUAL "")
set(PARQUET_VERSION "0.0.0")
else()
string(REGEX REPLACE ".so.([0-9]+.[0-9]+.[0-9]+)" "\\1" PARQUET_ABI_VERSION ${PARQUET_EXT_REALPATH})
set(PARQUET_VERSION
"${PARQUET_VERSION_MAJOR}.${PARQUET_VERSION_MINOR}.${PARQUET_VERSION_PATCH}")
endif()

arrow_extract_macro_value(PARQUET_SO_VERSION_QUOTED "PARQUET_SO_VERSION"
"${PARQUET_VERSION_H_CONTENT}")
string(REGEX
REPLACE "^\"(.+)\"$" "\\1" PARQUET_SO_VERSION "${PARQUET_SO_VERSION_QUOTED}")
arrow_extract_macro_value(PARQUET_FULL_SO_VERSION_QUOTED "PARQUET_FULL_SO_VERSION"
"${PARQUET_VERSION_H_CONTENT}")
string(REGEX
REPLACE "^\"(.+)\"$" "\\1" PARQUET_FULL_SO_VERSION
"${PARQUET_FULL_SO_VERSION_QUOTED}")
endif()
string(REGEX REPLACE "([0-9]+).[0-9]+.[0-9]+" "\\1" PARQUET_SO_VERSION ${PARQUET_ABI_VERSION})
else()
set(PARQUET_ABI_VERSION "1.0.0")
set(PARQUET_SO_VERSION "1")
endif()
else()
pkg_check_modules(PARQUET parquet)
if (PARQUET_FOUND)
pkg_get_variable(PARQUET_ABI_VERSION parquet abi_version)
message(STATUS "Parquet C++ ABI version: ${PARQUET_ABI_VERSION}")
if(PARQUET_USE_CMAKE_PACKAGE_CONFIG)
find_package(Parquet CONFIG)
elseif(PARQUET_USE_PKG_CONFIG)
pkg_get_variable(PARQUET_SO_VERSION parquet so_version)
message(STATUS "Parquet C++ SO version: ${PARQUET_SO_VERSION}")
set(PARQUET_INCLUDE_DIR ${PARQUET_INCLUDE_DIRS})
set(PARQUET_LIBS ${PARQUET_LIBRARY_DIRS})
set(PARQUET_SEARCH_LIB_PATH ${PARQUET_LIBRARY_DIRS})
message(STATUS "Searching for parquet libs in: ${PARQUET_SEARCH_LIB_PATH}")
find_library(PARQUET_LIBRARIES NAMES parquet
PATHS ${PARQUET_SEARCH_LIB_PATH} NO_DEFAULT_PATH)
else()
find_path(PARQUET_INCLUDE_DIR NAMES parquet/api/reader.h )
find_library(PARQUET_LIBRARIES NAMES parquet)
get_filename_component(PARQUET_LIBS ${PARQUET_LIBRARIES} PATH )
pkg_get_variable(PARQUET_FULL_SO_VERSION parquet full_so_version)
endif()
endif()
set(PARQUET_ABI_VERSION "${PARQUET_SO_VERSION}")
endif()

if (PARQUET_INCLUDE_DIR AND PARQUET_LIBRARIES)
set(PARQUET_FOUND TRUE)
set(PARQUET_LIB_NAME parquet)
if (MSVC)
set(PARQUET_STATIC_LIB "${PARQUET_LIBS}/${PARQUET_LIB_NAME}${PARQUET_MSVC_STATIC_LIB_SUFFIX}${CMAKE_STATIC_LIBRARY_SUFFIX}")
set(PARQUET_SHARED_LIB "${PARQUET_SHARED_LIBS}/${PARQUET_LIB_NAME}${CMAKE_SHARED_LIBRARY_SUFFIX}")
set(PARQUET_SHARED_IMP_LIB "${PARQUET_LIBS}/${PARQUET_LIB_NAME}.lib")
else()
set(PARQUET_STATIC_LIB ${PARQUET_LIBS}/${CMAKE_STATIC_LIBRARY_PREFIX}${PARQUET_LIB_NAME}.a)
set(PARQUET_SHARED_LIB ${PARQUET_LIBS}/${CMAKE_SHARED_LIBRARY_PREFIX}${PARQUET_LIB_NAME}${CMAKE_SHARED_LIBRARY_SUFFIX})
endif()
else ()
set(PARQUET_FOUND FALSE)
endif ()

if (PARQUET_FOUND)
if (NOT Parquet_FIND_QUIETLY)
message(STATUS "Found the Parquet library: ${PARQUET_LIBRARIES}")
endif ()
else ()
if (NOT Parquet_FIND_QUIETLY)
if (NOT PARQUET_FOUND)
set(PARQUET_ERR_MSG "${PARQUET_ERR_MSG} Could not find the parquet library.")
endif()

set(PARQUET_ERR_MSG "${PARQUET_ERR_MSG} Looked in ")
if ( _parquet_roots )
set(PARQUET_ERR_MSG "${PARQUET_ERR_MSG} in ${_parquet_roots}.")
else ()
set(PARQUET_ERR_MSG "${PARQUET_ERR_MSG} system search paths.")
endif ()
if (Parquet_FIND_REQUIRED)
message(FATAL_ERROR "${PARQUET_ERR_MSG}")
else (Parquet_FIND_REQUIRED)
message(STATUS "${PARQUET_ERR_MSG}")
endif (Parquet_FIND_REQUIRED)
endif ()
endif ()

mark_as_advanced(
PARQUET_FOUND
mark_as_advanced(PARQUET_ABI_VERSION
PARQUET_IMPORT_LIB
PARQUET_INCLUDE_DIR
PARQUET_LIBS
PARQUET_LIBRARIES
PARQUET_STATIC_LIB
PARQUET_LIB_DIR
PARQUET_SHARED_IMP_LIB
PARQUET_SHARED_LIB
)
PARQUET_SO_VERSION
PARQUET_STATIC_LIB
PARQUET_VERSION)

find_package_handle_standard_args(Parquet
REQUIRED_VARS
PARQUET_INCLUDE_DIR
PARQUET_LIB_DIR
PARQUET_SO_VERSION
VERSION_VAR
PARQUET_VERSION)
set(PARQUET_FOUND ${Parquet_FOUND})

if(Parquet_FOUND AND NOT Parquet_FIND_QUIETLY)
message(STATUS "Parquet version: ${PARQUET_VERSION} (${PARQUET_FIND_APPROACH})")
message(STATUS "Found the Parquet shared library: ${PARQUET_SHARED_LIB}")
message(STATUS "Found the Parquet import library: ${PARQUET_IMPORT_LIB}")
message(STATUS "Found the Parquet static library: ${PARQUET_STATIC_LIB}")
endif()
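Both find modules now recover version numbers by scraping #define lines out of an installed header with arrow_extract_macro_value(). A small illustration of what that helper does, with made-up values:

# header_content stands in for the text of arrow/util/config.h or parquet/parquet_version.h.
set (header_content "#define PARQUET_VERSION_MAJOR 1\n#define PARQUET_SO_VERSION \"100\"")

arrow_extract_macro_value (version_major "PARQUET_VERSION_MAJOR" "${header_content}")
# version_major is now "1"

arrow_extract_macro_value (so_version_quoted "PARQUET_SO_VERSION" "${header_content}")
string (REGEX REPLACE "^\"(.+)\"$" "\\1" so_version "${so_version_quoted}")
# so_version is now "100" -- the surrounding quotes are stripped, as the modules do above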
@ -12,7 +12,9 @@ if (ENABLE_CLANG_TIDY)
set (USE_CLANG_TIDY 1)
# The variable CMAKE_CXX_CLANG_TIDY will be set inside src and base directories with non third-party code.
# set (CMAKE_CXX_CLANG_TIDY "${CLANG_TIDY_PATH}")
elseif (FAIL_ON_UNSUPPORTED_OPTIONS_COMBINATION)
message(FATAL_ERROR "clang-tidy is not found")
else ()
message(STATUS "clang-tidy is not found. This is normal - the tool is used only for static code analysis and not essential for build.")
message(STATUS "clang-tidy is not found. This is normal - the tool is only used for static code analysis and isn't essential for the build.")
endif ()
endif ()
@ -8,6 +8,9 @@ macro(find_contrib_lib LIB_NAME)

if (NOT USE_INTERNAL_${LIB_NAME_UC}_LIBRARY)
find_package ("${LIB_NAME}")
if (NOT ${LIB_NAME_UC}_FOUND)
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use system ${LIB_NAME}")
endif()
endif ()

if (NOT ${LIB_NAME_UC}_FOUND)
@ -17,5 +20,4 @@ macro(find_contrib_lib LIB_NAME)
endif ()

message (STATUS "Using ${LIB_NAME}: ${${LIB_NAME_UC}_INCLUDE_DIR} : ${${LIB_NAME_UC}_LIBRARIES}")

endmacro()
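find_contrib_lib() is the helper that the main CMakeLists.txt calls for cityhash, farmhash and btrie. A sketch of the flow for one of those calls, assuming the uppercased variable names the macro derives from its argument:

# When the internal copy is not forced, the macro first tries the system package
# and, after this change, routes a failure through ${RECONFIGURE_MESSAGE_LEVEL}.
set (USE_INTERNAL_CITYHASH_LIBRARY OFF)
find_contrib_lib (cityhash)
# On success the macro reports:
#   Using cityhash: <CITYHASH_INCLUDE_DIR> : <CITYHASH_LIBRARIES>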
@ -1,21 +1,22 @@
option(ENABLE_AMQPCPP "Enalbe AMQP-CPP" ${ENABLE_LIBRARIES})

if (NOT ENABLE_AMQPCPP)
return()
endif()

if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/AMQP-CPP/CMakeLists.txt")
message (WARNING "submodule contrib/AMQP-CPP is missing. to fix try run: \n git submodule update --init --recursive")
set (ENABLE_AMQPCPP 0)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal AMQP-CPP library")
set (USE_AMQPCPP 0)
return()
endif ()

if (ENABLE_AMQPCPP)
set (USE_AMQPCPP 1)
set (AMQPCPP_LIBRARY AMQP-CPP)

set (USE_AMQPCPP 1)
set (AMQPCPP_LIBRARY AMQP-CPP)

set (AMQPCPP_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/AMQP-CPP/include")

list (APPEND AMQPCPP_INCLUDE_DIR
set (AMQPCPP_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/AMQP-CPP/include")
list (APPEND AMQPCPP_INCLUDE_DIR
"${ClickHouse_SOURCE_DIR}/contrib/AMQP-CPP/include"
"${ClickHouse_SOURCE_DIR}/contrib/AMQP-CPP")

endif()

message (STATUS "Using AMQP-CPP=${USE_AMQPCPP}: ${AMQPCPP_INCLUDE_DIR} : ${AMQPCPP_LIBRARY}")
@ -1,28 +1,35 @@
option (ENABLE_AVRO "Enable Avro" ${ENABLE_LIBRARIES})

if (ENABLE_AVRO)
if (NOT ENABLE_AVRO)
if (USE_INTERNAL_AVRO_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal avro library with ENABLE_AVRO=OFF")
endif()
return()
endif()

option (USE_INTERNAL_AVRO_LIBRARY "Set to FALSE to use system avro library instead of bundled" ${NOT_UNBUNDLED})
option (USE_INTERNAL_AVRO_LIBRARY
"Set to FALSE to use system avro library instead of bundled" ON) # TODO: provide unbundled support

if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/avro/lang/c++/CMakeLists.txt")
if(USE_INTERNAL_AVRO_LIBRARY)
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/avro/lang/c++/CMakeLists.txt")
if (USE_INTERNAL_AVRO_LIBRARY)
message(WARNING "submodule contrib/avro is missing. to fix try run: \n git submodule update --init --recursive")
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot find internal avro")
set(USE_INTERNAL_AVRO_LIBRARY 0)
endif()
set(MISSING_INTERNAL_AVRO_LIBRARY 1)
set(USE_INTERNAL_AVRO_LIBRARY 0)
endif()

if (NOT USE_INTERNAL_AVRO_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Using system avro library is not supported yet")
elseif(NOT MISSING_INTERNAL_AVRO_LIBRARY)
include(cmake/find/snappy.cmake)
set(AVROCPP_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/avro/lang/c++/include")
set(AVROCPP_LIBRARY avrocpp)
set(USE_INTERNAL_AVRO_LIBRARY 1)
endif ()

if (AVROCPP_LIBRARY AND AVROCPP_INCLUDE_DIR)
set(USE_AVRO 1)
endif()

endif()

message (STATUS "Using avro=${USE_AVRO}: ${AVROCPP_INCLUDE_DIR} : ${AVROCPP_LIBRARY}")
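Most of the rewritten cmake/find/*.cmake scripts above and below share the same shape: return early when the feature is disabled, fall back from the system package to the bundled submodule, and report every impossible combination through ${RECONFIGURE_MESSAGE_LEVEL}. A condensed sketch of that shape for a hypothetical library "bar" (all names here are illustrative, not taken from the diff):

option (ENABLE_BAR "Enable bar" ${ENABLE_LIBRARIES})

if (NOT ENABLE_BAR)
    if (USE_INTERNAL_BAR_LIBRARY)
        message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal bar library with ENABLE_BAR=OFF")
    endif ()
    return ()  # nothing below runs when the feature is off
endif ()

option (USE_INTERNAL_BAR_LIBRARY "Set to FALSE to use system bar library instead of bundled" ${NOT_UNBUNDLED})

if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/bar/CMakeLists.txt")
    if (USE_INTERNAL_BAR_LIBRARY)
        message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal bar")
        set (USE_INTERNAL_BAR_LIBRARY 0)
    endif ()
    set (MISSING_INTERNAL_BAR_LIBRARY 1)
endif ()

if (NOT USE_INTERNAL_BAR_LIBRARY)
    find_path (BAR_INCLUDE_DIR NAMES bar.h)
    find_library (BAR_LIBRARY bar)
    if (NOT BAR_INCLUDE_DIR OR NOT BAR_LIBRARY)
        message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system bar")
    endif ()
endif ()

if (BAR_INCLUDE_DIR AND BAR_LIBRARY)
    set (USE_BAR 1)
elseif (NOT MISSING_INTERNAL_BAR_LIBRARY)
    set (BAR_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/bar/include")
    set (BAR_LIBRARY bar)
    set (USE_INTERNAL_BAR_LIBRARY 1)
    set (USE_BAR 1)
endif ()

message (STATUS "Using bar=${USE_BAR}: ${BAR_INCLUDE_DIR} : ${BAR_LIBRARY}")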
@ -1,17 +1,21 @@
option (ENABLE_BASE64 "Enable base64" ${ENABLE_LIBRARIES})

if (NOT ENABLE_BASE64)
return()
endif()

if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/base64/LICENSE")
set (MISSING_INTERNAL_BASE64_LIBRARY 1)
message (WARNING "submodule contrib/base64 is missing. to fix try run: \n git submodule update --init --recursive")
endif ()

if (NOT MISSING_INTERNAL_BASE64_LIBRARY)
option (ENABLE_BASE64 "Enable base64" ${ENABLE_LIBRARIES})
endif ()

if (ENABLE_BASE64)
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/base64")
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/base64")
message (WARNING "submodule contrib/base64 is missing. to fix try run: \n git submodule update --init --recursive")
else()
else()
set (BASE64_LIBRARY base64)
set (USE_BASE64 1)
endif()
endif ()
endif()

if (NOT USE_BASE64)
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot enable base64")
endif()
@ -1,12 +1,23 @@
option (ENABLE_BROTLI "Enable brotli" ${ENABLE_LIBRARIES})

if (ENABLE_BROTLI)
if (NOT ENABLE_BROTLI)
if (USE_INTERNAL_BROTLI_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal brotly library with ENABLE_BROTLI=OFF")
endif()
return()
endif()

option (USE_INTERNAL_BROTLI_LIBRARY "Set to FALSE to use system libbrotli library instead of bundled" ${NOT_UNBUNDLED})
if (UNBUNDLED)
# Many system ship only dynamic brotly libraries, so we back off to bundled by default
option (USE_INTERNAL_BROTLI_LIBRARY "Set to FALSE to use system libbrotli library instead of bundled" ${USE_STATIC_LIBRARIES})
else()
option (USE_INTERNAL_BROTLI_LIBRARY "Set to FALSE to use system libbrotli library instead of bundled" ON)
endif()

if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/brotli/c/include/brotli/decode.h")
if (USE_INTERNAL_BROTLI_LIBRARY)
message (WARNING "submodule contrib/brotli is missing. to fix try run: \n git submodule update --init --recursive")
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot find internal brotli")
set (USE_INTERNAL_BROTLI_LIBRARY 0)
endif ()
set (MISSING_INTERNAL_BROTLI_LIBRARY 1)
@ -19,18 +30,18 @@ if(NOT USE_INTERNAL_BROTLI_LIBRARY)
find_path(BROTLI_INCLUDE_DIR NAMES brotli/decode.h brotli/encode.h brotli/port.h brotli/types.h PATHS ${BROTLI_INCLUDE_PATHS})
if(BROTLI_LIBRARY_DEC AND BROTLI_LIBRARY_ENC AND BROTLI_LIBRARY_COMMON)
set(BROTLI_LIBRARY ${BROTLI_LIBRARY_DEC} ${BROTLI_LIBRARY_ENC} ${BROTLI_LIBRARY_COMMON})
else()
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use system brotli")
endif()
endif()

if (BROTLI_LIBRARY AND BROTLI_INCLUDE_DIR)
set (USE_BROTLI 1)
elseif (NOT MISSING_INTERNAL_BROTLI_LIBRARY)
set (BROTLI_INCLUDE_DIR ${ClickHouse_SOURCE_DIR}/contrib/brotli/c/include)
set (BROTLI_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/brotli/c/include")
set (USE_INTERNAL_BROTLI_LIBRARY 1)
set (BROTLI_LIBRARY brotli)
set (USE_BROTLI 1)
endif ()

endif()

message (STATUS "Using brotli=${USE_BROTLI}: ${BROTLI_INCLUDE_DIR} : ${BROTLI_LIBRARY}")
@ -1,15 +1,21 @@
option (ENABLE_CAPNP "Enable Cap'n Proto" ${ENABLE_LIBRARIES})

if (ENABLE_CAPNP)
if (NOT ENABLE_CAPNP)
if (USE_INTERNAL_CAPNP_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal capnproto library with ENABLE_CAPNP=OFF")
endif()
return()
endif()

option (USE_INTERNAL_CAPNP_LIBRARY "Set to FALSE to use system capnproto library instead of bundled" ${NOT_UNBUNDLED})

if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/capnproto/CMakeLists.txt")
if(USE_INTERNAL_CAPNP_LIBRARY)
message(WARNING "submodule contrib/capnproto is missing. to fix try run: \n git submodule update --init --recursive")
message(${RECONFIGURE_MESSAGE_LEVEL} "cannot find internal capnproto")
set(USE_INTERNAL_CAPNP_LIBRARY 0)
endif()
set(MISSING_INTERNAL_CAPNP_LIBRARY 1)
set(USE_INTERNAL_CAPNP_LIBRARY 0)
endif()

# FIXME: refactor to use `add_library(… IMPORTED)` if possible.
@ -18,17 +24,21 @@ if (NOT USE_INTERNAL_CAPNP_LIBRARY)
find_library (CAPNP capnp)
find_library (CAPNPC capnpc)

if(KJ AND CAPNP AND CAPNPC)
set (CAPNP_LIBRARIES ${CAPNPC} ${CAPNP} ${KJ})
else()
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system capnproto")
endif()
endif()

if (CAPNP_LIBRARIES)
set (USE_CAPNP 1)
elseif(NOT MISSING_INTERNAL_CAPNP_LIBRARY)
add_subdirectory(contrib/capnproto-cmake)

set (CAPNP_LIBRARIES capnpc)
endif ()

if (CAPNP_LIBRARIES)
set (USE_CAPNP 1)
endif ()

set (USE_INTERNAL_CAPNP_LIBRARY 1)
endif ()

message (STATUS "Using capnp=${USE_CAPNP}: ${CAPNP_LIBRARIES}")
@ -1,29 +1,33 @@
option(ENABLE_CASSANDRA "Enable Cassandra" ${ENABLE_LIBRARIES})

if (ENABLE_CASSANDRA)
if (APPLE)
SET(CMAKE_MACOSX_RPATH ON)
endif()
if (NOT ENABLE_CASSANDRA)
return()
endif()

if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libuv")
if (APPLE)
set(CMAKE_MACOSX_RPATH ON)
endif()

if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libuv")
message (ERROR "submodule contrib/libuv is missing. to fix try run: \n git submodule update --init --recursive")
elseif (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/cassandra")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal libuv needed for Cassandra")
elseif (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/cassandra")
message (ERROR "submodule contrib/cassandra is missing. to fix try run: \n git submodule update --init --recursive")
else()
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal Cassandra")
else()
set (LIBUV_ROOT_DIR "${ClickHouse_SOURCE_DIR}/contrib/libuv")
set (CASSANDRA_INCLUDE_DIR
"${ClickHouse_SOURCE_DIR}/contrib/cassandra/include/")
if (USE_STATIC_LIBRARIES)
if (MAKE_STATIC_LIBRARIES)
set (LIBUV_LIBRARY uv_a)
set (CASSANDRA_LIBRARY cassandra_static)
else()
set (LIBUV_LIBRARY uv)
set (CASSANDRA_LIBRARY cassandra)
endif()

set (USE_CASSANDRA 1)
set (CASS_ROOT_DIR "${ClickHouse_SOURCE_DIR}/contrib/cassandra")

endif()
endif()

message (STATUS "Using cassandra=${USE_CASSANDRA}: ${CASSANDRA_INCLUDE_DIR} : ${CASSANDRA_LIBRARY}")
@ -1,3 +1,9 @@
option(ENABLE_CCACHE "Speedup re-compilations using ccache" ON)

if (NOT ENABLE_CCACHE)
return()
endif()

find_program (CCACHE_FOUND ccache)

if (CCACHE_FOUND AND NOT CMAKE_CXX_COMPILER_LAUNCHER MATCHES "ccache" AND NOT CMAKE_CXX_COMPILER MATCHES "ccache")
@ -9,6 +15,8 @@ if (CCACHE_FOUND AND NOT CMAKE_CXX_COMPILER_LAUNCHER MATCHES "ccache" AND NOT CM
set_property (GLOBAL PROPERTY RULE_LAUNCH_COMPILE ${CCACHE_FOUND})
set_property (GLOBAL PROPERTY RULE_LAUNCH_LINK ${CCACHE_FOUND})
else ()
message(STATUS "Not using ${CCACHE_FOUND} ${CCACHE_VERSION} bug: https://bugzilla.samba.org/show_bug.cgi?id=8118")
message(${RECONFIGURE_MESSAGE_LEVEL} "Not using ${CCACHE_FOUND} ${CCACHE_VERSION} bug: https://bugzilla.samba.org/show_bug.cgi?id=8118")
endif ()
elseif (NOT CCACHE_FOUND)
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use ccache")
endif ()
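The ccache hunk above hooks the compiler cache in through the global RULE_LAUNCH_COMPILE/RULE_LAUNCH_LINK properties, but only when ccache is not already active via a compiler launcher. A sketch of the alternative hook-up that the guard condition recognizes (not part of the diff):

# Passing the launcher explicitly makes the script above skip its own setup,
# because CMAKE_CXX_COMPILER_LAUNCHER already matches "ccache":
#   cmake -DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache ..
set (CMAKE_C_COMPILER_LAUNCHER ccache)
set (CMAKE_CXX_COMPILER_LAUNCHER ccache)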
37	cmake/find/curl.cmake (new file)

@ -0,0 +1,37 @@
option (ENABLE_CURL "Enable curl" ${ENABLE_LIBRARIES})

if (NOT ENABLE_CURL)
if (USE_INTERNAL_CURL)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal curl with ENABLE_CURL=OFF")
endif()
return()
endif()

option (USE_INTERNAL_CURL "Use internal curl library" ${NOT_UNBUNDLED})

if (NOT USE_INTERNAL_CURL)
find_package (CURL)
if (NOT CURL_FOUND)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system curl")
endif()
endif()

if (NOT CURL_FOUND)
set (USE_INTERNAL_CURL 1)
set (CURL_LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/curl")

# find_package(CURL) compatibility for the following packages that uses
# find_package(CURL)/include(FindCURL):
# - mariadb-connector-c
# - aws-s3-cmake
# - sentry-native
set (CURL_FOUND ON CACHE BOOL "")
set (CURL_ROOT_DIR ${CURL_LIBRARY_DIR} CACHE PATH "")
set (CURL_INCLUDE_DIR ${CURL_LIBRARY_DIR}/include CACHE PATH "")
set (CURL_INCLUDE_DIRS ${CURL_LIBRARY_DIR}/include CACHE PATH "")
set (CURL_LIBRARY curl CACHE STRING "")
set (CURL_LIBRARIES ${CURL_LIBRARY} CACHE STRING "")
set (CURL_VERSION_STRING 7.67.0 CACHE STRING "")
endif ()

message (STATUS "Using curl: ${CURL_INCLUDE_DIRS} : ${CURL_LIBRARIES}")
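The new cmake/find/curl.cmake pre-seeds the cache variables that the stock FindCURL module would normally compute, so bundled dependencies that call find_package(CURL) (mariadb-connector-c, aws-s3-cmake, sentry-native) resolve to the in-tree curl instead of probing the system. A sketch of what such a consumer effectively sees (the target name is illustrative):

# Inside a bundled sub-project:
find_package (CURL REQUIRED)  # satisfied by the cached CURL_* values seeded above
message (STATUS "curl ${CURL_VERSION_STRING} at ${CURL_INCLUDE_DIRS}")

add_library (bundled_consumer STATIC consumer.c) # hypothetical target
target_include_directories (bundled_consumer PRIVATE ${CURL_INCLUDE_DIRS})
target_link_libraries (bundled_consumer PRIVATE ${CURL_LIBRARIES}) # resolves to the in-tree 'curl' target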
@ -1,41 +1,68 @@
set(USE_INTERNAL_LIBCXX_LIBRARY_DEFAULT ${NOT_UNBUNDLED})
option (USE_LIBCXX "Use libc++ and libc++abi instead of libstdc++" ${NOT_UNBUNDLED})

if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libcxx/CMakeLists.txt")
message(WARNING "submodule contrib/libcxx is missing. to fix try run: \n git submodule update --init --recursive")
set(USE_INTERNAL_LIBCXX_LIBRARY_DEFAULT 0)
if (NOT USE_LIBCXX)
if (USE_INTERNAL_LIBCXX_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal libcxx with USE_LIBCXX=OFF")
endif()
target_link_libraries(global-libs INTERFACE -l:libstdc++.a -l:libstdc++fs.a) # Always link these libraries as static
target_link_libraries(global-libs INTERFACE ${EXCEPTION_HANDLING_LIBRARY})
return()
endif()

option (USE_LIBCXX "Use libc++ and libc++abi instead of libstdc++" ${NOT_UNBUNDLED})
set(USE_INTERNAL_LIBCXX_LIBRARY_DEFAULT ${NOT_UNBUNDLED})
option (USE_INTERNAL_LIBCXX_LIBRARY "Set to FALSE to use system libcxx and libcxxabi libraries instead of bundled" ${USE_INTERNAL_LIBCXX_LIBRARY_DEFAULT})

if (USE_LIBCXX)
set (CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -D_LIBCPP_DEBUG=0") # More checks in debug build.
if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libcxx/CMakeLists.txt")
if (USE_INTERNAL_LIBCXX_LIBRARY)
message(WARNING "submodule contrib/libcxx is missing. to fix try run: \n git submodule update --init --recursive")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal libcxx")
set(USE_INTERNAL_LIBCXX_LIBRARY 0)
endif()
set(USE_INTERNAL_LIBCXX_LIBRARY_DEFAULT 0)
set(MISSING_INTERNAL_LIBCXX_LIBRARY 1)
endif()

if (NOT USE_INTERNAL_LIBCXX_LIBRARY)
set (CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -D_LIBCPP_DEBUG=0") # More checks in debug build.

if (NOT USE_INTERNAL_LIBCXX_LIBRARY)
find_library (LIBCXX_LIBRARY c++)
find_library (LIBCXXFS_LIBRARY c++fs)
find_library (LIBCXXABI_LIBRARY c++abi)

if(LIBCXX_LIBRARY AND LIBCXXABI_LIBRARY) # c++fs is now a part of the libc++
set (HAVE_LIBCXX 1)
else ()
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system libcxx")
endif()

if(NOT LIBCXXFS_LIBRARY)
set(LIBCXXFS_LIBRARY ${LIBCXX_LIBRARY})
endif()

set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -stdlib=libc++")

target_link_libraries(global-libs INTERFACE ${EXCEPTION_HANDLING_LIBRARY})
else ()
endif ()

if (NOT HAVE_LIBCXX AND NOT MISSING_INTERNAL_LIBCXX_LIBRARY)
set (LIBCXX_LIBRARY cxx)
set (LIBCXXABI_LIBRARY cxxabi)
add_subdirectory(contrib/libcxxabi-cmake)
add_subdirectory(contrib/libcxx-cmake)

# Exception handling library is embedded into libcxxabi.
endif ()

target_link_libraries(global-libs INTERFACE ${LIBCXX_LIBRARY} ${LIBCXXABI_LIBRARY} ${LIBCXXFS_LIBRARY})

set (HAVE_LIBCXX 1)
set(USE_INTERNAL_LIBCXX_LIBRARY 1)
endif ()

if (HAVE_LIBCXX)
target_link_libraries(global-libs INTERFACE ${LIBCXX_LIBRARY} ${LIBCXXABI_LIBRARY} ${LIBCXXFS_LIBRARY})

message (STATUS "Using libcxx: ${LIBCXX_LIBRARY}")
message (STATUS "Using libcxxfs: ${LIBCXXFS_LIBRARY}")
message (STATUS "Using libcxxabi: ${LIBCXXABI_LIBRARY}")
else ()
else()
target_link_libraries(global-libs INTERFACE -l:libstdc++.a -l:libstdc++fs.a) # Always link these libraries as static
target_link_libraries(global-libs INTERFACE ${EXCEPTION_HANDLING_LIBRARY})
endif ()
endif()
@ -1,19 +1,24 @@
if(NOT ARCH_ARM AND NOT OS_FREEBSD AND NOT OS_DARWIN)
option(ENABLE_FASTOPS "Enable fast vectorized mathematical functions library by Mikhail Parakhin" ${ENABLE_LIBRARIES})
elseif(ENABLE_FASTOPS)
message (${RECONFIGURE_MESSAGE_LEVEL} "Fastops library is not supported on ARM, FreeBSD and Darwin")
endif()

if(ENABLE_FASTOPS)
if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/fastops/fastops/fastops.h")
if(NOT ENABLE_FASTOPS)
set(USE_FASTOPS 0)
return()
endif()

if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/fastops/fastops/fastops.h")
message(WARNING "submodule contrib/fastops is missing. to fix try run: \n git submodule update --init --recursive")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal fastops library")
set(MISSING_INTERNAL_FASTOPS_LIBRARY 1)
endif()
if(NOT MISSING_INTERNAL_FASTOPS_LIBRARY)
endif()

if(NOT MISSING_INTERNAL_FASTOPS_LIBRARY)
set(USE_FASTOPS 1)
set(FASTOPS_INCLUDE_DIR ${ClickHouse_SOURCE_DIR}/contrib/fastops/)
set(FASTOPS_LIBRARY fastops)
endif()
else()
set(USE_FASTOPS 0)
endif()

message(STATUS "Using fastops=${USE_FASTOPS}: ${FASTOPS_INCLUDE_DIR} : ${FASTOPS_LIBRARY}")
@ -1,8 +1,11 @@
# Check if gperf was installed
find_program(GPERF gperf)
if(GPERF)
if(NOT DEFINED ENABLE_GPERF OR ENABLE_GPERF)
# Check if gperf was installed
find_program(GPERF gperf)
if(GPERF)
option(ENABLE_GPERF "Use gperf function hash generator tool" ${ENABLE_LIBRARIES})
endif()
endif()

if (ENABLE_GPERF)
if(NOT GPERF)
message(FATAL_ERROR "Could not find the program gperf")
@ -1,26 +1,45 @@
option (ENABLE_GRPC "Use gRPC" ${ENABLE_LIBRARIES})

if (ENABLE_GRPC)
option (USE_INTERNAL_GRPC_LIBRARY "Set to FALSE to use system gRPC library instead of bundled" ${NOT_UNBUNDLED})

if (NOT ENABLE_GRPC)
if (USE_INTERNAL_GRPC_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal gRPC library with ENABLE_GRPC=OFF")
endif()
return()
endif()

option (USE_INTERNAL_GRPC_LIBRARY
"Set to FALSE to use system gRPC library instead of bundled. (Experimental. Set to OFF on your own risk)"
${NOT_UNBUNDLED})

if (NOT USE_INTERNAL_GRPC_LIBRARY)
find_package(grpc)
if (NOT GRPC_FOUND)
find_path(GRPC_INCLUDE_DIR grpcpp/grpcpp.h)
find_library(GRPC_LIBRARY grpc++)
endif ()

if (GRPC_INCLUDE_DIR AND GRPC_LIBRARY)
set (USE_GRPC ON)
else()
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system gRPC")
endif()
endif()

if (NOT USE_GRPC)
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/grpc/include/grpc++/grpc++.h")
message(WARNING "submodule contrib/grpc is missing. To fix try run: \n git submodule update --init --recursive")
message (WARNING "submodule contrib/grpc is missing. To fix try run: \n git submodule update --init --recursive")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal gRPC")
set (USE_INTERNAL_GRPC_LIBRARY OFF)
elif (NOT USE_PROTOBUF)
message(WARNING "gRPC requires protobuf which is disabled")
elseif (NOT USE_PROTOBUF)
message (WARNING "gRPC requires protobuf which is disabled")
message (${RECONFIGURE_MESSAGE_LEVEL} "Will not use internal gRPC without protobuf")
set (USE_INTERNAL_GRPC_LIBRARY OFF)
else()
set (GRPC_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/grpc/include")
set (GRPC_LIBRARY "libgrpc++")
set (USE_GRPC ON)
endif()
else()
find_package(grpc)
if (GRPC_INCLUDE_DIR AND GRPC_LIBRARY)
set (USE_GRPC ON)
endif()
set (USE_INTERNAL_GRPC_LIBRARY ON)
endif()
endif()

message(STATUS "Using gRPC=${USE_GRPC}: ${GRPC_INCLUDE_DIR} : ${GRPC_LIBRARY}")
message (STATUS "Using gRPC=${USE_GRPC}: ${GRPC_INCLUDE_DIR} : ${GRPC_LIBRARY}")
@ -1,22 +1,30 @@
option (ENABLE_GTEST_LIBRARY "Enable gtest library" ${ENABLE_LIBRARIES})

if (ENABLE_GTEST_LIBRARY)
if (NOT ENABLE_GTEST_LIBRARY)
if(USE_INTERNAL_GTEST_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal Google Test when ENABLE_GTEST_LIBRARY=OFF")
endif()
return()
endif()

option (USE_INTERNAL_GTEST_LIBRARY "Set to FALSE to use system Google Test instead of bundled" ${NOT_UNBUNDLED})

if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/googletest/googletest/CMakeLists.txt")
if (USE_INTERNAL_GTEST_LIBRARY)
message (WARNING "submodule contrib/googletest is missing. to fix try run: \n git submodule update --init --recursive")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal gtest")
set (USE_INTERNAL_GTEST_LIBRARY 0)
endif ()
set (MISSING_INTERNAL_GTEST_LIBRARY 1)
endif ()


if(NOT USE_INTERNAL_GTEST_LIBRARY)
# TODO: autodetect of GTEST_SRC_DIR by EXISTS /usr/src/googletest/CMakeLists.txt
if(NOT GTEST_SRC_DIR)
find_package(GTest)
if (NOT GTEST_INCLUDE_DIRS)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system Google Test")
endif()
endif()
endif()

@ -26,12 +34,13 @@ if (NOT GTEST_SRC_DIR AND NOT GTEST_INCLUDE_DIRS AND NOT MISSING_INTERNAL_GTEST_
set (GTEST_LIBRARIES gtest)
set (GTEST_BOTH_LIBRARIES ${GTEST_MAIN_LIBRARIES} ${GTEST_LIBRARIES})
set (GTEST_INCLUDE_DIRS ${ClickHouse_SOURCE_DIR}/contrib/googletest/googletest)
elseif(USE_INTERNAL_GTEST_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Wouldn't use internal Google Test library")
set (USE_INTERNAL_GTEST_LIBRARY 0)
endif ()

if((GTEST_INCLUDE_DIRS AND GTEST_BOTH_LIBRARIES) OR GTEST_SRC_DIR)
set(USE_GTEST 1)
endif()

endif()

message (STATUS "Using gtest=${USE_GTEST}: ${GTEST_INCLUDE_DIRS} : ${GTEST_BOTH_LIBRARIES} : ${GTEST_SRC_DIR}")

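Assuming the variables above end up populated, a downstream test target would typically consume them as sketched below; the target name example_ut and its source file are hypothetical, not taken from the diff:

if(USE_GTEST)
    # Hypothetical unit-test target consuming the gtest variables set above.
    add_executable(example_ut example_ut.cpp)
    target_include_directories(example_ut SYSTEM PRIVATE ${GTEST_INCLUDE_DIRS})
    target_link_libraries(example_ut PRIVATE ${GTEST_BOTH_LIBRARIES})
endif()
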
@ -1,28 +1,39 @@
option (ENABLE_H3 "Enable H3" ${ENABLE_LIBRARIES})
if (ENABLE_H3)
if(NOT ENABLE_H3)
if(USE_INTERNAL_H3_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal h3 library with ENABLE_H3=OFF")
endif ()
return()
endif()

option (USE_INTERNAL_H3_LIBRARY "Set to FALSE to use system h3 library instead of bundled" ${NOT_UNBUNDLED})
option(USE_INTERNAL_H3_LIBRARY "Set to FALSE to use system h3 library instead of bundled"
ON) # we are not aware of any distribution that provides h3 package

if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/h3/src/h3lib/include/h3Index.h")
if(USE_INTERNAL_H3_LIBRARY)
message(WARNING "submodule contrib/h3 is missing. to fix try run: \n git submodule update --init --recursive")
message(${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal h3 library")
set(USE_INTERNAL_H3_LIBRARY 0)
endif()
set(MISSING_INTERNAL_H3_LIBRARY 1)
set(USE_INTERNAL_H3_LIBRARY 0)
endif()

if (USE_INTERNAL_H3_LIBRARY)
set (H3_LIBRARY h3)
set (H3_INCLUDE_DIR ${ClickHouse_SOURCE_DIR}/contrib/h3/src/h3lib/include)
elseif (NOT MISSING_INTERNAL_H3_LIBRARY)
find_library (H3_LIBRARY h3)
find_path (H3_INCLUDE_DIR NAMES h3/h3api.h PATHS ${H3_INCLUDE_PATHS})
endif ()
if(NOT USE_INTERNAL_H3_LIBRARY)
find_library(H3_LIBRARY h3)
find_path(H3_INCLUDE_DIR NAMES h3/h3api.h PATHS ${H3_INCLUDE_PATHS})

if(NOT H3_LIBRARY OR NOT H3_INCLUDE_DIR)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system h3 library")
endif()
endif()

if (H3_LIBRARY AND H3_INCLUDE_DIR)
set (USE_H3 1)
endif ()

endif ()
elseif(NOT MISSING_INTERNAL_H3_LIBRARY)
set (H3_LIBRARY h3)
set (H3_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/h3/src/h3lib/include")
set (USE_H3 1)
set (USE_INTERNAL_H3_LIBRARY 1)
endif()

message (STATUS "Using h3=${USE_H3}: ${H3_INCLUDE_DIR} : ${H3_LIBRARY}")

@ -1,34 +1,45 @@
if(NOT ARCH_ARM AND NOT OS_FREEBSD AND NOT APPLE AND USE_PROTOBUF)
option(ENABLE_HDFS "Enable HDFS" ${ENABLE_LIBRARIES})
elseif(ENABLE_HDFS OR USE_INTERNAL_HDFS3_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use HDFS3 with current configuration")
endif()

if(ENABLE_HDFS)
option(USE_INTERNAL_HDFS3_LIBRARY "Set to FALSE to use system HDFS3 instead of bundled" ${NOT_UNBUNDLED})
if(NOT ENABLE_HDFS)
if(USE_INTERNAL_HDFS3_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal HDFS3 library with ENABLE_HDFS3=OFF")
endif()
return()
endif()

option(USE_INTERNAL_HDFS3_LIBRARY "Set to FALSE to use system HDFS3 instead of bundled (experimental - set to OFF on your own risk)"
ON) # We don't know any linux distribution with package for it

if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libhdfs3/include/hdfs/hdfs.h")
if(USE_INTERNAL_HDFS3_LIBRARY)
message(WARNING "submodule contrib/libhdfs3 is missing. to fix try run: \n git submodule update --init --recursive")
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal HDFS3 library")
set(USE_INTERNAL_HDFS3_LIBRARY 0)
endif()
set(MISSING_INTERNAL_HDFS3_LIBRARY 1)
set(USE_INTERNAL_HDFS3_LIBRARY 0)
endif()

if(NOT USE_INTERNAL_HDFS3_LIBRARY)
find_library(HDFS3_LIBRARY hdfs3)
find_path(HDFS3_INCLUDE_DIR NAMES hdfs/hdfs.h PATHS ${HDFS3_INCLUDE_PATHS})
if(NOT HDFS3_LIBRARY OR NOT HDFS3_INCLUDE_DIR)
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot find system HDFS3 library")
endif()
endif()

if(HDFS3_LIBRARY AND HDFS3_INCLUDE_DIR)
set(USE_HDFS 1)
elseif(NOT MISSING_INTERNAL_HDFS3_LIBRARY AND LIBGSASL_LIBRARY AND LIBXML2_LIBRARY)
elseif(NOT MISSING_INTERNAL_HDFS3_LIBRARY AND LIBGSASL_LIBRARY AND LIBXML2_LIBRARIES)
set(HDFS3_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/libhdfs3/include")
set(HDFS3_LIBRARY hdfs3)
set(USE_INTERNAL_HDFS3_LIBRARY 1)
set(USE_HDFS 1)
else()
set(USE_INTERNAL_HDFS3_LIBRARY 0)
endif()

message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot enable HDFS3")
endif()

message(STATUS "Using hdfs3=${USE_HDFS}: ${HDFS3_INCLUDE_DIR} : ${HDFS3_LIBRARY}")

@ -4,13 +4,20 @@ else ()
option(ENABLE_ICU "Enable ICU" 0)
endif ()

if (ENABLE_ICU)
if (NOT ENABLE_ICU)
if(USE_INTERNAL_ICU_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal icu library with ENABLE_ICU=OFF")
endif()
message(STATUS "Build without ICU (support for collations and charset conversion functions will be disabled)")
return()
endif()

option (USE_INTERNAL_ICU_LIBRARY "Set to FALSE to use system ICU library instead of bundled" ${NOT_UNBUNDLED})

if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/icu/icu4c/LICENSE")
if (USE_INTERNAL_ICU_LIBRARY)
message (WARNING "submodule contrib/icu is missing. to fix try run: \n git submodule update --init --recursive")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal ICU")
set (USE_INTERNAL_ICU_LIBRARY 0)
endif ()
set (MISSING_INTERNAL_ICU_LIBRARY 1)
@ -24,6 +31,8 @@ if(NOT USE_INTERNAL_ICU_LIBRARY)
#set (ICU_LIBRARIES ${ICU_I18N_LIBRARY} ${ICU_UC_LIBRARY} ${ICU_DATA_LIBRARY} CACHE STRING "")
if(ICU_FOUND)
set(USE_ICU 1)
else()
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system ICU")
endif()
endif()

@ -35,8 +44,6 @@ elseif (NOT MISSING_INTERNAL_ICU_LIBRARY)
set (USE_ICU 1)
endif ()

endif()

if(USE_ICU)
message(STATUS "Using icu=${USE_ICU}: ${ICU_INCLUDE_DIR} : ${ICU_LIBRARIES}")
else()

@ -1,23 +1,35 @@
if (UNBUNDLED AND USE_STATIC_LIBRARIES)
set (ENABLE_LDAP OFF CACHE INTERNAL "")
endif()

option (ENABLE_LDAP "Enable LDAP" ${ENABLE_LIBRARIES})

if (ENABLE_LDAP)
option (USE_INTERNAL_LDAP_LIBRARY "Set to FALSE to use system *LDAP library instead of bundled" ${NOT_UNBUNDLED})
if (NOT ENABLE_LDAP)
if(USE_INTERNAL_LDAP_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal LDAP library with ENABLE_LDAP=OFF")
endif ()
return()
endif()

if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/openldap/README")
option (USE_INTERNAL_LDAP_LIBRARY "Set to FALSE to use system *LDAP library instead of bundled" ${NOT_UNBUNDLED})

if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/openldap/README")
if (USE_INTERNAL_LDAP_LIBRARY)
message (WARNING "Submodule contrib/openldap is missing. To fix try running:\n git submodule update --init --recursive")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal LDAP library")
endif ()

set (USE_INTERNAL_LDAP_LIBRARY 0)
set (MISSING_INTERNAL_LDAP_LIBRARY 1)
endif ()
endif ()

set (OPENLDAP_USE_STATIC_LIBS ${USE_STATIC_LIBRARIES})
set (OPENLDAP_USE_REENTRANT_LIBS 1)
set (OPENLDAP_USE_STATIC_LIBS ${USE_STATIC_LIBRARIES})
set (OPENLDAP_USE_REENTRANT_LIBS 1)

if (NOT USE_INTERNAL_LDAP_LIBRARY)
if (NOT USE_INTERNAL_LDAP_LIBRARY)
if (OPENLDAP_USE_STATIC_LIBS)
message (WARNING "Unable to use external static OpenLDAP libraries, falling back to the bundled version.")
message (${RECONFIGURE_MESSAGE_LEVEL} "Unable to use external OpenLDAP")
set (USE_INTERNAL_LDAP_LIBRARY 1)
else ()
if (APPLE AND NOT OPENLDAP_ROOT_DIR)
@ -25,10 +37,14 @@ if (ENABLE_LDAP)
endif ()

find_package (OpenLDAP)
endif ()
endif ()

if (NOT OPENLDAP_FOUND AND NOT MISSING_INTERNAL_LDAP_LIBRARY)
if (NOT OPENLDAP_FOUND)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system OpenLDAP")
endif()
endif ()
endif ()

if (NOT OPENLDAP_FOUND AND NOT MISSING_INTERNAL_LDAP_LIBRARY)
string (TOLOWER "${CMAKE_SYSTEM_NAME}" _system_name)
string (TOLOWER "${CMAKE_SYSTEM_PROCESSOR}" _system_processor)

@ -54,8 +70,10 @@ if (ENABLE_LDAP)

if (NOT _ldap_supported_platform)
message (WARNING "LDAP support using the bundled library is not implemented for ${CMAKE_SYSTEM_NAME} ${CMAKE_SYSTEM_PROCESSOR} platform.")
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot enable LDAP support")
elseif (NOT USE_SSL)
message (WARNING "LDAP support using the bundled library is not possible if SSL is not used.")
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot enable LDAP support")
else ()
set (USE_INTERNAL_LDAP_LIBRARY 1)
set (OPENLDAP_ROOT_DIR "${ClickHouse_SOURCE_DIR}/contrib/openldap")
@ -74,11 +92,10 @@ if (ENABLE_LDAP)
set (OPENLDAP_LIBRARIES ${OPENLDAP_LDAP_LIBRARY} ${OPENLDAP_LBER_LIBRARY})
set (OPENLDAP_FOUND 1)
endif ()
endif ()
endif ()

if (OPENLDAP_FOUND)
if (OPENLDAP_FOUND)
set (USE_LDAP 1)
endif ()
endif ()

message (STATUS "Using ldap=${USE_LDAP}: ${OPENLDAP_INCLUDE_DIRS} : ${OPENLDAP_LIBRARIES}")

@ -1,12 +1,23 @@
option(ENABLE_GSASL_LIBRARY "Enable gsasl library" ${ENABLE_LIBRARIES})

if (ENABLE_GSASL_LIBRARY)
if (NOT ENABLE_GSASL_LIBRARY)
if(USE_INTERNAL_LIBGSASL_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal libgsasl library with ENABLE_GSASL_LIBRARY=OFF")
endif()
return()
endif()

option (USE_INTERNAL_LIBGSASL_LIBRARY "Set to FALSE to use system libgsasl library instead of bundled" ${NOT_UNBUNDLED})
if (UNBUNDLED)
# when USE_STATIC_LIBRARIES we usually need to pick up hell a lot of dependencies for libgsasl
option (USE_INTERNAL_LIBGSASL_LIBRARY "Set to FALSE to use system libgsasl library instead of bundled" ${USE_STATIC_LIBRARIES})
else()
option (USE_INTERNAL_LIBGSASL_LIBRARY "Set to FALSE to use system libgsasl library instead of bundled" ON)
endif()

if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libgsasl/src/gsasl.h")
if (USE_INTERNAL_LIBGSASL_LIBRARY)
message (WARNING "submodule contrib/libgsasl is missing. to fix try run: \n git submodule update --init --recursive")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal libgsasl")
set (USE_INTERNAL_LIBGSASL_LIBRARY 0)
endif ()
set (MISSING_INTERNAL_LIBGSASL_LIBRARY 1)
@ -15,11 +26,14 @@ endif ()
if (NOT USE_INTERNAL_LIBGSASL_LIBRARY)
find_library (LIBGSASL_LIBRARY gsasl)
find_path (LIBGSASL_INCLUDE_DIR NAMES gsasl.h PATHS ${LIBGSASL_INCLUDE_PATHS})
if (NOT LIBGSASL_LIBRARY OR NOT LIBGSASL_INCLUDE_DIR)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system libgsasl")
endif ()
endif ()

if (LIBGSASL_LIBRARY AND LIBGSASL_INCLUDE_DIR)
elseif (NOT MISSING_INTERNAL_LIBGSASL_LIBRARY)
set (LIBGSASL_INCLUDE_DIR ${ClickHouse_SOURCE_DIR}/contrib/libgsasl/src ${ClickHouse_SOURCE_DIR}/contrib/libgsasl/linux_x86_64/include)
set (LIBGSASL_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/libgsasl/src" "${ClickHouse_SOURCE_DIR}/contrib/libgsasl/linux_x86_64/include")
set (USE_INTERNAL_LIBGSASL_LIBRARY 1)
set (LIBGSASL_LIBRARY libgsasl)
endif ()
@ -28,6 +42,4 @@ if(LIBGSASL_LIBRARY AND LIBGSASL_INCLUDE_DIR)
set (USE_LIBGSASL 1)
endif()

endif()

message (STATUS "Using libgsasl=${USE_LIBGSASL}: ${LIBGSASL_INCLUDE_DIR} : ${LIBGSASL_LIBRARY}")

@ -3,6 +3,7 @@ option (USE_INTERNAL_LIBXML2_LIBRARY "Set to FALSE to use system libxml2 library
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libxml2/libxml.h")
if (USE_INTERNAL_LIBXML2_LIBRARY)
message (WARNING "submodule contrib/libxml2 is missing. to fix try run: \n git submodule update --init --recursive")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal libxml")
set (USE_INTERNAL_LIBXML2_LIBRARY 0)
endif ()
set (MISSING_INTERNAL_LIBXML2_LIBRARY 1)
@ -12,13 +13,22 @@ if (NOT USE_INTERNAL_LIBXML2_LIBRARY)
find_package (LibXml2)
#find_library (LIBXML2_LIBRARY libxml2)
#find_path (LIBXML2_INCLUDE_DIR NAMES libxml.h PATHS ${LIBXML2_INCLUDE_PATHS})

if (NOT LIBXML2_LIBRARY OR NOT LIBXML2_INCLUDE_DIR)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system libxml2")
endif ()

if (USE_STATIC_LIBRARIES)
find_package(LibLZMA)
set (LIBXML2_LIBRARIES ${LIBXML2_LIBRARIES} ${LIBLZMA_LIBRARIES})
endif ()
endif ()

if (LIBXML2_LIBRARY AND LIBXML2_INCLUDE_DIR)
elseif (NOT MISSING_INTERNAL_LIBXML2_LIBRARY)
set (LIBXML2_INCLUDE_DIR ${ClickHouse_SOURCE_DIR}/contrib/libxml2/include ${ClickHouse_SOURCE_DIR}/contrib/libxml2-cmake/linux_x86_64/include)
set (USE_INTERNAL_LIBXML2_LIBRARY 1)
set (LIBXML2_LIBRARY libxml2)
set (LIBXML2_LIBRARIES libxml2)
endif ()

message (STATUS "Using libxml2: ${LIBXML2_INCLUDE_DIR} : ${LIBXML2_LIBRARY}")
message (STATUS "Using libxml2: ${LIBXML2_INCLUDE_DIR} : ${LIBXML2_LIBRARIES}")

@ -1,16 +1,29 @@
# Broken in macos. TODO: update clang, re-test, enable
if (NOT APPLE)
option (ENABLE_EMBEDDED_COMPILER "Set to TRUE to enable support for 'compile_expressions' option for query execution" ${ENABLE_LIBRARIES})
if (APPLE OR SPLIT_SHARED_LIBRARIES OR NOT ARCH_AMD64)
set (ENABLE_EMBEDDED_COMPILER OFF CACHE INTERNAL "")
endif()

option (ENABLE_EMBEDDED_COMPILER "Set to TRUE to enable support for 'compile_expressions' option for query execution" ${ENABLE_LIBRARIES})
# Broken in macos. TODO: update clang, re-test, enable on Apple
if (ENABLE_EMBEDDED_COMPILER AND NOT SPLIT_SHARED_LIBRARIES AND ARCH_AMD64 AND NOT (SANITIZE STREQUAL "undefined"))
option (USE_INTERNAL_LLVM_LIBRARY "Use bundled or system LLVM library." ${NOT_UNBUNDLED})
endif()

if (NOT ENABLE_EMBEDDED_COMPILER)
if(USE_INTERNAL_LLVM_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal LLVM library with ENABLE_EMBEDDED_COMPILER=OFF")
endif()
return()
endif()

if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/llvm/llvm/CMakeLists.txt")
if (USE_INTERNAL_LLVM_LIBRARY)
message (WARNING "submodule contrib/llvm is missing. to fix try run: \n git submodule update --init --recursive")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal LLVM library")
endif()
set (MISSING_INTERNAL_LLVM_LIBRARY 1)
endif ()

if (ENABLE_EMBEDDED_COMPILER)
if (USE_INTERNAL_LLVM_LIBRARY AND NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/llvm/llvm/CMakeLists.txt")
message (WARNING "submodule contrib/llvm is missing. to fix try run: \n git submodule update --init --recursive")
set (USE_INTERNAL_LLVM_LIBRARY 0)
endif ()

if (NOT USE_INTERNAL_LLVM_LIBRARY)
if (NOT USE_INTERNAL_LLVM_LIBRARY)
set (LLVM_PATHS "/usr/local/lib/llvm")

foreach(llvm_v 9 8)
@ -27,53 +40,59 @@ if (ENABLE_EMBEDDED_COMPILER)
option(LLVM_HAS_RTTI "Enable if LLVM was build with RTTI enabled" ON)
set (USE_EMBEDDED_COMPILER 1)
else()
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system LLVM")
set (USE_EMBEDDED_COMPILER 0)
endif()

if (LLVM_FOUND AND OS_LINUX AND USE_LIBCXX)
message(WARNING "Option USE_INTERNAL_LLVM_LIBRARY is not set but the LLVM library from OS packages in Linux is incompatible with libc++ ABI. LLVM Will be disabled.")
if (LLVM_FOUND AND OS_LINUX AND USE_LIBCXX AND NOT FORCE_LLVM_WITH_LIBCXX)
message(WARNING "Option USE_INTERNAL_LLVM_LIBRARY is not set but the LLVM library from OS packages "
"in Linux is incompatible with libc++ ABI. LLVM Will be disabled. Force: -DFORCE_LLVM_WITH_LIBCXX=ON")
message (${RECONFIGURE_MESSAGE_LEVEL} "Unsupported LLVM configuration, cannot enable LLVM")
set (LLVM_FOUND 0)
set (USE_EMBEDDED_COMPILER 0)
endif ()
else()
endif()

if(NOT LLVM_FOUND AND NOT MISSING_INTERNAL_LLVM_LIBRARY)
if (CMAKE_CURRENT_SOURCE_DIR STREQUAL CMAKE_CURRENT_BINARY_DIR)
message(WARNING "Option ENABLE_EMBEDDED_COMPILER is set but LLVM library cannot build if build directory is the same as source directory.")
message(WARNING "Option ENABLE_EMBEDDED_COMPILER is set but internal LLVM library cannot build if build directory is the same as source directory.")
set (LLVM_FOUND 0)
set (USE_EMBEDDED_COMPILER 0)
elseif (SPLIT_SHARED_LIBRARIES)
# llvm-tablegen cannot find shared libraries that we build. Probably can be easily fixed.
message(WARNING "Option ENABLE_EMBEDDED_COMPILER is not compatible with SPLIT_SHARED_LIBRARIES. Build of LLVM will be disabled.")
message(WARNING "Option USE_INTERNAL_LLVM_LIBRARY is not compatible with SPLIT_SHARED_LIBRARIES. Build of LLVM will be disabled.")
set (LLVM_FOUND 0)
set (USE_EMBEDDED_COMPILER 0)
elseif (NOT ARCH_AMD64)
# It's not supported yet, but you can help.
message(WARNING "Option ENABLE_EMBEDDED_COMPILER is only available for x86_64. Build of LLVM will be disabled.")
message(WARNING "Option USE_INTERNAL_LLVM_LIBRARY is only available for x86_64. Build of LLVM will be disabled.")
set (LLVM_FOUND 0)
set (USE_EMBEDDED_COMPILER 0)
elseif (SANITIZE STREQUAL "undefined")
# llvm-tblgen, that is used during LLVM build, doesn't work with UBSan.
message(WARNING "Option ENABLE_EMBEDDED_COMPILER does not work with UBSan, because 'llvm-tblgen' tool from LLVM has undefined behaviour. Build of LLVM will be disabled.")
message(WARNING "Option USE_INTERNAL_LLVM_LIBRARY does not work with UBSan, because 'llvm-tblgen' tool from LLVM has undefined behaviour. Build of LLVM will be disabled.")
set (LLVM_FOUND 0)
set (USE_EMBEDDED_COMPILER 0)
else ()
set (USE_INTERNAL_LLVM_LIBRARY ON)
set (LLVM_FOUND 1)
set (USE_EMBEDDED_COMPILER 1)
set (LLVM_VERSION "9.0.0bundled")
set (LLVM_INCLUDE_DIRS
${ClickHouse_SOURCE_DIR}/contrib/llvm/llvm/include
${ClickHouse_BINARY_DIR}/contrib/llvm/llvm/include
"${ClickHouse_SOURCE_DIR}/contrib/llvm/llvm/include"
"${ClickHouse_BINARY_DIR}/contrib/llvm/llvm/include"
)
set (LLVM_LIBRARY_DIRS ${ClickHouse_BINARY_DIR}/contrib/llvm/llvm)
endif()
endif()

if (LLVM_FOUND)
message(STATUS "LLVM include Directory: ${LLVM_INCLUDE_DIRS}")
message(STATUS "LLVM library Directory: ${LLVM_LIBRARY_DIRS}")
message(STATUS "LLVM C++ compiler flags: ${LLVM_CXXFLAGS}")
set (LLVM_LIBRARY_DIRS "${ClickHouse_BINARY_DIR}/contrib/llvm/llvm")
endif()
endif()

if (LLVM_FOUND)
message(STATUS "LLVM include Directory: ${LLVM_INCLUDE_DIRS}")
message(STATUS "LLVM library Directory: ${LLVM_LIBRARY_DIRS}")
message(STATUS "LLVM C++ compiler flags: ${LLVM_CXXFLAGS}")
else()
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't enable LLVM")
endif()

# This list was generated by listing all LLVM libraries, compiling the binary and removing all libraries while it still compiles.
set (REQUIRED_LLVM_LIBRARIES

@ -1,5 +1,5 @@
if (ENABLE_ODBC AND NOT USE_INTERNAL_ODBC_LIBRARY)
set (LTDL_PATHS "/usr/local/opt/libtool/lib")
find_library (LTDL_LIBRARY ltdl PATHS ${LTDL_PATHS})
find_library (LTDL_LIBRARY ltdl PATHS ${LTDL_PATHS} REQUIRED)
message (STATUS "Using ltdl: ${LTDL_LIBRARY}")
endif ()

@ -1,27 +1,37 @@
option (ENABLE_MSGPACK "Enable msgpack library" ${ENABLE_LIBRARIES})

if (ENABLE_MSGPACK)
if(NOT ENABLE_MSGPACK)
if(USE_INTERNAL_MSGPACK_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal msgpack with ENABLE_MSGPACK=OFF")
endif()
return()
endif()

option (USE_INTERNAL_MSGPACK_LIBRARY "Set to FALSE to use system msgpack library instead of bundled" ${NOT_UNBUNDLED})

if (USE_INTERNAL_MSGPACK_LIBRARY)
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/msgpack-c/include/msgpack.hpp")
if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/msgpack-c/include/msgpack.hpp")
if(USE_INTERNAL_MSGPACK_LIBRARY)
message(WARNING "Submodule contrib/msgpack-c is missing. To fix try run: \n git submodule update --init --recursive")
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal msgpack")
set(USE_INTERNAL_MSGPACK_LIBRARY 0)
endif()
set(MISSING_INTERNAL_MSGPACK_LIBRARY 1)
endif()

if(NOT USE_INTERNAL_MSGPACK_LIBRARY)
find_path(MSGPACK_INCLUDE_DIR NAMES msgpack.hpp PATHS ${MSGPACK_INCLUDE_PATHS})
if(NOT MSGPACK_INCLUDE_DIR)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system msgpack")
endif()
endif()

if (USE_INTERNAL_MSGPACK_LIBRARY)
set(MSGPACK_INCLUDE_DIR ${ClickHouse_SOURCE_DIR}/contrib/msgpack-c/include)
else()
find_path(MSGPACK_INCLUDE_DIR NAMES msgpack.hpp PATHS ${MSGPACK_INCLUDE_PATHS})
if(NOT MSGPACK_INCLUDE_DIR AND NOT MISSING_INTERNAL_MSGPACK_LIBRARY)
set(MSGPACK_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/msgpack-c/include")
set(USE_INTERNAL_MSGPACK_LIBRARY 1)
endif()

if (MSGPACK_INCLUDE_DIR)
set(USE_MSGPACK 1)
endif()

endif()

message(STATUS "Using msgpack=${USE_MSGPACK}: ${MSGPACK_INCLUDE_DIR}")

@ -4,19 +4,26 @@ else ()
option(ENABLE_MYSQL "Enable MySQL" FALSE)
endif ()

if(ENABLE_MYSQL)
option(USE_INTERNAL_MYSQL_LIBRARY "Set to FALSE to use system mysqlclient library instead of bundled" ${NOT_UNBUNDLED})
if(NOT ENABLE_MYSQL)
if (USE_INTERNAL_MYSQL_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal mysql library with ENABLE_MYSQL=OFF")
endif ()
message (STATUS "Build without mysqlclient (support for MYSQL dictionary source will be disabled)")
return()
endif()

if(USE_INTERNAL_MYSQL_LIBRARY AND NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/mariadb-connector-c/README")
option(USE_INTERNAL_MYSQL_LIBRARY "Set to FALSE to use system mysqlclient library instead of bundled" ${NOT_UNBUNDLED})

if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/mariadb-connector-c/README")
if(USE_INTERNAL_MYSQL_LIBRARY)
message(WARNING "submodule contrib/mariadb-connector-c is missing. to fix try run: \n git submodule update --init --recursive")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal mysql library")
set(USE_INTERNAL_MYSQL_LIBRARY 0)
endif()
set(MISSING_INTERNAL_MYSQL_LIBRARY 1)
endif()

if (USE_INTERNAL_MYSQL_LIBRARY)
set (MYSQLCLIENT_LIBRARIES mariadbclient)
set (USE_MYSQL 1)
set (MYSQLXX_LIBRARY mysqlxx)
else ()
if (NOT USE_INTERNAL_MYSQL_LIBRARY)
set (MYSQL_LIB_PATHS
"/usr/local/opt/mysql/lib"
"/usr/local/lib"
@ -33,9 +40,11 @@ if(ENABLE_MYSQL)
"/usr/local/opt/mysql/include"
"/usr/mysql/include"
"/usr/local/include"
"/usr/include/mariadb"
"/usr/include/mysql"
"/usr/include")

find_path (MYSQL_INCLUDE_DIR NAMES mysql/mysql.h mariadb/mysql.h PATHS ${MYSQL_INCLUDE_PATHS} PATH_SUFFIXES mysql)
find_path (MYSQL_INCLUDE_DIR NAMES mysql.h mysql/mysql.h mariadb/mysql.h PATHS ${MYSQL_INCLUDE_PATHS} PATH_SUFFIXES mysql)

if (USE_STATIC_LIBRARIES)
find_library (STATIC_MYSQLCLIENT_LIB NAMES mariadbclient mysqlclient PATHS ${MYSQL_LIB_PATHS} PATH_SUFFIXES mysql)
@ -50,10 +59,18 @@ if(ENABLE_MYSQL)
# /usr/local/include/mysql/mysql_com.h:1011:10: fatal error: mysql/udf_registration_types.h: No such file or directory
set(MYSQL_INCLUDE_DIR ${MYSQL_INCLUDE_DIR} ${MYSQL_INCLUDE_DIR}/mysql)
endif ()
endif ()
else ()
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system mysql library")
endif ()
endif ()

if (NOT USE_MYSQL AND NOT MISSING_INTERNAL_MYSQL_LIBRARY)
set (MYSQLCLIENT_LIBRARIES mariadbclient)
set (MYSQLXX_LIBRARY mysqlxx)
set (USE_MYSQL 1)
set (USE_INTERNAL_MYSQL_LIBRARY 1)
endif()

if (USE_MYSQL)
message (STATUS "Using mysqlclient=${USE_MYSQL}: ${MYSQL_INCLUDE_DIR} : ${MYSQLCLIENT_LIBRARIES}; staticlib=${STATIC_MYSQLCLIENT_LIB}")
else ()

53
cmake/find/odbc.cmake
Normal file
@ -0,0 +1,53 @@
option (ENABLE_ODBC "Enable ODBC library" ${ENABLE_LIBRARIES})

if (NOT OS_LINUX)
if (ENABLE_ODBC)
message(STATUS "ODBC is only supported on Linux")
endif()
set (ENABLE_ODBC OFF CACHE INTERNAL "")
endif ()

if (NOT ENABLE_ODBC)
if (USE_INTERNAL_ODBC_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal ODBC with ENABLE_ODBC=OFF")
endif()

add_library (unixodbc INTERFACE)
target_compile_definitions (unixodbc INTERFACE USE_ODBC=0)

message (STATUS "Not using unixodbc")
return()
endif()

option (USE_INTERNAL_ODBC_LIBRARY "Use internal ODBC library" ${NOT_UNBUNDLED})

if (NOT USE_INTERNAL_ODBC_LIBRARY)
find_library (LIBRARY_ODBC NAMES unixodbc odbc)
find_path (INCLUDE_ODBC sql.h)

if(LIBRARY_ODBC AND INCLUDE_ODBC)
add_library (unixodbc UNKNOWN IMPORTED)
set_target_properties (unixodbc PROPERTIES IMPORTED_LOCATION ${LIBRARY_ODBC})
set_target_properties (unixodbc PROPERTIES INTERFACE_INCLUDE_DIRECTORIES ${INCLUDE_ODBC})
set_target_properties (unixodbc PROPERTIES INTERFACE_COMPILE_DEFINITIONS USE_ODBC=1)

if (USE_STATIC_LIBRARIES)
find_library(LTDL_LIBRARY ltdl)
if (LTDL_LIBRARY)
target_link_libraries(unixodbc INTERFACE ${LTDL_LIBRARY})
endif()
endif()

set(EXTERNAL_ODBC_LIBRARY_FOUND 1)
message (STATUS "Found odbc: ${LIBRARY_ODBC}")
else()
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system ODBC library")
set(EXTERNAL_ODBC_LIBRARY_FOUND 0)
endif()
endif()

if (NOT EXTERNAL_ODBC_LIBRARY_FOUND)
set (USE_INTERNAL_ODBC_LIBRARY 1)
endif ()

message (STATUS "Using unixodbc")

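Every branch of the new odbc.cmake leaves a "unixodbc" target behind: an empty INTERFACE library carrying USE_ODBC=0 when ODBC is off, an UNKNOWN IMPORTED wrapper around the system library carrying USE_ODBC=1, or (presumably) the bundled contrib target when USE_INTERNAL_ODBC_LIBRARY stays set, which is not part of this hunk. That lets consumers link it unconditionally; a minimal sketch of such a consumer, where the target odbc_probe and its source file are hypothetical names:

# Linking "unixodbc" propagates its include directories and the USE_ODBC
# compile definition to the consumer, whichever branch was taken above.
add_executable(odbc_probe odbc_probe.cpp)
target_link_libraries(odbc_probe PRIVATE unixodbc)
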
@ -3,12 +3,14 @@ if(0)
option(ENABLE_OPENCL "Enable OpenCL support" ${ENABLE_LIBRARIES})
endif()

if(ENABLE_OPENCL)
if(NOT ENABLE_OPENCL)
return()
endif()

# Intel OpenCl driver: sudo apt install intel-opencl-icd
# @sa https://github.com/intel/compute-runtime/releases

# OpenCL applications should link wiht ICD loader
# OpenCL applications should link with ICD loader
# sudo apt install opencl-headers ocl-icd-libopencl1
# sudo ln -s /usr/lib/x86_64-linux-gnu/libOpenCL.so.1.0.0 /usr/lib/libOpenCL.so
# TODO: add https://github.com/OCL-dev/ocl-icd as submodule instead
@ -16,8 +18,8 @@ if(ENABLE_OPENCL)
find_package(OpenCL)
if(OpenCL_FOUND)
set(USE_OPENCL 1)
endif()

else()
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't enable OpenCL support")
endif()

message(STATUS "Using opencl=${USE_OPENCL}: ${OpenCL_INCLUDE_DIRS} : ${OpenCL_LIBRARIES}")

@ -1,12 +1,25 @@
option (ENABLE_ORC "Enable ORC" ${ENABLE_LIBRARIES})

if(ENABLE_ORC)
if(NOT ENABLE_ORC)
if(USE_INTERNAL_ORC_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal ORC library with ENABLE_ORC=OFF")
endif()
return()
endif()

if (USE_INTERNAL_PARQUET_LIBRARY)
option(USE_INTERNAL_ORC_LIBRARY "Set to FALSE to use system ORC instead of bundled (experimental set to OFF on your own risk)"
ON)
elseif(USE_INTERNAL_ORC_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Currently internal ORC can be built only with bundled Parquet")
endif()

include(cmake/find/snappy.cmake)
option(USE_INTERNAL_ORC_LIBRARY "Set to FALSE to use system ORC instead of bundled" ${NOT_UNBUNDLED})

if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/orc/c++/include/orc/OrcFile.hh")
if(USE_INTERNAL_ORC_LIBRARY)
message(WARNING "submodule contrib/orc is missing. to fix try run: \n git submodule update --init --recursive")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal ORC")
set(USE_INTERNAL_ORC_LIBRARY 0)
endif()
set(MISSING_INTERNAL_ORC_LIBRARY 1)
@ -14,6 +27,9 @@ endif ()

if (NOT USE_INTERNAL_ORC_LIBRARY)
find_package(orc)
if (NOT ORC_LIBRARY OR NOT ORC_INCLUDE_DIR)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system ORC")
endif ()
endif ()

#if (USE_INTERNAL_ORC_LIBRARY)
@ -30,10 +46,12 @@ elseif(NOT MISSING_INTERNAL_ORC_LIBRARY AND ARROW_LIBRARY AND SNAPPY_LIBRARY) #
set(ORC_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/orc/c++/include")
set(ORC_LIBRARY orc)
set(USE_ORC 1)
set(USE_INTERNAL_ORC_LIBRARY 1)
else()
message (${RECONFIGURE_MESSAGE_LEVEL}
"Can't enable ORC support - missing dependencies. Missing internal orc=${MISSING_INTERNAL_ORC_LIBRARY}. "
"arrow=${ARROW_LIBRARY} snappy=${SNAPPY_LIBRARY}")
set(USE_INTERNAL_ORC_LIBRARY 0)
endif()

endif()

message (STATUS "Using internal=${USE_INTERNAL_ORC_LIBRARY} orc=${USE_ORC}: ${ORC_INCLUDE_DIR} : ${ORC_LIBRARY}")

@ -1,31 +1,123 @@
if (Protobuf_PROTOC_EXECUTABLE)
option (ENABLE_PARQUET "Enable parquet" ${ENABLE_LIBRARIES})
elseif(ENABLE_PARQUET OR USE_INTERNAL_PARQUET_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use parquet without protoc executable")
endif()

if (ENABLE_PARQUET)
if (NOT ENABLE_PARQUET)
if(USE_INTERNAL_PARQUET_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal parquet with ENABLE_PARQUET=OFF")
endif()
message(STATUS "Building without Parquet support")
return()
endif()

if (NOT OS_FREEBSD) # Freebsd: ../contrib/arrow/cpp/src/arrow/util/bit-util.h:27:10: fatal error: endian.h: No such file or directory
option(USE_INTERNAL_PARQUET_LIBRARY "Set to FALSE to use system parquet library instead of bundled" ${NOT_UNBUNDLED})
elseif(USE_INTERNAL_PARQUET_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Using internal parquet is not supported on freebsd")
endif()

if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/arrow/cpp/CMakeLists.txt")
if(USE_INTERNAL_PARQUET_LIBRARY)
message(WARNING "submodule contrib/arrow (required for Parquet) is missing. to fix try run: \n git submodule update --init --recursive")
endif()
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal parquet library")
set(USE_INTERNAL_PARQUET_LIBRARY 0)
endif()
set(MISSING_INTERNAL_PARQUET_LIBRARY 1)
endif()

if (NOT SNAPPY_LIBRARY)
include(cmake/find/snappy.cmake)
endif()

if(NOT USE_INTERNAL_PARQUET_LIBRARY)
find_package(Arrow)
find_package(Parquet)
find_library(THRIFT_LIBRARY thrift)
find_library(UTF8_PROC_LIBRARY utf8proc)
find_package(BZip2)

if(USE_STATIC_LIBRARIES)
find_library(ARROW_DEPS_LIBRARY arrow_bundled_dependencies)

if (ARROW_DEPS_LIBRARY)
set(ARROW_IMPORT_OBJ_DIR "${CMAKE_CURRENT_BINARY_DIR}/contrib/arrow-cmake/imported-objects")
set(ARROW_OTHER_OBJS
"${ARROW_IMPORT_OBJ_DIR}/jemalloc.pic.o"
"${ARROW_IMPORT_OBJ_DIR}/arena.pic.o"
"${ARROW_IMPORT_OBJ_DIR}/background_thread.pic.o"
"${ARROW_IMPORT_OBJ_DIR}/base.pic.o"
"${ARROW_IMPORT_OBJ_DIR}/bin.pic.o"
"${ARROW_IMPORT_OBJ_DIR}/bitmap.pic.o"
"${ARROW_IMPORT_OBJ_DIR}/ckh.pic.o"
"${ARROW_IMPORT_OBJ_DIR}/ctl.pic.o"
"${ARROW_IMPORT_OBJ_DIR}/div.pic.o"
"${ARROW_IMPORT_OBJ_DIR}/extent.pic.o"
"${ARROW_IMPORT_OBJ_DIR}/extent_dss.pic.o"
"${ARROW_IMPORT_OBJ_DIR}/extent_mmap.pic.o"
# skip hash
"${ARROW_IMPORT_OBJ_DIR}/hook.pic.o"
"${ARROW_IMPORT_OBJ_DIR}/large.pic.o"
"${ARROW_IMPORT_OBJ_DIR}/log.pic.o"
"${ARROW_IMPORT_OBJ_DIR}/malloc_io.pic.o"
"${ARROW_IMPORT_OBJ_DIR}/mutex.pic.o"
"${ARROW_IMPORT_OBJ_DIR}/mutex_pool.pic.o"
"${ARROW_IMPORT_OBJ_DIR}/nstime.pic.o"
"${ARROW_IMPORT_OBJ_DIR}/pages.pic.o"
# skip prng
"${ARROW_IMPORT_OBJ_DIR}/prof.pic.o"
"${ARROW_IMPORT_OBJ_DIR}/rtree.pic.o"
"${ARROW_IMPORT_OBJ_DIR}/stats.pic.o"
"${ARROW_IMPORT_OBJ_DIR}/sc.pic.o"
"${ARROW_IMPORT_OBJ_DIR}/sz.pic.o"
"${ARROW_IMPORT_OBJ_DIR}/tcache.pic.o"
# skip ticker
"${ARROW_IMPORT_OBJ_DIR}/tsd.pic.o"
"${ARROW_IMPORT_OBJ_DIR}/test_hooks.pic.o"
"${ARROW_IMPORT_OBJ_DIR}/witness.pic.o"
)
add_custom_command(OUTPUT ${ARROW_OTHER_OBJS}
COMMAND
mkdir -p "${ARROW_IMPORT_OBJ_DIR}" &&
cd "${ARROW_IMPORT_OBJ_DIR}" &&
"${CMAKE_AR}" x "${ARROW_DEPS_LIBRARY}"
)
set_source_files_properties(jemalloc.pic.o PROPERTIES EXTERNAL_OBJECT true GENERATED true)
add_library(imported_arrow_deps STATIC ${ARROW_OTHER_OBJS})

set(ARROW_LIBRARY ${ARROW_STATIC_LIB}
imported_arrow_deps ${THRIFT_LIBRARY} ${UTF8_PROC_LIBRARY} ${BZIP2_LIBRARIES} ${SNAPPY_LIBRARY})
else()
message(WARNING "Using external static Arrow does not always work. "
"Could not find arrow_bundled_dependencies.a. If compilation fails, "
"Try: -D\"USE_INTERNAL_PARQUET_LIBRARY\"=ON or -D\"ENABLE_PARQUET\"=OFF or "
"-D\"USE_STATIC_LIBRARIES\"=OFF")
set(ARROW_LIBRARY ${ARROW_STATIC_LIB})
endif()
set(PARQUET_LIBRARY ${PARQUET_STATIC_LIB})
else()
set(ARROW_LIBRARY ${ARROW_SHARED_LIB})
set(PARQUET_LIBRARY ${PARQUET_SHARED_LIB})
endif()

if(ARROW_INCLUDE_DIR AND ARROW_LIBRARY AND PARQUET_INCLUDE_DIR AND PARQUET_LIBRARY AND THRIFT_LIBRARY AND UTF8_PROC_LIBRARY AND BZIP2_FOUND)
set(USE_PARQUET 1)
set(EXTERNAL_PARQUET_FOUND 1)
else()
message (${RECONFIGURE_MESSAGE_LEVEL}
"Can't find system parquet: arrow=${ARROW_INCLUDE_DIR}:${ARROW_LIBRARY} ;"
" parquet=${PARQUET_INCLUDE_DIR}:${PARQUET_LIBRARY} ;"
" thrift=${THRIFT_LIBRARY} ;")
set(EXTERNAL_PARQUET_FOUND 0)
endif()
endif()

if(ARROW_INCLUDE_DIR AND PARQUET_INCLUDE_DIR)
elseif(NOT MISSING_INTERNAL_PARQUET_LIBRARY AND NOT OS_FREEBSD)
include(cmake/find/snappy.cmake)
if(NOT EXTERNAL_PARQUET_FOUND AND NOT MISSING_INTERNAL_PARQUET_LIBRARY AND NOT OS_FREEBSD)
if(SNAPPY_LIBRARY)
set(CAN_USE_INTERNAL_PARQUET_LIBRARY 1)
else()
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal parquet library without snappy")
endif()

include(CheckCXXSourceCompiles)
@ -38,12 +130,13 @@ elseif(NOT MISSING_INTERNAL_PARQUET_LIBRARY AND NOT OS_FREEBSD)
" HAVE_DOUBLE_CONVERSION_ALLOW_CASE_INSENSIBILITY)

if(NOT HAVE_DOUBLE_CONVERSION_ALLOW_CASE_INSENSIBILITY) # HAVE_STD_RANDOM_SHUFFLE
message(STATUS "Disabling internal parquet library because arrow is broken (can't use old double_conversion)")
message (${RECONFIGURE_MESSAGE_LEVEL} "Disabling internal parquet library because arrow is broken (can't use old double_conversion)")
set(CAN_USE_INTERNAL_PARQUET_LIBRARY 0)
endif()
endif()

if(NOT CAN_USE_INTERNAL_PARQUET_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal parquet")
set(USE_INTERNAL_PARQUET_LIBRARY 0)
else()
set(USE_INTERNAL_PARQUET_LIBRARY 1)
@ -53,7 +146,7 @@ elseif(NOT MISSING_INTERNAL_PARQUET_LIBRARY AND NOT OS_FREEBSD)
set(PARQUET_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/arrow/cpp/src" ${ClickHouse_BINARY_DIR}/contrib/arrow/cpp/src)
endif()

if(${USE_STATIC_LIBRARIES})
if(MAKE_STATIC_LIBRARIES)
set(FLATBUFFERS_LIBRARY flatbuffers)
set(ARROW_LIBRARY arrow_static)
set(PARQUET_LIBRARY parquet_static)
@ -72,12 +165,15 @@ elseif(NOT MISSING_INTERNAL_PARQUET_LIBRARY AND NOT OS_FREEBSD)
set(USE_ORC 1)
set(USE_ARROW 1)
endif()
endif()

elseif(OS_FREEBSD)
message (${RECONFIGURE_MESSAGE_LEVEL} "Using internal parquet library on FreeBSD is not supported")
endif()

if(USE_PARQUET)
message(STATUS "Using Parquet: ${ARROW_LIBRARY}:${ARROW_INCLUDE_DIR} ; ${PARQUET_LIBRARY}:${PARQUET_INCLUDE_DIR} ; ${THRIFT_LIBRARY} ; ${FLATBUFFERS_LIBRARY}")
message(STATUS "Using Parquet: arrow=${ARROW_LIBRARY}:${ARROW_INCLUDE_DIR} ;"
" parquet=${PARQUET_LIBRARY}:${PARQUET_INCLUDE_DIR} ;"
" thrift=${THRIFT_LIBRARY} ;"
" flatbuffers=${FLATBUFFERS_LIBRARY}")
else()
message(STATUS "Building without Parquet support")
endif()

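The static-Arrow branch above relies on unpacking arrow_bundled_dependencies.a with "${CMAKE_AR} x" and re-wrapping the extracted objects as a regular static library. A condensed sketch of that technique with hypothetical names (mydeps, MYDEPS_*, mydeps_objs and the object file list are all placeholders), not a drop-in replacement for the hunk above:

find_library(MYDEPS_ARCHIVE mydeps)  # a prebuilt libmydeps.a found on the system
set(MYDEPS_OBJ_DIR "${CMAKE_CURRENT_BINARY_DIR}/mydeps-objects")
set(MYDEPS_OBJS "${MYDEPS_OBJ_DIR}/alloc.pic.o" "${MYDEPS_OBJ_DIR}/io.pic.o")  # members to pull out

# Extract the archive members into the build tree at build time.
add_custom_command(OUTPUT ${MYDEPS_OBJS}
    COMMAND mkdir -p "${MYDEPS_OBJ_DIR}" &&
            cd "${MYDEPS_OBJ_DIR}" &&
            "${CMAKE_AR}" x "${MYDEPS_ARCHIVE}")

# Mark the extracted files as prebuilt objects and bundle them into a library
# that other targets can link against.
set_source_files_properties(${MYDEPS_OBJS} PROPERTIES EXTERNAL_OBJECT true GENERATED true)
add_library(mydeps_objs STATIC ${MYDEPS_OBJS})
set_target_properties(mydeps_objs PROPERTIES LINKER_LANGUAGE C)
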
10
cmake/find/poco.cmake
Normal file
@ -0,0 +1,10 @@
option (USE_INTERNAL_POCO_LIBRARY "Use internal Poco library" ON)

if (USE_INTERNAL_POCO_LIBRARY)
set (LIBRARY_DIR ${ClickHouse_SOURCE_DIR}/contrib/poco)
else ()
find_path (ROOT_DIR NAMES Foundation/include/Poco/Poco.h include/Poco/Poco.h)
if (NOT ROOT_DIR)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system poco")
endif()
endif ()

@ -1,12 +1,18 @@
option(ENABLE_PROTOBUF "Enable protobuf" ${ENABLE_LIBRARIES})

if(ENABLE_PROTOBUF)
if(NOT ENABLE_PROTOBUF)
if(USE_INTERNAL_PROTOBUF_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal protobuf with ENABLE_PROTOBUF=OFF")
endif()
return()
endif()

option(USE_INTERNAL_PROTOBUF_LIBRARY "Set to FALSE to use system protobuf instead of bundled" ${NOT_UNBUNDLED})

if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/protobuf/cmake/CMakeLists.txt")
if(USE_INTERNAL_PROTOBUF_LIBRARY)
message(WARNING "submodule contrib/protobuf is missing. to fix try run: \n git submodule update --init --recursive")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal protobuf")
set(USE_INTERNAL_PROTOBUF_LIBRARY 0)
endif()
set(MISSING_INTERNAL_PROTOBUF_LIBRARY 1)
@ -14,12 +20,17 @@ endif()

if(NOT USE_INTERNAL_PROTOBUF_LIBRARY)
find_package(Protobuf)
if (Protobuf_LIBRARY AND Protobuf_INCLUDE_DIR AND Protobuf_PROTOC_EXECUTABLE)
set(EXTERNAL_PROTOBUF_LIBRARY_FOUND 1)
set(USE_PROTOBUF 1)
else()
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system protobuf")
set(EXTERNAL_PROTOBUF_LIBRARY_FOUND 0)
endif()
endif()

if (Protobuf_LIBRARY AND Protobuf_INCLUDE_DIR)
set(USE_PROTOBUF 1)
elseif(NOT MISSING_INTERNAL_PROTOBUF_LIBRARY)
set(Protobuf_INCLUDE_DIR ${ClickHouse_SOURCE_DIR}/contrib/protobuf/src)
if (NOT EXTERNAL_PROTOBUF_LIBRARY_FOUND AND NOT MISSING_INTERNAL_PROTOBUF_LIBRARY)
set(Protobuf_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/protobuf/src")

set(USE_PROTOBUF 1)
set(USE_INTERNAL_PROTOBUF_LIBRARY 1)
@ -34,13 +45,13 @@ if(OS_FREEBSD AND SANITIZE STREQUAL "address")
# ../contrib/protobuf/src/google/protobuf/arena_impl.h:45:10: fatal error: 'sanitizer/asan_interface.h' file not found
# #include <sanitizer/asan_interface.h>
if(LLVM_INCLUDE_DIRS)
set(Protobuf_INCLUDE_DIR ${Protobuf_INCLUDE_DIR} ${LLVM_INCLUDE_DIRS})
set(Protobuf_INCLUDE_DIR "${Protobuf_INCLUDE_DIR}" ${LLVM_INCLUDE_DIRS})
else()
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use protobuf on FreeBSD with address sanitizer without LLVM")
set(USE_PROTOBUF 0)
endif()
endif()

include (${ClickHouse_SOURCE_DIR}/cmake/protobuf_generate_cpp.cmake)
endif()
include ("${ClickHouse_SOURCE_DIR}/cmake/protobuf_generate_cpp.cmake")

message(STATUS "Using protobuf=${USE_PROTOBUF}: ${Protobuf_INCLUDE_DIR} : ${Protobuf_LIBRARY} : ${Protobuf_PROTOC_EXECUTABLE}")

@ -1,5 +1,8 @@
option(ENABLE_RAPIDJSON "Use rapidjson" ${ENABLE_LIBRARIES})
if(NOT ENABLE_RAPIDJSON)
if(USE_INTERNAL_RAPIDJSON_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal rapidjson library with ENABLE_RAPIDJSON=OFF")
endif()
return()
endif()

@ -8,6 +11,7 @@ option(USE_INTERNAL_RAPIDJSON_LIBRARY "Set to FALSE to use system rapidjson libr
if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/rapidjson/include/rapidjson/rapidjson.h")
if(USE_INTERNAL_RAPIDJSON_LIBRARY)
message(WARNING "submodule contrib/rapidjson is missing. to fix try run: \n git submodule update --init --recursive")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal rapidjson library")
set(USE_INTERNAL_RAPIDJSON_LIBRARY 0)
endif()
set(MISSING_INTERNAL_RAPIDJSON_LIBRARY 1)
@ -15,6 +19,9 @@ endif()

if(NOT USE_INTERNAL_RAPIDJSON_LIBRARY)
find_path(RAPIDJSON_INCLUDE_DIR NAMES rapidjson/rapidjson.h PATHS ${RAPIDJSON_INCLUDE_PATHS})
if(NOT RAPIDJSON_INCLUDE_DIR)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system rapidjson")
endif()
endif()

if(RAPIDJSON_INCLUDE_DIR)

@ -1,37 +1,66 @@
# Freebsd: contrib/cppkafka/include/cppkafka/detail/endianness.h:53:23: error: 'betoh16' was not declared in this scope
if (NOT ARCH_ARM AND NOT OS_FREEBSD AND OPENSSL_FOUND)
option (ENABLE_RDKAFKA "Enable kafka" ${ENABLE_LIBRARIES})
elseif(ENABLE_RDKAFKA AND NOT OPENSSL_FOUND)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use librdkafka without SSL")
elseif(ENABLE_RDKAFKA)
message (${RECONFIGURE_MESSAGE_LEVEL} "librdkafka is not supported on ARM and on FreeBSD")
endif ()

if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/cppkafka/CMakeLists.txt")
message (WARNING "submodule contrib/cppkafka is missing. to fix try run: \n git submodule update --init --recursive")
set (ENABLE_RDKAFKA 0)
endif ()

if (ENABLE_RDKAFKA)
if (NOT ENABLE_RDKAFKA)
if (USE_INTERNAL_RDKAFKA_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal librdkafka with ENABLE_RDKAFKA=OFF")
endif()
return()
endif()

if (NOT ARCH_ARM AND USE_LIBGSASL)
option (USE_INTERNAL_RDKAFKA_LIBRARY "Set to FALSE to use system librdkafka instead of the bundled" ${NOT_UNBUNDLED})
elseif(USE_INTERNAL_RDKAFKA_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal librdkafka with ARCH_ARM=${ARCH_ARM} AND USE_LIBGSASL=${USE_LIBGSASL}")
endif ()

if (USE_INTERNAL_RDKAFKA_LIBRARY AND NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/librdkafka/CMakeLists.txt")
message (WARNING "submodule contrib/librdkafka is missing. to fix try run: \n git submodule update --init --recursive")
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/cppkafka/CMakeLists.txt")
if(USE_INTERNAL_RDKAFKA_LIBRARY)
message (WARNING "submodule contrib/cppkafka is missing. to fix try run: \n git submodule update --init --recursive")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal cppkafka")
set (USE_INTERNAL_RDKAFKA_LIBRARY 0)
endif()
set (MISSING_INTERNAL_CPPKAFKA_LIBRARY 1)
endif ()

if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/librdkafka/CMakeLists.txt")
if(USE_INTERNAL_RDKAFKA_LIBRARY OR MISSING_INTERNAL_CPPKAFKA_LIBRARY)
message (WARNING "submodule contrib/librdkafka is missing. to fix try run: \n git submodule update --init --recursive")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal rdkafka")
set (USE_INTERNAL_RDKAFKA_LIBRARY 0)
endif()
set (MISSING_INTERNAL_RDKAFKA_LIBRARY 1)
endif ()

if (NOT USE_INTERNAL_RDKAFKA_LIBRARY)
find_library (RDKAFKA_LIB rdkafka)
find_path (RDKAFKA_INCLUDE_DIR NAMES librdkafka/rdkafka.h PATHS ${RDKAFKA_INCLUDE_PATHS})
if (NOT RDKAFKA_LIB OR NOT RDKAFKA_INCLUDE_DIR)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system librdkafka")
endif()

if (USE_STATIC_LIBRARIES AND NOT OS_FREEBSD)
find_library (SASL2_LIBRARY sasl2)
if (NOT SASL2_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system sasl2 library needed for static librdkafka")
endif()
endif ()
set (CPPKAFKA_LIBRARY cppkafka) # TODO: try to use unbundled version.
endif ()

if (RDKAFKA_LIB AND RDKAFKA_INCLUDE_DIR)
set (USE_RDKAFKA 1)
set (RDKAFKA_LIBRARY ${RDKAFKA_LIB} ${OPENSSL_LIBRARIES})
add_library (rdkafka_imp UNKNOWN IMPORTED)
set_target_properties (rdkafka_imp PROPERTIES IMPORTED_LOCATION ${RDKAFKA_LIB})
set_target_properties (rdkafka_imp PROPERTIES INTERFACE_INCLUDE_DIRECTORIES ${RDKAFKA_INCLUDE_DIR})

set (RDKAFKA_LIBRARY rdkafka_imp ${OPENSSL_LIBRARIES})
set (CPPKAFKA_LIBRARY cppkafka)
if (SASL2_LIBRARY)
list (APPEND RDKAFKA_LIBRARY ${SASL2_LIBRARY})
@ -39,14 +68,14 @@ if (RDKAFKA_LIB AND RDKAFKA_INCLUDE_DIR)
if (LZ4_LIBRARY)
list (APPEND RDKAFKA_LIBRARY ${LZ4_LIBRARY})
endif ()
elseif (NOT MISSING_INTERNAL_RDKAFKA_LIBRARY AND NOT ARCH_ARM)
elseif (NOT MISSING_INTERNAL_RDKAFKA_LIBRARY AND NOT MISSING_INTERNAL_CPPKAFKA_LIBRARY AND NOT ARCH_ARM)
set (USE_INTERNAL_RDKAFKA_LIBRARY 1)
set (RDKAFKA_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/librdkafka/src")
set (RDKAFKA_LIBRARY rdkafka)
set (CPPKAFKA_LIBRARY cppkafka)
set (USE_RDKAFKA 1)
endif ()

elseif(ARCH_ARM)
message (${RECONFIGURE_MESSAGE_LEVEL} "Using internal rdkafka on ARM is not supported")
endif ()

message (STATUS "Using librdkafka=${USE_RDKAFKA}: ${RDKAFKA_INCLUDE_DIR} : ${RDKAFKA_LIBRARY} ${CPPKAFKA_LIBRARY}")

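The system-librdkafka branch above wraps the discovered library in an UNKNOWN IMPORTED target (rdkafka_imp) so that its include path travels with the link. The same pattern in isolation, for a hypothetical library "bar" (all bar/BAR names are placeholders, not from the diff):

find_library(BAR_LIBRARY bar)
find_path(BAR_INCLUDE_DIR NAMES bar/bar.h)

if(BAR_LIBRARY AND BAR_INCLUDE_DIR)
    # UNKNOWN IMPORTED: CMake does not need to know whether the file is static
    # or shared; linking bar_imp pulls in both the library and its headers.
    add_library(bar_imp UNKNOWN IMPORTED)
    set_target_properties(bar_imp PROPERTIES
        IMPORTED_LOCATION "${BAR_LIBRARY}"
        INTERFACE_INCLUDE_DIRECTORIES "${BAR_INCLUDE_DIR}")
endif()
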
@ -3,6 +3,7 @@ option (USE_INTERNAL_RE2_LIBRARY "Set to FALSE to use system re2 library instead
if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/re2/CMakeLists.txt")
if(USE_INTERNAL_RE2_LIBRARY)
message(WARNING "submodule contrib/re2 is missing. to fix try run: \n git submodule update --init --recursive")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal re2 library")
endif()
set(USE_INTERNAL_RE2_LIBRARY 0)
set(MISSING_INTERNAL_RE2_LIBRARY 1)
@ -11,6 +12,9 @@ endif()
if (NOT USE_INTERNAL_RE2_LIBRARY)
find_library (RE2_LIBRARY re2)
find_path (RE2_INCLUDE_DIR NAMES re2/re2.h PATHS ${RE2_INCLUDE_PATHS})
if (NOT RE2_LIBRARY OR NOT RE2_INCLUDE_DIR)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system re2 library")
endif ()
endif ()

string(FIND ${CMAKE_CURRENT_BINARY_DIR} " " _have_space)
@ -24,12 +28,13 @@ if (RE2_LIBRARY AND RE2_INCLUDE_DIR)
elseif (NOT MISSING_INTERNAL_RE2_LIBRARY)
set (USE_INTERNAL_RE2_LIBRARY 1)
set (RE2_LIBRARY re2)
set (RE2_INCLUDE_DIR ${ClickHouse_SOURCE_DIR}/contrib/re2)
set (RE2_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/re2")
if (NOT MISSING_INTERNAL_RE2_ST_LIBRARY)
set (RE2_ST_LIBRARY re2_st)
set (USE_RE2_ST 1)
else ()
set (RE2_ST_LIBRARY ${RE2_LIBRARY})
message (${RECONFIGURE_MESSAGE_LEVEL} "Using internal re2 library instead of re2_st")
endif ()
endif ()

@ -1,26 +1,45 @@
if(NOT OS_FREEBSD AND NOT APPLE AND NOT ARCH_ARM)
option(ENABLE_S3 "Enable S3" ${ENABLE_LIBRARIES})
elseif(ENABLE_S3 OR USE_INTERNAL_AWS_S3_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use S3 on ARM, Apple or FreeBSD")
endif()

if(ENABLE_S3)
option(USE_INTERNAL_AWS_S3_LIBRARY "Set to FALSE to use system S3 instead of bundled" ${NOT_UNBUNDLED})
if(NOT ENABLE_S3)
if(USE_INTERNAL_AWS_S3_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal S3 library with ENABLE_S3=OFF")
endif()
return()
endif()

if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/aws/aws-cpp-sdk-s3")
option(USE_INTERNAL_AWS_S3_LIBRARY "Set to FALSE to use system S3 instead of bundled (experimental set to OFF on your own risk)"
ON)

if (NOT USE_INTERNAL_POCO_LIBRARY AND USE_INTERNAL_AWS_S3_LIBRARY)
message (FATAL_ERROR "Currently S3 support can be built only with internal POCO library")
endif()

if (NOT USE_INTERNAL_AWS_S3_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Compilation with external S3 library is not supported yet")
endif()

if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/aws/aws-cpp-sdk-s3")
message (WARNING "submodule contrib/aws is missing. to fix try run: \n git submodule update --init --recursive")
set (MISSING_AWS_S3 1)
if (USE_INTERNAL_AWS_S3_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal S3 library")
endif ()
set (MISSING_AWS_S3 1)
endif ()

if (USE_INTERNAL_AWS_S3_LIBRARY AND NOT MISSING_AWS_S3)
if (USE_INTERNAL_AWS_S3_LIBRARY AND NOT MISSING_AWS_S3)
set(AWS_S3_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws/aws-cpp-sdk-s3/include")
set(AWS_S3_CORE_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws/aws-cpp-sdk-core/include")
set(AWS_S3_LIBRARY aws_s3)
set(USE_INTERNAL_AWS_S3_LIBRARY 1)
set(USE_AWS_S3 1)
else()
else()
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't enable S3")
set(USE_INTERNAL_AWS_S3_LIBRARY 0)
set(USE_AWS_S3 0)
endif ()

endif()
endif ()

message (STATUS "Using aws_s3=${USE_AWS_S3}: ${AWS_S3_INCLUDE_DIR} : ${AWS_S3_LIBRARY}")

@ -2,6 +2,9 @@ set (SENTRY_LIBRARY "sentry")
set (SENTRY_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/sentry-native/include")
if (NOT EXISTS "${SENTRY_INCLUDE_DIR}/sentry.h")
message (WARNING "submodule contrib/sentry-native is missing. to fix try run: \n git submodule update --init --recursive")
if (USE_SENTRY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal sentry library")
endif()
return()
endif ()

@ -16,4 +19,6 @@ if (NOT OS_FREEBSD AND NOT SPLIT_SHARED_LIBRARIES AND NOT_UNBUNDLED AND NOT (OS_
message (STATUS "Using sentry=${USE_SENTRY}: ${SENTRY_LIBRARY}")

include_directories("${SENTRY_INCLUDE_DIR}")
elseif (USE_SENTRY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Sentry is not supported in current configuration")
endif ()

@ -1,8 +1,11 @@
option (USE_SIMDJSON "Use simdjson" ${ENABLE_LIBRARIES})

if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/simdjson/include/simdjson.h")
message (WARNING "submodule contrib/simdjson is missing. to fix try run: \n git submodule update --init --recursive")
if (USE_SIMDJSON)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal simdjson library")
endif()
return()
endif ()

option (USE_SIMDJSON "Use simdjson" ON)

message(STATUS "Using simdjson=${USE_SIMDJSON}")

@ -1,13 +1,21 @@
option(USE_SNAPPY "Enable support of snappy library" ${ENABLE_LIBRARIES})

if(USE_SNAPPY)
option (USE_INTERNAL_SNAPPY_LIBRARY "Set to FALSE to use system snappy library instead of bundled" ${NOT_UNBUNDLED})

if(NOT USE_INTERNAL_SNAPPY_LIBRARY)
find_library(SNAPPY_LIBRARY snappy)
else ()
set(SNAPPY_LIBRARY snappy)
if(NOT USE_SNAPPY)
if (USE_INTERNAL_SNAPPY_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal snappy library with USE_SNAPPY=OFF")
endif()
return()
endif()

message (STATUS "Using snappy: ${SNAPPY_LIBRARY}")
endif ()
option (USE_INTERNAL_SNAPPY_LIBRARY "Set to FALSE to use system snappy library instead of bundled" ${NOT_UNBUNDLED})

if(NOT USE_INTERNAL_SNAPPY_LIBRARY)
find_library(SNAPPY_LIBRARY snappy)
if (NOT SNAPPY_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system snappy library")
endif()
else ()
set(SNAPPY_LIBRARY snappy)
endif()

message (STATUS "Using snappy: ${SNAPPY_LIBRARY}")

@ -1,7 +1,11 @@
option (USE_INTERNAL_SPARSEHASH_LIBRARY "Set to FALSE to use system sparsehash library instead of bundled" ${NOT_UNBUNDLED})
option (USE_INTERNAL_SPARSEHASH_LIBRARY "Set to FALSE to use system sparsehash library instead of bundled"
ON) # ON by default as we are not aware of any system providing package for sparsehash-c11

if (NOT USE_INTERNAL_SPARSEHASH_LIBRARY)
find_path (SPARSEHASH_INCLUDE_DIR NAMES sparsehash/sparse_hash_map PATHS ${SPARSEHASH_INCLUDE_PATHS})
if (NOT SPARSEHASH_INCLUDE_DIR)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system sparsehash library")
endif ()
endif ()

if (SPARSEHASH_INCLUDE_DIR)

@ -1,12 +1,18 @@
option(ENABLE_SSL "Enable ssl" ${ENABLE_LIBRARIES})

if(ENABLE_SSL)
if(NOT ENABLE_SSL)
if (USE_INTERNAL_SSL_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal ssl library with ENABLE_SSL=OFF")
endif()
return()
endif()

option(USE_INTERNAL_SSL_LIBRARY "Set to FALSE to use system *ssl library instead of bundled" ${NOT_UNBUNDLED})

if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/openssl/README")
if(USE_INTERNAL_SSL_LIBRARY)
message(WARNING "submodule contrib/openssl is missing. to fix try run: \n git submodule update --init --recursive")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal ssl library")
endif()
set(USE_INTERNAL_SSL_LIBRARY 0)
set(MISSING_INTERNAL_SSL_LIBRARY 1)
@ -36,6 +42,10 @@ if (NOT USE_INTERNAL_SSL_LIBRARY)
set (OPENSSL_FOUND 1)
endif ()
endif ()

if (NOT OPENSSL_FOUND)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system ssl")
endif()
endif ()

if (NOT OPENSSL_FOUND AND NOT MISSING_INTERNAL_SSL_LIBRARY)
@ -123,7 +133,4 @@ if(OPENSSL_FOUND AND NOT USE_INTERNAL_SSL_LIBRARY)
endif()
endif()


endif ()

message (STATUS "Using ssl=${USE_SSL}: ${OPENSSL_INCLUDE_DIR} : ${OPENSSL_LIBRARIES}")

@ -14,6 +14,10 @@ if (ENABLE_STATS)
set(GCEM_INCLUDE_DIR ${ClickHouse_SOURCE_DIR}/contrib/gcem/include)
set (USE_STATS 1)
endif()

if (NOT USE_STATS)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't enable stats library")
endif()
endif()

message (STATUS "Using stats=${USE_STATS} : ${STATS_INCLUDE_DIR}")

17
cmake/find/termcap.cmake
Normal file
@ -0,0 +1,17 @@
if (ENABLE_EMBEDDED_COMPILER AND NOT USE_INTERNAL_LLVM_LIBRARY AND USE_STATIC_LIBRARIES)
find_library (TERMCAP_LIBRARY tinfo)
if (NOT TERMCAP_LIBRARY)
find_library (TERMCAP_LIBRARY ncurses)
endif()
if (NOT TERMCAP_LIBRARY)
find_library (TERMCAP_LIBRARY termcap)
endif()

if (NOT TERMCAP_LIBRARY)
message (FATAL_ERROR "Statically Linking external LLVM requires termcap")
endif()

target_link_libraries(LLVMSupport INTERFACE ${TERMCAP_LIBRARY})

message (STATUS "Using termcap: ${TERMCAP_LIBRARY}")
endif()
@ -6,12 +6,14 @@ else ()
set (INTERNAL_ZLIB_NAME "zlib" CACHE INTERNAL "")
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/${INTERNAL_ZLIB_NAME}")
message (WARNING "Will use standard zlib, please clone manually:\n git clone https://github.com/madler/zlib.git ${ClickHouse_SOURCE_DIR}/contrib/${INTERNAL_ZLIB_NAME}")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal zlib library")
endif ()
endif ()

if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/${INTERNAL_ZLIB_NAME}/zlib.h")
if(USE_INTERNAL_ZLIB_LIBRARY)
message(WARNING "submodule contrib/${INTERNAL_ZLIB_NAME} is missing. to fix try run: \n git submodule update --init --recursive")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal zlib library")
endif()
set(USE_INTERNAL_ZLIB_LIBRARY 0)
set(MISSING_INTERNAL_ZLIB_LIBRARY 1)
@ -19,6 +21,11 @@ endif()

if (NOT USE_INTERNAL_ZLIB_LIBRARY)
find_package (ZLIB)
if (NOT ZLIB_FOUND)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system zlib library")
else()
set (ZLIB_NAME "libz")
endif()
endif ()

if (NOT ZLIB_FOUND AND NOT MISSING_INTERNAL_ZLIB_LIBRARY)
@ -28,6 +35,7 @@ if (NOT ZLIB_FOUND AND NOT MISSING_INTERNAL_ZLIB_LIBRARY)
set (ZLIB_INCLUDE_DIRECTORIES ${ZLIB_INCLUDE_DIR}) # for protobuf
set (ZLIB_FOUND 1) # for poco
set (ZLIB_LIBRARIES zlib CACHE INTERNAL "")
set (ZLIB_NAME "${INTERNAL_ZLIB_NAME}")
endif ()

message (STATUS "Using ${INTERNAL_ZLIB_NAME}: ${ZLIB_INCLUDE_DIR} : ${ZLIB_LIBRARIES}")
message (STATUS "Using ${ZLIB_NAME}: ${ZLIB_INCLUDE_DIR} : ${ZLIB_LIBRARIES}")

@ -3,14 +3,18 @@ option (USE_INTERNAL_ZSTD_LIBRARY "Set to FALSE to use system zstd library inste
if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/zstd/lib/zstd.h")
if(USE_INTERNAL_ZSTD_LIBRARY)
message(WARNING "submodule contrib/zstd is missing. to fix try run: \n git submodule update --init --recursive")
endif()
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal zstd library")
set(USE_INTERNAL_ZSTD_LIBRARY 0)
endif()
set(MISSING_INTERNAL_ZSTD_LIBRARY 1)
endif()

if (NOT USE_INTERNAL_ZSTD_LIBRARY)
find_library (ZSTD_LIBRARY zstd)
find_path (ZSTD_INCLUDE_DIR NAMES zstd.h PATHS ${ZSTD_INCLUDE_PATHS})
if (NOT ZSTD_LIBRARY OR NOT ZSTD_INCLUDE_DIR)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system zstd library")
endif ()
endif ()

if (ZSTD_LIBRARY AND ZSTD_INCLUDE_DIR)

@ -17,21 +17,33 @@ endif ()
if (CMAKE_CROSSCOMPILING)
if (OS_DARWIN)
# FIXME: broken dependencies
set (USE_SNAPPY OFF CACHE INTERNAL "")
set (ENABLE_PROTOBUF OFF CACHE INTERNAL "")
set (ENABLE_PARQUET OFF CACHE INTERNAL "")
set (ENABLE_GRPC OFF CACHE INTERNAL "") # no protobuf -> no grpc

set (USE_SNAPPY OFF CACHE INTERNAL "")
set (ENABLE_PARQUET OFF CACHE INTERNAL "") # no snappy and protobuf -> no parquet
set (ENABLE_ORC OFF CACHE INTERNAL "") # no arrow (parquet) -> no orc

set (ENABLE_ICU OFF CACHE INTERNAL "")
set (ENABLE_FASTOPS OFF CACHE INTERNAL "")
elseif (OS_LINUX OR OS_ANDROID)
if (ARCH_AARCH64)
# FIXME: broken dependencies
set (ENABLE_PROTOBUF OFF CACHE INTERNAL "")
set (ENABLE_GRPC OFF CACHE INTERNAL "")

set (ENABLE_PARQUET OFF CACHE INTERNAL "")
set (ENABLE_ORC OFF CACHE INTERNAL "")

set (ENABLE_MYSQL OFF CACHE INTERNAL "")
endif ()
elseif (OS_FREEBSD)
# FIXME: broken dependencies
set (ENABLE_PROTOBUF OFF CACHE INTERNAL "")
set (ENABLE_GRPC OFF CACHE INTERNAL "")

set (ENABLE_ORC OFF CACHE INTERNAL "") # no protobuf -> no parquet -> no orc

set (ENABLE_EMBEDDED_COMPILER OFF CACHE INTERNAL "")
else ()
message (FATAL_ERROR "Trying to cross-compile to unsupported system: ${CMAKE_SYSTEM_NAME}!")

@ -41,17 +41,16 @@ STRING(REGEX MATCHALL "[0-9]+" COMPILER_VERSION_LIST ${CMAKE_CXX_COMPILER_VERSIO
LIST(GET COMPILER_VERSION_LIST 0 COMPILER_VERSION_MAJOR)

option (LINKER_NAME "Linker name or full path")
if (COMPILER_GCC)
if (COMPILER_GCC AND NOT LINKER_NAME)
find_program (LLD_PATH NAMES "ld.lld")
find_program (GOLD_PATH NAMES "ld.gold")
else ()
elseif (NOT LINKER_NAME)
find_program (LLD_PATH NAMES "ld.lld-${COMPILER_VERSION_MAJOR}" "lld-${COMPILER_VERSION_MAJOR}" "ld.lld" "lld")
find_program (GOLD_PATH NAMES "ld.gold" "gold")
endif ()

if (OS_LINUX)
if (OS_LINUX AND NOT LINKER_NAME)
# We prefer LLD linker over Gold or BFD on Linux.
if (NOT LINKER_NAME)
if (LLD_PATH)
if (COMPILER_GCC)
# GCC driver requires one of supported linker names like "lld".
@ -61,7 +60,6 @@ if (OS_LINUX)
set (LINKER_NAME ${LLD_PATH})
endif ()
endif ()
endif ()

if (NOT LINKER_NAME)
if (GOLD_PATH)

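The hunk above only selects LINKER_NAME; the place where it is applied is outside the shown context. As a rough sketch of how a chosen linker is typically passed to the compiler driver (an assumption, not taken from this diff; the exact flags in the repository may differ):

    if (LINKER_NAME)
        # Tell clang/gcc to use the selected linker instead of the system default.
        set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fuse-ld=${LINKER_NAME}")
        set (CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -fuse-ld=${LINKER_NAME}")
    endif ()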
@ -80,7 +80,7 @@ set(FLATBUFFERS_BINARY_DIR ${ClickHouse_BINARY_DIR}/contrib/flatbuffers)
set(FLATBUFFERS_INCLUDE_DIR ${FLATBUFFERS_SRC_DIR}/include)

# set flatbuffers CMake options
if (${USE_STATIC_LIBRARIES})
if (MAKE_STATIC_LIBRARIES)
set(FLATBUFFERS_BUILD_FLATLIB ON CACHE BOOL "Enable the build of the flatbuffers library")
set(FLATBUFFERS_BUILD_SHAREDLIB OFF CACHE BOOL "Disable the build of the flatbuffers shared library")
else ()

@ -1,6 +1,52 @@
option (USE_INTERNAL_BOOST_LIBRARY "Use internal Boost library" ${NOT_UNBUNDLED})

if (USE_INTERNAL_BOOST_LIBRARY)
if (NOT USE_INTERNAL_BOOST_LIBRARY)
# 1.70 like in contrib/boost
# 1.67 on CI
set(BOOST_VERSION 1.67)

find_package(Boost ${BOOST_VERSION} COMPONENTS
system
filesystem
iostreams
program_options
regex
)

if(Boost_INCLUDE_DIR AND Boost_FILESYSTEM_LIBRARY AND Boost_FILESYSTEM_LIBRARY AND
Boost_PROGRAM_OPTIONS_LIBRARY AND Boost_REGEX_LIBRARY AND Boost_SYSTEM_LIBRARY)

set(EXTERNAL_BOOST_FOUND 1)

add_library (_boost_headers_only INTERFACE)
add_library (boost::headers_only ALIAS _boost_headers_only)
target_include_directories (_boost_headers_only SYSTEM BEFORE INTERFACE ${Boost_INCLUDE_DIR})

add_library (_boost_filesystem INTERFACE)
add_library (_boost_iostreams INTERFACE)
add_library (_boost_program_options INTERFACE)
add_library (_boost_regex INTERFACE)
add_library (_boost_system INTERFACE)

target_link_libraries (_boost_filesystem INTERFACE ${Boost_FILESYSTEM_LIBRARY})
target_link_libraries (_boost_iostreams INTERFACE ${Boost_IOSTREAMS_LIBRARY})
target_link_libraries (_boost_program_options INTERFACE ${Boost_PROGRAM_OPTIONS_LIBRARY})
target_link_libraries (_boost_regex INTERFACE ${Boost_REGEX_LIBRARY})
target_link_libraries (_boost_system INTERFACE ${Boost_SYSTEM_LIBRARY})

add_library (boost::filesystem ALIAS _boost_filesystem)
add_library (boost::iostreams ALIAS _boost_iostreams)
add_library (boost::program_options ALIAS _boost_program_options)
add_library (boost::regex ALIAS _boost_regex)
add_library (boost::system ALIAS _boost_system)
else()
set(EXTERNAL_BOOST_FOUND 0)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system boost")
endif()
endif()

if (NOT EXTERNAL_BOOST_FOUND)
set (USE_INTERNAL_BOOST_LIBRARY 1)
set (LIBRARY_DIR ${ClickHouse_SOURCE_DIR}/contrib/boost)

# filesystem
@ -38,7 +84,7 @@ if (USE_INTERNAL_BOOST_LIBRARY)
add_library (_boost_iostreams ${SRCS_IOSTREAMS})
add_library (boost::iostreams ALIAS _boost_iostreams)
target_include_directories (_boost_iostreams PRIVATE ${LIBRARY_DIR})
target_link_libraries (_boost_iostreams PRIVATE zlib)
target_link_libraries (_boost_iostreams PRIVATE ${ZLIB_LIBRARIES})

# program_options

@ -96,38 +142,4 @@ if (USE_INTERNAL_BOOST_LIBRARY)
add_library (_boost_system ${SRCS_SYSTEM})
add_library (boost::system ALIAS _boost_system)
target_include_directories (_boost_system PRIVATE ${LIBRARY_DIR})
else ()
# 1.70 like in contrib/boost
# 1.67 on CI
set(BOOST_VERSION 1.67)

find_package(Boost ${BOOST_VERSION} COMPONENTS
system
filesystem
iostreams
program_options
regex
REQUIRED)

add_library (_boost_headers_only INTERFACE)
add_library (boost::headers_only ALIAS _boost_headers_only)
target_include_directories (_boost_headers_only SYSTEM BEFORE INTERFACE ${Boost_INCLUDE_DIR})

add_library (_boost_filesystem INTERFACE)
add_library (_boost_iostreams INTERFACE)
add_library (_boost_program_options INTERFACE)
add_library (_boost_regex INTERFACE)
add_library (_boost_system INTERFACE)

target_link_libraries (_boost_filesystem INTERFACE ${Boost_FILESYSTEM_LIBRARY})
target_link_libraries (_boost_iostreams INTERFACE ${Boost_IOSTREAMS_LIBRARY})
target_link_libraries (_boost_program_options INTERFACE ${Boost_PROGRAM_OPTIONS_LIBRARY})
target_link_libraries (_boost_regex INTERFACE ${Boost_REGEX_LIBRARY})
target_link_libraries (_boost_system INTERFACE ${Boost_SYSTEM_LIBRARY})

add_library (boost::filesystem ALIAS _boost_filesystem)
add_library (boost::iostreams ALIAS _boost_iostreams)
add_library (boost::program_options ALIAS _boost_program_options)
add_library (boost::regex ALIAS _boost_regex)
add_library (boost::system ALIAS _boost_system)
endif ()

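The boost-cmake change above exposes the same boost::* alias targets for both the system and the bundled build, so a consumer does not need to know which path was taken. A minimal, hypothetical consumer sketch (the target name example_boost_user is an assumption, not from the repository):

    # Works the same whether EXTERNAL_BOOST_FOUND was set or the bundled sources were built
    target_link_libraries (example_boost_user PRIVATE
        boost::headers_only
        boost::filesystem
        boost::program_options
    )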
@ -1,9 +1,42 @@
option (USE_INTERNAL_CCTZ "Use internal cctz library" ${NOT_UNBUNDLED})
option (USE_INTERNAL_CCTZ_LIBRARY "Use internal cctz library" ${NOT_UNBUNDLED})

if (USE_INTERNAL_CCTZ)
SET(LIBRARY_DIR ${ClickHouse_SOURCE_DIR}/contrib/cctz)
if (NOT USE_INTERNAL_CCTZ_LIBRARY)
find_library (LIBRARY_CCTZ cctz)
find_path (INCLUDE_CCTZ NAMES cctz/civil_time.h)

SET (SRCS
if (LIBRARY_CCTZ AND INCLUDE_CCTZ)
set (EXTERNAL_CCTZ_LIBRARY_FOUND 1)

set(CMAKE_REQUIRED_LIBRARIES ${LIBRARY_CCTZ})
set(CMAKE_REQUIRED_INCLUDES ${INCLUDE_CCTZ})
check_cxx_source_compiles(
"
#include <cctz/civil_time.h>
int main() {
cctz::civil_day date;
}
"
EXTERNAL_CCTZ_LIBRARY_WORKS
)

if (NOT EXTERNAL_CCTZ_LIBRARY_WORKS)
message (${RECONFIGURE_MESSAGE_LEVEL} "External cctz is not working: ${LIBRARY_CCTZ} ${INCLUDE_CCTZ}")
else()
add_library (cctz UNKNOWN IMPORTED)
set_property (TARGET cctz PROPERTY IMPORTED_LOCATION ${LIBRARY_CCTZ})
set_property (TARGET cctz PROPERTY INTERFACE_INCLUDE_DIRECTORIES ${INCLUDE_CCTZ})
endif()
else()
set (EXTERNAL_CCTZ_LIBRARY_FOUND 0)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system cctz")
endif()
endif()

if (NOT EXTERNAL_CCTZ_LIBRARY_FOUND OR NOT EXTERNAL_CCTZ_LIBRARY_WORKS)
set(USE_INTERNAL_CCTZ_LIBRARY 1)
set(LIBRARY_DIR ${ClickHouse_SOURCE_DIR}/contrib/cctz)

set (SRCS
${LIBRARY_DIR}/src/civil_time_detail.cc
${LIBRARY_DIR}/src/time_zone_fixed.cc
${LIBRARY_DIR}/src/time_zone_format.cc
@ -63,29 +96,6 @@ if (USE_INTERNAL_CCTZ)
add_dependencies(cctz tzdata)
target_link_libraries(cctz INTERFACE "-Wl,${WHOLE_ARCHIVE} $<TARGET_FILE:tzdata> -Wl,${NO_WHOLE_ARCHIVE}")
endif ()

else ()
find_library (LIBRARY_CCTZ cctz)
find_path (INCLUDE_CCTZ NAMES cctz/civil_time.h)

add_library (cctz UNKNOWN IMPORTED)
set_property (TARGET cctz PROPERTY IMPORTED_LOCATION ${LIBRARY_CCTZ})
set_property (TARGET cctz PROPERTY INTERFACE_INCLUDE_DIRECTORIES ${INCLUDE_CCTZ})

set(CMAKE_REQUIRED_LIBRARIES cctz)
check_cxx_source_compiles(
"
#include <cctz/civil_time.h>
int main() {
cctz::civil_day date;
}
"
EXTERNAL_CCTZ_WORKS
)

if (NOT EXTERNAL_CCTZ_WORKS)
message (FATAL_ERROR "cctz is unusable: ${LIBRARY_CCTZ} ${INCLUDE_CCTZ}")
endif ()
endif ()

message (STATUS "Using cctz")

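The cctz change above is one instance of the probe-then-fallback pattern used throughout these files: look for a system library, compile a tiny test against it, and fall back to the bundled sources if anything is missing. Condensed into a standalone sketch with a hypothetical library foo (all names here are placeholders, not from the repository):

    include (CheckCXXSourceCompiles)
    find_library (LIBRARY_FOO foo)
    find_path (INCLUDE_FOO NAMES foo/foo.h)
    if (LIBRARY_FOO AND INCLUDE_FOO)
        set (CMAKE_REQUIRED_LIBRARIES ${LIBRARY_FOO})
        set (CMAKE_REQUIRED_INCLUDES ${INCLUDE_FOO})
        # Probe that the system copy actually links and compiles.
        check_cxx_source_compiles ("#include <foo/foo.h>\nint main() { return 0; }" EXTERNAL_FOO_WORKS)
    endif ()
    if (NOT EXTERNAL_FOO_WORKS)
        add_subdirectory (contrib/foo) # fall back to the bundled copy
    endif ()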
@ -1,12 +1,10 @@
option (ENABLE_CURL "Enable curl" ${ENABLE_LIBRARIES})
if (NOT USE_INTERNAL_CURL)
return()
endif()

if (ENABLE_CURL)
option (USE_INTERNAL_CURL "Use internal curl library" ${NOT_UNBUNDLED})
set (LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/curl")

if (USE_INTERNAL_CURL)
set (LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/curl")

set (SRCS
set (SRCS
${LIBRARY_DIR}/lib/file.c
${LIBRARY_DIR}/lib/timeval.c
${LIBRARY_DIR}/lib/base64.c
@ -144,44 +142,38 @@ if (ENABLE_CURL)
${LIBRARY_DIR}/lib/vquic/quiche.c
${LIBRARY_DIR}/lib/vssh/libssh2.c
${LIBRARY_DIR}/lib/vssh/libssh.c
)
)

add_library (curl ${SRCS})
add_library (curl ${SRCS})

target_compile_definitions (curl PRIVATE
target_compile_definitions (curl PRIVATE
HAVE_CONFIG_H
BUILDING_LIBCURL
CURL_HIDDEN_SYMBOLS
libcurl_EXPORTS
OS="${CMAKE_SYSTEM_NAME}"
)
target_include_directories (curl PUBLIC
)
target_include_directories (curl PUBLIC
${LIBRARY_DIR}/include
${LIBRARY_DIR}/lib
. # curl_config.h
)
)

target_link_libraries (curl PRIVATE ssl)
target_link_libraries (curl PRIVATE ssl)

# The library is large - avoid bloat (XXX: is it?)
target_compile_options (curl PRIVATE -g0)
# The library is large - avoid bloat (XXX: is it?)
target_compile_options (curl PRIVATE -g0)

# find_package(CURL) compatibility for the following packages that uses
# find_package(CURL)/include(FindCURL):
# - mariadb-connector-c
# - aws-s3-cmake
# - sentry-native
set (CURL_FOUND ON CACHE BOOL "")
set (CURL_ROOT_DIR ${LIBRARY_DIR} CACHE PATH "")
set (CURL_INCLUDE_DIR ${LIBRARY_DIR}/include CACHE PATH "")
set (CURL_INCLUDE_DIRS ${LIBRARY_DIR}/include CACHE PATH "")
set (CURL_LIBRARY curl CACHE STRING "")
set (CURL_LIBRARIES ${CURL_LIBRARY} CACHE STRING "")
set (CURL_VERSION_STRING 7.67.0 CACHE STRING "")
add_library (CURL::libcurl ALIAS ${CURL_LIBRARY})
else ()
find_package (CURL REQUIRED)
endif ()
endif ()

message (STATUS "Using curl: ${CURL_INCLUDE_DIRS} : ${CURL_LIBRARIES}")
# find_package(CURL) compatibility for the following packages that uses
# find_package(CURL)/include(FindCURL):
# - mariadb-connector-c
# - aws-s3-cmake
# - sentry-native
set (CURL_FOUND ON CACHE BOOL "")
set (CURL_ROOT_DIR ${LIBRARY_DIR} CACHE PATH "")
set (CURL_INCLUDE_DIR ${LIBRARY_DIR}/include CACHE PATH "")
set (CURL_INCLUDE_DIRS ${LIBRARY_DIR}/include CACHE PATH "")
set (CURL_LIBRARY curl CACHE STRING "")
set (CURL_LIBRARIES ${CURL_LIBRARY} CACHE STRING "")
set (CURL_VERSION_STRING 7.67.0 CACHE STRING "")
add_library (CURL::libcurl ALIAS ${CURL_LIBRARY})

@ -1,13 +1,45 @@
option (ENABLE_HYPERSCAN "Enable hyperscan library" ${ENABLE_LIBRARIES})

if (NOT HAVE_SSSE3)
if (HAVE_SSSE3)
option (ENABLE_HYPERSCAN "Enable hyperscan library" ${ENABLE_LIBRARIES})
elseif(ENABLE_HYPERSCAN)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use hyperscan without SSSE3")
set (ENABLE_HYPERSCAN OFF)
endif ()

if (ENABLE_HYPERSCAN)
option (USE_INTERNAL_HYPERSCAN_LIBRARY "Use internal hyperscan library" ${NOT_UNBUNDLED})

if (NOT ENABLE_HYPERSCAN)
if (USE_INTERNAL_HYPERSCAN_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal hyperscan with ENABLE_HYPERSCAN=OFF")
endif()

add_library (hyperscan INTERFACE)
target_compile_definitions (hyperscan INTERFACE USE_HYPERSCAN=0)

message (STATUS "Not using hyperscan")
return()
endif()

option (USE_INTERNAL_HYPERSCAN_LIBRARY "Use internal hyperscan library" ${NOT_UNBUNDLED})

if (NOT USE_INTERNAL_HYPERSCAN_LIBRARY)
find_library (LIBRARY_HYPERSCAN hs)
find_path (INCLUDE_HYPERSCAN NAMES hs.h HINTS /usr/include/hs) # Ubuntu puts headers in this folder

if (LIBRARY_HYPERSCAN AND INCLUDE_HYPERSCAN)
set (EXTERNAL_HYPERSCAN_LIBRARY_FOUND 1)

add_library (hyperscan UNKNOWN IMPORTED GLOBAL)
set_target_properties (hyperscan PROPERTIES IMPORTED_LOCATION ${LIBRARY_HYPERSCAN})
set_target_properties (hyperscan PROPERTIES INTERFACE_INCLUDE_DIRECTORIES ${INCLUDE_HYPERSCAN})
set_property(TARGET hyperscan APPEND PROPERTY INTERFACE_COMPILE_DEFINITIONS USE_HYPERSCAN=1)
else ()
set (EXTERNAL_HYPERSCAN_LIBRARY_FOUND 0)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system hyperscan library")
endif ()

endif ()

if (NOT EXTERNAL_HYPERSCAN_LIBRARY_FOUND)
set (USE_INTERNAL_HYPERSCAN_LIBRARY 1)

set (LIBRARY_DIR ${ClickHouse_SOURCE_DIR}/contrib/hyperscan)

set (SRCS
@ -233,20 +265,8 @@ if (ENABLE_HYPERSCAN)
target_include_directories (hyperscan PRIVATE x86_64)
endif ()
target_link_libraries (hyperscan PRIVATE boost::headers_only)
else ()
find_library (LIBRARY_HYPERSCAN hs)
find_path (INCLUDE_HYPERSCAN NAMES hs.h HINTS /usr/include/hs) # Ubuntu puts headers in this folder

add_library (hyperscan UNKNOWN IMPORTED GLOBAL)
set_target_properties (hyperscan PROPERTIES IMPORTED_LOCATION ${LIBRARY_HYPERSCAN})
set_target_properties (hyperscan PROPERTIES INTERFACE_INCLUDE_DIRECTORIES ${INCLUDE_HYPERSCAN})
set_property(TARGET hyperscan APPEND PROPERTY INTERFACE_COMPILE_DEFINITIONS USE_HYPERSCAN=1)
endif ()

message (STATUS "Using hyperscan")
else ()
add_library (hyperscan INTERFACE)
target_compile_definitions (hyperscan INTERFACE USE_HYPERSCAN=0)

message (STATUS "Not using hyperscan")
set (USE_INTERNAL_HYPERSCAN_LIBRARY 1)
endif ()

message (STATUS "Using hyperscan")

@ -1,18 +1,72 @@
option (ENABLE_JEMALLOC "Enable jemalloc allocator" ${ENABLE_LIBRARIES})

if (SANITIZE OR NOT (ARCH_AMD64 OR ARCH_ARM) OR NOT (OS_LINUX OR OS_FREEBSD OR OS_DARWIN))
if (ENABLE_JEMALLOC)
message (${RECONFIGURE_MESSAGE_LEVEL}
"jemalloc is disabled implicitly: it doesn't work with sanitizers and can only be used with x86_64 or aarch64 on linux or freebsd.")
endif()
set (ENABLE_JEMALLOC OFF)
message (STATUS "jemalloc is disabled implicitly: it doesn't work with sanitizers and can only be used with x86_64 or aarch64 on linux or freebsd.")
else()
option (ENABLE_JEMALLOC "Enable jemalloc allocator" ${ENABLE_LIBRARIES})
endif ()

if (ENABLE_JEMALLOC)
if (NOT OS_LINUX)
message (WARNING "jemalloc support on non-linux is EXPERIMENTAL")
if (NOT ENABLE_JEMALLOC)
if(USE_INTERNAL_JEMALLOC_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal jemalloc with ENABLE_JEMALLOC=OFF")
endif()

option (USE_INTERNAL_JEMALLOC "Use internal jemalloc library" ${NOT_UNBUNDLED})
add_library(jemalloc INTERFACE)
target_compile_definitions(jemalloc INTERFACE USE_JEMALLOC=0)

message (STATUS "Not using jemalloc")
return()
endif ()

if (NOT OS_LINUX)
message (WARNING "jemalloc support on non-linux is EXPERIMENTAL")
endif()

option (USE_INTERNAL_JEMALLOC_LIBRARY "Use internal jemalloc library" ${NOT_UNBUNDLED})

if (NOT USE_INTERNAL_JEMALLOC_LIBRARY)
find_library(LIBRARY_JEMALLOC jemalloc)
find_path(INCLUDE_JEMALLOC jemalloc/jemalloc.h)

if (LIBRARY_JEMALLOC AND INCLUDE_JEMALLOC)
set(EXTERNAL_JEMALLOC_LIBRARY_FOUND 1)

set(THREADS_PREFER_PTHREAD_FLAG ON)
find_package(Threads)

set (CMAKE_REQUIRED_LIBRARIES ${LIBRARY_JEMALLOC} Threads::Threads "dl")
set (CMAKE_REQUIRED_INCLUDES ${INCLUDE_JEMALLOC})
check_cxx_source_compiles (
"
#include <jemalloc/jemalloc.h>

int main() {
free(mallocx(1, 0));
}
"
EXTERNAL_JEMALLOC_LIBRARY_WORKS
)

if (EXTERNAL_JEMALLOC_LIBRARY_WORKS)
add_library (jemalloc STATIC IMPORTED)
set_property (TARGET jemalloc PROPERTY IMPORTED_LOCATION ${LIBRARY_JEMALLOC})
set_property (TARGET jemalloc PROPERTY INTERFACE_INCLUDE_DIRECTORIES ${INCLUDE_JEMALLOC})
set_property (TARGET jemalloc PROPERTY INTERFACE_LINK_LIBRARIES Threads::Threads dl)
else()
message (${RECONFIGURE_MESSAGE_LEVEL} "External jemalloc is unusable: ${LIBRARY_JEMALLOC} ${INCLUDE_JEMALLOC}")
endif ()

else()
set(EXTERNAL_JEMALLOC_LIBRARY_FOUND 0)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system jemalloc")
endif()
endif ()

if (NOT EXTERNAL_JEMALLOC_LIBRARY_FOUND OR NOT EXTERNAL_JEMALLOC_LIBRARY_WORKS)
set(USE_INTERNAL_JEMALLOC_LIBRARY 1)

if (USE_INTERNAL_JEMALLOC)
if (OS_LINUX)
# ThreadPool select job randomly, and there can be some threads that had been
# performed some memory heavy task before and will be inactive for some time,
@ -93,7 +147,7 @@ if (ENABLE_JEMALLOC)
elseif (OS_DARWIN)
set (JEMALLOC_INCLUDE_PREFIX "include_darwin")
else ()
message (FATAL_ERROR "This OS is not supported")
message (FATAL_ERROR "internal jemalloc: This OS is not supported")
endif ()
# ARCH_
if (ARCH_AMD64)
@ -101,7 +155,7 @@ if (ENABLE_JEMALLOC)
elseif (ARCH_ARM)
set(JEMALLOC_INCLUDE_PREFIX "${JEMALLOC_INCLUDE_PREFIX}_aarch64")
else ()
message (FATAL_ERROR "This arch is not supported")
message (FATAL_ERROR "internal jemalloc: This arch is not supported")
endif ()

configure_file(${JEMALLOC_INCLUDE_PREFIX}/jemalloc/internal/jemalloc_internal_defs.h.in
@ -123,45 +177,14 @@ if (ENABLE_JEMALLOC)
target_compile_options(jemalloc PRIVATE -Wno-redundant-decls)
# for RTLD_NEXT
target_compile_options(jemalloc PRIVATE -D_GNU_SOURCE)
else ()
find_library(LIBRARY_JEMALLOC jemalloc)
find_path(INCLUDE_JEMALLOC jemalloc/jemalloc.h)

set(THREADS_PREFER_PTHREAD_FLAG ON)
find_package(Threads REQUIRED)
set (USE_INTERNAL_JEMALLOC_LIBRARY 1)
endif ()

add_library (jemalloc STATIC IMPORTED)
set_property (TARGET jemalloc PROPERTY IMPORTED_LOCATION ${LIBRARY_JEMALLOC})
set_property (TARGET jemalloc PROPERTY INTERFACE_INCLUDE_DIRECTORIES ${INCLUDE_JEMALLOC})
set_property (TARGET jemalloc PROPERTY INTERFACE_LINK_LIBRARIES Threads::Threads dl)

set (CMAKE_REQUIRED_LIBRARIES jemalloc)
check_cxx_source_compiles (
"
#include <jemalloc/jemalloc.h>

int main() {
free(mallocx(1, 0));
}
"
EXTERNAL_JEMALLOC_WORKS
)

if (NOT EXTERNAL_JEMALLOC_WORKS)
message (FATAL_ERROR "jemalloc is unusable: ${LIBRARY_JEMALLOC} ${INCLUDE_JEMALLOC}")
endif ()
endif ()

set_property(TARGET jemalloc APPEND PROPERTY INTERFACE_COMPILE_DEFINITIONS USE_JEMALLOC=1)
if (MAKE_STATIC_LIBRARIES)
set_property(TARGET jemalloc APPEND PROPERTY INTERFACE_COMPILE_DEFINITIONS USE_JEMALLOC=1)
if (MAKE_STATIC_LIBRARIES)
# To detect whether we need to register jemalloc for osx as default zone.
set_property(TARGET jemalloc APPEND PROPERTY INTERFACE_COMPILE_DEFINITIONS BUNDLED_STATIC_JEMALLOC=1)
endif()
endif()

message (STATUS "Using jemalloc")
else ()
add_library(jemalloc INTERFACE)
target_compile_definitions(jemalloc INTERFACE USE_JEMALLOC=0)

message (STATUS "Not using jemalloc")
endif ()
message (STATUS "Using jemalloc")

@ -1,35 +1,40 @@
option (ENABLE_CPUID "Enable libcpuid library (only internal)" ${ENABLE_LIBRARIES})
if (NOT ARCH_ARM)
option (ENABLE_CPUID "Enable libcpuid library (only internal)" ${ENABLE_LIBRARIES})
endif()

if (ARCH_ARM)
if (ARCH_ARM AND ENABLE_CPUID)
message (${RECONFIGURE_MESSAGE_LEVEL} "cpuid is not supported on ARM")
set (ENABLE_CPUID 0)
endif ()

if (ENABLE_CPUID)
set (LIBRARY_DIR ${ClickHouse_SOURCE_DIR}/contrib/libcpuid)

set (SRCS
${LIBRARY_DIR}/libcpuid/asm-bits.c
${LIBRARY_DIR}/libcpuid/cpuid_main.c
${LIBRARY_DIR}/libcpuid/libcpuid_util.c
${LIBRARY_DIR}/libcpuid/msrdriver.c
${LIBRARY_DIR}/libcpuid/rdmsr.c
${LIBRARY_DIR}/libcpuid/rdtsc.c
${LIBRARY_DIR}/libcpuid/recog_amd.c
${LIBRARY_DIR}/libcpuid/recog_intel.c
)

add_library (cpuid ${SRCS})

target_include_directories (cpuid SYSTEM PUBLIC ${LIBRARY_DIR})
target_compile_definitions (cpuid PUBLIC USE_CPUID=1)
target_compile_definitions (cpuid PRIVATE VERSION="v0.4.1")
if (COMPILER_CLANG)
target_compile_options (cpuid PRIVATE -Wno-reserved-id-macro)
endif ()

message (STATUS "Using cpuid")
else ()
if (NOT ENABLE_CPUID)
add_library (cpuid INTERFACE)

target_compile_definitions (cpuid INTERFACE USE_CPUID=0)

return()
endif()

set (LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/libcpuid")

set (SRCS
"${LIBRARY_DIR}/libcpuid/asm-bits.c"
"${LIBRARY_DIR}/libcpuid/cpuid_main.c"
"${LIBRARY_DIR}/libcpuid/libcpuid_util.c"
"${LIBRARY_DIR}/libcpuid/msrdriver.c"
"${LIBRARY_DIR}/libcpuid/rdmsr.c"
"${LIBRARY_DIR}/libcpuid/rdtsc.c"
"${LIBRARY_DIR}/libcpuid/recog_amd.c"
"${LIBRARY_DIR}/libcpuid/recog_intel.c"
)

add_library (cpuid ${SRCS})

target_include_directories (cpuid SYSTEM PUBLIC "${LIBRARY_DIR}")
target_compile_definitions (cpuid PUBLIC USE_CPUID=1)
target_compile_definitions (cpuid PRIVATE VERSION="v0.4.1")
if (COMPILER_CLANG)
target_compile_options (cpuid PRIVATE -Wno-reserved-id-macro)
endif ()

message (STATUS "Using cpuid")

@ -1,4 +1,14 @@
if (NOT USE_INTERNAL_PROTOBUF_LIBRARY)
if (ENABLE_PROTOBUF AND NOT USE_INTERNAL_PROTOBUF_LIBRARY)
option(PROTOBUF_OLD_ABI_COMPAT "Set to ON for compatibility with external protobuf which was compiled with the old C++ ABI" OFF)
endif()

if (PROTOBUF_OLD_ABI_COMPAT)
if (NOT ENABLE_PROTOBUF OR USE_INTERNAL_PROTOBUF_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "PROTOBUF_OLD_ABI_COMPAT option is ignored")
endif()
endif()

if (NOT USE_INTERNAL_PROTOBUF_LIBRARY AND PROTOBUF_OLD_ABI_COMPAT)
# compatible with protobuf which was compiled with the old C++ ABI
set(CMAKE_CXX_FLAGS "-D_GLIBCXX_USE_CXX11_ABI=0")
set(CMAKE_C_FLAGS "")
@ -7,7 +17,7 @@ if (NOT USE_INTERNAL_PROTOBUF_LIBRARY)
endif ()
endif()

SET(WITH_KERBEROS false)
set(WITH_KERBEROS false)
# project and source dir
set(HDFS3_ROOT_DIR ${ClickHouse_SOURCE_DIR}/contrib/libhdfs3)
set(HDFS3_SOURCE_DIR ${HDFS3_ROOT_DIR}/src)
@ -206,7 +216,7 @@ target_link_libraries(hdfs3 PRIVATE ${LIBGSASL_LIBRARY})
if (WITH_KERBEROS)
target_link_libraries(hdfs3 PRIVATE ${KERBEROS_LIBRARIES})
endif()
target_link_libraries(hdfs3 PRIVATE ${LIBXML2_LIBRARY})
target_link_libraries(hdfs3 PRIVATE ${LIBXML2_LIBRARIES})

# inherit from parent cmake
target_include_directories(hdfs3 PRIVATE ${Protobuf_INCLUDE_DIR})

@ -6,7 +6,7 @@ set(SRCS
# ${RDKAFKA_SOURCE_DIR}/lz4.c
# ${RDKAFKA_SOURCE_DIR}/lz4frame.c
# ${RDKAFKA_SOURCE_DIR}/lz4hc.c
# ${RDKAFKA_SOURCE_DIR}/rdxxhash.c
${RDKAFKA_SOURCE_DIR}/rdxxhash.c
# ${RDKAFKA_SOURCE_DIR}/regexp.c
${RDKAFKA_SOURCE_DIR}/rdaddr.c
${RDKAFKA_SOURCE_DIR}/rdavl.c

@ -1,13 +1,30 @@
option (USE_INTERNAL_LZ4_LIBRARY "Use internal lz4 library" ${NOT_UNBUNDLED})

if (USE_INTERNAL_LZ4_LIBRARY)
set (LIBRARY_DIR ${ClickHouse_SOURCE_DIR}/contrib/lz4)
if (NOT USE_INTERNAL_LZ4_LIBRARY)
find_library (LIBRARY_LZ4 lz4)
find_path (INCLUDE_LZ4 lz4.h)

if (LIBRARY_LZ4 AND INCLUDE_LZ4)
set(EXTERNAL_LZ4_LIBRARY_FOUND 1)
add_library (lz4 UNKNOWN IMPORTED)
set_property (TARGET lz4 PROPERTY IMPORTED_LOCATION ${LIBRARY_LZ4})
set_property (TARGET lz4 PROPERTY INTERFACE_INCLUDE_DIRECTORIES ${INCLUDE_LZ4})
set_property (TARGET lz4 APPEND PROPERTY INTERFACE_COMPILE_DEFINITIONS USE_XXHASH=0)
else()
set(EXTERNAL_LZ4_LIBRARY_FOUND 0)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system lz4")
endif()
endif()

if (NOT EXTERNAL_LZ4_LIBRARY_FOUND)
set (USE_INTERNAL_LZ4_LIBRARY 1)
set (LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/lz4")

set (SRCS
${LIBRARY_DIR}/lib/lz4.c
${LIBRARY_DIR}/lib/lz4hc.c
${LIBRARY_DIR}/lib/lz4frame.c
${LIBRARY_DIR}/lib/xxhash.c
"${LIBRARY_DIR}/lib/lz4.c"
"${LIBRARY_DIR}/lib/lz4hc.c"
"${LIBRARY_DIR}/lib/lz4frame.c"
"${LIBRARY_DIR}/lib/xxhash.c"
)

add_library (lz4 ${SRCS})
@ -17,12 +34,4 @@ if (USE_INTERNAL_LZ4_LIBRARY)
target_compile_options (lz4 PRIVATE -fno-sanitize=undefined)
endif ()
target_include_directories(lz4 PUBLIC ${LIBRARY_DIR}/lib)
else ()
find_library (LIBRARY_LZ4 lz4)
find_path (INCLUDE_LZ4 lz4.h)

add_library (lz4 UNKNOWN IMPORTED)
set_property (TARGET lz4 PROPERTY IMPORTED_LOCATION ${LIBRARY_LZ4})
set_property (TARGET lz4 PROPERTY INTERFACE_INCLUDE_DIRECTORIES ${INCLUDE_LZ4})
set_property (TARGET lz4 APPEND PROPERTY INTERFACE_COMPILE_DEFINITIONS USE_XXHASH=0)
endif ()

2
contrib/poco
vendored
@ -1 +1 @@
Subproject commit 74c93443342f6028fa6402057684733b316aa737
Subproject commit 297fc905e166392156f83b96aaa5f44e8a6a35c4
@ -1,11 +1,3 @@
option (USE_INTERNAL_POCO_LIBRARY "Use internal Poco library" ${NOT_UNBUNDLED})

if (USE_INTERNAL_POCO_LIBRARY)
set (LIBRARY_DIR ${ClickHouse_SOURCE_DIR}/contrib/poco)
else ()
find_path (ROOT_DIR NAMES Foundation/include/Poco/Poco.h include/Poco/Poco.h)
endif ()

add_subdirectory (Crypto)
add_subdirectory (Data)
add_subdirectory (Data/ODBC)

@ -1,4 +1,8 @@
if (ENABLE_ODBC)
if (NOT TARGET unixodbc)
message(FATAL_ERROR "Configuration error: unixodbc is not a target")
endif()

if (USE_INTERNAL_POCO_LIBRARY)
set (SRCS
${LIBRARY_DIR}/Data/ODBC/src/Binder.cpp

@ -222,7 +222,7 @@ if (USE_INTERNAL_POCO_LIBRARY)
POCO_OS_FAMILY_UNIX
)
target_include_directories (_poco_foundation SYSTEM PUBLIC ${LIBRARY_DIR}/Foundation/include)
target_link_libraries (_poco_foundation PRIVATE Poco::Foundation::PCRE zlib)
target_link_libraries (_poco_foundation PRIVATE Poco::Foundation::PCRE ${ZLIB_LIBRARIES})
else ()
add_library (Poco::Foundation UNKNOWN IMPORTED GLOBAL)

@ -1,5 +1,5 @@
set(protobuf_SOURCE_DIR ${ClickHouse_SOURCE_DIR}/contrib/protobuf)
set(protobuf_BINARY_DIR ${ClickHouse_BINARY_DIR}/contrib/protobuf)
set(protobuf_SOURCE_DIR "${ClickHouse_SOURCE_DIR}/contrib/protobuf")
set(protobuf_BINARY_DIR "${ClickHouse_BINARY_DIR}/contrib/protobuf")

set(protobuf_WITH_ZLIB 0 CACHE INTERNAL "" FORCE) # actually will use zlib, but skip find
set(protobuf_BUILD_TESTS OFF CACHE INTERNAL "" FORCE)
@ -10,4 +10,4 @@ else ()
set(protobuf_BUILD_SHARED_LIBS ON CACHE INTERNAL "" FORCE)
endif ()

add_subdirectory(${protobuf_SOURCE_DIR}/cmake ${protobuf_BINARY_DIR})
add_subdirectory("${protobuf_SOURCE_DIR}/cmake" "${protobuf_BINARY_DIR}")

@ -1,35 +1,26 @@
option (ENABLE_REPLXX "Enable replxx support" ${ENABLE_LIBRARIES})

if (ENABLE_REPLXX)
option (USE_INTERNAL_REPLXX "Use internal replxx library" ${NOT_UNBUNDLED})
if (NOT ENABLE_REPLXX)
if (USE_INTERNAL_REPLXX_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal replxx with ENABLE_REPLXX=OFF")
endif()

if (USE_INTERNAL_REPLXX)
set (LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/replxx")
add_library(replxx INTERFACE)
target_compile_definitions(replxx INTERFACE USE_REPLXX=0)

set(SRCS
${LIBRARY_DIR}/src/conversion.cxx
${LIBRARY_DIR}/src/ConvertUTF.cpp
${LIBRARY_DIR}/src/escape.cxx
${LIBRARY_DIR}/src/history.cxx
${LIBRARY_DIR}/src/io.cxx
${LIBRARY_DIR}/src/prompt.cxx
${LIBRARY_DIR}/src/replxx_impl.cxx
${LIBRARY_DIR}/src/replxx.cxx
${LIBRARY_DIR}/src/util.cxx
${LIBRARY_DIR}/src/wcwidth.cpp
)
message (STATUS "Not using replxx (Beware! Runtime fallback to readline is possible!)")
return()
endif()

add_library (replxx ${SRCS})
target_include_directories(replxx SYSTEM PUBLIC ${LIBRARY_DIR}/include)
else ()
option (USE_INTERNAL_REPLXX_LIBRARY "Use internal replxx library (Experimental: set to OFF on your own risk)" ON)

if (NOT USE_INTERNAL_REPLXX_LIBRARY)
find_library(LIBRARY_REPLXX NAMES replxx replxx-static)
find_path(INCLUDE_REPLXX replxx.hxx)

add_library(replxx UNKNOWN IMPORTED)
set_property(TARGET replxx PROPERTY IMPORTED_LOCATION ${LIBRARY_REPLXX})
target_include_directories(replxx SYSTEM PUBLIC ${INCLUDE_REPLXX})

set(CMAKE_REQUIRED_LIBRARIES replxx)
if (LIBRARY_REPLXX AND INCLUDE_REPLXX)
set(CMAKE_REQUIRED_LIBRARIES ${LIBRARY_REPLXX})
set(CMAKE_REQUIRED_INCLUDES ${INCLUDE_REPLXX})
check_cxx_source_compiles(
"
#include <replxx.hxx>
@ -41,20 +32,43 @@ if (ENABLE_REPLXX)
)

if (NOT EXTERNAL_REPLXX_WORKS)
message (FATAL_ERROR "replxx is unusable: ${LIBRARY_REPLXX} ${INCLUDE_REPLXX}")
endif ()
endif ()
message (${RECONFIGURE_MESSAGE_LEVEL} "replxx is unusable: ${LIBRARY_REPLXX} ${INCLUDE_REPLXX}")
else()
add_library(replxx UNKNOWN IMPORTED)
set_property(TARGET replxx PROPERTY IMPORTED_LOCATION ${LIBRARY_REPLXX})
target_include_directories(replxx SYSTEM PUBLIC ${INCLUDE_REPLXX})
endif()
else()
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system replxx")
endif()
endif()

if (COMPILER_CLANG)
target_compile_options(replxx PRIVATE -Wno-documentation)
endif ()

target_compile_definitions(replxx PUBLIC USE_REPLXX=1)
if (NOT LIBRARY_REPLXX OR NOT INCLUDE_REPLXX OR NOT EXTERNAL_REPLXX_WORKS)
set(USE_INTERNAL_REPLXX_LIBRARY 1)
set (LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/replxx")

message (STATUS "Using replxx")
else ()
add_library(replxx INTERFACE)
target_compile_definitions(replxx INTERFACE USE_REPLXX=0)
set(SRCS
"${LIBRARY_DIR}/src/conversion.cxx"
"${LIBRARY_DIR}/src/ConvertUTF.cpp"
"${LIBRARY_DIR}/src/escape.cxx"
"${LIBRARY_DIR}/src/history.cxx"
"${LIBRARY_DIR}/src/io.cxx"
"${LIBRARY_DIR}/src/prompt.cxx"
"${LIBRARY_DIR}/src/replxx_impl.cxx"
"${LIBRARY_DIR}/src/replxx.cxx"
"${LIBRARY_DIR}/src/util.cxx"
"${LIBRARY_DIR}/src/wcwidth.cpp"
)

message (STATUS "Not using replxx (Beware! Runtime fallback to readline is possible!)")
add_library (replxx ${SRCS})
target_include_directories(replxx SYSTEM PUBLIC ${LIBRARY_DIR}/include)
endif ()

if (COMPILER_CLANG)
target_compile_options(replxx PRIVATE -Wno-documentation)
endif ()

target_compile_definitions(replxx PUBLIC USE_REPLXX=1)

message (STATUS "Using replxx")

@ -1,18 +1,12 @@
option (ENABLE_ODBC "Enable ODBC library" ${ENABLE_LIBRARIES})
if (NOT USE_INTERNAL_ODBC_LIBRARY)
return()
endif()

if (NOT OS_LINUX)
set (ENABLE_ODBC OFF CACHE INTERNAL "")
endif ()
set (LIBRARY_DIR ${ClickHouse_SOURCE_DIR}/contrib/unixodbc)

if (ENABLE_ODBC)
option (USE_INTERNAL_ODBC_LIBRARY "Use internal ODBC library" ${NOT_UNBUNDLED})
# ltdl

if (USE_INTERNAL_ODBC_LIBRARY)
set (LIBRARY_DIR ${ClickHouse_SOURCE_DIR}/contrib/unixodbc)

# ltdl

set (SRCS_LTDL
set (SRCS_LTDL
# This file is generated by 'libtool' inside libltdl directory and then removed.
linux_x86_64/libltdl/libltdlcS.c

@ -24,23 +18,23 @@ if (ENABLE_ODBC)
${LIBRARY_DIR}/libltdl/lt_error.c
${LIBRARY_DIR}/libltdl/loaders/dlopen.c
${LIBRARY_DIR}/libltdl/loaders/preopen.c
)
)

add_library (ltdl ${SRCS_LTDL})
add_library (ltdl ${SRCS_LTDL})

target_include_directories(ltdl
target_include_directories(ltdl
PRIVATE
linux_x86_64/libltdl
PUBLIC
${LIBRARY_DIR}/libltdl
${LIBRARY_DIR}/libltdl/libltdl
)
target_compile_definitions(ltdl PRIVATE -DHAVE_CONFIG_H -DLTDL -DLTDLOPEN=libltdlc)
target_compile_options(ltdl PRIVATE -Wno-constant-logical-operand -Wno-unknown-warning-option -O2)
)
target_compile_definitions(ltdl PRIVATE -DHAVE_CONFIG_H -DLTDL -DLTDLOPEN=libltdlc)
target_compile_options(ltdl PRIVATE -Wno-constant-logical-operand -Wno-unknown-warning-option -O2)

# odbc
# odbc

set (SRCS
set (SRCS
${LIBRARY_DIR}/DriverManager/__attribute.c
${LIBRARY_DIR}/DriverManager/__connection.c
${LIBRARY_DIR}/DriverManager/__handles.c
@ -273,23 +267,23 @@ if (ENABLE_ODBC)
${LIBRARY_DIR}/odbcinst/SQLWriteDSNToIni.c
${LIBRARY_DIR}/odbcinst/SQLWriteFileDSN.c
${LIBRARY_DIR}/odbcinst/SQLWritePrivateProfileString.c
)
)

add_library (unixodbc ${SRCS})
add_library (unixodbc ${SRCS})

target_link_libraries (unixodbc PRIVATE ltdl)
target_link_libraries (unixodbc PRIVATE ltdl)

# SYSTEM_FILE_PATH was changed to /etc
# SYSTEM_FILE_PATH was changed to /etc

target_include_directories (unixodbc
target_include_directories (unixodbc
PRIVATE
linux_x86_64/private
PUBLIC
linux_x86_64
${LIBRARY_DIR}/include
)
target_compile_definitions (unixodbc PRIVATE -DHAVE_CONFIG_H)
target_compile_options (unixodbc
)
target_compile_definitions (unixodbc PRIVATE -DHAVE_CONFIG_H)
target_compile_options (unixodbc
PRIVATE
-Wno-dangling-else
-Wno-parentheses
@ -297,22 +291,5 @@ if (ENABLE_ODBC)
-Wno-unknown-warning-option
-Wno-reserved-id-macro
-O2
)
else ()
add_library (unixodbc UNKNOWN IMPORTED)

find_library (LIBRARY_ODBC unixodbc)
find_path (INCLUDE_ODBC sql.h)
set_target_properties (unixodbc PROPERTIES IMPORTED_LOCATION ${LIBRARY_ODBC})
set_target_properties (unixodbc PROPERTIES INTERFACE_INCLUDE_DIRECTORIES ${INCLUDE_ODBC})
endif ()

target_compile_definitions (unixodbc INTERFACE USE_ODBC=1)

message (STATUS "Using unixodbc")
else ()
add_library (unixodbc INTERFACE)
target_compile_definitions (unixodbc INTERFACE USE_ODBC=0)

message (STATUS "Not using unixodbc")
endif ()
)
target_compile_definitions (unixodbc INTERFACE USE_ODBC=1)

2
contrib/zlib-ng
vendored
@ -1 +1 @@
Subproject commit bba56a73be249514acfbc7d49aa2a68994dad8ab
Subproject commit 6fd1846c8b8f59436fe2dd752d0f316ddbb64df6
@ -1,12 +1,20 @@
FROM ubuntu:19.10

RUN apt-get --allow-unauthenticated update -y && apt-get install --yes wget gnupg
RUN wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | apt-key add -
RUN echo "deb [trusted=yes] http://apt.llvm.org/eoan/ llvm-toolchain-eoan-10 main" >> /etc/apt/sources.list
ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=10

RUN apt-get update -y \
    && env DEBIAN_FRONTEND=noninteractive \
        apt-get install --yes --no-install-recommends \
RUN apt-get update \
    && apt-get install ca-certificates lsb-release wget gnupg apt-transport-https \
        --yes --no-install-recommends --verbose-versions \
    && export LLVM_PUBKEY_HASH="bda960a8da687a275a2078d43c111d66b1c6a893a3275271beedf266c1ff4a0cdecb429c7a5cccf9f486ea7aa43fd27f" \
    && wget -O /tmp/llvm-snapshot.gpg.key https://apt.llvm.org/llvm-snapshot.gpg.key \
    && echo "${LLVM_PUBKEY_HASH} /tmp/llvm-snapshot.gpg.key" | sha384sum -c \
    && apt-key add /tmp/llvm-snapshot.gpg.key \
    && export CODENAME="$(lsb_release --codename --short | tr 'A-Z' 'a-z')" \
    && echo "deb [trusted=yes] http://apt.llvm.org/${CODENAME}/ llvm-toolchain-${CODENAME}-${LLVM_VERSION} main" >> \
        /etc/apt/sources.list

RUN apt-get update \
    && apt-get install \
        bash \
        ccache \
        cmake \
@ -21,13 +29,13 @@ RUN apt-get update -y \
        python-lxml \
        python-requests \
        python-termcolor \
        sudo \
        tzdata \
        llvm-10 \
        clang-10 \
        clang-tidy-10 \
        lld-10 \
        lldb-10
        llvm-${LLVM_VERSION} \
        clang-${LLVM_VERSION} \
        clang-tidy-${LLVM_VERSION} \
        lld-${LLVM_VERSION} \
        lldb-${LLVM_VERSION} \
        --yes --no-install-recommends

COPY build.sh /

@ -2,6 +2,13 @@
"docker/packager/deb": {
"name": "yandex/clickhouse-deb-builder",
"dependent": [
<<<<<<< HEAD
=======
"docker/test/stateless",
"docker/test/stateless_with_coverage",
"docker/test/stateless_pytest",
"docker/test/coverage",
>>>>>>> origin
"docker/packager/unbundled"
]
},
@ -49,6 +56,10 @@
"name": "yandex/clickhouse-performance-comparison",
"dependent": []
},
"docker/test/pvs": {
"name": "yandex/clickhouse-pvs-test",
"dependent": []
},
"docker/test/stateful": {
"name": "yandex/clickhouse-stateful-test",
"dependent": [

@ -1,35 +1,41 @@
# docker build -t yandex/clickhouse-binary-builder .
FROM ubuntu:19.10

RUN apt-get --allow-unauthenticated update -y && apt-get install --yes wget gnupg
RUN wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | apt-key add -
RUN echo "deb [trusted=yes] http://apt.llvm.org/eoan/ llvm-toolchain-eoan-10 main" >> /etc/apt/sources.list
ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=10

RUN apt-get --allow-unauthenticated update -y \
    && env DEBIAN_FRONTEND=noninteractive \
        apt-get --allow-unauthenticated install --yes --no-install-recommends \
RUN apt-get update \
    && apt-get install ca-certificates lsb-release wget gnupg apt-transport-https \
        --yes --no-install-recommends --verbose-versions \
    && export LLVM_PUBKEY_HASH="bda960a8da687a275a2078d43c111d66b1c6a893a3275271beedf266c1ff4a0cdecb429c7a5cccf9f486ea7aa43fd27f" \
    && wget -O /tmp/llvm-snapshot.gpg.key https://apt.llvm.org/llvm-snapshot.gpg.key \
    && echo "${LLVM_PUBKEY_HASH} /tmp/llvm-snapshot.gpg.key" | sha384sum -c \
    && apt-key add /tmp/llvm-snapshot.gpg.key \
    && export CODENAME="$(lsb_release --codename --short | tr 'A-Z' 'a-z')" \
    && echo "deb [trusted=yes] http://apt.llvm.org/${CODENAME}/ llvm-toolchain-${CODENAME}-${LLVM_VERSION} main" >> \
        /etc/apt/sources.list

# initial packages
RUN apt-get update \
    && apt-get install \
        bash \
        fakeroot \
        ccache \
        curl \
        software-properties-common \
        gnupg \
        apt-transport-https \
        ca-certificates
        --yes --no-install-recommends

RUN apt-get update -y \
    && env DEBIAN_FRONTEND=noninteractive \
        apt-get install --yes --no-install-recommends \
RUN apt-get update \
    && apt-get install \
        bash \
        cmake \
        ccache \
        curl \
        gcc-9 \
        g++-9 \
        llvm-10 \
        clang-10 \
        lld-10 \
        clang-tidy-10 \
        llvm-${LLVM_VERSION} \
        clang-${LLVM_VERSION} \
        lld-${LLVM_VERSION} \
        clang-tidy-${LLVM_VERSION} \
        clang-9 \
        lld-9 \
        clang-tidy-9 \
@ -50,13 +56,14 @@ RUN apt-get update -y \
        gdb \
        rename \
        wget \
        build-essential
        build-essential \
        --yes --no-install-recommends

# This symlink required by gcc to find lld compiler
RUN ln -s /usr/bin/lld-10 /usr/bin/ld.lld
RUN ln -s /usr/bin/lld-${LLVM_VERSION} /usr/bin/ld.lld

ENV CC=clang-10
ENV CXX=clang++-10
ENV CC=clang-${LLVM_VERSION}
ENV CXX=clang++-${LLVM_VERSION}

# libtapi is required to support .tbh format from recent MacOS SDKs
RUN git clone https://github.com/tpoechtrager/apple-libtapi.git \

@ -1,30 +1,43 @@
# docker build -t yandex/clickhouse-deb-builder .
FROM ubuntu:20.04

<<<<<<< HEAD
RUN apt-get --allow-unauthenticated update -y && apt-get install --yes wget gnupg
RUN wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | apt-key add -
RUN echo "deb [trusted=yes] http://apt.llvm.org/focal/ llvm-toolchain-focal-11 main" >> /etc/apt/sources.list
=======
ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=10

RUN apt-get update \
    && apt-get install ca-certificates lsb-release wget gnupg apt-transport-https \
        --yes --no-install-recommends --verbose-versions \
    && export LLVM_PUBKEY_HASH="bda960a8da687a275a2078d43c111d66b1c6a893a3275271beedf266c1ff4a0cdecb429c7a5cccf9f486ea7aa43fd27f" \
    && wget -O /tmp/llvm-snapshot.gpg.key https://apt.llvm.org/llvm-snapshot.gpg.key \
    && echo "${LLVM_PUBKEY_HASH} /tmp/llvm-snapshot.gpg.key" | sha384sum -c \
    && apt-key add /tmp/llvm-snapshot.gpg.key \
    && export CODENAME="$(lsb_release --codename --short | tr 'A-Z' 'a-z')" \
    && echo "deb [trusted=yes] http://apt.llvm.org/${CODENAME}/ llvm-toolchain-${CODENAME}-${LLVM_VERSION} main" >> \
        /etc/apt/sources.list
>>>>>>> origin

# initial packages
RUN apt-get --allow-unauthenticated update -y \
    && env DEBIAN_FRONTEND=noninteractive \
        apt-get --allow-unauthenticated install --yes --no-install-recommends \
RUN apt-get update \
    && apt-get install \
        bash \
        fakeroot \
        ccache \
        curl \
        software-properties-common \
        gnupg \
        apt-transport-https \
        ca-certificates
        --yes --no-install-recommends

# Special dpkg-deb (https://github.com/ClickHouse-Extras/dpkg) version which is able
# to compress files using pigz (https://zlib.net/pigz/) instead of gzip.
# Significantly increase deb packaging speed and compatible with old systems
RUN curl -O https://clickhouse-builds.s3.yandex.net/utils/1/dpkg-deb
RUN chmod +x dpkg-deb
RUN cp dpkg-deb /usr/bin
RUN curl -O https://clickhouse-builds.s3.yandex.net/utils/1/dpkg-deb \
    && chmod +x dpkg-deb \
    && cp dpkg-deb /usr/bin

<<<<<<< HEAD
RUN apt-get --allow-unauthenticated update -y \
    && env DEBIAN_FRONTEND=noninteractive \
        apt-get --allow-unauthenticated install --yes --no-install-recommends \
@ -56,10 +69,93 @@ RUN apt-get --allow-unauthenticated update -y \
        gdb \
        moreutils \
        pigz
=======
ENV APACHE_PUBKEY_HASH="ec52a838a734b9d5e09d95c0167b09bf1af9565f814d4d5b3785f66fd5b53dd01c918d9ec008cb58e843324dec523d26"

RUN export CODENAME="$(lsb_release --codename --short | tr 'A-Z' 'a-z')" \
    && wget -O /tmp/arrow-keyring.deb "https://apache.bintray.com/arrow/ubuntu/apache-arrow-archive-keyring-latest-${CODENAME}.deb" \
    && echo "${APACHE_PUBKEY_HASH} /tmp/arrow-keyring.deb" | sha384sum -c \
    && dpkg -i /tmp/arrow-keyring.deb
>>>>>>> origin

# Libraries from OS are only needed to test the "unbundled" build (this is not used in production).
RUN apt-get update \
    && apt-get install \
        gcc-9 \
        g++-9 \
        llvm-${LLVM_VERSION} \
        llvm-9-dev \
        llvm-${LLVM_VERSION}-dev \
        lld-9 \
        lld-${LLVM_VERSION} \
        clang-tidy-${LLVM_VERSION} \
        clang-9 \
        clang-${LLVM_VERSION} \
        clang-tidy-9 \
        libssl-dev \
        libicu-dev \
        gperf \
        ninja-build \
        perl \
        pkg-config \
        devscripts \
        debhelper \
        git \
        libc++-dev \
        libc++abi-dev \
        libboost-program-options-dev \
        libboost-system-dev \
        libboost-filesystem-dev \
        libboost-thread-dev \
        libboost-iostreams-dev \
        libboost-regex-dev \
        zlib1g-dev \
        liblz4-dev \
        libdouble-conversion-dev \
        libxml2-dev \
        librdkafka-dev \
        libgoogle-perftools-dev \
        libzstd-dev \
        libltdl-dev \
        libre2-dev \
        libjemalloc-dev \
        libmsgpack-dev \
        libcurl4-openssl-dev \
        opencl-headers \
        ocl-icd-libopencl1 \
        intel-opencl-icd \
        unixodbc-dev \
        odbcinst \
        tzdata \
        alien \
        libcapnp-dev \
        cmake \
        gdb \
        pigz \
        moreutils \
        libcctz-dev \
        libldap2-dev \
        libsasl2-dev \
        libgsasl7-dev \
        heimdal-multidev \
        libhyperscan-dev \
        libbrotli-dev \
        protobuf-compiler \
        libprotoc-dev \
        libgrpc++-dev \
        rapidjson-dev \
        libsnappy-dev \
        libparquet-dev \
        libthrift-dev \
        libutf8proc-dev \
        libbz2-dev \
        libavro-dev \
        libfarmhash-dev \
        libmysqlclient-dev \
        --yes --no-install-recommends

# This symlink required by gcc to find lld compiler
RUN ln -s /usr/bin/lld-10 /usr/bin/ld.lld
RUN ln -s /usr/bin/lld-${LLVM_VERSION} /usr/bin/ld.lld

COPY build.sh /

@ -143,7 +143,7 @@ def parse_env_variables(build_type, compiler, sanitizer, package_type, image_typ

if unbundled:
# TODO: fix build with ENABLE_RDKAFKA
cmake_flags.append('-DUNBUNDLED=1 -DENABLE_MYSQL=0 -DENABLE_ODBC=0 -DENABLE_REPLXX=0 -DENABLE_RDKAFKA=0')
cmake_flags.append('-DUNBUNDLED=1 -DUSE_INTERNAL_RDKAFKA_LIBRARY=1') # too old version in ubuntu 19.10

if split_binary:
cmake_flags.append('-DUSE_STATIC_LIBRARIES=0 -DSPLIT_SHARED_LIBRARIES=1 -DCLICKHOUSE_SPLIT_BINARY=1')
@ -1,12 +1,17 @@
# docker build -t yandex/clickhouse-unbundled-builder .
FROM yandex/clickhouse-deb-builder

ENV APACHE_PUBKEY_HASH="ec52a838a734b9d5e09d95c0167b09bf1af9565f814d4d5b3785f66fd5b53dd01c918d9ec008cb58e843324dec523d26"

RUN export CODENAME="$(lsb_release --codename --short | tr 'A-Z' 'a-z')" \
&& wget -O /tmp/arrow-keyring.deb "https://apache.bintray.com/arrow/ubuntu/apache-arrow-archive-keyring-latest-${CODENAME}.deb" \
&& echo "${APACHE_PUBKEY_HASH} /tmp/arrow-keyring.deb" | sha384sum -c \
&& dpkg -i /tmp/arrow-keyring.deb

# Libraries from OS are only needed to test the "unbundled" build (that is not used in production).
RUN apt-get --allow-unauthenticated update -y \
&& env DEBIAN_FRONTEND=noninteractive \
apt-get --allow-unauthenticated install --yes --no-install-recommends \
RUN apt-get update \
&& apt-get install \
libicu-dev \
libreadline-dev \
gperf \
perl \
pkg-config \
@ -22,9 +27,8 @@ RUN apt-get --allow-unauthenticated update -y \
zlib1g-dev \
liblz4-dev \
libdouble-conversion-dev \
libxml2-dev \
librdkafka-dev \
libpoconetssl62 \
libpoco-dev \
libgoogle-perftools-dev \
libzstd-dev \
libltdl-dev \
@ -38,7 +42,6 @@ RUN apt-get --allow-unauthenticated update -y \
unixodbc-dev \
odbcinst \
tzdata \
gperf \
alien \
libcapnp-dev \
cmake \
@ -48,8 +51,23 @@ RUN apt-get --allow-unauthenticated update -y \
libcctz-dev \
libldap2-dev \
libsasl2-dev \
libgsasl7-dev \
heimdal-multidev \
libhyperscan-dev
libhyperscan-dev \
libbrotli-dev \
protobuf-compiler \
libprotoc-dev \
libgrpc++-dev \
rapidjson-dev \
libsnappy-dev \
libparquet-dev \
libthrift-dev \
libutf8proc-dev \
libbz2-dev \
libavro-dev \
libfarmhash-dev \
libmysqlclient-dev \
--yes --no-install-recommends

COPY build.sh

@ -2,9 +2,6 @@

set -x -e

# Update tzdata to the latest version. It is embedded into clickhouse binary.
sudo apt-get update && sudo apt-get install tzdata

ccache --show-stats ||:
ccache --zero-stats ||:
build/release --no-pbuilder $ALIEN_PKGS | ts '%Y-%m-%d %H:%M:%S'
@ -1,51 +1,58 @@
# docker build -t yandex/clickhouse-test-base .
FROM ubuntu:19.10

RUN apt-get --allow-unauthenticated update -y && apt-get install --yes wget gnupg
RUN wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | apt-key add -
RUN echo "deb [trusted=yes] http://apt.llvm.org/eoan/ llvm-toolchain-eoan-10 main" >> /etc/apt/sources.list
ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=10

RUN apt-get update \
&& apt-get install ca-certificates lsb-release wget gnupg apt-transport-https \
--yes --no-install-recommends --verbose-versions \
&& export LLVM_PUBKEY_HASH="bda960a8da687a275a2078d43c111d66b1c6a893a3275271beedf266c1ff4a0cdecb429c7a5cccf9f486ea7aa43fd27f" \
&& wget -O /tmp/llvm-snapshot.gpg.key https://apt.llvm.org/llvm-snapshot.gpg.key \
&& echo "${LLVM_PUBKEY_HASH} /tmp/llvm-snapshot.gpg.key" | sha384sum -c \
&& apt-key add /tmp/llvm-snapshot.gpg.key \
&& export CODENAME="$(lsb_release --codename --short | tr 'A-Z' 'a-z')" \
&& echo "deb [trusted=yes] http://apt.llvm.org/${CODENAME}/ llvm-toolchain-${CODENAME}-${LLVM_VERSION} main" >> \
/etc/apt/sources.list

# initial packages
RUN apt-get --allow-unauthenticated update -y \
&& env DEBIAN_FRONTEND=noninteractive \
apt-get --allow-unauthenticated install --yes --no-install-recommends \
apt-transport-https \
RUN apt-get update \
&& apt-get install \
bash \
ca-certificates \
curl \
fakeroot \
gnupg \
software-properties-common
ccache \
curl \
software-properties-common \
--yes --no-install-recommends

# Special dpkg-deb (https://github.com/ClickHouse-Extras/dpkg) version which is able
# to compress files using pigz (https://zlib.net/pigz/) instead of gzip.
# Significantly increases deb packaging speed and is compatible with old systems
RUN curl -O https://clickhouse-builds.s3.yandex.net/utils/1/dpkg-deb
RUN chmod +x dpkg-deb
RUN cp dpkg-deb /usr/bin
RUN curl -O https://clickhouse-builds.s3.yandex.net/utils/1/dpkg-deb \
&& chmod +x dpkg-deb \
&& cp dpkg-deb /usr/bin

RUN apt-get --allow-unauthenticated update -y \
&& env DEBIAN_FRONTEND=noninteractive \
apt-get --allow-unauthenticated install --yes --no-install-recommends \
clang-10 \
RUN apt-get update \
&& apt-get install \
clang-${LLVM_VERSION} \
debhelper \
devscripts \
gdb \
git \
gperf \
lcov \
llvm-10 \
llvm-${LLVM_VERSION} \
moreutils \
perl \
perl \
pigz \
pkg-config \
tzdata
tzdata \
--yes --no-install-recommends

# Sanitizer options
RUN echo "TSAN_OPTIONS='verbosity=1000 halt_on_error=1 history_size=7'" >> /etc/environment; \
echo "UBSAN_OPTIONS='print_stacktrace=1'" >> /etc/environment; \
echo "MSAN_OPTIONS='abort_on_error=1'" >> /etc/environment; \
ln -s /usr/lib/llvm-10/bin/llvm-symbolizer /usr/bin/llvm-symbolizer;
ln -s /usr/lib/llvm-${LLVM_VERSION}/bin/llvm-symbolizer /usr/bin/llvm-symbolizer;

CMD sleep 1
@ -1,38 +1,54 @@
# docker build -t yandex/clickhouse-fasttest .
FROM ubuntu:19.10

ARG odbc_driver_url="https://github.com/ClickHouse/clickhouse-odbc/releases/download/v1.1.4.20200302/clickhouse-odbc-1.1.4-Linux.tar.gz"
ENV COMMIT_SHA=''
ENV PULL_REQUEST_NUMBER=''
ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=10

RUN apt-get --allow-unauthenticated update -y && apt-get install --yes wget gnupg
RUN wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | apt-key add -
RUN echo "deb [trusted=yes] http://apt.llvm.org/eoan/ llvm-toolchain-eoan-10 main" >> /etc/apt/sources.list
RUN apt-get update \
&& apt-get install ca-certificates lsb-release wget gnupg apt-transport-https \
--yes --no-install-recommends --verbose-versions \
&& export LLVM_PUBKEY_HASH="bda960a8da687a275a2078d43c111d66b1c6a893a3275271beedf266c1ff4a0cdecb429c7a5cccf9f486ea7aa43fd27f" \
&& wget -O /tmp/llvm-snapshot.gpg.key https://apt.llvm.org/llvm-snapshot.gpg.key \
&& echo "${LLVM_PUBKEY_HASH} /tmp/llvm-snapshot.gpg.key" | sha384sum -c \
&& apt-key add /tmp/llvm-snapshot.gpg.key \
&& export CODENAME="$(lsb_release --codename --short | tr 'A-Z' 'a-z')" \
&& echo "deb [trusted=yes] http://apt.llvm.org/${CODENAME}/ llvm-toolchain-${CODENAME}-${LLVM_VERSION} main" >> \
/etc/apt/sources.list


RUN apt-get --allow-unauthenticated update -y \
&& env DEBIAN_FRONTEND=noninteractive \
apt-get --allow-unauthenticated install --yes --no-install-recommends \
apt-transport-https \
# initial packages
RUN apt-get update \
&& apt-get install \
bash \
fakeroot \
ccache \
curl \
software-properties-common \
--yes --no-install-recommends

# Special dpkg-deb (https://github.com/ClickHouse-Extras/dpkg) version which is able
# to compress files using pigz (https://zlib.net/pigz/) instead of gzip.
# Significantly increases deb packaging speed and is compatible with old systems
RUN curl -O https://clickhouse-builds.s3.yandex.net/utils/1/dpkg-deb \
&& chmod +x dpkg-deb \
&& cp dpkg-deb /usr/bin

RUN apt-get update \
&& apt-get install \
apt-transport-https \
bash \
brotli \
build-essential \
ca-certificates \
ccache \
ccache \
clang-10 \
clang-tidy-10 \
clang-${LLVM_VERSION} \
clang-tidy-${LLVM_VERSION} \
cmake \
curl \
expect \
fakeroot \
fakeroot \
git \
gperf \
gperf \
lld-10 \
llvm-10 \
lld-${LLVM_VERSION} \
llvm-${LLVM_VERSION} \
moreutils \
ninja-build \
psmisc \
@ -43,10 +59,15 @@ RUN apt-get --allow-unauthenticated update -y \
qemu-user-static \
rename \
software-properties-common \
sudo \
tzdata \
unixodbc \
wget
wget \
--yes --no-install-recommends

# This symlink is required by gcc to find the lld linker
RUN ln -s /usr/bin/lld-${LLVM_VERSION} /usr/bin/ld.lld

ARG odbc_driver_url="https://github.com/ClickHouse/clickhouse-odbc/releases/download/v1.1.4.20200302/clickhouse-odbc-1.1.4-Linux.tar.gz"

RUN mkdir -p /tmp/clickhouse-odbc-tmp \
&& wget --quiet -O - ${odbc_driver_url} | tar --strip-components=1 -xz -C /tmp/clickhouse-odbc-tmp \
@ -55,12 +76,11 @@ RUN mkdir -p /tmp/clickhouse-odbc-tmp \
&& odbcinst -i -s -l -f /tmp/clickhouse-odbc-tmp/share/doc/clickhouse-odbc/config/odbc.ini.sample \
&& rm -rf /tmp/clickhouse-odbc-tmp

# This symlink is required by gcc to find the lld linker
RUN ln -s /usr/bin/lld-10 /usr/bin/ld.lld

ENV TZ=Europe/Moscow
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone

ENV COMMIT_SHA=''
ENV PULL_REQUEST_NUMBER=''

COPY run.sh /
CMD ["/bin/bash", "/run.sh"]

@ -2,15 +2,15 @@

FROM yandex/clickhouse-binary-builder

RUN apt-get --allow-unauthenticated update -y \
&& env DEBIAN_FRONTEND=noninteractive \
apt-get --allow-unauthenticated install --yes --no-install-recommends \
RUN apt-get update --yes \
&& apt-get install \
bash \
sudo \
wget \
software-properties-common \
gpg-agent \
strace
debsig-verify \
strace \
--yes --no-install-recommends

#RUN wget -q -O - http://files.viva64.com/etc/pubkey.txt | sudo apt-key add -
#RUN sudo wget -O /etc/apt/sources.list.d/viva64.list http://files.viva64.com/etc/viva64.list
@ -20,14 +20,21 @@ RUN apt-get --allow-unauthenticated update -y \
# apt-get --allow-unauthenticated install --yes --no-install-recommends \
# pvs-studio

ENV PKG_VERSION="pvs-studio-7.08.39365.50-amd64.deb"
ENV PKG_VERSION="pvs-studio-latest"

RUN wget "https://files.viva64.com/$PKG_VERSION"
RUN sudo dpkg -i "$PKG_VERSION"
RUN set -x \
&& export PUBKEY_HASHSUM="486a0694c7f92e96190bbfac01c3b5ac2cb7823981db510a28f744c99eabbbf17a7bcee53ca42dc6d84d4323c2742761" \
&& wget https://files.viva64.com/etc/pubkey.txt -O /tmp/pubkey.txt \
&& echo "${PUBKEY_HASHSUM} /tmp/pubkey.txt" | sha384sum -c \
&& apt-key add /tmp/pubkey.txt \
&& wget "https://files.viva64.com/${PKG_VERSION}.deb" \
&& { debsig-verify ${PKG_VERSION}.deb \
|| echo "WARNING: Some file was just downloaded from the internet without any validation and we are installing it into the system"; } \
&& dpkg -i "${PKG_VERSION}.deb"

CMD echo "Running PVS version $PKG_VERSION" && cd /repo_folder && pvs-studio-analyzer credentials $LICENCE_NAME $LICENCE_KEY -o ./licence.lic \
&& cmake . && ninja re2_st && \
pvs-studio-analyzer analyze -o pvs-studio.log -e contrib -j 4 -l ./licence.lic; \
&& cmake . -D"ENABLE_EMBEDDED_COMPILER"=OFF && ninja re2_st \
&& pvs-studio-analyzer analyze -o pvs-studio.log -e contrib -j 4 -l ./licence.lic; \
plog-converter -a GA:1,2 -t fullhtml -o /test_output/pvs-studio-html-report pvs-studio.log; \
plog-converter -a GA:1,2 -t tasklist -o /test_output/pvs-studio-task-report.txt pvs-studio.log

@ -19,7 +19,7 @@ $ echo '{"foo":"bar"}' | curl 'http://localhost:8123/?query=INSERT%20INTO%20test
Using [CLI interface](../../interfaces/cli.md):

``` bash
$ echo '{"foo":"bar"}' | clickhouse-client ---query="INSERT INTO test FORMAT 20JSONEachRow"
$ echo '{"foo":"bar"}' | clickhouse-client ---query="INSERT INTO test FORMAT JSONEachRow"
```

Instead of inserting data manually, you might consider using one of the [client libraries](../../interfaces/index.md).
@ -46,6 +46,7 @@ toc_title: Client Libraries
- Kotlin
- [AORM](https://github.com/TanVD/AORM)
- C\#
- [Octonica.ClickHouseClient](https://github.com/Octonica/ClickHouseClient)
- [ClickHouse.Ado](https://github.com/killwort/ClickHouse-Net)
- [ClickHouse.Client](https://github.com/DarkWanderer/ClickHouse.Client)
- [ClickHouse.Net](https://github.com/ilyabreev/ClickHouse.Net)

@ -1864,4 +1864,17 @@ Default value: `0`.
- [Synchronicity of ALTER Queries](../../sql-reference/statements/alter/index.md#synchronicity-of-alter-queries)
- [Mutations](../../sql-reference/statements/alter/index.md#mutations)

## lock_acquire_timeout {#lock_acquire_timeout}

Defines how many seconds a locking request waits before failing.

Locking timeout is used to protect from deadlocks while executing read/write operations with tables. When the timeout expires and the locking request fails, the ClickHouse server throws an exception "Locking attempt timed out! Possible deadlock avoided. Client should retry." with error code `DEADLOCK_AVOIDED`.

Possible values:

- Positive integer.
- 0 — No locking timeout.

Default value: `120`.
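
For instance, a query that only reads a table can lower the timeout per query so that it fails fast instead of waiting out the default 120 seconds. A minimal sketch, assuming a table named `test.hits` (the name is a placeholder):

``` sql
-- Give up after 10 seconds instead of the default 120 if the table lock cannot be acquired.
SELECT count() FROM test.hits SETTINGS lock_acquire_timeout = 10;
```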

[Original article](https://clickhouse.tech/docs/en/operations/settings/settings/) <!-- hide -->

@ -917,6 +917,48 @@ SELECT defaultValueOfArgumentType( CAST(1 AS Nullable(Int8) ) )
└───────────────────────────────────────────────────────┘
```

## defaultValueOfTypeName {#defaultvalueoftypename}

Outputs the default value for the given type name.

Does not include default values for custom columns set by the user.

``` sql
defaultValueOfTypeName(type)
```

**Parameters:**

- `type` — A string representing a type name.

**Returned values**

- `0` for numbers.
- Empty string for strings.
- `ᴺᵁᴸᴸ` for [Nullable](../../sql-reference/data-types/nullable.md).

**Example**

``` sql
SELECT defaultValueOfTypeName('Int8')
```

``` text
┌─defaultValueOfTypeName('Int8')─┐
│ 0 │
└────────────────────────────────┘
```

``` sql
SELECT defaultValueOfTypeName('Nullable(Int8)')
```

``` text
┌─defaultValueOfTypeName('Nullable(Int8)')─┐
│ ᴺᵁᴸᴸ │
└──────────────────────────────────────────┘
```

## replicate {#other-functions-replicate}

Creates an array with a single value.

@ -14,5 +14,4 @@ The command changes the [sorting key](../../../engines/table-engines/mergetree-f

The command is lightweight in the sense that it only changes metadata. To keep the property that data part rows are ordered by the sorting key expression, you cannot add expressions containing existing columns to the sorting key (only columns added by the `ADD COLUMN` command in the same `ALTER` query).

!!! note "Note"
It only works for tables in the [`MergeTree`](../../../engines/table-engines/mergetree-family/mergetree.md) family (including
[replicated](../../../engines/table-engines/mergetree-family/replication.md) tables).
It only works for tables in the [`MergeTree`](../../../engines/table-engines/mergetree-family/mergetree.md) family (including [replicated](../../../engines/table-engines/mergetree-family/replication.md) tables).
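A minimal sketch of the rule above, assuming a hypothetical `MergeTree` table `visits` currently ordered by `(CounterID, EventDate)`: the sorting key can only be extended with a column that is added in the same `ALTER` query and has no default expression.

``` sql
-- Add a new column and append it to the sorting key in one ALTER query;
-- existing columns cannot be appended to the sorting key this way.
ALTER TABLE visits ADD COLUMN browser String, MODIFY ORDER BY (CounterID, EventDate, browser);
```
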
@ -46,6 +46,7 @@ toc_title: Client Libraries
- Kotlin
- [AORM](https://github.com/TanVD/AORM)
- C\#
- [Octonica.ClickHouseClient](https://github.com/Octonica/ClickHouseClient)
- [ClickHouse.Ado](https://github.com/killwort/ClickHouse-Net)
- [ClickHouse.Client](https://github.com/DarkWanderer/ClickHouse.Client)
- [ClickHouse.Net](https://github.com/ilyabreev/ClickHouse.Net)

@ -49,6 +49,7 @@ toc_title: "\u06A9\u062A\u0627\u0628\u062E\u0627\u0646\u0647 \u0647\u0627\u06CC
- Kotlin
- [AORM](https://github.com/TanVD/AORM)
- C\#
- [Octonica.ClickHouseClient](https://github.com/Octonica/ClickHouseClient)
- [ClickHouse.Ado](https://github.com/killwort/ClickHouse-Net)
- [ClickHouse.Client](https://github.com/DarkWanderer/ClickHouse.Client)
- [ClickHouse.Net](https://github.com/ilyabreev/ClickHouse.Net)

@ -48,6 +48,7 @@ toc_title: "Biblioth\xE8ques Clientes"
- Kotlin
- [AORM](https://github.com/TanVD/AORM)
- C\#
- [Octonica.ClickHouseClient](https://github.com/Octonica/ClickHouseClient)
- [ClickHouse.Ado](https://github.com/killwort/ClickHouse-Net)
- [ClickHouse.Client](https://github.com/DarkWanderer/ClickHouse.Client)
- [ClickHouse.Net](https://github.com/ilyabreev/ClickHouse.Net)

@ -48,6 +48,7 @@ toc_title: "\u30AF\u30E9\u30A4\u30A2\u30F3\u30C8"
- Kotlin
- [AORM](https://github.com/TanVD/AORM)
- C\#
- [Octonica.ClickHouseClient](https://github.com/Octonica/ClickHouseClient)
- [ClickHouse.Ado](https://github.com/killwort/ClickHouse-Net)
- [ClickHouse.Client](https://github.com/DarkWanderer/ClickHouse.Client)
- [ClickHouse.Net](https://github.com/ilyabreev/ClickHouse.Net)

@ -26,7 +26,7 @@ toc_priority: 29

During `INSERT` queries the table is locked, and other read and write queries wait for the table to be unlocked. If there are no data-writing queries, any number of concurrent read queries can be performed.

- Do not support [mutation](../../../sql-reference/statements/alter.md#mutations) operations.
- Do not support [mutation](../../../sql-reference/statements/alter/index.md#mutations) operations.

- Do not support indexes.

@ -113,7 +113,7 @@ drwxr-xr-x 2 clickhouse clickhouse 4096 Feb 1 16:48 detached

‘201901\_1\_1\_0’, ‘201901\_1\_7\_1’ and so on are the directories of the partition parts. Each part contains data only for the corresponding month (the table in this example is partitioned by month).

The `detached` directory contains parts that were detached from the table with the [DETACH](../../../sql-reference/statements/alter.md#alter_detach-partition) query. Corrupted parts are also placed in this directory instead of being deleted from the server.
The `detached` directory contains parts that were detached from the table with the [DETACH](../../../sql-reference/statements/alter/partition.md#alter_detach-partition) query. Corrupted parts are also placed in this directory instead of being deleted from the server.

The server does not use parts from the `detached` directory. You can add, delete, or modify the data in the detached directory at any time; the server will not know about it until you run the [ATTACH](../../../engines/table-engines/mergetree-family/custom-partitioning-key.md#alter_attach-partition) query.

@ -601,7 +601,7 @@ SETTINGS storage_policy = 'moving_from_ssd_to_hdd'
In `MergeTree` tables, data gets to disk in several ways:

- As a result of an insert (an `INSERT` query).
- During background merges and [mutations](../../../sql-reference/statements/alter.md#mutations).
- During background merges and [mutations](../../../sql-reference/statements/alter/index.md#mutations).
- When downloading data from another replica.
- As a result of partition freezing with [ALTER TABLE … FREEZE PARTITION](../../../engines/table-engines/mergetree-family/mergetree.md#alter_freeze-partition).

@ -40,6 +40,7 @@
- Kotlin
- [AORM](https://github.com/TanVD/AORM)
- C\#
- [Octonica.ClickHouseClient](https://github.com/Octonica/ClickHouseClient)
- [ClickHouse.Ado](https://github.com/killwort/ClickHouse-Net)
- [ClickHouse.Client](https://github.com/DarkWanderer/ClickHouse.Client)
- [ClickHouse.Net](https://github.com/ilyabreev/ClickHouse.Net)

@ -56,7 +56,7 @@ ClickHouse supports access control based on
Management queries:

- [CREATE USER](../sql-reference/statements/create/user.md#create-user-statement)
- [ALTER USER](../sql-reference/statements/alter.md#alter-user-statement)
- [ALTER USER](../sql-reference/statements/alter/user.md)
- [DROP USER](../sql-reference/statements/misc.md#drop-user-statement)
- [SHOW CREATE USER](../sql-reference/statements/show.md#show-create-user-statement)

@ -83,7 +83,7 @@ ClickHouse supports access control based on
Management queries:

- [CREATE ROLE](../sql-reference/statements/create/index.md#create-role-statement)
- [ALTER ROLE](../sql-reference/statements/alter.md#alter-role-statement)
- [ALTER ROLE](../sql-reference/statements/alter/role.md)
- [DROP ROLE](../sql-reference/statements/misc.md#drop-role-statement)
- [SET ROLE](../sql-reference/statements/misc.md#set-role-statement)
- [SET DEFAULT ROLE](../sql-reference/statements/misc.md#set-default-role-statement)
@ -98,7 +98,7 @@ ClickHouse supports access control based on
Management queries:

- [CREATE ROW POLICY](../sql-reference/statements/create/index.md#create-row-policy-statement)
- [ALTER ROW POLICY](../sql-reference/statements/alter.md#alter-row-policy-statement)
- [ALTER ROW POLICY](../sql-reference/statements/alter/row-policy.md)
- [DROP ROW POLICY](../sql-reference/statements/misc.md#drop-row-policy-statement)
- [SHOW CREATE ROW POLICY](../sql-reference/statements/show.md#show-create-row-policy-statement)

@ -110,7 +110,7 @@ ClickHouse supports access control based on
Management queries:

- [CREATE SETTINGS PROFILE](../sql-reference/statements/create/index.md#create-settings-profile-statement)
- [ALTER SETTINGS PROFILE](../sql-reference/statements/alter.md#alter-settings-profile-statement)
- [ALTER SETTINGS PROFILE](../sql-reference/statements/alter/settings-profile.md)
- [DROP SETTINGS PROFILE](../sql-reference/statements/misc.md#drop-settings-profile-statement)
- [SHOW CREATE SETTINGS PROFILE](../sql-reference/statements/show.md#show-create-settings-profile-statement)

@ -124,7 +124,7 @@ ClickHouse supports access control based on
Management queries:

- [CREATE QUOTA](../sql-reference/statements/create/index.md#create-quota-statement)
- [ALTER QUOTA](../sql-reference/statements/alter.md#alter-quota-statement)
- [ALTER QUOTA](../sql-reference/statements/alter/quota.md)
- [DROP QUOTA](../sql-reference/statements/misc.md#drop-quota-statement)
- [SHOW CREATE QUOTA](../sql-reference/statements/show.md#show-create-quota-statement)

@ -27,7 +27,7 @@

ClickHouse allows using the `ALTER TABLE ... FREEZE PARTITION ...` query to create a local copy of table partitions. This is implemented with hard links into the `/var/lib/clickhouse/shadow/` directory, so it usually does not take extra disk space for old data. The created copies of files are not handled by the ClickHouse server, so you can just leave them there: this gives you a simple backup that does not require any additional external system, but it is still prone to hardware issues, and you can lose both the current data and the saved copy. For this reason it is better to copy them remotely to another location and then remove the local copies. Distributed file systems and object stores are still good options for this, but ordinary attached file servers with a large enough capacity may work as well (in this case the transfer happens over a network file system or, perhaps, [rsync](https://en.wikipedia.org/wiki/Rsync)).

For more information about queries related to partition manipulation, see the [ALTER](../sql-reference/statements/alter.md#alter_manipulations-with-partitions) section.
For more information about queries related to partition manipulation, see the [ALTER](../sql-reference/statements/alter/partition.md#alter_manipulations-with-partitions) section.

A third-party tool is available to automate this approach: [clickhouse-backup](https://github.com/AlexAkulov/clickhouse-backup).

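A minimal sketch of this approach, assuming a hypothetical MergeTree table `test.hits` partitioned by month:

``` sql
-- Create a hard-link snapshot of one partition under /var/lib/clickhouse/shadow/,
-- ready to be copied off the server and removed locally afterwards.
ALTER TABLE test.hits FREEZE PARTITION 201901;
```
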
@ -1606,7 +1606,7 @@ SELECT idx, i FROM null_in WHERE i IN (1, NULL) SETTINGS transform_null_in = 1;

## mutations_sync {#mutations_sync}

Allows executing `ALTER TABLE ... UPDATE|DELETE` queries ([mutations](../../sql-reference/statements/alter.md#mutations)) synchronously.
Allows executing `ALTER TABLE ... UPDATE|DELETE` queries ([mutations](../../sql-reference/statements/alter/index.md#mutations)) synchronously.

Possible values:

@ -1618,8 +1618,8 @@ SELECT idx, i FROM null_in WHERE i IN (1, NULL) SETTINGS transform_null_in = 1;

**See also**

- [Synchronicity of ALTER queries](../../sql-reference/statements/alter.md#synchronicity-of-alter-queries)
- [Mutations](../../sql-reference/statements/alter.md#mutations)
- [Synchronicity of ALTER queries](../../sql-reference/statements/alter/index.md#synchronicity-of-alter-queries)
- [Mutations](../../sql-reference/statements/alter/index.md#mutations)


[Original article](https://clickhouse.tech/docs/ru/operations/settings/settings/) <!--hide-->

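A minimal sketch of the `mutations_sync` setting described above, with a hypothetical table `test.hits`:

``` sql
-- 1 = wait for the mutation to finish on the current server; 2 = wait for all replicas.
SET mutations_sync = 1;
ALTER TABLE test.hits DELETE WHERE EventDate < '2019-01-01';
```
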
@ -1,6 +1,6 @@
# system.mutations {#system_tables-mutations}

The table contains information about the progress of [mutations](../../sql-reference/statements/alter.md#mutations) of MergeTree-family tables. Each mutation command is represented by a single row.
The table contains information about the progress of [mutations](../../sql-reference/statements/alter/index.md#mutations) of MergeTree-family tables. Each mutation command is represented by a single row.

Columns:

@ -41,7 +41,7 @@

**See also**

- [Mutations](../../sql-reference/statements/alter.md#mutations)
- [Mutations](../../sql-reference/statements/alter/index.md#mutations)
- [MergeTree engine](../../engines/table-engines/mergetree-family/mergetree.md)
- [Data replication](../../engines/table-engines/mergetree-family/replication.md) (the ReplicatedMergeTree family)

@ -10,7 +10,7 @@
- `NEW_PART` — inserting a new data part.
- `MERGE_PARTS` — merging data parts.
- `DOWNLOAD_PART` — downloading a data part from a replica.
- `REMOVE_PART` — removing or detaching a data part from the table using [DETACH PARTITION](../../sql-reference/statements/alter.md#alter_detach-partition).
- `REMOVE_PART` — removing or detaching a data part from the table using [DETACH PARTITION](../../sql-reference/statements/alter/partition.md#alter_detach-partition).
- `MUTATE_PART` — mutating a data part.
- `MOVE_PART` — moving a data part between disks.
- `event_date` (Date) — the event date.

@ -6,7 +6,7 @@

Columns:

- `partition` ([String](../../sql-reference/data-types/string.md)) – the partition name. To learn what a partition is, see the description of the [ALTER](../../sql-reference/statements/alter.md#query_language_queries_alter) query.
- `partition` ([String](../../sql-reference/data-types/string.md)) – the partition name. To learn what a partition is, see the description of the [ALTER](../../sql-reference/statements/alter/index.md#query_language_queries_alter) query.

Formats:

@ -66,7 +66,7 @@

- `primary_key_bytes_in_memory_allocated` ([UInt64](../../sql-reference/data-types/int-uint.md)) – the amount of memory (in bytes) allocated for storing primary keys.

- `is_frozen` ([UInt8](../../sql-reference/data-types/int-uint.md)) – a flag that shows whether a partition backup exists: 1 — the backup exists, 0 — it does not. See the [FREEZE PARTITION](../../sql-reference/statements/alter.md#alter_freeze-partition) section.
- `is_frozen` ([UInt8](../../sql-reference/data-types/int-uint.md)) – a flag that shows whether a partition backup exists: 1 — the backup exists, 0 — it does not. See the [FREEZE PARTITION](../../sql-reference/statements/alter/partition.md#alter_freeze-partition) section.

- `database` ([String](../../sql-reference/data-types/string.md)) – the database name.

@ -10,7 +10,7 @@ toc_title: hidden
- [SELECT](statements/select/index.md)
- [INSERT INTO](statements/insert-into.md)
- [CREATE](statements/create/index.md)
- [ALTER](statements/alter.md#query_language_queries_alter)
- [ALTER](statements/alter/index.md#query_language_queries_alter)
- [Other kinds of queries](statements/misc.md)

[Original article](https://clickhouse.tech/docs/ru/query_language/) <!--hide-->

@ -1,613 +0,0 @@
|
||||
---
|
||||
toc_priority: 36
|
||||
toc_title: ALTER
|
||||
---
|
||||
|
||||
## ALTER {#query_language_queries_alter}
|
||||
|
||||
Запрос `ALTER` поддерживается только для таблиц типа `*MergeTree`, а также `Merge` и `Distributed`. Запрос имеет несколько вариантов.
|
||||
|
||||
### Манипуляции со столбцами {#manipuliatsii-so-stolbtsami}
|
||||
|
||||
Изменение структуры таблицы.
|
||||
|
||||
``` sql
|
||||
ALTER TABLE [db].name [ON CLUSTER cluster] ADD|DROP|CLEAR|COMMENT|MODIFY COLUMN ...
|
||||
```
|
||||
|
||||
В запросе указывается список из одного или более действий через запятую.
|
||||
Каждое действие — операция над столбцом.
|
||||
|
||||
Существуют следующие действия:
|
||||
|
||||
- [ADD COLUMN](#alter_add-column) — добавляет столбец в таблицу;
|
||||
- [DROP COLUMN](#alter_drop-column) — удаляет столбец;
|
||||
- [CLEAR COLUMN](#alter_clear-column) — сбрасывает все значения в столбце для заданной партиции;
|
||||
- [COMMENT COLUMN](#alter_comment-column) — добавляет комментарий к столбцу;
|
||||
- [MODIFY COLUMN](#alter_modify-column) — изменяет тип столбца, выражение для значения по умолчанию и TTL.
|
||||
|
||||
Подробное описание для каждого действия приведено ниже.
|
||||
|
||||
#### ADD COLUMN {#alter_add-column}
|
||||
|
||||
``` sql
|
||||
ADD COLUMN [IF NOT EXISTS] name [type] [default_expr] [codec] [AFTER name_after]
|
||||
```
|
||||
|
||||
Добавляет в таблицу новый столбец с именем `name`, типом `type`, [кодеком](create/table.md#codecs) `codec` и выражением для умолчания `default_expr` (смотрите раздел [Значения по умолчанию](create/index.md#create-default-values)).
|
||||
|
||||
Если указано `IF NOT EXISTS`, запрос не будет возвращать ошибку, если столбец уже существует. Если указано `AFTER name_after` (имя другого столбца), то столбец добавляется (в список столбцов таблицы) после указанного. Иначе, столбец добавляется в конец таблицы. Обратите внимание, ClickHouse не позволяет добавлять столбцы в начало таблицы. Для цепочки действий, `name_after` может быть именем столбца, который добавляется в одном из предыдущих действий.
|
||||
|
||||
Добавление столбца всего лишь меняет структуру таблицы, и не производит никаких действий с данными - соответствующие данные не появляются на диске после ALTER-а. При чтении из таблицы, если для какого-либо столбца отсутствуют данные, то он заполняется значениями по умолчанию (выполняя выражение по умолчанию, если такое есть, или нулями, пустыми строками). Также, столбец появляется на диске при слиянии кусков данных (см. [MergeTree](../../sql-reference/statements/alter.md)).
|
||||
|
||||
Такая схема позволяет добиться мгновенной работы запроса `ALTER` и отсутствия необходимости увеличивать объём старых данных.
|
||||
|
||||
Пример:
|
||||
|
||||
``` sql
|
||||
ALTER TABLE visits ADD COLUMN browser String AFTER user_id
|
||||
```
|
||||
|
||||
#### DROP COLUMN {#alter_drop-column}
|
||||
|
||||
``` sql
|
||||
DROP COLUMN [IF EXISTS] name
|
||||
```
|
||||
|
||||
Удаляет столбец с именем `name`. Если указано `IF EXISTS`, запрос не будет возвращать ошибку, если столбца не существует.
|
||||
|
||||
Запрос удаляет данные из файловой системы. Так как это представляет собой удаление целых файлов, запрос выполняется почти мгновенно.
|
||||
|
||||
Пример:
|
||||
|
||||
``` sql
|
||||
ALTER TABLE visits DROP COLUMN browser
|
||||
```
|
||||
|
||||
#### CLEAR COLUMN {#alter_clear-column}
|
||||
|
||||
``` sql
|
||||
CLEAR COLUMN [IF EXISTS] name IN PARTITION partition_name
|
||||
```
|
||||
|
||||
Сбрасывает все значения в столбце для заданной партиции. Если указано `IF EXISTS`, запрос не будет возвращать ошибку, если столбца не существует.
|
||||
|
||||
Как корректно задать имя партиции, см. в разделе [Как задавать имя партиции в запросах ALTER](#alter-how-to-specify-part-expr).
|
||||
|
||||
Пример:
|
||||
|
||||
``` sql
|
||||
ALTER TABLE visits CLEAR COLUMN browser IN PARTITION tuple()
|
||||
```
|
||||
|
||||
#### COMMENT COLUMN {#alter_comment-column}
|
||||
|
||||
``` sql
|
||||
COMMENT COLUMN [IF EXISTS] name 'Text comment'
|
||||
```
|
||||
|
||||
Добавляет комментарий к таблице. Если указано `IF EXISTS`, запрос не будет возвращать ошибку, если столбца не существует.
|
||||
|
||||
Каждый столбец может содержать только один комментарий. При выполнении запроса существующий комментарий заменяется на новый.
|
||||
|
||||
Посмотреть комментарии можно в столбце `comment_expression` из запроса [DESCRIBE TABLE](misc.md#misc-describe-table).
|
||||
|
||||
Пример:
|
||||
|
||||
``` sql
|
||||
ALTER TABLE visits COMMENT COLUMN browser 'Столбец показывает, из каких браузеров пользователи заходили на сайт.'
|
||||
```
|
||||
|
||||
#### MODIFY COLUMN {#alter_modify-column}
|
||||
|
||||
``` sql
|
||||
MODIFY COLUMN [IF EXISTS] name [type] [default_expr] [TTL]
|
||||
```
|
||||
|
||||
Запрос изменяет следующие свойства столбца `name`:
|
||||
|
||||
- Тип
|
||||
|
||||
- Значение по умолчанию
|
||||
|
||||
- TTL
|
||||
|
||||
Примеры изменения TTL столбца смотрите в разделе [TTL столбца](../../sql_reference/statements/alter.md#mergetree-column-ttl).
|
||||
|
||||
Если указано `IF EXISTS`, запрос не возвращает ошибку, если столбца не существует.
|
||||
|
||||
При изменении типа, значения преобразуются так, как если бы к ним была применена функция [toType](../../sql-reference/statements/alter.md). Если изменяется только выражение для умолчания, запрос не делает никакой сложной работы и выполняется мгновенно.
|
||||
|
||||
Пример запроса:
|
||||
|
||||
``` sql
|
||||
ALTER TABLE visits MODIFY COLUMN browser Array(String)
|
||||
```
|
||||
|
||||
Изменение типа столбца - это единственное действие, которое выполняет сложную работу - меняет содержимое файлов с данными. Для больших таблиц, выполнение может занять длительное время.
|
||||
|
||||
Выполнение производится в несколько стадий:
|
||||
|
||||
- подготовка временных (новых) файлов с изменёнными данными;
|
||||
- переименование старых файлов;
|
||||
- переименование временных (новых) файлов в старые;
|
||||
- удаление старых файлов.
|
||||
|
||||
Из них, длительной является только первая стадия. Если на этой стадии возникнет сбой, то данные не поменяются.
|
||||
Если на одной из следующих стадий возникнет сбой, то данные будет можно восстановить вручную. За исключением случаев, когда старые файлы удалены из файловой системы, а данные для новых файлов не доехали на диск и потеряны.
|
||||
|
||||
Запрос `ALTER` на изменение столбцов реплицируется. Соответствующие инструкции сохраняются в ZooKeeper, и затем каждая реплика их применяет. Все запросы `ALTER` выполняются в одном и том же порядке. Запрос ждёт выполнения соответствующих действий на всех репликах. Но при этом, запрос на изменение столбцов в реплицируемой таблице можно прервать, и все действия будут осуществлены асинхронно.
|
||||
|
||||
#### Ограничения запроса ALTER {#ogranicheniia-zaprosa-alter}
|
||||
|
||||
Запрос `ALTER` позволяет создавать и удалять отдельные элементы (столбцы) вложенных структур данных, но не вложенные структуры данных целиком. Для добавления вложенной структуры данных, вы можете добавить столбцы с именем вида `name.nested_name` и типом `Array(T)` - вложенная структура данных полностью эквивалентна нескольким столбцам-массивам с именем, имеющим одинаковый префикс до точки.
|
||||
|
||||
Отсутствует возможность удалять столбцы, входящие в первичный ключ или ключ для сэмплирования (в общем, входящие в выражение `ENGINE`). Изменение типа у столбцов, входящих в первичный ключ возможно только в том случае, если это изменение не приводит к изменению данных (например, разрешено добавление значения в Enum или изменение типа с `DateTime` на `UInt32`).
|
||||
|
||||
Если возможностей запроса `ALTER` не хватает для нужного изменения таблицы, вы можете создать новую таблицу, скопировать туда данные с помощью запроса [INSERT SELECT](insert-into.md#insert_query_insert-select), затем поменять таблицы местами с помощью запроса [RENAME](misc.md#misc_operations-rename), и удалить старую таблицу. В качестве альтернативы для запроса `INSERT SELECT`, можно использовать инструмент [clickhouse-copier](../../sql-reference/statements/alter.md).
|
||||
|
||||
Запрос `ALTER` блокирует все чтения и записи для таблицы. То есть, если на момент запроса `ALTER`, выполнялся долгий `SELECT`, то запрос `ALTER` сначала дождётся его выполнения. И в это время, все новые запросы к той же таблице, будут ждать, пока завершится этот `ALTER`.
|
||||
|
||||
Для таблиц, которые не хранят данные самостоятельно (типа [Merge](../../sql-reference/statements/alter.md) и [Distributed](../../sql-reference/statements/alter.md)), `ALTER` всего лишь меняет структуру таблицы, но не меняет структуру подчинённых таблиц. Для примера, при ALTER-е таблицы типа `Distributed`, вам также потребуется выполнить запрос `ALTER` для таблиц на всех удалённых серверах.
|
||||
|
||||
### Манипуляции с ключевыми выражениями таблиц {#manipuliatsii-s-kliuchevymi-vyrazheniiami-tablits}
|
||||
|
||||
Поддерживается операция:
|
||||
|
||||
``` sql
|
||||
MODIFY ORDER BY new_expression
|
||||
```
|
||||
|
||||
Работает только для таблиц семейства [`MergeTree`](../../sql-reference/statements/alter.md) (в том числе [реплицированных](../../sql-reference/statements/alter.md)). После выполнения запроса
|
||||
[ключ сортировки](../../sql-reference/statements/alter.md) таблицы
|
||||
заменяется на `new_expression` (выражение или кортеж выражений). Первичный ключ при этом остаётся прежним.
|
||||
|
||||
Операция затрагивает только метаданные. Чтобы сохранить свойство упорядоченности кусков данных по ключу
|
||||
сортировки, разрешено добавлять в ключ только новые столбцы (т.е. столбцы, добавляемые командой `ADD COLUMN`
|
||||
в том же запросе `ALTER`), у которых нет выражения по умолчанию.
|
||||
|
||||
### Манипуляции с индексами {#manipuliatsii-s-indeksami}
|
||||
|
||||
Добавить или удалить индекс можно с помощью операций
|
||||
|
||||
``` sql
|
||||
ALTER TABLE [db].name ADD INDEX name expression TYPE type GRANULARITY value [AFTER name]
|
||||
ALTER TABLE [db].name DROP INDEX name
|
||||
```
|
||||
|
||||
Поддерживается только таблицами семейства `*MergeTree`.
|
||||
|
||||
Команда `ADD INDEX` добавляет описание индексов в метаданные, а `DROP INDEX` удаляет индекс из метаданных и стирает файлы индекса с диска, поэтому они легковесные и работают мгновенно.
|
||||
|
||||
Если индекс появился в метаданных, то он начнет считаться в последующих слияниях и записях в таблицу, а не сразу после выполнения операции `ALTER`.
|
||||
|
||||
Запрос на изменение индексов реплицируется, сохраняя новые метаданные в ZooKeeper и применяя изменения на всех репликах.
|
||||
|
||||
### Манипуляции с ограничениями (constraints) {#manipuliatsii-s-ogranicheniiami-constraints}
|
||||
|
||||
Про ограничения подробнее написано [тут](create/index.md#constraints).
|
||||
|
||||
Добавить или удалить ограничение можно с помощью запросов
|
||||
|
||||
``` sql
|
||||
ALTER TABLE [db].name ADD CONSTRAINT constraint_name CHECK expression;
|
||||
ALTER TABLE [db].name DROP CONSTRAINT constraint_name;
|
||||
```
|
||||
|
||||
Запросы выполняют добавление или удаление метаданных об ограничениях таблицы `[db].name`, поэтому выполняются мнгновенно.
|
||||
|
||||
Если ограничение появилось для непустой таблицы, то *проверка ограничения для имеющихся данных не производится*.
|
||||
|
||||
Запрос на изменение ограничений для Replicated таблиц реплицируется, сохраняя новые метаданные в ZooKeeper и применяя изменения на всех репликах.
|
||||
|
||||
### Манипуляции с партициями и кусками {#alter_manipulations-with-partitions}
|
||||
|
||||
Для работы с [партициями](../../sql-reference/statements/alter.md) доступны следующие операции:
|
||||
|
||||
- [DETACH PARTITION](#alter_detach-partition) — перенести партицию в директорию `detached`;
|
||||
- [DROP PARTITION](#alter_drop-partition) — удалить партицию;
|
||||
- [ATTACH PARTITION\|PART](#alter_attach-partition) — добавить партицию/кусок в таблицу из директории `detached`;
|
||||
- [ATTACH PARTITION FROM](#alter_attach-partition-from) — скопировать партицию из другой таблицы;
|
||||
- [REPLACE PARTITION](#alter_replace-partition) — скопировать партицию из другой таблицы с заменой;
|
||||
- [MOVE PARTITION TO TABLE](#alter_move_to_table-partition) — переместить партицию в другую таблицу;
|
||||
- [CLEAR COLUMN IN PARTITION](#alter_clear-column-partition) — удалить все значения в столбце для заданной партиции;
|
||||
- [CLEAR INDEX IN PARTITION](#alter_clear-index-partition) — очистить построенные вторичные индексы для заданной партиции;
|
||||
- [FREEZE PARTITION](#alter_freeze-partition) — создать резервную копию партиции;
|
||||
- [FETCH PARTITION](#alter_fetch-partition) — скачать партицию с другого сервера;
|
||||
- [MOVE PARTITION\|PART](#alter_move-partition) — переместить партицию/кускок на другой диск или том.
|
||||
|
||||
#### DETACH PARTITION {#alter_detach-partition}
|
||||
|
||||
``` sql
|
||||
ALTER TABLE table_name DETACH PARTITION partition_expr
|
||||
```
|
||||
|
||||
Перемещает заданную партицию в директорию `detached`. Сервер не будет знать об этой партиции до тех пор, пока вы не выполните запрос [ATTACH](#alter_attach-partition).
|
||||
|
||||
Пример:
|
||||
|
||||
``` sql
|
||||
ALTER TABLE visits DETACH PARTITION 201901
|
||||
```
|
||||
|
||||
Подробнее о том, как корректно задать имя партиции, см. в разделе [Как задавать имя партиции в запросах ALTER](#alter-how-to-specify-part-expr).
|
||||
|
||||
После того как запрос будет выполнен, вы сможете производить любые операции с данными в директории `detached`. Например, можно удалить их из файловой системы.
|
||||
|
||||
Запрос реплицируется — данные будут перенесены в директорию `detached` и забыты на всех репликах. Обратите внимание, запрос может быть отправлен только на реплику-лидер. Чтобы узнать, является ли реплика лидером, выполните запрос `SELECT` к системной таблице [system.replicas](../../operations/system-tables/replicas.md#system_tables-replicas). Либо можно выполнить запрос `DETACH` на всех репликах — тогда на всех репликах, кроме реплики-лидера, запрос вернет ошибку.
|
||||
|
||||
#### DROP PARTITION {#alter_drop-partition}
|
||||
|
||||
``` sql
|
||||
ALTER TABLE table_name DROP PARTITION partition_expr
|
||||
```
|
||||
|
||||
Удаляет партицию. Партиция помечается как неактивная и будет полностью удалена примерно через 10 минут.
|
||||
|
||||
Подробнее о том, как корректно задать имя партиции, см. в разделе [Как задавать имя партиции в запросах ALTER](#alter-how-to-specify-part-expr).
|
||||
|
||||
Запрос реплицируется — данные будут удалены на всех репликах.
|
||||
|
||||
#### DROP DETACHED PARTITION\|PART {#alter_drop-detached}
|
||||
|
||||
``` sql
|
||||
ALTER TABLE table_name DROP DETACHED PARTITION|PART partition_expr
|
||||
```
|
||||
|
||||
Удаляет из `detached` кусок или все куски, принадлежащие партиции.
|
||||
Подробнее о том, как корректно задать имя партиции, см. в разделе [Как задавать имя партиции в запросах ALTER](#alter-how-to-specify-part-expr).
|
||||
|
||||
#### ATTACH PARTITION\|PART {#alter_attach-partition}
|
||||
|
||||
``` sql
|
||||
ALTER TABLE table_name ATTACH PARTITION|PART partition_expr
|
||||
```
|
||||
|
||||
Добавляет данные в таблицу из директории `detached`. Можно добавить данные как для целой партиции, так и для отдельного куска. Примеры:
|
||||
|
||||
``` sql
|
||||
ALTER TABLE visits ATTACH PARTITION 201901;
|
||||
ALTER TABLE visits ATTACH PART 201901_2_2_0;
|
||||
```
|
||||
|
||||
Как корректно задать имя партиции или куска, см. в разделе [Как задавать имя партиции в запросах ALTER](#alter-how-to-specify-part-expr).
|
||||
|
||||
Этот запрос реплицируется. Реплика-иницатор проверяет, есть ли данные в директории `detached`. Если данные есть, то запрос проверяет их целостность. В случае успеха данные добавляются в таблицу. Все остальные реплики загружают данные с реплики-инициатора запроса.
|
||||
|
||||
Это означает, что вы можете разместить данные в директории `detached` на одной реплике и с помощью запроса `ALTER ... ATTACH` добавить их в таблицу на всех репликах.
|
||||
|
||||
#### ATTACH PARTITION FROM {#alter_attach-partition-from}
|
||||
|
||||
``` sql
|
||||
ALTER TABLE table2 ATTACH PARTITION partition_expr FROM table1
|
||||
```
|
||||
|
||||
Копирует партицию из таблицы `table1` в таблицу `table2` и добавляет к существующим данным `table2`. Данные из `table1` не удаляются.
|
||||
|
||||
Следует иметь в виду:
|
||||
|
||||
- Таблицы должны иметь одинаковую структуру.
|
||||
- Для таблиц должен быть задан одинаковый ключ партиционирования.
|
||||
|
||||
Подробнее о том, как корректно задать имя партиции, см. в разделе [Как задавать имя партиции в запросах ALTER](#alter-how-to-specify-part-expr).
|
||||
|
||||
#### REPLACE PARTITION {#alter_replace-partition}
|
||||
|
||||
``` sql
|
||||
ALTER TABLE table2 REPLACE PARTITION partition_expr FROM table1
|
||||
```
|
||||
|
||||
Копирует партицию из таблицы `table1` в таблицу `table2` с заменой существующих данных в `table2`. Данные из `table1` не удаляются.
|
||||
|
||||
Следует иметь в виду:
|
||||
|
||||
- Таблицы должны иметь одинаковую структуру.
|
||||
- Для таблиц должен быть задан одинаковый ключ партиционирования.
|
||||
|
||||
Подробнее о том, как корректно задать имя партиции, см. в разделе [Как задавать имя партиции в запросах ALTER](#alter-how-to-specify-part-expr).
|
||||
|
||||
#### MOVE PARTITION TO TABLE {#alter_move_to_table-partition}
|
||||
|
||||
``` sql
|
||||
ALTER TABLE table_source MOVE PARTITION partition_expr TO TABLE table_dest
|
||||
```
|
||||
|
||||
Перемещает партицию из таблицы `table_source` в таблицу `table_dest` (добавляет к существующим данным в `table_dest`) с удалением данных из таблицы `table_source`.
|
||||
|
||||
Следует иметь в виду:
|
||||
|
||||
- Таблицы должны иметь одинаковую структуру.
|
||||
- Для таблиц должен быть задан одинаковый ключ партиционирования.
|
||||
- Движки таблиц должны быть одинакового семейства (реплицированные или нереплицированные).
|
||||
- Для таблиц должна быть задана одинаковая политика хранения.
|
||||
|
||||
#### CLEAR COLUMN IN PARTITION {#alter_clear-column-partition}
|
||||
|
||||
``` sql
|
||||
ALTER TABLE table_name CLEAR COLUMN column_name IN PARTITION partition_expr
|
||||
```
|
||||
|
||||
Сбрасывает все значения в столбце для заданной партиции. Если для столбца определено значение по умолчанию (в секции `DEFAULT`), то будет выставлено это значение.
|
||||
|
||||
Пример:
|
||||
|
||||
``` sql
|
||||
ALTER TABLE visits CLEAR COLUMN hour in PARTITION 201902
|
||||
```
|
||||
|
||||
#### CLEAR INDEX IN PARTITION {#alter_clear-index-partition}
|
||||
|
||||
``` sql
|
||||
ALTER TABLE table_name CLEAR INDEX index_name IN PARTITION partition_expr
|
||||
```
|
||||
|
||||
Работает как `CLEAR COLUMN`, но сбрасывает индексы вместо данных в столбцах.
|
||||
|
||||
#### FREEZE PARTITION {#alter_freeze-partition}
|
||||
|
||||
``` sql
|
||||
ALTER TABLE table_name FREEZE [PARTITION partition_expr]
|
||||
```
|
||||
|
||||
Создаёт резервную копию для заданной партиции. Если выражение `PARTITION` опущено, резервные копии будут созданы для всех партиций.
|
||||
|
||||
!!! note "Примечание"
|
||||
Создание резервной копии не требует остановки сервера.
|
||||
|
||||
Для таблиц старого стиля имя партиций можно задавать в виде префикса (например, ‘2019’). В этом случае резервные копии будут созданы для всех соответствующих партиций. Подробнее о том, как корректно задать имя партиции, см. в разделе [Как задавать имя партиции в запросах ALTER](#alter-how-to-specify-part-expr).
|
||||
|
||||
Запрос делает следующее — для текущего состояния таблицы он формирует жесткие ссылки на данные в этой таблице. Ссылки размещаются в директории `/var/lib/clickhouse/shadow/N/...`, где:
|
||||
|
||||
- `/var/lib/clickhouse/` — рабочая директория ClickHouse, заданная в конфигурационном файле;
|
||||
- `N` — инкрементальный номер резервной копии.
|
||||
|
||||
!!! note "Примечание"
|
||||
При использовании [нескольких дисков для хранения данных таблицы](../../sql-reference/statements/alter.md#table_engine-mergetree-multiple-volumes) директория `shadow/N` появляется на каждом из дисков, на которых были куски, попавшие под выражение `PARTITION`.
|
||||
|
||||
Структура директорий внутри резервной копии такая же, как внутри `/var/lib/clickhouse/`. Запрос выполнит ‘chmod’ для всех файлов, запрещая запись в них.
|
||||
|
||||
Обратите внимание, запрос `ALTER TABLE t FREEZE PARTITION` не реплицируется. Он создает резервную копию только на локальном сервере. После создания резервной копии данные из `/var/lib/clickhouse/shadow/` можно скопировать на удалённый сервер, а локальную копию удалить.
|
||||
|
||||
Резервная копия создается почти мгновенно (однако сначала запрос дожидается завершения всех запросов, которые выполняются для соответствующей таблицы).
|
||||
|
||||
`ALTER TABLE t FREEZE PARTITION` копирует только данные, но не метаданные таблицы. Чтобы сделать резервную копию метаданных таблицы, скопируйте файл `/var/lib/clickhouse/metadata/database/table.sql`
|
||||
|
||||
Чтобы восстановить данные из резервной копии, выполните следующее:
|
||||
|
||||
1. Создайте таблицу, если она ещё не существует. Запрос на создание можно взять из .sql файла (замените в нём `ATTACH` на `CREATE`).
|
||||
2. Скопируйте данные из директории `data/database/table/` внутри резервной копии в директорию `/var/lib/clickhouse/data/database/table/detached/`.
|
||||
3. С помощью запросов `ALTER TABLE t ATTACH PARTITION` добавьте данные в таблицу.
|
||||
|
||||
Восстановление данных из резервной копии не требует остановки сервера.
|
||||
|
||||
Подробнее о резервном копировании и восстановлении данных читайте в разделе [Резервное копирование данных](../../operations/backup.md).
|
||||
|
||||
#### FETCH PARTITION {#alter_fetch-partition}
|
||||
|
||||
``` sql
|
||||
ALTER TABLE table_name FETCH PARTITION partition_expr FROM 'path-in-zookeeper'
|
||||
```
|
||||
|
||||
Загружает партицию с другого сервера. Этот запрос работает только для реплицированных таблиц.
|
||||
|
||||
Запрос выполняет следующее:
|
||||
|
||||
1. Загружает партицию с указанного шарда. Путь к шарду задается в секции `FROM` (‘path-in-zookeeper’). Обратите внимание, нужно задавать путь к шарду в ZooKeeper.
|
||||
2. Помещает загруженные данные в директорию `detached` таблицы `table_name`. Чтобы прикрепить эти данные к таблице, используйте запрос [ATTACH PARTITION\|PART](#alter_attach-partition).
|
||||
|
||||
Например:
|
||||
|
||||
``` sql
|
||||
ALTER TABLE users FETCH PARTITION 201902 FROM '/clickhouse/tables/01-01/visits';
|
||||
ALTER TABLE users ATTACH PARTITION 201902;
|
||||
```
|
||||
|
||||
Следует иметь в виду:
|
||||
|
||||
- Запрос `ALTER TABLE t FETCH PARTITION` не реплицируется. Он загружает партицию в директорию `detached` только на локальном сервере.
|
||||
- Запрос `ALTER TABLE t ATTACH` реплицируется — он добавляет данные в таблицу сразу на всех репликах. На одной из реплик данные будут добавлены из директории `detached`, а на других — из соседних реплик.
|
||||
|
||||
Перед загрузкой данных система проверяет, существует ли партиция и совпадает ли её структура со структурой таблицы. При этом автоматически выбирается наиболее актуальная реплика среди всех живых реплик.
|
||||
|
||||
Несмотря на то что запрос называется `ALTER TABLE`, он не изменяет структуру таблицы и не изменяет сразу доступные данные в таблице.
|
||||
|
||||
#### MOVE PARTITION\|PART {#alter_move-partition}
|
||||
|
||||
Перемещает партицию или кусок данных на другой том или диск для таблиц с движком `MergeTree`. Смотрите [Хранение данных таблицы на нескольких блочных устройствах](../../sql-reference/statements/alter.md#table_engine-mergetree-multiple-volumes).
|
||||
|
||||
``` sql
|
||||
ALTER TABLE table_name MOVE PARTITION|PART partition_expr TO DISK|VOLUME 'disk_name'
|
||||
```
|
||||
|
||||
Запрос `ALTER TABLE t MOVE`:
|
||||
|
||||
- Не реплицируется, т.к. на разных репликах могут быть различные конфигурации политик хранения.
|
||||
- Возвращает ошибку, если указан несконфигурированный том или диск. Ошибка также возвращается в случае невыполнения условий перемещения данных, которые указаны в конфигурации политики хранения.
|
||||
- Может возвращать ошибку в случае, когда перемещаемые данные уже оказались перемещены в результате фонового процесса, конкурентного запроса `ALTER TABLE t MOVE` или как часть результата фоновой операции слияния. В данном случае никаких дополнительных действий от пользователя не требуется.
|
||||
|
||||
Примеры:
|
||||
|
||||
``` sql
|
||||
ALTER TABLE hits MOVE PART '20190301_14343_16206_438' TO VOLUME 'slow'
|
||||
ALTER TABLE hits MOVE PARTITION '2019-09-01' TO DISK 'fast_ssd'
|
||||
```
|
||||
|
||||
#### How to Set the Partition Expression in ALTER Queries {#alter-how-to-specify-part-expr}

To specify the partition in `ALTER ... PARTITION` queries, you can use:

- The partition name. You can find it in the `partition` column of the [system.parts](../../operations/system-tables/parts.md#system_tables-parts) table. For example, `ALTER TABLE visits DETACH PARTITION 201901`.
- An arbitrary expression over the columns of the source table. Constants and constant expressions are also supported. For example, `ALTER TABLE visits DETACH PARTITION toYYYYMM(toDate('2019-01-25'))`.
- The partition ID as a string. The partition ID is used to name data parts of the partition in the file system and in ZooKeeper. In `ALTER` queries it must be specified in the `PARTITION ID` clause, in single quotes. For example, `ALTER TABLE visits DETACH PARTITION ID '201901'`.
- For the [ATTACH PART](#alter_attach-partition) and [DROP DETACHED PART](#alter_drop-detached) queries: to specify the name of a part, use a string literal with a value from the `name` column of the [system.detached_parts](../../operations/system-tables/detached_parts.md#system_tables-detached_parts) table. For example, `ALTER TABLE visits ATTACH PART '201901_1_1_0'`.

Whether to quote the partition name depends on the data type of the partitioning column. For a column of type `String`, the partition name must be enclosed in single quotes. For the `Date` and `Int*` types, no quotes are needed.

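As an illustration of the quoting rule, here is a minimal sketch; the tables `visits_by_channel` (partitioned by a `String` column) and `visits` (partitioned by `toYYYYMM(StartDate)`) are assumptions made up for this example, not tables defined elsewhere in this guide:

``` sql
-- Partitioning key of type String: the partition name is quoted (assumed table).
ALTER TABLE visits_by_channel DETACH PARTITION 'mobile';

-- Partitioning key derived from a Date column: no quotes (assumed table).
ALTER TABLE visits DETACH PARTITION 201901;
```
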
Note: for old-style tables the partition can be specified either as the number `201901` or as the string `'201901'`. The syntax for new-style tables is stricter about types (similar to the parser of the VALUES input format).

The rules above also apply to [OPTIMIZE](misc.md#misc_operations-optimize) queries. To specify the only partition of an unpartitioned table, use the expression `PARTITION tuple()`. For example:

``` sql
OPTIMIZE TABLE table_not_partitioned PARTITION tuple() FINAL;
```

Examples of `ALTER ... PARTITION` queries can be found in the tests [`00502_custom_partitioning_local`](https://github.com/ClickHouse/ClickHouse/blob/master/tests/queries/0_stateless/00502_custom_partitioning_local.sql) and [`00502_custom_partitioning_replicated_zookeeper`](https://github.com/ClickHouse/ClickHouse/blob/master/tests/queries/0_stateless/00502_custom_partitioning_replicated_zookeeper.sql).

### Manipulations with Table TTL {#manipuliatsii-s-ttl-tablitsy}

You can change the [table TTL](../../sql-reference/statements/alter.md#mergetree-table-ttl) with a query of the following form:

``` sql
ALTER TABLE table-name MODIFY TTL ttl-expression
```

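For instance, a minimal sketch that sets a 30-day TTL; the table `visits` and its `StartDate` column are assumptions for illustration:

``` sql
-- Keep rows of the assumed `visits` table for 30 days after StartDate.
ALTER TABLE visits MODIFY TTL StartDate + INTERVAL 30 DAY;
```
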
### Mutations {#mutations}

Mutations are a kind of ALTER query that changes or deletes data in a table. Unlike the standard `DELETE` and `UPDATE` queries, which are intended for point changes, mutations are meant for heavy operations that affect many rows in a table. They are supported for table engines of the `MergeTree` family, including engines with replication.

Existing tables do not need to be converted to use mutations. However, after the first mutation is applied, the table's data format becomes incompatible with previous server versions, and it is no longer possible to roll back to a previous version.

The following commands are currently available:

``` sql
ALTER TABLE [db.]table DELETE WHERE filter_expr
```

The `filter_expr` expression must be of type `UInt8`. The query deletes the table rows for which this expression evaluates to a non-zero value.

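A short sketch, assuming a hypothetical table `visits` with a `CounterID` column:

``` sql
-- Delete every row of the assumed `visits` table that belongs to counter 34.
ALTER TABLE visits DELETE WHERE CounterID = 34;
```
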
``` sql
ALTER TABLE [db.]table UPDATE column1 = expr1 [, ...] WHERE filter_expr
```

The `filter_expr` expression must be of type `UInt8`. The query sets the specified columns to the values of the corresponding expressions in every row for which `filter_expr` evaluates to a non-zero value. The computed values are cast to the column type with the `CAST` operator. Updating columns that are used in the primary key or the partitioning key is not supported.

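Another sketch under the same assumption of a hypothetical `visits` table, this time with `Visits` and `StartDate` columns:

``` sql
-- Increment the Visits counter for one day of data (column names are assumed).
ALTER TABLE visits UPDATE Visits = Visits + 1 WHERE StartDate = toDate('2019-01-01');
```
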
``` sql
ALTER TABLE [db.]table MATERIALIZE INDEX name IN PARTITION partition_name
```

The command rebuilds the secondary index `name` for the partition `partition_name`.

Several commands can be specified in a single query, separated by commas.

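As a sketch of combining commands in one query (the table `visits` and its columns are assumptions for illustration):

``` sql
-- Two mutation commands submitted as a single ALTER query, separated by a comma.
ALTER TABLE visits
    DELETE WHERE CounterID = 0,
    UPDATE Sign = -1 WHERE StartDate < toDate('2018-01-01');
```
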
For \*MergeTree tables, mutations are executed by rewriting whole data parts. There is no atomicity: parts are replaced with mutated parts as soon as they are ready, and a `SELECT` query that starts executing during a mutation will see data from parts that have already been mutated together with data from parts that have not been mutated yet.

Mutations are totally ordered among themselves and are applied to each part in the order they were submitted. Mutations are also ordered with respect to inserts: data inserted into the table before a mutation was submitted will be mutated, while data inserted after it will not be. Mutations do not block inserts in any way.

The query returns immediately after the mutation entry is added (to ZooKeeper for replicated tables, to the file system for non-replicated ones). The mutation itself is executed asynchronously, using the system profile settings. You can track its progress in the [`system.mutations`](../../operations/system-tables/mutations.md#system_tables-mutations) table. A mutation that has been submitted will keep executing even if the ClickHouse servers are restarted. A mutation cannot be rolled back once it has been submitted, but if a mutation gets stuck for some reason, it can be cancelled with the [`KILL MUTATION`](../../sql-reference/statements/kill.md#kill-mutation) query.

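For instance, a sketch of checking progress and cancelling a stuck mutation; the table name `visits` and the shown `mutation_id` value are made-up examples:

``` sql
-- Inspect mutations of the assumed `visits` table.
SELECT mutation_id, command, parts_to_do, is_done, latest_fail_reason
FROM system.mutations
WHERE database = 'default' AND table = 'visits';

-- Cancel a mutation that cannot finish (the id below is an example value).
KILL MUTATION WHERE database = 'default' AND table = 'visits' AND mutation_id = 'mutation_42.txt';
```
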
Entries for completed mutations are not deleted right away (the number of entries to keep is determined by the `finished_mutations_to_keep` table engine parameter). Older entries are deleted.

### Synchronicity of ALTER Queries {#synchronicity-of-alter-queries}

For non-replicated tables, all `ALTER` queries are executed synchronously. For replicated tables, the query merely adds instructions for the corresponding operations to `ZooKeeper`, and the operations themselves are performed as soon as possible. However, the query can wait for these operations to finish on all replicas.

For `ALTER ... ATTACH|DETACH|DROP` queries, you can configure waiting with the `replication_alter_partitions_sync` setting.
Possible values: `0` - do not wait, `1` - wait for completion on the current replica only (default), `2` - wait for all replicas.

For `ALTER TABLE ... UPDATE|DELETE` queries, the synchronicity is defined by the [mutations_sync](../../operations/settings/settings.md#mutations_sync) setting.

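As a sketch, a session that waits for a mutation to finish on all replicas before returning; the table `visits` is an assumed example:

``` sql
-- Wait until the DELETE mutation completes on all replicas before the query returns.
SET mutations_sync = 2;
ALTER TABLE visits DELETE WHERE StartDate < toDate('2018-01-01');
```
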
## ALTER USER {#alter-user-statement}

Changes a ClickHouse user account.

### Syntax {#alter-user-syntax}

``` sql
ALTER USER [IF EXISTS] name [ON CLUSTER cluster_name]
    [RENAME TO new_name]
    [IDENTIFIED [WITH {PLAINTEXT_PASSWORD|SHA256_PASSWORD|DOUBLE_SHA1_PASSWORD}] BY {'password'|'hash'}]
    [[ADD|DROP] HOST {LOCAL | NAME 'name' | REGEXP 'name_regexp' | IP 'address' | LIKE 'pattern'} [,...] | ANY | NONE]
    [DEFAULT ROLE role [,...] | ALL | ALL EXCEPT role [,...] ]
    [SETTINGS variable [= value] [MIN [=] min_value] [MAX [=] max_value] [READONLY|WRITABLE] | PROFILE 'profile_name'] [,...]
```

### Description {#alter-user-dscr}

To execute `ALTER USER`, you must have the [ALTER USER](grant.md#grant-access-management) privilege.

### Examples {#alter-user-examples}

Set the roles assigned to the user as the default roles:

``` sql
ALTER USER user DEFAULT ROLE role1, role2
```

If the roles have not been assigned to the user, ClickHouse throws an exception.

Set all of the user's assigned roles as the default roles:

``` sql
ALTER USER user DEFAULT ROLE ALL
```

If a role is assigned to the user later, it automatically becomes a default role.

Set all of the user's assigned roles as the default roles, except `role1` and `role2`:

``` sql
ALTER USER user DEFAULT ROLE ALL EXCEPT role1, role2
```

## ALTER ROLE {#alter-role-statement}

Changes a role.

### Syntax {#alter-role-syntax}

``` sql
ALTER ROLE [IF EXISTS] name [ON CLUSTER cluster_name]
    [RENAME TO new_name]
    [SETTINGS variable [= value] [MIN [=] min_value] [MAX [=] max_value] [READONLY|WRITABLE] | PROFILE 'profile_name'] [,...]
```

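For example, a sketch that renames a role and pins a setting for everyone who has it; the role names and the `max_memory_usage` limit are illustrative assumptions:

``` sql
-- Rename the role and lock max_memory_usage for its grantees.
ALTER ROLE analyst RENAME TO reporting_analyst
    SETTINGS max_memory_usage = 10000000000 READONLY;
```
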
## ALTER ROW POLICY {#alter-row-policy-statement}

Changes a row policy.

### Syntax {#alter-row-policy-syntax}

``` sql
ALTER [ROW] POLICY [IF EXISTS] name [ON CLUSTER cluster_name] ON [database.]table
    [RENAME TO new_name]
    [AS {PERMISSIVE | RESTRICTIVE}]
    [FOR SELECT]
    [USING {condition | NONE}][,...]
    [TO {role [,...] | ALL | ALL EXCEPT role [,...]}]
```

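A sketch that follows this grammar; the policy `region_filter`, the table `mydb.visits`, the column `Region`, and the role `analyst` are assumptions for illustration:

``` sql
-- Limit SELECTs through this policy to one region and grant it to a single role.
ALTER ROW POLICY region_filter ON mydb.visits
    FOR SELECT USING Region = 'EU'
    TO analyst;
```
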
## ALTER QUOTA {#alter-quota-statement}

Changes a quota.

### Syntax {#alter-quota-syntax}

``` sql
ALTER QUOTA [IF EXISTS] name [ON CLUSTER cluster_name]
    [RENAME TO new_name]
    [KEYED BY {'none' | 'user name' | 'ip address' | 'client key' | 'client key or user name' | 'client key or ip address'}]
    [FOR [RANDOMIZED] INTERVAL number {SECOND | MINUTE | HOUR | DAY | WEEK | MONTH | QUARTER | YEAR}
        {MAX { {QUERIES | ERRORS | RESULT ROWS | RESULT BYTES | READ ROWS | READ BYTES | EXECUTION TIME} = number } [,...] |
        NO LIMITS | TRACKING ONLY} [,...]]
    [TO {role [,...] | ALL | ALL EXCEPT role [,...]}]
```

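A sketch built directly from the grammar above; the quota name `qA`, the limit value, and the role `analyst` are assumptions:

``` sql
-- Key the quota by user name and cap the number of queries per hour for one role.
ALTER QUOTA qA KEYED BY 'user name'
    FOR INTERVAL 1 HOUR MAX QUERIES = 100
    TO analyst;
```
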
## ALTER SETTINGS PROFILE {#alter-settings-profile-statement}

Changes settings profiles.

### Syntax {#alter-settings-profile-syntax}

``` sql
ALTER SETTINGS PROFILE [IF EXISTS] name [ON CLUSTER cluster_name]
    [RENAME TO new_name]
    [SETTINGS variable [= value] [MIN [=] min_value] [MAX [=] max_value] [READONLY|WRITABLE] | INHERIT 'profile_name'] [,...]
```

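For instance, a sketch that constrains `max_threads` inside a profile; the profile name `restricted` and the bounds are assumptions:

``` sql
-- Pin max_threads to 8 within the profile and forbid changing it.
ALTER SETTINGS PROFILE restricted
    SETTINGS max_threads = 8 MIN 1 MAX 8 READONLY;
```
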
[Original article](https://clickhouse.tech/docs/ru/query_language/alter/) <!--hide-->