Merge branch 'master' into fix-whole-text-serialization

commit 2ab091a85b by mergify[bot], 2021-11-24 12:44:53 +00:00, committed by GitHub
No known key found for this signature in database; GPG Key ID: 4AEE18F83AFDEB23
311 changed files with 6937 additions and 1911 deletions


@@ -250,7 +250,7 @@ jobs:
 REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
 CACHES_PATH: ${{runner.temp}}/../ccaches
 CHECK_NAME: 'ClickHouse build check (actions)'
-BUILD_NUMBER: 9
+BUILD_NUMBER: 8
 run: |
 sudo rm -fr $TEMP_PATH
 mkdir -p $TEMP_PATH
@@ -1426,7 +1426,7 @@ jobs:
 env:
 TEMP_PATH: ${{runner.temp}}/unit_tests_ubsan
 REPORTS_PATH: ${{runner.temp}}/reports_dir
-CHECK_NAME: 'Unit tests (msan, actions)'
+CHECK_NAME: 'Unit tests (ubsan, actions)'
 REPO_COPY: ${{runner.temp}}/unit_tests_ubsan/ClickHouse
 run: |
 sudo rm -fr $TEMP_PATH


@@ -181,7 +181,7 @@ jobs:
 REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
 CACHES_PATH: ${{runner.temp}}/../ccaches
 CHECK_NAME: 'ClickHouse build check (actions)'
-BUILD_NUMBER: 9
+BUILD_NUMBER: 8
 run: |
 sudo rm -fr $TEMP_PATH
 mkdir -p $TEMP_PATH

.gitmodules

@@ -140,7 +140,7 @@
 url = https://github.com/ClickHouse-Extras/libc-headers.git
 [submodule "contrib/replxx"]
 path = contrib/replxx
-url = https://github.com/AmokHuginnsson/replxx.git
+url = https://github.com/ClickHouse-Extras/replxx.git
 [submodule "contrib/avro"]
 path = contrib/avro
 url = https://github.com/ClickHouse-Extras/avro.git
@@ -171,12 +171,6 @@
 [submodule "contrib/sentry-native"]
 path = contrib/sentry-native
 url = https://github.com/ClickHouse-Extras/sentry-native.git
-[submodule "contrib/gcem"]
-path = contrib/gcem
-url = https://github.com/kthohr/gcem.git
-[submodule "contrib/stats"]
-path = contrib/stats
-url = https://github.com/kthohr/stats.git
 [submodule "contrib/krb5"]
 path = contrib/krb5
 url = https://github.com/ClickHouse-Extras/krb5


@@ -201,7 +201,7 @@ endif ()
 option(ENABLE_TESTS "Provide unit_test_dbms target with Google.Test unit tests" ON)
 option(ENABLE_EXAMPLES "Build all example programs in 'examples' subdirectories" OFF)
-if (OS_LINUX AND (ARCH_AMD64 OR ARCH_AARCH64) AND NOT UNBUNDLED AND MAKE_STATIC_LIBRARIES AND NOT SPLIT_SHARED_LIBRARIES AND NOT USE_MUSL)
+if (OS_LINUX AND (ARCH_AMD64 OR ARCH_AARCH64) AND MAKE_STATIC_LIBRARIES AND NOT SPLIT_SHARED_LIBRARIES AND NOT USE_MUSL)
 # Only for Linux, x86_64 or aarch64.
 option(GLIBC_COMPATIBILITY "Enable compatibility with older glibc libraries." ON)
 elseif(GLIBC_COMPATIBILITY)
@@ -392,6 +392,8 @@ if (COMPILER_CLANG)
 option(ENABLE_THINLTO "Clang-specific link time optimization" ON)
 endif()
+set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fstrict-vtable-pointers")
 # Set new experimental pass manager, it's a performance, build time and binary size win.
 # Can be removed after https://reviews.llvm.org/D66490 merged and released to at least two versions of clang.
 set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fexperimental-new-pass-manager")
@@ -402,9 +404,9 @@ if (COMPILER_CLANG)
 # completely.
 if (ENABLE_THINLTO AND NOT ENABLE_TESTS AND NOT SANITIZE)
 # Link time optimization
-set (CMAKE_C_FLAGS_RELWITHDEBINFO "${CMAKE_C_FLAGS_RELWITHDEBINFO} -flto=thin")
-set (CMAKE_CXX_FLAGS_RELWITHDEBINFO "${CMAKE_CXX_FLAGS_RELWITHDEBINFO} -flto=thin")
-set (CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO "${CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO} -flto=thin")
+set (CMAKE_C_FLAGS_RELWITHDEBINFO "${CMAKE_C_FLAGS_RELWITHDEBINFO} -flto=thin -fwhole-program-vtables")
+set (CMAKE_CXX_FLAGS_RELWITHDEBINFO "${CMAKE_CXX_FLAGS_RELWITHDEBINFO} -flto=thin -fwhole-program-vtables")
+set (CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO "${CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO} -flto=thin -fwhole-program-vtables")
 elseif (ENABLE_THINLTO)
 message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot enable ThinLTO")
 endif ()
@@ -435,20 +437,7 @@ endif ()
 # Turns on all external libs like s3, kafka, ODBC, ...
 option(ENABLE_LIBRARIES "Enable all external libraries by default" ON)
-# We recommend avoiding this mode for production builds because we can't guarantee
-# all needed libraries exist in your system.
-# This mode exists for enthusiastic developers who are searching for trouble.
-# The whole idea of using unknown version of libraries from the OS distribution is deeply flawed.
-# Useful for maintainers of OS packages.
-option (UNBUNDLED "Use system libraries instead of ones in contrib/" OFF)
-if (UNBUNDLED)
-set(NOT_UNBUNDLED OFF)
-else ()
-set(NOT_UNBUNDLED ON)
-endif ()
-if (UNBUNDLED OR NOT (OS_LINUX OR OS_DARWIN))
+if (NOT (OS_LINUX OR OS_DARWIN))
 # Using system libs can cause a lot of warnings in includes (on macro expansion).
 option(WERROR "Enable -Werror compiler option" OFF)
 else ()
@@ -527,7 +516,6 @@ message (STATUS
 USE_STATIC_LIBRARIES=${USE_STATIC_LIBRARIES}
 MAKE_STATIC_LIBRARIES=${MAKE_STATIC_LIBRARIES}
 SPLIT_SHARED=${SPLIT_SHARED_LIBRARIES}
-UNBUNDLED=${UNBUNDLED}
 CCACHE=${CCACHE_FOUND} ${CCACHE_VERSION}")
 include (GNUInstallDirs)
@@ -590,7 +578,6 @@ include (cmake/find/avro.cmake)
 include (cmake/find/msgpack.cmake)
 include (cmake/find/cassandra.cmake)
 include (cmake/find/sentry.cmake)
-include (cmake/find/stats.cmake)
 include (cmake/find/datasketches.cmake)
 include (cmake/find/libprotobuf-mutator.cmake)
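Note: the UNBUNDLED mode and the derived NOT_UNBUNDLED variable are dropped throughout this commit, so the USE_INTERNAL_*_LIBRARY options now simply default to ON. A minimal sketch of the resulting pattern for a hypothetical submodule contrib/foo (illustration only, not part of this commit):

option (USE_INTERNAL_FOO_LIBRARY "Set to FALSE to use system foo library instead of bundled" ON)
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/foo/src")
    if (USE_INTERNAL_FOO_LIBRARY)
        # same recovery hint the real find-modules print
        message (WARNING "submodule contrib/foo is missing. to fix try run: \n git submodule update --init")
    endif ()
endif ()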


@@ -13,30 +13,21 @@
 #if defined(__linux__)
 #include <sys/prctl.h>
 #endif
-#include <fcntl.h>
 #include <errno.h>
 #include <string.h>
 #include <signal.h>
-#include <cxxabi.h>
 #include <unistd.h>
 #include <typeinfo>
 #include <iostream>
 #include <fstream>
-#include <sstream>
 #include <memory>
 #include <base/scope_guard.h>
-#include <Poco/Observer.h>
-#include <Poco/AutoPtr.h>
-#include <Poco/PatternFormatter.h>
 #include <Poco/Message.h>
 #include <Poco/Util/Application.h>
 #include <Poco/Exception.h>
 #include <Poco/ErrorHandler.h>
-#include <Poco/Condition.h>
-#include <Poco/SyslogChannel.h>
-#include <Poco/DirectoryIterator.h>
 #include <base/logger_useful.h>
 #include <base/ErrorHandlers.h>
@@ -56,7 +47,6 @@
 #include <Common/getMultipleKeysFromConfig.h>
 #include <Common/ClickHouseRevision.h>
 #include <Common/Config/ConfigProcessor.h>
-#include <Common/MemorySanitizer.h>
 #include <Common/SymbolIndex.h>
 #include <Common/getExecutablePath.h>
 #include <Common/getHashOfLoadedBinary.h>


@@ -37,11 +37,3 @@ GraphiteWriter::GraphiteWriter(const std::string & config_name, const std::strin
 root_path += sub_path;
 }
 }
-std::string GraphiteWriter::getPerServerPath(const std::string & server_name, const std::string & root_path)
-{
-std::string path = root_path + "." + server_name;
-std::replace(path.begin() + root_path.size() + 1, path.end(), '.', '_');
-return path;
-}


@@ -8,10 +8,10 @@
 #include <base/logger_useful.h>
-/// пишет в Graphite данные в формате
+/// Writes to Graphite in the following format
 /// path value timestamp\n
-/// path может иметь любую вложенность. Директории разделяются с помощью "."
-/// у нас принят следующий формат path - root_path.server_name.sub_path.key
+/// path can be arbitrary nested. Elements are separated by '.'
+/// Example: root_path.server_name.sub_path.key
 class GraphiteWriter
 {
 public:
@@ -32,8 +32,6 @@ public:
 writeImpl(key_val_vec, timestamp, custom_root_path);
 }
-/// возвращает путь root_path.server_name
-static std::string getPerServerPath(const std::string & server_name, const std::string & root_path = "one_min");
 private:
 template <typename T>
 void writeImpl(const T & data, time_t timestamp, const std::string & custom_root_path)


@@ -1,16 +1,12 @@
 #include "OwnPatternFormatter.h"
 #include <functional>
-#include <optional>
-#include <sys/time.h>
 #include <IO/WriteBufferFromString.h>
 #include <IO/WriteHelpers.h>
 #include <Common/HashTable/Hash.h>
 #include <Interpreters/InternalTextLogsQueue.h>
 #include <Common/CurrentThread.h>
-#include <base/getThreadId.h>
 #include <base/terminalColors.h>
-#include "Loggers.h"
 OwnPatternFormatter::OwnPatternFormatter(bool color_)


@@ -13,10 +13,7 @@ add_library (mysqlxx
 target_include_directories (mysqlxx PUBLIC ..)
-if (USE_INTERNAL_MYSQL_LIBRARY)
-target_include_directories (mysqlxx PUBLIC "${ClickHouse_SOURCE_DIR}/contrib/mariadb-connector-c/include")
-target_include_directories (mysqlxx PUBLIC "${ClickHouse_BINARY_DIR}/contrib/mariadb-connector-c/include")
-else ()
+if (NOT USE_INTERNAL_MYSQL_LIBRARY)
 set(PLATFORM_LIBRARIES ${CMAKE_DL_LIBS})
 if (USE_MYSQL)


@@ -4,7 +4,7 @@ macro(find_contrib_lib LIB_NAME)
 string(TOUPPER ${LIB_NAME} LIB_NAME_UC)
 string(REPLACE "-" "_" LIB_NAME_UC ${LIB_NAME_UC})
-option (USE_INTERNAL_${LIB_NAME_UC}_LIBRARY "Use bundled library ${LIB_NAME} instead of system" ${NOT_UNBUNDLED})
+option (USE_INTERNAL_${LIB_NAME_UC}_LIBRARY "Use bundled library ${LIB_NAME} instead of system" ON)
 if (NOT USE_INTERNAL_${LIB_NAME_UC}_LIBRARY)
 find_package ("${LIB_NAME}")
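For orientation, this macro is invoked from cmake/find/*.cmake files; a hypothetical call (illustration only) now behaves like this:

# Hypothetical caller sketch:
find_contrib_lib (double-conversion)
# defines USE_INTERNAL_DOUBLE_CONVERSION_LIBRARY, which now defaults to ON (bundled);
# configuring with -DUSE_INTERNAL_DOUBLE_CONVERSION_LIBRARY=OFF makes the macro call find_package() instead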


@@ -28,6 +28,9 @@ option (ARCH_NATIVE "Add -march=native compiler flag. This makes your binaries n
 if (ARCH_NATIVE)
 set (COMPILER_FLAGS "${COMPILER_FLAGS} -march=native")
+elseif (ARCH_AARCH64)
+set (COMPILER_FLAGS "${COMPILER_FLAGS} -march=armv8-a+crc")
 else ()
 set (TEST_FLAG "-mssse3")
 set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0")
@@ -43,7 +46,6 @@ else ()
 set (COMPILER_FLAGS "${COMPILER_FLAGS} ${TEST_FLAG}")
 endif ()
 set (TEST_FLAG "-msse4.1")
 set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0")
 check_cxx_source_compiles("


@@ -9,7 +9,7 @@ if (NOT ENABLE_AMQPCPP)
 return()
 endif()
-if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/AMQP-CPP/CMakeLists.txt")
+if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/AMQP-CPP/src")
 message (WARNING "submodule contrib/AMQP-CPP is missing. to fix try run: \n git submodule update --init")
 message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal AMQP-CPP library")
 set (USE_AMQPCPP 0)


@@ -8,10 +8,9 @@ if (NOT ENABLE_AVRO)
 return()
 endif()
-option (USE_INTERNAL_AVRO_LIBRARY
-"Set to FALSE to use system avro library instead of bundled" ON) # TODO: provide unbundled support
-if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/avro/lang/c++/CMakeLists.txt")
+option (USE_INTERNAL_AVRO_LIBRARY "Set to FALSE to use system avro library instead of bundled" ON)
+if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/avro/lang")
 if (USE_INTERNAL_AVRO_LIBRARY)
 message(WARNING "submodule contrib/avro is missing. to fix try run: \n git submodule update --init")
 message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot find internal avro")


@@ -7,12 +7,7 @@ if (NOT ENABLE_BROTLI)
 return()
 endif()
-if (UNBUNDLED)
-# Many system ship only dynamic brotly libraries, so we back off to bundled by default
-option (USE_INTERNAL_BROTLI_LIBRARY "Set to FALSE to use system libbrotli library instead of bundled" ${USE_STATIC_LIBRARIES})
-else()
 option (USE_INTERNAL_BROTLI_LIBRARY "Set to FALSE to use system libbrotli library instead of bundled" ON)
-endif()
 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/brotli/c/include/brotli/decode.h")
 if (USE_INTERNAL_BROTLI_LIBRARY)


@@ -7,9 +7,9 @@ if (NOT ENABLE_CAPNP)
 return()
 endif()
-option (USE_INTERNAL_CAPNP_LIBRARY "Set to FALSE to use system capnproto library instead of bundled" ${NOT_UNBUNDLED})
-if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/capnproto/CMakeLists.txt")
+option (USE_INTERNAL_CAPNP_LIBRARY "Set to FALSE to use system capnproto library instead of bundled" ON)
+if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/capnproto/c++")
 if(USE_INTERNAL_CAPNP_LIBRARY)
 message(WARNING "submodule contrib/capnproto is missing. to fix try run: \n git submodule update --init")
 message(${RECONFIGURE_MESSAGE_LEVEL} "cannot find internal capnproto")


@@ -7,7 +7,7 @@ if (NOT ENABLE_CURL)
 return()
 endif()
-option (USE_INTERNAL_CURL "Use internal curl library" ${NOT_UNBUNDLED})
+option (USE_INTERNAL_CURL "Use internal curl library" ON)
 if (NOT USE_INTERNAL_CURL)
 find_package (CURL)
@@ -22,8 +22,6 @@ if (NOT CURL_FOUND)
 # find_package(CURL) compatibility for the following packages that uses
 # find_package(CURL)/include(FindCURL):
-# - mariadb-connector-c
-# - aws-s3-cmake
 # - sentry-native
 set (CURL_FOUND ON CACHE BOOL "")
 set (CURL_ROOT_DIR ${CURL_LIBRARY_DIR} CACHE PATH "")


@@ -1,4 +1,4 @@
-option (USE_LIBCXX "Use libc++ and libc++abi instead of libstdc++" ${NOT_UNBUNDLED})
+option (USE_LIBCXX "Use libc++ and libc++abi instead of libstdc++" ON)
 if (NOT USE_LIBCXX)
 if (USE_INTERNAL_LIBCXX_LIBRARY)
@@ -10,12 +10,12 @@ if (NOT USE_LIBCXX)
 return()
 endif()
-set(USE_INTERNAL_LIBCXX_LIBRARY_DEFAULT ${NOT_UNBUNDLED})
+set(USE_INTERNAL_LIBCXX_LIBRARY_DEFAULT ON)
 option (USE_INTERNAL_LIBCXX_LIBRARY "Disable to use system libcxx and libcxxabi libraries instead of bundled"
 ${USE_INTERNAL_LIBCXX_LIBRARY_DEFAULT})
-if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libcxx/CMakeLists.txt")
+if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libcxx/src")
 if (USE_INTERNAL_LIBCXX_LIBRARY)
 message(WARNING "submodule contrib/libcxx is missing. to fix try run: \n git submodule update --init")
 message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal libcxx")


@@ -2,7 +2,7 @@ option (ENABLE_DATASKETCHES "Enable DataSketches" ${ENABLE_LIBRARIES})
 if (ENABLE_DATASKETCHES)
-option (USE_INTERNAL_DATASKETCHES_LIBRARY "Set to FALSE to use system DataSketches library instead of bundled" ${NOT_UNBUNDLED})
+option (USE_INTERNAL_DATASKETCHES_LIBRARY "Set to FALSE to use system DataSketches library instead of bundled" ON)
 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/datasketches-cpp/theta/CMakeLists.txt")
 if (USE_INTERNAL_DATASKETCHES_LIBRARY)


@@ -22,7 +22,7 @@ endif()
 # You can set USE_INTERNAL_GRPC_LIBRARY to OFF to force using the external gRPC framework, which should be installed in the system in this case.
 # The external gRPC framework can be installed in the system by running
 # sudo apt-get install libgrpc++-dev protobuf-compiler-grpc
-option(USE_INTERNAL_GRPC_LIBRARY "Set to FALSE to use system gRPC library instead of bundled. (Experimental. Set to OFF on your own risk)" ${NOT_UNBUNDLED})
+option(USE_INTERNAL_GRPC_LIBRARY "Set to FALSE to use system gRPC library instead of bundled. (Experimental. Set to OFF on your own risk)" ON)
 if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/grpc/CMakeLists.txt")
 if(USE_INTERNAL_GRPC_LIBRARY)


@@ -1,6 +1,6 @@
 # included only if ENABLE_TESTS=1
-option (USE_INTERNAL_GTEST_LIBRARY "Set to FALSE to use system Google Test instead of bundled" ${NOT_UNBUNDLED})
+option (USE_INTERNAL_GTEST_LIBRARY "Set to FALSE to use system Google Test instead of bundled" ON)
 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/googletest/googletest/CMakeLists.txt")
 if (USE_INTERNAL_GTEST_LIBRARY)


@@ -12,7 +12,7 @@ if (NOT ENABLE_ICU)
 return()
 endif()
-option (USE_INTERNAL_ICU_LIBRARY "Set to FALSE to use system ICU library instead of bundled" ${NOT_UNBUNDLED})
+option (USE_INTERNAL_ICU_LIBRARY "Set to FALSE to use system ICU library instead of bundled" ON)
 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/icu/icu4c/LICENSE")
 if (USE_INTERNAL_ICU_LIBRARY)


@@ -1,7 +1,3 @@
-if (UNBUNDLED AND USE_STATIC_LIBRARIES)
-set (ENABLE_LDAP OFF CACHE INTERNAL "")
-endif()
 option (ENABLE_LDAP "Enable LDAP" ${ENABLE_LIBRARIES})
 if (NOT ENABLE_LDAP)
@@ -11,7 +7,7 @@ if (NOT ENABLE_LDAP)
 return()
 endif()
-option (USE_INTERNAL_LDAP_LIBRARY "Set to FALSE to use system *LDAP library instead of bundled" ${NOT_UNBUNDLED})
+option (USE_INTERNAL_LDAP_LIBRARY "Set to FALSE to use system *LDAP library instead of bundled" ON)
 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/openldap/README")
 if (USE_INTERNAL_LDAP_LIBRARY)


@@ -7,12 +7,7 @@ if (NOT ENABLE_GSASL_LIBRARY)
 return()
 endif()
-if (UNBUNDLED)
-# when USE_STATIC_LIBRARIES we usually need to pick up hell a lot of dependencies for libgsasl
-option (USE_INTERNAL_LIBGSASL_LIBRARY "Set to FALSE to use system libgsasl library instead of bundled" ${USE_STATIC_LIBRARIES})
-else()
 option (USE_INTERNAL_LIBGSASL_LIBRARY "Set to FALSE to use system libgsasl library instead of bundled" ON)
-endif()
 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libgsasl/src/gsasl.h")
 if (USE_INTERNAL_LIBGSASL_LIBRARY)
@@ -35,7 +30,7 @@ if (LIBGSASL_LIBRARY AND LIBGSASL_INCLUDE_DIR)
 elseif (NOT MISSING_INTERNAL_LIBGSASL_LIBRARY)
 set (LIBGSASL_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/libgsasl/src" "${ClickHouse_SOURCE_DIR}/contrib/libgsasl/linux_x86_64/include")
 set (USE_INTERNAL_LIBGSASL_LIBRARY 1)
-set (LIBGSASL_LIBRARY libgsasl)
+set (LIBGSASL_LIBRARY gsasl)
 endif ()
 if(LIBGSASL_LIBRARY AND LIBGSASL_INCLUDE_DIR)


@@ -4,7 +4,7 @@ if (NOT ENABLE_LIBPQXX)
 return()
 endif()
-if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libpqxx/CMakeLists.txt")
+if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libpqxx/src")
 message (WARNING "submodule contrib/libpqxx is missing. to fix try run: \n git submodule update --init")
 message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal libpqxx library")
 set (USE_LIBPQXX 0)


@@ -1,4 +1,4 @@
-option (USE_INTERNAL_LIBXML2_LIBRARY "Set to FALSE to use system libxml2 library instead of bundled" ${NOT_UNBUNDLED})
+option (USE_INTERNAL_LIBXML2_LIBRARY "Set to FALSE to use system libxml2 library instead of bundled" ON)
 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libxml2/libxml.h")
 if (USE_INTERNAL_LIBXML2_LIBRARY)


@@ -7,7 +7,7 @@ if(NOT ENABLE_MSGPACK)
 return()
 endif()
-option (USE_INTERNAL_MSGPACK_LIBRARY "Set to FALSE to use system msgpack library instead of bundled" ${NOT_UNBUNDLED})
+option (USE_INTERNAL_MSGPACK_LIBRARY "Set to FALSE to use system msgpack library instead of bundled" ON)
 if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/msgpack-c/include/msgpack.hpp")
 if(USE_INTERNAL_MSGPACK_LIBRARY)


@@ -12,7 +12,7 @@ if(NOT ENABLE_MYSQL)
 return()
 endif()
-option(USE_INTERNAL_MYSQL_LIBRARY "Set to FALSE to use system mysqlclient library instead of bundled" ${NOT_UNBUNDLED})
+option(USE_INTERNAL_MYSQL_LIBRARY "Set to FALSE to use system mysqlclient library instead of bundled" ON)
 if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/mariadb-connector-c/README")
 if(USE_INTERNAL_MYSQL_LIBRARY)


@@ -6,7 +6,7 @@ if (NOT USE_INTERNAL_NANODBC_LIBRARY)
 message (FATAL_ERROR "Only the bundled nanodbc library can be used")
 endif ()
-if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/nanodbc/CMakeLists.txt")
+if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/nanodbc/nanodbc")
 message (FATAL_ERROR "submodule contrib/nanodbc is missing. to fix try run: \n git submodule update --init")
 endif()


@@ -13,7 +13,7 @@ if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libstemmer_c/Makefile")
 return()
 endif ()
-if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/wordnet-blast/CMakeLists.txt")
+if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/wordnet-blast/wnb")
 message (WARNING "submodule contrib/wordnet-blast is missing. to fix try run: \n git submodule update --init")
 message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal wordnet-blast library, NLP functions will be disabled")
 set (USE_NLP 0)


@@ -4,7 +4,7 @@ if (NOT ENABLE_NURAFT)
 return()
 endif()
-if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/NuRaft/CMakeLists.txt")
+if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/NuRaft/src")
 message (WARNING "submodule contrib/NuRaft is missing. to fix try run: \n git submodule update --init")
 message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal NuRaft library")
 set (USE_NURAFT 0)


@@ -19,7 +19,7 @@ if (NOT ENABLE_ODBC)
 return()
 endif()
-option (USE_INTERNAL_ODBC_LIBRARY "Use internal ODBC library" ${NOT_UNBUNDLED})
+option (USE_INTERNAL_ODBC_LIBRARY "Use internal ODBC library" ON)
 if (NOT USE_INTERNAL_ODBC_LIBRARY)
 find_library (LIBRARY_ODBC NAMES unixodbc odbc)


@@ -1,5 +1,5 @@
 if (Protobuf_PROTOC_EXECUTABLE)
-option (ENABLE_PARQUET "Enable parquet" ${ENABLE_LIBRARIES})
+option (ENABLE_PARQUET "Enable parquet" ON)
 elseif(ENABLE_PARQUET OR USE_INTERNAL_PARQUET_LIBRARY)
 message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use parquet without protoc executable")
 endif()
@@ -13,7 +13,7 @@ if (NOT ENABLE_PARQUET)
 endif()
 if (NOT OS_FREEBSD) # Freebsd: ../contrib/arrow/cpp/src/arrow/util/bit-util.h:27:10: fatal error: endian.h: No such file or directory
-option(USE_INTERNAL_PARQUET_LIBRARY "Set to FALSE to use system parquet library instead of bundled" ${NOT_UNBUNDLED})
+option(USE_INTERNAL_PARQUET_LIBRARY "Set to FALSE to use system parquet library instead of bundled" ON)
 elseif(USE_INTERNAL_PARQUET_LIBRARY)
 message (${RECONFIGURE_MESSAGE_LEVEL} "Using internal parquet is not supported on freebsd")
 endif()


@@ -11,7 +11,7 @@ endif()
 # You can set USE_INTERNAL_PROTOBUF_LIBRARY to OFF to force using the external protobuf library, which should be installed in the system in this case.
 # The external protobuf library can be installed in the system by running
 # sudo apt-get install libprotobuf-dev protobuf-compiler libprotoc-dev
-option(USE_INTERNAL_PROTOBUF_LIBRARY "Set to FALSE to use system protobuf instead of bundled. (Experimental. Set to OFF on your own risk)" ${NOT_UNBUNDLED})
+option(USE_INTERNAL_PROTOBUF_LIBRARY "Set to FALSE to use system protobuf instead of bundled. (Experimental. Set to OFF on your own risk)" ON)
 if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/protobuf/cmake/CMakeLists.txt")
 if(USE_INTERNAL_PROTOBUF_LIBRARY)


@@ -6,7 +6,7 @@ if(NOT ENABLE_RAPIDJSON)
 return()
 endif()
-option(USE_INTERNAL_RAPIDJSON_LIBRARY "Set to FALSE to use system rapidjson library instead of bundled" ${NOT_UNBUNDLED})
+option(USE_INTERNAL_RAPIDJSON_LIBRARY "Set to FALSE to use system rapidjson library instead of bundled" ON)
 if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/rapidjson/include/rapidjson/rapidjson.h")
 if(USE_INTERNAL_RAPIDJSON_LIBRARY)


@@ -7,9 +7,9 @@ if (NOT ENABLE_RDKAFKA)
 return()
 endif()
-option (USE_INTERNAL_RDKAFKA_LIBRARY "Set to FALSE to use system librdkafka instead of the bundled" ${NOT_UNBUNDLED})
-if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/cppkafka/CMakeLists.txt")
+option (USE_INTERNAL_RDKAFKA_LIBRARY "Set to FALSE to use system librdkafka instead of the bundled" ON)
+if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/cppkafka/src")
 if(USE_INTERNAL_RDKAFKA_LIBRARY)
 message (WARNING "submodule contrib/cppkafka is missing. to fix try run: \n git submodule update --init")
 message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal cppkafka")
@@ -18,7 +18,7 @@ if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/cppkafka/CMakeLists.txt")
 set (MISSING_INTERNAL_CPPKAFKA_LIBRARY 1)
 endif ()
-if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/librdkafka/CMakeLists.txt")
+if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/librdkafka/src")
 if(USE_INTERNAL_RDKAFKA_LIBRARY OR MISSING_INTERNAL_CPPKAFKA_LIBRARY)
 message (WARNING "submodule contrib/librdkafka is missing. to fix try run: \n git submodule update --init")
 message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal rdkafka")
@@ -40,7 +40,7 @@ if (NOT USE_INTERNAL_RDKAFKA_LIBRARY)
 message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system sasl2 library needed for static librdkafka")
 endif()
 endif ()
-set (CPPKAFKA_LIBRARY cppkafka) # TODO: try to use unbundled version.
+set (CPPKAFKA_LIBRARY cppkafka)
 endif ()
 if (RDKAFKA_LIB AND RDKAFKA_INCLUDE_DIR)


@@ -1,6 +1,6 @@
-option (USE_INTERNAL_RE2_LIBRARY "Set to FALSE to use system re2 library instead of bundled [slower]" ${NOT_UNBUNDLED})
-if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/re2/CMakeLists.txt")
+option (USE_INTERNAL_RE2_LIBRARY "Set to FALSE to use system re2 library instead of bundled [slower]" ON)
+if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/re2/re2")
 if(USE_INTERNAL_RE2_LIBRARY)
 message(WARNING "submodule contrib/re2 is missing. to fix try run: \n git submodule update --init")
 message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal re2 library")


@@ -11,9 +11,9 @@ if (NOT ENABLE_ROCKSDB)
 return()
 endif()
-option(USE_INTERNAL_ROCKSDB_LIBRARY "Set to FALSE to use system ROCKSDB library instead of bundled" ${NOT_UNBUNDLED})
-if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/rocksdb/CMakeLists.txt")
+option(USE_INTERNAL_ROCKSDB_LIBRARY "Set to FALSE to use system ROCKSDB library instead of bundled" ON)
+if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/rocksdb/include")
 if (USE_INTERNAL_ROCKSDB_LIBRARY)
 message (WARNING "submodule contrib is missing. to fix try run: \n git submodule update --init")
 message(${RECONFIGURE_MESSAGE_LEVEL} "cannot find internal rocksdb")


@@ -9,7 +9,7 @@ if (NOT EXISTS "${SENTRY_INCLUDE_DIR}/sentry.h")
 return()
 endif ()
-if (NOT OS_FREEBSD AND NOT SPLIT_SHARED_LIBRARIES AND NOT_UNBUNDLED AND NOT (OS_DARWIN AND COMPILER_CLANG))
+if (NOT OS_FREEBSD AND NOT SPLIT_SHARED_LIBRARIES AND NOT (OS_DARWIN AND COMPILER_CLANG))
 option (USE_SENTRY "Use Sentry" ${ENABLE_LIBRARIES})
 set (SENTRY_TRANSPORT "curl" CACHE STRING "")
 set (SENTRY_BACKEND "none" CACHE STRING "")
@@ -18,8 +18,6 @@ if (NOT OS_FREEBSD AND NOT SPLIT_SHARED_LIBRARIES AND NOT_UNBUNDLED AND NOT (OS_
 set (SENTRY_PIC OFF CACHE BOOL "")
 set (BUILD_SHARED_LIBS OFF)
 message (STATUS "Using sentry=${USE_SENTRY}: ${SENTRY_LIBRARY}")
-include_directories("${SENTRY_INCLUDE_DIR}")
 elseif (USE_SENTRY)
 message (${RECONFIGURE_MESSAGE_LEVEL} "Sentry is not supported in current configuration")
 endif ()


@@ -1,4 +1,4 @@
-option(USE_SNAPPY "Enable snappy library" ${ENABLE_LIBRARIES})
+option(USE_SNAPPY "Enable snappy library" ON)
 if(NOT USE_SNAPPY)
 if (USE_INTERNAL_SNAPPY_LIBRARY)
@@ -7,7 +7,7 @@ if(NOT USE_SNAPPY)
 return()
 endif()
-option (USE_INTERNAL_SNAPPY_LIBRARY "Set to FALSE to use system snappy library instead of bundled" ${NOT_UNBUNDLED})
+option (USE_INTERNAL_SNAPPY_LIBRARY "Set to FALSE to use system snappy library instead of bundled" ON)
 if(NOT USE_INTERNAL_SNAPPY_LIBRARY)
 find_library(SNAPPY_LIBRARY snappy)


@@ -9,7 +9,7 @@ if(NOT ENABLE_SSL)
 return()
 endif()
-option(USE_INTERNAL_SSL_LIBRARY "Set to FALSE to use system *ssl library instead of bundled" ${NOT_UNBUNDLED})
+option(USE_INTERNAL_SSL_LIBRARY "Set to FALSE to use system *ssl library instead of bundled" ON)
 if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/boringssl/README.md")
 if(USE_INTERNAL_SSL_LIBRARY)


@@ -1,24 +0,0 @@
-option(ENABLE_STATS "Enable StatsLib library" ${ENABLE_LIBRARIES})
-if (ENABLE_STATS)
-if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/stats")
-message (WARNING "submodule contrib/stats is missing. to fix try run: \n git submodule update --init")
-set (ENABLE_STATS 0)
-set (USE_STATS 0)
-elseif (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/gcem")
-message (WARNING "submodule contrib/gcem is missing. to fix try run: \n git submodule update --init")
-set (ENABLE_STATS 0)
-set (USE_STATS 0)
-else()
-set(STATS_INCLUDE_DIR ${ClickHouse_SOURCE_DIR}/contrib/stats/include)
-set(GCEM_INCLUDE_DIR ${ClickHouse_SOURCE_DIR}/contrib/gcem/include)
-set (USE_STATS 1)
-endif()
-if (NOT USE_STATS)
-message (${RECONFIGURE_MESSAGE_LEVEL} "Can't enable stats library")
-endif()
-endif()
-message (STATUS "Using stats=${USE_STATS} : ${STATS_INCLUDE_DIR}")
-message (STATUS "Using gcem=${USE_STATS}: ${GCEM_INCLUDE_DIR}")


@@ -1,4 +1,4 @@
-option (USE_INTERNAL_XZ_LIBRARY "Set to OFF to use system xz (lzma) library instead of bundled" ${NOT_UNBUNDLED})
+option (USE_INTERNAL_XZ_LIBRARY "Set to OFF to use system xz (lzma) library instead of bundled" ON)
 if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/xz/src/liblzma/api/lzma.h")
 if(USE_INTERNAL_XZ_LIBRARY)


@@ -1,4 +1,4 @@
-option (USE_INTERNAL_ZLIB_LIBRARY "Set to FALSE to use system zlib library instead of bundled" ${NOT_UNBUNDLED})
+option (USE_INTERNAL_ZLIB_LIBRARY "Set to FALSE to use system zlib library instead of bundled" ON)
 if (NOT MSVC)
 set (INTERNAL_ZLIB_NAME "zlib-ng" CACHE INTERNAL "")
@@ -29,9 +29,6 @@ if (NOT USE_INTERNAL_ZLIB_LIBRARY)
 endif ()
 if (NOT ZLIB_FOUND AND NOT MISSING_INTERNAL_ZLIB_LIBRARY)
-# https://github.com/zlib-ng/zlib-ng/pull/733
-# This is disabed by default
-add_compile_definitions(Z_TLS=__thread)
 set (USE_INTERNAL_ZLIB_LIBRARY 1)
 set (ZLIB_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/${INTERNAL_ZLIB_NAME}" "${ClickHouse_BINARY_DIR}/contrib/${INTERNAL_ZLIB_NAME}" CACHE INTERNAL "") # generated zconf.h
 set (ZLIB_INCLUDE_DIRS ${ZLIB_INCLUDE_DIR}) # for poco


@@ -1,4 +1,4 @@
-option (USE_INTERNAL_ZSTD_LIBRARY "Set to FALSE to use system zstd library instead of bundled" ${NOT_UNBUNDLED})
+option (USE_INTERNAL_ZSTD_LIBRARY "Set to FALSE to use system zstd library instead of bundled" ON)
 if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/zstd/lib/zstd.h")
 if(USE_INTERNAL_ZSTD_LIBRARY)


@@ -28,7 +28,7 @@ set(CMAKE_C_STANDARD_LIBRARIES ${DEFAULT_LIBS})
 # glibc-compatibility library relies to constant version of libc headers
 # (because minor changes in function attributes between different glibc versions will introduce incompatibilities)
 # This is for x86_64. For other architectures we have separate toolchains.
-if (ARCH_AMD64 AND NOT_UNBUNDLED AND NOT CMAKE_CROSSCOMPILING)
+if (ARCH_AMD64 AND NOT CMAKE_CROSSCOMPILING)
 set(CMAKE_C_STANDARD_INCLUDE_DIRECTORIES ${ClickHouse_SOURCE_DIR}/contrib/libc-headers/x86_64-linux-gnu ${ClickHouse_SOURCE_DIR}/contrib/libc-headers)
 set(CMAKE_CXX_STANDARD_INCLUDE_DIRECTORIES ${ClickHouse_SOURCE_DIR}/contrib/libc-headers/x86_64-linux-gnu ${ClickHouse_SOURCE_DIR}/contrib/libc-headers)
 endif ()


@@ -7,9 +7,7 @@
 # - sometimes warnings from 3rd party libraries may come from macro substitutions in our code
 # and we have to wrap them with #pragma GCC/clang diagnostic ignored
-if (NOT MSVC)
 set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wextra")
-endif ()
 # Add some warnings that are not available even with -Wall -Wextra -Wpedantic.
 # Intended for exploration of new compiler warnings that may be found useful.


@@ -51,7 +51,7 @@ if (USE_YAML_CPP)
 endif()
 if (USE_INTERNAL_XZ_LIBRARY)
-add_subdirectory (xz)
+add_subdirectory (xz-cmake)
 endif()
 add_subdirectory (poco-cmake)
@@ -64,9 +64,7 @@ if (USE_INTERNAL_ZSTD_LIBRARY)
 endif ()
 if (USE_INTERNAL_RE2_LIBRARY)
-set(RE2_BUILD_TESTING 0 CACHE INTERNAL "")
-add_subdirectory (re2)
-add_subdirectory (re2_st)
+add_subdirectory (re2-cmake)
 endif ()
 if (USE_INTERNAL_DOUBLE_CONVERSION_LIBRARY)
@@ -82,23 +80,10 @@ if (USE_INTERNAL_FARMHASH_LIBRARY)
 endif ()
 if (USE_INTERNAL_ZLIB_LIBRARY)
-set (ZLIB_ENABLE_TESTS 0 CACHE INTERNAL "")
-set (SKIP_INSTALL_ALL 1 CACHE INTERNAL "")
-set (ZLIB_COMPAT 1 CACHE INTERNAL "") # also enables WITH_GZFILEOP
-set (WITH_NATIVE_INSTRUCTIONS ${ARCH_NATIVE} CACHE INTERNAL "")
-if (OS_FREEBSD OR ARCH_I386)
-set (WITH_OPTIM 0 CACHE INTERNAL "") # Bug in assembler
-endif ()
-if (ARCH_AARCH64)
-set(WITH_NEON 1 CACHE INTERNAL "")
-set(WITH_ACLE 1 CACHE INTERNAL "")
-endif ()
+if (INTERNAL_ZLIB_NAME STREQUAL "zlib-ng")
+add_subdirectory (zlib-ng-cmake)
+else ()
 add_subdirectory (${INTERNAL_ZLIB_NAME})
-# We should use same defines when including zlib.h as used when zlib compiled
-target_compile_definitions (zlib PUBLIC ZLIB_COMPAT WITH_GZFILEOP)
-if (ARCH_AMD64 OR ARCH_AARCH64)
-target_compile_definitions (zlib PUBLIC X86_64 UNALIGNED_OK)
 endif ()
 endif ()
@@ -117,28 +102,8 @@ if (USE_INTERNAL_LDAP_LIBRARY)
 add_subdirectory (openldap-cmake)
 endif ()
-function(mysql_support)
-set(CLIENT_PLUGIN_CACHING_SHA2_PASSWORD STATIC)
-set(CLIENT_PLUGIN_SHA256_PASSWORD STATIC)
-set(CLIENT_PLUGIN_REMOTE_IO OFF)
-set(CLIENT_PLUGIN_DIALOG OFF)
-set(CLIENT_PLUGIN_AUTH_GSSAPI_CLIENT OFF)
-set(CLIENT_PLUGIN_CLIENT_ED25519 OFF)
-set(CLIENT_PLUGIN_MYSQL_CLEAR_PASSWORD OFF)
-set(SKIP_TESTS 1)
-if (GLIBC_COMPATIBILITY)
-set(LIBM glibc-compatibility)
-endif()
-if (USE_INTERNAL_ZLIB_LIBRARY)
-set(ZLIB_FOUND ON)
-set(ZLIB_LIBRARY ${ZLIB_LIBRARIES})
-set(WITH_EXTERNAL_ZLIB ON)
-endif()
-set(WITH_CURL OFF)
-add_subdirectory (mariadb-connector-c)
-endfunction()
-if (ENABLE_MYSQL AND USE_INTERNAL_MYSQL_LIBRARY)
-mysql_support()
+if (USE_INTERNAL_MYSQL_LIBRARY)
+add_subdirectory (mariadb-connector-c-cmake)
 endif ()
 if (USE_INTERNAL_RDKAFKA_LIBRARY)
@@ -194,11 +159,7 @@ if (USE_INTERNAL_AVRO_LIBRARY)
 endif()
 if(USE_INTERNAL_GTEST_LIBRARY)
-set(GOOGLETEST_VERSION 1.10.0) # master
-# Google Test from sources
-add_subdirectory(${ClickHouse_SOURCE_DIR}/contrib/googletest/googletest ${CMAKE_CURRENT_BINARY_DIR}/googletest)
-# avoid problems with <regexp.h>
-target_compile_definitions (gtest INTERFACE GTEST_HAS_POSIX_RE=0)
+add_subdirectory(googletest-cmake)
 elseif(GTEST_SRC_DIR)
 add_subdirectory(${GTEST_SRC_DIR}/googletest ${CMAKE_CURRENT_BINARY_DIR}/googletest)
 target_compile_definitions(gtest INTERFACE GTEST_HAS_POSIX_RE=0)
@@ -229,7 +190,7 @@ if (USE_EMBEDDED_COMPILER)
 endif ()
 if (USE_INTERNAL_LIBGSASL_LIBRARY)
-add_subdirectory(libgsasl)
+add_subdirectory(libgsasl-cmake)
 endif()
 if (USE_INTERNAL_LIBXML2_LIBRARY)
@@ -281,14 +242,7 @@ if (USE_AMQPCPP)
 add_subdirectory (amqpcpp-cmake)
 endif()
 if (USE_CASSANDRA)
-# Need to use C++17 since the compilation is not possible with C++20 currently.
-set (CMAKE_CXX_STANDARD_bak ${CMAKE_CXX_STANDARD})
-set (CMAKE_CXX_STANDARD 17)
-add_subdirectory (cassandra)
-set (CMAKE_CXX_STANDARD ${CMAKE_CXX_STANDARD_bak})
-unset (CMAKE_CXX_STANDARD_bak)
+add_subdirectory (cassandra-cmake)
 endif()
 # Should go before:
@@ -296,16 +250,11 @@ endif()
 add_subdirectory (curl-cmake)
 if (USE_SENTRY)
-add_subdirectory (sentry-native)
+add_subdirectory (sentry-native-cmake)
 endif()
 add_subdirectory (fmtlib-cmake)
-if (USE_STATS)
-add_subdirectory (stats-cmake)
-add_subdirectory (gcem)
-endif()
 if (USE_KRB5)
 add_subdirectory (krb5-cmake)
 if (USE_CYRUS_SASL)
@@ -326,7 +275,7 @@ if (USE_NURAFT)
 add_subdirectory(nuraft-cmake)
 endif()
-add_subdirectory(fast_float)
+add_subdirectory(fast_float-cmake)
 if (USE_NLP)
 add_subdirectory(libstemmer-c-cmake)
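The recurring pattern in this file is to replace direct add_subdirectory calls into upstream submodules with thin *-cmake wrapper directories (re2-cmake, zlib-ng-cmake, googletest-cmake, cassandra-cmake, sentry-native-cmake, fast_float-cmake, ...) that hold all ClickHouse-specific build configuration. A minimal sketch of such a wrapper for a hypothetical contrib/foo (illustration only, not part of this commit):

# contrib/foo-cmake/CMakeLists.txt (hypothetical)
set (SRC_DIR "${ClickHouse_SOURCE_DIR}/contrib/foo")
add_library (foo "${SRC_DIR}/src/foo.c")
target_include_directories (foo SYSTEM PUBLIC "${SRC_DIR}/include")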


@@ -417,7 +417,49 @@ set(PARQUET_SRCS
 #list(TRANSFORM PARQUET_SRCS PREPEND "${LIBRARY_DIR}/") # cmake 3.12
 add_library(${PARQUET_LIBRARY} ${PARQUET_SRCS})
 target_include_directories(${PARQUET_LIBRARY} SYSTEM PUBLIC "${ClickHouse_SOURCE_DIR}/contrib/arrow/cpp/src" "${CMAKE_CURRENT_SOURCE_DIR}/cpp/src" PRIVATE ${OPENSSL_INCLUDE_DIR})
+include("${ClickHouse_SOURCE_DIR}/contrib/thrift/build/cmake/ConfigureChecks.cmake") # makes config.h
+set (HAVE_ARPA_INET_H 1)
+set (HAVE_FCNTL_H 1)
+set (HAVE_GETOPT_H 1)
+set (HAVE_INTTYPES_H 1)
+set (HAVE_NETDB_H 1)
+set (HAVE_NETINET_IN_H 1)
+set (HAVE_SIGNAL_H 1)
+set (HAVE_STDINT_H 1)
+set (HAVE_UNISTD_H 1)
+set (HAVE_PTHREAD_H 1)
+set (HAVE_SYS_IOCTL_H 1)
+set (HAVE_SYS_PARAM_H 1)
+set (HAVE_SYS_RESOURCE_H 1)
+set (HAVE_SYS_SOCKET_H 1)
+set (HAVE_SYS_STAT_H 1)
+set (HAVE_SYS_TIME_H 1)
+set (HAVE_SYS_UN_H 1)
+set (HAVE_POLL_H 1)
+set (HAVE_SYS_POLL_H 1)
+set (HAVE_SYS_SELECT_H 1)
+set (HAVE_SCHED_H 1)
+set (HAVE_STRING_H 1)
+set (HAVE_STRINGS_H 1)
+set (HAVE_GETHOSTBYNAME 1)
+set (HAVE_STRERROR_R 1)
+set (HAVE_SCHED_GET_PRIORITY_MAX 1)
+set (HAVE_SCHED_GET_PRIORITY_MIN 1)
+if (OS_LINUX)
+set (STRERROR_R_CHAR_P 1)
+endif ()
+#set(PACKAGE ${PACKAGE_NAME})
+#set(PACKAGE_STRING "${PACKAGE_NAME} ${PACKAGE_VERSION}")
+#set(VERSION ${thrift_VERSION})
+# generate a config.h file
+configure_file("${CMAKE_CURRENT_SOURCE_DIR}/build/cmake/config.h.in" "${CMAKE_CURRENT_BINARY_DIR}/thrift/config.h")
+include_directories("${CMAKE_CURRENT_BINARY_DIR}")
 target_link_libraries(${PARQUET_LIBRARY} PUBLIC ${ARROW_LIBRARY} PRIVATE ${THRIFT_LIBRARY} boost::headers_only boost::regex ${OPENSSL_LIBRARIES})
 if (SANITIZE STREQUAL "undefined")

contrib/base64

@@ -1 +1 @@
-Subproject commit af9b331f2b4f30b41c70f3a571ff904a8251c1d3
+Subproject commit 9499e0c4945589973b9ea1bc927377cfbc84aa46


@@ -1,4 +1,4 @@
-option (USE_INTERNAL_BOOST_LIBRARY "Use internal Boost library" ${NOT_UNBUNDLED})
+option (USE_INTERNAL_BOOST_LIBRARY "Use internal Boost library" ON)
 if (NOT USE_INTERNAL_BOOST_LIBRARY)
 # 1.70 like in contrib/boost


@@ -0,0 +1,127 @@
# Need to use C++17 since the compilation is not possible with C++20 currently.
set (CMAKE_CXX_STANDARD 17)
set(CASS_ROOT_DIR ${CMAKE_SOURCE_DIR}/contrib/cassandra)
set(CASS_SRC_DIR "${CASS_ROOT_DIR}/src")
set(CASS_INCLUDE_DIR "${CASS_ROOT_DIR}/include")
# Ensure functions/modules are available
list(APPEND CMAKE_MODULE_PATH ${CASS_ROOT_DIR}/cmake)
set(CASS_BUILD_SHARED 1)
set(CASS_BUILD_STATIC 1)
set(CASS_USE_KERBEROS 0)
set(CASS_USE_LIBSSH2 0)
set(CASS_USE_OPENSSL 1)
set(CASS_USE_STD_ATOMIC 1)
set(CASS_USE_ZLIB 1)
file(GLOB SOURCES ${CASS_SRC_DIR}/*.cpp)
if(APPLE)
list(REMOVE_ITEM SOURCES ${CASS_SRC_DIR}/get_time-unix.cpp ${CASS_SRC_DIR}/get_time-win.cpp)
elseif(UNIX)
list(REMOVE_ITEM SOURCES ${CASS_SRC_DIR}/get_time-mac.cpp ${CASS_SRC_DIR}/get_time-win.cpp)
elseif(WIN32)
list(REMOVE_ITEM SOURCES ${CASS_SRC_DIR}/get_time-mac.cpp ${CASS_SRC_DIR}/get_time-unix.cpp)
endif()
if(CASS_USE_OPENSSL)
list(APPEND INCLUDE_DIRS ${CASS_SRC_DIR}/ssl)
list(APPEND SOURCES ${CASS_SRC_DIR}/ssl/ssl_openssl_impl.cpp ${CASS_SRC_DIR}/ssl/ring_buffer_bio.cpp)
else()
list(APPEND SOURCES ${CASS_SRC_DIR}/ssl/ssl_no_impl.cpp)
endif()
if(CASS_USE_KERBEROS)
list(APPEND INCLUDE_DIRS ${CASS_SRC_DIR}/gssapi)
list(APPEND SOURCES ${CASS_SRC_DIR}/gssapi/dse_auth_gssapi.cpp ${CASS_SRC_DIR}/gssapi/dse_auth_gssapi.hpp)
endif()
list(APPEND SOURCES ${CASS_SRC_DIR}/atomic/atomic_std.hpp)
add_library(curl_hostcheck OBJECT ${CASS_SRC_DIR}/third_party/curl/hostcheck.cpp)
add_library(hdr_histogram OBJECT ${CASS_SRC_DIR}/third_party/hdr_histogram/hdr_histogram.cpp)
add_library(http-parser OBJECT ${CASS_SRC_DIR}/third_party/http-parser/http_parser.c)
add_library(minizip OBJECT
${CASS_SRC_DIR}/third_party/minizip/ioapi.c
${CASS_SRC_DIR}/third_party/minizip/zip.c
${CASS_SRC_DIR}/third_party/minizip/unzip.c)
target_link_libraries(minizip zlib)
target_compile_definitions(minizip PRIVATE "-Dz_crc_t=unsigned long")
list(APPEND INCLUDE_DIRS
${CASS_SRC_DIR}/third_party/curl
${CASS_SRC_DIR}/third_party/hdr_histogram
${CASS_SRC_DIR}/third_party/http-parser
${CASS_SRC_DIR}/third_party/minizip
${CASS_SRC_DIR}/third_party/mt19937_64
${CASS_SRC_DIR}/third_party/rapidjson/rapidjson
${CASS_SRC_DIR}/third_party/sparsehash/src)
list(APPEND INCLUDE_DIRS ${CASS_INCLUDE_DIR} ${CASS_SRC_DIR})
set(HASH_FUN_H "functional")
set(HASH_NAME hash)
set(HASH_NAMESPACE "std")
set(HAVE_INTTYPES_H 1)
set(HAVE_STDINT_H 1)
set(HAVE_SYS_TYPES_H 1)
set(HAVE_MEMCPY 1)
set(HAVE_LONG_LONG 1)
set(HAVE_UINT16_T 1)
configure_file("${CASS_SRC_DIR}/third_party/sparsehash/config.h.cmake" "${CMAKE_CURRENT_BINARY_DIR}/sparsehash/internal/sparseconfig.h")
# Determine random availability
if (OS_LINUX)
#set (HAVE_GETRANDOM 1) - not on every Linux kernel
elseif (OS_FREEBSD OR OS_DARWIN)
set (HAVE_ARC4RANDOM 1)
endif ()
# Determine if sigpipe is available
if (OS_LINUX)
set (HAVE_SIGTIMEDWAIT 1)
else (OS_FREEBSD OR OS_DARWIN)
set (HAVE_NOSIGPIPE 1)
endif()
set (HAVE_BUILTIN_BSWAP32 1)
set (HAVE_BUILTIN_BSWAP64 1)
set(HAVE_BOOST_ATOMIC 0)
set(HAVE_STD_ATOMIC 1)
set(HAVE_KERBEROS ${CASS_USE_KERBEROS})
set(HAVE_OPENSSL ${CASS_USE_OPENSSL})
set(HAVE_ZLIB ${CASS_USE_ZLIB})
# Generate the driver_config.hpp file
configure_file(
${CASS_ROOT_DIR}/driver_config.hpp.in
${CMAKE_CURRENT_BINARY_DIR}/driver_config.hpp)
add_library(cassandra
${SOURCES}
$<TARGET_OBJECTS:curl_hostcheck>
$<TARGET_OBJECTS:hdr_histogram>
$<TARGET_OBJECTS:http-parser>
$<TARGET_OBJECTS:minizip>)
target_link_libraries(cassandra zlib)
add_library(cassandra_static ALIAS cassandra)
target_include_directories(cassandra PRIVATE ${CMAKE_CURRENT_BINARY_DIR} ${INCLUDE_DIRS})
target_compile_definitions(cassandra PRIVATE CASS_BUILDING)
target_link_libraries(cassandra uv)
if(CASS_USE_OPENSSL)
target_link_libraries(cassandra ssl)
endif()
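For orientation, a consumer target would link the driver built by this wrapper roughly as follows (sketch only; my_target is a hypothetical name):

if (USE_CASSANDRA)
    target_link_libraries (my_target PRIVATE cassandra)   # the target defined by this wrapper
endif ()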


@@ -1,4 +1,4 @@
-option (USE_INTERNAL_CCTZ_LIBRARY "Use internal cctz library" ${NOT_UNBUNDLED})
+option (USE_INTERNAL_CCTZ_LIBRARY "Use internal cctz library" ON)
 if (NOT USE_INTERNAL_CCTZ_LIBRARY)
 find_library (LIBRARY_CCTZ cctz)


@@ -0,0 +1,2 @@
add_library(fast_float INTERFACE)
target_include_directories(fast_float INTERFACE "${ClickHouse_SOURCE_DIR}/contrib/fast_float/include/")
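fast_float is header-only, so the wrapper exposes it as an INTERFACE target; a consumer picks up the headers roughly like this (sketch; my_target is a hypothetical name):

target_link_libraries (my_target PRIVATE fast_float)   # propagates contrib/fast_float/include through the INTERFACE include directory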

contrib/gcem

@@ -1 +0,0 @@
-Subproject commit 8d4f1b5d76ea8f6ff12f3f4f34cda45424556b00


@@ -0,0 +1,11 @@
set (SRC_DIR "${ClickHouse_SOURCE_DIR}/contrib/googletest/googletest")
add_library(gtest "${SRC_DIR}/src/gtest-all.cc")
set_target_properties(gtest PROPERTIES VERSION "1.0.0")
target_compile_definitions (gtest INTERFACE GTEST_HAS_POSIX_RE=0)
target_include_directories(gtest SYSTEM PUBLIC "${SRC_DIR}/include")
target_include_directories(gtest PRIVATE "${SRC_DIR}")
add_library(gtest_main "${SRC_DIR}/src/gtest_main.cc")
set_target_properties(gtest_main PROPERTIES VERSION "1.0.0")
target_link_libraries(gtest_main PUBLIC gtest)
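A unit-test binary would link against these bundled targets roughly as follows (sketch; the executable and source names are hypothetical):

add_executable (unit_tests_example example_test.cpp)
target_link_libraries (unit_tests_example PRIVATE gtest_main)   # gtest_main already links gtest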


@@ -17,7 +17,7 @@ if (NOT ENABLE_HYPERSCAN)
 return()
 endif()
-option (USE_INTERNAL_HYPERSCAN_LIBRARY "Use internal hyperscan library" ${NOT_UNBUNDLED})
+option (USE_INTERNAL_HYPERSCAN_LIBRARY "Use internal hyperscan library" ON)
 if (NOT USE_INTERNAL_HYPERSCAN_LIBRARY)
 find_library (LIBRARY_HYPERSCAN hs)


@@ -0,0 +1,107 @@
set (SRC_DIR "${ClickHouse_SOURCE_DIR}/contrib/libgsasl")
set(SRCS
${SRC_DIR}/gl/gc-gnulib.c
${SRC_DIR}/gl/printf-parse.c
${SRC_DIR}/gl/c-ctype.c
${SRC_DIR}/gl/float.c
${SRC_DIR}/gl/printf-args.c
${SRC_DIR}/gl/hmac-sha1.c
${SRC_DIR}/gl/itold.c
${SRC_DIR}/gl/hmac-md5.c
${SRC_DIR}/gl/gc-pbkdf2-sha1.c
${SRC_DIR}/gl/md5.c
${SRC_DIR}/gl/base64.c
${SRC_DIR}/gl/memxor.c
${SRC_DIR}/gl/sha1.c
${SRC_DIR}/openid20/client.c
${SRC_DIR}/openid20/mechinfo.c
${SRC_DIR}/openid20/server.c
${SRC_DIR}/anonymous/client.c
${SRC_DIR}/anonymous/mechinfo.c
${SRC_DIR}/anonymous/server.c
${SRC_DIR}/saml20/client.c
${SRC_DIR}/saml20/mechinfo.c
${SRC_DIR}/saml20/server.c
${SRC_DIR}/scram/parser.c
${SRC_DIR}/scram/printer.c
${SRC_DIR}/scram/tokens.c
${SRC_DIR}/scram/client.c
${SRC_DIR}/scram/mechinfo.c
${SRC_DIR}/scram/server.c
${SRC_DIR}/scram/validate.c
${SRC_DIR}/src/free.c
${SRC_DIR}/src/supportp.c
${SRC_DIR}/src/init.c
${SRC_DIR}/src/mechtools.c
${SRC_DIR}/src/error.c
${SRC_DIR}/src/property.c
${SRC_DIR}/src/done.c
${SRC_DIR}/src/callback.c
${SRC_DIR}/src/xstart.c
${SRC_DIR}/src/xfinish.c
${SRC_DIR}/src/version.c
${SRC_DIR}/src/xstep.c
${SRC_DIR}/src/mechname.c
${SRC_DIR}/src/xcode.c
${SRC_DIR}/src/crypto.c
${SRC_DIR}/src/doxygen.c
${SRC_DIR}/src/suggest.c
${SRC_DIR}/src/saslprep.c
${SRC_DIR}/src/listmech.c
${SRC_DIR}/src/register.c
${SRC_DIR}/src/base64.c
${SRC_DIR}/src/md5pwd.c
${SRC_DIR}/external/client.c
${SRC_DIR}/external/mechinfo.c
${SRC_DIR}/external/server.c
${SRC_DIR}/securid/client.c
${SRC_DIR}/securid/mechinfo.c
${SRC_DIR}/securid/server.c
${SRC_DIR}/plain/client.c
${SRC_DIR}/plain/mechinfo.c
${SRC_DIR}/plain/server.c
${SRC_DIR}/cram-md5/client.c
${SRC_DIR}/cram-md5/challenge.c
${SRC_DIR}/cram-md5/mechinfo.c
${SRC_DIR}/cram-md5/server.c
${SRC_DIR}/cram-md5/digest.c
${SRC_DIR}/digest-md5/client.c
${SRC_DIR}/digest-md5/digesthmac.c
${SRC_DIR}/digest-md5/free.c
${SRC_DIR}/digest-md5/getsubopt.c
${SRC_DIR}/digest-md5/mechinfo.c
${SRC_DIR}/digest-md5/nonascii.c
${SRC_DIR}/digest-md5/parser.c
${SRC_DIR}/digest-md5/printer.c
${SRC_DIR}/digest-md5/qop.c
${SRC_DIR}/digest-md5/server.c
${SRC_DIR}/digest-md5/session.c
${SRC_DIR}/digest-md5/test-parser.c
${SRC_DIR}/digest-md5/validate.c
${SRC_DIR}/login/client.c
${SRC_DIR}/login/mechinfo.c
${SRC_DIR}/login/server.c
)
if (USE_KRB5)
set(SRCS ${SRCS}
${SRC_DIR}/gssapi/client.c
${SRC_DIR}/gssapi/mechinfo.c
${SRC_DIR}/gssapi/server.c)
endif()
add_library(gsasl ${SRCS})
target_include_directories(gsasl PUBLIC ${SRC_DIR})
target_include_directories(gsasl PUBLIC ${SRC_DIR}/gl)
target_include_directories(gsasl PUBLIC ${SRC_DIR}/src)
target_include_directories(gsasl PUBLIC ${SRC_DIR}/digest-md5)
target_include_directories(gsasl PRIVATE "${ClickHouse_SOURCE_DIR}/contrib/libgsasl-cmake/linux_x86_64/include")
target_compile_definitions (gsasl PRIVATE HAVE_CONFIG_H=1)
if (USE_KRB5)
target_link_libraries(gsasl PUBLIC ${KRB5_LIBRARY})
target_compile_definitions (gsasl PRIVATE HAVE_GSSAPI_H=1 USE_GSSAPI=1)
endif()

File diff suppressed because it is too large

View File

@ -1,23 +1,4 @@
-if (ENABLE_PROTOBUF AND NOT USE_INTERNAL_PROTOBUF_LIBRARY)
-option(PROTOBUF_OLD_ABI_COMPAT "Set to ON for compatiability with external protobuf which was compiled old C++ ABI" OFF)
-endif()
-if (PROTOBUF_OLD_ABI_COMPAT)
-if (NOT ENABLE_PROTOBUF OR USE_INTERNAL_PROTOBUF_LIBRARY)
-message (${RECONFIGURE_MESSAGE_LEVEL} "PROTOBUF_OLD_ABI_COMPAT option is ignored")
-endif()
-endif()
-if (NOT USE_INTERNAL_PROTOBUF_LIBRARY AND PROTOBUF_OLD_ABI_COMPAT)
-# compatiable with protobuf which was compiled old C++ ABI
-set(CMAKE_CXX_FLAGS "-D_GLIBCXX_USE_CXX11_ABI=0")
-set(CMAKE_C_FLAGS "")
-if (NOT (CMAKE_VERSION VERSION_LESS "3.8.0"))
-unset(CMAKE_CXX_STANDARD)
-endif ()
-endif()
-if (${ENABLE_LIBRARIES} AND ${ENABLE_KRB5})
+if (${ENABLE_KRB5})
SET(WITH_KERBEROS 1)
else()
SET(WITH_KERBEROS 0)
@ -46,9 +27,7 @@ set(PROTO_FILES
"${HDFS3_SOURCE_DIR}/proto/datatransfer.proto" "${HDFS3_SOURCE_DIR}/proto/datatransfer.proto"
) )
if(USE_PROTOBUF)
PROTOBUF_GENERATE_CPP(PROTO_SOURCES PROTO_HEADERS ${PROTO_FILES}) PROTOBUF_GENERATE_CPP(PROTO_SOURCES PROTO_HEADERS ${PROTO_FILES})
endif()
configure_file("${HDFS3_SOURCE_DIR}/platform.h.in" "${CMAKE_CURRENT_BINARY_DIR}/platform.h") configure_file("${HDFS3_SOURCE_DIR}/platform.h.in" "${CMAKE_CURRENT_BINARY_DIR}/platform.h")
@ -108,95 +87,14 @@ set(SRCS
"${HDFS3_SOURCE_DIR}/common/Hash.cpp" "${HDFS3_SOURCE_DIR}/common/Hash.cpp"
"${HDFS3_SOURCE_DIR}/common/SWCrc32c.cpp" "${HDFS3_SOURCE_DIR}/common/SWCrc32c.cpp"
"${HDFS3_SOURCE_DIR}/common/Thread.cpp" "${HDFS3_SOURCE_DIR}/common/Thread.cpp"
${PROTO_SOURCES}
"${HDFS3_SOURCE_DIR}/network/TcpSocket.h"
"${HDFS3_SOURCE_DIR}/network/BufferedSocketReader.h"
"${HDFS3_SOURCE_DIR}/network/Socket.h"
"${HDFS3_SOURCE_DIR}/network/DomainSocket.h"
"${HDFS3_SOURCE_DIR}/network/Syscall.h"
"${HDFS3_SOURCE_DIR}/client/InputStreamImpl.h"
"${HDFS3_SOURCE_DIR}/client/FileSystem.h"
"${HDFS3_SOURCE_DIR}/client/ReadShortCircuitInfo.h"
"${HDFS3_SOURCE_DIR}/client/InputStreamInter.h"
"${HDFS3_SOURCE_DIR}/client/FileSystemImpl.h"
"${HDFS3_SOURCE_DIR}/client/PacketPool.h"
"${HDFS3_SOURCE_DIR}/client/Pipeline.h"
"${HDFS3_SOURCE_DIR}/client/OutputStreamInter.h"
"${HDFS3_SOURCE_DIR}/client/RemoteBlockReader.h"
"${HDFS3_SOURCE_DIR}/client/Token.h"
"${HDFS3_SOURCE_DIR}/client/KerberosName.h"
"${HDFS3_SOURCE_DIR}/client/DirectoryIterator.h"
"${HDFS3_SOURCE_DIR}/client/hdfs.h"
"${HDFS3_SOURCE_DIR}/client/FileSystemStats.h"
"${HDFS3_SOURCE_DIR}/client/FileSystemKey.h"
"${HDFS3_SOURCE_DIR}/client/DataTransferProtocolSender.h"
"${HDFS3_SOURCE_DIR}/client/Packet.h"
"${HDFS3_SOURCE_DIR}/client/PacketHeader.h"
"${HDFS3_SOURCE_DIR}/client/FileSystemInter.h"
"${HDFS3_SOURCE_DIR}/client/LocalBlockReader.h"
"${HDFS3_SOURCE_DIR}/client/TokenInternal.h"
"${HDFS3_SOURCE_DIR}/client/InputStream.h"
"${HDFS3_SOURCE_DIR}/client/PipelineAck.h"
"${HDFS3_SOURCE_DIR}/client/BlockReader.h"
"${HDFS3_SOURCE_DIR}/client/Permission.h"
"${HDFS3_SOURCE_DIR}/client/OutputStreamImpl.h"
"${HDFS3_SOURCE_DIR}/client/LeaseRenewer.h"
"${HDFS3_SOURCE_DIR}/client/UserInfo.h"
"${HDFS3_SOURCE_DIR}/client/PeerCache.h"
"${HDFS3_SOURCE_DIR}/client/OutputStream.h"
"${HDFS3_SOURCE_DIR}/client/FileStatus.h"
"${HDFS3_SOURCE_DIR}/client/DataTransferProtocol.h"
"${HDFS3_SOURCE_DIR}/client/BlockLocation.h"
"${HDFS3_SOURCE_DIR}/rpc/RpcConfig.h"
"${HDFS3_SOURCE_DIR}/rpc/SaslClient.h"
"${HDFS3_SOURCE_DIR}/rpc/RpcAuth.h"
"${HDFS3_SOURCE_DIR}/rpc/RpcClient.h"
"${HDFS3_SOURCE_DIR}/rpc/RpcCall.h"
"${HDFS3_SOURCE_DIR}/rpc/RpcContentWrapper.h"
"${HDFS3_SOURCE_DIR}/rpc/RpcProtocolInfo.h"
"${HDFS3_SOURCE_DIR}/rpc/RpcRemoteCall.h"
"${HDFS3_SOURCE_DIR}/rpc/RpcServerInfo.h"
"${HDFS3_SOURCE_DIR}/rpc/RpcChannel.h"
"${HDFS3_SOURCE_DIR}/rpc/RpcChannelKey.h"
"${HDFS3_SOURCE_DIR}/server/BlockLocalPathInfo.h"
"${HDFS3_SOURCE_DIR}/server/LocatedBlocks.h"
"${HDFS3_SOURCE_DIR}/server/DatanodeInfo.h"
"${HDFS3_SOURCE_DIR}/server/RpcHelper.h"
"${HDFS3_SOURCE_DIR}/server/ExtendedBlock.h"
"${HDFS3_SOURCE_DIR}/server/NamenodeInfo.h"
"${HDFS3_SOURCE_DIR}/server/NamenodeImpl.h"
"${HDFS3_SOURCE_DIR}/server/LocatedBlock.h"
"${HDFS3_SOURCE_DIR}/server/NamenodeProxy.h"
"${HDFS3_SOURCE_DIR}/server/Datanode.h"
"${HDFS3_SOURCE_DIR}/server/Namenode.h"
"${HDFS3_SOURCE_DIR}/common/XmlConfig.h"
"${HDFS3_SOURCE_DIR}/common/Logger.h"
"${HDFS3_SOURCE_DIR}/common/WriteBuffer.h"
"${HDFS3_SOURCE_DIR}/common/HWCrc32c.h"
"${HDFS3_SOURCE_DIR}/common/Checksum.h"
"${HDFS3_SOURCE_DIR}/common/SessionConfig.h"
"${HDFS3_SOURCE_DIR}/common/Unordered.h"
"${HDFS3_SOURCE_DIR}/common/BigEndian.h"
"${HDFS3_SOURCE_DIR}/common/Thread.h"
"${HDFS3_SOURCE_DIR}/common/StackPrinter.h"
"${HDFS3_SOURCE_DIR}/common/Exception.h"
"${HDFS3_SOURCE_DIR}/common/WritableUtils.h"
"${HDFS3_SOURCE_DIR}/common/StringUtil.h"
"${HDFS3_SOURCE_DIR}/common/LruMap.h"
"${HDFS3_SOURCE_DIR}/common/Function.h"
"${HDFS3_SOURCE_DIR}/common/DateTime.h"
"${HDFS3_SOURCE_DIR}/common/Hash.h"
"${HDFS3_SOURCE_DIR}/common/SWCrc32c.h"
"${HDFS3_SOURCE_DIR}/common/ExceptionInternal.h"
"${HDFS3_SOURCE_DIR}/common/Memory.h"
"${HDFS3_SOURCE_DIR}/common/FileWrapper.h"
)
# old kernels (< 3.17) doesn't have SYS_getrandom. Always use POSIX implementation to have better compatibility
set_source_files_properties("${HDFS3_SOURCE_DIR}/rpc/RpcClient.cpp" PROPERTIES COMPILE_FLAGS "-DBOOST_UUID_RANDOM_PROVIDER_FORCE_POSIX=1")
# target
-add_library(hdfs3 ${SRCS} ${PROTO_SOURCES} ${PROTO_HEADERS})
+add_library(hdfs3 ${SRCS})
if(USE_INTERNAL_PROTOBUF_LIBRARY)
add_dependencies(hdfs3 protoc)
@ -218,6 +116,7 @@ target_link_libraries(hdfs3 PRIVATE ${LIBXML2_LIBRARIES})
# inherit from parent cmake
target_include_directories(hdfs3 PRIVATE ${Protobuf_INCLUDE_DIR})
target_link_libraries(hdfs3 PRIVATE ${Protobuf_LIBRARY} boost::headers_only)
if(OPENSSL_INCLUDE_DIR AND OPENSSL_LIBRARIES)
target_include_directories(hdfs3 PRIVATE ${OPENSSL_INCLUDE_DIR})
target_link_libraries(hdfs3 PRIVATE ${OPENSSL_LIBRARIES})

View File

@ -66,7 +66,7 @@
#cmakedefine WITH_SASL_OAUTHBEARER 1
#cmakedefine WITH_SASL_CYRUS 1
// crc32chw
-#if !defined(__PPC__) && !defined(__riscv) && (!defined(__aarch64__) || defined(__ARM_FEATURE_CRC32)) && !(defined(__aarch64__) && defined(__APPLE__))
+#if !defined(__PPC__) && !defined(__riscv) && !defined(__aarch64__)
#define WITH_CRC32C_HW 1
#endif
// regex

View File

@ -1,4 +1,4 @@
-option (USE_INTERNAL_LZ4_LIBRARY "Use internal lz4 library" ${NOT_UNBUNDLED})
+option (USE_INTERNAL_LZ4_LIBRARY "Use internal lz4 library" ON)
if (NOT USE_INTERNAL_LZ4_LIBRARY)
find_library (LIBRARY_LZ4 lz4)

View File

@ -0,0 +1,243 @@
if (GLIBC_COMPATIBILITY)
set(LIBM glibc-compatibility)
endif()
# This is the LGPL libmariadb project.
set(CC_SOURCE_DIR ${CMAKE_SOURCE_DIR}/contrib/mariadb-connector-c)
set(CC_BINARY_DIR ${CMAKE_CURRENT_BINARY_DIR})
set(WITH_SSL ON)
set(MARIADB_CONNECTOR_C_COPYRIGHT "2013-2017 MariaDB Corporation Ab")
set(PROTOCOL_VERSION 10) # we adapted new password option from PHP's mysqlnd !
# if C/C is build as subproject inside MariaDB server tree we will
# use the version defined by server
if(MAJOR_VERSION)
set(MARIADB_CLIENT_VERSION_MAJOR ${MAJOR_VERSION})
set(MARIADB_CLIENT_VERSION_MINOR ${MINOR_VERSION})
set(MARIADB_CLIENT_VERSION_PATCH ${PATCH_VERSION})
set(MARIADB_CLIENT_VERSION_EXTRA ${EXTRA_VERSION})
else()
set(MARIADB_CLIENT_VERSION_MAJOR "10")
set(MARIADB_CLIENT_VERSION_MINOR "4")
set(MARIADB_CLIENT_VERSION_PATCH "3")
set(MARIADB_CLIENT_VERSION_EXTRA "")
endif()
set(MARIADB_CLIENT_VERSION "${MARIADB_CLIENT_VERSION_MAJOR}.${MARIADB_CLIENT_VERSION_MINOR}.${MARIADB_CLIENT_VERSION_PATCH}${MARIADB_CLIENT_VERSION_EXTRA}")
set(MARIADB_BASE_VERSION "mariadb-${MARIADB_CLIENT_VERSION_MAJOR}.${MARIADB_CLIENT_VERSION_MINOR}")
MATH(EXPR MARIADB_VERSION_ID "${MARIADB_CLIENT_VERSION_MAJOR} * 10000 +
${MARIADB_CLIENT_VERSION_MINOR} * 100 +
${MARIADB_CLIENT_VERSION_PATCH}")
IF (NOT MARIADB_PORT)
set(MARIADB_PORT 3306)
ENDIF ()
if(NOT MARIADB_UNIX_ADDR)
set(MARIADB_UNIX_ADDR "/tmp/mysql.sock")
endif()
set(HAVE_ALLOCA_H 1)
set(HAVE_ARPA_INET_H 1)
set(HAVE_DLFCN_H 1)
set(HAVE_FCNTL_H 1)
set(HAVE_FLOAT_H 1)
set(HAVE_LIMITS_H 1)
set(HAVE_PWD_H 1)
set(HAVE_SCHED_H 1)
set(HAVE_SELECT_H 0)
set(INCLUDE_SIGNAL 1)
set(HAVE_SIGNAL 1)
set(HAVE_STDDEF_H 1)
set(HAVE_STDINT_H 1)
set(HAVE_STDLIB_H 1)
set(HAVE_STRING_H 1)
set(HAVE_STRINGS_H 1)
set(HAVE_SYS_IOCTL_H 1)
set(HAVE_SYS_SELECT_H 1)
set(HAVE_SYS_SOCKET_H 1)
set(HAVE_SYS_TYPES_H 1)
set(HAVE_SYS_UN_H 1)
set(HAVE_UNISTD_H 1)
set(HAVE_UTIME_H 1)
set(HAVE_UCONTEXT_H 1)
set(HAVE_ALLOCA 1)
set(HAVE_DLERROR 0)
set(HAVE_DLOPEN 0)
set(HAVE_FCNTL 1)
set(HAVE_MEMCPY 1)
set(HAVE_NL_LANGINFO 0)
set(HAVE_SETLOCALE 0)
set(HAVE_POLL 1)
set(SIZEOF_CHARP 8)
set(SIZEOF_INT 4)
set(SIZEOF_LONG 8)
set(SIZEOF_LONG_LONG 8)
set(SIZEOF_SIZE_T 8)
set(SOCKET_SIZE_TYPE socklen_t)
set(SYSTEM_LIBS ${SYSTEM_LIBS} zlib)
if(CMAKE_HAVE_PTHREAD_H)
set(CMAKE_REQUIRED_INCLUDES pthread.h)
endif()
add_definitions(-DMARIADB_SYSTEM_TYPE="${CMAKE_SYSTEM_NAME}")
add_definitions(-DMARIADB_MACHINE_TYPE="${CMAKE_SYSTEM_PROCESSOR}")
set(HAVE_THREADS 1)
set(DEFAULT_CHARSET "utf8mb4")
add_definitions(-DHAVE_OPENSSL -DHAVE_TLS)
set(SSL_LIBRARIES ${OPENSSL_SSL_LIBRARY} ${OPENSSL_CRYPTO_LIBRARY})
include_directories(BEFORE ${OPENSSL_INCLUDE_DIR})
set(TLS_LIBRARY_VERSION "OpenSSL ${OPENSSL_VERSION}")
set(ENABLED_LOCAL_INFILE OFF)
CONFIGURE_FILE(${CC_SOURCE_DIR}/include/ma_config.h.in
${CC_BINARY_DIR}/include-private/ma_config.h)
CONFIGURE_FILE(${CC_SOURCE_DIR}/include/ma_config.h.in
${CC_BINARY_DIR}/include-private/config.h)
CONFIGURE_FILE(${CC_SOURCE_DIR}/include/mariadb_version.h.in
${CC_BINARY_DIR}/include-public/mariadb_version.h)
if(WITH_SSL)
set(SYSTEM_LIBS ${SYSTEM_LIBS} ${SSL_LIBRARIES})
endif()
function(REGISTER_PLUGIN)
SET(one_value_keywords TARGET TYPE)
SET(multi_value_keywords SOURCES)
cmake_parse_arguments(CC_PLUGIN
"${options}"
"${one_value_keywords}"
"${multi_value_keywords}"
${ARGN})
# overwrite default if it was specified with cmake option
string(TOUPPER ${CC_PLUGIN_TARGET} cc_plugin)
if(NOT "${CLIENT_PLUGIN_${cc_plugin}}" STREQUAL "")
SET(CC_PLUGIN_DEFAULT ${CLIENT_PLUGIN_${cc_plugin}})
endif()
# use uppercase
string(TOUPPER ${CC_PLUGIN_TARGET} target_name)
string(TOUPPER "${CC_PLUGIN_CONFIGURATIONS}" CC_PLUGIN_CONFIGURATIONS)
if(NOT ${PLUGIN_${target_name}} STREQUAL "")
string(TOUPPER ${PLUGIN_${target_name}} PLUGIN_${target_name})
set(CC_PLUGIN_DEFAULT ${PLUGIN_${target_name}})
endif()
set(PLUGINS_STATIC ${PLUGINS_STATIC} ${CC_PLUGIN_TARGET} PARENT_SCOPE)
set(LIBMARIADB_PLUGIN_CFLAGS ${LIBMARIADB_PLUGIN_CFLAGS} ${CC_PLUGIN_COMPILE_OPTIONS} PARENT_SCOPE)
set(LIBMARIADB_PLUGIN_INCLUDES ${LIBMARIADB_PLUGIN_INCLUDES} ${CC_PLUGIN_INCLUDES} PARENT_SCOPE)
set(LIBMARIADB_PLUGIN_SOURCES ${LIBMARIADB_PLUGIN_SOURCES} ${CC_PLUGIN_SOURCES} PARENT_SCOPE)
set(LIBMARIADB_PLUGIN_LIBS ${LIBMARIADB_PLUGIN_LIBS} ${CC_PLUGIN_LIBRARIES} PARENT_SCOPE)
endfunction()
SET(PLUGIN_EXTRA_FILES ${CC_SOURCE_DIR}/libmariadb/ma_errmsg.c)
#native password
REGISTER_PLUGIN(TARGET pvio_socket
TYPE MARIADB_CLIENT_PLUGIN_PVIO
SOURCES "${CC_SOURCE_DIR}/plugins/pvio/pvio_socket.c")
# SHA256 caching plugin for MySQL 8.0 connection
REGISTER_PLUGIN(TARGET caching_sha2_password
TYPE MARIADB_CLIENT_PLUGIN_AUTH
SOURCES "${CC_SOURCE_DIR}/plugins/auth/caching_sha2_pw.c")
REGISTER_PLUGIN(TARGET sha256_password
TYPE MARIADB_CLIENT_PLUGIN_AUTH
SOURCES "${CC_SOURCE_DIR}/plugins/auth/sha256_pw.c")
#native password
REGISTER_PLUGIN(TARGET mysql_native_password
TYPE MARIADB_CLIENT_PLUGIN_AUTH
SOURCES "${CC_SOURCE_DIR}/plugins/auth/my_auth.c")
REGISTER_PLUGIN(TARGET aurora
TYPE MARIADB_CLIENT_PLUGIN_CONNECTION
SOURCES "${CC_SOURCE_DIR}/plugins/connection/aurora.c")
add_definitions(-D HAVE_COMPRESS)
add_definitions(-D LIBMARIADB)
add_definitions(-D THREAD)
# handle static plugins
set(LIBMARIADB_SOURCES ${LIBMARIADB_PLUGIN_SOURCES})
set(SYSTEM_LIBS ${SYSTEM_LIBS} ${LIBMARIADB_PLUGIN_LIBS})
add_definitions(${LIBMARIADB_PLUGIN_DEFS})
FOREACH(plugin ${PLUGINS_STATIC})
set(EXTERNAL_PLUGINS "${EXTERNAL_PLUGINS} extern struct st_mysql_client_plugin ${plugin}_client_plugin;\n")
set(BUILTIN_PLUGINS "${BUILTIN_PLUGINS} (struct st_mysql_client_plugin *)&${plugin}_client_plugin,\n")
ENDFOREACH()
CONFIGURE_FILE(${CC_SOURCE_DIR}/libmariadb/ma_client_plugin.c.in
${CC_BINARY_DIR}/libmariadb/ma_client_plugin.c)
set(LIBMARIADB_SOURCES ${LIBMARIADB_SOURCES}
${CC_SOURCE_DIR}/plugins/auth/my_auth.c
${CC_SOURCE_DIR}/libmariadb/ma_array.c
${CC_SOURCE_DIR}/libmariadb/ma_charset.c
${CC_SOURCE_DIR}/libmariadb/ma_hash.c
${CC_SOURCE_DIR}/libmariadb/ma_net.c
${CC_SOURCE_DIR}/libmariadb/mariadb_charset.c
${CC_SOURCE_DIR}/libmariadb/ma_time.c
${CC_SOURCE_DIR}/libmariadb/ma_default.c
${CC_SOURCE_DIR}/libmariadb/ma_errmsg.c
${CC_SOURCE_DIR}/libmariadb/mariadb_lib.c
${CC_SOURCE_DIR}/libmariadb/ma_list.c
${CC_SOURCE_DIR}/libmariadb/ma_pvio.c
${CC_SOURCE_DIR}/libmariadb/ma_tls.c
${CC_SOURCE_DIR}/libmariadb/ma_alloc.c
${CC_SOURCE_DIR}/libmariadb/ma_compress.c
${CC_SOURCE_DIR}/libmariadb/ma_init.c
${CC_SOURCE_DIR}/libmariadb/ma_password.c
${CC_SOURCE_DIR}/libmariadb/ma_ll2str.c
${CC_SOURCE_DIR}/libmariadb/ma_sha1.c
${CC_SOURCE_DIR}/libmariadb/mariadb_stmt.c
${CC_SOURCE_DIR}/libmariadb/ma_loaddata.c
${CC_SOURCE_DIR}/libmariadb/ma_stmt_codec.c
${CC_SOURCE_DIR}/libmariadb/ma_string.c
${CC_SOURCE_DIR}/libmariadb/ma_dtoa.c
${CC_SOURCE_DIR}/libmariadb/mariadb_rpl.c
${CC_SOURCE_DIR}/libmariadb/ma_io.c
${CC_SOURCE_DIR}/libmariadb/secure/openssl.c
${CC_SOURCE_DIR}/libmariadb/secure/openssl_crypt.c
${CC_BINARY_DIR}/libmariadb/ma_client_plugin.c
)
if(ICONV_INCLUDE_DIR)
include_directories(BEFORE ${ICONV_INCLUDE_DIR})
endif()
add_definitions(-DLIBICONV_PLUG)
if(ZLIB_FOUND AND WITH_EXTERNAL_ZLIB)
include_directories(${ZLIB_INCLUDE_DIR})
endif()
if(WITH_DYNCOL)
set(LIBMARIADB_SOURCES ${LIBMARIADB_SOURCES} ${CC_SOURCE_DIR}/libmariadb/mariadb_dyncol.c)
endif()
set(LIBMARIADB_SOURCES ${LIBMARIADB_SOURCES} ${CC_SOURCE_DIR}/libmariadb/mariadb_async.c ${CC_SOURCE_DIR}/libmariadb/ma_context.c)
add_library(mariadbclient STATIC ${LIBMARIADB_SOURCES})
target_link_libraries(mariadbclient ${SYSTEM_LIBS})
target_include_directories(mariadbclient
PRIVATE ${CC_BINARY_DIR}/include-private
PUBLIC ${CC_BINARY_DIR}/include-public ${CC_SOURCE_DIR}/include ${CC_SOURCE_DIR}/libmariadb)
set_target_properties(mariadbclient PROPERTIES IMPORTED_INTERFACE_LINK_LIBRARIES "${SYSTEM_LIBS}")

View File

@ -1,21 +1,220 @@
-set(protobuf_SOURCE_DIR "${ClickHouse_SOURCE_DIR}/contrib/protobuf")
-set(protobuf_BINARY_DIR "${ClickHouse_BINARY_DIR}/contrib/protobuf")
-set(protobuf_WITH_ZLIB 0 CACHE INTERNAL "" FORCE) # actually will use zlib, but skip find
-set(protobuf_BUILD_TESTS OFF CACHE INTERNAL "" FORCE)
-if (MAKE_STATIC_LIBRARIES)
-set(protobuf_BUILD_SHARED_LIBS OFF CACHE INTERNAL "" FORCE)
-else ()
-set(protobuf_BUILD_SHARED_LIBS ON CACHE INTERNAL "" FORCE)
+set(protobuf_source_dir "${ClickHouse_SOURCE_DIR}/contrib/protobuf")
+set(protobuf_binary_dir "${ClickHouse_BINARY_DIR}/contrib/protobuf")
+add_definitions(-DGOOGLE_PROTOBUF_CMAKE_BUILD)
+add_definitions(-DHAVE_PTHREAD)
+add_definitions(-DHAVE_ZLIB)
include_directories(
${ZLIB_INCLUDE_DIRECTORIES}
${protobuf_binary_dir}
${protobuf_source_dir}/src)
set(libprotobuf_lite_files
${protobuf_source_dir}/src/google/protobuf/any_lite.cc
${protobuf_source_dir}/src/google/protobuf/arena.cc
${protobuf_source_dir}/src/google/protobuf/arenastring.cc
${protobuf_source_dir}/src/google/protobuf/extension_set.cc
${protobuf_source_dir}/src/google/protobuf/field_access_listener.cc
${protobuf_source_dir}/src/google/protobuf/generated_enum_util.cc
${protobuf_source_dir}/src/google/protobuf/generated_message_table_driven_lite.cc
${protobuf_source_dir}/src/google/protobuf/generated_message_util.cc
${protobuf_source_dir}/src/google/protobuf/implicit_weak_message.cc
${protobuf_source_dir}/src/google/protobuf/io/coded_stream.cc
${protobuf_source_dir}/src/google/protobuf/io/io_win32.cc
${protobuf_source_dir}/src/google/protobuf/io/strtod.cc
${protobuf_source_dir}/src/google/protobuf/io/zero_copy_stream.cc
${protobuf_source_dir}/src/google/protobuf/io/zero_copy_stream_impl.cc
${protobuf_source_dir}/src/google/protobuf/io/zero_copy_stream_impl_lite.cc
${protobuf_source_dir}/src/google/protobuf/map.cc
${protobuf_source_dir}/src/google/protobuf/message_lite.cc
${protobuf_source_dir}/src/google/protobuf/parse_context.cc
${protobuf_source_dir}/src/google/protobuf/repeated_field.cc
${protobuf_source_dir}/src/google/protobuf/stubs/bytestream.cc
${protobuf_source_dir}/src/google/protobuf/stubs/common.cc
${protobuf_source_dir}/src/google/protobuf/stubs/int128.cc
${protobuf_source_dir}/src/google/protobuf/stubs/status.cc
${protobuf_source_dir}/src/google/protobuf/stubs/statusor.cc
${protobuf_source_dir}/src/google/protobuf/stubs/stringpiece.cc
${protobuf_source_dir}/src/google/protobuf/stubs/stringprintf.cc
${protobuf_source_dir}/src/google/protobuf/stubs/structurally_valid.cc
${protobuf_source_dir}/src/google/protobuf/stubs/strutil.cc
${protobuf_source_dir}/src/google/protobuf/stubs/time.cc
${protobuf_source_dir}/src/google/protobuf/wire_format_lite.cc
)
add_library(libprotobuf-lite ${libprotobuf_lite_files})
target_link_libraries(libprotobuf-lite pthread)
if(${CMAKE_SYSTEM_NAME} STREQUAL "Android")
target_link_libraries(libprotobuf-lite log)
endif()
target_include_directories(libprotobuf-lite SYSTEM PUBLIC ${protobuf_source_dir}/src)
add_library(protobuf::libprotobuf-lite ALIAS libprotobuf-lite)
-if (CMAKE_CROSSCOMPILING)
-# Will build 'protoc' for host arch instead of cross-compiling
-set(protobuf_BUILD_PROTOC_BINARIES OFF CACHE INTERNAL "" FORCE)
set(libprotobuf_files
${protobuf_source_dir}/src/google/protobuf/any.cc
${protobuf_source_dir}/src/google/protobuf/any.pb.cc
${protobuf_source_dir}/src/google/protobuf/api.pb.cc
${protobuf_source_dir}/src/google/protobuf/compiler/importer.cc
${protobuf_source_dir}/src/google/protobuf/compiler/parser.cc
${protobuf_source_dir}/src/google/protobuf/descriptor.cc
${protobuf_source_dir}/src/google/protobuf/descriptor.pb.cc
${protobuf_source_dir}/src/google/protobuf/descriptor_database.cc
${protobuf_source_dir}/src/google/protobuf/duration.pb.cc
${protobuf_source_dir}/src/google/protobuf/dynamic_message.cc
${protobuf_source_dir}/src/google/protobuf/empty.pb.cc
${protobuf_source_dir}/src/google/protobuf/extension_set_heavy.cc
${protobuf_source_dir}/src/google/protobuf/field_mask.pb.cc
${protobuf_source_dir}/src/google/protobuf/generated_message_reflection.cc
${protobuf_source_dir}/src/google/protobuf/generated_message_table_driven.cc
${protobuf_source_dir}/src/google/protobuf/io/gzip_stream.cc
${protobuf_source_dir}/src/google/protobuf/io/printer.cc
${protobuf_source_dir}/src/google/protobuf/io/tokenizer.cc
${protobuf_source_dir}/src/google/protobuf/map_field.cc
${protobuf_source_dir}/src/google/protobuf/message.cc
${protobuf_source_dir}/src/google/protobuf/reflection_ops.cc
${protobuf_source_dir}/src/google/protobuf/service.cc
${protobuf_source_dir}/src/google/protobuf/source_context.pb.cc
${protobuf_source_dir}/src/google/protobuf/struct.pb.cc
${protobuf_source_dir}/src/google/protobuf/stubs/substitute.cc
${protobuf_source_dir}/src/google/protobuf/text_format.cc
${protobuf_source_dir}/src/google/protobuf/timestamp.pb.cc
${protobuf_source_dir}/src/google/protobuf/type.pb.cc
${protobuf_source_dir}/src/google/protobuf/unknown_field_set.cc
${protobuf_source_dir}/src/google/protobuf/util/delimited_message_util.cc
${protobuf_source_dir}/src/google/protobuf/util/field_comparator.cc
${protobuf_source_dir}/src/google/protobuf/util/field_mask_util.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/datapiece.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/default_value_objectwriter.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/error_listener.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/field_mask_utility.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/json_escaping.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/json_objectwriter.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/json_stream_parser.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/object_writer.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/proto_writer.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/protostream_objectsource.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/protostream_objectwriter.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/type_info.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/type_info_test_helper.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/utility.cc
${protobuf_source_dir}/src/google/protobuf/util/json_util.cc
${protobuf_source_dir}/src/google/protobuf/util/message_differencer.cc
${protobuf_source_dir}/src/google/protobuf/util/time_util.cc
${protobuf_source_dir}/src/google/protobuf/util/type_resolver_util.cc
${protobuf_source_dir}/src/google/protobuf/wire_format.cc
${protobuf_source_dir}/src/google/protobuf/wrappers.pb.cc
)
add_library(libprotobuf ${libprotobuf_lite_files} ${libprotobuf_files})
target_link_libraries(libprotobuf pthread)
target_link_libraries(libprotobuf ${ZLIB_LIBRARIES})
if(${CMAKE_SYSTEM_NAME} STREQUAL "Android")
target_link_libraries(libprotobuf log)
endif()
target_include_directories(libprotobuf SYSTEM PUBLIC ${protobuf_source_dir}/src)
add_library(protobuf::libprotobuf ALIAS libprotobuf)
add_subdirectory("${protobuf_SOURCE_DIR}/cmake" "${protobuf_BINARY_DIR}")
set(libprotoc_files
${protobuf_source_dir}/src/google/protobuf/compiler/code_generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/command_line_interface.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_enum.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_enum_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_extension.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_file.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_helpers.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_map_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_message.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_message_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_padding_optimizer.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_parse_function_generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_primitive_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_service.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_string_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_doc_comment.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_enum.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_enum_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_field_base.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_helpers.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_map_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_message.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_message_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_primitive_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_reflection_class.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_repeated_enum_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_repeated_message_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_repeated_primitive_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_source_generator_base.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_wrapper_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_context.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_doc_comment.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_enum.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_enum_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_enum_field_lite.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_enum_lite.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_extension.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_extension_lite.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_file.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_generator_factory.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_helpers.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_kotlin_generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_map_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_map_field_lite.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_message.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_message_builder.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_message_builder_lite.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_message_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_message_field_lite.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_message_lite.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_name_resolver.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_primitive_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_primitive_field_lite.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_service.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_shared_code_generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_string_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_string_field_lite.cc
${protobuf_source_dir}/src/google/protobuf/compiler/js/js_generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/js/well_known_types_embed.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_enum.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_enum_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_extension.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_file.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_helpers.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_map_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_message.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_message_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_oneof.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_primitive_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/php/php_generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/plugin.cc
${protobuf_source_dir}/src/google/protobuf/compiler/plugin.pb.cc
${protobuf_source_dir}/src/google/protobuf/compiler/python/python_generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/ruby/ruby_generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/subprocess.cc
${protobuf_source_dir}/src/google/protobuf/compiler/zip_writer.cc
)
add_library(libprotoc ${libprotoc_files})
target_link_libraries(libprotoc libprotobuf)
add_library(protobuf::libprotoc ALIAS libprotoc)
set(protoc_files ${protobuf_source_dir}/src/google/protobuf/compiler/main.cc)
if (NOT CMAKE_CROSSCOMPILING)
add_executable(protoc ${protoc_files})
target_link_libraries(protoc libprotoc libprotobuf pthread)
add_executable(protobuf::protoc ALIAS protoc)
if (ENABLE_FUZZING)
# `protoc` will be built with sanitizer and it could fail during ClickHouse build
@ -26,14 +225,9 @@ if (ENABLE_FUZZING)
# export MSAN_OPTIONS=exit_code=0
target_compile_options(protoc PRIVATE "-fsanitize-recover=all")
endif()
-else ()
-# We don't want to stop compilation on warnings in protobuf's headers.
-# The following line overrides the value assigned by the command target_include_directories() in libprotobuf.cmake
-set_property(TARGET libprotobuf PROPERTY INTERFACE_SYSTEM_INCLUDE_DIRECTORIES "${protobuf_SOURCE_DIR}/src")
if (CMAKE_CROSSCOMPILING)
# Build 'protoc' for host arch
-set (PROTOC_BUILD_DIR "${protobuf_BINARY_DIR}/build")
+set (PROTOC_BUILD_DIR "${protobuf_binary_dir}/build")
if (NOT EXISTS "${PROTOC_BUILD_DIR}/protoc")
@ -53,7 +247,7 @@ if (CMAKE_CROSSCOMPILING)
"-Dprotobuf_BUILD_CONFORMANCE=0" "-Dprotobuf_BUILD_CONFORMANCE=0"
"-Dprotobuf_BUILD_EXAMPLES=0" "-Dprotobuf_BUILD_EXAMPLES=0"
"-Dprotobuf_BUILD_PROTOC_BINARIES=1" "-Dprotobuf_BUILD_PROTOC_BINARIES=1"
"${protobuf_SOURCE_DIR}/cmake" "${protobuf_source_dir}/cmake"
WORKING_DIRECTORY "${PROTOC_BUILD_DIR}" WORKING_DIRECTORY "${PROTOC_BUILD_DIR}"
COMMAND_ECHO STDOUT) COMMAND_ECHO STDOUT)
@ -78,7 +272,7 @@ if (CMAKE_CROSSCOMPILING)
# -Dprotobuf_BUILD_CONFORMANCE=0
# -Dprotobuf_BUILD_EXAMPLES=0
# -Dprotobuf_BUILD_PROTOC_BINARIES=1
-# "${protobuf_SOURCE_DIR}/cmake"
+# "${protobuf_source_dir}/cmake"
#
# DEPENDS "${PROTOC_BUILD_DIR}"
# WORKING_DIRECTORY "${PROTOC_BUILD_DIR}"
@ -97,5 +291,4 @@ if (CMAKE_CROSSCOMPILING)
add_executable(protoc IMPORTED GLOBAL)
set_target_properties (protoc PROPERTIES IMPORTED_LOCATION "${PROTOC_BUILD_DIR}/protoc")
add_dependencies(protoc "${PROTOC_BUILD_DIR}/protoc")
endif ()
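
For orientation only (not part of the diff): once `protoc` exists, either as the locally built target or as the IMPORTED executable registered above, generated sources are typically produced with an add_custom_command along these lines; the .proto file name is hypothetical.

# Sketch: CMake substitutes the executable's location for the `protoc` target name,
# whether it was built in-tree or imported from ${PROTOC_BUILD_DIR}.
add_custom_command(
    OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/example.pb.cc" "${CMAKE_CURRENT_BINARY_DIR}/example.pb.h"
    COMMAND protoc "--cpp_out=${CMAKE_CURRENT_BINARY_DIR}" -I "${CMAKE_CURRENT_SOURCE_DIR}" "${CMAKE_CURRENT_SOURCE_DIR}/example.proto"
    DEPENDS protoc "${CMAKE_CURRENT_SOURCE_DIR}/example.proto")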

View File

@ -1,24 +1,54 @@
# Copyright 2015 The RE2 Authors. All Rights Reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
# This file was edited for ClickHouse
set(SRC_DIR "${ClickHouse_SOURCE_DIR}/contrib/re2")
set(RE2_SOURCES
${SRC_DIR}/re2/bitstate.cc
${SRC_DIR}/re2/compile.cc
${SRC_DIR}/re2/dfa.cc
${SRC_DIR}/re2/filtered_re2.cc
${SRC_DIR}/re2/mimics_pcre.cc
${SRC_DIR}/re2/nfa.cc
${SRC_DIR}/re2/onepass.cc
${SRC_DIR}/re2/parse.cc
${SRC_DIR}/re2/perl_groups.cc
${SRC_DIR}/re2/prefilter.cc
${SRC_DIR}/re2/prefilter_tree.cc
${SRC_DIR}/re2/prog.cc
${SRC_DIR}/re2/re2.cc
${SRC_DIR}/re2/regexp.cc
${SRC_DIR}/re2/set.cc
${SRC_DIR}/re2/simplify.cc
${SRC_DIR}/re2/stringpiece.cc
${SRC_DIR}/re2/tostring.cc
${SRC_DIR}/re2/unicode_casefold.cc
${SRC_DIR}/re2/unicode_groups.cc
${SRC_DIR}/util/rune.cc
${SRC_DIR}/util/strutil.cc
)
add_library(re2 ${RE2_SOURCES})
target_include_directories(re2 PUBLIC "${SRC_DIR}")
# Building re2 which is thread-safe and re2_st which is not.
# re2 changes its state during matching of regular expression, e.g. creates temporary DFA.
# It uses RWLock to process the same regular expression object from different threads.
# In order to avoid redundant locks in some cases, we use not thread-safe version of the library (re2_st).
-set (RE2_SOURCE_DIR ${ClickHouse_SOURCE_DIR}/contrib/re2/)
-get_target_property (RE2_SOURCES_ re2 SOURCES)
-foreach (src ${RE2_SOURCES_})
-list(APPEND RE2_ST_SOURCES ${RE2_SOURCE_DIR}/${src})
-endforeach ()
-add_library(re2_st ${RE2_ST_SOURCES})
+add_library(re2_st ${RE2_SOURCES})
target_compile_definitions (re2_st PRIVATE NDEBUG NO_THREADS re2=re2_st)
target_include_directories (re2_st PRIVATE .)
-target_include_directories (re2_st SYSTEM PUBLIC ${CMAKE_CURRENT_BINARY_DIR} ${RE2_SOURCE_DIR})
+target_include_directories (re2_st SYSTEM PUBLIC ${CMAKE_CURRENT_BINARY_DIR} ${SRC_DIR})
file (MAKE_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/re2_st)
foreach (FILENAME filtered_re2.h re2.h set.h stringpiece.h)
add_custom_command (OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/re2_st/${FILENAME}"
-COMMAND ${CMAKE_COMMAND} -DSOURCE_FILENAME="${RE2_SOURCE_DIR}/re2/${FILENAME}"
+COMMAND ${CMAKE_COMMAND} -DSOURCE_FILENAME="${SRC_DIR}/re2/${FILENAME}"
-DTARGET_FILENAME="${CMAKE_CURRENT_BINARY_DIR}/re2_st/${FILENAME}"
-P "${CMAKE_CURRENT_SOURCE_DIR}/re2_transform.cmake"
COMMENT "Creating ${FILENAME} for re2_st library.")
@ -29,7 +59,7 @@ endforeach ()
file (MAKE_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/util)
foreach (FILENAME mutex.h)
add_custom_command (OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/util/${FILENAME}"
-COMMAND ${CMAKE_COMMAND} -DSOURCE_FILENAME="${RE2_SOURCE_DIR}/util/${FILENAME}"
+COMMAND ${CMAKE_COMMAND} -DSOURCE_FILENAME="${SRC_DIR}/util/${FILENAME}"
-DTARGET_FILENAME="${CMAKE_CURRENT_BINARY_DIR}/util/${FILENAME}"
-P "${CMAKE_CURRENT_SOURCE_DIR}/re2_transform.cmake"
COMMENT "Creating ${FILENAME} for re2_st library.")

2
contrib/replxx vendored

@ -1 +1 @@
-Subproject commit b0c266c2d8a835784181e17292b421848c78c6b8
+Subproject commit f019cba7ea1bcd1b4feb7826f28ed57fb581b04c

View File

@ -190,7 +190,7 @@ if(HAVE_PTHREAD_MUTEX_ADAPTIVE_NP)
endif()
include(CheckCXXSymbolExists)
-if(CMAKE_SYSTEM_NAME MATCHES "^FreeBSD")
+if (OS_FREEBSD)
check_cxx_symbol_exists(malloc_usable_size "${ROCKSDB_SOURCE_DIR}/malloc_np.h" HAVE_MALLOC_USABLE_SIZE)
else()
check_cxx_symbol_exists(malloc_usable_size "${ROCKSDB_SOURCE_DIR}/malloc.h" HAVE_MALLOC_USABLE_SIZE)
@ -199,20 +199,14 @@ if(HAVE_MALLOC_USABLE_SIZE)
add_definitions(-DROCKSDB_MALLOC_USABLE_SIZE)
endif()
-check_cxx_symbol_exists(sched_getcpu sched.h HAVE_SCHED_GETCPU)
-if(HAVE_SCHED_GETCPU)
+if (OS_LINUX)
add_definitions(-DROCKSDB_SCHED_GETCPU_PRESENT)
+add_definitions(-DROCKSDB_AUXV_SYSAUXV_PRESENT)
+add_definitions(-DROCKSDB_AUXV_GETAUXVAL_PRESENT)
+elseif (OS_FREEBSD)
+add_definitions(-DROCKSDB_AUXV_SYSAUXV_PRESENT)
endif()
-check_cxx_symbol_exists(getauxval auvx.h HAVE_AUXV_GETAUXVAL)
-if(HAVE_AUXV_GETAUXVAL)
-add_definitions(-DROCKSDB_AUXV_GETAUXVAL_PRESENT)
-endif()
-check_cxx_symbol_exists(elf_aux_info sys/auxv.h HAVE_ELF_AUX_INFO)
-if(HAVE_ELF_AUX_INFO)
-add_definitions(-DROCKSDB_AUXV_GETAUXVAL_PRESENT)
-endif()
include_directories(${ROCKSDB_SOURCE_DIR})
include_directories("${ROCKSDB_SOURCE_DIR}/include")

View File

@ -0,0 +1,47 @@
set (SRC_DIR "${ClickHouse_SOURCE_DIR}/contrib/sentry-native")
set (SRCS
${SRC_DIR}/vendor/mpack.c
${SRC_DIR}/src/sentry_alloc.c
${SRC_DIR}/src/sentry_backend.c
${SRC_DIR}/src/sentry_core.c
${SRC_DIR}/src/sentry_database.c
${SRC_DIR}/src/sentry_envelope.c
${SRC_DIR}/src/sentry_json.c
${SRC_DIR}/src/sentry_logger.c
${SRC_DIR}/src/sentry_options.c
${SRC_DIR}/src/sentry_random.c
${SRC_DIR}/src/sentry_ratelimiter.c
${SRC_DIR}/src/sentry_scope.c
${SRC_DIR}/src/sentry_session.c
${SRC_DIR}/src/sentry_slice.c
${SRC_DIR}/src/sentry_string.c
${SRC_DIR}/src/sentry_sync.c
${SRC_DIR}/src/sentry_transport.c
${SRC_DIR}/src/sentry_utils.c
${SRC_DIR}/src/sentry_uuid.c
${SRC_DIR}/src/sentry_value.c
${SRC_DIR}/src/path/sentry_path.c
${SRC_DIR}/src/transports/sentry_disk_transport.c
${SRC_DIR}/src/transports/sentry_function_transport.c
${SRC_DIR}/src/unwinder/sentry_unwinder.c
${SRC_DIR}/src/sentry_unix_pageallocator.c
${SRC_DIR}/src/path/sentry_path_unix.c
${SRC_DIR}/src/symbolizer/sentry_symbolizer_unix.c
${SRC_DIR}/src/modulefinder/sentry_modulefinder_linux.c
${SRC_DIR}/src/transports/sentry_transport_curl.c
${SRC_DIR}/src/backends/sentry_backend_none.c
)
add_library(sentry ${SRCS})
add_library(sentry::sentry ALIAS sentry)
if(BUILD_SHARED_LIBS)
target_compile_definitions(sentry PRIVATE SENTRY_BUILD_SHARED)
else()
target_compile_definitions(sentry PUBLIC SENTRY_BUILD_STATIC)
endif()
target_link_libraries(sentry PRIVATE curl pthread)
target_include_directories(sentry PUBLIC "${SRC_DIR}/include" PRIVATE "${SRC_DIR}/src")
target_compile_definitions(sentry PRIVATE SENTRY_WITH_INPROC_BACKEND SIZEOF_LONG=8)
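
Note the visibility split above: SENTRY_BUILD_SHARED stays PRIVATE while SENTRY_BUILD_STATIC is PUBLIC, presumably so that code including sentry.h through this target sees a linkage macro consistent with how the library was actually built. A hypothetical consumer then needs nothing beyond the link:

# Hypothetical target; the PUBLIC include directory and the SENTRY_BUILD_STATIC
# definition propagate automatically from the `sentry` target.
add_executable(crash_probe crash_probe.c)
target_link_libraries(crash_probe PRIVATE sentry)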

View File

@ -2,15 +2,12 @@ set (SOURCE_DIR "${CMAKE_SOURCE_DIR}/contrib/snappy")
set (SNAPPY_IS_BIG_ENDIAN 0)
-include(CheckIncludeFile)
-check_include_file("byteswap.h" HAVE_BYTESWAP_H)
-check_include_file("sys/endian.h" HAVE_SYS_ENDIAN_H)
-check_include_file("sys/mman.h" HAVE_SYS_MMAN_H)
-check_include_file("sys/resource.h" HAVE_SYS_RESOURCE_H)
-check_include_file("sys/time.h" HAVE_SYS_TIME_H)
-check_include_file("sys/uio.h" HAVE_SYS_UIO_H)
-check_include_file("unistd.h" HAVE_UNISTD_H)
-check_include_file("windows.h" HAVE_WINDOWS_H)
+set (HAVE_BYTESWAP_H 1)
+set (HAVE_SYS_MMAN_H 1)
+set (HAVE_SYS_RESOURCE_H 1)
+set (HAVE_SYS_TIME_H 1)
+set (HAVE_SYS_UIO_H 1)
+set (HAVE_UNISTD_H 1)
set (HAVE_BUILTIN_EXPECT 1)
set (HAVE_BUILTIN_CTZ 1)

1
contrib/stats vendored

@ -1 +0,0 @@
Subproject commit b6dd459c10a88c7ea04693c007e9e35820c5d9ad

View File

@ -0,0 +1,263 @@
set (SRC_DIR "${ClickHouse_SOURCE_DIR}/contrib/xz")
# Author: Lasse Collin
#
# This file has been put into the public domain.
# You can do whatever you want with this file.
#
# The file was edited for ClickHouse
# Get the package version from version.h into XZ_VERSION variable.
file(READ ${SRC_DIR}/src/liblzma/api/lzma/version.h XZ_VERSION)
string(REGEX REPLACE
"^.*\n\
#define LZMA_VERSION_MAJOR ([0-9]+)\n\
#define LZMA_VERSION_MINOR ([0-9]+)\n\
#define LZMA_VERSION_PATCH ([0-9]+)\n\
.*$"
"\\1.\\2.\\3" XZ_VERSION "${XZ_VERSION}")
# Definitions common to all targets:
add_compile_definitions(
# Package info:
PACKAGE_NAME="XZ Utils"
PACKAGE_BUGREPORT="lasse.collin@tukaani.org"
PACKAGE_URL="https://tukaani.org/xz/"
# Features:
HAVE_CHECK_CRC32
HAVE_CHECK_CRC64
HAVE_CHECK_SHA256
HAVE_DECODERS
HAVE_DECODER_ARM
HAVE_DECODER_ARMTHUMB
HAVE_DECODER_DELTA
HAVE_DECODER_IA64
HAVE_DECODER_LZMA1
HAVE_DECODER_LZMA2
HAVE_DECODER_POWERPC
HAVE_DECODER_SPARC
HAVE_DECODER_X86
HAVE_ENCODERS
HAVE_ENCODER_ARM
HAVE_ENCODER_ARMTHUMB
HAVE_ENCODER_DELTA
HAVE_ENCODER_IA64
HAVE_ENCODER_LZMA1
HAVE_ENCODER_LZMA2
HAVE_ENCODER_POWERPC
HAVE_ENCODER_SPARC
HAVE_ENCODER_X86
HAVE_MF_BT2
HAVE_MF_BT3
HAVE_MF_BT4
HAVE_MF_HC3
HAVE_MF_HC4
# Standard headers and types are available:
HAVE_STDBOOL_H
HAVE__BOOL
HAVE_STDINT_H
HAVE_INTTYPES_H
HAVE___BUILTIN_BSWAPXX
HAVE___BUILTIN_ASSUME_ALIGNED
_GNU_SOURCE
__EXTENSIONS__
_POSIX_PTHREAD_SEMANTICS
_TANDEM_SOURCE
_ALL_SOURCE
HAVE_CLOCK_GETTIME=1
HAVE_DECL_CLOCK_MONOTONIC=1
HAVE_PTHREAD_CONDATTR_SETCLOCK
MYTHREAD_POSIX
)
if (OS_LINUX)
add_compile_definitions(
TUKLIB_CPUCORES_SCHED_GETAFFINITY
TUKLIB_PHYSMEM_SYSCONF)
elseif (OS_FREEBSD)
add_compile_definitions(
TUKLIB_CPUCORES_CPUSET
TUKLIB_PHYSMEM_SYSCTL)
elseif (OS_DARWIN)
add_compile_definitions(
TUKLIB_CPUCORES_SYSCTL
TUKLIB_PHYSMEM_SYSCTL)
endif ()
if (ARCH_AMD64 OR ARCH_AARCH64)
add_compile_definitions(TUKLIB_FAST_UNALIGNED_ACCESS=1)
endif ()
find_package(Threads REQUIRED)
add_library(liblzma
${SRC_DIR}/src/common/mythread.h
${SRC_DIR}/src/common/sysdefs.h
${SRC_DIR}/src/common/tuklib_common.h
${SRC_DIR}/src/common/tuklib_config.h
${SRC_DIR}/src/common/tuklib_cpucores.c
${SRC_DIR}/src/common/tuklib_cpucores.h
${SRC_DIR}/src/common/tuklib_integer.h
${SRC_DIR}/src/common/tuklib_physmem.c
${SRC_DIR}/src/common/tuklib_physmem.h
${SRC_DIR}/src/liblzma/api/lzma.h
${SRC_DIR}/src/liblzma/api/lzma/base.h
${SRC_DIR}/src/liblzma/api/lzma/bcj.h
${SRC_DIR}/src/liblzma/api/lzma/block.h
${SRC_DIR}/src/liblzma/api/lzma/check.h
${SRC_DIR}/src/liblzma/api/lzma/container.h
${SRC_DIR}/src/liblzma/api/lzma/delta.h
${SRC_DIR}/src/liblzma/api/lzma/filter.h
${SRC_DIR}/src/liblzma/api/lzma/hardware.h
${SRC_DIR}/src/liblzma/api/lzma/index.h
${SRC_DIR}/src/liblzma/api/lzma/index_hash.h
${SRC_DIR}/src/liblzma/api/lzma/lzma12.h
${SRC_DIR}/src/liblzma/api/lzma/stream_flags.h
${SRC_DIR}/src/liblzma/api/lzma/version.h
${SRC_DIR}/src/liblzma/api/lzma/vli.h
${SRC_DIR}/src/liblzma/check/check.c
${SRC_DIR}/src/liblzma/check/check.h
${SRC_DIR}/src/liblzma/check/crc32_fast.c
${SRC_DIR}/src/liblzma/check/crc32_table.c
${SRC_DIR}/src/liblzma/check/crc32_table_be.h
${SRC_DIR}/src/liblzma/check/crc32_table_le.h
${SRC_DIR}/src/liblzma/check/crc64_fast.c
${SRC_DIR}/src/liblzma/check/crc64_table.c
${SRC_DIR}/src/liblzma/check/crc64_table_be.h
${SRC_DIR}/src/liblzma/check/crc64_table_le.h
${SRC_DIR}/src/liblzma/check/crc_macros.h
${SRC_DIR}/src/liblzma/check/sha256.c
${SRC_DIR}/src/liblzma/common/alone_decoder.c
${SRC_DIR}/src/liblzma/common/alone_decoder.h
${SRC_DIR}/src/liblzma/common/alone_encoder.c
${SRC_DIR}/src/liblzma/common/auto_decoder.c
${SRC_DIR}/src/liblzma/common/block_buffer_decoder.c
${SRC_DIR}/src/liblzma/common/block_buffer_encoder.c
${SRC_DIR}/src/liblzma/common/block_buffer_encoder.h
${SRC_DIR}/src/liblzma/common/block_decoder.c
${SRC_DIR}/src/liblzma/common/block_decoder.h
${SRC_DIR}/src/liblzma/common/block_encoder.c
${SRC_DIR}/src/liblzma/common/block_encoder.h
${SRC_DIR}/src/liblzma/common/block_header_decoder.c
${SRC_DIR}/src/liblzma/common/block_header_encoder.c
${SRC_DIR}/src/liblzma/common/block_util.c
${SRC_DIR}/src/liblzma/common/common.c
${SRC_DIR}/src/liblzma/common/common.h
${SRC_DIR}/src/liblzma/common/easy_buffer_encoder.c
${SRC_DIR}/src/liblzma/common/easy_decoder_memusage.c
${SRC_DIR}/src/liblzma/common/easy_encoder.c
${SRC_DIR}/src/liblzma/common/easy_encoder_memusage.c
${SRC_DIR}/src/liblzma/common/easy_preset.c
${SRC_DIR}/src/liblzma/common/easy_preset.h
${SRC_DIR}/src/liblzma/common/file_info.c
${SRC_DIR}/src/liblzma/common/filter_buffer_decoder.c
${SRC_DIR}/src/liblzma/common/filter_buffer_encoder.c
${SRC_DIR}/src/liblzma/common/filter_common.c
${SRC_DIR}/src/liblzma/common/filter_common.h
${SRC_DIR}/src/liblzma/common/filter_decoder.c
${SRC_DIR}/src/liblzma/common/filter_decoder.h
${SRC_DIR}/src/liblzma/common/filter_encoder.c
${SRC_DIR}/src/liblzma/common/filter_encoder.h
${SRC_DIR}/src/liblzma/common/filter_flags_decoder.c
${SRC_DIR}/src/liblzma/common/filter_flags_encoder.c
${SRC_DIR}/src/liblzma/common/hardware_cputhreads.c
${SRC_DIR}/src/liblzma/common/hardware_physmem.c
${SRC_DIR}/src/liblzma/common/index.c
${SRC_DIR}/src/liblzma/common/index.h
${SRC_DIR}/src/liblzma/common/index_decoder.c
${SRC_DIR}/src/liblzma/common/index_decoder.h
${SRC_DIR}/src/liblzma/common/index_encoder.c
${SRC_DIR}/src/liblzma/common/index_encoder.h
${SRC_DIR}/src/liblzma/common/index_hash.c
${SRC_DIR}/src/liblzma/common/memcmplen.h
${SRC_DIR}/src/liblzma/common/outqueue.c
${SRC_DIR}/src/liblzma/common/outqueue.h
${SRC_DIR}/src/liblzma/common/stream_buffer_decoder.c
${SRC_DIR}/src/liblzma/common/stream_buffer_encoder.c
${SRC_DIR}/src/liblzma/common/stream_decoder.c
${SRC_DIR}/src/liblzma/common/stream_decoder.h
${SRC_DIR}/src/liblzma/common/stream_encoder.c
${SRC_DIR}/src/liblzma/common/stream_encoder_mt.c
${SRC_DIR}/src/liblzma/common/stream_flags_common.c
${SRC_DIR}/src/liblzma/common/stream_flags_common.h
${SRC_DIR}/src/liblzma/common/stream_flags_decoder.c
${SRC_DIR}/src/liblzma/common/stream_flags_encoder.c
${SRC_DIR}/src/liblzma/common/vli_decoder.c
${SRC_DIR}/src/liblzma/common/vli_encoder.c
${SRC_DIR}/src/liblzma/common/vli_size.c
${SRC_DIR}/src/liblzma/delta/delta_common.c
${SRC_DIR}/src/liblzma/delta/delta_common.h
${SRC_DIR}/src/liblzma/delta/delta_decoder.c
${SRC_DIR}/src/liblzma/delta/delta_decoder.h
${SRC_DIR}/src/liblzma/delta/delta_encoder.c
${SRC_DIR}/src/liblzma/delta/delta_encoder.h
${SRC_DIR}/src/liblzma/delta/delta_private.h
${SRC_DIR}/src/liblzma/lz/lz_decoder.c
${SRC_DIR}/src/liblzma/lz/lz_decoder.h
${SRC_DIR}/src/liblzma/lz/lz_encoder.c
${SRC_DIR}/src/liblzma/lz/lz_encoder.h
${SRC_DIR}/src/liblzma/lz/lz_encoder_hash.h
${SRC_DIR}/src/liblzma/lz/lz_encoder_hash_table.h
${SRC_DIR}/src/liblzma/lz/lz_encoder_mf.c
${SRC_DIR}/src/liblzma/lzma/fastpos.h
${SRC_DIR}/src/liblzma/lzma/fastpos_table.c
${SRC_DIR}/src/liblzma/lzma/lzma2_decoder.c
${SRC_DIR}/src/liblzma/lzma/lzma2_decoder.h
${SRC_DIR}/src/liblzma/lzma/lzma2_encoder.c
${SRC_DIR}/src/liblzma/lzma/lzma2_encoder.h
${SRC_DIR}/src/liblzma/lzma/lzma_common.h
${SRC_DIR}/src/liblzma/lzma/lzma_decoder.c
${SRC_DIR}/src/liblzma/lzma/lzma_decoder.h
${SRC_DIR}/src/liblzma/lzma/lzma_encoder.c
${SRC_DIR}/src/liblzma/lzma/lzma_encoder.h
${SRC_DIR}/src/liblzma/lzma/lzma_encoder_optimum_fast.c
${SRC_DIR}/src/liblzma/lzma/lzma_encoder_optimum_normal.c
${SRC_DIR}/src/liblzma/lzma/lzma_encoder_presets.c
${SRC_DIR}/src/liblzma/lzma/lzma_encoder_private.h
${SRC_DIR}/src/liblzma/rangecoder/price.h
${SRC_DIR}/src/liblzma/rangecoder/price_table.c
${SRC_DIR}/src/liblzma/rangecoder/range_common.h
${SRC_DIR}/src/liblzma/rangecoder/range_decoder.h
${SRC_DIR}/src/liblzma/rangecoder/range_encoder.h
${SRC_DIR}/src/liblzma/simple/arm.c
${SRC_DIR}/src/liblzma/simple/armthumb.c
${SRC_DIR}/src/liblzma/simple/ia64.c
${SRC_DIR}/src/liblzma/simple/powerpc.c
${SRC_DIR}/src/liblzma/simple/simple_coder.c
${SRC_DIR}/src/liblzma/simple/simple_coder.h
${SRC_DIR}/src/liblzma/simple/simple_decoder.c
${SRC_DIR}/src/liblzma/simple/simple_decoder.h
${SRC_DIR}/src/liblzma/simple/simple_encoder.c
${SRC_DIR}/src/liblzma/simple/simple_encoder.h
${SRC_DIR}/src/liblzma/simple/simple_private.h
${SRC_DIR}/src/liblzma/simple/sparc.c
${SRC_DIR}/src/liblzma/simple/x86.c
)
target_include_directories(liblzma PRIVATE
${SRC_DIR}/src/liblzma/api
${SRC_DIR}/src/liblzma/common
${SRC_DIR}/src/liblzma/check
${SRC_DIR}/src/liblzma/lz
${SRC_DIR}/src/liblzma/rangecoder
${SRC_DIR}/src/liblzma/lzma
${SRC_DIR}/src/liblzma/delta
${SRC_DIR}/src/liblzma/simple
${SRC_DIR}/src/common
)
target_link_libraries(liblzma Threads::Threads)
# Put the tuklib functions under the lzma_ namespace.
target_compile_definitions(liblzma PRIVATE TUKLIB_SYMBOL_PREFIX=lzma_)
if (ENABLE_SSE2)
target_compile_definitions(liblzma PRIVATE HAVE_IMMINTRIN_H HAVE__MM_MOVEMASK_EPI8)
endif()

View File

@ -0,0 +1,161 @@
set (SOURCE_DIR ${CMAKE_SOURCE_DIR}/contrib/zlib-ng)
add_definitions(-DZLIB_COMPAT)
add_definitions(-DWITH_GZFILEOP)
add_definitions(-DUNALIGNED_OK)
add_definitions(-DUNALIGNED64_OK)
set (HAVE_UNISTD_H 1)
add_definitions(-D_LARGEFILE64_SOURCE=1 -D__USE_LARGEFILE64)
add_definitions(-DHAVE_VISIBILITY_HIDDEN)
add_definitions(-DHAVE_VISIBILITY_INTERNAL)
add_definitions(-DHAVE_BUILTIN_CTZ)
add_definitions(-DHAVE_BUILTIN_CTZLL)
set(ZLIB_ARCH_SRCS)
set(ZLIB_ARCH_HDRS)
set(ARCHDIR "arch/generic")
if(ARCH_AARCH64)
set(ARCHDIR "${SOURCE_DIR}/arch/arm")
add_definitions(-DARM_FEATURES)
add_definitions(-DARM_AUXV_HAS_CRC32 -DARM_ASM_HWCAP)
add_definitions(-DARM_AUXV_HAS_NEON)
add_definitions(-DARM_ACLE_CRC_HASH)
add_definitions(-DARM_NEON_ADLER32 -DARM_NEON_CHUNKSET -DARM_NEON_SLIDEHASH)
list(APPEND ZLIB_ARCH_HDRS ${ARCHDIR}/arm.h)
list(APPEND ZLIB_ARCH_SRCS ${ARCHDIR}/armfeature.c)
set(ACLE_SRCS ${ARCHDIR}/crc32_acle.c ${ARCHDIR}/insert_string_acle.c)
list(APPEND ZLIB_ARCH_SRCS ${ACLE_SRCS})
set(NEON_SRCS ${ARCHDIR}/adler32_neon.c ${ARCHDIR}/chunkset_neon.c ${ARCHDIR}/slide_neon.c)
list(APPEND ZLIB_ARCH_SRCS ${NEON_SRCS})
elseif(ARCH_PPC64LE)
set(ARCHDIR "${SOURCE_DIR}/arch/power")
add_definitions(-DPOWER8)
add_definitions(-DPOWER_FEATURES)
add_definitions(-DPOWER8_VSX_ADLER32)
add_definitions(-DPOWER8_VSX_SLIDEHASH)
list(APPEND ZLIB_ARCH_HDRS ${ARCHDIR}/power.h)
list(APPEND ZLIB_ARCH_SRCS ${ARCHDIR}/power.c)
set(POWER8_SRCS ${ARCHDIR}/adler32_power8.c ${ARCHDIR}/slide_hash_power8.c)
list(APPEND ZLIB_ARCH_SRCS ${POWER8_SRCS})
elseif(ARCH_AMD64)
set(ARCHDIR "${SOURCE_DIR}/arch/x86")
add_definitions(-DX86_FEATURES)
list(APPEND ZLIB_ARCH_HDRS ${ARCHDIR}/x86.h)
list(APPEND ZLIB_ARCH_SRCS ${ARCHDIR}/x86.c)
if(ENABLE_AVX2)
add_definitions(-DX86_AVX2 -DX86_AVX2_ADLER32 -DX86_AVX_CHUNKSET)
set(AVX2_SRCS ${ARCHDIR}/slide_avx.c)
list(APPEND AVX2_SRCS ${ARCHDIR}/chunkset_avx.c)
list(APPEND AVX2_SRCS ${ARCHDIR}/compare258_avx.c)
list(APPEND AVX2_SRCS ${ARCHDIR}/adler32_avx.c)
list(APPEND ZLIB_ARCH_SRCS ${AVX2_SRCS})
endif()
if(ENABLE_SSE42)
add_definitions(-DX86_SSE42_CRC_HASH)
set(SSE42_SRCS ${ARCHDIR}/insert_string_sse.c)
list(APPEND ZLIB_ARCH_SRCS ${SSE42_SRCS})
add_definitions(-DX86_SSE42_CRC_INTRIN)
add_definitions(-DX86_SSE42_CMP_STR)
set(SSE42_SRCS ${ARCHDIR}/compare258_sse.c)
list(APPEND ZLIB_ARCH_SRCS ${SSE42_SRCS})
endif()
if(ENABLE_SSSE3)
add_definitions(-DX86_SSSE3 -DX86_SSSE3_ADLER32)
set(SSSE3_SRCS ${ARCHDIR}/adler32_ssse3.c)
list(APPEND ZLIB_ARCH_SRCS ${SSSE3_SRCS})
endif()
if(ENABLE_PCLMULQDQ)
add_definitions(-DX86_PCLMULQDQ_CRC)
set(PCLMULQDQ_SRCS ${ARCHDIR}/crc_folding.c)
list(APPEND ZLIB_ARCH_SRCS ${PCLMULQDQ_SRCS})
endif()
add_definitions(-DX86_SSE2 -DX86_SSE2_CHUNKSET -DX86_SSE2_SLIDEHASH)
set(SSE2_SRCS ${ARCHDIR}/chunkset_sse.c ${ARCHDIR}/slide_sse.c)
list(APPEND ZLIB_ARCH_SRCS ${SSE2_SRCS})
add_definitions(-DX86_NOCHECK_SSE2)
endif ()
macro(generate_cmakein input output)
file(REMOVE ${output})
file(STRINGS ${input} _lines)
foreach(_line IN LISTS _lines)
string(REGEX REPLACE "#ifdef HAVE_UNISTD_H.*" "@ZCONF_UNISTD_LINE@" _line "${_line}")
string(REGEX REPLACE "#ifdef NEED_PTRDIFF_T.*" "@ZCONF_PTRDIFF_LINE@" _line "${_line}")
if(NEED_PTRDIFF_T)
string(REGEX REPLACE "typedef PTRDIFF_TYPE" "typedef @PTRDIFF_TYPE@" _line "${_line}")
endif()
file(APPEND ${output} "${_line}\n")
endforeach()
endmacro(generate_cmakein)
generate_cmakein(${SOURCE_DIR}/zconf.h.in ${CMAKE_CURRENT_BINARY_DIR}/zconf.h.cmakein)
set(ZLIB_SRCS
${SOURCE_DIR}/adler32.c
${SOURCE_DIR}/chunkset.c
${SOURCE_DIR}/compare258.c
${SOURCE_DIR}/compress.c
${SOURCE_DIR}/crc32.c
${SOURCE_DIR}/crc32_comb.c
${SOURCE_DIR}/deflate.c
${SOURCE_DIR}/deflate_fast.c
${SOURCE_DIR}/deflate_medium.c
${SOURCE_DIR}/deflate_quick.c
${SOURCE_DIR}/deflate_slow.c
${SOURCE_DIR}/functable.c
${SOURCE_DIR}/infback.c
${SOURCE_DIR}/inffast.c
${SOURCE_DIR}/inflate.c
${SOURCE_DIR}/inftrees.c
${SOURCE_DIR}/insert_string.c
${SOURCE_DIR}/trees.c
${SOURCE_DIR}/uncompr.c
${SOURCE_DIR}/zutil.c
${SOURCE_DIR}/gzlib.c
${SOURCE_DIR}/gzread.c
${SOURCE_DIR}/gzwrite.c
)
set(ZLIB_ALL_SRCS ${ZLIB_SRCS} ${ZLIB_ARCH_SRCS})
add_library(zlib ${ZLIB_ALL_SRCS})
add_library(zlibstatic ALIAS zlib)
# https://github.com/zlib-ng/zlib-ng/pull/733
# This is disabed by default
add_compile_definitions(Z_TLS=__thread)
if(HAVE_UNISTD_H)
SET(ZCONF_UNISTD_LINE "#if 1 /* was set to #if 1 by configure/cmake/etc */")
else()
SET(ZCONF_UNISTD_LINE "#if 0 /* was set to #if 0 by configure/cmake/etc */")
endif()
if(NEED_PTRDIFF_T)
SET(ZCONF_PTRDIFF_LINE "#if 1 /* was set to #if 1 by configure/cmake/etc */")
else()
SET(ZCONF_PTRDIFF_LINE "#ifdef NEED_PTRDIFF_T /* may be set to #if 1 by configure/cmake/etc */")
endif()
set(ZLIB_PC ${CMAKE_CURRENT_BINARY_DIR}/zlib.pc)
configure_file(${SOURCE_DIR}/zlib.pc.cmakein ${ZLIB_PC} @ONLY)
configure_file(${CMAKE_CURRENT_BINARY_DIR}/zconf.h.cmakein ${CMAKE_CURRENT_BINARY_DIR}/zconf.h @ONLY)
# We should use same defines when including zlib.h as used when zlib compiled
target_compile_definitions (zlib PUBLIC ZLIB_COMPAT WITH_GZFILEOP)
if (ARCH_AMD64 OR ARCH_AARCH64)
target_compile_definitions (zlib PUBLIC X86_64 UNALIGNED_OK)
endif ()
target_include_directories(zlib PUBLIC ${SOURCE_DIR} ${CMAKE_CURRENT_BINARY_DIR})
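
Because the compat-mode definitions and the include directories above are PUBLIC, a consumer only has to link the target to compile against the generated zconf.h with the same ZLIB_COMPAT/WITH_GZFILEOP switches the library itself was built with. A hypothetical example:

# Hypothetical target; the compat-mode defines and the binary-dir include path are
# inherited from the `zlib` target's usage requirements.
add_executable(gz_roundtrip gz_roundtrip.c)
target_link_libraries(gz_roundtrip PRIVATE zlib)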

View File

@ -1,9 +1,7 @@
{
"docker/packager/deb": {
"name": "clickhouse/deb-builder",
-"dependent": [
-"docker/packager/unbundled"
-]
+"dependent": []
},
"docker/packager/binary": {
"name": "clickhouse/binary-builder",
@ -13,10 +11,6 @@
"docker/test/codebrowser" "docker/test/codebrowser"
] ]
}, },
"docker/packager/unbundled": {
"name": "clickhouse/unbundled-builder",
"dependent": []
},
"docker/test/compatibility/centos": { "docker/test/compatibility/centos": {
"name": "clickhouse/test-old-centos", "name": "clickhouse/test-old-centos",
"dependent": [] "dependent": []
@ -138,23 +132,11 @@
"name": "clickhouse/test-base", "name": "clickhouse/test-base",
"dependent": [ "dependent": [
"docker/test/stateless", "docker/test/stateless",
"docker/test/stateless_unbundled",
"docker/test/integration/base", "docker/test/integration/base",
"docker/test/fuzzer", "docker/test/fuzzer",
"docker/test/keeper-jepsen" "docker/test/keeper-jepsen"
] ]
}, },
"docker/packager/unbundled": {
"name": "clickhouse/unbundled-builder",
"dependent": [
"docker/test/stateless_unbundled"
]
},
"docker/test/stateless_unbundled": {
"name": "clickhouse/stateless-unbundled-test",
"dependent": [
]
},
"docker/test/integration/kerberized_hadoop": { "docker/test/integration/kerberized_hadoop": {
"name": "clickhouse/kerberized-hadoop", "name": "clickhouse/kerberized-hadoop",
"dependent": [] "dependent": []

View File

@ -33,7 +33,6 @@ RUN curl -O https://clickhouse-datasets.s3.yandex.net/utils/1/dpkg-deb \
&& chmod +x dpkg-deb \ && chmod +x dpkg-deb \
&& cp dpkg-deb /usr/bin && cp dpkg-deb /usr/bin
# Libraries from OS are only needed to test the "unbundled" build (this is not used in production).
RUN apt-get update \ RUN apt-get update \
&& apt-get install \ && apt-get install \
alien \ alien \

View File

@ -11,7 +11,6 @@ SCRIPT_PATH = os.path.realpath(__file__)
IMAGE_MAP = { IMAGE_MAP = {
"deb": "clickhouse/deb-builder", "deb": "clickhouse/deb-builder",
"binary": "clickhouse/binary-builder", "binary": "clickhouse/binary-builder",
"unbundled": "clickhouse/unbundled-builder"
} }
def check_image_exists_locally(image_name): def check_image_exists_locally(image_name):
@ -55,7 +54,7 @@ def run_docker_image_with_env(image_name, output, env_variables, ch_root, ccache
subprocess.check_call(cmd, shell=True) subprocess.check_call(cmd, shell=True)
def parse_env_variables(build_type, compiler, sanitizer, package_type, image_type, cache, distcc_hosts, unbundled, split_binary, clang_tidy, version, author, official, alien_pkgs, with_coverage, with_binaries): def parse_env_variables(build_type, compiler, sanitizer, package_type, image_type, cache, distcc_hosts, split_binary, clang_tidy, version, author, official, alien_pkgs, with_coverage, with_binaries):
CLANG_PREFIX = "clang" CLANG_PREFIX = "clang"
DARWIN_SUFFIX = "-darwin" DARWIN_SUFFIX = "-darwin"
DARWIN_ARM_SUFFIX = "-darwin-aarch64" DARWIN_ARM_SUFFIX = "-darwin-aarch64"
@ -107,7 +106,7 @@ def parse_env_variables(build_type, compiler, sanitizer, package_type, image_typ
cxx = cc.replace('gcc', 'g++').replace('clang', 'clang++') cxx = cc.replace('gcc', 'g++').replace('clang', 'clang++')
if image_type == "deb" or image_type == "unbundled": if image_type == "deb":
result.append("DEB_CC={}".format(cc)) result.append("DEB_CC={}".format(cc))
result.append("DEB_CXX={}".format(cxx)) result.append("DEB_CXX={}".format(cxx))
# For building fuzzers # For building fuzzers
@ -159,15 +158,6 @@ def parse_env_variables(build_type, compiler, sanitizer, package_type, image_typ
cmake_flags.append('-DENABLE_TESTS=1') cmake_flags.append('-DENABLE_TESTS=1')
cmake_flags.append('-DUSE_GTEST=1') cmake_flags.append('-DUSE_GTEST=1')
# "Unbundled" build is not suitable for any production usage.
# But it is occasionally used by some developers.
# The whole idea of using unknown version of libraries from the OS distribution is deeply flawed.
# We wish these developers good luck.
if unbundled:
# We also disable all CPU features except basic x86_64.
# It is only slightly related to "unbundled" build, but it is a good place to test if code compiles without these instruction sets.
cmake_flags.append('-DUNBUNDLED=1 -DUSE_INTERNAL_RDKAFKA_LIBRARY=1 -DENABLE_ARROW=0 -DENABLE_AVRO=0 -DENABLE_ORC=0 -DENABLE_PARQUET=0 -DENABLE_SSSE3=0 -DENABLE_SSE41=0 -DENABLE_SSE42=0 -DENABLE_PCLMULQDQ=0 -DENABLE_POPCNT=0 -DENABLE_AVX=0 -DENABLE_AVX2=0')
if split_binary: if split_binary:
cmake_flags.append('-DUSE_STATIC_LIBRARIES=0 -DSPLIT_SHARED_LIBRARIES=1 -DCLICKHOUSE_SPLIT_BINARY=1') cmake_flags.append('-DUSE_STATIC_LIBRARIES=0 -DSPLIT_SHARED_LIBRARIES=1 -DCLICKHOUSE_SPLIT_BINARY=1')
# We can't always build utils because it requires too much space, but # We can't always build utils because it requires too much space, but
@ -213,7 +203,6 @@ if __name__ == "__main__":
"clang-13", "clang-13-darwin", "clang-13-darwin-aarch64", "clang-13-aarch64", "clang-13-ppc64le", "clang-13", "clang-13-darwin", "clang-13-darwin-aarch64", "clang-13-aarch64", "clang-13-ppc64le",
"clang-11-freebsd", "clang-12-freebsd", "clang-13-freebsd", "gcc-11"), default="clang-13") "clang-11-freebsd", "clang-12-freebsd", "clang-13-freebsd", "gcc-11"), default="clang-13")
parser.add_argument("--sanitizer", choices=("address", "thread", "memory", "undefined", ""), default="") parser.add_argument("--sanitizer", choices=("address", "thread", "memory", "undefined", ""), default="")
parser.add_argument("--unbundled", action="store_true")
parser.add_argument("--split-binary", action="store_true") parser.add_argument("--split-binary", action="store_true")
parser.add_argument("--clang-tidy", action="store_true") parser.add_argument("--clang-tidy", action="store_true")
parser.add_argument("--cache", choices=("", "ccache", "distcc"), default="") parser.add_argument("--cache", choices=("", "ccache", "distcc"), default="")
@ -232,7 +221,7 @@ if __name__ == "__main__":
if not os.path.isabs(args.output_dir): if not os.path.isabs(args.output_dir):
args.output_dir = os.path.abspath(os.path.join(os.getcwd(), args.output_dir)) args.output_dir = os.path.abspath(os.path.join(os.getcwd(), args.output_dir))
image_type = 'binary' if args.package_type == 'performance' else 'unbundled' if args.unbundled else args.package_type image_type = 'binary' if args.package_type == 'performance' else args.package_type
image_name = IMAGE_MAP[image_type] image_name = IMAGE_MAP[image_type]
if not os.path.isabs(args.clickhouse_repo_path): if not os.path.isabs(args.clickhouse_repo_path):
@ -256,7 +245,7 @@ if __name__ == "__main__":
build_image(image_with_version, dockerfile) build_image(image_with_version, dockerfile)
env_prepared = parse_env_variables( env_prepared = parse_env_variables(
args.build_type, args.compiler, args.sanitizer, args.package_type, image_type, args.build_type, args.compiler, args.sanitizer, args.package_type, image_type,
args.cache, args.distcc_hosts, args.unbundled, args.split_binary, args.clang_tidy, args.cache, args.distcc_hosts, args.split_binary, args.clang_tidy,
args.version, args.author, args.official, args.alien_pkgs, args.with_coverage, args.with_binaries) args.version, args.author, args.official, args.alien_pkgs, args.with_coverage, args.with_binaries)
run_docker_image_with_env(image_name, args.output_dir, env_prepared, ch_root, args.ccache_dir, args.docker_image_version) run_docker_image_with_env(image_name, args.output_dir, env_prepared, ch_root, args.ccache_dir, args.docker_image_version)

View File

@ -1,69 +0,0 @@
# docker build -t clickhouse/unbundled-builder .
FROM clickhouse/deb-builder
RUN export CODENAME="$(lsb_release --codename --short | tr 'A-Z' 'a-z')" \
&& wget -nv -O /tmp/arrow-keyring.deb "https://apache.jfrog.io/artifactory/arrow/ubuntu/apache-arrow-apt-source-latest-${CODENAME}.deb" \
&& dpkg -i /tmp/arrow-keyring.deb
RUN sed -i 's|http://archive|http://ru.archive|g' /etc/apt/sources.list
# Libraries from OS are only needed to test the "unbundled" build (that is not used in production).
RUN apt-get update \
&& apt-get install \
libicu-dev \
gperf \
perl \
pkg-config \
devscripts \
libc++-dev \
libc++abi-dev \
libboost-all-dev \
zlib1g-dev \
liblz4-dev \
libdouble-conversion-dev \
libxml2-dev \
librdkafka-dev \
libgoogle-perftools-dev \
libzstd-dev \
libltdl-dev \
libre2-dev \
libjemalloc-dev \
libmsgpack-dev \
libcurl4-openssl-dev \
unixodbc-dev \
odbcinst \
tzdata \
alien \
libcapnp-dev \
cmake \
gdb \
pigz \
moreutils \
libcctz-dev \
libldap2-dev \
libsasl2-dev \
libgsasl7-dev \
heimdal-multidev \
libhyperscan-dev \
libbrotli-dev \
protobuf-compiler \
libprotoc-dev \
libgrpc++-dev \
protobuf-compiler-grpc \
libc-ares-dev \
rapidjson-dev \
libsnappy-dev \
libparquet-dev \
libthrift-dev \
libutf8proc-dev \
libbz2-dev \
libavro-dev \
libfarmhash-dev \
librocksdb-dev \
libgflags-dev \
libmysqlclient-dev \
--yes --no-install-recommends
COPY build.sh /
CMD ["/bin/bash", "/build.sh"]

View File

@ -1,15 +0,0 @@
#!/usr/bin/env bash
set -x -e
ccache --show-stats ||:
ccache --zero-stats ||:
read -ra ALIEN_PKGS <<< "${ALIEN_PKGS:-}"
build/release "${ALIEN_PKGS[@]}" | ts '%Y-%m-%d %H:%M:%S'
mv /*.deb /output
mv -- *.changes /output
mv -- *.buildinfo /output
mv /*.rpm /output ||: # if exists
mv /*.tgz /output ||: # if exists
ccache --show-stats ||:

View File

@ -2,18 +2,17 @@
FROM sequenceiq/hadoop-docker:2.7.0 FROM sequenceiq/hadoop-docker:2.7.0
RUN sed -i -e 's/^\#baseurl/baseurl/' /etc/yum.repos.d/CentOS-Base.repo && \
sed -i -e 's/^mirrorlist/#mirrorlist/' /etc/yum.repos.d/CentOS-Base.repo && \
sed -i -e 's#http://mirror.centos.org/#http://vault.centos.org/#' /etc/yum.repos.d/CentOS-Base.repo
# https://community.letsencrypt.org/t/rhel-centos-6-openssl-client-compatibility-after-dst-root-ca-x3-expiration/161032/81 # https://community.letsencrypt.org/t/rhel-centos-6-openssl-client-compatibility-after-dst-root-ca-x3-expiration/161032/81
RUN sed -i s/xMDkzMDE0MDExNVow/0MDkzMDE4MTQwM1ow/ /etc/pki/tls/certs/ca-bundle.crt RUN sed -i s/xMDkzMDE0MDExNVow/0MDkzMDE4MTQwM1ow/ /etc/pki/tls/certs/ca-bundle.crt
RUN yum clean all && \
rpm --rebuilddb && \ RUN curl -o krb5-libs-1.10.3-65.el6.x86_64.rpm ftp://ftp.pbone.net/mirror/vault.centos.org/6.10/os/x86_64/Packages/krb5-libs-1.10.3-65.el6.x86_64.rpm && \
yum -y update && \ curl -o krb5-workstation-1.10.3-65.el6.x86_64.rpm ftp://ftp.pbone.net/mirror/vault.centos.org/6.9/os/x86_64/Packages/krb5-workstation-1.10.3-65.el6.x86_64.rpm && \
yum -y install yum-plugin-ovl && \ curl -o libkadm5-1.10.3-65.el6.x86_64.rpm ftp://ftp.pbone.net/mirror/vault.centos.org/6.10/os/x86_64/Packages/libkadm5-1.10.3-65.el6.x86_64.rpm && \
yum --quiet -y install krb5-workstation.x86_64 curl -o libss-1.41.12-24.el6.x86_64.rpm ftp://ftp.pbone.net/mirror/vault.centos.org/6.9/cr/x86_64/Packages/libss-1.41.12-24.el6.x86_64.rpm && \
curl -o libcom_err-1.41.12-24.el6.x86_64.rpm ftp://ftp.pbone.net/mirror/vault.centos.org/6.9/cr/x86_64/Packages/libcom_err-1.41.12-24.el6.x86_64.rpm && \
rpm -Uvh libkadm5-1.10.3-65.el6.x86_64.rpm libss-1.41.12-24.el6.x86_64.rpm krb5-libs-1.10.3-65.el6.x86_64.rpm krb5-workstation-1.10.3-65.el6.x86_64.rpm libcom_err-1.41.12-24.el6.x86_64.rpm && \
rm -fr *.rpm
RUN cd /tmp && \ RUN cd /tmp && \
curl http://archive.apache.org/dist/commons/daemon/source/commons-daemon-1.0.15-src.tar.gz -o commons-daemon-1.0.15-src.tar.gz && \ curl http://archive.apache.org/dist/commons/daemon/source/commons-daemon-1.0.15-src.tar.gz -o commons-daemon-1.0.15-src.tar.gz && \

View File

@ -1,82 +0,0 @@
# docker build -t clickhouse/stateless-unbundled-test .
FROM clickhouse/test-base
ARG odbc_driver_url="https://github.com/ClickHouse/clickhouse-odbc/releases/download/v1.1.4.20200302/clickhouse-odbc-1.1.4-Linux.tar.gz"
RUN apt-get --allow-unauthenticated update -y \
&& env DEBIAN_FRONTEND=noninteractive \
apt-get --allow-unauthenticated install --yes --no-install-recommends \
alien \
brotli \
zstd \
cmake \
devscripts \
expect \
gdb \
gperf \
heimdal-multidev \
libboost-filesystem-dev \
libboost-iostreams-dev \
libboost-program-options-dev \
libboost-regex-dev \
libboost-system-dev \
libboost-thread-dev \
libc++-dev \
libc++abi-dev \
libcapnp-dev \
libcctz-dev \
libcurl4-openssl-dev \
libdouble-conversion-dev \
libgoogle-perftools-dev \
libhyperscan-dev \
libicu-dev \
libjemalloc-dev \
libldap2-dev \
libltdl-dev \
liblz4-dev \
libmsgpack-dev \
libpoco-dev \
libpoconetssl62 \
librdkafka-dev \
libre2-dev \
libreadline-dev \
libsasl2-dev \
libzstd-dev \
librocksdb-dev \
libgflags-dev \
lsof \
moreutils \
ncdu \
netcat-openbsd \
odbcinst \
openssl \
perl \
pigz \
pkg-config \
python3 \
python3-lxml \
python3-requests \
python3-termcolor \
python3-pip \
qemu-user-static \
sudo \
telnet \
tree \
tzdata \
unixodbc \
unixodbc-dev \
wget \
zlib1g-dev
RUN pip3 install numpy scipy pandas
RUN mkdir -p /tmp/clickhouse-odbc-tmp \
&& wget -nv -O - ${odbc_driver_url} | tar --strip-components=1 -xz -C /tmp/clickhouse-odbc-tmp \
&& cp /tmp/clickhouse-odbc-tmp/lib64/*.so /usr/local/lib/ \
&& odbcinst -i -d -f /tmp/clickhouse-odbc-tmp/share/doc/clickhouse-odbc/config/odbcinst.ini.sample \
&& odbcinst -i -s -l -f /tmp/clickhouse-odbc-tmp/share/doc/clickhouse-odbc/config/odbc.ini.sample \
&& rm -rf /tmp/clickhouse-odbc-tmp
COPY run.sh /
CMD ["/bin/bash", "/run.sh"]

View File

@ -1,15 +0,0 @@
# Since right now we can't set volumes to the docker during build, we split building container in stages:
# 1. build base container
# 2. run base conatiner with mounted volumes
# 3. commit container as image
FROM ubuntu:18.10 as clickhouse-test-runner-base
# A volume where directory with clickhouse packages to be mounted,
# for later installing.
VOLUME /packages
CMD apt-get update ;\
DEBIAN_FRONTEND=noninteractive \
apt install -y /packages/clickhouse-common-static_*.deb \
/packages/clickhouse-client_*.deb \
/packages/clickhouse-test_*.deb

View File

@ -1,16 +0,0 @@
#!/bin/bash
set -e -x
dpkg -i package_folder/clickhouse-common-static_*.deb
dpkg -i package_folder/clickhouse-common-static-dbg_*.deb
dpkg -i package_folder/clickhouse-server_*.deb
dpkg -i package_folder/clickhouse-client_*.deb
dpkg -i package_folder/clickhouse-test_*.deb
# install test configs
/usr/share/clickhouse-test/config/install.sh
service clickhouse-server start && sleep 5
clickhouse-test --testname --shard --zookeeper "$ADDITIONAL_OPTIONS" "$SKIP_TESTS_OPTION" 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee test_output/test_result.txt

View File

@ -126,7 +126,6 @@ Builds ClickHouse in various configurations for use in further steps. You have t
- **Compiler**: `gcc-9` or `clang-10` (or `clang-10-xx` for other architectures e.g. `clang-10-freebsd`). - **Compiler**: `gcc-9` or `clang-10` (or `clang-10-xx` for other architectures e.g. `clang-10-freebsd`).
- **Build type**: `Debug` or `RelWithDebInfo` (cmake). - **Build type**: `Debug` or `RelWithDebInfo` (cmake).
- **Sanitizer**: `none` (without sanitizers), `address` (ASan), `memory` (MSan), `undefined` (UBSan), or `thread` (TSan). - **Sanitizer**: `none` (without sanitizers), `address` (ASan), `memory` (MSan), `undefined` (UBSan), or `thread` (TSan).
- **Bundled**: `bundled` build uses libraries from `contrib` folder, and `unbundled` build uses system libraries.
- **Splitted** `splitted` is a [split build](../development/build.md#split-build) - **Splitted** `splitted` is a [split build](../development/build.md#split-build)
- **Status**: `success` or `fail` - **Status**: `success` or `fail`
- **Build log**: link to the building and files copying log, useful when build failed. - **Build log**: link to the building and files copying log, useful when build failed.

View File

@ -15,7 +15,7 @@ This dataset can be obtained in two ways:
Downloading data: Downloading data:
``` bash ``` bash
echo https://transtats.bts.gov/PREZIP/On_Time_Reporting_Carrier_On_Time_Performance_1987_present_{1987..2021}_{1..12}.zip | xargs -P10 wget --no-check-certificate --continue wget --no-check-certificate --continue https://transtats.bts.gov/PREZIP/On_Time_Reporting_Carrier_On_Time_Performance_1987_present_{1987..2021}_{1..12}.zip
``` ```
Creating a table: Creating a table:

View File

@ -61,7 +61,7 @@ The supported formats are:
| [Native](#native) | ✔ | ✔ | | [Native](#native) | ✔ | ✔ |
| [Null](#null) | ✗ | ✔ | | [Null](#null) | ✗ | ✔ |
| [XML](#xml) | ✗ | ✔ | | [XML](#xml) | ✗ | ✔ |
| [CapnProto](#capnproto) | ✔ | ✗ | | [CapnProto](#capnproto) | ✔ | ✔ |
| [LineAsString](#lineasstring) | ✔ | ✗ | | [LineAsString](#lineasstring) | ✔ | ✗ |
| [Regexp](#data-format-regexp) | ✔ | ✗ | | [Regexp](#data-format-regexp) | ✔ | ✗ |
| [RawBLOB](#rawblob) | ✔ | ✔ | | [RawBLOB](#rawblob) | ✔ | ✔ |
@ -1095,9 +1095,41 @@ Arrays are output as `<array><elem>Hello</elem><elem>World</elem>...</array>`,an
## CapnProto {#capnproto} ## CapnProto {#capnproto}
Capn Proto is a binary message format similar to Protocol Buffers and Thrift, but not like JSON or MessagePack. CapnProto is a binary message format similar to [Protocol Buffers](https://developers.google.com/protocol-buffers/) and [Thrift](https://en.wikipedia.org/wiki/Apache_Thrift), but not like [JSON](#json) or [MessagePack](https://msgpack.org/).
Capn Proto messages are strictly typed and not self-describing, meaning they need an external schema description. The schema is applied on the fly and cached for each query. CapnProto messages are strictly typed and not self-describing, meaning they need an external schema description. The schema is applied on the fly and cached for each query.
See also [Format Schema](#formatschema).
### Data Types Matching {#data_types-matching-capnproto}
The table below shows supported data types and how they match ClickHouse [data types](../sql-reference/data-types/index.md) in `INSERT` and `SELECT` queries.
| CapnProto data type (`INSERT`) | ClickHouse data type | CapnProto data type (`SELECT`) |
|--------------------------------|-----------------------------------------------------------|--------------------------------|
| `UINT8`, `BOOL` | [UInt8](../sql-reference/data-types/int-uint.md) | `UINT8` |
| `INT8` | [Int8](../sql-reference/data-types/int-uint.md) | `INT8` |
| `UINT16` | [UInt16](../sql-reference/data-types/int-uint.md), [Date](../sql-reference/data-types/date.md) | `UINT16` |
| `INT16` | [Int16](../sql-reference/data-types/int-uint.md) | `INT16` |
| `UINT32` | [UInt32](../sql-reference/data-types/int-uint.md), [DateTime](../sql-reference/data-types/datetime.md) | `UINT32` |
| `INT32` | [Int32](../sql-reference/data-types/int-uint.md) | `INT32` |
| `UINT64` | [UInt64](../sql-reference/data-types/int-uint.md) | `UINT64` |
| `INT64` | [Int64](../sql-reference/data-types/int-uint.md), [DateTime64](../sql-reference/data-types/datetime.md) | `INT64` |
| `FLOAT32` | [Float32](../sql-reference/data-types/float.md) | `FLOAT32` |
| `FLOAT64` | [Float64](../sql-reference/data-types/float.md) | `FLOAT64` |
| `TEXT, DATA` | [String](../sql-reference/data-types/string.md), [FixedString](../sql-reference/data-types/fixedstring.md) | `TEXT, DATA` |
| `union(T, Void), union(Void, T)` | [Nullable(T)](../sql-reference/data-types/nullable.md) | `union(T, Void), union(Void, T)` |
| `ENUM` | [Enum(8\|16)](../sql-reference/data-types/enum.md) | `ENUM` |
| `LIST` | [Array](../sql-reference/data-types/array.md) | `LIST` |
| `STRUCT` | [Tuple](../sql-reference/data-types/tuple.md) | `STRUCT` |
To work with `Enum` in the CapnProto format, use the [format_capn_proto_enum_comparising_mode](../operations/settings/settings.md#format-capn-proto-enum-comparising-mode) setting.
Arrays can be nested and can have a value of the `Nullable` type as an argument. The `Tuple` type can also be nested.
### Inserting and Selecting Data {#inserting-and-selecting-data-capnproto}
You can insert CapnProto data from a file into a ClickHouse table with the following command:
``` bash ``` bash
$ cat capnproto_messages.bin | clickhouse-client --query "INSERT INTO test.hits FORMAT CapnProto SETTINGS format_schema = 'schema:Message'" $ cat capnproto_messages.bin | clickhouse-client --query "INSERT INTO test.hits FORMAT CapnProto SETTINGS format_schema = 'schema:Message'"
@ -1112,9 +1144,11 @@ struct Message {
} }
``` ```
Deserialization is effective and usually does not increase the system load. You can select data from a ClickHouse table and save it into a file in the CapnProto format with the following command:
See also [Format Schema](#formatschema). ``` bash
$ clickhouse-client --query="SELECT * FROM test.hits FORMAT CapnProto SETTINGS format_schema = 'schema:Message'"
```
## Protobuf {#protobuf} ## Protobuf {#protobuf}

View File

@ -11,6 +11,7 @@ toc_title: Adopters
| Company | Industry | Usecase | Cluster Size | (Un)Compressed Data Size<abbr title="of single replica"><sup>\*</sup></abbr> | Reference | | Company | Industry | Usecase | Cluster Size | (Un)Compressed Data Size<abbr title="of single replica"><sup>\*</sup></abbr> | Reference |
|---------|----------|---------|--------------|------------------------------------------------------------------------------|-----------| |---------|----------|---------|--------------|------------------------------------------------------------------------------|-----------|
| <a href="https://2gis.ru" class="favicon">2gis</a> | Maps | Monitoring | — | — | [Talk in Russian, July 2019](https://youtu.be/58sPkXfq6nw) | | <a href="https://2gis.ru" class="favicon">2gis</a> | Maps | Monitoring | — | — | [Talk in Russian, July 2019](https://youtu.be/58sPkXfq6nw) |
| <a href="https://adapty.io/" class="favicon">Adapty</a> | Subscription Analytics | Main product | — | — | [Tweet, November 2021](https://twitter.com/iwitaly/status/1462698148061659139) |
| <a href="https://getadmiral.com/" class="favicon">Admiral</a> | Martech | Engagement Management | — | — | [Webinar Slides, June 2020](https://altinity.com/presentations/2020/06/16/big-data-in-real-time-how-clickhouse-powers-admirals-visitor-relationships-for-publishers) | | <a href="https://getadmiral.com/" class="favicon">Admiral</a> | Martech | Engagement Management | — | — | [Webinar Slides, June 2020](https://altinity.com/presentations/2020/06/16/big-data-in-real-time-how-clickhouse-powers-admirals-visitor-relationships-for-publishers) |
| <a href="http://www.adscribe.tv/" class="favicon">AdScribe</a> | Ads | TV Analytics | — | — | [A quote from CTO](https://altinity.com/24x7-support/) | | <a href="http://www.adscribe.tv/" class="favicon">AdScribe</a> | Ads | TV Analytics | — | — | [A quote from CTO](https://altinity.com/24x7-support/) |
| <a href="https://ahrefs.com/" class="favicon">Ahrefs</a> | SEO | Analytics | — | — | [Job listing](https://ahrefs.com/jobs/data-scientist-search) | | <a href="https://ahrefs.com/" class="favicon">Ahrefs</a> | SEO | Analytics | — | — | [Job listing](https://ahrefs.com/jobs/data-scientist-search) |
@ -19,7 +20,7 @@ toc_title: Adopters
| <a href="https://alohabrowser.com/" class="favicon">Aloha Browser</a> | Mobile App | Browser backend | — | — | [Slides in Russian, May 2019](https://presentations.clickhouse.com/meetup22/aloha.pdf) | | <a href="https://alohabrowser.com/" class="favicon">Aloha Browser</a> | Mobile App | Browser backend | — | — | [Slides in Russian, May 2019](https://presentations.clickhouse.com/meetup22/aloha.pdf) |
| <a href="https://altinity.com/" class="favicon">Altinity</a> | Cloud, SaaS | Main product | — | — | [Official Website](https://altinity.com/) | | <a href="https://altinity.com/" class="favicon">Altinity</a> | Cloud, SaaS | Main product | — | — | [Official Website](https://altinity.com/) |
| <a href="https://amadeus.com/" class="favicon">Amadeus</a> | Travel | Analytics | — | — | [Press Release, April 2018](https://www.altinity.com/blog/2018/4/5/amadeus-technologies-launches-investment-and-insights-tool-based-on-machine-learning-and-strategy-algorithms) | | <a href="https://amadeus.com/" class="favicon">Amadeus</a> | Travel | Analytics | — | — | [Press Release, April 2018](https://www.altinity.com/blog/2018/4/5/amadeus-technologies-launches-investment-and-insights-tool-based-on-machine-learning-and-strategy-algorithms) |
| <a href="https://apiroad.net/" class="favicon">ApiRoad</a> | API marketplace | Analytics | — | — | [Blog post, Nov 2018, Mar 2020](https://pixeljets.com/blog/clickhouse-vs-elasticsearch/) | | <a href="https://apiroad.net/" class="favicon">ApiRoad</a> | API marketplace | Analytics | — | — | [Blog post, November 2018, March 2020](https://pixeljets.com/blog/clickhouse-vs-elasticsearch/) |
| <a href="https://www.appsflyer.com" class="favicon">Appsflyer</a> | Mobile analytics | Main product | — | — | [Talk in Russian, July 2019](https://www.youtube.com/watch?v=M3wbRlcpBbY) | | <a href="https://www.appsflyer.com" class="favicon">Appsflyer</a> | Mobile analytics | Main product | — | — | [Talk in Russian, July 2019](https://www.youtube.com/watch?v=M3wbRlcpBbY) |
| <a href="https://arenadata.tech/" class="favicon">ArenaData</a> | Data Platform | Main product | — | — | [Slides in Russian, December 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup38/indexes.pdf) | | <a href="https://arenadata.tech/" class="favicon">ArenaData</a> | Data Platform | Main product | — | — | [Slides in Russian, December 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup38/indexes.pdf) |
| <a href="https://www.argedor.com/en/clickhouse/" class="favicon">Argedor</a> | ClickHouse support | — | — | — | [Official website](https://www.argedor.com/en/clickhouse/) | | <a href="https://www.argedor.com/en/clickhouse/" class="favicon">Argedor</a> | ClickHouse support | — | — | — | [Official website](https://www.argedor.com/en/clickhouse/) |
@ -50,6 +51,7 @@ toc_title: Adopters
| <a href="https://cryptology.com/" class="favicon">Cryptology</a> | Digital Assets Trading Platform | — | — | — | [Job advertisement, March 2021](https://career.habr.com/companies/cryptology/vacancies) | | <a href="https://cryptology.com/" class="favicon">Cryptology</a> | Digital Assets Trading Platform | — | — | — | [Job advertisement, March 2021](https://career.habr.com/companies/cryptology/vacancies) |
| <a href="https://www.chinatelecomglobal.com/" class="favicon">Dataliance for China Telecom</a> | Telecom | Analytics | — | — | [Slides in Chinese, January 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup12/telecom.pdf) | | <a href="https://www.chinatelecomglobal.com/" class="favicon">Dataliance for China Telecom</a> | Telecom | Analytics | — | — | [Slides in Chinese, January 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup12/telecom.pdf) |
| <a href="https://db.com" class="favicon">Deutsche Bank</a> | Finance | BI Analytics | — | — | [Slides in English, October 2019](https://bigdatadays.ru/wp-content/uploads/2019/10/D2-H3-3_Yakunin-Goihburg.pdf) | | <a href="https://db.com" class="favicon">Deutsche Bank</a> | Finance | BI Analytics | — | — | [Slides in English, October 2019](https://bigdatadays.ru/wp-content/uploads/2019/10/D2-H3-3_Yakunin-Goihburg.pdf) |
| <a href="https://www.deepl.com/" class="favicon">Deepl</a> | Machine Learning | — | — | — | [Video, October 2021](https://www.youtube.com/watch?v=WIYJiPwxXdM&t=1182s) |
| <a href="https://deeplay.io/eng/" class="favicon">Deeplay</a> | Gaming Analytics | — | — | — | [Job advertisement, 2020](https://career.habr.com/vacancies/1000062568) | | <a href="https://deeplay.io/eng/" class="favicon">Deeplay</a> | Gaming Analytics | — | — | — | [Job advertisement, 2020](https://career.habr.com/vacancies/1000062568) |
| <a href="https://www.diva-e.com" class="favicon">Diva-e</a> | Digital consulting | Main Product | — | — | [Slides in English, September 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup29/ClickHouse-MeetUp-Unusual-Applications-sd-2019-09-17.pdf) | | <a href="https://www.diva-e.com" class="favicon">Diva-e</a> | Digital consulting | Main Product | — | — | [Slides in English, September 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup29/ClickHouse-MeetUp-Unusual-Applications-sd-2019-09-17.pdf) |
| <a href="https://ecommpay.com/" class="favicon">Ecommpay</a> | Payment Processing | Logs | — | — | [Video, Nov 2019](https://www.youtube.com/watch?v=d3GdZTOWGLk) | | <a href="https://ecommpay.com/" class="favicon">Ecommpay</a> | Payment Processing | Logs | — | — | [Video, Nov 2019](https://www.youtube.com/watch?v=d3GdZTOWGLk) |
@ -65,6 +67,7 @@ toc_title: Adopters
| <a href="https://gigapipe.com/" class="favicon">Gigapipe</a> | Managed ClickHouse | Main product | — | — | [Official website](https://gigapipe.com/) | | <a href="https://gigapipe.com/" class="favicon">Gigapipe</a> | Managed ClickHouse | Main product | — | — | [Official website](https://gigapipe.com/) |
| <a href="https://glaber.io/" class="favicon">Glaber</a> | Monitoring | Main product | — | — | [Website](https://glaber.io/) | | <a href="https://glaber.io/" class="favicon">Glaber</a> | Monitoring | Main product | — | — | [Website](https://glaber.io/) |
| <a href="https://graphcdn.io/" class="favicon">GraphCDN</a> | CDN | Traffic Analytics | — | — | [Blog Post in English, August 2021](https://altinity.com/blog/delivering-insight-on-graphql-apis-with-clickhouse-at-graphcdn/) | | <a href="https://graphcdn.io/" class="favicon">GraphCDN</a> | CDN | Traffic Analytics | — | — | [Blog Post in English, August 2021](https://altinity.com/blog/delivering-insight-on-graphql-apis-with-clickhouse-at-graphcdn/) |
| <a href="https://www.grouparoo.com" class="favicon">Grouparoo</a> | Data Warehouse Integrations | Main product | — | — | [Official Website, November 2021](https://www.grouparoo.com/integrations) |
| <a href="https://www.huya.com/" class="favicon">HUYA</a> | Video Streaming | Analytics | — | — | [Slides in Chinese, October 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup19/7.%20ClickHouse万亿数据分析实践%20李本旺(sundy-li)%20虎牙.pdf) | | <a href="https://www.huya.com/" class="favicon">HUYA</a> | Video Streaming | Analytics | — | — | [Slides in Chinese, October 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup19/7.%20ClickHouse万亿数据分析实践%20李本旺(sundy-li)%20虎牙.pdf) |
| <a href="https://www.hydrolix.io/" class="favicon">Hydrolix</a> | Cloud data platform | Main product | — | — | [Documentation](https://docs.hydrolix.io/guide/query) | | <a href="https://www.hydrolix.io/" class="favicon">Hydrolix</a> | Cloud data platform | Main product | — | — | [Documentation](https://docs.hydrolix.io/guide/query) |
| <a href="https://www.the-ica.com/" class="favicon">ICA</a> | FinTech | Risk Management | — | — | [Blog Post in English, Sep 2020](https://altinity.com/blog/clickhouse-vs-redshift-performance-for-fintech-risk-management?utm_campaign=ClickHouse%20vs%20RedShift&utm_content=143520807&utm_medium=social&utm_source=twitter&hss_channel=tw-3894792263) | | <a href="https://www.the-ica.com/" class="favicon">ICA</a> | FinTech | Risk Management | — | — | [Blog Post in English, Sep 2020](https://altinity.com/blog/clickhouse-vs-redshift-performance-for-fintech-risk-management?utm_campaign=ClickHouse%20vs%20RedShift&utm_content=143520807&utm_medium=social&utm_source=twitter&hss_channel=tw-3894792263) |
@ -89,7 +92,7 @@ toc_title: Adopters
| <a href="https://mcs.mail.ru/" class="favicon">Mail.ru Cloud Solutions</a> | Cloud services | Main product | — | — | [Article in Russian](https://mcs.mail.ru/help/db-create/clickhouse#) | | <a href="https://mcs.mail.ru/" class="favicon">Mail.ru Cloud Solutions</a> | Cloud services | Main product | — | — | [Article in Russian](https://mcs.mail.ru/help/db-create/clickhouse#) |
| <a href="https://maxilect.com/" class="favicon">MAXILECT</a> | Ad Tech, Blockchain, ML, AI | — | — | — | [Job advertisement, 2021](https://www.linkedin.com/feed/update/urn:li:activity:6780842017229430784/) | | <a href="https://maxilect.com/" class="favicon">MAXILECT</a> | Ad Tech, Blockchain, ML, AI | — | — | — | [Job advertisement, 2021](https://www.linkedin.com/feed/update/urn:li:activity:6780842017229430784/) |
| <a href="https://tech.mymarilyn.ru" class="favicon">Marilyn</a> | Advertising | Statistics | — | — | [Talk in Russian, June 2017](https://www.youtube.com/watch?v=iXlIgx2khwc) | | <a href="https://tech.mymarilyn.ru" class="favicon">Marilyn</a> | Advertising | Statistics | — | — | [Talk in Russian, June 2017](https://www.youtube.com/watch?v=iXlIgx2khwc) |
| <a href="https://mellodesign.ru/" class="favicon">Mello</a> | Marketing | Analytics | 1 server | — | [Article, Oct 2020](https://vc.ru/marketing/166180-razrabotka-tipovogo-otcheta-skvoznoy-analitiki) | | <a href="https://mellodesign.ru/" class="favicon">Mello</a> | Marketing | Analytics | 1 server | — | [Article, October 2020](https://vc.ru/marketing/166180-razrabotka-tipovogo-otcheta-skvoznoy-analitiki) |
| <a href="https://www.messagebird.com" class="favicon">MessageBird</a> | Telecommunications | Statistics | — | — | [Slides in English, November 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup20/messagebird.pdf) | | <a href="https://www.messagebird.com" class="favicon">MessageBird</a> | Telecommunications | Statistics | — | — | [Slides in English, November 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup20/messagebird.pdf) |
| <a href="https://clarity.microsoft.com/" class="favicon">Microsoft</a> | Web Analytics | Clarity (Main Product) | — | — | [A question on GitHub](https://github.com/ClickHouse/ClickHouse/issues/21556) | | <a href="https://clarity.microsoft.com/" class="favicon">Microsoft</a> | Web Analytics | Clarity (Main Product) | — | — | [A question on GitHub](https://github.com/ClickHouse/ClickHouse/issues/21556) |
| <a href="https://www.mindsdb.com/" class="favicon">MindsDB</a> | Machine Learning | Main Product | — | — | [Official Website](https://www.mindsdb.com/blog/machine-learning-models-as-tables-in-ch) | | <a href="https://www.mindsdb.com/" class="favicon">MindsDB</a> | Machine Learning | Main Product | — | — | [Official Website](https://www.mindsdb.com/blog/machine-learning-models-as-tables-in-ch) |
@ -100,17 +103,16 @@ toc_title: Adopters
| <a href="https://getnoc.com/" class="favicon">NOC Project</a> | Network Monitoring | Analytics | Main Product | — | [Official Website](https://getnoc.com/features/big-data/) | | <a href="https://getnoc.com/" class="favicon">NOC Project</a> | Network Monitoring | Analytics | Main Product | — | [Official Website](https://getnoc.com/features/big-data/) |
| <a href="https://www.noction.com" class="favicon">Noction</a> | Network Technology | Main Product | — | — | [Official Website](https://www.noction.com/news/irp-3-11-remote-triggered-blackholing-capability) | <a href="https://www.noction.com" class="favicon">Noction</a> | Network Technology | Main Product | — | — | [Official Website](https://www.noction.com/news/irp-3-11-remote-triggered-blackholing-capability)
| <a href="https://www.nuna.com/" class="favicon">Nuna Inc.</a> | Health Data Analytics | — | — | — | [Talk in English, July 2020](https://youtu.be/GMiXCMFDMow?t=170) | | <a href="https://www.nuna.com/" class="favicon">Nuna Inc.</a> | Health Data Analytics | — | — | — | [Talk in English, July 2020](https://youtu.be/GMiXCMFDMow?t=170) |
| <a href="https://ok.ru" class="favicon">Ok.ru</a> | Social Network | — | 72 servers | 810 TB compressed, 50bn rows/day, 1.5 TB/day | [SmartData conference, Oct 2021](https://assets.ctfassets.net/oxjq45e8ilak/4JPHkbJenLgZhBGGyyonFP/57472ec6987003ec4078d0941740703b/____________________ClickHouse_______________________.pdf) | | <a href="https://ok.ru" class="favicon">Ok.ru</a> | Social Network | — | 72 servers | 810 TB compressed, 50bn rows/day, 1.5 TB/day | [SmartData conference, October 2021](https://assets.ctfassets.net/oxjq45e8ilak/4JPHkbJenLgZhBGGyyonFP/57472ec6987003ec4078d0941740703b/____________________ClickHouse_______________________.pdf) |
| <a href="https://omnicomm.ru/" class="favicon">Omnicomm</a> | Transportation Monitoring | — | — | — | [Facebook post, Oct 2021](https://www.facebook.com/OmnicommTeam/posts/2824479777774500) | | <a href="https://omnicomm.ru/" class="favicon">Omnicomm</a> | Transportation Monitoring | — | — | — | [Facebook post, October 2021](https://www.facebook.com/OmnicommTeam/posts/2824479777774500) |
| <a href="https://www.oneapm.com/" class="favicon">OneAPM</a> | Monitoring and Data Analysis | Main product | — | — | [Slides in Chinese, October 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup19/8.%20clickhouse在OneAPM的应用%20杜龙.pdf) | | <a href="https://www.oneapm.com/" class="favicon">OneAPM</a> | Monitoring and Data Analysis | Main product | — | — | [Slides in Chinese, October 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup19/8.%20clickhouse在OneAPM的应用%20杜龙.pdf) |
| <a href="https://www.opentargets.org/" class="favicon">Open Targets</a> | Genome Research | Genome Search | — | — | [Tweet, Oct 2021](https://twitter.com/OpenTargets/status/1452570865342758913?s=20), [Blog](https://blog.opentargets.org/graphql/) | | <a href="https://www.opentargets.org/" class="favicon">Open Targets</a> | Genome Research | Genome Search | — | — | [Tweet, October 2021](https://twitter.com/OpenTargets/status/1452570865342758913?s=20), [Blog](https://blog.opentargets.org/graphql/) |
| <a href="https://corp.ozon.com/" class="favicon">OZON</a> | E-commerce | — | — | — | [Official website](https://job.ozon.ru/vacancy/razrabotchik-clickhouse-ekspluatatsiya-40991870/) | | <a href="https://corp.ozon.com/" class="favicon">OZON</a> | E-commerce | — | — | — | [Official website](https://job.ozon.ru/vacancy/razrabotchik-clickhouse-ekspluatatsiya-40991870/) |
| <a href="https://panelbear.com/" class="favicon">Panelbear | Analytics | Monitoring and Analytics | — | — | [Tech Stack, November 2020](https://panelbear.com/blog/tech-stack/) | | <a href="https://panelbear.com/" class="favicon">Panelbear | Analytics | Monitoring and Analytics | — | — | [Tech Stack, November 2020](https://panelbear.com/blog/tech-stack/) |
| <a href="https://www.percent.cn/" class="favicon">Percent 百分点</a> | Analytics | Main Product | — | — | [Slides in Chinese, June 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup24/4.%20ClickHouse万亿数据双中心的设计与实践%20.pdf) | | <a href="https://www.percent.cn/" class="favicon">Percent 百分点</a> | Analytics | Main Product | — | — | [Slides in Chinese, June 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup24/4.%20ClickHouse万亿数据双中心的设计与实践%20.pdf) |
| <a href="https://www.percona.com/" class="favicon">Percona</a> | Performance analysis | Percona Monitoring and Management | — | — | [Official website, Mar 2020](https://www.percona.com/blog/2020/03/30/advanced-query-analysis-in-percona-monitoring-and-management-with-direct-clickhouse-access/) | | <a href="https://www.percona.com/" class="favicon">Percona</a> | Performance analysis | Percona Monitoring and Management | — | — | [Official website, Mar 2020](https://www.percona.com/blog/2020/03/30/advanced-query-analysis-in-percona-monitoring-and-management-with-direct-clickhouse-access/) |
| <a href="https://piwik.pro/" class="favicon">Piwik PRO</a> | Web Analytics | Main Product | — | — | [Official website, Dec 2018](https://piwik.pro/blog/piwik-pro-clickhouse-faster-efficient-reports/) |
| <a href="https://plausible.io/" class="favicon">Plausible</a> | Analytics | Main Product | — | — | [Blog post, June 2020](https://twitter.com/PlausibleHQ/status/1273889629087969280) | | <a href="https://plausible.io/" class="favicon">Plausible</a> | Analytics | Main Product | — | — | [Blog post, June 2020](https://twitter.com/PlausibleHQ/status/1273889629087969280) |
| <a href="https://posthog.com/" class="favicon">PostHog</a> | Product Analytics | Main Product | — | — | [Release Notes, Oct 2020](https://posthog.com/blog/the-posthog-array-1-15-0) | | <a href="https://posthog.com/" class="favicon">PostHog</a> | Product Analytics | Main Product | — | — | [Release Notes, October 2020](https://posthog.com/blog/the-posthog-array-1-15-0), [Blog, November 2021](https://posthog.com/blog/how-we-turned-clickhouse-into-our-eventmansion) |
| <a href="https://postmates.com/" class="favicon">Postmates</a> | Delivery | — | — | — | [Talk in English, July 2020](https://youtu.be/GMiXCMFDMow?t=188) | | <a href="https://postmates.com/" class="favicon">Postmates</a> | Delivery | — | — | — | [Talk in English, July 2020](https://youtu.be/GMiXCMFDMow?t=188) |
| <a href="http://www.pragma-innovation.fr/" class="favicon">Pragma Innovation</a> | Telemetry and Big Data Analysis | Main product | — | — | [Slides in English, October 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup18/4_pragma_innovation.pdf) | | <a href="http://www.pragma-innovation.fr/" class="favicon">Pragma Innovation</a> | Telemetry and Big Data Analysis | Main product | — | — | [Slides in English, October 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup18/4_pragma_innovation.pdf) |
| <a href="https://prana-system.com/en/" class="favicon">PRANA</a> | Industrial predictive analytics | Main product | — | — | [News (russian), Feb 2021](https://habr.com/en/news/t/541392/) | | <a href="https://prana-system.com/en/" class="favicon">PRANA</a> | Industrial predictive analytics | Main product | — | — | [News (russian), Feb 2021](https://habr.com/en/news/t/541392/) |
@ -152,6 +154,7 @@ toc_title: Adopters
| <a href="https://trafficstars.com/" class="favicon">Traffic Stars</a> | AD network | — | 300 servers in Europe/US | 1.8 PiB, 700 000 insert rps (as of 2021) | [Slides in Russian, May 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup15/lightning/ninja.pdf) | | <a href="https://trafficstars.com/" class="favicon">Traffic Stars</a> | AD network | — | 300 servers in Europe/US | 1.8 PiB, 700 000 insert rps (as of 2021) | [Slides in Russian, May 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup15/lightning/ninja.pdf) |
| <a href="https://www.uber.com" class="favicon">Uber</a> | Taxi | Logging | — | — | [Slides, February 2020](https://presentations.clickhouse.com/meetup40/uber.pdf) | | <a href="https://www.uber.com" class="favicon">Uber</a> | Taxi | Logging | — | — | [Slides, February 2020](https://presentations.clickhouse.com/meetup40/uber.pdf) |
| <a href="https://hello.utmstat.com/" class="favicon">UTMSTAT</a> | Analytics | Main product | — | — | [Blog post, June 2020](https://vc.ru/tribuna/133956-striming-dannyh-iz-servisa-skvoznoy-analitiki-v-clickhouse) | | <a href="https://hello.utmstat.com/" class="favicon">UTMSTAT</a> | Analytics | Main product | — | — | [Blog post, June 2020](https://vc.ru/tribuna/133956-striming-dannyh-iz-servisa-skvoznoy-analitiki-v-clickhouse) |
| <a href="https://vercel.com/" class="favicon">Vercel</a> | Traffic and Performance Analytics | — | — | — | Direct reference, October 2021 |
| <a href="https://vk.com" class="favicon">VKontakte</a> | Social Network | Statistics, Logging | — | — | [Slides in Russian, August 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup17/3_vk.pdf) | | <a href="https://vk.com" class="favicon">VKontakte</a> | Social Network | Statistics, Logging | — | — | [Slides in Russian, August 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup17/3_vk.pdf) |
| <a href="https://www.vmware.com/" class="favicon">VMware</a> | Cloud | VeloCloud, SDN | — | — | [Product documentation](https://docs.vmware.com/en/vRealize-Operations-Manager/8.3/com.vmware.vcom.metrics.doc/GUID-A9AD72E1-C948-4CA2-971B-919385AB3CA8.html) | | <a href="https://www.vmware.com/" class="favicon">VMware</a> | Cloud | VeloCloud, SDN | — | — | [Product documentation](https://docs.vmware.com/en/vRealize-Operations-Manager/8.3/com.vmware.vcom.metrics.doc/GUID-A9AD72E1-C948-4CA2-971B-919385AB3CA8.html) |
| <a href="https://www.walmartlabs.com/" class="favicon">Walmart Labs</a> | Internet, Retail | — | — | — | [Talk in English, July 2020](https://youtu.be/GMiXCMFDMow?t=144) | | <a href="https://www.walmartlabs.com/" class="favicon">Walmart Labs</a> | Internet, Retail | — | — | — | [Talk in English, July 2020](https://youtu.be/GMiXCMFDMow?t=144) |
@ -167,6 +170,7 @@ toc_title: Adopters
| <a href="https://market.yandex.ru/" class="favicon">Yandex Market</a> | e-Commerce | Metrics, Logging | — | — | [Talk in Russian, January 2019](https://youtu.be/_l1qP0DyBcA?t=478) | | <a href="https://market.yandex.ru/" class="favicon">Yandex Market</a> | e-Commerce | Metrics, Logging | — | — | [Talk in Russian, January 2019](https://youtu.be/_l1qP0DyBcA?t=478) |
| <a href="https://metrica.yandex.com" class="favicon">Yandex Metrica</a> | Web analytics | Main product | 630 servers in one cluster, 360 servers in another cluster, 1862 servers in one department | 133 PiB / 8.31 PiB / 120 trillion records | [Slides, February 2020](https://presentations.clickhouse.com/meetup40/introduction/#13) | | <a href="https://metrica.yandex.com" class="favicon">Yandex Metrica</a> | Web analytics | Main product | 630 servers in one cluster, 360 servers in another cluster, 1862 servers in one department | 133 PiB / 8.31 PiB / 120 trillion records | [Slides, February 2020](https://presentations.clickhouse.com/meetup40/introduction/#13) |
| <a href="https://www.yotascale.com/" class="favicon">Yotascale</a> | Cloud | Data pipeline | — | 2 bn records/day | [LinkedIn (Accomplishments)](https://www.linkedin.com/in/adilsaleem/) | | <a href="https://www.yotascale.com/" class="favicon">Yotascale</a> | Cloud | Data pipeline | — | 2 bn records/day | [LinkedIn (Accomplishments)](https://www.linkedin.com/in/adilsaleem/) |
| <a href="https://www.your-analytics.org/" class="favicon">Your Analytics</a> | Product Analytics | Main Product | — | - | [Tweet, November 2021](https://twitter.com/mikenikles/status/1459737241165565953) |
| <a href="https://zagravagames.com/en/" class="favicon">Zagrava Trading</a> | — | — | — | — | [Job offer, May 2021](https://twitter.com/datastackjobs/status/1394707267082063874) | | <a href="https://zagravagames.com/en/" class="favicon">Zagrava Trading</a> | — | — | — | — | [Job offer, May 2021](https://twitter.com/datastackjobs/status/1394707267082063874) |
| <a href="https://htc-cs.ru/" class="favicon">ЦВТ</a> | Software Development | Metrics, Logging | — | — | [Blog Post, March 2019, in Russian](https://vc.ru/dev/62715-kak-my-stroili-monitoring-na-prometheus-clickhouse-i-elk) | | <a href="https://htc-cs.ru/" class="favicon">ЦВТ</a> | Software Development | Metrics, Logging | — | — | [Blog Post, March 2019, in Russian](https://vc.ru/dev/62715-kak-my-stroili-monitoring-na-prometheus-clickhouse-i-elk) |
| <a href="https://mkb.ru/" class="favicon">МКБ</a> | Bank | Web-system monitoring | — | — | [Slides in Russian, September 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup28/mkb.pdf) | | <a href="https://mkb.ru/" class="favicon">МКБ</a> | Bank | Web-system monitoring | — | — | [Slides in Russian, September 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup28/mkb.pdf) |
@ -174,8 +178,5 @@ toc_title: Adopters
| <a href="https://promo.croc.ru/digitalworker" class="favicon">Цифровой Рабочий</a> | Industrial IoT, Analytics | — | — | — | [Blog post in Russian, March 2021](https://habr.com/en/company/croc/blog/548018/) | | <a href="https://promo.croc.ru/digitalworker" class="favicon">Цифровой Рабочий</a> | Industrial IoT, Analytics | — | — | — | [Blog post in Russian, March 2021](https://habr.com/en/company/croc/blog/548018/) |
| <a href="https://shop.okraina.ru/" class="favicon">ООО «МПЗ Богородский»</a> | Agriculture | — | — | — | [Article in Russian, November 2020](https://cloud.yandex.ru/cases/okraina) | | <a href="https://shop.okraina.ru/" class="favicon">ООО «МПЗ Богородский»</a> | Agriculture | — | — | — | [Article in Russian, November 2020](https://cloud.yandex.ru/cases/okraina) |
| <a href="https://domclick.ru/" class="favicon">ДомКлик</a> | Real Estate | — | — | — | [Article in Russian, October 2021](https://habr.com/ru/company/domclick/blog/585936/) | | <a href="https://domclick.ru/" class="favicon">ДомКлик</a> | Real Estate | — | — | — | [Article in Russian, October 2021](https://habr.com/ru/company/domclick/blog/585936/) |
| <a href="https://www.deepl.com/" class="favicon">Deepl</a> | Machine Learning | — | — | — | [Video, October 2021](https://www.youtube.com/watch?v=WIYJiPwxXdM&t=1182s) |
| <a href="https://vercel.com/" class="favicon">Vercel</a> | Traffic and Performance Analytics | — | — | — | Direct reference, October 2021 |
| <a href="https://www.your-analytics.org/" class="favicon">YourAnalytics</a> | Web Analytics | — | — | — | [Tweet, Nov 2021](https://twitter.com/mikenikles/status/1460860140249235461) |
[Original article](https://clickhouse.com/docs/en/introduction/adopters/) <!--hide--> [Original article](https://clickhouse.com/docs/en/introduction/adopters/) <!--hide-->

View File

@ -49,6 +49,7 @@ Internal coordination settings are located in `<keeper_server>.<coordination_set
- `auto_forwarding` — Allow to forward write requests from followers to the leader (default: true). - `auto_forwarding` — Allow to forward write requests from followers to the leader (default: true).
- `shutdown_timeout` — Wait to finish internal connections and shutdown (ms) (default: 5000). - `shutdown_timeout` — Wait to finish internal connections and shutdown (ms) (default: 5000).
- `startup_timeout` — If the server doesn't connect to other quorum participants in the specified timeout it will terminate (ms) (default: 30000). - `startup_timeout` — If the server doesn't connect to other quorum participants in the specified timeout it will terminate (ms) (default: 30000).
- `four_letter_word_white_list` — White list of 4lw commands (default: "conf,cons,crst,envi,ruok,srst,srvr,stat,wchc,wchs,dirs,mntr,isro").
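For a quick check of which 4lw commands are currently allowed on a running node, the effective white list can be read back with the `conf` command described below (the port 9181 matches the examples further down and is an assumption for your setup):
``` bash
# Print only the four_letter_word_white_list line from the serving configuration.
echo conf | nc localhost 9181 | grep four_letter_word_white_list
```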
Quorum configuration is located in the `<keeper_server>.<raft_configuration>` section and contains the description of servers. Quorum configuration is located in the `<keeper_server>.<raft_configuration>` section and contains the description of servers.
@ -104,6 +105,196 @@ ClickHouse Keeper is bundled into the ClickHouse server package, just add config
clickhouse-keeper --config /etc/your_path_to_config/config.xml --daemon clickhouse-keeper --config /etc/your_path_to_config/config.xml --daemon
``` ```
## Four Letter Word Commands
ClickHouse Keeper also provides 4lw commands that are almost the same as in ZooKeeper. Each command is composed of four letters, such as `mntr`, `stat`, etc. Some commands are more interesting: `stat` gives general information about the server and connected clients, while `srvr` and `cons` give extended details on the server and connections respectively.
The 4lw commands have a white list configuration, `four_letter_word_white_list`, whose default value is "conf,cons,crst,envi,ruok,srst,srvr,stat,wchc,wchs,dirs,mntr,isro".
You can issue the commands to ClickHouse Keeper via telnet or nc, at the client port.
```
echo mntr | nc localhost 9181
```
The 4lw commands are described in detail below:
- ruok : Tests if the server is running in a non-error state. The server will respond with `imok` if it is running. Otherwise it will not respond at all. A response of "imok" does not necessarily indicate that the server has joined the quorum, just that the server process is active and bound to the specified client port. Use "stat" for details on the state with respect to the quorum and client connection information.
```
imok
```
- mntr : Outputs a list of variables that could be used for monitoring the health of the cluster.
```
zk_version v21.11.1.1-prestable-7a4a0b0edef0ad6e0aa662cd3b90c3f4acf796e7
zk_avg_latency 0
zk_max_latency 0
zk_min_latency 0
zk_packets_received 68
zk_packets_sent 68
zk_num_alive_connections 1
zk_outstanding_requests 0
zk_server_state leader
zk_znode_count 4
zk_watch_count 1
zk_ephemerals_count 0
zk_approximate_data_size 723
zk_open_file_descriptor_count 310
zk_max_file_descriptor_count 10240
zk_followers 0
zk_synced_followers 0
```
- srvr : Lists full details for the server.
```
ClickHouse Keeper version: v21.11.1.1-prestable-7a4a0b0edef0ad6e0aa662cd3b90c3f4acf796e7
Latency min/avg/max: 0/0/0
Received: 2
Sent : 2
Connections: 1
Outstanding: 0
Zxid: 34
Mode: leader
Node count: 4
```
- stat : Lists brief details for the server and connected clients.
```
ClickHouse Keeper version: v21.11.1.1-prestable-7a4a0b0edef0ad6e0aa662cd3b90c3f4acf796e7
Clients:
192.168.1.1:52852(recved=0,sent=0)
192.168.1.1:52042(recved=24,sent=48)
Latency min/avg/max: 0/0/0
Received: 4
Sent : 4
Connections: 1
Outstanding: 0
Zxid: 36
Mode: leader
Node count: 4
```
- srst : Reset server statistics. The command will affect the result of `srvr`, `mntr` and `stat`.
```
Server stats reset.
```
- conf : Print details about serving configuration.
```
server_id=1
tcp_port=2181
four_letter_word_white_list=*
log_storage_path=./coordination/logs
snapshot_storage_path=./coordination/snapshots
max_requests_batch_size=100
session_timeout_ms=30000
operation_timeout_ms=10000
dead_session_check_period_ms=500
heart_beat_interval_ms=500
election_timeout_lower_bound_ms=1000
election_timeout_upper_bound_ms=2000
reserved_log_items=1000000000000000
snapshot_distance=10000
auto_forwarding=true
shutdown_timeout=5000
startup_timeout=240000
raft_logs_level=information
snapshots_to_keep=3
rotate_log_storage_interval=100000
stale_log_gap=10000
fresh_log_gap=200
max_requests_batch_size=100
quorum_reads=false
force_sync=false
compress_logs=true
compress_snapshots_with_zstd_format=true
configuration_change_tries_count=20
```
- cons : List full connection/session details for all clients connected to this server. Includes information on numbers of packets received/sent, session id, operation latencies, last operation performed, etc...
```
192.168.1.1:52163(recved=0,sent=0,sid=0xffffffffffffffff,lop=NA,est=1636454787393,to=30000,lzxid=0xffffffffffffffff,lresp=0,llat=0,minlat=0,avglat=0,maxlat=0)
192.168.1.1:52042(recved=9,sent=18,sid=0x0000000000000001,lop=List,est=1636454739887,to=30000,lcxid=0x0000000000000005,lzxid=0x0000000000000005,lresp=1636454739892,llat=0,minlat=0,avglat=0,maxlat=0)
```
- crst : Reset connection/session statistics for all connections.
```
Connection stats reset.
```
- envi : Print details about serving environment
```
Environment:
clickhouse.keeper.version=v21.11.1.1-prestable-7a4a0b0edef0ad6e0aa662cd3b90c3f4acf796e7
host.name=ZBMAC-C02D4054M.local
os.name=Darwin
os.arch=x86_64
os.version=19.6.0
cpu.count=12
user.name=root
user.home=/Users/JackyWoo/
user.dir=/Users/JackyWoo/project/jd/clickhouse/cmake-build-debug/programs/
user.tmp=/var/folders/b4/smbq5mfj7578f2jzwn602tt40000gn/T/
```
- dirs : Shows the total size of snapshot and log files in bytes
```
snapshot_dir_size: 0
log_dir_size: 3875
```
- isro: Tests if server is running in read-only mode. The server will respond with "ro" if in read-only mode or "rw" if not in read-only mode.
```
rw
```
- wchs : Lists brief information on watches for the server.
```
1 connections watching 1 paths
Total watches:1
```
- wchc : Lists detailed information on watches for the server, by session. This outputs a list of sessions (connections) with associated watches (paths). Note: depending on the number of watches this operation may be expensive (i.e. it may impact server performance), so use it carefully.
```
0x0000000000000001
/clickhouse/task_queue/ddl
```
- wchp : Lists detailed information on watches for the server, by path. This outputs a list of paths (znodes) with associated sessions. Note: depending on the number of watches this operation may be expensive (i.e. it may impact server performance), so use it carefully.
```
/clickhouse/task_queue/ddl
0x0000000000000001
```
- dump : Lists the outstanding sessions and ephemeral nodes. This only works on the leader.
```
Sessions dump (2):
0x0000000000000001
0x0000000000000002
Sessions with Ephemerals (1):
0x0000000000000001
/clickhouse/task_queue/ddl
```
## [experimental] Migration from ZooKeeper ## [experimental] Migration from ZooKeeper
Seamless migration from ZooKeeper to ClickHouse Keeper is impossible: you have to stop your ZooKeeper cluster, convert the data, and start ClickHouse Keeper. The `clickhouse-keeper-converter` tool allows converting ZooKeeper logs and snapshots to a ClickHouse Keeper snapshot. It works only with ZooKeeper > 3.4. Steps for migration: Seamless migration from ZooKeeper to ClickHouse Keeper is impossible: you have to stop your ZooKeeper cluster, convert the data, and start ClickHouse Keeper. The `clickhouse-keeper-converter` tool allows converting ZooKeeper logs and snapshots to a ClickHouse Keeper snapshot. It works only with ZooKeeper > 3.4. Steps for migration:
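One of the migration steps is running the converter itself; a sketch of that invocation (the ZooKeeper and Keeper data paths are assumptions for a typical installation):
``` bash
# Convert ZooKeeper logs and snapshots into a ClickHouse Keeper snapshot.
# Run this only after the ZooKeeper cluster has been stopped.
clickhouse-keeper-converter \
    --zookeeper-logs-dir /var/lib/zookeeper/version-2 \
    --zookeeper-snapshots-dir /var/lib/zookeeper/version-2 \
    --output-dir /var/lib/clickhouse/coordination/snapshots
```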

View File

@ -4049,6 +4049,18 @@ Possible values:
Default value: `0`. Default value: `0`.
## format_capn_proto_enum_comparising_mode {#format-capn-proto-enum-comparising-mode}
Determines how the ClickHouse `Enum` data type is mapped to the [CapnProto](../../interfaces/formats.md#capnproto) `Enum` data type from the schema.
Possible values:
- `'by_values'` — Values in enums should be the same, names can be different.
- `'by_names'` — Names in enums should be the same, values can be different.
- `'by_name_case_insensitive'` — Names in enums should match case-insensitively, values can be different.
Default value: `'by_values'`.
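A hedged usage sketch for this setting (the table, schema file, and message type names follow the CapnProto examples in the formats documentation and are assumptions here):
``` bash
# Match CapnProto enum fields to the ClickHouse Enum by name instead of by numeric value.
cat capnproto_messages.bin | clickhouse-client --query "INSERT INTO test.hits FORMAT CapnProto SETTINGS format_schema = 'schema:Message', format_capn_proto_enum_comparising_mode = 'by_names'"
```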
## min_bytes_to_use_mmap_io {#min-bytes-to-use-mmap-io} ## min_bytes_to_use_mmap_io {#min-bytes-to-use-mmap-io}
This is an experimental setting. Sets the minimum amount of memory for reading large files without copying data from the kernel to userspace. Recommended threshold is about 64 MB, because [mmap/munmap](https://en.wikipedia.org/wiki/Mmap) is slow. It makes sense only for large files and helps only if data reside in the page cache. This is an experimental setting. Sets the minimum amount of memory for reading large files without copying data from the kernel to userspace. Recommended threshold is about 64 MB, because [mmap/munmap](https://en.wikipedia.org/wiki/Mmap) is slow. It makes sense only for large files and helps only if data reside in the page cache.

View File

@ -373,7 +373,7 @@ The same as `multiMatchAny`, but returns the array of all indicies that match th
## multiFuzzyMatchAny(haystack, distance, \[pattern<sub>1</sub>, pattern<sub>2</sub>, …, pattern<sub>n</sub>\]) {#multifuzzymatchanyhaystack-distance-pattern1-pattern2-patternn}
The same as `multiMatchAny`, but returns 1 if any pattern matches the haystack within a constant [edit distance](https://en.wikipedia.org/wiki/Edit_distance). This function relies on the experimental feature of the [hyperscan](https://intel.github.io/hyperscan/dev-reference/compilation.html#approximate-matching) library and can be slow for some corner cases. The performance depends on the edit distance value and the patterns used, but it is always more expensive compared to the non-fuzzy variants.
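A minimal usage sketch (the strings are made up; with an edit distance of 2 the misspelled pattern should still match):
``` bash
clickhouse-client --query "SELECT multiFuzzyMatchAny('ClickHouse 21.11', 2, ['ClckHose', 'postgres'])"
# Expected to return 1: 'ClckHose' is within edit distance 2 of the substring 'ClickHouse'.
```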
## multiFuzzyMatchAnyIndex(haystack, distance, \[pattern<sub>1</sub>, pattern<sub>2</sub>, …, pattern<sub>n</sub>\]) {#multifuzzymatchanyindexhaystack-distance-pattern1-pattern2-patternn}

View File

@ -54,7 +54,7 @@ ClickHouse can accept (`INSERT`) and return (`SELECT
| [Native](#native) | ✔ | ✔ |
| [Null](#null) | ✗ | ✔ |
| [XML](#xml) | ✗ | ✔ |
| [CapnProto](#capnproto) | ✔ | ✔ |
| [LineAsString](#lineasstring) | ✔ | ✗ |
| [Regexp](#data-format-regexp) | ✔ | ✗ |
| [RawBLOB](#rawblob) | ✔ | ✔ |
@ -1013,9 +1013,41 @@ test: string with 'quotes' and with some special
## CapnProto {#capnproto}
CapnProto is a binary message format, similar to [Protocol Buffers](https://developers.google.com/protocol-buffers/) and [Thrift](https://ru.wikipedia.org/wiki/Apache_Thrift), but not like [JSON](#json) or [MessagePack](https://msgpack.org/).
CapnProto messages are strictly typed and not self-describing, which means they need an external schema description. The schema is applied on the fly and cached between queries.
See also [Format Schema](#formatschema).
### Data Types Matching {#data_types-matching-capnproto}
The table below shows the supported data types and how they match the ClickHouse [data types](../sql-reference/data-types/index.md) in `INSERT` and `SELECT` queries.
| CapnProto data type (`INSERT`) | ClickHouse data type | CapnProto data type (`SELECT`) |
|--------------------------------|-----------------------------------------------------------|--------------------------------|
| `UINT8`, `BOOL` | [UInt8](../sql-reference/data-types/int-uint.md) | `UINT8` |
| `INT8` | [Int8](../sql-reference/data-types/int-uint.md) | `INT8` |
| `UINT16` | [UInt16](../sql-reference/data-types/int-uint.md), [Date](../sql-reference/data-types/date.md) | `UINT16` |
| `INT16` | [Int16](../sql-reference/data-types/int-uint.md) | `INT16` |
| `UINT32` | [UInt32](../sql-reference/data-types/int-uint.md), [DateTime](../sql-reference/data-types/datetime.md) | `UINT32` |
| `INT32` | [Int32](../sql-reference/data-types/int-uint.md) | `INT32` |
| `UINT64` | [UInt64](../sql-reference/data-types/int-uint.md) | `UINT64` |
| `INT64` | [Int64](../sql-reference/data-types/int-uint.md), [DateTime64](../sql-reference/data-types/datetime.md) | `INT64` |
| `FLOAT32` | [Float32](../sql-reference/data-types/float.md) | `FLOAT32` |
| `FLOAT64` | [Float64](../sql-reference/data-types/float.md) | `FLOAT64` |
| `TEXT, DATA` | [String](../sql-reference/data-types/string.md), [FixedString](../sql-reference/data-types/fixedstring.md) | `TEXT, DATA` |
| `union(T, Void), union(Void, T)` | [Nullable(T)](../sql-reference/data-types/nullable.md) | `union(T, Void), union(Void, T)` |
| `ENUM` | [Enum(8\|16)](../sql-reference/data-types/enum.md) | `ENUM` |
| `LIST` | [Array](../sql-reference/data-types/array.md) | `LIST` |
| `STRUCT` | [Tuple](../sql-reference/data-types/tuple.md) | `STRUCT` |
To work with the `Enum` data type in the CapnProto format, use the [format_capn_proto_enum_comparising_mode](../operations/settings/settings.md#format-capn-proto-enum-comparising-mode) setting.
Arrays can be nested and can have a value of the `Nullable` type as an argument. The `Tuple` type can also be nested.
### Inserting and Selecting Data {#inserting-and-selecting-data-capnproto}
To insert data from a file in the CapnProto format into ClickHouse, run a command of the following form:
``` bash
$ cat capnproto_messages.bin | clickhouse-client --query "INSERT INTO test.hits FORMAT CapnProto SETTINGS format_schema = 'schema:Message'"
@ -1030,9 +1062,11 @@ struct Message {
}
```
To get data from a ClickHouse table and save it to a file in the CapnProto format, use a command of the following form:
``` bash
$ clickhouse-client --query "SELECT * FROM test.hits FORMAT CapnProto SETTINGS format_schema = 'schema:Message'"
```
## Protobuf {#protobuf}

View File

@ -3808,6 +3808,18 @@ SELECT * FROM positional_arguments ORDER BY 2,3;
Default value: `0`.
## format_capn_proto_enum_comparising_mode {#format-capn-proto-enum-comparising-mode}
Determines how to map the ClickHouse `Enum` data type to the [CapnProto](../../interfaces/formats.md#capnproto) `Enum` data type from the schema.
Possible values:
- `'by_values'` — values in the enums should be the same, names can be different.
- `'by_names'` — names in the enums should be the same, values can be different.
- `'by_name_case_insensitive'` — names in the enums should be the same (case-insensitive), values can be different.
Default value: `'by_values'`.
## min_bytes_to_use_mmap_io {#min-bytes-to-use-mmap-io}
This is an experimental setting. Sets the minimum amount of memory for reading large files without copying data from the kernel to userspace. The recommended limit is about 64 MB, because [mmap/munmap](https://en.wikipedia.org/wiki/Mmap) is slow. It makes sense only for large files and helps only if the data resides in the page cache.

View File

@ -86,7 +86,6 @@ git push
- **Compiler**: `gcc-9` or `clang-10` (or `clang-10-xx` for other architectures, e.g. `clang-10-freebsd`).
- **Build type**: `Debug` or `RelWithDebInfo` (cmake).
- **Sanitizer**: `none` (without sanitizers), `address` (ASan), `memory` (MSan), `undefined` (UBSan), or `thread` (TSan).
- **Bundled**: a `bundled` build uses libraries from `contrib`, while an `unbundled` build uses system libraries.
- **Splitted**: `splitted` is a [split build](https://clickhouse.com/docs/en/development/build/#split-build)
- **Status**: `success` or `failure`.
- **Build log**: link to the build and file-copy log, useful when the build fails.

View File

@ -20,6 +20,7 @@
#include <Poco/Environment.h>
#include <sys/stat.h>
#include <pwd.h>
#include <Coordination/FourLetterCommand.h>
#if !defined(ARCADIA_BUILD)
# include "config_core.h"
@ -367,6 +368,8 @@ int Keeper::main(const std::vector<std::string> & /*args*/)
/// Initialize keeper RAFT. Do nothing if no keeper_server in config.
global_context->initializeKeeperDispatcher(/* start_async = */false);
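/// Register the four-letter-word (4lw) diagnostic commands (dirs, isro, wchs, dump, ...) for this Keeper dispatcher.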
FourLetterCommandFactory::registerCommands(*global_context->getKeeperDispatcher());
for (const auto & listen_host : listen_hosts)
{
/// TCP Keeper

View File

@ -111,6 +111,7 @@
#endif
#if USE_NURAFT
# include <Coordination/FourLetterCommand.h>
# include <Server/KeeperTCPHandlerFactory.h>
#endif
@ -1025,6 +1026,7 @@ if (ThreadFuzzer::instance().isEffective())
}
/// Initialize keeper RAFT.
global_context->initializeKeeperDispatcher(can_initialize_keeper_async);
FourLetterCommandFactory::registerCommands(*global_context->getKeeperDispatcher());
for (const auto & listen_host : listen_hosts)
{

View File

@ -229,6 +229,16 @@ void AccessControl::startPeriodicReloadingUsersConfigs()
}
}
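/// Stop watching the users config for changes in every UsersConfigAccessStorage.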
void AccessControl::stopPeriodicReloadingUsersConfigs()
{
auto storages = getStoragesPtr();
for (const auto & storage : *storages)
{
if (auto users_config_storage = typeid_cast<std::shared_ptr<UsersConfigAccessStorage>>(storage))
users_config_storage->stopPeriodicReloading();
}
}
void AccessControl::addReplicatedStorage(
const String & storage_name_,
const String & zookeeper_path_,

View File

@ -71,6 +71,7 @@ public:
void reloadUsersConfigs();
void startPeriodicReloadingUsersConfigs();
void stopPeriodicReloadingUsersConfigs();
/// Loads access entities from the directory on the local disk.
/// Use that directory to keep created users/roles/etc.

View File

@ -591,6 +591,13 @@ void UsersConfigAccessStorage::startPeriodicReloading()
config_reloader->start();
}
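/// Counterpart of startPeriodicReloading(): stop the background config reloader if it was created.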
void UsersConfigAccessStorage::stopPeriodicReloading()
{
std::lock_guard lock{load_mutex};
if (config_reloader)
config_reloader->stop();
}
std::optional<UUID> UsersConfigAccessStorage::findImpl(AccessEntityType type, const String & name) const
{
return memory_storage.find(type, name);

View File

@ -39,6 +39,7 @@ public:
const zkutil::GetZooKeeper & get_zookeeper_function = {});
void reload();
void startPeriodicReloading();
void stopPeriodicReloading();
private:
void parseFromConfig(const Poco::Util::AbstractConfiguration & config);

View File

@ -178,6 +178,12 @@ AggregateFunctionPtr AggregateFunctionFactory::getImpl(
/// uniqCombinedIfMergeIf is useful in cases when the underlying
/// storage stores AggregateFunction(uniqCombinedIf) and in SELECT you
/// need to filter aggregation result based on another column.
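/// In unbundled builds GCC may report -Wstringop-overread for the ends_with() check below, so silence it locally.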
#if defined(UNBUNDLED)
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wstringop-overread"
#endif
if (!combinator->supportsNesting() && nested_name.ends_with(combinator_name))
{
throw Exception(ErrorCodes::ILLEGAL_AGGREGATION,
@ -185,6 +191,10 @@ AggregateFunctionPtr AggregateFunctionFactory::getImpl(
combinator_name);
}
#if defined(UNBUNDLED)
#pragma GCC diagnostic pop
#endif
DataTypes nested_types = combinator->transformArguments(argument_types);
Array nested_parameters = combinator->transformParameters(parameters);

View File

@ -366,13 +366,10 @@ dbms_target_link_libraries (
clickhouse_common_io
)
if (NOT_UNBUNDLED)
dbms_target_link_libraries (
PUBLIC
boost::circular_buffer
boost::heap)
)
endif()
target_include_directories(clickhouse_common_io PUBLIC "${CMAKE_CURRENT_BINARY_DIR}/Core/include") # uses some includes from core
dbms_target_include_directories(PUBLIC "${CMAKE_CURRENT_BINARY_DIR}/Core/include")

View File

@ -518,7 +518,7 @@ StringRef ColumnAggregateFunction::serializeValueIntoArena(size_t n, Arena & are
{
WriteBufferFromArena out(arena, begin);
func->serialize(data[n], out);
return out.complete();
}
const char * ColumnAggregateFunction::deserializeAndInsertFromArena(const char * src_arena)

Some files were not shown because too many files have changed in this diff