Merge branch 'master' of github.com:ClickHouse/ClickHouse into remote-connection

kssenii 2021-10-23 17:12:46 +03:00
commit 36d0f67b38
860 changed files with 12396 additions and 5772 deletions

.github/workflows/main.yml (vendored new file, 53 lines)

@@ -0,0 +1,53 @@
name: Lightweight GithubActions
on:  # yamllint disable-line rule:truthy
  pull_request:
    types:
      - labeled
      - unlabeled
      - synchronize
      - reopened
      - opened
    branches:
      - master
jobs:
  CheckLabels:
    runs-on: [self-hosted]
    steps:
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Labels check
        run: cd $GITHUB_WORKSPACE/tests/ci && python3 run_check.py
  DockerHubPush:
    needs: CheckLabels
    runs-on: [self-hosted]
    steps:
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Images check
        run: cd $GITHUB_WORKSPACE/tests/ci && python3 docker_images_check.py
      - name: Upload images files to artifacts
        uses: actions/upload-artifact@v2
        with:
          name: changed_images
          path: ${{ runner.temp }}/docker_images_check/changed_images.json
  StyleCheck:
    needs: DockerHubPush
    runs-on: [self-hosted]
    steps:
      - name: Download changed images
        uses: actions/download-artifact@v2
        with:
          name: changed_images
          path: ${{ runner.temp }}/style_check
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Style Check
        run: cd $GITHUB_WORKSPACE/tests/ci && python3 style_check.py
  FinishCheck:
    needs: [StyleCheck, DockerHubPush, CheckLabels]
    runs-on: [self-hosted]
    steps:
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Finish label
        run: cd $GITHUB_WORKSPACE/tests/ci && python3 finish_check.py

.gitmodules (vendored, 6 lines changed)

@@ -140,7 +140,7 @@
 url = https://github.com/ClickHouse-Extras/libc-headers.git
 [submodule "contrib/replxx"]
 path = contrib/replxx
-url = https://github.com/ClickHouse-Extras/replxx.git
+url = https://github.com/AmokHuginnsson/replxx.git
 [submodule "contrib/avro"]
 path = contrib/avro
 url = https://github.com/ClickHouse-Extras/avro.git
@@ -213,6 +213,7 @@
 [submodule "contrib/boringssl"]
 path = contrib/boringssl
 url = https://github.com/ClickHouse-Extras/boringssl.git
+branch = MergeWithUpstream
 [submodule "contrib/NuRaft"]
 path = contrib/NuRaft
 url = https://github.com/ClickHouse-Extras/NuRaft.git
@@ -249,6 +250,9 @@
 [submodule "contrib/magic_enum"]
 path = contrib/magic_enum
 url = https://github.com/Neargye/magic_enum
+[submodule "contrib/libprotobuf-mutator"]
+path = contrib/libprotobuf-mutator
+url = https://github.com/google/libprotobuf-mutator
 [submodule "contrib/sysroot"]
 path = contrib/sysroot
 url = https://github.com/ClickHouse-Extras/sysroot.git
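A single submodule can also be fetched on its own; for the one added here, a hedged example, run from the repository root:

    git submodule update --init contrib/libprotobuf-mutator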


@@ -1,4 +1,4 @@
-### ClickHouse release v21.10, 2021-10-14
+### ClickHouse release v21.10, 2021-10-16
 #### Backward Incompatible Change


@@ -136,6 +136,21 @@ if (ENABLE_FUZZING)
     message (STATUS "Fuzzing instrumentation enabled")
     set (FUZZER "libfuzzer")
     set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -nostdlib++")
+    set (ENABLE_CLICKHOUSE_ODBC_BRIDGE OFF)
+    set (ENABLE_LIBRARIES 0)
+    set (ENABLE_SSL 1)
+    set (USE_INTERNAL_SSL_LIBRARY 1)
+    set (USE_UNWIND ON)
+    set (ENABLE_EMBEDDED_COMPILER 0)
+    set (ENABLE_EXAMPLES 0)
+    set (ENABLE_UTILS 0)
+    set (ENABLE_THINLTO 0)
+    set (ENABLE_TCMALLOC 0)
+    set (ENABLE_JEMALLOC 0)
+    set (ENABLE_CHECK_HEAVY_BUILDS 1)
+    set (GLIBC_COMPATIBILITY OFF)
+    set (ENABLE_PROTOBUF ON)
+    set (USE_INTERNAL_PROTOBUF_LIBRARY ON)
 endif()

 # Global libraries
@@ -188,7 +203,7 @@ endif ()
 option(ENABLE_TESTS "Provide unit_test_dbms target with Google.Test unit tests" ON)
 option(ENABLE_EXAMPLES "Build all example programs in 'examples' subdirectories" OFF)
-if (OS_LINUX AND (ARCH_AMD64 OR ARCH_AARCH64) AND NOT UNBUNDLED AND MAKE_STATIC_LIBRARIES AND NOT SPLIT_SHARED_LIBRARIES AND CMAKE_VERSION VERSION_GREATER "3.9.0")
+if (OS_LINUX AND (ARCH_AMD64 OR ARCH_AARCH64) AND NOT UNBUNDLED AND MAKE_STATIC_LIBRARIES AND NOT SPLIT_SHARED_LIBRARIES AND NOT USE_MUSL)
     # Only for Linux, x86_64 or aarch64.
     option(GLIBC_COMPATIBILITY "Enable compatibility with older glibc libraries." ON)
 elseif(GLIBC_COMPATIBILITY)
@@ -203,10 +218,6 @@ if (GLIBC_COMPATIBILITY)
     set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -include ${CMAKE_CURRENT_SOURCE_DIR}/base/glibc-compatibility/glibc-compat-2.32.h")
 endif()
-if (NOT CMAKE_VERSION VERSION_GREATER "3.9.0")
-    message (WARNING "CMake version must be greater than 3.9.0 for production builds.")
-endif ()
-
 # Make sure the final executable has symbols exported
 set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -rdynamic")
@@ -582,6 +593,7 @@ include (cmake/find/cassandra.cmake)
 include (cmake/find/sentry.cmake)
 include (cmake/find/stats.cmake)
 include (cmake/find/datasketches.cmake)
+include (cmake/find/libprotobuf-mutator.cmake)
 set (USE_INTERNAL_CITYHASH_LIBRARY ON CACHE INTERNAL "")
 find_contrib_lib(cityhash)


@@ -5,6 +5,10 @@
 #include <string.h>
 #include <unistd.h>
+#include <sys/select.h>
+#include <sys/time.h>
+#include <sys/types.h>
+
 #ifdef OS_LINUX
 /// We can detect if code is linked with one or another readline variants or open the library dynamically.


@@ -177,6 +177,10 @@ ReplxxLineReader::ReplxxLineReader(
 /// bind C-p/C-n to history-previous/history-next like readline.
 rx.bind_key(Replxx::KEY::control('N'), [this](char32_t code) { return rx.invoke(Replxx::ACTION::HISTORY_NEXT, code); });
 rx.bind_key(Replxx::KEY::control('P'), [this](char32_t code) { return rx.invoke(Replxx::ACTION::HISTORY_PREVIOUS, code); });
+/// bind C-j to ENTER action.
+rx.bind_key(Replxx::KEY::control('J'), [this](char32_t code) { return rx.invoke(Replxx::ACTION::COMMIT_LINE, code); });
 /// By default COMPLETE_NEXT/COMPLETE_PREV were bound to C-p/C-n; re-bind
 /// to M-P/M-N (those were used for HISTORY_COMMON_PREFIX_SEARCH before, but
 /// it was also bound to M-p/M-n).


@@ -6,7 +6,7 @@
 #include <base/defines.h>
-#if defined(__linux__) && !defined(THREAD_SANITIZER)
+#if defined(__linux__) && !defined(THREAD_SANITIZER) && !defined(USE_MUSL)
 #define USE_PHDR_CACHE 1
 #endif


@@ -84,7 +84,7 @@ void Loggers::buildLoggers(Poco::Util::AbstractConfiguration & config, Poco::Log
         Poco::AutoPtr<DB::OwnFormattingChannel> log = new DB::OwnFormattingChannel(pf, log_file);
         log->setLevel(log_level);
-        split->addChannel(log);
+        split->addChannel(log, "log");
     }
     const auto errorlog_path = config.getString("logger.errorlog", "");
@@ -116,7 +116,7 @@ void Loggers::buildLoggers(Poco::Util::AbstractConfiguration & config, Poco::Log
         Poco::AutoPtr<DB::OwnFormattingChannel> errorlog = new DB::OwnFormattingChannel(pf, error_log_file);
         errorlog->setLevel(errorlog_level);
         errorlog->open();
-        split->addChannel(errorlog);
+        split->addChannel(errorlog, "errorlog");
     }
     if (config.getBool("logger.use_syslog", false))
@@ -155,7 +155,7 @@ void Loggers::buildLoggers(Poco::Util::AbstractConfiguration & config, Poco::Log
         Poco::AutoPtr<DB::OwnFormattingChannel> log = new DB::OwnFormattingChannel(pf, syslog_channel);
         log->setLevel(syslog_level);
-        split->addChannel(log);
+        split->addChannel(log, "syslog");
     }
     bool should_log_to_console = isatty(STDIN_FILENO) || isatty(STDERR_FILENO);
@@ -177,7 +177,7 @@ void Loggers::buildLoggers(Poco::Util::AbstractConfiguration & config, Poco::Log
         Poco::AutoPtr<DB::OwnFormattingChannel> log = new DB::OwnFormattingChannel(pf, new Poco::ConsoleChannel);
         logger.warning("Logging " + console_log_level_string + " to console");
         log->setLevel(console_log_level);
-        split->addChannel(log);
+        split->addChannel(log, "console");
     }
     split->open();
@@ -224,6 +224,89 @@ void Loggers::buildLoggers(Poco::Util::AbstractConfiguration & config, Poco::Log
     }
 }
+void Loggers::updateLevels(Poco::Util::AbstractConfiguration & config, Poco::Logger & logger)
+{
+    int max_log_level = 0;
+
+    const auto log_level_string = config.getString("logger.level", "trace");
+    int log_level = Poco::Logger::parseLevel(log_level_string);
+    if (log_level > max_log_level)
+        max_log_level = log_level;
+
+    const auto log_path = config.getString("logger.log", "");
+    if (!log_path.empty())
+        split->setLevel("log", log_level);
+    else
+        split->setLevel("log", 0);
+
+    // Set level to console
+    bool is_daemon = config.getBool("application.runAsDaemon", false);
+    bool should_log_to_console = isatty(STDIN_FILENO) || isatty(STDERR_FILENO);
+    if (config.getBool("logger.console", false)
+        || (!config.hasProperty("logger.console") && !is_daemon && should_log_to_console))
+        split->setLevel("console", log_level);
+    else
+        split->setLevel("console", 0);
+
+    // Set level to errorlog
+    int errorlog_level = 0;
+    const auto errorlog_path = config.getString("logger.errorlog", "");
+    if (!errorlog_path.empty())
+    {
+        errorlog_level = Poco::Logger::parseLevel(config.getString("logger.errorlog_level", "notice"));
+        if (errorlog_level > max_log_level)
+            max_log_level = errorlog_level;
+    }
+    split->setLevel("errorlog", errorlog_level);
+
+    // Set level to syslog
+    int syslog_level = 0;
+    if (config.getBool("logger.use_syslog", false))
+    {
+        syslog_level = Poco::Logger::parseLevel(config.getString("logger.syslog_level", log_level_string));
+        if (syslog_level > max_log_level)
+            max_log_level = syslog_level;
+    }
+    split->setLevel("syslog", syslog_level);
+
+    // Global logging level (it can be overridden for specific loggers).
+    logger.setLevel(max_log_level);
+
+    // Set level to all already created loggers
+    std::vector<std::string> names;
+    logger.root().names(names);
+    for (const auto & name : names)
+        logger.root().get(name).setLevel(max_log_level);
+    logger.root().setLevel(max_log_level);
+
+    // Explicitly specified log levels for specific loggers.
+    {
+        Poco::Util::AbstractConfiguration::Keys loggers_level;
+        config.keys("logger.levels", loggers_level);
+        if (!loggers_level.empty())
+        {
+            for (const auto & key : loggers_level)
+            {
+                if (key == "logger" || key.starts_with("logger["))
+                {
+                    const std::string name(config.getString("logger.levels." + key + ".name"));
+                    const std::string level(config.getString("logger.levels." + key + ".level"));
+                    logger.root().get(name).setLevel(level);
+                }
+                else
+                {
+                    // Legacy syntax
+                    const std::string level(config.getString("logger.levels." + key, "trace"));
+                    logger.root().get(key).setLevel(level);
+                }
+            }
+        }
+    }
+}
+
 void Loggers::closeLogs(Poco::Logger & logger)
 {
     if (log_file)
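The new updateLevels() mirrors the config layout that buildLoggers() reads. A sketch of the corresponding server configuration — the logger names and levels are illustrative assumptions, not taken from this diff; both the structured and the legacy per-logger syntax handled by the loop above are shown:

    <logger>
        <level>trace</level>
        <levels>
            <!-- structured syntax: key is "logger", with explicit name/level -->
            <logger>
                <name>SomeNoisyLogger</name>
                <level>warning</level>
            </logger>
            <!-- legacy syntax: element name is the logger name -->
            <AnotherLogger>information</AnotherLogger>
        </levels>
    </logger>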


@@ -19,6 +19,8 @@ class Loggers
 public:
     void buildLoggers(Poco::Util::AbstractConfiguration & config, Poco::Logger & logger, const std::string & cmd_name = "");
+    void updateLevels(Poco::Util::AbstractConfiguration & config, Poco::Logger & logger);
+
     /// Close log files. On next log write files will be reopened.
     void closeLogs(Poco::Logger & logger);


@@ -1,4 +1,5 @@
 #pragma once
+#include <atomic>
 #include <Poco/AutoPtr.h>
 #include <Poco/Channel.h>
 #include <Poco/FormattingChannel.h>
@@ -14,7 +15,7 @@ class OwnFormattingChannel : public Poco::Channel, public ExtendedLogChannel
 public:
     explicit OwnFormattingChannel(
         Poco::AutoPtr<OwnPatternFormatter> pFormatter_ = nullptr, Poco::AutoPtr<Poco::Channel> pChannel_ = nullptr)
-        : pFormatter(std::move(pFormatter_)), pChannel(std::move(pChannel_))
+        : pFormatter(std::move(pFormatter_)), pChannel(std::move(pChannel_)), priority(Poco::Message::PRIO_TRACE)
     {
     }
@@ -45,7 +46,7 @@ public:
 private:
     Poco::AutoPtr<OwnPatternFormatter> pFormatter;
     Poco::AutoPtr<Poco::Channel> pChannel;
-    Poco::Message::Priority priority = Poco::Message::PRIO_TRACE;
+    std::atomic<Poco::Message::Priority> priority;
 };

 }
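Making the priority field a std::atomic is presumably what lets the new setLevel() path retune a channel from the config-reload thread while other threads keep logging through it; a plain field read and written concurrently would be a data race. A minimal standalone sketch of the pattern (names assumed, not from the diff):

    #include <atomic>
    #include <Poco/Message.h>

    struct LevelGate
    {
        // Writer (config reload) stores; readers (logging threads) load lock-free.
        std::atomic<Poco::Message::Priority> level{Poco::Message::PRIO_TRACE};

        // In Poco, lower numeric priority means more severe, so a message passes
        // while its priority does not exceed the configured ceiling.
        bool passes(const Poco::Message & msg) const { return msg.getPriority() <= level.load(); }
    };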


@@ -1,4 +1,5 @@
 #include "OwnSplitChannel.h"
+#include "OwnFormattingChannel.h"
 #include <iostream>
 #include <Core/Block.h>
@@ -75,7 +76,7 @@ void OwnSplitChannel::logSplit(const Poco::Message & msg)
     ExtendedLogMessage msg_ext = ExtendedLogMessage::getFrom(msg);
     /// Log data to child channels
-    for (auto & channel : channels)
+    for (auto & [name, channel] : channels)
     {
         if (channel.second)
             channel.second->logExtended(msg_ext); // extended child
@@ -137,9 +138,9 @@ void OwnSplitChannel::logSplit(const Poco::Message & msg)
 }
-void OwnSplitChannel::addChannel(Poco::AutoPtr<Poco::Channel> channel)
+void OwnSplitChannel::addChannel(Poco::AutoPtr<Poco::Channel> channel, const std::string & name)
 {
-    channels.emplace_back(std::move(channel), dynamic_cast<ExtendedLogChannel *>(channel.get()));
+    channels.emplace(name, ExtendedChannelPtrPair(std::move(channel), dynamic_cast<ExtendedLogChannel *>(channel.get())));
 }
 void OwnSplitChannel::addTextLog(std::shared_ptr<DB::TextLog> log, int max_priority)
@@ -149,4 +150,14 @@ void OwnSplitChannel::addTextLog(std::shared_ptr<DB::TextLog> log, int max_prior
     text_log_max_priority.store(max_priority, std::memory_order_relaxed);
 }
+void OwnSplitChannel::setLevel(const std::string & name, int level)
+{
+    auto it = channels.find(name);
+    if (it != channels.end())
+    {
+        if (auto * channel = dynamic_cast<DB::OwnFormattingChannel *>(it->second.first.get()))
+            channel->setLevel(level);
+    }
+}
+
 }
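With channels keyed by name, registration and later retuning pair up like this (a sketch; `split` and `formatter` stand in for the objects buildLoggers() already holds):

    // Register a named child channel...
    split->addChannel(new DB::OwnFormattingChannel(formatter, new Poco::ConsoleChannel), "console");
    // ...and adjust it later by name. Unknown names are silently ignored, and the
    // dynamic_cast skips children that are not OwnFormattingChannel.
    split->setLevel("console", Poco::Message::PRIO_WARNING);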


@@ -18,10 +18,12 @@ public:
     /// Makes an extended message from msg and passes it to the client logs queue and child (if possible)
     void log(const Poco::Message & msg) override;
     /// Adds a child channel
-    void addChannel(Poco::AutoPtr<Poco::Channel> channel);
+    void addChannel(Poco::AutoPtr<Poco::Channel> channel, const std::string & name);
     void addTextLog(std::shared_ptr<DB::TextLog> log, int max_priority);
+    void setLevel(const std::string & name, int level);
+
 private:
     void logSplit(const Poco::Message & msg);
     void tryLogSplit(const Poco::Message & msg);
@@ -29,7 +31,7 @@ private:
     using ChannelPtr = Poco::AutoPtr<Poco::Channel>;
     /// Handler and its pointer casted to extended interface
     using ExtendedChannelPtrPair = std::pair<ChannelPtr, ExtendedLogChannel *>;
-    std::vector<ExtendedChannelPtrPair> channels;
+    std::map<std::string, ExtendedChannelPtrPair> channels;
     std::mutex text_log_mutex;


@@ -13,6 +13,7 @@ TRIES=3
 AMD64_BIN_URL="https://builds.clickhouse.com/master/amd64/clickhouse"
 AARCH64_BIN_URL="https://builds.clickhouse.com/master/aarch64/clickhouse"
+POWERPC64_BIN_URL="https://builds.clickhouse.com/master/ppc64le/clickhouse"
 # Note: on older Ubuntu versions, 'axel' does not support IPv6. If you are using IPv6-only servers on very old Ubuntu, just don't install 'axel'.
@@ -38,6 +39,8 @@ if [[ ! -f clickhouse ]]; then
     $FASTER_DOWNLOAD "$AMD64_BIN_URL"
 elif [[ $CPU == aarch64 ]]; then
     $FASTER_DOWNLOAD "$AARCH64_BIN_URL"
+elif [[ $CPU == powerpc64le ]]; then
+    $FASTER_DOWNLOAD "$POWERPC64_BIN_URL"
 else
     echo "Unsupported CPU type: $CPU"
     exit 1
@@ -52,7 +55,7 @@ fi
 if [[ ! -d data ]]; then
     if [[ ! -f $DATASET ]]; then
-        $FASTER_DOWNLOAD "https://clickhouse-datasets.s3.yandex.net/hits/partitions/$DATASET"
+        $FASTER_DOWNLOAD "https://datasets.clickhouse.com/hits/partitions/$DATASET"
     fi
     tar $TAR_PARAMS --strip-components=1 --directory=. -x -v -f $DATASET


@@ -18,6 +18,10 @@ option (ENABLE_PCLMULQDQ "Use pclmulqdq instructions on x86_64" 1)
 option (ENABLE_POPCNT "Use popcnt instructions on x86_64" 1)
 option (ENABLE_AVX "Use AVX instructions on x86_64" 0)
 option (ENABLE_AVX2 "Use AVX2 instructions on x86_64" 0)
+option (ENABLE_AVX512 "Use AVX512 instructions on x86_64" 0)
+option (ENABLE_BMI "Use BMI instructions on x86_64" 0)
+option (ENABLE_AVX2_FOR_SPEC_OP "Use avx2 instructions for specific operations on x86_64" 0)
+option (ENABLE_AVX512_FOR_SPEC_OP "Use avx512 instructions for specific operations on x86_64" 0)
 option (ARCH_NATIVE "Add -march=native compiler flag. This makes your binaries non-portable but more performant code may be generated. This option overrides ENABLE_* options for specific instruction set. Highly not recommended to use." 0)
@@ -127,6 +131,57 @@ else ()
     if (HAVE_AVX2 AND ENABLE_AVX2)
         set (COMPILER_FLAGS "${COMPILER_FLAGS} ${TEST_FLAG}")
     endif ()
+
+    set (TEST_FLAG "-mavx512f -mavx512bw")
+    set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0")
+    check_cxx_source_compiles("
+        #include <immintrin.h>
+        int main() {
+            auto a = _mm512_setzero_epi32();
+            (void)a;
+            auto b = _mm512_add_epi16(__m512i(), __m512i());
+            (void)b;
+            return 0;
+        }
+    " HAVE_AVX512)
+    if (HAVE_AVX512 AND ENABLE_AVX512)
+        set (COMPILER_FLAGS "${COMPILER_FLAGS} ${TEST_FLAG}")
+    endif ()
+
+    set (TEST_FLAG "-mbmi")
+    set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0")
+    check_cxx_source_compiles("
+        #include <immintrin.h>
+        int main() {
+            auto a = _blsr_u32(0);
+            (void)a;
+            return 0;
+        }
+    " HAVE_BMI)
+    if (HAVE_BMI AND ENABLE_BMI)
+        set (COMPILER_FLAGS "${COMPILER_FLAGS} ${TEST_FLAG}")
+    endif ()
+
+    # Limit avx2/avx512 flags to specific source builds
+    set (X86_INTRINSICS_FLAGS "")
+    if (ENABLE_AVX2_FOR_SPEC_OP)
+        if (HAVE_BMI)
+            set (X86_INTRINSICS_FLAGS "${X86_INTRINSICS_FLAGS} -mbmi")
+        endif ()
+        if (HAVE_AVX AND HAVE_AVX2)
+            set (X86_INTRINSICS_FLAGS "${X86_INTRINSICS_FLAGS} -mavx -mavx2")
+        endif ()
+    endif ()
+    if (ENABLE_AVX512_FOR_SPEC_OP)
+        set (X86_INTRINSICS_FLAGS "")
+        if (HAVE_BMI)
+            set (X86_INTRINSICS_FLAGS "${X86_INTRINSICS_FLAGS} -mbmi")
+        endif ()
+        if (HAVE_AVX512)
+            set (X86_INTRINSICS_FLAGS "${X86_INTRINSICS_FLAGS} -mavx512f -mavx512bw")
+        endif ()
+    endif ()
 endif ()

 cmake_pop_check_state ()
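Note that X86_INTRINSICS_FLAGS is collected into its own variable instead of being appended to the global COMPILER_FLAGS, which suggests per-source use: only hand-vectorized translation units get the flags, while the rest of the binary stays runnable on older CPUs. A hedged sketch of such usage (the file name is made up):

    set_source_files_properties(src/SpecificVectorizedOp.cpp
        PROPERTIES COMPILE_FLAGS "${X86_INTRINSICS_FLAGS}")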

@@ -10,7 +10,7 @@ if (NOT ENABLE_AMQPCPP)
 endif()
 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/AMQP-CPP/CMakeLists.txt")
-    message (WARNING "submodule contrib/AMQP-CPP is missing. to fix try run: \n git submodule update --init --recursive")
+    message (WARNING "submodule contrib/AMQP-CPP is missing. to fix try run: \n git submodule update --init")
     message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal AMQP-CPP library")
     set (USE_AMQPCPP 0)
     return()

@@ -13,7 +13,7 @@ option (USE_INTERNAL_AVRO_LIBRARY
 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/avro/lang/c++/CMakeLists.txt")
     if (USE_INTERNAL_AVRO_LIBRARY)
-        message(WARNING "submodule contrib/avro is missing. to fix try run: \n git submodule update --init --recursive")
+        message(WARNING "submodule contrib/avro is missing. to fix try run: \n git submodule update --init")
         message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot find internal avro")
         set(USE_INTERNAL_AVRO_LIBRARY 0)
     endif()

@@ -10,11 +10,11 @@ endif()
 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/base64/LICENSE")
     set (MISSING_INTERNAL_BASE64_LIBRARY 1)
-    message (WARNING "submodule contrib/base64 is missing. to fix try run: \n git submodule update --init --recursive")
+    message (WARNING "submodule contrib/base64 is missing. to fix try run: \n git submodule update --init")
 endif ()
 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/base64")
-    message (WARNING "submodule contrib/base64 is missing. to fix try run: \n git submodule update --init --recursive")
+    message (WARNING "submodule contrib/base64 is missing. to fix try run: \n git submodule update --init")
 else()
     set (BASE64_LIBRARY base64)
     set (USE_BASE64 1)

@@ -16,7 +16,7 @@ endif()
 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/brotli/c/include/brotli/decode.h")
     if (USE_INTERNAL_BROTLI_LIBRARY)
-        message (WARNING "submodule contrib/brotli is missing. to fix try run: \n git submodule update --init --recursive")
+        message (WARNING "submodule contrib/brotli is missing. to fix try run: \n git submodule update --init")
         message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot find internal brotli")
         set (USE_INTERNAL_BROTLI_LIBRARY 0)
     endif ()

@@ -6,7 +6,7 @@ if (NOT ENABLE_BZIP2)
 endif()
 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/bzip2/bzlib.h")
-    message (WARNING "submodule contrib/bzip2 is missing. to fix try run: \n git submodule update --init --recursive")
+    message (WARNING "submodule contrib/bzip2 is missing. to fix try run: \n git submodule update --init")
     message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal bzip2 library")
     set (USE_NLP 0)
     return()

@@ -11,7 +11,7 @@ option (USE_INTERNAL_CAPNP_LIBRARY "Set to FALSE to use system capnproto library
 if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/capnproto/CMakeLists.txt")
     if(USE_INTERNAL_CAPNP_LIBRARY)
-        message(WARNING "submodule contrib/capnproto is missing. to fix try run: \n git submodule update --init --recursive")
+        message(WARNING "submodule contrib/capnproto is missing. to fix try run: \n git submodule update --init")
         message(${RECONFIGURE_MESSAGE_LEVEL} "cannot find internal capnproto")
         set(USE_INTERNAL_CAPNP_LIBRARY 0)
     endif()
@@ -34,8 +34,6 @@ endif()
 if (CAPNP_LIBRARIES)
     set (USE_CAPNP 1)
 elseif(NOT MISSING_INTERNAL_CAPNP_LIBRARY)
-    add_subdirectory(contrib/capnproto-cmake)
-
     set (CAPNP_LIBRARIES capnpc)
     set (USE_CAPNP 1)
     set (USE_INTERNAL_CAPNP_LIBRARY 1)

@@ -14,7 +14,7 @@ if (APPLE)
 endif()
 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/cassandra")
-    message (ERROR "submodule contrib/cassandra is missing. to fix try run: \n git submodule update --init --recursive")
+    message (ERROR "submodule contrib/cassandra is missing. to fix try run: \n git submodule update --init")
     message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal Cassandra")
     set (USE_CASSANDRA 0)
     return()

@@ -17,7 +17,7 @@ option (USE_INTERNAL_LIBCXX_LIBRARY "Disable to use system libcxx and libcxxabi
 if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libcxx/CMakeLists.txt")
     if (USE_INTERNAL_LIBCXX_LIBRARY)
-        message(WARNING "submodule contrib/libcxx is missing. to fix try run: \n git submodule update --init --recursive")
+        message(WARNING "submodule contrib/libcxx is missing. to fix try run: \n git submodule update --init")
         message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal libcxx")
         set(USE_INTERNAL_LIBCXX_LIBRARY 0)
     endif()

@@ -6,7 +6,7 @@ endif()
 OPTION(ENABLE_CYRUS_SASL "Enable cyrus-sasl" ${DEFAULT_ENABLE_CYRUS_SASL})
 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/cyrus-sasl/README")
-    message (WARNING "submodule contrib/cyrus-sasl is missing. to fix try run: \n git submodule update --init --recursive")
+    message (WARNING "submodule contrib/cyrus-sasl is missing. to fix try run: \n git submodule update --init")
     set (ENABLE_CYRUS_SASL 0)
 endif ()

@@ -6,7 +6,7 @@ option (USE_INTERNAL_DATASKETCHES_LIBRARY "Set to FALSE to use system DataSketch
 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/datasketches-cpp/theta/CMakeLists.txt")
     if (USE_INTERNAL_DATASKETCHES_LIBRARY)
-        message(WARNING "submodule contrib/datasketches-cpp is missing. to fix try run: \n git submodule update --init --recursive")
+        message(WARNING "submodule contrib/datasketches-cpp is missing. to fix try run: \n git submodule update --init")
     endif()
     set(MISSING_INTERNAL_DATASKETCHES_LIBRARY 1)
     set(USE_INTERNAL_DATASKETCHES_LIBRARY 0)

@@ -1,5 +1,5 @@
 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/fast_float/include/fast_float/fast_float.h")
-    message (FATAL_ERROR "submodule contrib/fast_float is missing. to fix try run: \n git submodule update --init --recursive")
+    message (FATAL_ERROR "submodule contrib/fast_float is missing. to fix try run: \n git submodule update --init")
 endif ()
 set(FAST_FLOAT_LIBRARY fast_float)

@@ -10,7 +10,7 @@ if(NOT ENABLE_FASTOPS)
 endif()
 if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/fastops/fastops/fastops.h")
-    message(WARNING "submodule contrib/fastops is missing. to fix try run: \n git submodule update --init --recursive")
+    message(WARNING "submodule contrib/fastops is missing. to fix try run: \n git submodule update --init")
     message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal fastops library")
     set(MISSING_INTERNAL_FASTOPS_LIBRARY 1)
 endif()

@@ -26,7 +26,7 @@ option(USE_INTERNAL_GRPC_LIBRARY "Set to FALSE to use system gRPC library instea
 if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/grpc/CMakeLists.txt")
     if(USE_INTERNAL_GRPC_LIBRARY)
-        message(WARNING "submodule contrib/grpc is missing. to fix try run: \n git submodule update --init --recursive")
+        message(WARNING "submodule contrib/grpc is missing. to fix try run: \n git submodule update --init")
         message(${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal grpc")
         set(USE_INTERNAL_GRPC_LIBRARY 0)
     endif()

@@ -4,7 +4,7 @@ option (USE_INTERNAL_GTEST_LIBRARY "Set to FALSE to use system Google Test inste
 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/googletest/googletest/CMakeLists.txt")
     if (USE_INTERNAL_GTEST_LIBRARY)
-        message (WARNING "submodule contrib/googletest is missing. to fix try run: \n git submodule update --init --recursive")
+        message (WARNING "submodule contrib/googletest is missing. to fix try run: \n git submodule update --init")
         message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal gtest")
         set (USE_INTERNAL_GTEST_LIBRARY 0)
     endif ()

@@ -11,7 +11,7 @@ option(USE_INTERNAL_H3_LIBRARY "Set to FALSE to use system h3 library instead of
 if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/h3/src/h3lib/include/h3Index.h")
     if(USE_INTERNAL_H3_LIBRARY)
-        message(WARNING "submodule contrib/h3 is missing. to fix try run: \n git submodule update --init --recursive")
+        message(WARNING "submodule contrib/h3 is missing. to fix try run: \n git submodule update --init")
         message(${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal h3 library")
         set(USE_INTERNAL_H3_LIBRARY 0)
     endif()

@@ -16,7 +16,7 @@ option(USE_INTERNAL_HDFS3_LIBRARY "Set to FALSE to use system HDFS3 instead of b
 if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libhdfs3/include/hdfs/hdfs.h")
     if(USE_INTERNAL_HDFS3_LIBRARY)
-        message(WARNING "submodule contrib/libhdfs3 is missing. to fix try run: \n git submodule update --init --recursive")
+        message(WARNING "submodule contrib/libhdfs3 is missing. to fix try run: \n git submodule update --init")
         message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal HDFS3 library")
         set(USE_INTERNAL_HDFS3_LIBRARY 0)
     endif()

@@ -16,7 +16,7 @@ option (USE_INTERNAL_ICU_LIBRARY "Set to FALSE to use system ICU library instead
 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/icu/icu4c/LICENSE")
     if (USE_INTERNAL_ICU_LIBRARY)
-        message (WARNING "submodule contrib/icu is missing. to fix try run: \n git submodule update --init --recursive")
+        message (WARNING "submodule contrib/icu is missing. to fix try run: \n git submodule update --init")
         message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal ICU")
         set (USE_INTERNAL_ICU_LIBRARY 0)
     endif ()

@@ -1,7 +1,7 @@
 OPTION(ENABLE_KRB5 "Enable krb5" ${ENABLE_LIBRARIES})
 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/krb5/README")
-    message (WARNING "submodule contrib/krb5 is missing. to fix try run: \n git submodule update --init --recursive")
+    message (WARNING "submodule contrib/krb5 is missing. to fix try run: \n git submodule update --init")
     set (ENABLE_KRB5 0)
 endif ()

@@ -15,7 +15,7 @@ option (USE_INTERNAL_LDAP_LIBRARY "Set to FALSE to use system *LDAP library inst
 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/openldap/README")
     if (USE_INTERNAL_LDAP_LIBRARY)
-        message (WARNING "Submodule contrib/openldap is missing. To fix try running:\n git submodule update --init --recursive")
+        message (WARNING "Submodule contrib/openldap is missing. To fix try running:\n git submodule update --init")
         message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal LDAP library")
     endif ()

@@ -16,7 +16,7 @@ endif()
 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libgsasl/src/gsasl.h")
     if (USE_INTERNAL_LIBGSASL_LIBRARY)
-        message (WARNING "submodule contrib/libgsasl is missing. to fix try run: \n git submodule update --init --recursive")
+        message (WARNING "submodule contrib/libgsasl is missing. to fix try run: \n git submodule update --init")
         message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal libgsasl")
         set (USE_INTERNAL_LIBGSASL_LIBRARY 0)
     endif ()

@@ -5,14 +5,14 @@ if (NOT ENABLE_LIBPQXX)
 endif()
 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libpqxx/CMakeLists.txt")
-    message (WARNING "submodule contrib/libpqxx is missing. to fix try run: \n git submodule update --init --recursive")
+    message (WARNING "submodule contrib/libpqxx is missing. to fix try run: \n git submodule update --init")
     message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal libpqxx library")
     set (USE_LIBPQXX 0)
     return()
 endif()
 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libpq/include")
-    message (ERROR "submodule contrib/libpq is missing. to fix try run: \n git submodule update --init --recursive")
+    message (ERROR "submodule contrib/libpq is missing. to fix try run: \n git submodule update --init")
     message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal libpq needed for libpqxx")
     set (USE_LIBPQXX 0)
     return()

@@ -0,0 +1,11 @@
option(USE_LIBPROTOBUF_MUTATOR "Enable libprotobuf-mutator" ${ENABLE_FUZZING})

if (NOT USE_LIBPROTOBUF_MUTATOR)
    return()
endif()

set(LibProtobufMutator_SOURCE_DIR "${ClickHouse_SOURCE_DIR}/contrib/libprotobuf-mutator")

if (NOT EXISTS "${LibProtobufMutator_SOURCE_DIR}/README.md")
    message (ERROR "submodule contrib/libprotobuf-mutator is missing. to fix try run: \n git submodule update --init")
endif()
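Since USE_LIBPROTOBUF_MUTATOR defaults to ${ENABLE_FUZZING}, one configure switch should pull this module in together with the fuzzing block added to the root CMakeLists.txt above. A hedged example invocation (clang is implied by the libFuzzer instrumentation):

    cmake .. -DCMAKE_C_COMPILER=clang -DCMAKE_CXX_COMPILER=clang++ -DENABLE_FUZZING=1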

@@ -5,7 +5,7 @@ if (OS_DARWIN AND COMPILER_GCC)
 endif()
 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libuv")
-    message (WARNING "submodule contrib/libuv is missing. to fix try run: \n git submodule update --init --recursive")
+    message (WARNING "submodule contrib/libuv is missing. to fix try run: \n git submodule update --init")
     SET(MISSING_INTERNAL_LIBUV_LIBRARY 1)
     return()
 endif()

@@ -2,7 +2,7 @@ option (USE_INTERNAL_LIBXML2_LIBRARY "Set to FALSE to use system libxml2 library
 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libxml2/libxml.h")
     if (USE_INTERNAL_LIBXML2_LIBRARY)
-        message (WARNING "submodule contrib/libxml2 is missing. to fix try run: \n git submodule update --init --recursive")
+        message (WARNING "submodule contrib/libxml2 is missing. to fix try run: \n git submodule update --init")
         message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal libxml")
         set (USE_INTERNAL_LIBXML2_LIBRARY 0)
     endif ()

@@ -12,7 +12,7 @@ if (NOT ENABLE_EMBEDDED_COMPILER)
 endif()
 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/llvm/llvm/CMakeLists.txt")
-    message (${RECONFIGURE_MESSAGE_LEVEL} "submodule /contrib/llvm is missing. to fix try run: \n git submodule update --init --recursive")
+    message (${RECONFIGURE_MESSAGE_LEVEL} "submodule /contrib/llvm is missing. to fix try run: \n git submodule update --init")
 endif ()
 set (USE_EMBEDDED_COMPILER 1)

@@ -11,7 +11,7 @@ option (USE_INTERNAL_MSGPACK_LIBRARY "Set to FALSE to use system msgpack library
 if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/msgpack-c/include/msgpack.hpp")
     if(USE_INTERNAL_MSGPACK_LIBRARY)
-        message(WARNING "Submodule contrib/msgpack-c is missing. To fix try run: \n git submodule update --init --recursive")
+        message(WARNING "Submodule contrib/msgpack-c is missing. To fix try run: \n git submodule update --init")
         message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal msgpack")
         set(USE_INTERNAL_MSGPACK_LIBRARY 0)
     endif()

@@ -16,7 +16,7 @@ option(USE_INTERNAL_MYSQL_LIBRARY "Set to FALSE to use system mysqlclient librar
 if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/mariadb-connector-c/README")
     if(USE_INTERNAL_MYSQL_LIBRARY)
-        message(WARNING "submodule contrib/mariadb-connector-c is missing. to fix try run: \n git submodule update --init --recursive")
+        message(WARNING "submodule contrib/mariadb-connector-c is missing. to fix try run: \n git submodule update --init")
         message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal mysql library")
         set(USE_INTERNAL_MYSQL_LIBRARY 0)
     endif()

@@ -7,7 +7,7 @@ if (NOT USE_INTERNAL_NANODBC_LIBRARY)
 endif ()
 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/nanodbc/CMakeLists.txt")
-    message (FATAL_ERROR "submodule contrib/nanodbc is missing. to fix try run: \n git submodule update --init --recursive")
+    message (FATAL_ERROR "submodule contrib/nanodbc is missing. to fix try run: \n git submodule update --init")
 endif()
 set (NANODBC_LIBRARY nanodbc)

@@ -7,21 +7,21 @@ if (NOT ENABLE_NLP)
 endif()
 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libstemmer_c/Makefile")
-    message (WARNING "submodule contrib/libstemmer_c is missing. to fix try run: \n git submodule update --init --recursive")
+    message (WARNING "submodule contrib/libstemmer_c is missing. to fix try run: \n git submodule update --init")
     message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal libstemmer_c library, NLP functions will be disabled")
     set (USE_NLP 0)
     return()
 endif ()
 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/wordnet-blast/CMakeLists.txt")
-    message (WARNING "submodule contrib/wordnet-blast is missing. to fix try run: \n git submodule update --init --recursive")
+    message (WARNING "submodule contrib/wordnet-blast is missing. to fix try run: \n git submodule update --init")
     message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal wordnet-blast library, NLP functions will be disabled")
     set (USE_NLP 0)
     return()
 endif ()
 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/lemmagen-c/README.md")
-    message (WARNING "submodule contrib/lemmagen-c is missing. to fix try run: \n git submodule update --init --recursive")
+    message (WARNING "submodule contrib/lemmagen-c is missing. to fix try run: \n git submodule update --init")
     message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal lemmagen-c library, NLP functions will be disabled")
     set (USE_NLP 0)
     return()

@@ -5,7 +5,7 @@ if (NOT ENABLE_NURAFT)
 endif()
 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/NuRaft/CMakeLists.txt")
-    message (WARNING "submodule contrib/NuRaft is missing. to fix try run: \n git submodule update --init --recursive")
+    message (WARNING "submodule contrib/NuRaft is missing. to fix try run: \n git submodule update --init")
     message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal NuRaft library")
     set (USE_NURAFT 0)
     return()

@@ -18,7 +18,7 @@ include(cmake/find/snappy.cmake)
 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/orc/c++/include/orc/OrcFile.hh")
     if(USE_INTERNAL_ORC_LIBRARY)
-        message(WARNING "submodule contrib/orc is missing. to fix try run: \n git submodule update --init --recursive")
+        message(WARNING "submodule contrib/orc is missing. to fix try run: \n git submodule update --init")
         message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal ORC")
         set(USE_INTERNAL_ORC_LIBRARY 0)
     endif()

@@ -20,7 +20,7 @@ endif()
 if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/arrow/cpp/CMakeLists.txt")
     if(USE_INTERNAL_PARQUET_LIBRARY)
-        message(WARNING "submodule contrib/arrow (required for Parquet) is missing. to fix try run: \n git submodule update --init --recursive")
+        message(WARNING "submodule contrib/arrow (required for Parquet) is missing. to fix try run: \n git submodule update --init")
         message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal parquet library")
         set(USE_INTERNAL_PARQUET_LIBRARY 0)
     endif()

@@ -15,7 +15,7 @@ option(USE_INTERNAL_PROTOBUF_LIBRARY "Set to FALSE to use system protobuf instea
 if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/protobuf/cmake/CMakeLists.txt")
     if(USE_INTERNAL_PROTOBUF_LIBRARY)
-        message(WARNING "submodule contrib/protobuf is missing. to fix try run: \n git submodule update --init --recursive")
+        message(WARNING "submodule contrib/protobuf is missing. to fix try run: \n git submodule update --init")
         message(${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal protobuf")
         set(USE_INTERNAL_PROTOBUF_LIBRARY 0)
     endif()

@@ -10,7 +10,7 @@ option(USE_INTERNAL_RAPIDJSON_LIBRARY "Set to FALSE to use system rapidjson libr
 if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/rapidjson/include/rapidjson/rapidjson.h")
     if(USE_INTERNAL_RAPIDJSON_LIBRARY)
-        message(WARNING "submodule contrib/rapidjson is missing. to fix try run: \n git submodule update --init --recursive")
+        message(WARNING "submodule contrib/rapidjson is missing. to fix try run: \n git submodule update --init")
         message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal rapidjson library")
         set(USE_INTERNAL_RAPIDJSON_LIBRARY 0)
     endif()

@@ -11,7 +11,7 @@ option (USE_INTERNAL_RDKAFKA_LIBRARY "Set to FALSE to use system librdkafka inst
 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/cppkafka/CMakeLists.txt")
     if(USE_INTERNAL_RDKAFKA_LIBRARY)
-        message (WARNING "submodule contrib/cppkafka is missing. to fix try run: \n git submodule update --init --recursive")
+        message (WARNING "submodule contrib/cppkafka is missing. to fix try run: \n git submodule update --init")
         message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal cppkafka")
         set (USE_INTERNAL_RDKAFKA_LIBRARY 0)
     endif()
@@ -20,7 +20,7 @@ endif ()
 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/librdkafka/CMakeLists.txt")
     if(USE_INTERNAL_RDKAFKA_LIBRARY OR MISSING_INTERNAL_CPPKAFKA_LIBRARY)
-        message (WARNING "submodule contrib/librdkafka is missing. to fix try run: \n git submodule update --init --recursive")
+        message (WARNING "submodule contrib/librdkafka is missing. to fix try run: \n git submodule update --init")
         message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal rdkafka")
         set (USE_INTERNAL_RDKAFKA_LIBRARY 0)
     endif()

@@ -2,7 +2,7 @@ option (USE_INTERNAL_RE2_LIBRARY "Set to FALSE to use system re2 library instead
 if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/re2/CMakeLists.txt")
     if(USE_INTERNAL_RE2_LIBRARY)
-        message(WARNING "submodule contrib/re2 is missing. to fix try run: \n git submodule update --init --recursive")
+        message(WARNING "submodule contrib/re2 is missing. to fix try run: \n git submodule update --init")
         message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal re2 library")
     endif()
     set(USE_INTERNAL_RE2_LIBRARY 0)

@@ -15,7 +15,7 @@ option(USE_INTERNAL_ROCKSDB_LIBRARY "Set to FALSE to use system ROCKSDB library
 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/rocksdb/CMakeLists.txt")
     if (USE_INTERNAL_ROCKSDB_LIBRARY)
-        message (WARNING "submodule contrib is missing. to fix try run: \n git submodule update --init --recursive")
+        message (WARNING "submodule contrib is missing. to fix try run: \n git submodule update --init")
         message(${RECONFIGURE_MESSAGE_LEVEL} "cannot find internal rocksdb")
     endif()
     set (MISSING_INTERNAL_ROCKSDB 1)

@@ -3,7 +3,7 @@ option(ENABLE_S2_GEOMETRY "Enable S2 geometry library" ${ENABLE_LIBRARIES})
 if (ENABLE_S2_GEOMETRY)
     if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/s2geometry")
-        message (WARNING "submodule contrib/s2geometry is missing. to fix try run: \n git submodule update --init --recursive")
+        message (WARNING "submodule contrib/s2geometry is missing. to fix try run: \n git submodule update --init")
         set (ENABLE_S2_GEOMETRY 0)
         set (USE_S2_GEOMETRY 0)
     else()

@@ -23,7 +23,7 @@ if (NOT USE_INTERNAL_AWS_S3_LIBRARY)
 endif()
 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/aws/aws-cpp-sdk-s3")
-    message (WARNING "submodule contrib/aws is missing. to fix try run: \n git submodule update --init --recursive")
+    message (WARNING "submodule contrib/aws is missing. to fix try run: \n git submodule update --init")
     if (USE_INTERNAL_AWS_S3_LIBRARY)
         message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal S3 library")
     endif ()

@@ -2,7 +2,7 @@ set (SENTRY_LIBRARY "sentry")
 set (SENTRY_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/sentry-native/include")
 if (NOT EXISTS "${SENTRY_INCLUDE_DIR}/sentry.h")
-    message (WARNING "submodule contrib/sentry-native is missing. to fix try run: \n git submodule update --init --recursive")
+    message (WARNING "submodule contrib/sentry-native is missing. to fix try run: \n git submodule update --init")
     if (USE_SENTRY)
         message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal sentry library")
     endif()

@@ -1,7 +1,7 @@
 option (USE_SIMDJSON "Use simdjson" ${ENABLE_LIBRARIES})
 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/simdjson/include/simdjson.h")
-    message (WARNING "submodule contrib/simdjson is missing. to fix try run: \n git submodule update --init --recursive")
+    message (WARNING "submodule contrib/simdjson is missing. to fix try run: \n git submodule update --init")
     if (USE_SIMDJSON)
         message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal simdjson library")
     endif()

@@ -5,7 +5,7 @@ if (NOT ENABLE_SQLITE)
 endif()
 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/sqlite-amalgamation/sqlite3.c")
-    message (WARNING "submodule contrib/sqlite3-amalgamation is missing. to fix try run: \n git submodule update --init --recursive")
+    message (WARNING "submodule contrib/sqlite3-amalgamation is missing. to fix try run: \n git submodule update --init")
     message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal sqlite library")
     set (USE_SQLITE 0)
     return()

@@ -13,7 +13,7 @@ option(USE_INTERNAL_SSL_LIBRARY "Set to FALSE to use system *ssl library instead
 if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/boringssl/README.md")
     if(USE_INTERNAL_SSL_LIBRARY)
-        message(WARNING "submodule contrib/boringssl is missing. to fix try run: \n git submodule update --init --recursive")
+        message(WARNING "submodule contrib/boringssl is missing. to fix try run: \n git submodule update --init")
         message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal ssl library")
     endif()
     set(USE_INTERNAL_SSL_LIBRARY 0)

@@ -2,11 +2,11 @@ option(ENABLE_STATS "Enable StatsLib library" ${ENABLE_LIBRARIES})
 if (ENABLE_STATS)
     if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/stats")
-        message (WARNING "submodule contrib/stats is missing. to fix try run: \n git submodule update --init --recursive")
+        message (WARNING "submodule contrib/stats is missing. to fix try run: \n git submodule update --init")
         set (ENABLE_STATS 0)
         set (USE_STATS 0)
     elseif (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/gcem")
-        message (WARNING "submodule contrib/gcem is missing. to fix try run: \n git submodule update --init --recursive")
+        message (WARNING "submodule contrib/gcem is missing. to fix try run: \n git submodule update --init")
         set (ENABLE_STATS 0)
         set (USE_STATS 0)
     else()

@@ -2,7 +2,7 @@ option (USE_INTERNAL_XZ_LIBRARY "Set to OFF to use system xz (lzma) library inst
if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/xz/src/liblzma/api/lzma.h")
if(USE_INTERNAL_XZ_LIBRARY)
-    message(WARNING "submodule contrib/xz is missing. to fix try run: \n git submodule update --init --recursive")
+    message(WARNING "submodule contrib/xz is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal xz (lzma) library")
set(USE_INTERNAL_XZ_LIBRARY 0)
endif()

@@ -5,5 +5,5 @@ if (NOT USE_YAML_CPP)
endif()
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/yaml-cpp/README.md")
-    message (ERROR "submodule contrib/yaml-cpp is missing. to fix try run: \n git submodule update --init --recursive")
+    message (ERROR "submodule contrib/yaml-cpp is missing. to fix try run: \n git submodule update --init")
endif()

@@ -12,7 +12,7 @@ endif ()
if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/${INTERNAL_ZLIB_NAME}/zlib.h")
if(USE_INTERNAL_ZLIB_LIBRARY)
-    message(WARNING "submodule contrib/${INTERNAL_ZLIB_NAME} is missing. to fix try run: \n git submodule update --init --recursive")
+    message(WARNING "submodule contrib/${INTERNAL_ZLIB_NAME} is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal zlib library")
endif()
set(USE_INTERNAL_ZLIB_LIBRARY 0)

@@ -2,7 +2,7 @@ option (USE_INTERNAL_ZSTD_LIBRARY "Set to FALSE to use system zstd library inste
if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/zstd/lib/zstd.h")
if(USE_INTERNAL_ZSTD_LIBRARY)
-    message(WARNING "submodule contrib/zstd is missing. to fix try run: \n git submodule update --init --recursive")
+    message(WARNING "submodule contrib/zstd is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal zstd library")
set(USE_INTERNAL_ZSTD_LIBRARY 0)
endif()

@@ -14,6 +14,8 @@ endif ()
if (OS_ANDROID)
# pthread and rt are included in libc
set (DEFAULT_LIBS "${DEFAULT_LIBS} ${BUILTINS_LIBRARY} ${COVERAGE_OPTION} -lc -lm -ldl")
+elseif (USE_MUSL)
+    set (DEFAULT_LIBS "${DEFAULT_LIBS} ${BUILTINS_LIBRARY} ${COVERAGE_OPTION} -static -lc")
else ()
set (DEFAULT_LIBS "${DEFAULT_LIBS} ${BUILTINS_LIBRARY} ${COVERAGE_OPTION} -lc -lm -lrt -lpthread -ldl")
endif ()
@@ -26,7 +28,7 @@ set(CMAKE_C_STANDARD_LIBRARIES ${DEFAULT_LIBS})
# glibc-compatibility library relies to constant version of libc headers
# (because minor changes in function attributes between different glibc versions will introduce incompatibilities)
# This is for x86_64. For other architectures we have separate toolchains.
-if (ARCH_AMD64 AND NOT_UNBUNDLED)
+if (ARCH_AMD64 AND NOT_UNBUNDLED AND NOT CMAKE_CROSSCOMPILING)
set(CMAKE_C_STANDARD_INCLUDE_DIRECTORIES ${ClickHouse_SOURCE_DIR}/contrib/libc-headers/x86_64-linux-gnu ${ClickHouse_SOURCE_DIR}/contrib/libc-headers)
set(CMAKE_CXX_STANDARD_INCLUDE_DIRECTORIES ${ClickHouse_SOURCE_DIR}/contrib/libc-headers/x86_64-linux-gnu ${ClickHouse_SOURCE_DIR}/contrib/libc-headers)
endif ()
@@ -37,8 +39,10 @@ set(THREADS_PREFER_PTHREAD_FLAG ON)
find_package(Threads REQUIRED)
if (NOT OS_ANDROID)
-    # Our compatibility layer doesn't build under Android, many errors in musl.
-    add_subdirectory(base/glibc-compatibility)
+    if (NOT USE_MUSL)
+        # Our compatibility layer doesn't build under Android, many errors in musl.
+        add_subdirectory(base/glibc-compatibility)
+    endif ()
add_subdirectory(base/harmful)
endif ()

@@ -0,0 +1,32 @@
set (CMAKE_TRY_COMPILE_TARGET_TYPE STATIC_LIBRARY)
set (CMAKE_SYSTEM_NAME "Linux")
set (CMAKE_SYSTEM_PROCESSOR "riscv64")
set (CMAKE_C_COMPILER_TARGET "riscv64-linux-gnu")
set (CMAKE_CXX_COMPILER_TARGET "riscv64-linux-gnu")
set (CMAKE_ASM_COMPILER_TARGET "riscv64-linux-gnu")
set (TOOLCHAIN_PATH "${CMAKE_CURRENT_LIST_DIR}/../../contrib/sysroot/linux-riscv64")
set (CMAKE_SYSROOT "${TOOLCHAIN_PATH}")
find_program (LLVM_AR_PATH NAMES "llvm-ar" "llvm-ar-13" "llvm-ar-12" "llvm-ar-11" "llvm-ar-10" "llvm-ar-9" "llvm-ar-8")
find_program (LLVM_RANLIB_PATH NAMES "llvm-ranlib" "llvm-ranlib-13" "llvm-ranlib-12" "llvm-ranlib-11" "llvm-ranlib-10" "llvm-ranlib-9")
set (CMAKE_AR "${LLVM_AR_PATH}" CACHE FILEPATH "" FORCE)
set (CMAKE_RANLIB "${LLVM_RANLIB_PATH}" CACHE FILEPATH "" FORCE)
set (CMAKE_C_FLAGS_INIT "${CMAKE_C_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}")
set (CMAKE_CXX_FLAGS_INIT "${CMAKE_CXX_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}")
set (CMAKE_ASM_FLAGS_INIT "${CMAKE_ASM_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}")
set (LINKER_NAME "ld.lld" CACHE STRING "" FORCE)
set (CMAKE_EXE_LINKER_FLAGS_INIT "-fuse-ld=lld")
set (CMAKE_SHARED_LINKER_FLAGS_INIT "-fuse-ld=lld")
set (HAS_PRE_1970_EXITCODE "0" CACHE STRING "Result from TRY_RUN" FORCE)
set (HAS_PRE_1970_EXITCODE__TRYRUN_OUTPUT "" CACHE STRING "Output from TRY_RUN" FORCE)
set (HAS_POST_2038_EXITCODE "0" CACHE STRING "Result from TRY_RUN" FORCE)
set (HAS_POST_2038_EXITCODE__TRYRUN_OUTPUT "" CACHE STRING "Output from TRY_RUN" FORCE)
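A toolchain file like this is consumed by CMake through `-DCMAKE_TOOLCHAIN_FILE` at configure time. A minimal sketch of a cross-compiling configure step, assuming the file above is saved as `cmake/linux/toolchain-riscv64.cmake` and that clang and lld are installed (the paths and the Ninja generator are illustrative, not taken from this diff):

``` bash
# The sysroot submodule referenced by the toolchain file must be present.
git submodule update --init contrib/sysroot
mkdir -p build-riscv64 && cd build-riscv64
cmake .. -G Ninja \
    -DCMAKE_TOOLCHAIN_FILE=../cmake/linux/toolchain-riscv64.cmake \
    -DCMAKE_BUILD_TYPE=RelWithDebInfo
```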

@@ -0,0 +1,35 @@
set (CMAKE_TRY_COMPILE_TARGET_TYPE STATIC_LIBRARY)
set (CMAKE_SYSTEM_NAME "Linux")
set (CMAKE_SYSTEM_PROCESSOR "x86_64")
set (CMAKE_C_COMPILER_TARGET "x86_64-linux-musl")
set (CMAKE_CXX_COMPILER_TARGET "x86_64-linux-musl")
set (CMAKE_ASM_COMPILER_TARGET "x86_64-linux-musl")
set (TOOLCHAIN_PATH "${CMAKE_CURRENT_LIST_DIR}/../../contrib/sysroot/linux-x86_64-musl")
set (CMAKE_SYSROOT "${TOOLCHAIN_PATH}")
find_program (LLVM_AR_PATH NAMES "llvm-ar" "llvm-ar-13" "llvm-ar-12" "llvm-ar-11" "llvm-ar-10" "llvm-ar-9" "llvm-ar-8")
find_program (LLVM_RANLIB_PATH NAMES "llvm-ranlib" "llvm-ranlib-13" "llvm-ranlib-12" "llvm-ranlib-11" "llvm-ranlib-10" "llvm-ranlib-9")
set (CMAKE_AR "${LLVM_AR_PATH}" CACHE FILEPATH "" FORCE)
set (CMAKE_RANLIB "${LLVM_RANLIB_PATH}" CACHE FILEPATH "" FORCE)
set (CMAKE_C_FLAGS_INIT "${CMAKE_C_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}")
set (CMAKE_CXX_FLAGS_INIT "${CMAKE_CXX_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}")
set (CMAKE_ASM_FLAGS_INIT "${CMAKE_ASM_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}")
set (LINKER_NAME "ld.lld" CACHE STRING "" FORCE)
set (CMAKE_EXE_LINKER_FLAGS_INIT "-fuse-ld=lld")
set (CMAKE_SHARED_LINKER_FLAGS_INIT "-fuse-ld=lld")
set (HAS_PRE_1970_EXITCODE "0" CACHE STRING "Result from TRY_RUN" FORCE)
set (HAS_PRE_1970_EXITCODE__TRYRUN_OUTPUT "" CACHE STRING "Output from TRY_RUN" FORCE)
set (HAS_POST_2038_EXITCODE "0" CACHE STRING "Result from TRY_RUN" FORCE)
set (HAS_POST_2038_EXITCODE__TRYRUN_OUTPUT "" CACHE STRING "Output from TRY_RUN" FORCE)
set (USE_MUSL 1)
add_definitions(-DUSE_MUSL=1)
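This file additionally defines `USE_MUSL`, which the other changes in this commit key off (static default libs, jemalloc and libc++ tweaks). A hedged sketch of a musl build plus a static-linkage check, assuming the file is saved as `cmake/linux/toolchain-x86_64-musl.cmake` and the binary lands at `programs/clickhouse` (both paths are illustrative):

``` bash
mkdir -p build-musl && cd build-musl
cmake .. -G Ninja \
    -DCMAKE_TOOLCHAIN_FILE=../cmake/linux/toolchain-x86_64-musl.cmake \
    -DCMAKE_BUILD_TYPE=RelWithDebInfo
ninja clickhouse
# A fully static binary reports "not a dynamic executable" here:
ldd programs/clickhouse || true
```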

@@ -1,16 +1,5 @@
# Third-party libraries may have substandard code.
-# Put all targets defined here and in added subfolders under "contrib/" folder in GUI-based IDEs by default.
-# Some of third-party projects may override CMAKE_FOLDER or FOLDER property of their targets, so they will
-# appear not in "contrib/" as originally planned here.
-get_filename_component (_current_dir_name "${CMAKE_CURRENT_LIST_DIR}" NAME)
-if (CMAKE_FOLDER)
-    set (CMAKE_FOLDER "${CMAKE_FOLDER}/${_current_dir_name}")
-else ()
-    set (CMAKE_FOLDER "${_current_dir_name}")
-endif ()
-unset (_current_dir_name)
set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -w")
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -w")
@@ -49,6 +38,14 @@ add_subdirectory (replxx-cmake)
add_subdirectory (unixodbc-cmake)
add_subdirectory (nanodbc-cmake)
+if (USE_INTERNAL_CAPNP_LIBRARY AND NOT MISSING_INTERNAL_CAPNP_LIBRARY)
+    add_subdirectory(capnproto-cmake)
+endif ()
+if (ENABLE_FUZZING)
+    add_subdirectory (libprotobuf-mutator-cmake)
+endif()
if (USE_YAML_CPP)
add_subdirectory (yaml-cpp-cmake)
endif()
@@ -348,3 +345,76 @@ endif()
if (USE_S2_GEOMETRY)
add_subdirectory(s2geometry-cmake)
endif()
# Put all targets defined here and in subdirectories under "contrib/<immediate-subdir>" folders in GUI-based IDEs.
# Some of third-party projects may override CMAKE_FOLDER or FOLDER property of their targets, so they would not appear
# in "contrib/..." as originally planned, so we workaround this by fixing FOLDER properties of all targets manually,
# instead of controlling it via CMAKE_FOLDER.
function (ensure_target_rooted_in _target _folder)
# Skip INTERFACE library targets, since FOLDER property is not available for them.
get_target_property (_target_type "${_target}" TYPE)
if (_target_type STREQUAL "INTERFACE_LIBRARY")
return ()
endif ()
# Read the original FOLDER property value, if any.
get_target_property (_folder_prop "${_target}" FOLDER)
# Normalize that value, so we avoid possible repetitions in folder names.
if (NOT _folder_prop)
set (_folder_prop "")
endif ()
if (CMAKE_FOLDER AND _folder_prop MATCHES "^${CMAKE_FOLDER}/(.*)\$")
set (_folder_prop "${CMAKE_MATCH_1}")
endif ()
if (_folder AND _folder_prop MATCHES "^${_folder}/(.*)\$")
set (_folder_prop "${CMAKE_MATCH_1}")
endif ()
if (_folder)
set (_folder_prop "${_folder}/${_folder_prop}")
endif ()
if (CMAKE_FOLDER)
set (_folder_prop "${CMAKE_FOLDER}/${_folder_prop}")
endif ()
# Set the updated FOLDER property value back.
set_target_properties ("${_target}" PROPERTIES FOLDER "${_folder_prop}")
endfunction ()
function (ensure_own_targets_are_rooted_in _dir _folder)
get_directory_property (_targets DIRECTORY "${_dir}" BUILDSYSTEM_TARGETS)
foreach (_target IN LISTS _targets)
ensure_target_rooted_in ("${_target}" "${_folder}")
endforeach ()
endfunction ()
function (ensure_all_targets_are_rooted_in _dir _folder)
ensure_own_targets_are_rooted_in ("${_dir}" "${_folder}")
get_property (_sub_dirs DIRECTORY "${_dir}" PROPERTY SUBDIRECTORIES)
foreach (_sub_dir IN LISTS _sub_dirs)
ensure_all_targets_are_rooted_in ("${_sub_dir}" "${_folder}")
endforeach ()
endfunction ()
function (organize_ide_folders_2_level _dir)
get_filename_component (_dir_name "${_dir}" NAME)
ensure_own_targets_are_rooted_in ("${_dir}" "${_dir_name}")
# Note, that we respect only first two levels of nesting, we don't want to
# reorganize target folders further within each third-party dir.
get_property (_sub_dirs DIRECTORY "${_dir}" PROPERTY SUBDIRECTORIES)
foreach (_sub_dir IN LISTS _sub_dirs)
get_filename_component (_sub_dir_name "${_sub_dir}" NAME)
ensure_all_targets_are_rooted_in ("${_sub_dir}" "${_dir_name}/${_sub_dir_name}")
endforeach ()
endfunction ()
organize_ide_folders_2_level ("${CMAKE_CURRENT_LIST_DIR}")

contrib/boringssl vendored
@@ -1 +1 @@
-Subproject commit a6a2e2ab3e44d97ce98e51c558e989f211de7eb3
+Subproject commit c1e01a441d6db234f4f12e63a7657d1f9e6db9c1

@@ -4,7 +4,7 @@
# This file is created by generate_build_files.py and edited accordingly.
-cmake_minimum_required(VERSION 3.0)
+cmake_minimum_required(VERSION 3.5)
project(BoringSSL LANGUAGES C CXX)
@@ -20,12 +20,7 @@ if(CMAKE_COMPILER_IS_GNUCXX OR CLANG)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -stdlib=libc++")
endif()
-set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fno-common")
-if((CMAKE_C_COMPILER_VERSION VERSION_GREATER "4.8.99") OR CLANG)
-    set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -std=c11")
-else()
-    set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -std=c99")
-endif()
+set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fno-common -std=c11")
endif()
# pthread_rwlock_t requires a feature flag.
@@ -55,7 +50,7 @@ add_definitions(-DBORINGSSL_IMPLEMENTATION)
# builds.
if(NOT OPENSSL_NO_ASM AND CMAKE_OSX_ARCHITECTURES)
list(LENGTH CMAKE_OSX_ARCHITECTURES NUM_ARCHES)
-if(NOT ${NUM_ARCHES} EQUAL 1)
+if(NOT NUM_ARCHES EQUAL 1)
message(FATAL_ERROR "Universal binaries not supported.")
endif()
list(GET CMAKE_OSX_ARCHITECTURES 0 CMAKE_SYSTEM_PROCESSOR)
@@ -78,7 +73,13 @@ elseif(${CMAKE_SYSTEM_PROCESSOR} STREQUAL "AMD64")
elseif(${CMAKE_SYSTEM_PROCESSOR} STREQUAL "x86")
set(ARCH "x86")
elseif(${CMAKE_SYSTEM_PROCESSOR} STREQUAL "i386")
-set(ARCH "x86")
+# cmake uses `uname -p` to set the system processor, but Solaris
+# systems support multiple architectures.
+if((${CMAKE_SYSTEM_NAME} STREQUAL "SunOS") AND CMAKE_SIZEOF_VOID_P EQUAL 8)
+    set(ARCH "x86_64")
+else()
+    set(ARCH "x86")
+endif()
elseif(${CMAKE_SYSTEM_PROCESSOR} STREQUAL "i686")
set(ARCH "x86")
elseif(${CMAKE_SYSTEM_PROCESSOR} STREQUAL "aarch64")
@@ -289,6 +290,21 @@ set(
mac-x86_64/crypto/test/trampoline-x86_64.S
)
+set(
+    CRYPTO_win_aarch64_SOURCES
+    win-aarch64/crypto/chacha/chacha-armv8.S
+    win-aarch64/crypto/fipsmodule/aesv8-armx64.S
+    win-aarch64/crypto/fipsmodule/armv8-mont.S
+    win-aarch64/crypto/fipsmodule/ghash-neon-armv8.S
+    win-aarch64/crypto/fipsmodule/ghashv8-armx64.S
+    win-aarch64/crypto/fipsmodule/sha1-armv8.S
+    win-aarch64/crypto/fipsmodule/sha256-armv8.S
+    win-aarch64/crypto/fipsmodule/sha512-armv8.S
+    win-aarch64/crypto/fipsmodule/vpaes-armv8.S
+    win-aarch64/crypto/test/trampoline-armv8.S
+)
set(
CRYPTO_win_x86_SOURCES
@@ -331,9 +347,9 @@ set(
win-x86_64/crypto/test/trampoline-x86_64.asm
)
-if(APPLE AND ${ARCH} STREQUAL "aarch64")
+if(APPLE AND ARCH STREQUAL "aarch64")
set(CRYPTO_ARCH_SOURCES ${CRYPTO_ios_aarch64_SOURCES})
-elseif(APPLE AND ${ARCH} STREQUAL "arm")
+elseif(APPLE AND ARCH STREQUAL "arm")
set(CRYPTO_ARCH_SOURCES ${CRYPTO_ios_arm_SOURCES})
elseif(APPLE)
set(CRYPTO_ARCH_SOURCES ${CRYPTO_mac_${ARCH}_SOURCES})
@@ -360,6 +376,7 @@ add_library(
"${BORINGSSL_SOURCE_DIR}/crypto/asn1/a_object.c"
"${BORINGSSL_SOURCE_DIR}/crypto/asn1/a_octet.c"
"${BORINGSSL_SOURCE_DIR}/crypto/asn1/a_print.c"
+"${BORINGSSL_SOURCE_DIR}/crypto/asn1/a_strex.c"
"${BORINGSSL_SOURCE_DIR}/crypto/asn1/a_strnid.c"
"${BORINGSSL_SOURCE_DIR}/crypto/asn1/a_time.c"
"${BORINGSSL_SOURCE_DIR}/crypto/asn1/a_type.c"
@@ -389,6 +406,7 @@ add_library(
"${BORINGSSL_SOURCE_DIR}/crypto/bio/printf.c"
"${BORINGSSL_SOURCE_DIR}/crypto/bio/socket.c"
"${BORINGSSL_SOURCE_DIR}/crypto/bio/socket_helper.c"
+"${BORINGSSL_SOURCE_DIR}/crypto/blake2/blake2.c"
"${BORINGSSL_SOURCE_DIR}/crypto/bn_extra/bn_asn1.c"
"${BORINGSSL_SOURCE_DIR}/crypto/bn_extra/convert.c"
"${BORINGSSL_SOURCE_DIR}/crypto/buf/buf.c"
@@ -413,6 +431,7 @@ add_library(
"${BORINGSSL_SOURCE_DIR}/crypto/conf/conf.c"
"${BORINGSSL_SOURCE_DIR}/crypto/cpu-aarch64-fuchsia.c"
"${BORINGSSL_SOURCE_DIR}/crypto/cpu-aarch64-linux.c"
+"${BORINGSSL_SOURCE_DIR}/crypto/cpu-aarch64-win.c"
"${BORINGSSL_SOURCE_DIR}/crypto/cpu-arm-linux.c"
"${BORINGSSL_SOURCE_DIR}/crypto/cpu-arm.c"
"${BORINGSSL_SOURCE_DIR}/crypto/cpu-intel.c"
@@ -452,7 +471,6 @@ add_library(
"${BORINGSSL_SOURCE_DIR}/crypto/ex_data.c"
"${BORINGSSL_SOURCE_DIR}/crypto/fipsmodule/bcm.c"
"${BORINGSSL_SOURCE_DIR}/crypto/fipsmodule/fips_shared_support.c"
-"${BORINGSSL_SOURCE_DIR}/crypto/fipsmodule/is_fips.c"
"${BORINGSSL_SOURCE_DIR}/crypto/hkdf/hkdf.c"
"${BORINGSSL_SOURCE_DIR}/crypto/hpke/hpke.c"
"${BORINGSSL_SOURCE_DIR}/crypto/hrss/hrss.c"
@@ -499,13 +517,13 @@ add_library(
"${BORINGSSL_SOURCE_DIR}/crypto/trust_token/voprf.c"
"${BORINGSSL_SOURCE_DIR}/crypto/x509/a_digest.c"
"${BORINGSSL_SOURCE_DIR}/crypto/x509/a_sign.c"
-"${BORINGSSL_SOURCE_DIR}/crypto/x509/a_strex.c"
"${BORINGSSL_SOURCE_DIR}/crypto/x509/a_verify.c"
"${BORINGSSL_SOURCE_DIR}/crypto/x509/algorithm.c"
"${BORINGSSL_SOURCE_DIR}/crypto/x509/asn1_gen.c"
"${BORINGSSL_SOURCE_DIR}/crypto/x509/by_dir.c"
"${BORINGSSL_SOURCE_DIR}/crypto/x509/by_file.c"
"${BORINGSSL_SOURCE_DIR}/crypto/x509/i2d_pr.c"
+"${BORINGSSL_SOURCE_DIR}/crypto/x509/name_print.c"
"${BORINGSSL_SOURCE_DIR}/crypto/x509/rsa_pss.c"
"${BORINGSSL_SOURCE_DIR}/crypto/x509/t_crl.c"
"${BORINGSSL_SOURCE_DIR}/crypto/x509/t_req.c"
@@ -519,7 +537,6 @@ add_library(
"${BORINGSSL_SOURCE_DIR}/crypto/x509/x509_ext.c"
"${BORINGSSL_SOURCE_DIR}/crypto/x509/x509_lu.c"
"${BORINGSSL_SOURCE_DIR}/crypto/x509/x509_obj.c"
-"${BORINGSSL_SOURCE_DIR}/crypto/x509/x509_r2x.c"
"${BORINGSSL_SOURCE_DIR}/crypto/x509/x509_req.c"
"${BORINGSSL_SOURCE_DIR}/crypto/x509/x509_set.c"
"${BORINGSSL_SOURCE_DIR}/crypto/x509/x509_trs.c"
@@ -589,6 +606,8 @@ add_library(
"${BORINGSSL_SOURCE_DIR}/ssl/d1_srtp.cc"
"${BORINGSSL_SOURCE_DIR}/ssl/dtls_method.cc"
"${BORINGSSL_SOURCE_DIR}/ssl/dtls_record.cc"
+"${BORINGSSL_SOURCE_DIR}/ssl/encrypted_client_hello.cc"
+"${BORINGSSL_SOURCE_DIR}/ssl/extensions.cc"
"${BORINGSSL_SOURCE_DIR}/ssl/handoff.cc"
"${BORINGSSL_SOURCE_DIR}/ssl/handshake.cc"
"${BORINGSSL_SOURCE_DIR}/ssl/handshake_client.cc"
@@ -611,7 +630,6 @@ add_library(
"${BORINGSSL_SOURCE_DIR}/ssl/ssl_versions.cc"
"${BORINGSSL_SOURCE_DIR}/ssl/ssl_x509.cc"
"${BORINGSSL_SOURCE_DIR}/ssl/t1_enc.cc"
-"${BORINGSSL_SOURCE_DIR}/ssl/t1_lib.cc"
"${BORINGSSL_SOURCE_DIR}/ssl/tls13_both.cc"
"${BORINGSSL_SOURCE_DIR}/ssl/tls13_client.cc"
"${BORINGSSL_SOURCE_DIR}/ssl/tls13_enc.cc"
@@ -633,6 +651,7 @@ add_executable(
"${BORINGSSL_SOURCE_DIR}/tool/digest.cc"
"${BORINGSSL_SOURCE_DIR}/tool/fd.cc"
"${BORINGSSL_SOURCE_DIR}/tool/file.cc"
+"${BORINGSSL_SOURCE_DIR}/tool/generate_ech.cc"
"${BORINGSSL_SOURCE_DIR}/tool/generate_ed25519.cc"
"${BORINGSSL_SOURCE_DIR}/tool/genrsa.cc"
"${BORINGSSL_SOURCE_DIR}/tool/pkcs12.cc"

contrib/capnproto vendored
@@ -1 +1 @@
-Subproject commit a00ccd91b3746ef2ab51d40fe3265829949d1ace
+Subproject commit c8189ec3c27dacbd4a3288e682473010e377f593

@@ -45,6 +45,7 @@ set (CAPNP_SRCS
"${CAPNPROTO_SOURCE_DIR}/capnp/serialize-packed.c++"
"${CAPNPROTO_SOURCE_DIR}/capnp/schema.c++"
+"${CAPNPROTO_SOURCE_DIR}/capnp/stream.capnp.c++"
"${CAPNPROTO_SOURCE_DIR}/capnp/schema-loader.c++"
"${CAPNPROTO_SOURCE_DIR}/capnp/dynamic.c++"
"${CAPNPROTO_SOURCE_DIR}/capnp/stringify.c++"
@@ -63,6 +64,7 @@ set (CAPNPC_SRCS
"${CAPNPROTO_SOURCE_DIR}/capnp/compiler/lexer.c++"
"${CAPNPROTO_SOURCE_DIR}/capnp/compiler/grammar.capnp.c++"
"${CAPNPROTO_SOURCE_DIR}/capnp/compiler/parser.c++"
+"${CAPNPROTO_SOURCE_DIR}/capnp/compiler/generics.c++"
"${CAPNPROTO_SOURCE_DIR}/capnp/compiler/node-translator.c++"
"${CAPNPROTO_SOURCE_DIR}/capnp/compiler/compiler.c++"
"${CAPNPROTO_SOURCE_DIR}/capnp/schema-parser.c++"

contrib/fastops vendored
@@ -1 +1 @@
-Subproject commit 012b777df9e2d145a24800a6c8c3d4a0249bb09e
+Subproject commit 1460583af7d13c0e980ce46aec8ee9400314669a

@@ -18,8 +18,10 @@
 * Define overrides for non-standard allocator-related functions if they are
 * present on the system.
 */
-#define JEMALLOC_OVERRIDE_MEMALIGN
-#define JEMALLOC_OVERRIDE_VALLOC
+#if !defined(USE_MUSL)
+#define JEMALLOC_OVERRIDE_MEMALIGN
+#define JEMALLOC_OVERRIDE_VALLOC
+#endif
/*
 * At least Linux omits the "const" in:

@@ -1,6 +1,6 @@
// OSX does not have this for system alloc functions, so you will get
// "exception specification in declaration" error.
-#if defined(__APPLE__) || defined(__FreeBSD__)
+#if defined(__APPLE__) || defined(__FreeBSD__) || defined(USE_MUSL)
# undef JEMALLOC_NOTHROW
# define JEMALLOC_NOTHROW

@@ -13,12 +13,14 @@
 * Define overrides for non-standard allocator-related functions if they are
 * present on the system.
 */
+#if !defined(USE_MUSL)
#define JEMALLOC_OVERRIDE___LIBC_CALLOC
#define JEMALLOC_OVERRIDE___LIBC_FREE
#define JEMALLOC_OVERRIDE___LIBC_MALLOC
#define JEMALLOC_OVERRIDE___LIBC_MEMALIGN
#define JEMALLOC_OVERRIDE___LIBC_REALLOC
#define JEMALLOC_OVERRIDE___LIBC_VALLOC
+#endif
/* #undef JEMALLOC_OVERRIDE___POSIX_MEMALIGN */
/*

@@ -47,6 +47,7 @@ set(SRCS
)
add_library(cxx ${SRCS})
+set_target_properties(cxx PROPERTIES FOLDER "contrib/libcxx-cmake")
target_include_directories(cxx SYSTEM BEFORE PUBLIC $<BUILD_INTERFACE:${LIBCXX_SOURCE_DIR}/include>)
target_compile_definitions(cxx PRIVATE -D_LIBCPP_BUILDING_LIBRARY -DLIBCXX_BUILDING_LIBCXXABI)
@@ -56,6 +57,10 @@ if (USE_UNWIND)
target_compile_definitions(cxx PUBLIC -DSTD_EXCEPTION_HAS_STACK_TRACE=1)
endif ()
+if (USE_MUSL)
+    target_compile_definitions(cxx PUBLIC -D_LIBCPP_HAS_MUSL_LIBC=1)
+endif ()
# Override the deduced attribute support that causes error.
if (OS_DARWIN AND COMPILER_GCC)
add_compile_definitions(_LIBCPP_INIT_PRIORITY_MAX)

@@ -22,6 +22,7 @@ set(SRCS
)
add_library(cxxabi ${SRCS})
+set_target_properties(cxxabi PROPERTIES FOLDER "contrib/libcxxabi-cmake")
# Third party library may have substandard code.
target_compile_options(cxxabi PRIVATE -w)

contrib/libprotobuf-mutator vendored Submodule
@@ -0,0 +1 @@
+Subproject commit ffd86a32874e5c08a143019aad1aaf0907294c9f

@@ -0,0 +1,14 @@
set(LIBRARY_DIR ${ClickHouse_SOURCE_DIR}/contrib/libprotobuf-mutator)
add_library(protobuf-mutator
${LIBRARY_DIR}/src/libfuzzer/libfuzzer_macro.cc
${LIBRARY_DIR}/src/libfuzzer/libfuzzer_mutator.cc
${LIBRARY_DIR}/src/binary_format.cc
${LIBRARY_DIR}/src/mutator.cc
${LIBRARY_DIR}/src/text_format.cc
${LIBRARY_DIR}/src/utf8_fix.cc)
target_include_directories(protobuf-mutator BEFORE PRIVATE "${LIBRARY_DIR}")
target_include_directories(protobuf-mutator BEFORE PRIVATE "${ClickHouse_SOURCE_DIR}/contrib/protobuf/src")
target_link_libraries(protobuf-mutator ${Protobuf_LIBRARY})
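This library is only wired in when `ENABLE_FUZZING` is set (see the `contrib` CMakeLists change above). A minimal sketch of configuring such a build, assuming a clang with libFuzzer support is available; only `-DENABLE_FUZZING=1` is taken from this diff, the rest is illustrative:

``` bash
mkdir -p build-fuzz && cd build-fuzz
cmake .. -G Ninja \
    -DCMAKE_C_COMPILER=clang -DCMAKE_CXX_COMPILER=clang++ \
    -DENABLE_FUZZING=1
ninja protobuf-mutator
```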

@@ -39,6 +39,7 @@ set(LIBUNWIND_SOURCES
${LIBUNWIND_ASM_SOURCES})
add_library(unwind ${LIBUNWIND_SOURCES})
+set_target_properties(unwind PROPERTIES FOLDER "contrib/libunwind-cmake")
target_include_directories(unwind SYSTEM BEFORE PUBLIC $<BUILD_INTERFACE:${LIBUNWIND_SOURCE_DIR}/include>)
target_compile_definitions(unwind PRIVATE -D_LIBUNWIND_NO_HEAP=1 -D_DEBUG -D_LIBUNWIND_IS_NATIVE_ONLY)

@@ -98,7 +98,9 @@
#define HAVE_BCOPY 1
/* Define to 1 if you have the <bits/types.h> header file. */
+#if !defined(USE_MUSL)
#define HAVE_BITS_TYPES_H 1
+#endif
/* Define to 1 if you have the `chroot' function. */
#define HAVE_CHROOT 1

contrib/replxx vendored
@@ -1 +1 @@
-Subproject commit f97765df14f4a6236d69b8f14b53ef2051ebd95a
+Subproject commit b0c266c2d8a835784181e17292b421848c78c6b8

contrib/sysroot vendored
@@ -1 +1 @@
-Subproject commit 002415524b5d14124bb8a61a3ce7ac65774f5479
+Subproject commit 6172893931e19b028f9cabb7095a44361be863df

@@ -189,7 +189,7 @@ function clone_submodules
)
git submodule sync
-git submodule update --depth 1 --init --recursive "${SUBMODULES_TO_UPDATE[@]}"
+git submodule update --depth 1 --init "${SUBMODULES_TO_UPDATE[@]}"
git submodule foreach git reset --hard
git submodule foreach git checkout @ -f
git submodule foreach git clean -xfd

@@ -1,16 +1,22 @@
# docker build -t clickhouse/kerberized-hadoop .
FROM sequenceiq/hadoop-docker:2.7.0
-RUN sed -i -e 's/^\#baseurl/baseurl/' /etc/yum.repos.d/CentOS-Base.repo
-RUN sed -i -e 's/^mirrorlist/#mirrorlist/' /etc/yum.repos.d/CentOS-Base.repo
-RUN sed -i -e 's#http://mirror.centos.org/#http://vault.centos.org/#' /etc/yum.repos.d/CentOS-Base.repo
+RUN sed -i -e 's/^\#baseurl/baseurl/' /etc/yum.repos.d/CentOS-Base.repo && \
+    sed -i -e 's/^mirrorlist/#mirrorlist/' /etc/yum.repos.d/CentOS-Base.repo && \
+    sed -i -e 's#http://mirror.centos.org/#http://vault.centos.org/#' /etc/yum.repos.d/CentOS-Base.repo
+# https://community.letsencrypt.org/t/rhel-centos-6-openssl-client-compatibility-after-dst-root-ca-x3-expiration/161032/81
+RUN sed -i s/xMDkzMDE0MDExNVow/0MDkzMDE4MTQwM1ow/ /etc/pki/tls/certs/ca-bundle.crt
RUN yum clean all && \
    rpm --rebuilddb && \
    yum -y update && \
    yum -y install yum-plugin-ovl && \
    yum --quiet -y install krb5-workstation.x86_64
RUN cd /tmp && \
    curl http://archive.apache.org/dist/commons/daemon/source/commons-daemon-1.0.15-src.tar.gz -o commons-daemon-1.0.15-src.tar.gz && \
    tar xzf commons-daemon-1.0.15-src.tar.gz && \
    cd commons-daemon-1.0.15-src/src/native/unix && \
    ./configure && \

@@ -37,7 +37,9 @@ RUN set -x \
    || echo "WARNING: Some file was just downloaded from the internet without any validation and we are installing it into the system"; } \
    && dpkg -i "${PKG_VERSION}.deb"
-CMD echo "Running PVS version $PKG_VERSION" && cd /repo_folder && pvs-studio-analyzer credentials $LICENCE_NAME $LICENCE_KEY -o ./licence.lic \
+ENV CCACHE_DIR=/test_output/ccache
+CMD echo "Running PVS version $PKG_VERSION" && mkdir -p $CCACHE_DIR && cd /repo_folder && pvs-studio-analyzer credentials $LICENCE_NAME $LICENCE_KEY -o ./licence.lic \
    && cmake . -D"ENABLE_EMBEDDED_COMPILER"=OFF -D"USE_INTERNAL_PROTOBUF_LIBRARY"=OFF -D"USE_INTERNAL_GRPC_LIBRARY"=OFF -DCMAKE_C_COMPILER=clang-13 -DCMAKE_CXX_COMPILER=clang\+\+-13 \
    && ninja re2_st clickhouse_grpc_protos \
    && pvs-studio-analyzer analyze -o pvs-studio.log -e contrib -j 4 -l ./licence.lic; \

@@ -1,5 +1,7 @@
#!/bin/bash
+# yaml check is not the best one
cd /ClickHouse/utils/check-style || echo -e "failure\tRepo not found" > /test_output/check_status.tsv
./check-style -n |& tee /test_output/style_output.txt
./check-typos |& tee /test_output/typos_output.txt

@@ -47,13 +47,17 @@ then
fi
URL="https://builds.clickhouse.com/master/${DIR}/clickhouse"
+echo
echo "Will download ${URL}"
+echo
curl -O "${URL}" && chmod a+x clickhouse &&
+echo
echo "Successfully downloaded the ClickHouse binary, you can run it as:
    ./clickhouse"
if [ "${OS}" = "Linux" ]
then
+    echo
    echo "You can also install it:
    sudo ./clickhouse install"
fi

@@ -3,9 +3,15 @@ toc_priority: 65
toc_title: Build on Mac OS X
---
+# You don't have to build ClickHouse
+
+You can install ClickHouse as follows: https://clickhouse.com/#quick-start
+Choose Mac x86 or M1.
+
# How to Build ClickHouse on Mac OS X {#how-to-build-clickhouse-on-mac-os-x}
-Build should work on x86_64 (Intel) and arm64 (Apple Silicon) based macOS 10.15 (Catalina) and higher with recent Xcode's native AppleClang, or Homebrew's vanilla Clang or GCC compilers.
+Build should work on x86_64 (Intel) and arm64 (Apple Silicon) based macOS 10.15 (Catalina) and higher with Homebrew's vanilla Clang.
+It is always recommended to use the `clang` compiler. It is possible to use Xcode's `AppleClang` or `gcc`, but it is strongly discouraged.
## Install Homebrew {#install-homebrew}
@@ -45,18 +51,6 @@ git clone --recursive git@github.com:ClickHouse/ClickHouse.git
## Build ClickHouse {#build-clickhouse}
-To build using Xcode's native AppleClang compiler:
-
-``` bash
-cd ClickHouse
-rm -rf build
-mkdir build
-cd build
-cmake -DCMAKE_BUILD_TYPE=RelWithDebInfo ..
-cmake --build . --config RelWithDebInfo
-cd ..
-```
To build using Homebrew's vanilla Clang compiler:
``` bash
@@ -69,7 +63,19 @@ cmake --build . --config RelWithDebInfo
cd ..
```
-To build using Homebrew's vanilla GCC compiler:
+To build using Xcode's native AppleClang compiler (this option is strongly not recommended; use the option above):
+
+``` bash
+cd ClickHouse
+rm -rf build
+mkdir build
+cd build
+cmake -DCMAKE_BUILD_TYPE=RelWithDebInfo ..
+cmake --build . --config RelWithDebInfo
+cd ..
+```
+
+To build using Homebrew's vanilla GCC compiler (this option is absolutely not recommended):
``` bash
cd ClickHouse

@@ -37,7 +37,7 @@ Next, you need to download the source files onto your working machine. This is c
In the command line terminal run:
-    git clone --recursive git@github.com:your_github_username/ClickHouse.git
+    git clone git@github.com:your_github_username/ClickHouse.git
    cd ClickHouse
Note: please, substitute *your_github_username* with what is appropriate!
@@ -79,7 +79,7 @@ After successfully running this command you will be able to pull updates from th
Working with submodules in git could be painful. Next commands will help to manage it:
-    # ! each command accepts --recursive
+    # ! each command accepts
    # Update remote URLs for submodules. Barely rare case
    git submodule sync
    # Add new submodules
@@ -92,16 +92,16 @@ Working with submodules in git could be painful. Next commands will help to mana
The next commands would help you to reset all submodules to the initial state (!WARNING! - any changes inside will be deleted):
    # Synchronizes submodules' remote URL with .gitmodules
-    git submodule sync --recursive
+    git submodule sync
    # Update the registered submodules with initialize not yet initialized
-    git submodule update --init --recursive
+    git submodule update --init
    # Reset all changes done after HEAD
    git submodule foreach git reset --hard
    # Clean files from .gitignore
    git submodule foreach git clean -xfd
    # Repeat last 4 commands for all submodule
-    git submodule foreach git submodule sync --recursive
+    git submodule foreach git submodule sync
-    git submodule foreach git submodule update --init --recursive
+    git submodule foreach git submodule update --init
    git submodule foreach git submodule foreach git reset --hard
    git submodule foreach git submodule foreach git clean -xfd

@@ -36,7 +36,7 @@ CREATE TABLE [IF NOT EXISTS] [db.]table_name
Create a table in ClickHouse which allows to read data from MongoDB collection:
-``` text
+``` sql
CREATE TABLE mongo_table
(
    key UInt64,
@@ -46,7 +46,7 @@ CREATE TABLE mongo_table
To read from an SSL secured MongoDB server:
-``` text
+``` sql
CREATE TABLE mongo_table_ssl
(
    key UInt64,
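Once created, such a table is read like any other ClickHouse table. A small usage sketch, assuming the `mongo_table` definition above has been completed with a `MongoDB(...)` engine clause pointing at a reachable server:

``` bash
clickhouse-client --query "SELECT COUNT() FROM mongo_table"
clickhouse-client --query "SELECT key FROM mongo_table WHERE key = 42"
```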

@@ -320,7 +320,7 @@ SELECT count() FROM table WHERE u64 * i32 == 10 AND u64 * length(s) >= 1234
- `ngrambf_v1(n, size_of_bloom_filter_in_bytes, number_of_hash_functions, random_seed)`
-    Stores a [Bloom filter](https://en.wikipedia.org/wiki/Bloom_filter) that contains all ngrams from a block of data. Works only with strings. Can be used for optimization of `equals`, `like` and `in` expressions.
+    Stores a [Bloom filter](https://en.wikipedia.org/wiki/Bloom_filter) that contains all ngrams from a block of data. Works only with datatypes: [String](../../../sql-reference/data-types/string.md), [FixedString](../../../sql-reference/data-types/fixedstring.md) and [Map](../../../sql-reference/data-types/map.md). Can be used for optimization of `EQUALS`, `LIKE` and `IN` expressions.
    - `n` — ngram size,
    - `size_of_bloom_filter_in_bytes` — Bloom filter size in bytes (you can use large values here, for example, 256 or 512, because it can be compressed well).
@@ -337,7 +337,9 @@ SELECT count() FROM table WHERE u64 * i32 == 10 AND u64 * length(s) >= 1234
    Supported data types: `Int*`, `UInt*`, `Float*`, `Enum`, `Date`, `DateTime`, `String`, `FixedString`, `Array`, `LowCardinality`, `Nullable`, `UUID`, `Map`.
    For `Map` data type client can specify if index should be created for keys or values using [mapKeys](../../../sql-reference/functions/tuple-map-functions.md#mapkeys) or [mapValues](../../../sql-reference/functions/tuple-map-functions.md#mapvalues) function.
+    The following functions can use the filter: [equals](../../../sql-reference/functions/comparison-functions.md), [notEquals](../../../sql-reference/functions/comparison-functions.md), [in](../../../sql-reference/functions/in-functions.md), [notIn](../../../sql-reference/functions/in-functions.md), [has](../../../sql-reference/functions/array-functions.md#hasarr-elem).
    Example of index creation for `Map` data type
@@ -346,9 +348,6 @@ INDEX map_key_index mapKeys(map_column) TYPE bloom_filter GRANULARITY 1
INDEX map_key_index mapValues(map_column) TYPE bloom_filter GRANULARITY 1
```
-The following functions can use it: [equals](../../../sql-reference/functions/comparison-functions.md), [notEquals](../../../sql-reference/functions/comparison-functions.md), [in](../../../sql-reference/functions/in-functions.md), [notIn](../../../sql-reference/functions/in-functions.md), [has](../../../sql-reference/functions/array-functions.md).
-<!-- -->
``` sql
INDEX sample_index (u64 * length(s)) TYPE minmax GRANULARITY 4
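A hedged sketch of declaring and exercising an `ngrambf_v1` index end to end (the table and column names are illustrative, not from this page):

``` bash
clickhouse-client --multiquery --query "
CREATE TABLE test_ngram
(
    id UInt64,
    s String,
    INDEX s_ngram_idx s TYPE ngrambf_v1(3, 256, 2, 0) GRANULARITY 4
)
ENGINE = MergeTree ORDER BY id;
-- the filter can serve EQUALS / LIKE / IN predicates on the indexed column
SELECT count() FROM test_ngram WHERE s LIKE '%clickhouse%';
"
```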

@@ -5,7 +5,7 @@ toc_title: Distributed
# Distributed Table Engine {#distributed}
-Tables with Distributed engine do not store any data by their own, but allow distributed query processing on multiple servers.
+Tables with Distributed engine do not store any data of their own, but allow distributed query processing on multiple servers.
Reading is automatically parallelized. During a read, the table indexes on remote servers are used, if there are any.
The Distributed engine accepts parameters:
@@ -167,20 +167,20 @@ If this parameter is set to `true`, the write operation selects the first health
If it is set to `false` (the default), data is written to all replicas. In essence, this means that the Distributed table replicates data itself. This is worse than using replicated tables, because the consistency of replicas is not checked, and over time they will contain slightly different data.
-To select the shard that a row of data is sent to, the sharding expression is analyzed, and its remainder is taken from dividing it by the total weight of the shards. The row is sent to the shard that corresponds to the half-interval of the remainders from `prev_weight` to `prev_weights + weight`, where `prev_weights` is the total weight of the shards with the smallest number, and `weight` is the weight of this shard. For example, if there are two shards, and the first has a weight of 9 while the second has a weight of 10, the row will be sent to the first shard for the remainders from the range \[0, 9), and to the second for the remainders from the range \[9, 19).
+To select the shard that a row of data is sent to, the sharding expression is analyzed, and its remainder is taken from dividing it by the total weight of the shards. The row is sent to the shard that corresponds to the half-interval of the remainders from `prev_weights` to `prev_weights + weight`, where `prev_weights` is the total weight of the shards with the smallest number, and `weight` is the weight of this shard. For example, if there are two shards, and the first has a weight of 9 while the second has a weight of 10, the row will be sent to the first shard for the remainders from the range \[0, 9), and to the second for the remainders from the range \[9, 19).
The sharding expression can be any expression from constants and table columns that returns an integer. For example, you can use the expression `rand()` for random distribution of data, or `UserID` for distribution by the remainder from dividing the user's ID (then the data of a single user will reside on a single shard, which simplifies running IN and JOIN by users). If one of the columns is not distributed evenly enough, you can wrap it in a hash function: intHash64(UserID).
-A simple reminder from the division is a limited solution for sharding and isn't always appropriate. It works for medium and large volumes of data (dozens of servers), but not for very large volumes of data (hundreds of servers or more). In the latter case, use the sharding scheme required by the subject area, rather than using entries in Distributed tables.
+A simple remainder from the division is a limited solution for sharding and isn't always appropriate. It works for medium and large volumes of data (dozens of servers), but not for very large volumes of data (hundreds of servers or more). In the latter case, use the sharding scheme required by the subject area, rather than using entries in Distributed tables.
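A worked sketch of the weight arithmetic described above, for two shards with weights 9 and 10 (total weight 19; the sharding key here is illustrative):

``` bash
# Remainders in [0, 9) go to the first shard, [9, 19) to the second.
clickhouse-client --query "
SELECT
    intHash64(number) % 19 AS remainder,
    if(remainder < 9, 1, 2) AS shard
FROM numbers(5)
"
```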
-SELECT queries are sent to all the shards and work regardless of how data is distributed across the shards (they can be distributed completely randomly). When you add a new shard, you do not have to transfer the old data to it. You can write new data with a heavier weight – the data will be distributed slightly unevenly, but queries will work correctly and efficiently.
+SELECT queries are sent to all the shards and work regardless of how data is distributed across the shards (they can be distributed completely randomly). When you add a new shard, you do not have to transfer old data into it. Instead, you can write new data to it by using a heavier weight – the data will be distributed slightly unevenly, but queries will work correctly and efficiently.
You should be concerned about the sharding scheme in the following cases:
- Queries are used that require joining data (IN or JOIN) by a specific key. If data is sharded by this key, you can use local IN or JOIN instead of GLOBAL IN or GLOBAL JOIN, which is much more efficient.
- A large number of servers is used (hundreds or more) with a large number of small queries (queries of individual clients - websites, advertisers, or partners). In order for the small queries to not affect the entire cluster, it makes sense to locate data for a single client on a single shard. Alternatively, as we've done in Yandex.Metrica, you can set up bi-level sharding: divide the entire cluster into "layers", where a layer may consist of multiple shards. Data for a single client is located on a single layer, but shards can be added to a layer as necessary, and data is randomly distributed within them. Distributed tables are created for each layer, and a single shared distributed table is created for global queries.
-Data is written asynchronously. When inserted in the table, the data block is just written to the local file system. The data is sent to the remote servers in the background as soon as possible. The period for sending data is managed by the [distributed_directory_monitor_sleep_time_ms](../../../operations/settings/settings.md#distributed_directory_monitor_sleep_time_ms) and [distributed_directory_monitor_max_sleep_time_ms](../../../operations/settings/settings.md#distributed_directory_monitor_max_sleep_time_ms) settings. The `Distributed` engine sends each file with inserted data separately, but you can enable batch sending of files with the [distributed_directory_monitor_batch_inserts](../../../operations/settings/settings.md#distributed_directory_monitor_batch_inserts) setting. This setting improves cluster performance by better utilizing local server and network resources. You should check whether data is sent successfully by checking the list of files (data waiting to be sent) in the table directory: `/var/lib/clickhouse/data/database/table/`. The number of threads performing background tasks can be set by [background_distributed_schedule_pool_size](../../../operations/settings/settings.md#background_distributed_schedule_pool_size) setting.
+Data is written asynchronously. When inserted in the table, the data block is just written to the local file system. The data is sent to the remote servers in the background as soon as possible. The periodicity for sending data is managed by the [distributed_directory_monitor_sleep_time_ms](../../../operations/settings/settings.md#distributed_directory_monitor_sleep_time_ms) and [distributed_directory_monitor_max_sleep_time_ms](../../../operations/settings/settings.md#distributed_directory_monitor_max_sleep_time_ms) settings. The `Distributed` engine sends each file with inserted data separately, but you can enable batch sending of files with the [distributed_directory_monitor_batch_inserts](../../../operations/settings/settings.md#distributed_directory_monitor_batch_inserts) setting. This setting improves cluster performance by better utilizing local server and network resources. You should check whether data is sent successfully by checking the list of files (data waiting to be sent) in the table directory: `/var/lib/clickhouse/data/database/table/`. The number of threads performing background tasks can be set by [background_distributed_schedule_pool_size](../../../operations/settings/settings.md#background_distributed_schedule_pool_size) setting.
If the server ceased to exist or had a rough restart (for example, after a device failure) after an INSERT to a Distributed table, the inserted data might be lost. If a damaged data part is detected in the table directory, it is transferred to the `broken` subdirectory and no longer used.

@@ -27,7 +27,7 @@ It is recommended to use official pre-compiled `deb` packages for Debian or Ubun
{% include 'install/deb.sh' %}
```
-If you want to use the most recent version, replace `stable` with `testing` (this is recommended for your testing environments).
+You can replace `stable` with `lts` or `testing` to use different [release trains](../faq/operations/production.md) based on your needs.
You can also download and install packages manually from [here](https://repo.clickhouse.com/deb/stable/main/).

@@ -127,6 +127,9 @@ You can pass parameters to `clickhouse-client` (all parameters have a default va
- `--secure` If specified, will connect to server over secure connection.
- `--history_file` — Path to a file containing command history.
- `--param_<name>` — Value for a [query with parameters](#cli-queries-with-parameters).
+- `--hardware-utilization` — Print hardware utilization information in progress bar.
+- `--print-profile-events` Print `ProfileEvents` packets.
+- `--profile-events-delay-ms` Delay between printing `ProfileEvents` packets (-1 - print only totals, 0 - print every single packet).
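A hedged usage sketch for parameterized queries via `--param_<name>` (the parameter name and value are illustrative):

``` bash
# {uid:UInt64} in the query text is substituted from --param_uid.
clickhouse-client --param_uid=42 \
    --query "SELECT {uid:UInt64} AS uid, toTypeName({uid:UInt64})"
```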
Since version 20.5, `clickhouse-client` has automatic syntax highlighting (always enabled). Since version 20.5, `clickhouse-client` has automatic syntax highlighting (always enabled).

View File

@ -22,10 +22,13 @@ toc_title: Adopters
| <a href="https://apiroad.net/" class="favicon">ApiRoad</a> | API marketplace | Analytics | — | — | [Blog post, Nov 2018, Mar 2020](https://pixeljets.com/blog/clickhouse-vs-elasticsearch/) | | <a href="https://apiroad.net/" class="favicon">ApiRoad</a> | API marketplace | Analytics | — | — | [Blog post, Nov 2018, Mar 2020](https://pixeljets.com/blog/clickhouse-vs-elasticsearch/) |
| <a href="https://www.appsflyer.com" class="favicon">Appsflyer</a> | Mobile analytics | Main product | — | — | [Talk in Russian, July 2019](https://www.youtube.com/watch?v=M3wbRlcpBbY) | | <a href="https://www.appsflyer.com" class="favicon">Appsflyer</a> | Mobile analytics | Main product | — | — | [Talk in Russian, July 2019](https://www.youtube.com/watch?v=M3wbRlcpBbY) |
| <a href="https://arenadata.tech/" class="favicon">ArenaData</a> | Data Platform | Main product | — | — | [Slides in Russian, December 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup38/indexes.pdf) | | <a href="https://arenadata.tech/" class="favicon">ArenaData</a> | Data Platform | Main product | — | — | [Slides in Russian, December 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup38/indexes.pdf) |
| <a href="https://www.argedor.com/en/clickhouse/" class="favicon">Argedor</a> | ClickHouse support | — | — | — | [Official website](https://www.argedor.com/en/clickhouse/) |
| <a href="https://avito.ru/" class="favicon">Avito</a> | Classifieds | Monitoring | — | — | [Meetup, April 2020](https://www.youtube.com/watch?v=n1tm4j4W8ZQ) | | <a href="https://avito.ru/" class="favicon">Avito</a> | Classifieds | Monitoring | — | — | [Meetup, April 2020](https://www.youtube.com/watch?v=n1tm4j4W8ZQ) |
| <a href="https://badoo.com" class="favicon">Badoo</a> | Dating | Timeseries | — | — | [Slides in Russian, December 2019](https://presentations.clickhouse.com/meetup38/forecast.pdf) | | <a href="https://badoo.com" class="favicon">Badoo</a> | Dating | Timeseries | — | — | [Slides in Russian, December 2019](https://presentations.clickhouse.com/meetup38/forecast.pdf) |
| <a href="https://beeline.ru/" class="favicon">Beeline</a> | Telecom | Data Platform | — | — | [Blog post, July 2021](https://habr.com/en/company/beeline/blog/567508/) |
| <a href="https://www.benocs.com/" class="favicon">Benocs</a> | Network Telemetry and Analytics | Main Product | — | — | [Slides in English, October 2017](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup9/lpm.pdf) | | <a href="https://www.benocs.com/" class="favicon">Benocs</a> | Network Telemetry and Analytics | Main Product | — | — | [Slides in English, October 2017](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup9/lpm.pdf) |
| <a href="https://www.bigo.sg/" class="favicon">BIGO</a> | Video | Computing Platform | — | — | [Blog Article, August 2020](https://www.programmersought.com/article/44544895251/) | | <a href="https://www.bigo.sg/" class="favicon">BIGO</a> | Video | Computing Platform | — | — | [Blog Article, August 2020](https://www.programmersought.com/article/44544895251/) |
| <a href="https://www.bilibili.com/" class="favicon">BiliBili</a> | Video sharing | — | — | — | [Blog post, June 2021](https://chowdera.com/2021/06/20210622012241476b.html) |
| <a href="https://www.bloomberg.com/">Bloomberg</a> | Finance, Media | Monitoring | — | — | [Slides, May 2018](https://www.slideshare.net/Altinity/http-analytics-for-6m-requests-per-second-using-clickhouse-by-alexander-bocharov) | | <a href="https://www.bloomberg.com/">Bloomberg</a> | Finance, Media | Monitoring | — | — | [Slides, May 2018](https://www.slideshare.net/Altinity/http-analytics-for-6m-requests-per-second-using-clickhouse-by-alexander-bocharov) |
| <a href="https://bloxy.info" class="favicon">Bloxy</a> | Blockchain | Analytics | — | — | [Slides in Russian, August 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup17/4_bloxy.pptx) | | <a href="https://bloxy.info" class="favicon">Bloxy</a> | Blockchain | Analytics | — | — | [Slides in Russian, August 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup17/4_bloxy.pptx) |
| <a href="https://www.bytedance.com" class="favicon">Bytedance</a> | Social platforms | — | — | — | [The ClickHouse Meetup East, October 2020](https://www.youtube.com/watch?v=ckChUkC3Pns) | | <a href="https://www.bytedance.com" class="favicon">Bytedance</a> | Social platforms | — | — | — | [The ClickHouse Meetup East, October 2020](https://www.youtube.com/watch?v=ckChUkC3Pns) |
@ -33,6 +36,7 @@ toc_title: Adopters
| <a href="https://carto.com/" class="favicon">CARTO</a> | Business Intelligence | Geo analytics | — | — | [Geospatial processing with ClickHouse](https://carto.com/blog/geospatial-processing-with-clickhouse/) |
| <a href="http://public.web.cern.ch/public/" class="favicon">CERN</a> | Research | Experiment | — | — | [Press release, April 2012](https://www.yandex.com/company/press_center/press_releases/2012/2012-04-10/) |
| <a href="https://www.checklyhq.com/" class="favicon">Checkly</a> | Software Development | Analytics | — | — | [Tweet, October 2021](https://twitter.com/tim_nolet/status/1445810665743081474?s=20) |
| <a href="https://chelpipegroup.com/" class="favicon">ChelPipe Group</a> | Analytics | — | — | — | [Blog post, June 2021](https://vc.ru/trade/253172-tyazhelomu-proizvodstvu-user-friendly-sayt-internet-magazin-trub-dlya-chtpz) |
| <a href="http://cisco.com/" class="favicon">Cisco</a> | Networking | Traffic analysis | — | — | [Lightning talk, October 2019](https://youtu.be/-hI1vDR2oPY?t=5057) |
| <a href="https://www.citadelsecurities.com/" class="favicon">Citadel Securities</a> | Finance | — | — | — | [Contribution, March 2019](https://github.com/ClickHouse/ClickHouse/pull/4774) |
| <a href="https://city-mobil.ru" class="favicon">Citymobil</a> | Taxi | Analytics | — | — | [Blog Post in Russian, March 2020](https://habr.com/en/company/citymobil/blog/490660/) |
@ -48,6 +52,7 @@ toc_title: Adopters
| <a href="https://db.com" class="favicon">Deutsche Bank</a> | Finance | BI Analytics | — | — | [Slides in English, October 2019](https://bigdatadays.ru/wp-content/uploads/2019/10/D2-H3-3_Yakunin-Goihburg.pdf) |
| <a href="https://deeplay.io/eng/" class="favicon">Deeplay</a> | Gaming Analytics | — | — | — | [Job advertisement, 2020](https://career.habr.com/vacancies/1000062568) |
| <a href="https://www.diva-e.com" class="favicon">Diva-e</a> | Digital consulting | Main Product | — | — | [Slides in English, September 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup29/ClickHouse-MeetUp-Unusual-Applications-sd-2019-09-17.pdf) |
| <a href="https://ecommpay.com/" class="favicon">Ecommpay</a> | Payment Processing | Logs | — | — | [Video, Nov 2019](https://www.youtube.com/watch?v=d3GdZTOWGLk) |
| <a href="https://www.ecwid.com/" class="favicon">Ecwid</a> | E-commerce SaaS | Metrics, Logging | — | — | [Slides in Russian, April 2019](https://nastachku.ru/var/files/1/presentation/backend/2_Backend_6.pdf) |
| <a href="https://www.ebay.com/" class="favicon">eBay</a> | E-commerce | Logs, Metrics and Events | — | — | [Official website, Sep 2020](https://tech.ebayinc.com/engineering/ou-online-analytical-processing/) |
| <a href="https://www.exness.com/" class="favicon">Exness</a> | Trading | Metrics, Logging | — | — | [Talk in Russian, May 2019](https://youtu.be/_rpU-TvSfZ8?t=3215) |
@ -57,9 +62,11 @@ toc_title: Adopters
| <a href="https://fun.co/rp" class="favicon">FunCorp</a> | Games | | — | 14 bn records/day as of Jan 2021 | [Article](https://www.altinity.com/blog/migrating-from-redshift-to-clickhouse) |
| <a href="https://geniee.co.jp" class="favicon">Geniee</a> | Ad network | Main product | — | — | [Blog post in Japanese, July 2017](https://tech.geniee.co.jp/entry/2017/07/20/160100) |
| <a href="https://www.genotek.ru/" class="favicon">Genotek</a> | Bioinformatics | Main product | — | — | [Video, August 2020](https://youtu.be/v3KyZbz9lEE) |
| <a href="https://gigapipe.com/" class="favicon">Gigapipe</a> | Managed ClickHouse | Main product | — | — | [Official website](https://gigapipe.com/) |
| <a href="https://glaber.io/" class="favicon">Glaber</a> | Monitoring | Main product | — | — | [Website](https://glaber.io/) |
| <a href="https://graphcdn.io/" class="favicon">GraphCDN</a> | CDN | Traffic Analytics | — | — | [Blog Post in English, August 2021](https://altinity.com/blog/delivering-insight-on-graphql-apis-with-clickhouse-at-graphcdn/) |
| <a href="https://www.huya.com/" class="favicon">HUYA</a> | Video Streaming | Analytics | — | — | [Slides in Chinese, October 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup19/7.%20ClickHouse万亿数据分析实践%20李本旺(sundy-li)%20虎牙.pdf) |
| <a href="https://www.hydrolix.io/" class="favicon">Hydrolix</a> | Cloud data platform | Main product | — | — | [Documentation](https://docs.hydrolix.io/guide/query) |
| <a href="https://www.the-ica.com/" class="favicon">ICA</a> | FinTech | Risk Management | — | — | [Blog Post in English, Sep 2020](https://altinity.com/blog/clickhouse-vs-redshift-performance-for-fintech-risk-management?utm_campaign=ClickHouse%20vs%20RedShift&utm_content=143520807&utm_medium=social&utm_source=twitter&hss_channel=tw-3894792263) |
| <a href="https://www.idealista.com" class="favicon">Idealista</a> | Real Estate | Analytics | — | — | [Blog Post in English, April 2019](https://clickhouse.com/blog/en/clickhouse-meetup-in-madrid-on-april-2-2019) |
| <a href="https://infobaleen.com" class="favicon">Infobaleen</a> | AI marketing tool | Analytics | — | — | [Official site](https://infobaleen.com) |
@ -71,9 +78,11 @@ toc_title: Adopters
| <a href="https://ippon.tech" class="favicon">Ippon Technologies</a> | Technology Consulting | — | — | — | [Talk in English, July 2020](https://youtu.be/GMiXCMFDMow?t=205) |
| <a href="https://www.ivi.ru/" class="favicon">Ivi</a> | Online Cinema | Analytics, Monitoring | — | — | [Article in Russian, Jan 2018](https://habr.com/en/company/ivi/blog/347408/) |
| <a href="https://jinshuju.net" class="favicon">Jinshuju 金数据</a> | BI Analytics | Main product | — | — | [Slides in Chinese, October 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup24/3.%20金数据数据架构调整方案Public.pdf) |
| <a href="https://www.kakaocorp.com/" class="favicon">kakaocorp</a> | Internet company | — | — | — | [if(kakao)2020 conference](https://if.kakao.com/session/117) |
| <a href="https://www.kodiakdata.com/" class="favicon">Kodiak Data</a> | Clouds | Main product | — | — | [Slides in English, April 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup13/kodiak_data.pdf) |
| <a href="https://kontur.ru" class="favicon">Kontur</a> | Software Development | Metrics | — | — | [Talk in Russian, November 2018](https://www.youtube.com/watch?v=U4u4Bd0FtrY) |
| <a href="https://www.kuaishou.com/" class="favicon">Kuaishou</a> | Video | — | — | — | [ClickHouse Meetup, October 2018](https://clickhouse.com/blog/en/2018/clickhouse-community-meetup-in-beijing-on-october-28-2018/) |
| <a href="https://www.kgk-global.com/en/" class="favicon">KGK Global</a> | Vehicle monitoring | — | — | — | [Press release, June 2021](https://zoom.cnews.ru/news/item/530921) |
| <a href="https://www.lbl.gov" class="favicon">Lawrence Berkeley National Laboratory</a> | Research | Traffic analysis | 1 server | 11.8 TiB | [Slides in English, April 2019](https://www.smitasin.com/presentations/2019-04-17_DOE-NSM.pdf) |
| <a href="https://lifestreet.com/" class="favicon">LifeStreet</a> | Ad network | Main product | 75 servers (3 replicas) | 5.27 PiB | [Blog post in Russian, February 2017](https://habr.com/en/post/322620/) |
| <a href="https://mcs.mail.ru/" class="favicon">Mail.ru Cloud Solutions</a> | Cloud services | Main product | — | — | [Article in Russian](https://mcs.mail.ru/help/db-create/clickhouse#) |
@ -88,7 +97,10 @@ toc_title: Adopters
| <a href="https://www.netskope.com/" class="favicon">Netskope</a> | Network Security | — | — | — | [Job advertisement, March 2021](https://www.mendeley.com/careers/job/senior-software-developer-backend-developer-1346348) |
| <a href="https://niclabs.cl/" class="favicon">NIC Labs</a> | Network Monitoring | RaTA-DNS | — | — | [Blog post, March 2021](https://niclabs.cl/ratadns/2021/03/Clickhouse) |
| <a href="https://getnoc.com/" class="favicon">NOC Project</a> | Network Monitoring | Analytics | Main Product | — | [Official Website](https://getnoc.com/features/big-data/) |
| <a href="https://www.noction.com" class="favicon">Noction</a> | Network Technology | Main Product | — | — | [Official Website](https://www.noction.com/news/irp-3-11-remote-triggered-blackholing-capability) |
| <a href="https://www.nuna.com/" class="favicon">Nuna Inc.</a> | Health Data Analytics | — | — | — | [Talk in English, July 2020](https://youtu.be/GMiXCMFDMow?t=170) |
| <a href="https://ok.ru" class="favicon">Ok.ru</a> | Social Network | — | 72 servers | 810 TB compressed, 50bn rows/day, 1.5 TB/day | [SmartData conference, Oct 2021](https://assets.ctfassets.net/oxjq45e8ilak/4JPHkbJenLgZhBGGyyonFP/57472ec6987003ec4078d0941740703b/____________________ClickHouse_______________________.pdf) |
| <a href="https://omnicomm.ru/" class="favicon">Omnicomm</a> | Transportation Monitoring | — | — | — | [Facebook post, Oct 2021](https://www.facebook.com/OmnicommTeam/posts/2824479777774500) |
| <a href="https://www.oneapm.com/" class="favicon">OneAPM</a> | Monitoring and Data Analysis | Main product | — | — | [Slides in Chinese, October 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup19/8.%20clickhouse在OneAPM的应用%20杜龙.pdf) |
| <a href="https://corp.ozon.com/" class="favicon">OZON</a> | E-commerce | — | — | — | [Official website](https://job.ozon.ru/vacancy/razrabotchik-clickhouse-ekspluatatsiya-40991870/) |
| <a href="https://panelbear.com/" class="favicon">Panelbear</a> | Analytics | Monitoring and Analytics | — | — | [Tech Stack, November 2020](https://panelbear.com/blog/tech-stack/) |
@ -103,6 +115,7 @@ toc_title: Adopters
| <a href="https://qrator.net" class="favicon">Qrator</a> | DDoS protection | Main product | — | — | [Blog Post, March 2019](https://blog.qrator.net/en/clickhouse-ddos-mitigation_37/) |
| <a href="https://www.rbinternational.com/" class="favicon">Raiffeisenbank</a> | Banking | Analytics | — | — | [Lecture in Russian, December 2020](https://cs.hse.ru/announcements/421965599.html) |
| <a href="https://rambler.ru" class="favicon">Rambler</a> | Internet services | Analytics | — | — | [Talk in Russian, April 2018](https://medium.com/@ramblertop/разработка-api-clickhouse-для-рамблер-топ-100-f4c7e56f3141) |
| <a href="https://replicahq.com" class="favicon">Replica</a> | Urban Planning | Analytics | — | — | [Job advertisement](https://boards.greenhouse.io/replica/jobs/5547732002?gh_jid=5547732002) |
| <a href="https://retell.cc/" class="favicon">Retell</a> | Speech synthesis | Analytics | — | — | [Blog Article, August 2020](https://vc.ru/services/153732-kak-sozdat-audiostati-na-vashem-sayte-i-zachem-eto-nuzhno) |
| <a href="https://www.rollbar.com" class="favicon">Rollbar</a> | Software Development | Main Product | — | — | [Official Website](https://www.rollbar.com) |
| <a href="https://rspamd.com/" class="favicon">Rspamd</a> | Antispam | Analytics | — | — | [Official Website](https://rspamd.com/doc/modules/clickhouse.html) |
@ -116,6 +129,7 @@ toc_title: Adopters
| <a href="https://sentry.io/" class="favicon">Sentry</a> | Software Development | Main product | — | — | [Blog Post in English, May 2019](https://blog.sentry.io/2019/05/16/introducing-snuba-sentrys-new-search-infrastructure) |
| <a href="https://seo.do/" class="favicon">seo.do</a> | Analytics | Main product | — | — | [Slides in English, November 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup35/CH%20Presentation-%20Metehan%20Çetinkaya.pdf) |
| <a href="http://www.sgk.gov.tr/wps/portal/sgk/tr" class="favicon">SGK</a> | Government Social Security | Analytics | — | — | [Slides in English, November 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup35/ClickHouse%20Meetup-Ramazan%20POLAT.pdf) |
| <a href="https://signoz.io/" class="favicon">SigNoz</a> | Observability Platform | Main Product | — | — | [Source code](https://github.com/SigNoz/signoz) |
| <a href="http://english.sina.com/index.html" class="favicon">Sina</a> | News | — | — | — | [Slides in Chinese, October 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup19/6.%20ClickHouse最佳实践%20高鹏_新浪.pdf) |
| <a href="https://www.sipfront.com/" class="favicon">Sipfront</a> | Software Development | Analytics | — | — | [Tweet, October 2021](https://twitter.com/andreasgranig/status/1446404332337913895?s=20) |
| <a href="https://smi2.ru/" class="favicon">SMI2</a> | News | Analytics | — | — | [Blog Post in Russian, November 2017](https://habr.com/ru/company/smi2/blog/314558/) |
@ -129,12 +143,14 @@ toc_title: Adopters
| <a href="https://www.tencent.com" class="favicon">Tencent</a> | Big Data | Data processing | — | — | [Slides in Chinese, October 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup19/5.%20ClickHouse大数据集群应用_李俊飞腾讯网媒事业部.pdf) |
| <a href="https://www.tencent.com" class="favicon">Tencent</a> | Messaging | Logging | — | — | [Talk in Chinese, November 2019](https://youtu.be/T-iVQRuw-QY?t=5050) |
| <a href="https://www.tencentmusic.com/" class="favicon">Tencent Music Entertainment (TME)</a> | BigData | Data processing | — | — | [Blog in Chinese, June 2020](https://cloud.tencent.com/developer/article/1637840) |
| <a href="https://www.tesla.com/" class="favicon">Tesla</a> | Electric vehicle and clean energy company | — | — | — | [Vacancy description, March 2021](https://news.ycombinator.com/item?id=26306170) |
| <a href="https://timeflow.systems" class="favicon">Timeflow</a> | Software | Analytics | — | — | [Blog](https://timeflow.systems/why-we-moved-from-druid-to-clickhouse/) |
| <a href="https://www.tinybird.co/" class="favicon">Tinybird</a> | Real-time Data Products | Data processing | — | — | [Official website](https://www.tinybird.co/) |
| <a href="https://trafficstars.com/" class="favicon">Traffic Stars</a> | AD network | — | 300 servers in Europe/US | 1.8 PiB, 700 000 insert rps (as of 2021) | [Slides in Russian, May 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup15/lightning/ninja.pdf) |
| <a href="https://www.uber.com" class="favicon">Uber</a> | Taxi | Logging | — | — | [Slides, February 2020](https://presentations.clickhouse.com/meetup40/uber.pdf) |
| <a href="https://hello.utmstat.com/" class="favicon">UTMSTAT</a> | Analytics | Main product | — | — | [Blog post, June 2020](https://vc.ru/tribuna/133956-striming-dannyh-iz-servisa-skvoznoy-analitiki-v-clickhouse) |
| <a href="https://vk.com" class="favicon">VKontakte</a> | Social Network | Statistics, Logging | — | — | [Slides in Russian, August 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup17/3_vk.pdf) |
| <a href="https://www.vmware.com/" class="favicon">VMware</a> | Cloud | VeloCloud, SDN | — | — | [Product documentation](https://docs.vmware.com/en/vRealize-Operations-Manager/8.3/com.vmware.vcom.metrics.doc/GUID-A9AD72E1-C948-4CA2-971B-919385AB3CA8.html) |
| <a href="https://www.walmartlabs.com/" class="favicon">Walmart Labs</a> | Internet, Retail | — | — | — | [Talk in English, July 2020](https://youtu.be/GMiXCMFDMow?t=144) |
| <a href="https://wargaming.com/en/" class="favicon">Wargaming</a> | Games | | — | — | [Interview](https://habr.com/en/post/496954/) |
| <a href="https://www.wildberries.ru/" class="favicon">Wildberries</a> | E-commerce | | — | — | [Official website](https://it.wildberries.ru/) |
@ -148,23 +164,11 @@ toc_title: Adopters
| <a href="https://market.yandex.ru/" class="favicon">Yandex Market</a> | e-Commerce | Metrics, Logging | — | — | [Talk in Russian, January 2019](https://youtu.be/_l1qP0DyBcA?t=478) |
| <a href="https://metrica.yandex.com" class="favicon">Yandex Metrica</a> | Web analytics | Main product | 630 servers in one cluster, 360 servers in another cluster, 1862 servers in one department | 133 PiB / 8.31 PiB / 120 trillion records | [Slides, February 2020](https://presentations.clickhouse.com/meetup40/introduction/#13) |
| <a href="https://www.yotascale.com/" class="favicon">Yotascale</a> | Cloud | Data pipeline | — | 2 bn records/day | [LinkedIn (Accomplishments)](https://www.linkedin.com/in/adilsaleem/) |
| <a href="https://zagravagames.com/en/" class="favicon">Zagrava Trading</a> | — | — | — | — | [Job offer, May 2021](https://twitter.com/datastackjobs/status/1394707267082063874) |
| <a href="https://htc-cs.ru/" class="favicon">ЦВТ</a> | Software Development | Metrics, Logging | — | — | [Blog Post, March 2019, in Russian](https://vc.ru/dev/62715-kak-my-stroili-monitoring-na-prometheus-clickhouse-i-elk) |
| <a href="https://mkb.ru/" class="favicon">МКБ</a> | Bank | Web-system monitoring | — | — | [Slides in Russian, September 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup28/mkb.pdf) |
| <a href="https://cft.ru/" class="favicon">ЦФТ</a> | Banking, Financial products, Payments | — | — | — | [Meetup in Russian, April 2020](https://team.cft.ru/events/162) |
| <a href="https://promo.croc.ru/digitalworker" class="favicon">Цифровой Рабочий</a> | Industrial IoT, Analytics | — | — | — | [Blog post in Russian, March 2021](https://habr.com/en/company/croc/blog/548018/) |
| <a href="https://shop.okraina.ru/" class="favicon">ООО «МПЗ Богородский»</a> | Agriculture | — | — | — | [Article in Russian, November 2020](https://cloud.yandex.ru/cases/okraina) |
[Original article](https://clickhouse.com/docs/en/introduction/adopters/) <!--hide-->


@ -643,7 +643,7 @@ On hosts with low RAM and swap, you possibly need setting `max_server_memory_usa
## max_concurrent_queries {#max-concurrent-queries}
The maximum number of simultaneously processed queries related to MergeTree tables. Queries may be limited by other settings: [max_concurrent_queries_for_user](#max-concurrent-queries-for-user), [max_concurrent_queries_for_all_users](#max-concurrent-queries-for-all-users), [min_marks_to_honor_max_concurrent_queries](#min-marks-to-honor-max-concurrent-queries).
!!! info "Note"
These settings can be modified at runtime and will take effect immediately. Queries that are already running will remain unchanged.
@ -659,6 +659,21 @@ Possible values:
<max_concurrent_queries>100</max_concurrent_queries>
```
## max_concurrent_queries_for_user {#max-concurrent-queries-for-user}
The maximum number of simultaneously processed queries related to MergeTree tables per user.
Possible values:
- Positive integer.
- 0 — Disabled.
**Example**
``` xml
<max_concurrent_queries_for_user>5</max_concurrent_queries_for_user>
```
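To judge whether a user is close to this limit, one option is to count that user's currently running queries in the built-in `system.processes` table. A minimal sketch (the grouping is only for illustration, not part of the setting itself):

``` sql
-- Number of currently running queries per user; compare with max_concurrent_queries_for_user.
SELECT user, count() AS running_queries
FROM system.processes
GROUP BY user
```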
## max_concurrent_queries_for_all_users {#max-concurrent-queries-for-all-users}
Throws an exception if the value of this setting is less than or equal to the current number of simultaneously processed queries.


@ -10,7 +10,7 @@ Columns:
- `[]` — All users share the same quota.
- `['user_name']` — Connections with the same user name share the same quota.
- `['ip_address']` — Connections from the same IP share the same quota.
- `['client_key']` — Connections with the same key share the same quota. A key must be explicitly provided by a client. When using [clickhouse-client](../../interfaces/cli.md), pass a key value in the `--quota_key` parameter, or use the `quota_key` parameter in the client configuration file. When using the HTTP interface, use the `X-ClickHouse-Quota` header.
- `['user_name', 'client_key']` — Connections with the same `client_key` share the same quota. If a key isn't provided by a client, the quota is tracked for `user_name`.
- `['client_key', 'ip_address']` — Connections with the same `client_key` share the same quota. If a key isn't provided by a client, the quota is tracked for `ip_address`.
- `durations` ([Array](../../sql-reference/data-types/array.md)([UInt64](../../sql-reference/data-types/int-uint.md))) — Time interval lengths in seconds.
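For example, the configured key types and interval lengths can be inspected with a simple query (a minimal sketch):

``` sql
-- Show each quota with its key types and interval lengths in seconds.
SELECT name, keys, durations
FROM system.quotas
```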


@ -70,7 +70,7 @@ For HDD, enable the write cache.
## File System {#file-system}
Ext4 is the most reliable option. Set the mount options `noatime`.
XFS is also suitable, but it hasn't been as thoroughly tested with ClickHouse.
Most other file systems should also work fine. File systems with delayed allocation work better.


@ -53,15 +53,17 @@ LAYOUT(LAYOUT_TYPE(param value)) -- layout settings
- [flat](#flat)
- [hashed](#dicts-external_dicts_dict_layout-hashed)
- [sparse_hashed](#dicts-external_dicts_dict_layout-sparse_hashed)
- [complex_key_hashed](#complex-key-hashed)
- [complex_key_sparse_hashed](#complex-key-sparse-hashed)
- [hashed_array](#dicts-external_dicts_dict_layout-hashed-array)
- [complex_key_hashed_array](#complex-key-hashed-array)
- [range_hashed](#range-hashed)
- [complex_key_range_hashed](#complex-key-range-hashed)
- [cache](#cache)
- [complex_key_cache](#complex-key-cache)
- [ssd_cache](#ssd-cache)
- [complex_key_ssd_cache](#complex-key-ssd-cache)
- [direct](#direct)
- [complex_key_direct](#complex-key-direct)
- [ip_trie](#ip-trie)
@ -151,10 +153,69 @@ Configuration example:
</layout>
```
or
``` sql
LAYOUT(COMPLEX_KEY_HASHED())
```
### complex_key_sparse_hashed {#complex-key-sparse-hashed}
This type of storage is for use with composite [keys](../../../sql-reference/dictionaries/external-dictionaries/external-dicts-dict-structure.md). Similar to [sparse_hashed](#dicts-external_dicts_dict_layout-sparse_hashed).
Configuration example:
``` xml
<layout>
<complex_key_sparse_hashed />
</layout>
```
or
``` sql
LAYOUT(COMPLEX_KEY_SPARSE_HASHED())
```
### hashed_array {#dicts-external_dicts_dict_layout-hashed-array}
The dictionary is completely stored in memory. Each attribute is stored in an array. The key attribute is stored in the form of a hash table, where the value is an index into the attributes array. The dictionary can contain any number of elements with any identifiers. In practice, the number of keys can reach tens of millions of items.
All types of sources are supported. When updating, data (from a file or from a table) is read in its entirety.
Configuration example:
``` xml
<layout>
<hashed_array>
</hashed_array>
</layout>
```
or
``` sql
LAYOUT(HASHED_ARRAY())
```
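As a sketch of how this layout fits into the DDL form, a simple-key dictionary might be declared as follows (the dictionary name, source table, and lifetime values are hypothetical):

``` sql
CREATE DICTIONARY country_dict
(
    id UInt64,
    name String
)
PRIMARY KEY id
SOURCE(CLICKHOUSE(TABLE 'countries'))
LAYOUT(HASHED_ARRAY())
LIFETIME(MIN 300 MAX 600)
```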
### complex_key_hashed_array {#complex-key-hashed-array}
This type of storage is for use with composite [keys](../../../sql-reference/dictionaries/external-dictionaries/external-dicts-dict-structure.md). Similar to [hashed_array](#dicts-external_dicts_dict_layout-hashed-array).
Configuration example:
``` xml
<layout>
<complex_key_hashed_array />
</layout>
```
or
``` sql
LAYOUT(COMPLEX_KEY_HASHED_ARRAY())
```
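A composite-key variant of the same sketch (again with hypothetical names); note that both key columns are listed in `PRIMARY KEY`:

``` sql
CREATE DICTIONARY user_region_dict
(
    user_id UInt64,
    region String,
    score Float64
)
PRIMARY KEY user_id, region
SOURCE(CLICKHOUSE(TABLE 'user_regions'))
LAYOUT(COMPLEX_KEY_HASHED_ARRAY())
LIFETIME(MIN 300 MAX 600)
```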
### range_hashed {#range-hashed}
The dictionary is stored in memory in the form of a hash table with an ordered array of ranges and their corresponding values.
@ -302,8 +363,9 @@ When searching for a dictionary, the cache is searched first. For each block of
If keys are not found in the dictionary, an update-cache task is created and added to the update queue. Update queue properties can be controlled with the settings `max_update_queue_size`, `update_queue_push_timeout_milliseconds`, `query_wait_timeout_milliseconds`, `max_threads_for_updates`.
For cache dictionaries, the expiration [lifetime](../../../sql-reference/dictionaries/external-dictionaries/external-dicts-dict-lifetime.md) of data in the cache can be set. If more time than `lifetime` has passed since loading the data in a cell, the cell's value is not used and the key becomes expired. The key is re-requested the next time it needs to be used. This behaviour can be configured with the setting `allow_read_expired_keys`.
This is the least effective of all the ways to store dictionaries. The speed of the cache depends strongly on correct settings and the usage scenario. A cache type dictionary performs well only when the hit rates are high enough (recommended 99% and higher). You can view the average hit rate in the [system.dictionaries](../../../operations/system-tables/dictionaries.md) table.
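For example, the hit rate can be checked with a query like this (a minimal sketch):

``` sql
-- Average cache hit rate per dictionary; values close to 1 indicate a well-sized cache.
SELECT name, type, hit_rate
FROM system.dictionaries
```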
If the setting `allow_read_expired_keys` is set to 1 (0 by default), the dictionary supports asynchronous updates. If a client requests keys and all of them are in the cache but some of them are expired, the dictionary returns the expired keys to the client and requests them asynchronously from the source.
@ -368,7 +430,7 @@ Similar to `cache`, but stores data on SSD and index in RAM. All cache dictionar
<!-- Size of RAM buffer in bytes for aggregating elements before flushing to SSD. -->
<write_buffer_size>1048576</write_buffer_size>
<!-- Path where cache file will be stored. -->
<path>/var/lib/clickhouse/user_files/test_dict</path>
</ssd_cache>
</layout>
```
@ -377,7 +439,7 @@ or
``` sql
LAYOUT(SSD_CACHE(BLOCK_SIZE 4096 FILE_SIZE 16777216 READ_BUFFER_SIZE 1048576
    PATH '/var/lib/clickhouse/user_files/test_dict'))
```
### complex_key_ssd_cache {#complex-key-ssd-cache}
@ -491,4 +553,3 @@ dictGetString('prefix', 'asn', tuple(IPv6StringToNum('2001:db8::1')))
Other types are not supported yet. The function returns the attribute for the prefix that corresponds to this IP address. If there are overlapping prefixes, the most specific one is returned.
Data must completely fit into RAM.


@ -2427,3 +2427,39 @@ Type: [UInt32](../../sql-reference/data-types/int-uint.md).
**See Also**
- The [shardNum()](#shard-num) function example also contains a `shardCount()` function call.
## getOSKernelVersion {#getoskernelversion}
Returns a string with the current OS kernel version.
**Syntax**
``` sql
getOSKernelVersion()
```
**Arguments**
- None.
**Returned value**
- The current OS kernel version.
Type: [String](../../sql-reference/data-types/string.md).
**Example**
Query:
``` sql
SELECT getOSKernelVersion();
```
Result:
``` text
┌─getOSKernelVersion()────┐
│ Linux 4.15.0-55-generic │
└─────────────────────────┘
```
