Merge branch 'master' of github.com:ClickHouse/ClickHouse into disk-async-read
Commit 39b9e9c258
53  .github/workflows/main.yml  vendored  Normal file
@@ -0,0 +1,53 @@
+name: Lightweight GithubActions
+on: # yamllint disable-line rule:truthy
+  pull_request:
+    types:
+      - labeled
+      - unlabeled
+      - synchronize
+      - reopened
+      - opened
+    branches:
+      - master
+jobs:
+  CheckLabels:
+    runs-on: [self-hosted]
+    steps:
+      - name: Check out repository code
+        uses: actions/checkout@v2
+      - name: Labels check
+        run: cd $GITHUB_WORKSPACE/tests/ci && python3 run_check.py
+  DockerHubPush:
+    needs: CheckLabels
+    runs-on: [self-hosted]
+    steps:
+      - name: Check out repository code
+        uses: actions/checkout@v2
+      - name: Images check
+        run: cd $GITHUB_WORKSPACE/tests/ci && python3 docker_images_check.py
+      - name: Upload images files to artifacts
+        uses: actions/upload-artifact@v2
+        with:
+          name: changed_images
+          path: ${{ runner.temp }}/docker_images_check/changed_images.json
+  StyleCheck:
+    needs: DockerHubPush
+    runs-on: [self-hosted]
+    steps:
+      - name: Download changed images
+        uses: actions/download-artifact@v2
+        with:
+          name: changed_images
+          path: ${{ runner.temp }}/style_check
+      - name: Check out repository code
+        uses: actions/checkout@v2
+      - name: Style Check
+        run: cd $GITHUB_WORKSPACE/tests/ci && python3 style_check.py
+  FinishCheck:
+    needs: [StyleCheck, DockerHubPush, CheckLabels]
+    runs-on: [self-hosted]
+    steps:
+      - name: Check out repository code
+        uses: actions/checkout@v2
+      - name: Finish label
+        run: cd $GITHUB_WORKSPACE/tests/ci && python3 finish_check.py
2  .gitmodules  vendored
@@ -140,7 +140,7 @@
 	url = https://github.com/ClickHouse-Extras/libc-headers.git
 [submodule "contrib/replxx"]
 	path = contrib/replxx
-	url = https://github.com/ClickHouse-Extras/replxx.git
+	url = https://github.com/AmokHuginnsson/replxx.git
 [submodule "contrib/avro"]
 	path = contrib/avro
 	url = https://github.com/ClickHouse-Extras/avro.git
@@ -203,7 +203,7 @@ endif ()
 option(ENABLE_TESTS "Provide unit_test_dbms target with Google.Test unit tests" ON)
 option(ENABLE_EXAMPLES "Build all example programs in 'examples' subdirectories" OFF)

-if (OS_LINUX AND (ARCH_AMD64 OR ARCH_AARCH64) AND NOT UNBUNDLED AND MAKE_STATIC_LIBRARIES AND NOT SPLIT_SHARED_LIBRARIES AND CMAKE_VERSION VERSION_GREATER "3.9.0")
+if (OS_LINUX AND (ARCH_AMD64 OR ARCH_AARCH64) AND NOT UNBUNDLED AND MAKE_STATIC_LIBRARIES AND NOT SPLIT_SHARED_LIBRARIES AND NOT USE_MUSL)
     # Only for Linux, x86_64 or aarch64.
     option(GLIBC_COMPATIBILITY "Enable compatibility with older glibc libraries." ON)
 elseif(GLIBC_COMPATIBILITY)
@@ -218,10 +218,6 @@ if (GLIBC_COMPATIBILITY)
     set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -include ${CMAKE_CURRENT_SOURCE_DIR}/base/glibc-compatibility/glibc-compat-2.32.h")
 endif()

-if (NOT CMAKE_VERSION VERSION_GREATER "3.9.0")
-    message (WARNING "CMake version must be greater than 3.9.0 for production builds.")
-endif ()
-
 # Make sure the final executable has symbols exported
 set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -rdynamic")
@@ -5,6 +5,10 @@
 #include <string.h>
 #include <unistd.h>
 #include <sys/select.h>
 #include <sys/time.h>
 #include <sys/types.h>

 #ifdef OS_LINUX
 /// We can detect if code is linked with one or another readline variants or open the library dynamically.
@@ -177,6 +177,10 @@ ReplxxLineReader::ReplxxLineReader(
     /// bind C-p/C-n to history-previous/history-next like readline.
     rx.bind_key(Replxx::KEY::control('N'), [this](char32_t code) { return rx.invoke(Replxx::ACTION::HISTORY_NEXT, code); });
     rx.bind_key(Replxx::KEY::control('P'), [this](char32_t code) { return rx.invoke(Replxx::ACTION::HISTORY_PREVIOUS, code); });

     /// bind C-j to ENTER action.
     rx.bind_key(Replxx::KEY::control('J'), [this](char32_t code) { return rx.invoke(Replxx::ACTION::COMMIT_LINE, code); });

     /// By default COMPLETE_NEXT/COMPLETE_PREV were bound to C-p/C-n; re-bind
     /// them to M-P/M-N (which were used for HISTORY_COMMON_PREFIX_SEARCH before,
     /// which was also bound to M-p/M-n).
@@ -6,7 +6,7 @@

 #include <base/defines.h>

-#if defined(__linux__) && !defined(THREAD_SANITIZER)
+#if defined(__linux__) && !defined(THREAD_SANITIZER) && !defined(USE_MUSL)
 #define USE_PHDR_CACHE 1
 #endif
@@ -84,7 +84,7 @@ void Loggers::buildLoggers(Poco::Util::AbstractConfiguration & config, Poco::Log

         Poco::AutoPtr<DB::OwnFormattingChannel> log = new DB::OwnFormattingChannel(pf, log_file);
         log->setLevel(log_level);
-        split->addChannel(log);
+        split->addChannel(log, "log");
     }

     const auto errorlog_path = config.getString("logger.errorlog", "");
@@ -116,7 +116,7 @@ void Loggers::buildLoggers(Poco::Util::AbstractConfiguration & config, Poco::Log
         Poco::AutoPtr<DB::OwnFormattingChannel> errorlog = new DB::OwnFormattingChannel(pf, error_log_file);
         errorlog->setLevel(errorlog_level);
         errorlog->open();
-        split->addChannel(errorlog);
+        split->addChannel(errorlog, "errorlog");
     }

     if (config.getBool("logger.use_syslog", false))
@@ -155,7 +155,7 @@ void Loggers::buildLoggers(Poco::Util::AbstractConfiguration & config, Poco::Log
         Poco::AutoPtr<DB::OwnFormattingChannel> log = new DB::OwnFormattingChannel(pf, syslog_channel);
         log->setLevel(syslog_level);

-        split->addChannel(log);
+        split->addChannel(log, "syslog");
     }

     bool should_log_to_console = isatty(STDIN_FILENO) || isatty(STDERR_FILENO);
@@ -177,7 +177,7 @@ void Loggers::buildLoggers(Poco::Util::AbstractConfiguration & config, Poco::Log
         Poco::AutoPtr<DB::OwnFormattingChannel> log = new DB::OwnFormattingChannel(pf, new Poco::ConsoleChannel);
         logger.warning("Logging " + console_log_level_string + " to console");
         log->setLevel(console_log_level);
-        split->addChannel(log);
+        split->addChannel(log, "console");
     }

     split->open();
@@ -224,6 +224,89 @@ void Loggers::buildLoggers(Poco::Util::AbstractConfiguration & config, Poco::Log
     }
 }

+void Loggers::updateLevels(Poco::Util::AbstractConfiguration & config, Poco::Logger & logger)
+{
+    int max_log_level = 0;
+
+    const auto log_level_string = config.getString("logger.level", "trace");
+    int log_level = Poco::Logger::parseLevel(log_level_string);
+    if (log_level > max_log_level)
+        max_log_level = log_level;
+
+    const auto log_path = config.getString("logger.log", "");
+    if (!log_path.empty())
+        split->setLevel("log", log_level);
+    else
+        split->setLevel("log", 0);
+
+    // Set level to console
+    bool is_daemon = config.getBool("application.runAsDaemon", false);
+    bool should_log_to_console = isatty(STDIN_FILENO) || isatty(STDERR_FILENO);
+    if (config.getBool("logger.console", false)
+        || (!config.hasProperty("logger.console") && !is_daemon && should_log_to_console))
+        split->setLevel("console", log_level);
+    else
+        split->setLevel("console", 0);
+
+    // Set level to errorlog
+    int errorlog_level = 0;
+    const auto errorlog_path = config.getString("logger.errorlog", "");
+    if (!errorlog_path.empty())
+    {
+        errorlog_level = Poco::Logger::parseLevel(config.getString("logger.errorlog_level", "notice"));
+        if (errorlog_level > max_log_level)
+            max_log_level = errorlog_level;
+    }
+    split->setLevel("errorlog", errorlog_level);
+
+    // Set level to syslog
+    int syslog_level = 0;
+    if (config.getBool("logger.use_syslog", false))
+    {
+        syslog_level = Poco::Logger::parseLevel(config.getString("logger.syslog_level", log_level_string));
+        if (syslog_level > max_log_level)
+            max_log_level = syslog_level;
+    }
+    split->setLevel("syslog", syslog_level);
+
+    // Global logging level (it can be overridden for specific loggers).
+    logger.setLevel(max_log_level);
+
+    // Set level to all already created loggers
+    std::vector<std::string> names;
+
+    logger.root().names(names);
+    for (const auto & name : names)
+        logger.root().get(name).setLevel(max_log_level);
+
+    logger.root().setLevel(max_log_level);
+
+    // Explicitly specified log levels for specific loggers.
+    {
+        Poco::Util::AbstractConfiguration::Keys loggers_level;
+        config.keys("logger.levels", loggers_level);
+
+        if (!loggers_level.empty())
+        {
+            for (const auto & key : loggers_level)
+            {
+                if (key == "logger" || key.starts_with("logger["))
+                {
+                    const std::string name(config.getString("logger.levels." + key + ".name"));
+                    const std::string level(config.getString("logger.levels." + key + ".level"));
+                    logger.root().get(name).setLevel(level);
+                }
+                else
+                {
+                    // Legacy syntax
+                    const std::string level(config.getString("logger.levels." + key, "trace"));
+                    logger.root().get(key).setLevel(level);
+                }
+            }
+        }
+    }
+}
+
 void Loggers::closeLogs(Poco::Logger & logger)
 {
     if (log_file)
@@ -19,6 +19,8 @@ class Loggers
 public:
     void buildLoggers(Poco::Util::AbstractConfiguration & config, Poco::Logger & logger, const std::string & cmd_name = "");

+    void updateLevels(Poco::Util::AbstractConfiguration & config, Poco::Logger & logger);
+
     /// Close log files. On next log write files will be reopened.
     void closeLogs(Poco::Logger & logger);
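As a usage sketch (not part of this commit; the daemon class and reload hook below are hypothetical names for illustration): updateLevels() re-reads the same "logger.*" keys as buildLoggers(), but only pushes new thresholds into the channels that were already registered by name, so a server can apply level changes on configuration reload without recreating its sinks:

    // Hypothetical daemon that re-applies log levels when its config is reloaded.
    class MyDaemon : public Loggers
    {
    public:
        void onConfigReload(Poco::Util::AbstractConfiguration & new_config)
        {
            // Channels were created once by buildLoggers(); this call only
            // adjusts the per-channel levels in place.
            updateLevels(new_config, Poco::Logger::root());
        }
    };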
@@ -1,4 +1,5 @@
 #pragma once
+#include <atomic>
 #include <Poco/AutoPtr.h>
 #include <Poco/Channel.h>
 #include <Poco/FormattingChannel.h>
@@ -14,7 +15,7 @@ class OwnFormattingChannel : public Poco::Channel, public ExtendedLogChannel
 public:
     explicit OwnFormattingChannel(
         Poco::AutoPtr<OwnPatternFormatter> pFormatter_ = nullptr, Poco::AutoPtr<Poco::Channel> pChannel_ = nullptr)
-        : pFormatter(std::move(pFormatter_)), pChannel(std::move(pChannel_))
+        : pFormatter(std::move(pFormatter_)), pChannel(std::move(pChannel_)), priority(Poco::Message::PRIO_TRACE)
     {
     }
@@ -45,7 +46,7 @@ public:
 private:
     Poco::AutoPtr<OwnPatternFormatter> pFormatter;
     Poco::AutoPtr<Poco::Channel> pChannel;
-    Poco::Message::Priority priority = Poco::Message::PRIO_TRACE;
+    std::atomic<Poco::Message::Priority> priority;
 };

 }
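The priority member becomes std::atomic because setLevel() can now be called from a config-reload thread while other threads keep logging through the same channel. A simplified model of that read/write pattern (an illustrative sketch, not the actual class; recall that Poco's numeric priorities put more-severe levels at lower values):

    #include <atomic>

    struct LevelledChannel
    {
        // PRIO_FATAL == 1 ... PRIO_TRACE == 8 in Poco::Message.
        std::atomic<int> priority{8};

        // Called from the config-reload thread.
        void setLevel(int level) { priority.store(level); }

        // Called concurrently from logging threads; a plain int here
        // would be a data race under the C++ memory model.
        bool shouldLog(int msg_priority) const { return msg_priority <= priority.load(); }
    };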
@@ -1,4 +1,5 @@
 #include "OwnSplitChannel.h"
+#include "OwnFormattingChannel.h"

 #include <iostream>
 #include <Core/Block.h>
@@ -75,7 +76,7 @@ void OwnSplitChannel::logSplit(const Poco::Message & msg)
     ExtendedLogMessage msg_ext = ExtendedLogMessage::getFrom(msg);

     /// Log data to child channels
-    for (auto & channel : channels)
+    for (auto & [name, channel] : channels)
     {
         if (channel.second)
             channel.second->logExtended(msg_ext); // extended child
@@ -137,9 +138,9 @@ void OwnSplitChannel::logSplit(const Poco::Message & msg)
 }


-void OwnSplitChannel::addChannel(Poco::AutoPtr<Poco::Channel> channel)
+void OwnSplitChannel::addChannel(Poco::AutoPtr<Poco::Channel> channel, const std::string & name)
 {
-    channels.emplace_back(std::move(channel), dynamic_cast<ExtendedLogChannel *>(channel.get()));
+    channels.emplace(name, ExtendedChannelPtrPair(std::move(channel), dynamic_cast<ExtendedLogChannel *>(channel.get())));
 }

 void OwnSplitChannel::addTextLog(std::shared_ptr<DB::TextLog> log, int max_priority)
@@ -149,4 +150,14 @@ void OwnSplitChannel::addTextLog(std::shared_ptr<DB::TextLog> log, int max_prior
     text_log_max_priority.store(max_priority, std::memory_order_relaxed);
 }

+void OwnSplitChannel::setLevel(const std::string & name, int level)
+{
+    auto it = channels.find(name);
+    if (it != channels.end())
+    {
+        if (auto * channel = dynamic_cast<DB::OwnFormattingChannel *>(it->second.first.get()))
+            channel->setLevel(level);
+    }
+}
+
 }
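With channels stored in a name-keyed map, individual sinks can be retuned at runtime without rebuilding the split channel. A hedged usage sketch (the channel variables are placeholders; the string keys match the ones registered by buildLoggers() above, and level 0 is what updateLevels() uses to mute a sink):

    // Assuming `split` is the Poco::AutoPtr<DB::OwnSplitChannel> owned by Loggers,
    // and console_channel / errorlog_channel are already-constructed channels:
    split->addChannel(console_channel, "console");
    split->addChannel(errorlog_channel, "errorlog");

    // Later, e.g. on config reload:
    split->setLevel("console", 0);                            // mute the console sink
    split->setLevel("errorlog", Poco::Message::PRIO_NOTICE);  // keep errors at `notice`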
@@ -18,10 +18,12 @@ public:
     /// Makes an extended message from msg and passes it to the client logs queue and child (if possible)
     void log(const Poco::Message & msg) override;
     /// Adds a child channel
-    void addChannel(Poco::AutoPtr<Poco::Channel> channel);
+    void addChannel(Poco::AutoPtr<Poco::Channel> channel, const std::string & name);

     void addTextLog(std::shared_ptr<DB::TextLog> log, int max_priority);

+    void setLevel(const std::string & name, int level);
+
 private:
     void logSplit(const Poco::Message & msg);
     void tryLogSplit(const Poco::Message & msg);
@@ -29,7 +31,7 @@ private:
     using ChannelPtr = Poco::AutoPtr<Poco::Channel>;
     /// Handler and its pointer casted to extended interface
     using ExtendedChannelPtrPair = std::pair<ChannelPtr, ExtendedLogChannel *>;
-    std::vector<ExtendedChannelPtrPair> channels;
+    std::map<std::string, ExtendedChannelPtrPair> channels;

     std::mutex text_log_mutex;
@@ -13,6 +13,7 @@ TRIES=3

 AMD64_BIN_URL="https://builds.clickhouse.com/master/amd64/clickhouse"
 AARCH64_BIN_URL="https://builds.clickhouse.com/master/aarch64/clickhouse"
+POWERPC64_BIN_URL="https://builds.clickhouse.com/master/ppc64le/clickhouse"

 # Note: on older Ubuntu versions, 'axel' does not support IPv6. If you are using IPv6-only servers on very old Ubuntu, just don't install 'axel'.

@@ -38,6 +39,8 @@ if [[ ! -f clickhouse ]]; then
         $FASTER_DOWNLOAD "$AMD64_BIN_URL"
     elif [[ $CPU == aarch64 ]]; then
         $FASTER_DOWNLOAD "$AARCH64_BIN_URL"
+    elif [[ $CPU == powerpc64le ]]; then
+        $FASTER_DOWNLOAD "$POWERPC64_BIN_URL"
     else
         echo "Unsupported CPU type: $CPU"
         exit 1
@@ -52,7 +55,7 @@ fi

 if [[ ! -d data ]]; then
     if [[ ! -f $DATASET ]]; then
-        $FASTER_DOWNLOAD "https://clickhouse-datasets.s3.yandex.net/hits/partitions/$DATASET"
+        $FASTER_DOWNLOAD "https://datasets.clickhouse.com/hits/partitions/$DATASET"
     fi

     tar $TAR_PARAMS --strip-components=1 --directory=. -x -v -f $DATASET
@@ -18,6 +18,10 @@ option (ENABLE_PCLMULQDQ "Use pclmulqdq instructions on x86_64" 1)
 option (ENABLE_POPCNT "Use popcnt instructions on x86_64" 1)
 option (ENABLE_AVX "Use AVX instructions on x86_64" 0)
 option (ENABLE_AVX2 "Use AVX2 instructions on x86_64" 0)
+option (ENABLE_AVX512 "Use AVX512 instructions on x86_64" 0)
+option (ENABLE_BMI "Use BMI instructions on x86_64" 0)
+option (ENABLE_AVX2_FOR_SPEC_OP "Use avx2 instructions for specific operations on x86_64" 0)
+option (ENABLE_AVX512_FOR_SPEC_OP "Use avx512 instructions for specific operations on x86_64" 0)

 option (ARCH_NATIVE "Add -march=native compiler flag. This makes your binaries non-portable but more performant code may be generated. This option overrides ENABLE_* options for specific instruction set. Highly not recommended to use." 0)

@@ -127,6 +131,57 @@ else ()
     if (HAVE_AVX2 AND ENABLE_AVX2)
         set (COMPILER_FLAGS "${COMPILER_FLAGS} ${TEST_FLAG}")
     endif ()

+    set (TEST_FLAG "-mavx512f -mavx512bw")
+    set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0")
+    check_cxx_source_compiles("
+        #include <immintrin.h>
+        int main() {
+            auto a = _mm512_setzero_epi32();
+            (void)a;
+            auto b = _mm512_add_epi16(__m512i(), __m512i());
+            (void)b;
+            return 0;
+        }
+    " HAVE_AVX512)
+    if (HAVE_AVX512 AND ENABLE_AVX512)
+        set (COMPILER_FLAGS "${COMPILER_FLAGS} ${TEST_FLAG}")
+    endif ()
+
+    set (TEST_FLAG "-mbmi")
+    set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0")
+    check_cxx_source_compiles("
+        #include <immintrin.h>
+        int main() {
+            auto a = _blsr_u32(0);
+            (void)a;
+            return 0;
+        }
+    " HAVE_BMI)
+    if (HAVE_BMI AND ENABLE_BMI)
+        set (COMPILER_FLAGS "${COMPILER_FLAGS} ${TEST_FLAG}")
+    endif ()
+
+    # Limit avx2/avx512 flag for specific source build
+    set (X86_INTRINSICS_FLAGS "")
+    if (ENABLE_AVX2_FOR_SPEC_OP)
+        if (HAVE_BMI)
+            set (X86_INTRINSICS_FLAGS "${X86_INTRINSICS_FLAGS} -mbmi")
+        endif ()
+        if (HAVE_AVX AND HAVE_AVX2)
+            set (X86_INTRINSICS_FLAGS "${X86_INTRINSICS_FLAGS} -mavx -mavx2")
+        endif ()
+    endif ()
+
+    if (ENABLE_AVX512_FOR_SPEC_OP)
+        set (X86_INTRINSICS_FLAGS "")
+        if (HAVE_BMI)
+            set (X86_INTRINSICS_FLAGS "${X86_INTRINSICS_FLAGS} -mbmi")
+        endif ()
+        if (HAVE_AVX512)
+            set (X86_INTRINSICS_FLAGS "${X86_INTRINSICS_FLAGS} -mavx512f -mavx512bw")
+        endif ()
+    endif ()
 endif ()

 cmake_pop_check_state ()
@@ -10,7 +10,7 @@ if (NOT ENABLE_AMQPCPP)
 endif()

 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/AMQP-CPP/CMakeLists.txt")
-    message (WARNING "submodule contrib/AMQP-CPP is missing. to fix try run: \n git submodule update --init --recursive")
+    message (WARNING "submodule contrib/AMQP-CPP is missing. to fix try run: \n git submodule update --init")
     message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal AMQP-CPP library")
     set (USE_AMQPCPP 0)
     return()

@@ -13,7 +13,7 @@ option (USE_INTERNAL_AVRO_LIBRARY

 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/avro/lang/c++/CMakeLists.txt")
     if (USE_INTERNAL_AVRO_LIBRARY)
-        message(WARNING "submodule contrib/avro is missing. to fix try run: \n git submodule update --init --recursive")
+        message(WARNING "submodule contrib/avro is missing. to fix try run: \n git submodule update --init")
         message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot find internal avro")
         set(USE_INTERNAL_AVRO_LIBRARY 0)
     endif()

@@ -10,11 +10,11 @@ endif()

 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/base64/LICENSE")
     set (MISSING_INTERNAL_BASE64_LIBRARY 1)
-    message (WARNING "submodule contrib/base64 is missing. to fix try run: \n git submodule update --init --recursive")
+    message (WARNING "submodule contrib/base64 is missing. to fix try run: \n git submodule update --init")
 endif ()

 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/base64")
-    message (WARNING "submodule contrib/base64 is missing. to fix try run: \n git submodule update --init --recursive")
+    message (WARNING "submodule contrib/base64 is missing. to fix try run: \n git submodule update --init")
 else()
     set (BASE64_LIBRARY base64)
     set (USE_BASE64 1)

@@ -16,7 +16,7 @@ endif()

 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/brotli/c/include/brotli/decode.h")
     if (USE_INTERNAL_BROTLI_LIBRARY)
-        message (WARNING "submodule contrib/brotli is missing. to fix try run: \n git submodule update --init --recursive")
+        message (WARNING "submodule contrib/brotli is missing. to fix try run: \n git submodule update --init")
         message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot find internal brotli")
         set (USE_INTERNAL_BROTLI_LIBRARY 0)
     endif ()

@@ -6,7 +6,7 @@ if (NOT ENABLE_BZIP2)
 endif()

 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/bzip2/bzlib.h")
-    message (WARNING "submodule contrib/bzip2 is missing. to fix try run: \n git submodule update --init --recursive")
+    message (WARNING "submodule contrib/bzip2 is missing. to fix try run: \n git submodule update --init")
     message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal bzip2 library")
     set (USE_NLP 0)
     return()

@@ -11,7 +11,7 @@ option (USE_INTERNAL_CAPNP_LIBRARY "Set to FALSE to use system capnproto library

 if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/capnproto/CMakeLists.txt")
     if(USE_INTERNAL_CAPNP_LIBRARY)
-        message(WARNING "submodule contrib/capnproto is missing. to fix try run: \n git submodule update --init --recursive")
+        message(WARNING "submodule contrib/capnproto is missing. to fix try run: \n git submodule update --init")
         message(${RECONFIGURE_MESSAGE_LEVEL} "cannot find internal capnproto")
         set(USE_INTERNAL_CAPNP_LIBRARY 0)
     endif()
@@ -34,8 +34,6 @@ endif()
 if (CAPNP_LIBRARIES)
     set (USE_CAPNP 1)
 elseif(NOT MISSING_INTERNAL_CAPNP_LIBRARY)
-    add_subdirectory(contrib/capnproto-cmake)
-
     set (CAPNP_LIBRARIES capnpc)
     set (USE_CAPNP 1)
     set (USE_INTERNAL_CAPNP_LIBRARY 1)
@@ -14,7 +14,7 @@ if (APPLE)
 endif()

 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/cassandra")
-    message (ERROR "submodule contrib/cassandra is missing. to fix try run: \n git submodule update --init --recursive")
+    message (ERROR "submodule contrib/cassandra is missing. to fix try run: \n git submodule update --init")
     message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal Cassandra")
     set (USE_CASSANDRA 0)
     return()

@@ -17,7 +17,7 @@ option (USE_INTERNAL_LIBCXX_LIBRARY "Disable to use system libcxx and libcxxabi

 if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libcxx/CMakeLists.txt")
     if (USE_INTERNAL_LIBCXX_LIBRARY)
-        message(WARNING "submodule contrib/libcxx is missing. to fix try run: \n git submodule update --init --recursive")
+        message(WARNING "submodule contrib/libcxx is missing. to fix try run: \n git submodule update --init")
         message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal libcxx")
         set(USE_INTERNAL_LIBCXX_LIBRARY 0)
     endif()

@@ -6,7 +6,7 @@ endif()

 OPTION(ENABLE_CYRUS_SASL "Enable cyrus-sasl" ${DEFAULT_ENABLE_CYRUS_SASL})
 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/cyrus-sasl/README")
-    message (WARNING "submodule contrib/cyrus-sasl is missing. to fix try run: \n git submodule update --init --recursive")
+    message (WARNING "submodule contrib/cyrus-sasl is missing. to fix try run: \n git submodule update --init")
     set (ENABLE_CYRUS_SASL 0)
 endif ()

@@ -6,7 +6,7 @@ option (USE_INTERNAL_DATASKETCHES_LIBRARY "Set to FALSE to use system DataSketch

 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/datasketches-cpp/theta/CMakeLists.txt")
     if (USE_INTERNAL_DATASKETCHES_LIBRARY)
-        message(WARNING "submodule contrib/datasketches-cpp is missing. to fix try run: \n git submodule update --init --recursive")
+        message(WARNING "submodule contrib/datasketches-cpp is missing. to fix try run: \n git submodule update --init")
     endif()
     set(MISSING_INTERNAL_DATASKETCHES_LIBRARY 1)
     set(USE_INTERNAL_DATASKETCHES_LIBRARY 0)

@@ -1,5 +1,5 @@
 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/fast_float/include/fast_float/fast_float.h")
-    message (FATAL_ERROR "submodule contrib/fast_float is missing. to fix try run: \n git submodule update --init --recursive")
+    message (FATAL_ERROR "submodule contrib/fast_float is missing. to fix try run: \n git submodule update --init")
 endif ()

 set(FAST_FLOAT_LIBRARY fast_float)

@@ -10,7 +10,7 @@ if(NOT ENABLE_FASTOPS)
 endif()

 if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/fastops/fastops/fastops.h")
-    message(WARNING "submodule contrib/fastops is missing. to fix try run: \n git submodule update --init --recursive")
+    message(WARNING "submodule contrib/fastops is missing. to fix try run: \n git submodule update --init")
     message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal fastops library")
     set(MISSING_INTERNAL_FASTOPS_LIBRARY 1)
 endif()

@@ -26,7 +26,7 @@ option(USE_INTERNAL_GRPC_LIBRARY "Set to FALSE to use system gRPC library instea

 if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/grpc/CMakeLists.txt")
     if(USE_INTERNAL_GRPC_LIBRARY)
-        message(WARNING "submodule contrib/grpc is missing. to fix try run: \n git submodule update --init --recursive")
+        message(WARNING "submodule contrib/grpc is missing. to fix try run: \n git submodule update --init")
         message(${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal grpc")
         set(USE_INTERNAL_GRPC_LIBRARY 0)
     endif()

@@ -4,7 +4,7 @@ option (USE_INTERNAL_GTEST_LIBRARY "Set to FALSE to use system Google Test inste

 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/googletest/googletest/CMakeLists.txt")
     if (USE_INTERNAL_GTEST_LIBRARY)
-        message (WARNING "submodule contrib/googletest is missing. to fix try run: \n git submodule update --init --recursive")
+        message (WARNING "submodule contrib/googletest is missing. to fix try run: \n git submodule update --init")
         message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal gtest")
         set (USE_INTERNAL_GTEST_LIBRARY 0)
     endif ()

@@ -11,7 +11,7 @@ option(USE_INTERNAL_H3_LIBRARY "Set to FALSE to use system h3 library instead of

 if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/h3/src/h3lib/include/h3Index.h")
     if(USE_INTERNAL_H3_LIBRARY)
-        message(WARNING "submodule contrib/h3 is missing. to fix try run: \n git submodule update --init --recursive")
+        message(WARNING "submodule contrib/h3 is missing. to fix try run: \n git submodule update --init")
         message(${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal h3 library")
         set(USE_INTERNAL_H3_LIBRARY 0)
     endif()

@@ -16,7 +16,7 @@ option(USE_INTERNAL_HDFS3_LIBRARY "Set to FALSE to use system HDFS3 instead of b

 if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libhdfs3/include/hdfs/hdfs.h")
     if(USE_INTERNAL_HDFS3_LIBRARY)
-        message(WARNING "submodule contrib/libhdfs3 is missing. to fix try run: \n git submodule update --init --recursive")
+        message(WARNING "submodule contrib/libhdfs3 is missing. to fix try run: \n git submodule update --init")
         message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal HDFS3 library")
         set(USE_INTERNAL_HDFS3_LIBRARY 0)
     endif()

@@ -16,7 +16,7 @@ option (USE_INTERNAL_ICU_LIBRARY "Set to FALSE to use system ICU library instead

 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/icu/icu4c/LICENSE")
     if (USE_INTERNAL_ICU_LIBRARY)
-        message (WARNING "submodule contrib/icu is missing. to fix try run: \n git submodule update --init --recursive")
+        message (WARNING "submodule contrib/icu is missing. to fix try run: \n git submodule update --init")
         message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal ICU")
         set (USE_INTERNAL_ICU_LIBRARY 0)
     endif ()
@@ -1,7 +1,7 @@
 OPTION(ENABLE_KRB5 "Enable krb5" ${ENABLE_LIBRARIES})

 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/krb5/README")
-    message (WARNING "submodule contrib/krb5 is missing. to fix try run: \n git submodule update --init --recursive")
+    message (WARNING "submodule contrib/krb5 is missing. to fix try run: \n git submodule update --init")
     set (ENABLE_KRB5 0)
 endif ()

@@ -15,7 +15,7 @@ option (USE_INTERNAL_LDAP_LIBRARY "Set to FALSE to use system *LDAP library inst

 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/openldap/README")
     if (USE_INTERNAL_LDAP_LIBRARY)
-        message (WARNING "Submodule contrib/openldap is missing. To fix try running:\n git submodule update --init --recursive")
+        message (WARNING "Submodule contrib/openldap is missing. To fix try running:\n git submodule update --init")
         message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal LDAP library")
     endif ()

@@ -16,7 +16,7 @@ endif()

 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libgsasl/src/gsasl.h")
     if (USE_INTERNAL_LIBGSASL_LIBRARY)
-        message (WARNING "submodule contrib/libgsasl is missing. to fix try run: \n git submodule update --init --recursive")
+        message (WARNING "submodule contrib/libgsasl is missing. to fix try run: \n git submodule update --init")
         message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal libgsasl")
         set (USE_INTERNAL_LIBGSASL_LIBRARY 0)
     endif ()

@@ -5,14 +5,14 @@ if (NOT ENABLE_LIBPQXX)
 endif()

 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libpqxx/CMakeLists.txt")
-    message (WARNING "submodule contrib/libpqxx is missing. to fix try run: \n git submodule update --init --recursive")
+    message (WARNING "submodule contrib/libpqxx is missing. to fix try run: \n git submodule update --init")
     message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal libpqxx library")
     set (USE_LIBPQXX 0)
     return()
 endif()

 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libpq/include")
-    message (ERROR "submodule contrib/libpq is missing. to fix try run: \n git submodule update --init --recursive")
+    message (ERROR "submodule contrib/libpq is missing. to fix try run: \n git submodule update --init")
     message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal libpq needed for libpqxx")
     set (USE_LIBPQXX 0)
     return()

@@ -7,5 +7,5 @@ endif()
 set(LibProtobufMutator_SOURCE_DIR "${ClickHouse_SOURCE_DIR}/contrib/libprotobuf-mutator")

 if (NOT EXISTS "${LibProtobufMutator_SOURCE_DIR}/README.md")
-    message (ERROR "submodule contrib/libprotobuf-mutator is missing. to fix try run: \n git submodule update --init --recursive")
+    message (ERROR "submodule contrib/libprotobuf-mutator is missing. to fix try run: \n git submodule update --init")
 endif()

@@ -5,7 +5,7 @@ if (OS_DARWIN AND COMPILER_GCC)
 endif()

 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libuv")
-    message (WARNING "submodule contrib/libuv is missing. to fix try run: \n git submodule update --init --recursive")
+    message (WARNING "submodule contrib/libuv is missing. to fix try run: \n git submodule update --init")
     SET(MISSING_INTERNAL_LIBUV_LIBRARY 1)
     return()
 endif()

@@ -2,7 +2,7 @@ option (USE_INTERNAL_LIBXML2_LIBRARY "Set to FALSE to use system libxml2 library

 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libxml2/libxml.h")
     if (USE_INTERNAL_LIBXML2_LIBRARY)
-        message (WARNING "submodule contrib/libxml2 is missing. to fix try run: \n git submodule update --init --recursive")
+        message (WARNING "submodule contrib/libxml2 is missing. to fix try run: \n git submodule update --init")
         message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal libxml")
         set (USE_INTERNAL_LIBXML2_LIBRARY 0)
     endif ()

@@ -12,7 +12,7 @@ if (NOT ENABLE_EMBEDDED_COMPILER)
 endif()

 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/llvm/llvm/CMakeLists.txt")
-    message (${RECONFIGURE_MESSAGE_LEVEL} "submodule /contrib/llvm is missing. to fix try run: \n git submodule update --init --recursive")
+    message (${RECONFIGURE_MESSAGE_LEVEL} "submodule /contrib/llvm is missing. to fix try run: \n git submodule update --init")
 endif ()

 set (USE_EMBEDDED_COMPILER 1)

@@ -11,7 +11,7 @@ option (USE_INTERNAL_MSGPACK_LIBRARY "Set to FALSE to use system msgpack library

 if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/msgpack-c/include/msgpack.hpp")
     if(USE_INTERNAL_MSGPACK_LIBRARY)
-        message(WARNING "Submodule contrib/msgpack-c is missing. To fix try run: \n git submodule update --init --recursive")
+        message(WARNING "Submodule contrib/msgpack-c is missing. To fix try run: \n git submodule update --init")
         message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal msgpack")
         set(USE_INTERNAL_MSGPACK_LIBRARY 0)
     endif()

@@ -16,7 +16,7 @@ option(USE_INTERNAL_MYSQL_LIBRARY "Set to FALSE to use system mysqlclient librar

 if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/mariadb-connector-c/README")
     if(USE_INTERNAL_MYSQL_LIBRARY)
-        message(WARNING "submodule contrib/mariadb-connector-c is missing. to fix try run: \n git submodule update --init --recursive")
+        message(WARNING "submodule contrib/mariadb-connector-c is missing. to fix try run: \n git submodule update --init")
         message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal mysql library")
         set(USE_INTERNAL_MYSQL_LIBRARY 0)
     endif()

@@ -7,7 +7,7 @@ if (NOT USE_INTERNAL_NANODBC_LIBRARY)
 endif ()

 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/nanodbc/CMakeLists.txt")
-    message (FATAL_ERROR "submodule contrib/nanodbc is missing. to fix try run: \n git submodule update --init --recursive")
+    message (FATAL_ERROR "submodule contrib/nanodbc is missing. to fix try run: \n git submodule update --init")
 endif()

 set (NANODBC_LIBRARY nanodbc)
@@ -7,21 +7,21 @@ if (NOT ENABLE_NLP)
 endif()

 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libstemmer_c/Makefile")
-    message (WARNING "submodule contrib/libstemmer_c is missing. to fix try run: \n git submodule update --init --recursive")
+    message (WARNING "submodule contrib/libstemmer_c is missing. to fix try run: \n git submodule update --init")
     message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal libstemmer_c library, NLP functions will be disabled")
     set (USE_NLP 0)
     return()
 endif ()

 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/wordnet-blast/CMakeLists.txt")
-    message (WARNING "submodule contrib/wordnet-blast is missing. to fix try run: \n git submodule update --init --recursive")
+    message (WARNING "submodule contrib/wordnet-blast is missing. to fix try run: \n git submodule update --init")
     message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal wordnet-blast library, NLP functions will be disabled")
     set (USE_NLP 0)
     return()
 endif ()

 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/lemmagen-c/README.md")
-    message (WARNING "submodule contrib/lemmagen-c is missing. to fix try run: \n git submodule update --init --recursive")
+    message (WARNING "submodule contrib/lemmagen-c is missing. to fix try run: \n git submodule update --init")
     message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal lemmagen-c library, NLP functions will be disabled")
     set (USE_NLP 0)
     return()

@@ -5,7 +5,7 @@ if (NOT ENABLE_NURAFT)
 endif()

 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/NuRaft/CMakeLists.txt")
-    message (WARNING "submodule contrib/NuRaft is missing. to fix try run: \n git submodule update --init --recursive")
+    message (WARNING "submodule contrib/NuRaft is missing. to fix try run: \n git submodule update --init")
     message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal NuRaft library")
     set (USE_NURAFT 0)
     return()

@@ -18,7 +18,7 @@ include(cmake/find/snappy.cmake)

 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/orc/c++/include/orc/OrcFile.hh")
     if(USE_INTERNAL_ORC_LIBRARY)
-        message(WARNING "submodule contrib/orc is missing. to fix try run: \n git submodule update --init --recursive")
+        message(WARNING "submodule contrib/orc is missing. to fix try run: \n git submodule update --init")
         message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal ORC")
         set(USE_INTERNAL_ORC_LIBRARY 0)
     endif()

@@ -20,7 +20,7 @@ endif()

 if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/arrow/cpp/CMakeLists.txt")
     if(USE_INTERNAL_PARQUET_LIBRARY)
-        message(WARNING "submodule contrib/arrow (required for Parquet) is missing. to fix try run: \n git submodule update --init --recursive")
+        message(WARNING "submodule contrib/arrow (required for Parquet) is missing. to fix try run: \n git submodule update --init")
         message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal parquet library")
         set(USE_INTERNAL_PARQUET_LIBRARY 0)
     endif()

@@ -15,7 +15,7 @@ option(USE_INTERNAL_PROTOBUF_LIBRARY "Set to FALSE to use system protobuf instea

 if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/protobuf/cmake/CMakeLists.txt")
     if(USE_INTERNAL_PROTOBUF_LIBRARY)
-        message(WARNING "submodule contrib/protobuf is missing. to fix try run: \n git submodule update --init --recursive")
+        message(WARNING "submodule contrib/protobuf is missing. to fix try run: \n git submodule update --init")
         message(${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal protobuf")
         set(USE_INTERNAL_PROTOBUF_LIBRARY 0)
     endif()

@@ -10,7 +10,7 @@ option(USE_INTERNAL_RAPIDJSON_LIBRARY "Set to FALSE to use system rapidjson libr

 if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/rapidjson/include/rapidjson/rapidjson.h")
     if(USE_INTERNAL_RAPIDJSON_LIBRARY)
-        message(WARNING "submodule contrib/rapidjson is missing. to fix try run: \n git submodule update --init --recursive")
+        message(WARNING "submodule contrib/rapidjson is missing. to fix try run: \n git submodule update --init")
         message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal rapidjson library")
         set(USE_INTERNAL_RAPIDJSON_LIBRARY 0)
     endif()

@@ -11,7 +11,7 @@ option (USE_INTERNAL_RDKAFKA_LIBRARY "Set to FALSE to use system librdkafka inst

 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/cppkafka/CMakeLists.txt")
     if(USE_INTERNAL_RDKAFKA_LIBRARY)
-        message (WARNING "submodule contrib/cppkafka is missing. to fix try run: \n git submodule update --init --recursive")
+        message (WARNING "submodule contrib/cppkafka is missing. to fix try run: \n git submodule update --init")
         message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal cppkafka")
         set (USE_INTERNAL_RDKAFKA_LIBRARY 0)
     endif()
@@ -20,7 +20,7 @@ endif ()

 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/librdkafka/CMakeLists.txt")
     if(USE_INTERNAL_RDKAFKA_LIBRARY OR MISSING_INTERNAL_CPPKAFKA_LIBRARY)
-        message (WARNING "submodule contrib/librdkafka is missing. to fix try run: \n git submodule update --init --recursive")
+        message (WARNING "submodule contrib/librdkafka is missing. to fix try run: \n git submodule update --init")
         message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal rdkafka")
         set (USE_INTERNAL_RDKAFKA_LIBRARY 0)
     endif()

@@ -2,7 +2,7 @@ option (USE_INTERNAL_RE2_LIBRARY "Set to FALSE to use system re2 library instead

 if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/re2/CMakeLists.txt")
     if(USE_INTERNAL_RE2_LIBRARY)
-        message(WARNING "submodule contrib/re2 is missing. to fix try run: \n git submodule update --init --recursive")
+        message(WARNING "submodule contrib/re2 is missing. to fix try run: \n git submodule update --init")
         message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal re2 library")
     endif()
     set(USE_INTERNAL_RE2_LIBRARY 0)

@@ -15,7 +15,7 @@ option(USE_INTERNAL_ROCKSDB_LIBRARY "Set to FALSE to use system ROCKSDB library

 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/rocksdb/CMakeLists.txt")
     if (USE_INTERNAL_ROCKSDB_LIBRARY)
-        message (WARNING "submodule contrib is missing. to fix try run: \n git submodule update --init --recursive")
+        message (WARNING "submodule contrib is missing. to fix try run: \n git submodule update --init")
         message(${RECONFIGURE_MESSAGE_LEVEL} "cannot find internal rocksdb")
     endif()
     set (MISSING_INTERNAL_ROCKSDB 1)
@@ -3,7 +3,7 @@ option(ENABLE_S2_GEOMETRY "Enable S2 geometry library" ${ENABLE_LIBRARIES})

 if (ENABLE_S2_GEOMETRY)
     if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/s2geometry")
-        message (WARNING "submodule contrib/s2geometry is missing. to fix try run: \n git submodule update --init --recursive")
+        message (WARNING "submodule contrib/s2geometry is missing. to fix try run: \n git submodule update --init")
         set (ENABLE_S2_GEOMETRY 0)
         set (USE_S2_GEOMETRY 0)
     else()

@@ -23,7 +23,7 @@ if (NOT USE_INTERNAL_AWS_S3_LIBRARY)
 endif()

 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/aws/aws-cpp-sdk-s3")
-    message (WARNING "submodule contrib/aws is missing. to fix try run: \n git submodule update --init --recursive")
+    message (WARNING "submodule contrib/aws is missing. to fix try run: \n git submodule update --init")
     if (USE_INTERNAL_AWS_S3_LIBRARY)
         message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal S3 library")
     endif ()

@@ -2,7 +2,7 @@ set (SENTRY_LIBRARY "sentry")

 set (SENTRY_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/sentry-native/include")
 if (NOT EXISTS "${SENTRY_INCLUDE_DIR}/sentry.h")
-    message (WARNING "submodule contrib/sentry-native is missing. to fix try run: \n git submodule update --init --recursive")
+    message (WARNING "submodule contrib/sentry-native is missing. to fix try run: \n git submodule update --init")
     if (USE_SENTRY)
         message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal sentry library")
     endif()

@@ -1,7 +1,7 @@
 option (USE_SIMDJSON "Use simdjson" ${ENABLE_LIBRARIES})

 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/simdjson/include/simdjson.h")
-    message (WARNING "submodule contrib/simdjson is missing. to fix try run: \n git submodule update --init --recursive")
+    message (WARNING "submodule contrib/simdjson is missing. to fix try run: \n git submodule update --init")
     if (USE_SIMDJSON)
         message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal simdjson library")
     endif()

@@ -5,7 +5,7 @@ if (NOT ENABLE_SQLITE)
 endif()

 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/sqlite-amalgamation/sqlite3.c")
-    message (WARNING "submodule contrib/sqlite3-amalgamation is missing. to fix try run: \n git submodule update --init --recursive")
+    message (WARNING "submodule contrib/sqlite3-amalgamation is missing. to fix try run: \n git submodule update --init")
     message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal sqlite library")
     set (USE_SQLITE 0)
     return()

@@ -13,7 +13,7 @@ option(USE_INTERNAL_SSL_LIBRARY "Set to FALSE to use system *ssl library instead

 if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/boringssl/README.md")
     if(USE_INTERNAL_SSL_LIBRARY)
-        message(WARNING "submodule contrib/boringssl is missing. to fix try run: \n git submodule update --init --recursive")
+        message(WARNING "submodule contrib/boringssl is missing. to fix try run: \n git submodule update --init")
         message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal ssl library")
     endif()
     set(USE_INTERNAL_SSL_LIBRARY 0)

@@ -2,11 +2,11 @@ option(ENABLE_STATS "Enable StatsLib library" ${ENABLE_LIBRARIES})

 if (ENABLE_STATS)
     if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/stats")
-        message (WARNING "submodule contrib/stats is missing. to fix try run: \n git submodule update --init --recursive")
+        message (WARNING "submodule contrib/stats is missing. to fix try run: \n git submodule update --init")
         set (ENABLE_STATS 0)
         set (USE_STATS 0)
     elseif (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/gcem")
-        message (WARNING "submodule contrib/gcem is missing. to fix try run: \n git submodule update --init --recursive")
+        message (WARNING "submodule contrib/gcem is missing. to fix try run: \n git submodule update --init")
         set (ENABLE_STATS 0)
         set (USE_STATS 0)
     else()

@@ -2,7 +2,7 @@ option (USE_INTERNAL_XZ_LIBRARY "Set to OFF to use system xz (lzma) library inst

 if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/xz/src/liblzma/api/lzma.h")
     if(USE_INTERNAL_XZ_LIBRARY)
-        message(WARNING "submodule contrib/xz is missing. to fix try run: \n git submodule update --init --recursive")
+        message(WARNING "submodule contrib/xz is missing. to fix try run: \n git submodule update --init")
         message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal xz (lzma) library")
         set(USE_INTERNAL_XZ_LIBRARY 0)
     endif()

@@ -5,5 +5,5 @@ if (NOT USE_YAML_CPP)
 endif()

 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/yaml-cpp/README.md")
-    message (ERROR "submodule contrib/yaml-cpp is missing. to fix try run: \n git submodule update --init --recursive")
+    message (ERROR "submodule contrib/yaml-cpp is missing. to fix try run: \n git submodule update --init")
 endif()

@@ -12,7 +12,7 @@ endif ()

 if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/${INTERNAL_ZLIB_NAME}/zlib.h")
     if(USE_INTERNAL_ZLIB_LIBRARY)
-        message(WARNING "submodule contrib/${INTERNAL_ZLIB_NAME} is missing. to fix try run: \n git submodule update --init --recursive")
+        message(WARNING "submodule contrib/${INTERNAL_ZLIB_NAME} is missing. to fix try run: \n git submodule update --init")
         message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal zlib library")
     endif()
     set(USE_INTERNAL_ZLIB_LIBRARY 0)

@@ -2,7 +2,7 @@ option (USE_INTERNAL_ZSTD_LIBRARY "Set to FALSE to use system zstd library inste

 if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/zstd/lib/zstd.h")
     if(USE_INTERNAL_ZSTD_LIBRARY)
-        message(WARNING "submodule contrib/zstd is missing. to fix try run: \n git submodule update --init --recursive")
+        message(WARNING "submodule contrib/zstd is missing. to fix try run: \n git submodule update --init")
         message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal zstd library")
         set(USE_INTERNAL_ZSTD_LIBRARY 0)
     endif()
@@ -14,6 +14,8 @@ endif ()
 if (OS_ANDROID)
     # pthread and rt are included in libc
     set (DEFAULT_LIBS "${DEFAULT_LIBS} ${BUILTINS_LIBRARY} ${COVERAGE_OPTION} -lc -lm -ldl")
+elseif (USE_MUSL)
+    set (DEFAULT_LIBS "${DEFAULT_LIBS} ${BUILTINS_LIBRARY} ${COVERAGE_OPTION} -static -lc")
 else ()
     set (DEFAULT_LIBS "${DEFAULT_LIBS} ${BUILTINS_LIBRARY} ${COVERAGE_OPTION} -lc -lm -lrt -lpthread -ldl")
 endif ()
@@ -26,7 +28,7 @@ set(CMAKE_C_STANDARD_LIBRARIES ${DEFAULT_LIBS})
 # glibc-compatibility library relies on a constant version of libc headers
 # (because minor changes in function attributes between different glibc versions will introduce incompatibilities)
 # This is for x86_64. For other architectures we have separate toolchains.
-if (ARCH_AMD64 AND NOT_UNBUNDLED)
+if (ARCH_AMD64 AND NOT_UNBUNDLED AND NOT CMAKE_CROSSCOMPILING)
     set(CMAKE_C_STANDARD_INCLUDE_DIRECTORIES ${ClickHouse_SOURCE_DIR}/contrib/libc-headers/x86_64-linux-gnu ${ClickHouse_SOURCE_DIR}/contrib/libc-headers)
     set(CMAKE_CXX_STANDARD_INCLUDE_DIRECTORIES ${ClickHouse_SOURCE_DIR}/contrib/libc-headers/x86_64-linux-gnu ${ClickHouse_SOURCE_DIR}/contrib/libc-headers)
 endif ()
@@ -37,8 +39,10 @@ set(THREADS_PREFER_PTHREAD_FLAG ON)
 find_package(Threads REQUIRED)

 if (NOT OS_ANDROID)
-    # Our compatibility layer doesn't build under Android, many errors in musl.
-    add_subdirectory(base/glibc-compatibility)
+    if (NOT USE_MUSL)
+        # Our compatibility layer doesn't build under Android, many errors in musl.
+        add_subdirectory(base/glibc-compatibility)
+    endif ()
     add_subdirectory(base/harmful)
 endif ()
32  cmake/linux/toolchain-riscv64.cmake  Normal file
@@ -0,0 +1,32 @@
+set (CMAKE_TRY_COMPILE_TARGET_TYPE STATIC_LIBRARY)
+
+set (CMAKE_SYSTEM_NAME "Linux")
+set (CMAKE_SYSTEM_PROCESSOR "riscv64")
+set (CMAKE_C_COMPILER_TARGET "riscv64-linux-gnu")
+set (CMAKE_CXX_COMPILER_TARGET "riscv64-linux-gnu")
+set (CMAKE_ASM_COMPILER_TARGET "riscv64-linux-gnu")
+
+set (TOOLCHAIN_PATH "${CMAKE_CURRENT_LIST_DIR}/../../contrib/sysroot/linux-riscv64")
+
+set (CMAKE_SYSROOT "${TOOLCHAIN_PATH}")
+
+find_program (LLVM_AR_PATH NAMES "llvm-ar" "llvm-ar-13" "llvm-ar-12" "llvm-ar-11" "llvm-ar-10" "llvm-ar-9" "llvm-ar-8")
+find_program (LLVM_RANLIB_PATH NAMES "llvm-ranlib" "llvm-ranlib-13" "llvm-ranlib-12" "llvm-ranlib-11" "llvm-ranlib-10" "llvm-ranlib-9")
+
+set (CMAKE_AR "${LLVM_AR_PATH}" CACHE FILEPATH "" FORCE)
+set (CMAKE_RANLIB "${LLVM_RANLIB_PATH}" CACHE FILEPATH "" FORCE)
+
+set (CMAKE_C_FLAGS_INIT "${CMAKE_C_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}")
+set (CMAKE_CXX_FLAGS_INIT "${CMAKE_CXX_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}")
+set (CMAKE_ASM_FLAGS_INIT "${CMAKE_ASM_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}")
+
+set (LINKER_NAME "ld.lld" CACHE STRING "" FORCE)
+
+set (CMAKE_EXE_LINKER_FLAGS_INIT "-fuse-ld=lld")
+set (CMAKE_SHARED_LINKER_FLAGS_INIT "-fuse-ld=lld")
+
+set (HAS_PRE_1970_EXITCODE "0" CACHE STRING "Result from TRY_RUN" FORCE)
+set (HAS_PRE_1970_EXITCODE__TRYRUN_OUTPUT "" CACHE STRING "Output from TRY_RUN" FORCE)
+
+set (HAS_POST_2038_EXITCODE "0" CACHE STRING "Result from TRY_RUN" FORCE)
+set (HAS_POST_2038_EXITCODE__TRYRUN_OUTPUT "" CACHE STRING "Output from TRY_RUN" FORCE)
35  cmake/linux/toolchain-x86_64-musl.cmake  Normal file
@@ -0,0 +1,35 @@
+set (CMAKE_TRY_COMPILE_TARGET_TYPE STATIC_LIBRARY)
+
+set (CMAKE_SYSTEM_NAME "Linux")
+set (CMAKE_SYSTEM_PROCESSOR "x86_64")
+set (CMAKE_C_COMPILER_TARGET "x86_64-linux-musl")
+set (CMAKE_CXX_COMPILER_TARGET "x86_64-linux-musl")
+set (CMAKE_ASM_COMPILER_TARGET "x86_64-linux-musl")
+
+set (TOOLCHAIN_PATH "${CMAKE_CURRENT_LIST_DIR}/../../contrib/sysroot/linux-x86_64-musl")
+
+set (CMAKE_SYSROOT "${TOOLCHAIN_PATH}")
+
+find_program (LLVM_AR_PATH NAMES "llvm-ar" "llvm-ar-13" "llvm-ar-12" "llvm-ar-11" "llvm-ar-10" "llvm-ar-9" "llvm-ar-8")
+find_program (LLVM_RANLIB_PATH NAMES "llvm-ranlib" "llvm-ranlib-13" "llvm-ranlib-12" "llvm-ranlib-11" "llvm-ranlib-10" "llvm-ranlib-9")
+
+set (CMAKE_AR "${LLVM_AR_PATH}" CACHE FILEPATH "" FORCE)
+set (CMAKE_RANLIB "${LLVM_RANLIB_PATH}" CACHE FILEPATH "" FORCE)
+
+set (CMAKE_C_FLAGS_INIT "${CMAKE_C_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}")
+set (CMAKE_CXX_FLAGS_INIT "${CMAKE_CXX_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}")
+set (CMAKE_ASM_FLAGS_INIT "${CMAKE_ASM_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}")
+
+set (LINKER_NAME "ld.lld" CACHE STRING "" FORCE)
+
+set (CMAKE_EXE_LINKER_FLAGS_INIT "-fuse-ld=lld")
+set (CMAKE_SHARED_LINKER_FLAGS_INIT "-fuse-ld=lld")
+
+set (HAS_PRE_1970_EXITCODE "0" CACHE STRING "Result from TRY_RUN" FORCE)
+set (HAS_PRE_1970_EXITCODE__TRYRUN_OUTPUT "" CACHE STRING "Output from TRY_RUN" FORCE)
+
+set (HAS_POST_2038_EXITCODE "0" CACHE STRING "Result from TRY_RUN" FORCE)
+set (HAS_POST_2038_EXITCODE__TRYRUN_OUTPUT "" CACHE STRING "Output from TRY_RUN" FORCE)
+
+set (USE_MUSL 1)
+add_definitions(-DUSE_MUSL=1)
88  contrib/CMakeLists.txt  vendored
@@ -1,16 +1,5 @@
 # Third-party libraries may have substandard code.

-# Put all targets defined here and in added subfolders under "contrib/" folder in GUI-based IDEs by default.
-# Some of third-party projects may override CMAKE_FOLDER or FOLDER property of their targets, so they will
-# appear not in "contrib/" as originally planned here.
-get_filename_component (_current_dir_name "${CMAKE_CURRENT_LIST_DIR}" NAME)
-if (CMAKE_FOLDER)
-    set (CMAKE_FOLDER "${CMAKE_FOLDER}/${_current_dir_name}")
-else ()
-    set (CMAKE_FOLDER "${_current_dir_name}")
-endif ()
-unset (_current_dir_name)
-
 set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -w")
 set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -w")

@@ -49,6 +38,10 @@ add_subdirectory (replxx-cmake)
 add_subdirectory (unixodbc-cmake)
 add_subdirectory (nanodbc-cmake)

+if (USE_INTERNAL_CAPNP_LIBRARY AND NOT MISSING_INTERNAL_CAPNP_LIBRARY)
+    add_subdirectory(capnproto-cmake)
+endif ()
+
 if (ENABLE_FUZZING)
     add_subdirectory (libprotobuf-mutator-cmake)
 endif()
@@ -352,3 +345,76 @@ endif()
 if (USE_S2_GEOMETRY)
     add_subdirectory(s2geometry-cmake)
 endif()
+
+# Put all targets defined here and in subdirectories under "contrib/<immediate-subdir>" folders in GUI-based IDEs.
+# Some of third-party projects may override CMAKE_FOLDER or FOLDER property of their targets, so they would not appear
+# in "contrib/..." as originally planned, so we workaround this by fixing FOLDER properties of all targets manually,
+# instead of controlling it via CMAKE_FOLDER.
+
+function (ensure_target_rooted_in _target _folder)
+    # Skip INTERFACE library targets, since FOLDER property is not available for them.
+    get_target_property (_target_type "${_target}" TYPE)
+    if (_target_type STREQUAL "INTERFACE_LIBRARY")
+        return ()
+    endif ()
+
+    # Read the original FOLDER property value, if any.
+    get_target_property (_folder_prop "${_target}" FOLDER)
+
+    # Normalize that value, so we avoid possible repetitions in folder names.
+
+    if (NOT _folder_prop)
+        set (_folder_prop "")
+    endif ()
+
+    if (CMAKE_FOLDER AND _folder_prop MATCHES "^${CMAKE_FOLDER}/(.*)\$")
+        set (_folder_prop "${CMAKE_MATCH_1}")
+    endif ()
+
+    if (_folder AND _folder_prop MATCHES "^${_folder}/(.*)\$")
+        set (_folder_prop "${CMAKE_MATCH_1}")
+    endif ()
+
+    if (_folder)
+        set (_folder_prop "${_folder}/${_folder_prop}")
+    endif ()
+
+    if (CMAKE_FOLDER)
+        set (_folder_prop "${CMAKE_FOLDER}/${_folder_prop}")
+    endif ()
+
+    # Set the updated FOLDER property value back.
+    set_target_properties ("${_target}" PROPERTIES FOLDER "${_folder_prop}")
+endfunction ()
+
+function (ensure_own_targets_are_rooted_in _dir _folder)
+    get_directory_property (_targets DIRECTORY "${_dir}" BUILDSYSTEM_TARGETS)
+    foreach (_target IN LISTS _targets)
+        ensure_target_rooted_in ("${_target}" "${_folder}")
+    endforeach ()
+endfunction ()
+
+function (ensure_all_targets_are_rooted_in _dir _folder)
+    ensure_own_targets_are_rooted_in ("${_dir}" "${_folder}")
+
+    get_property (_sub_dirs DIRECTORY "${_dir}" PROPERTY SUBDIRECTORIES)
+    foreach (_sub_dir IN LISTS _sub_dirs)
+        ensure_all_targets_are_rooted_in ("${_sub_dir}" "${_folder}")
+    endforeach ()
+endfunction ()
+
+function (organize_ide_folders_2_level _dir)
+    get_filename_component (_dir_name "${_dir}" NAME)
+    ensure_own_targets_are_rooted_in ("${_dir}" "${_dir_name}")
+
+    # Note, that we respect only first two levels of nesting, we don't want to
+    # reorganize target folders further within each third-party dir.
+
+    get_property (_sub_dirs DIRECTORY "${_dir}" PROPERTY SUBDIRECTORIES)
+    foreach (_sub_dir IN LISTS _sub_dirs)
+        get_filename_component (_sub_dir_name "${_sub_dir}" NAME)
+        ensure_all_targets_are_rooted_in ("${_sub_dir}" "${_dir_name}/${_sub_dir_name}")
+    endforeach ()
+endfunction ()
+
+organize_ide_folders_2_level ("${CMAKE_CURRENT_LIST_DIR}")
2
contrib/capnproto
vendored
@ -1 +1 @@
Subproject commit a00ccd91b3746ef2ab51d40fe3265829949d1ace
Subproject commit c8189ec3c27dacbd4a3288e682473010e377f593
@ -45,6 +45,7 @@ set (CAPNP_SRCS
"${CAPNPROTO_SOURCE_DIR}/capnp/serialize-packed.c++"

"${CAPNPROTO_SOURCE_DIR}/capnp/schema.c++"
"${CAPNPROTO_SOURCE_DIR}/capnp/stream.capnp.c++"
"${CAPNPROTO_SOURCE_DIR}/capnp/schema-loader.c++"
"${CAPNPROTO_SOURCE_DIR}/capnp/dynamic.c++"
"${CAPNPROTO_SOURCE_DIR}/capnp/stringify.c++"
@ -63,6 +64,7 @@ set (CAPNPC_SRCS
"${CAPNPROTO_SOURCE_DIR}/capnp/compiler/lexer.c++"
"${CAPNPROTO_SOURCE_DIR}/capnp/compiler/grammar.capnp.c++"
"${CAPNPROTO_SOURCE_DIR}/capnp/compiler/parser.c++"
"${CAPNPROTO_SOURCE_DIR}/capnp/compiler/generics.c++"
"${CAPNPROTO_SOURCE_DIR}/capnp/compiler/node-translator.c++"
"${CAPNPROTO_SOURCE_DIR}/capnp/compiler/compiler.c++"
"${CAPNPROTO_SOURCE_DIR}/capnp/schema-parser.c++"
2
contrib/fastops
vendored
@ -1 +1 @@
Subproject commit 012b777df9e2d145a24800a6c8c3d4a0249bb09e
Subproject commit 1460583af7d13c0e980ce46aec8ee9400314669a
@ -18,8 +18,10 @@
 * Define overrides for non-standard allocator-related functions if they are
 * present on the system.
 */
#define JEMALLOC_OVERRIDE_MEMALIGN
#define JEMALLOC_OVERRIDE_VALLOC
#if !defined(USE_MUSL)
#define JEMALLOC_OVERRIDE_MEMALIGN
#define JEMALLOC_OVERRIDE_VALLOC
#endif

/*
 * At least Linux omits the "const" in:
@ -1,6 +1,6 @@
// OSX does not have this for system alloc functions, so you will get
// "exception specification in declaration" error.
#if defined(__APPLE__) || defined(__FreeBSD__)
#if defined(__APPLE__) || defined(__FreeBSD__) || defined(USE_MUSL)
# undef JEMALLOC_NOTHROW
# define JEMALLOC_NOTHROW
@ -13,12 +13,14 @@
 * Define overrides for non-standard allocator-related functions if they are
 * present on the system.
 */
#define JEMALLOC_OVERRIDE___LIBC_CALLOC
#define JEMALLOC_OVERRIDE___LIBC_FREE
#define JEMALLOC_OVERRIDE___LIBC_MALLOC
#define JEMALLOC_OVERRIDE___LIBC_MEMALIGN
#define JEMALLOC_OVERRIDE___LIBC_REALLOC
#define JEMALLOC_OVERRIDE___LIBC_VALLOC
#if !defined(USE_MUSL)
#define JEMALLOC_OVERRIDE___LIBC_CALLOC
#define JEMALLOC_OVERRIDE___LIBC_FREE
#define JEMALLOC_OVERRIDE___LIBC_MALLOC
#define JEMALLOC_OVERRIDE___LIBC_MEMALIGN
#define JEMALLOC_OVERRIDE___LIBC_REALLOC
#define JEMALLOC_OVERRIDE___LIBC_VALLOC
#endif
/* #undef JEMALLOC_OVERRIDE___POSIX_MEMALIGN */

/*
@ -47,6 +47,7 @@ set(SRCS
)

add_library(cxx ${SRCS})
set_target_properties(cxx PROPERTIES FOLDER "contrib/libcxx-cmake")

target_include_directories(cxx SYSTEM BEFORE PUBLIC $<BUILD_INTERFACE:${LIBCXX_SOURCE_DIR}/include>)
target_compile_definitions(cxx PRIVATE -D_LIBCPP_BUILDING_LIBRARY -DLIBCXX_BUILDING_LIBCXXABI)
@ -56,6 +57,10 @@ if (USE_UNWIND)
target_compile_definitions(cxx PUBLIC -DSTD_EXCEPTION_HAS_STACK_TRACE=1)
endif ()

if (USE_MUSL)
target_compile_definitions(cxx PUBLIC -D_LIBCPP_HAS_MUSL_LIBC=1)
endif ()

# Override the deduced attribute support that causes error.
if (OS_DARWIN AND COMPILER_GCC)
add_compile_definitions(_LIBCPP_INIT_PRIORITY_MAX)
@ -22,6 +22,7 @@ set(SRCS
)

add_library(cxxabi ${SRCS})
set_target_properties(cxxabi PROPERTIES FOLDER "contrib/libcxxabi-cmake")

# Third party library may have substandard code.
target_compile_options(cxxabi PRIVATE -w)
@ -39,6 +39,7 @@ set(LIBUNWIND_SOURCES
${LIBUNWIND_ASM_SOURCES})

add_library(unwind ${LIBUNWIND_SOURCES})
set_target_properties(unwind PROPERTIES FOLDER "contrib/libunwind-cmake")

target_include_directories(unwind SYSTEM BEFORE PUBLIC $<BUILD_INTERFACE:${LIBUNWIND_SOURCE_DIR}/include>)
target_compile_definitions(unwind PRIVATE -D_LIBUNWIND_NO_HEAP=1 -D_DEBUG -D_LIBUNWIND_IS_NATIVE_ONLY)
@ -98,7 +98,9 @@
#define HAVE_BCOPY 1

/* Define to 1 if you have the <bits/types.h> header file. */
#define HAVE_BITS_TYPES_H 1
#if !defined(USE_MUSL)
#define HAVE_BITS_TYPES_H 1
#endif

/* Define to 1 if you have the `chroot' function. */
#define HAVE_CHROOT 1
2
contrib/replxx
vendored
@ -1 +1 @@
Subproject commit f97765df14f4a6236d69b8f14b53ef2051ebd95a
Subproject commit b0c266c2d8a835784181e17292b421848c78c6b8
2
contrib/sysroot
vendored
@ -1 +1 @@
Subproject commit 002415524b5d14124bb8a61a3ce7ac65774f5479
Subproject commit 6172893931e19b028f9cabb7095a44361be863df
@ -189,7 +189,7 @@ function clone_submodules
)

git submodule sync
git submodule update --depth 1 --init --recursive "${SUBMODULES_TO_UPDATE[@]}"
git submodule update --depth 1 --init "${SUBMODULES_TO_UPDATE[@]}"
git submodule foreach git reset --hard
git submodule foreach git checkout @ -f
git submodule foreach git clean -xfd
@ -1,16 +1,22 @@
# docker build -t clickhouse/kerberized-hadoop .

FROM sequenceiq/hadoop-docker:2.7.0
RUN sed -i -e 's/^\#baseurl/baseurl/' /etc/yum.repos.d/CentOS-Base.repo
RUN sed -i -e 's/^mirrorlist/#mirrorlist/' /etc/yum.repos.d/CentOS-Base.repo
RUN sed -i -e 's#http://mirror.centos.org/#http://vault.centos.org/#' /etc/yum.repos.d/CentOS-Base.repo

RUN sed -i -e 's/^\#baseurl/baseurl/' /etc/yum.repos.d/CentOS-Base.repo && \
    sed -i -e 's/^mirrorlist/#mirrorlist/' /etc/yum.repos.d/CentOS-Base.repo && \
    sed -i -e 's#http://mirror.centos.org/#http://vault.centos.org/#' /etc/yum.repos.d/CentOS-Base.repo

# https://community.letsencrypt.org/t/rhel-centos-6-openssl-client-compatibility-after-dst-root-ca-x3-expiration/161032/81
RUN sed -i s/xMDkzMDE0MDExNVow/0MDkzMDE4MTQwM1ow/ /etc/pki/tls/certs/ca-bundle.crt

RUN yum clean all && \
    rpm --rebuilddb && \
    yum -y update && \
    yum -y install yum-plugin-ovl && \
    yum --quiet -y install krb5-workstation.x86_64

RUN cd /tmp && \
    curl http://archive.apache.org/dist/commons/daemon/source/commons-daemon-1.0.15-src.tar.gz -o commons-daemon-1.0.15-src.tar.gz && \
    tar xzf commons-daemon-1.0.15-src.tar.gz && \
    cd commons-daemon-1.0.15-src/src/native/unix && \
    ./configure && \
@ -37,7 +37,9 @@ RUN set -x \
    || echo "WARNING: Some file was just downloaded from the internet without any validation and we are installing it into the system"; } \
    && dpkg -i "${PKG_VERSION}.deb"

CMD echo "Running PVS version $PKG_VERSION" && cd /repo_folder && pvs-studio-analyzer credentials $LICENCE_NAME $LICENCE_KEY -o ./licence.lic \
ENV CCACHE_DIR=/test_output/ccache

CMD echo "Running PVS version $PKG_VERSION" && mkdir -p $CCACHE_DIR && cd /repo_folder && pvs-studio-analyzer credentials $LICENCE_NAME $LICENCE_KEY -o ./licence.lic \
    && cmake . -D"ENABLE_EMBEDDED_COMPILER"=OFF -D"USE_INTERNAL_PROTOBUF_LIBRARY"=OFF -D"USE_INTERNAL_GRPC_LIBRARY"=OFF -DCMAKE_C_COMPILER=clang-13 -DCMAKE_CXX_COMPILER=clang\+\+-13 \
    && ninja re2_st clickhouse_grpc_protos \
    && pvs-studio-analyzer analyze -o pvs-studio.log -e contrib -j 4 -l ./licence.lic; \
@ -1,5 +1,7 @@
#!/bin/bash

# yaml check is not the best one

cd /ClickHouse/utils/check-style || echo -e "failure\tRepo not found" > /test_output/check_status.tsv
./check-style -n |& tee /test_output/style_output.txt
./check-typos |& tee /test_output/typos_output.txt
@ -47,13 +47,17 @@ then
fi

URL="https://builds.clickhouse.com/master/${DIR}/clickhouse"
echo
echo "Will download ${URL}"
echo
curl -O "${URL}" && chmod a+x clickhouse &&
echo
echo "Successfully downloaded the ClickHouse binary, you can run it as:
    ./clickhouse"

if [ "${OS}" = "Linux" ]
then
    echo
    echo "You can also install it:
    sudo ./clickhouse install"
fi
@ -37,7 +37,7 @@ Next, you need to download the source files onto your working machine. This is c

In the command line terminal run:

    git clone --recursive git@github.com:your_github_username/ClickHouse.git
    git clone git@github.com:your_github_username/ClickHouse.git
    cd ClickHouse

Note: please substitute *your_github_username* with what is appropriate!
@ -79,7 +79,7 @@ After successfully running this command you will be able to pull updates from th

Working with submodules in git could be painful. The following commands will help to manage them:

    # ! each command accepts --recursive
    # ! each command accepts
    # Update remote URLs for submodules. Rarely needed.
    git submodule sync
    # Add new submodules
@ -92,16 +92,16 @@ Working with submodules in git could be painful. Next commands will help to mana
The next commands would help you to reset all submodules to the initial state (!WARNING! - any changes inside will be deleted):

    # Synchronizes submodules' remote URL with .gitmodules
    git submodule sync --recursive
    git submodule sync
    # Update the registered submodules, initializing those not yet initialized
    git submodule update --init --recursive
    git submodule update --init
    # Reset all changes done after HEAD
    git submodule foreach git reset --hard
    # Clean files from .gitignore
    git submodule foreach git clean -xfd
    # Repeat the last 4 commands for all submodules
    git submodule foreach git submodule sync --recursive
    git submodule foreach git submodule update --init --recursive
    git submodule foreach git submodule sync
    git submodule foreach git submodule update --init
    git submodule foreach git submodule foreach git reset --hard
    git submodule foreach git submodule foreach git clean -xfd
@ -36,7 +36,7 @@ CREATE TABLE [IF NOT EXISTS] [db.]table_name

Create a table in ClickHouse that allows reading data from a MongoDB collection:

``` text
``` sql
CREATE TABLE mongo_table
(
    key UInt64,
@ -46,7 +46,7 @@ CREATE TABLE mongo_table

To read from an SSL secured MongoDB server:

``` text
``` sql
CREATE TABLE mongo_table_ssl
(
    key UInt64,
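Once such a table exists, it can be queried like any other ClickHouse table. A minimal sketch, assuming the `mongo_table` defined above:

``` sql
SELECT COUNT() FROM mongo_table;
```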
@ -320,7 +320,7 @@ SELECT count() FROM table WHERE u64 * i32 == 10 AND u64 * length(s) >= 1234

- `ngrambf_v1(n, size_of_bloom_filter_in_bytes, number_of_hash_functions, random_seed)`

    Stores a [Bloom filter](https://en.wikipedia.org/wiki/Bloom_filter) that contains all ngrams from a block of data. Works only with strings. Can be used for optimization of `equals`, `like` and `in` expressions.
    Stores a [Bloom filter](https://en.wikipedia.org/wiki/Bloom_filter) that contains all ngrams from a block of data. Works only with datatypes: [String](../../../sql-reference/data-types/string.md), [FixedString](../../../sql-reference/data-types/fixedstring.md) and [Map](../../../sql-reference/data-types/map.md). Can be used for optimization of `EQUALS`, `LIKE` and `IN` expressions.

    - `n` — ngram size,
    - `size_of_bloom_filter_in_bytes` — Bloom filter size in bytes (you can use large values here, for example, 256 or 512, because it can be compressed well).
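For illustration, a minimal sketch of a full index definition using these parameters (the table and index names here are hypothetical, not taken from the patch):

``` sql
CREATE TABLE text_events
(
    s String,
    INDEX s_ngrams s TYPE ngrambf_v1(3, 512, 2, 0) GRANULARITY 1
)
ENGINE = MergeTree
ORDER BY s;
```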
@ -337,7 +337,9 @@ SELECT count() FROM table WHERE u64 * i32 == 10 AND u64 * length(s) >= 1234

    Supported data types: `Int*`, `UInt*`, `Float*`, `Enum`, `Date`, `DateTime`, `String`, `FixedString`, `Array`, `LowCardinality`, `Nullable`, `UUID`, `Map`.

    For the `Map` data type, the client can specify if the index should be created for keys or for values using the [mapKeys](../../../sql-reference/functions/tuple-map-functions.md#mapkeys) or [mapValues](../../../sql-reference/functions/tuple-map-functions.md#mapvalues) function.

    The following functions can use the filter: [equals](../../../sql-reference/functions/comparison-functions.md), [notEquals](../../../sql-reference/functions/comparison-functions.md), [in](../../../sql-reference/functions/in-functions.md), [notIn](../../../sql-reference/functions/in-functions.md), [has](../../../sql-reference/functions/array-functions.md#hasarr-elem).

    Example of index creation for `Map` data type

@ -346,9 +348,6 @@ INDEX map_key_index mapKeys(map_column) TYPE bloom_filter GRANULARITY 1
INDEX map_key_index mapValues(map_column) TYPE bloom_filter GRANULARITY 1
```

The following functions can use it: [equals](../../../sql-reference/functions/comparison-functions.md), [notEquals](../../../sql-reference/functions/comparison-functions.md), [in](../../../sql-reference/functions/in-functions.md), [notIn](../../../sql-reference/functions/in-functions.md), [has](../../../sql-reference/functions/array-functions.md).

<!-- -->

``` sql
INDEX sample_index (u64 * length(s)) TYPE minmax GRANULARITY 4
@ -5,7 +5,7 @@ toc_title: Distributed

# Distributed Table Engine {#distributed}

Tables with Distributed engine do not store any data by their own, but allow distributed query processing on multiple servers.
Tables with Distributed engine do not store any data of their own, but allow distributed query processing on multiple servers.
Reading is automatically parallelized. During a read, the table indexes on remote servers are used, if there are any.

The Distributed engine accepts parameters:
@ -167,20 +167,20 @@ If this parameter is set to `true`, the write operation selects the first health

If it is set to `false` (the default), data is written to all replicas. In essence, this means that the Distributed table replicates data itself. This is worse than using replicated tables, because the consistency of replicas is not checked, and over time they will contain slightly different data.

To select the shard that a row of data is sent to, the sharding expression is analyzed, and its remainder is taken from dividing it by the total weight of the shards. The row is sent to the shard that corresponds to the half-interval of the remainders from `prev_weight` to `prev_weights + weight`, where `prev_weights` is the total weight of the shards with the smallest number, and `weight` is the weight of this shard. For example, if there are two shards, and the first has a weight of 9 while the second has a weight of 10, the row will be sent to the first shard for the remainders from the range \[0, 9), and to the second for the remainders from the range \[9, 19).
To select the shard that a row of data is sent to, the sharding expression is analyzed, and its remainder is taken from dividing it by the total weight of the shards. The row is sent to the shard that corresponds to the half-interval of the remainders from `prev_weights` to `prev_weights + weight`, where `prev_weights` is the total weight of the shards with the smallest number, and `weight` is the weight of this shard. For example, if there are two shards, and the first has a weight of 9 while the second has a weight of 10, the row will be sent to the first shard for the remainders from the range \[0, 9), and to the second for the remainders from the range \[9, 19).

The sharding expression can be any expression from constants and table columns that returns an integer. For example, you can use the expression `rand()` for random distribution of data, or `UserID` for distribution by the remainder from dividing the user’s ID (then the data of a single user will reside on a single shard, which simplifies running IN and JOIN by users). If one of the columns is not distributed evenly enough, you can wrap it in a hash function: intHash64(UserID).
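A minimal sketch of a definition that uses such a sharding expression (the cluster, database, and table names here are hypothetical):

``` sql
CREATE TABLE hits_all AS hits_local
ENGINE = Distributed(my_cluster, my_db, hits_local, intHash64(UserID));
```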
A simple reminder from the division is a limited solution for sharding and isn’t always appropriate. It works for medium and large volumes of data (dozens of servers), but not for very large volumes of data (hundreds of servers or more). In the latter case, use the sharding scheme required by the subject area, rather than using entries in Distributed tables.
A simple remainder from the division is a limited solution for sharding and isn’t always appropriate. It works for medium and large volumes of data (dozens of servers), but not for very large volumes of data (hundreds of servers or more). In the latter case, use the sharding scheme required by the subject area, rather than using entries in Distributed tables.

SELECT queries are sent to all the shards and work regardless of how data is distributed across the shards (they can be distributed completely randomly). When you add a new shard, you do not have to transfer the old data to it. You can write new data with a heavier weight – the data will be distributed slightly unevenly, but queries will work correctly and efficiently.
SELECT queries are sent to all the shards and work regardless of how data is distributed across the shards (they can be distributed completely randomly). When you add a new shard, you do not have to transfer old data into it. Instead, you can write new data to it by using a heavier weight – the data will be distributed slightly unevenly, but queries will work correctly and efficiently.

You should be concerned about the sharding scheme in the following cases:

- Queries are used that require joining data (IN or JOIN) by a specific key. If data is sharded by this key, you can use local IN or JOIN instead of GLOBAL IN or GLOBAL JOIN, which is much more efficient.
- A large number of servers is used (hundreds or more) with a large number of small queries (queries of individual clients - websites, advertisers, or partners). In order for the small queries to not affect the entire cluster, it makes sense to locate data for a single client on a single shard. Alternatively, as we’ve done in Yandex.Metrica, you can set up bi-level sharding: divide the entire cluster into “layers”, where a layer may consist of multiple shards. Data for a single client is located on a single layer, but shards can be added to a layer as necessary, and data is randomly distributed within them. Distributed tables are created for each layer, and a single shared distributed table is created for global queries.

Data is written asynchronously. When inserted in the table, the data block is just written to the local file system. The data is sent to the remote servers in the background as soon as possible. The period for sending data is managed by the [distributed_directory_monitor_sleep_time_ms](../../../operations/settings/settings.md#distributed_directory_monitor_sleep_time_ms) and [distributed_directory_monitor_max_sleep_time_ms](../../../operations/settings/settings.md#distributed_directory_monitor_max_sleep_time_ms) settings. The `Distributed` engine sends each file with inserted data separately, but you can enable batch sending of files with the [distributed_directory_monitor_batch_inserts](../../../operations/settings/settings.md#distributed_directory_monitor_batch_inserts) setting. This setting improves cluster performance by better utilizing local server and network resources. You should check whether data is sent successfully by checking the list of files (data waiting to be sent) in the table directory: `/var/lib/clickhouse/data/database/table/`. The number of threads performing background tasks can be set by [background_distributed_schedule_pool_size](../../../operations/settings/settings.md#background_distributed_schedule_pool_size) setting.
Data is written asynchronously. When inserted in the table, the data block is just written to the local file system. The data is sent to the remote servers in the background as soon as possible. The periodicity for sending data is managed by the [distributed_directory_monitor_sleep_time_ms](../../../operations/settings/settings.md#distributed_directory_monitor_sleep_time_ms) and [distributed_directory_monitor_max_sleep_time_ms](../../../operations/settings/settings.md#distributed_directory_monitor_max_sleep_time_ms) settings. The `Distributed` engine sends each file with inserted data separately, but you can enable batch sending of files with the [distributed_directory_monitor_batch_inserts](../../../operations/settings/settings.md#distributed_directory_monitor_batch_inserts) setting. This setting improves cluster performance by better utilizing local server and network resources. You should check whether data is sent successfully by checking the list of files (data waiting to be sent) in the table directory: `/var/lib/clickhouse/data/database/table/`. The number of threads performing background tasks can be set by [background_distributed_schedule_pool_size](../../../operations/settings/settings.md#background_distributed_schedule_pool_size) setting.

If the server ceased to exist or had a rough restart (for example, after a device failure) after an INSERT to a Distributed table, the inserted data might be lost. If a damaged data part is detected in the table directory, it is transferred to the `broken` subdirectory and no longer used.
|
@ -27,10 +27,11 @@ It is recommended to use official pre-compiled `deb` packages for Debian or Ubun
|
||||
{% include 'install/deb.sh' %}
|
||||
```
|
||||
|
||||
If you want to use the most recent version, replace `stable` with `testing` (this is recommended for your testing environments).
|
||||
You can replace `stable` with `lts` or `testing` to use different [“release trains”](../faq/operations/production.md) based on your needs.
|
||||
|
||||
You can also download and install packages manually from [here](https://repo.clickhouse.com/deb/stable/main/).
|
||||
|
||||
|
||||
#### Packages {#packages}
|
||||
|
||||
- `clickhouse-common-static` — Installs ClickHouse compiled binary files.
|
||||
|
@ -128,6 +128,8 @@ You can pass parameters to `clickhouse-client` (all parameters have a default va
|
||||
- `--history_file` — Path to a file containing command history.
|
||||
- `--param_<name>` — Value for a [query with parameters](#cli-queries-with-parameters).
|
||||
- `--hardware-utilization` — Print hardware utilization information in progress bar.
|
||||
- `--print-profile-events` – Print `ProfileEvents` packets.
|
||||
- `--profile-events-delay-ms` – Delay between printing `ProfileEvents` packets (-1 - print only totals, 0 - print every single packet).
|
||||
|
||||
Since version 20.5, `clickhouse-client` has automatic syntax highlighting (always enabled).
|
||||
|
||||
|
@ -166,5 +166,6 @@ toc_title: Adopters
| <a href="https://beeline.ru/" class="favicon">Beeline</a> | Telecom | Data Platform | — | — | [Blog post, July 2021](https://habr.com/en/company/beeline/blog/567508/) |
| <a href="https://ecommpay.com/" class="favicon">Ecommpay</a> | Payment Processing | Logs | — | — | [Video, Nov 2019](https://www.youtube.com/watch?v=d3GdZTOWGLk) |
| <a href="https://omnicomm.ru/" class="favicon">Omnicomm</a> | Transportation Monitoring | — | — | — | [Facebook post, Oct 2021](https://www.facebook.com/OmnicommTeam/posts/2824479777774500) |
| <a href="https://ok.ru" class="favicon">Ok.ru</a> | Social Network | — | 72 servers | 810 TB compressed, 50bn rows/day, 1.5 TB/day | [SmartData conference, Oct 2021](https://assets.ctfassets.net/oxjq45e8ilak/4JPHkbJenLgZhBGGyyonFP/57472ec6987003ec4078d0941740703b/____________________ClickHouse_______________________.pdf) |

[Original article](https://clickhouse.com/docs/en/introduction/adopters/) <!--hide-->
@ -643,7 +643,7 @@ On hosts with low RAM and swap, you possibly need setting `max_server_memory_usa

## max_concurrent_queries {#max-concurrent-queries}

The maximum number of simultaneously processed queries related to MergeTree table. Queries may be limited by other settings: [max_concurrent_queries_for_all_users](#max-concurrent-queries-for-all-users), [min_marks_to_honor_max_concurrent_queries](#min-marks-to-honor-max-concurrent-queries).
The maximum number of simultaneously processed queries related to MergeTree table. Queries may be limited by other settings: [max_concurrent_queries_for_user](#max-concurrent-queries-for-user), [max_concurrent_queries_for_all_users](#max-concurrent-queries-for-all-users), [min_marks_to_honor_max_concurrent_queries](#min-marks-to-honor-max-concurrent-queries).

!!! info "Note"
    These settings can be modified at runtime and will take effect immediately. Queries that are already running will remain unchanged.
@ -659,6 +659,21 @@ Possible values:
<max_concurrent_queries>100</max_concurrent_queries>
```

## max_concurrent_queries_for_user {#max-concurrent-queries-for-user}

The maximum number of simultaneously processed queries related to MergeTree table per user.

Possible values:

- Positive integer.
- 0 — Disabled.

**Example**

``` xml
<max_concurrent_queries_for_user>5</max_concurrent_queries_for_user>
```

## max_concurrent_queries_for_all_users {#max-concurrent-queries-for-all-users}

Throw exception if the value of this setting is less or equal than the current number of simultaneously processed queries.
@ -10,7 +10,7 @@ Columns:
- `[]` — All users share the same quota.
- `['user_name']` — Connections with the same user name share the same quota.
- `['ip_address']` — Connections from the same IP share the same quota.
- `['client_key']` — Connections with the same key share the same quota. A key must be explicitly provided by a client. When using [clickhouse-client](../../interfaces/cli.md), pass a key value in the `--quota-key` parameter, or use the `quota_key` parameter in the client configuration file. When using HTTP interface, use the `X-ClickHouse-Quota` header.
- `['client_key']` — Connections with the same key share the same quota. A key must be explicitly provided by a client. When using [clickhouse-client](../../interfaces/cli.md), pass a key value in the `--quota_key` parameter, or use the `quota_key` parameter in the client configuration file. When using HTTP interface, use the `X-ClickHouse-Quota` header.
- `['user_name', 'client_key']` — Connections with the same `client_key` share the same quota. If a key isn’t provided by a client, the quota is tracked for `user_name`.
- `['client_key', 'ip_address']` — Connections with the same `client_key` share the same quota. If a key isn’t provided by a client, the quota is tracked for `ip_address`.
- `durations` ([Array](../../sql-reference/data-types/array.md)([UInt64](../../sql-reference/data-types/int-uint.md))) — Time interval lengths in seconds.
@ -70,7 +70,7 @@ For HDD, enable the write cache.

## File System {#file-system}

Ext4 is the most reliable option. Set the mount options `noatime, nobarrier`.
Ext4 is the most reliable option. Set the mount option `noatime`.
XFS is also suitable, but it hasn’t been as thoroughly tested with ClickHouse.
Most other file systems should also work fine. File systems with delayed allocation work better.
@ -155,6 +155,60 @@ Configuration example:
LAYOUT(COMPLEX_KEY_HASHED())
```

### complex_key_sparse_hashed {#complex-key-sparse-hashed}

This type of storage is for use with composite [keys](../../../sql-reference/dictionaries/external-dictionaries/external-dicts-dict-structure.md). Similar to `sparse_hashed`.

Configuration example:

``` xml
<layout>
  <complex_key_sparse_hashed />
</layout>
```

``` sql
LAYOUT(COMPLEX_KEY_SPARSE_HASHED())
```

### hashed_array {#dicts-external_dicts_dict_layout-hashed-array}

The dictionary is completely stored in memory. Each attribute is stored in an array. The key attribute is stored in the form of a hashed table, where the value is an index into the attributes array. The dictionary can contain any number of elements with any identifiers. In practice, the number of keys can reach tens of millions of items.

All types of sources are supported. When updating, data (from a file or from a table) is read in its entirety.

Configuration example:

``` xml
<layout>
  <hashed_array>
  </hashed_array>
</layout>
```

or

``` sql
LAYOUT(HASHED_ARRAY())
```

### complex_key_hashed_array {#complex-key-hashed-array}

This type of storage is for use with composite [keys](../../../sql-reference/dictionaries/external-dictionaries/external-dicts-dict-structure.md). Similar to `hashed_array`.

Configuration example:

``` xml
<layout>
  <complex_key_hashed_array />
</layout>
```

``` sql
LAYOUT(COMPLEX_KEY_HASHED_ARRAY())
```

### range_hashed {#range-hashed}

The dictionary is stored in memory in the form of a hash table with an ordered array of ranges and their corresponding values.
@ -302,8 +356,9 @@ When searching for a dictionary, the cache is searched first. For each block of

If keys are not found in the dictionary, then an update-cache task is created and added to the update queue. Update queue properties can be controlled with the settings `max_update_queue_size`, `update_queue_push_timeout_milliseconds`, `query_wait_timeout_milliseconds`, `max_threads_for_updates`.

For cache dictionaries, the expiration [lifetime](../../../sql-reference/dictionaries/external-dictionaries/external-dicts-dict-lifetime.md) of data in the cache can be set. If more time than `lifetime` has passed since loading the data in a cell, the cell’s value is not used and key becomes expired, and it is re-requested the next time it needs to be used this behaviour can be configured with setting `allow_read_expired_keys`.
This is the least effective of all the ways to store dictionaries. The speed of the cache depends strongly on correct settings and the usage scenario. A cache type dictionary performs well only when the hit rates are high enough (recommended 99% and higher). You can view the average hit rate in the `system.dictionaries` table.
For cache dictionaries, the expiration [lifetime](../../../sql-reference/dictionaries/external-dictionaries/external-dicts-dict-lifetime.md) of data in the cache can be set. If more time than `lifetime` has passed since loading the data in a cell, the cell’s value is not used and the key becomes expired. The key is re-requested the next time it needs to be used. This behaviour can be configured with the setting `allow_read_expired_keys`.

This is the least effective of all the ways to store dictionaries. The speed of the cache depends strongly on correct settings and the usage scenario. A cache type dictionary performs well only when the hit rates are high enough (recommended 99% and higher). You can view the average hit rate in the [system.dictionaries](../../../operations/system-tables/dictionaries.md) table.

If the setting `allow_read_expired_keys` is set to 1 (the default is 0), the dictionary can support asynchronous updates: if a client requests keys and all of them are in the cache, but some of them are expired, the dictionary will return the expired keys to the client and request them asynchronously from the source.
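As a rough sketch of how these knobs might be combined in a DDL layout clause (assuming, by analogy with `SIZE_IN_CELLS`, that the settings are accepted as uppercase parameters; this spelling is not taken from the patch):

``` sql
-- hypothetical parameter spelling, mirroring the XML setting names
LAYOUT(CACHE(SIZE_IN_CELLS 1000000 ALLOW_READ_EXPIRED_KEYS 1 MAX_UPDATE_QUEUE_SIZE 100000))
```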
@ -368,7 +423,7 @@ Similar to `cache`, but stores data on SSD and index in RAM. All cache dictionar
        <!-- Size of RAM buffer in bytes for aggregating elements before flushing to SSD. -->
        <write_buffer_size>1048576</write_buffer_size>
        <!-- Path where cache file will be stored. -->
        <path>/var/lib/clickhouse/clickhouse_dictionaries/test_dict</path>
        <path>/var/lib/clickhouse/user_files/test_dict</path>
    </ssd_cache>
</layout>
```
@ -377,7 +432,7 @@ or

``` sql
LAYOUT(SSD_CACHE(BLOCK_SIZE 4096 FILE_SIZE 16777216 READ_BUFFER_SIZE 1048576
    PATH ./user_files/test_dict))
    PATH '/var/lib/clickhouse/user_files/test_dict'))
```

### complex_key_ssd_cache {#complex-key-ssd-cache}
@ -2427,3 +2427,39 @@ Type: [UInt32](../../sql-reference/data-types/int-uint.md).
**See Also**

- [shardNum()](#shard-num) function example also contains `shardCount()` function call.

## getOSKernelVersion {#getoskernelversion}

Returns a string with the current OS kernel version.

**Syntax**

``` sql
getOSKernelVersion()
```

**Arguments**

- None.

**Returned value**

- The current OS kernel version.

Type: [String](../../sql-reference/data-types/string.md).

**Example**

Query:

``` sql
SELECT getOSKernelVersion();
```

Result:

``` text
┌─getOSKernelVersion()────┐
│ Linux 4.15.0-55-generic │
└─────────────────────────┘
```
@ -270,3 +270,40 @@ Result:
│ [['abc','123'],['8','"hkl"']] │
└───────────────────────────────────────────────────────────────────────┘
```

## ngrams {#ngrams}

Splits the UTF-8 string into n-grams of `ngramsize` symbols.

**Syntax**

``` sql
ngrams(string, ngramsize)
```

**Arguments**

- `string` — String. [String](../../sql-reference/data-types/string.md) or [FixedString](../../sql-reference/data-types/fixedstring.md).
- `ngramsize` — The size of an n-gram. [UInt](../../sql-reference/data-types/int-uint.md).

**Returned values**

- Array with n-grams.

Type: [Array](../../sql-reference/data-types/array.md)([FixedString](../../sql-reference/data-types/fixedstring.md)).

**Example**

Query:

``` sql
SELECT ngrams('ClickHouse', 3);
```

Result:

``` text
┌─ngrams('ClickHouse', 3)───────────────────────────┐
│ ['Cli','lic','ick','ckH','kHo','Hou','ous','use'] │
└───────────────────────────────────────────────────┘
```
@ -20,7 +20,7 @@ Subquery is another `SELECT` query that may be specified in parenthesis inside `

When `FINAL` is specified, ClickHouse fully merges the data before returning the result and thus performs all data transformations that happen during merges for the given table engine.

It is applicable when selecting data from tables that use the [MergeTree](../../../engines/table-engines/mergetree-family/mergetree.md)-engine family (except `GraphiteMergeTree`). Also supported for:
It is applicable when selecting data from tables that use the [MergeTree](../../../engines/table-engines/mergetree-family/mergetree.md)-engine family. Also supported for:

- [Replicated](../../../engines/table-engines/mergetree-family/replication.md) versions of `MergeTree` engines.
- [View](../../../engines/table-engines/special/view.md), [Buffer](../../../engines/table-engines/special/buffer.md), [Distributed](../../../engines/table-engines/special/distributed.md), and [MaterializedView](../../../engines/table-engines/special/materializedview.md) engines that operate over other engines, provided they were created over `MergeTree`-engine tables.
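For illustration, a sketch of a query using the modifier against a hypothetical `ReplacingMergeTree` table:

``` sql
SELECT * FROM replacing_table FINAL WHERE key = 10;
```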
@ -41,7 +41,7 @@ To do this on Ubuntu, run in the command-line terminal:

Run in the command-line terminal:

    git clone --recursive git@github.com:your_github_username/ClickHouse.git
    git clone git@github.com:your_github_username/ClickHouse.git
    cd ClickHouse

Note: please substitute *your_github_username* with something appropriate!
@ -83,7 +83,7 @@ The ClickHouse repository uses `submodules`. That is what
Working with submodules in Git is painful. The following commands will help manage them:

    # ! each command accepts --recursive
    # ! each command accepts
    # Update remote URLs for submodules. Barely rare case
    git submodule sync
    # Add new submodules
@ -96,16 +96,16 @@ Working with submodules in Git is painful. The following commands
The next commands help reset all submodules to their initial state (WARNING! Any changes inside will be deleted):

    # Synchronizes submodules' remote URL with .gitmodules
    git submodule sync --recursive
    git submodule sync
    # Update the registered submodules with initialize not yet initialized
    git submodule update --init --recursive
    git submodule update --init
    # Reset all changes done after HEAD
    git submodule foreach git reset --hard
    # Clean files from .gitignore
    git submodule foreach git clean -xfd
    # Repeat last 4 commands for all submodule
    git submodule foreach git submodule sync --recursive
    git submodule foreach git submodule update --init --recursive
    git submodule foreach git submodule sync
    git submodule foreach git submodule update --init
    git submodule foreach git submodule foreach git reset --hard
    git submodule foreach git submodule foreach git clean -xfd

@ -78,7 +78,7 @@ For HDD, enable the write cache.

## File System {#file-system}

Ext4 is the most reliable option. Set the mount options `noatime, nobarrier`.
Ext4 is the most reliable option. Set the mount option `noatime`.
XFS is also suitable, but it hasn’t been as thoroughly tested with ClickHouse.
Most other file systems should also work fine. File systems with delayed allocation work better.
@ -40,7 +40,7 @@ ClickHouse does not run or build on 32-bit

Run in the terminal:

    git clone --recursive git@github.com:ClickHouse/ClickHouse.git
    git clone git@github.com:ClickHouse/ClickHouse.git
    cd ClickHouse

Replace the first occurrence of the word `ClickHouse` in the git command with the name of your GitHub account.
@ -82,7 +82,7 @@ ClickHouse does not run or build on 32-bit

Working with git submodules can be quite painful. The following commands will help keep them in order:

    # ! Each command accepts the --recursive argument
    # ! Each command accepts the argument
    # Update remote repository URLs for each submodule; used relatively rarely
    git submodule sync
    # Add new submodules
@ -96,16 +96,16 @@ ClickHouse does not run or build on 32-bit

    # Synchronizes submodules' remote URL with .gitmodules
    # Update remote repository URLs for each submodule
    git submodule sync --recursive
    git submodule sync
    # Update existing modules and add missing ones
    git submodule update --init --recursive
    git submodule update --init
    # Remove all changes in the submodule relative to HEAD
    git submodule foreach git reset --hard
    # Clean files ignored by .gitignore
    git submodule foreach git clean -xfd
    # Repeat the last 4 commands for each of the submodules
    git submodule foreach git submodule sync --recursive
    git submodule foreach git submodule update --init --recursive
    git submodule foreach git submodule sync
    git submodule foreach git submodule update --init
    git submodule foreach git submodule foreach git reset --hard
    git submodule foreach git submodule foreach git clean -xfd
@ -15,7 +15,7 @@ CREATE TABLE [IF NOT EXISTS] [db.]table_name
    name1 [type1],
    name2 [type2],
    ...
) ENGINE = MongoDB(host:port, database, collection, user, password);
) ENGINE = MongoDB(host:port, database, collection, user, password [, options]);
```

**Engine parameters**
@ -30,11 +30,13 @@ CREATE TABLE [IF NOT EXISTS] [db.]table_name

- `password` — user password.

- `options` — MongoDB connection string options (optional parameter).

## Usage Examples {#usage-example}

A table in ClickHouse for reading data from a MongoDB collection:
Creating a table in ClickHouse for reading data from a MongoDB collection:

``` text
``` sql
CREATE TABLE mongo_table
(
    key UInt64,
@ -42,6 +44,18 @@ CREATE TABLE mongo_table
) ENGINE = MongoDB('mongo1:27017', 'test', 'simple_table', 'testuser', 'clickhouse');
```

Reading from an SSL-secured MongoDB server:

``` sql
CREATE TABLE mongo_table_ssl
(
    key UInt64,
    data String
) ENGINE = MongoDB('mongo2:27017', 'test', 'simple_table', 'testuser', 'clickhouse', 'ssl=true');
```

Query to the table:

``` sql