commit 55247bf3a9
Author: feng lv
Date:   2020-10-05 21:48:34 +08:00

    Merge branch 'master' of github.com:ClickHouse/ClickHouse into add-mutation-for-storagememory

559 changed files with 10037 additions and 5138 deletions

.github/codecov.yml (new file, +17)

@@ -0,0 +1,17 @@
codecov:
max_report_age: off
strict_yaml_branch: "master"
ignore:
- "contrib"
- "docs"
- "benchmark"
- "tests"
- "docker"
- "debian"
- "cmake"
comment: false
github_checks:
annotations: false

.gitmodules

@@ -107,7 +107,6 @@
 [submodule "contrib/grpc"]
 	path = contrib/grpc
 	url = https://github.com/ClickHouse-Extras/grpc.git
-	branch = v1.25.0
 [submodule "contrib/aws"]
 	path = contrib/aws
 	url = https://github.com/ClickHouse-Extras/aws-sdk-cpp.git
@@ -186,3 +185,4 @@
 [submodule "contrib/cyrus-sasl"]
 	path = contrib/cyrus-sasl
 	url = https://github.com/cyrusimap/cyrus-sasl
+	branch = cyrus-sasl-2.1


@@ -173,7 +173,7 @@ endif ()
 # Make sure the final executable has symbols exported
 set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -rdynamic")
 
-find_program (OBJCOPY_PATH NAMES "llvm-objcopy" "llvm-objcopy-10" "llvm-objcopy-9" "llvm-objcopy-8" "objcopy")
+find_program (OBJCOPY_PATH NAMES "llvm-objcopy" "llvm-objcopy-11" "llvm-objcopy-10" "llvm-objcopy-9" "llvm-objcopy-8" "objcopy")
 if (OBJCOPY_PATH)
     message(STATUS "Using objcopy: ${OBJCOPY_PATH}.")
 
@@ -313,7 +313,7 @@ if (COMPILER_CLANG)
     endif ()
 
     # Always prefer llvm tools when using clang. For instance, we cannot use GNU ar when llvm LTO is enabled
-    find_program (LLVM_AR_PATH NAMES "llvm-ar" "llvm-ar-10" "llvm-ar-9" "llvm-ar-8")
+    find_program (LLVM_AR_PATH NAMES "llvm-ar" "llvm-ar-11" "llvm-ar-10" "llvm-ar-9" "llvm-ar-8")
     if (LLVM_AR_PATH)
         message(STATUS "Using llvm-ar: ${LLVM_AR_PATH}.")
 
@@ -322,7 +322,7 @@ if (COMPILER_CLANG)
         message(WARNING "Cannot find llvm-ar. System ar will be used instead. It does not work with ThinLTO.")
     endif ()
 
-    find_program (LLVM_RANLIB_PATH NAMES "llvm-ranlib" "llvm-ranlib-10" "llvm-ranlib-9" "llvm-ranlib-8")
+    find_program (LLVM_RANLIB_PATH NAMES "llvm-ranlib" "llvm-ranlib-11" "llvm-ranlib-10" "llvm-ranlib-9" "llvm-ranlib-8")
     if (LLVM_RANLIB_PATH)
         message(STATUS "Using llvm-ranlib: ${LLVM_RANLIB_PATH}.")
 
@@ -513,7 +513,13 @@ endif ()
 macro (add_executable target)
     # invoke built-in add_executable
     # explicitly acquire and interpose malloc symbols by clickhouse_malloc
+    # if GLIBC_COMPATIBILITY is ON and ENABLE_THINLTO is on, provide the memcpy symbol explicitly to neutralize ThinLTO's libcall generation.
+    if (GLIBC_COMPATIBILITY AND ENABLE_THINLTO)
+        _add_executable (${ARGV} $<TARGET_OBJECTS:clickhouse_malloc> $<TARGET_OBJECTS:clickhouse_memcpy>)
+    else ()
         _add_executable (${ARGV} $<TARGET_OBJECTS:clickhouse_malloc>)
+    endif ()
 
     get_target_property (type ${target} TYPE)
     if (${type} STREQUAL EXECUTABLE)
         # operator::new/delete for executables (MemoryTracker stuff)
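The interposition above works because the linker binds `memcpy` to the first definition it encounters, so linking the `clickhouse_memcpy` object ahead of libc also captures the `memcpy` libcalls that ThinLTO synthesizes. A minimal sketch of such a wrapper, shaped like contrib/FastMemcpy's memcpy_wrapper.c shown later in this diff (assuming the library exposes a `memcpy_fast` implementation):

```cpp
#include <cstddef>

// Assumed to be provided by the interposing library (FastMemcpy here).
extern "C" void * memcpy_fast(void * dst, const void * src, size_t size);

// Defining memcpy with C linkage in an object linked into the executable
// makes the linker resolve all memcpy calls, including compiler-generated
// libcalls, to this definition rather than the libc one.
extern "C" void * memcpy(void * __restrict dst, const void * __restrict src, size_t size)
{
    return memcpy_fast(dst, src, size);
}
```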


@@ -17,5 +17,4 @@ ClickHouse is an open-source column-oriented database management system that all
 ## Upcoming Events
 
-* [ClickHouse for Edge Analytics](https://ones2020.sched.com/event/bWPs) on September 29, 2020.
 * [ClickHouse online meetup (in Russian)](https://clck.ru/R2zB9) on October 1, 2020.


@@ -10,11 +10,14 @@ currently being supported with security updates:
 | 1.x   | :x:                |
 | 18.x  | :x:                |
 | 19.x  | :x:                |
 | 19.14 | :white_check_mark: |
 | 20.1  | :x:                |
 | 20.3  | :white_check_mark: |
-| 20.4  | :white_check_mark: |
+| 20.4  | :x:                |
-| 20.5  | :white_check_mark: |
+| 20.5  | :x:                |
+| 20.6  | :x:                |
+| 20.7  | :white_check_mark: |
+| 20.8  | :white_check_mark: |
+| 20.9  | :white_check_mark: |
 
 ## Reporting a Vulnerability


@@ -38,10 +38,10 @@ bool hasInputData()
 }
 
-LineReader::Suggest::WordsRange LineReader::Suggest::getCompletions(const String & prefix, size_t prefix_length) const
+std::optional<LineReader::Suggest::WordsRange> LineReader::Suggest::getCompletions(const String & prefix, size_t prefix_length) const
 {
     if (!ready)
-        return std::make_pair(words.end(), words.end());
+        return std::nullopt;
 
     std::string_view last_word;


@@ -4,6 +4,7 @@
 #include <atomic>
 #include <vector>
+#include <optional>
 
 class LineReader
 {
@@ -18,7 +19,7 @@ public:
         std::atomic<bool> ready{false};
 
         /// Get iterators for the matched range of words if any.
-        WordsRange getCompletions(const String & prefix, size_t prefix_length) const;
+        std::optional<WordsRange> getCompletions(const String & prefix, size_t prefix_length) const;
     };
 
     using Patterns = std::vector<const char *>;


@@ -30,7 +30,8 @@ static LineReader::Suggest::Words::const_iterator end;
 static void findRange(const char * prefix, size_t prefix_length)
 {
     std::string prefix_str(prefix);
-    std::tie(pos, end) = suggest->getCompletions(prefix_str, prefix_length);
+    if (auto completions = suggest->getCompletions(prefix_str, prefix_length))
+        std::tie(pos, end) = *completions;
 }
 
 /// Iterates through matched range.


@@ -70,8 +70,9 @@ ReplxxLineReader::ReplxxLineReader(
     auto callback = [&suggest] (const String & context, size_t context_size)
     {
-        auto range = suggest.getCompletions(context, context_size);
-        return Replxx::completions_t(range.first, range.second);
+        if (auto range = suggest.getCompletions(context, context_size))
+            return Replxx::completions_t(range->first, range->second);
+        return Replxx::completions_t();
     };
 
     rx.set_completion_callback(callback);


@@ -358,7 +358,12 @@ private:
         }
         else
         {
-            if (number * sizeof(base_type) < sizeof(T))
+            if constexpr (sizeof(T) <= sizeof(base_type))
+            {
+                if (!number)
+                    return x;
+            }
+            else if (number * sizeof(base_type) < sizeof(T))
                 return x >> (number * base_bits); // & std::numeric_limits<base_type>::max()
             return 0;
         }
@@ -366,26 +371,32 @@ private:
     template <typename T>
     constexpr static integer<Bits, Signed>
-    op_minus(const integer<Bits, Signed> & lhs, T rhs)
+    minus(const integer<Bits, Signed> & lhs, T rhs)
     {
-        integer<Bits, Signed> res;
+        constexpr const unsigned rhs_items = (sizeof(T) > sizeof(base_type)) ? (sizeof(T) / sizeof(base_type)) : 1;
+        constexpr const unsigned op_items = (item_count < rhs_items) ? item_count : rhs_items;
 
-        bool is_underflow = false;
-        for (unsigned i = 0; i < item_count; ++i)
+        integer<Bits, Signed> res(lhs);
+        bool underflows[item_count] = {};
+
+        for (unsigned i = 0; i < op_items; ++i)
         {
-            base_type lhs_item = lhs.items[little(i)];
             base_type rhs_item = get_item(rhs, i);
+            base_type & res_item = res.items[little(i)];
 
-            if (is_underflow)
+            underflows[i] = res_item < rhs_item;
+            res_item -= rhs_item;
+        }
+
+        for (unsigned i = 1; i < item_count; ++i)
+        {
+            if (underflows[i-1])
             {
-                is_underflow = (lhs_item == 0);
-                --lhs_item;
+                base_type & res_item = res.items[little(i)];
+                if (res_item == 0)
+                    underflows[i] = true;
+                --res_item;
             }
-
-            if (lhs_item < rhs_item)
-                is_underflow = true;
-
-            res.items[little(i)] = lhs_item - rhs_item;
         }
 
         return res;
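The rewrite above (and the matching `plus` below) replaces the old fused borrow loop with two passes: subtract limb by limb while recording which positions wrapped, then sweep the recorded borrows upward. A standalone sketch of the same scheme on a four-limb little-endian array, illustrative only:

```cpp
#include <cstdint>

// 256-bit subtraction a -= b over little-endian 64-bit limbs, two-pass style:
// pass 1 records per-limb borrows, pass 2 propagates them toward the high limbs.
void sub256(uint64_t a[4], const uint64_t b[4])
{
    bool borrow[4] = {};
    for (int i = 0; i < 4; ++i)
    {
        borrow[i] = a[i] < b[i]; // does this limb wrap below zero?
        a[i] -= b[i];
    }
    for (int i = 1; i < 4; ++i)
    {
        if (borrow[i - 1])
        {
            if (a[i] == 0)       // decrementing wraps, so the borrow continues
                borrow[i] = true;
            --a[i];
        }
    }
}
```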
@@ -393,39 +404,91 @@ private:
     template <typename T>
     constexpr static integer<Bits, Signed>
-    op_plus(const integer<Bits, Signed> & lhs, T rhs)
+    plus(const integer<Bits, Signed> & lhs, T rhs)
     {
-        integer<Bits, Signed> res;
+        constexpr const unsigned rhs_items = (sizeof(T) > sizeof(base_type)) ? (sizeof(T) / sizeof(base_type)) : 1;
+        constexpr const unsigned op_items = (item_count < rhs_items) ? item_count : rhs_items;
 
-        bool is_overflow = false;
-        for (unsigned i = 0; i < item_count; ++i)
+        integer<Bits, Signed> res(lhs);
+        bool overflows[item_count] = {};
+
+        for (unsigned i = 0; i < op_items; ++i)
         {
-            base_type lhs_item = lhs.items[little(i)];
             base_type rhs_item = get_item(rhs, i);
+            base_type & res_item = res.items[little(i)];
 
-            if (is_overflow)
-            {
-                ++lhs_item;
-                is_overflow = (lhs_item == 0);
-            }
+            res_item += rhs_item;
+            overflows[i] = res_item < rhs_item;
+        }
 
-            base_type & res_item = res.items[little(i)];
-            res_item = lhs_item + rhs_item;
-
-            if (res_item < rhs_item)
-                is_overflow = true;
+        for (unsigned i = 1; i < item_count; ++i)
+        {
+            if (overflows[i-1])
+            {
+                base_type & res_item = res.items[little(i)];
+                ++res_item;
+                if (res_item == 0)
+                    overflows[i] = true;
+            }
         }
 
         return res;
     }
 
     template <typename T>
-    constexpr static auto op_multiply(const integer<Bits, Signed> & lhs, const T & rhs)
+    constexpr static integer<Bits, Signed>
+    multiply(const integer<Bits, Signed> & lhs, const T & rhs)
+    {
+        if constexpr (Bits == 256 && sizeof(base_type) == 8)
+        {
+            /// @sa https://github.com/abseil/abseil-cpp/blob/master/absl/numeric/int128.h
+            using HalfType = unsigned __int128;
+
+            HalfType a01 = (HalfType(lhs.items[little(1)]) << 64) + lhs.items[little(0)];
+            HalfType a23 = (HalfType(lhs.items[little(3)]) << 64) + lhs.items[little(2)];
+            HalfType a0 = lhs.items[little(0)];
+            HalfType a1 = lhs.items[little(1)];
+
+            HalfType b01 = rhs;
+            uint64_t b0 = b01;
+            uint64_t b1 = 0;
+            HalfType b23 = 0;
+            if constexpr (sizeof(T) > 8)
+                b1 = b01 >> 64;
+            if constexpr (sizeof(T) > 16)
+                b23 = (HalfType(rhs.items[little(3)]) << 64) + rhs.items[little(2)];
+
+            HalfType r23 = a23 * b01 + a01 * b23 + a1 * b1;
+            HalfType r01 = a0 * b0;
+            HalfType r12 = (r01 >> 64) + (r23 << 64);
+            HalfType r12_x = a1 * b0;
+
+            integer<Bits, Signed> res;
+            res.items[little(0)] = r01;
+            res.items[little(3)] = r23 >> 64;
+
+            if constexpr (sizeof(T) > 8)
+            {
+                HalfType r12_y = a0 * b1;
+                r12_x += r12_y;
+                if (r12_x < r12_y)
+                    ++res.items[little(3)];
+            }
+
+            r12 += r12_x;
+            if (r12 < r12_x)
+                ++res.items[little(3)];
+
+            res.items[little(1)] = r12;
+            res.items[little(2)] = r12 >> 64;
+            return res;
+        }
+        else
         {
             integer<Bits, Signed> res{};
 #if 1
-            integer<Bits, Signed> lhs2 = op_plus(lhs, shift_left(lhs, 1));
-            integer<Bits, Signed> lhs3 = op_plus(lhs2, shift_left(lhs, 2));
+            integer<Bits, Signed> lhs2 = plus(lhs, shift_left(lhs, 1));
+            integer<Bits, Signed> lhs3 = plus(lhs2, shift_left(lhs, 2));
 #endif
             for (unsigned i = 0; i < item_count; ++i)
             {
@@ -437,7 +500,7 @@ private:
 #if 1 /// optimization
                 if ((rhs_item & 0x7) == 0x7)
                 {
-                    res = op_plus(res, shift_left(lhs3, pos));
+                    res = plus(res, shift_left(lhs3, pos));
                     rhs_item >>= 3;
                     pos += 3;
                     continue;
@@ -445,14 +508,14 @@ private:
                 if ((rhs_item & 0x3) == 0x3)
                 {
-                    res = op_plus(res, shift_left(lhs2, pos));
+                    res = plus(res, shift_left(lhs2, pos));
                     rhs_item >>= 2;
                     pos += 2;
                     continue;
                 }
 #endif
                 if (rhs_item & 1)
-                    res = op_plus(res, shift_left(lhs, pos));
+                    res = plus(res, shift_left(lhs, pos));
 
                 rhs_item >>= 1;
                 ++pos;
@@ -461,6 +524,7 @@ private:
             return res;
         }
+    }
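The fast path above computes the low 256 bits of the product by splitting each operand into 128-bit halves: with a = a01 + a23·2^128 and b = b01 + b23·2^128, only a01·b01 and the cross terms (a01·b23 + a23·b01)·2^128 can affect bits below 2^256, so the a23·b23 term is dropped. A compact sketch of the same limb decomposition for a full 128×128→256-bit multiply, illustrative only and assuming a compiler with `unsigned __int128`:

```cpp
#include <cstdint>

using u128 = unsigned __int128;

// Full 128x128 -> 256-bit product from 64-bit limbs: form the four partial
// products and fold them into four output limbs with explicit carries.
void mul128_full(uint64_t a_lo, uint64_t a_hi, uint64_t b_lo, uint64_t b_hi, uint64_t out[4])
{
    u128 lo   = u128(a_lo) * b_lo;   // contributes to limbs 0..1
    u128 mid1 = u128(a_lo) * b_hi;   // contributes to limbs 1..2
    u128 mid2 = u128(a_hi) * b_lo;   // contributes to limbs 1..2
    u128 hi   = u128(a_hi) * b_hi;   // contributes to limbs 2..3

    out[0] = uint64_t(lo);
    u128 carry = (lo >> 64) + uint64_t(mid1) + uint64_t(mid2);
    out[1] = uint64_t(carry);
    carry = (carry >> 64) + (mid1 >> 64) + (mid2 >> 64) + uint64_t(hi);
    out[2] = uint64_t(carry);
    out[3] = uint64_t((carry >> 64) + (hi >> 64));
}
```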
 public:
     constexpr static integer<Bits, Signed> operator_unary_tilda(const integer<Bits, Signed> & lhs) noexcept
@@ -475,7 +539,7 @@ public:
     constexpr static integer<Bits, Signed>
     operator_unary_minus(const integer<Bits, Signed> & lhs) noexcept(std::is_same_v<Signed, unsigned>)
     {
-        return op_plus(operator_unary_tilda(lhs), 1);
+        return plus(operator_unary_tilda(lhs), 1);
     }
 
     template <typename T>
@@ -484,9 +548,9 @@ public:
         if constexpr (should_keep_size<T>())
         {
             if (is_negative(rhs))
-                return op_minus(lhs, -rhs);
+                return minus(lhs, -rhs);
             else
-                return op_plus(lhs, rhs);
+                return plus(lhs, rhs);
         }
         else
         {
@@ -502,9 +566,9 @@ public:
         if constexpr (should_keep_size<T>())
         {
             if (is_negative(rhs))
-                return op_plus(lhs, -rhs);
+                return plus(lhs, -rhs);
             else
-                return op_minus(lhs, rhs);
+                return minus(lhs, rhs);
         }
         else
         {
@@ -523,12 +587,12 @@ public:
         if constexpr (std::is_signed_v<Signed>)
         {
-            res = op_multiply((is_negative(lhs) ? make_positive(lhs) : lhs),
+            res = multiply((is_negative(lhs) ? make_positive(lhs) : lhs),
                               (is_negative(rhs) ? make_positive(rhs) : rhs));
         }
         else
         {
-            res = op_multiply(lhs, (is_negative(rhs) ? make_positive(rhs) : rhs));
+            res = multiply(lhs, (is_negative(rhs) ? make_positive(rhs) : rhs));
         }
 
         if (std::is_same_v<Signed, signed> && is_negative(lhs) != is_negative(rhs))
@@ -775,20 +839,20 @@ public:
     {
         if (*c >= '0' && *c <= '9')
         {
-            res = op_multiply(res, 16U);
-            res = op_plus(res, *c - '0');
+            res = multiply(res, 16U);
+            res = plus(res, *c - '0');
             ++c;
         }
         else if (*c >= 'a' && *c <= 'f')
         {
-            res = op_multiply(res, 16U);
-            res = op_plus(res, *c - 'a' + 10U);
+            res = multiply(res, 16U);
+            res = plus(res, *c - 'a' + 10U);
             ++c;
         }
         else if (*c >= 'A' && *c <= 'F')
         { // tolower must be used, but it is not constexpr
-            res = op_multiply(res, 16U);
-            res = op_plus(res, *c - 'A' + 10U);
+            res = multiply(res, 16U);
+            res = plus(res, *c - 'A' + 10U);
             ++c;
         }
         else
@@ -802,8 +866,8 @@ public:
             if (*c < '0' || *c > '9')
                 throwError("invalid char from");
 
-            res = op_multiply(res, 10U);
-            res = op_plus(res, *c - '0');
+            res = multiply(res, 10U);
+            res = plus(res, *c - '0');
             ++c;
         }
     }


@@ -27,6 +27,10 @@ if (GLIBC_COMPATIBILITY)
         list(APPEND glibc_compatibility_sources musl/getentropy.c)
     endif()
 
+    add_library (clickhouse_memcpy OBJECT
+        ${ClickHouse_SOURCE_DIR}/contrib/FastMemcpy/memcpy_wrapper.c
+    )
+
     # Need to omit frame pointers to match the performance of glibc
     set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fomit-frame-pointer")


@@ -23,6 +23,7 @@ ExtendedLogMessage ExtendedLogMessage::getFrom(const Poco::Message & base)
     msg_ext.time_seconds = static_cast<UInt32>(tv.tv_sec);
     msg_ext.time_microseconds = static_cast<UInt32>(tv.tv_usec);
+    msg_ext.time_in_microseconds = static_cast<UInt64>((tv.tv_sec) * 1000000U + (tv.tv_usec));
 
     if (current_thread)
     {


@@ -23,6 +23,7 @@ public:
     uint32_t time_seconds = 0;
     uint32_t time_microseconds = 0;
+    uint64_t time_in_microseconds = 0;
     uint64_t thread_id = 0;
     std::string query_id;


@@ -76,6 +76,7 @@ void OwnSplitChannel::logSplit(const Poco::Message & msg)
     TextLogElement elem;
 
     elem.event_time = msg_ext.time_seconds;
+    elem.event_time_microseconds = msg_ext.time_in_microseconds;
     elem.microseconds = msg_ext.time_microseconds;
 
     elem.thread_name = getThreadName();
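The chain above gives log entries a full-resolution timestamp: ExtendedLogMessage now carries microseconds since the epoch, and OwnSplitChannel copies it into the new event_time_microseconds field. A minimal sketch of the computation, as a hypothetical standalone helper assuming POSIX gettimeofday:

```cpp
#include <sys/time.h>
#include <cstdint>

// Microseconds since the Unix epoch: widen the seconds before multiplying so
// the arithmetic happens in 64 bits, then add the sub-second remainder.
uint64_t epoch_microseconds()
{
    timeval tv{};
    gettimeofday(&tv, nullptr);
    return static_cast<uint64_t>(tv.tv_sec) * 1000000U + static_cast<uint64_t>(tv.tv_usec);
}
```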


@@ -1,6 +1,6 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
-from __future__ import print_function
 
 import sys
 import json
@@ -99,7 +99,7 @@ def gen_html_json(options, arguments):
     tuples = read_stats_file(options, arguments[1])
     print('{')
     print('"system: GreenPlum(x2),')
-    print('"version": "%s",' % '4.3.9.1')
+    print(('"version": "%s",' % '4.3.9.1'))
     print('"data_size": 10000000,')
     print('"time": "",')
     print('"comments": "",')


FindgRPC.cmake (new file, +330)

@@ -0,0 +1,330 @@
#[[
Defines the following variables:
``gRPC_FOUND``
Whether the gRPC framework is found
``gRPC_INCLUDE_DIRS``
The include directories of the gRPC framework, including the include directories of the C++ wrapper.
``gRPC_LIBRARIES``
The libraries of the gRPC framework.
``gRPC_UNSECURE_LIBRARIES``
The libraries of the gRPC framework without SSL.
``_gRPC_CPP_PLUGIN``
The plugin for generating gRPC client and server C++ stubs from `.proto` files
``_gRPC_PYTHON_PLUGIN``
The plugin for generating gRPC client and server Python stubs from `.proto` files
The following :prop_tgt:`IMPORTED` targets are also defined:
``grpc++``
``grpc++_unsecure``
``grpc_cpp_plugin``
``grpc_python_plugin``
Add custom commands to process ``.proto`` files to C++::
protobuf_generate_grpc_cpp(<SRCS> <HDRS>
[DESCRIPTORS <DESC>] [EXPORT_MACRO <MACRO>] [<ARGN>...])
``SRCS``
Variable to define with autogenerated source files
``HDRS``
Variable to define with autogenerated header files
``DESCRIPTORS``
Variable to define with autogenerated descriptor files, if requested.
``EXPORT_MACRO``
is a macro which should expand to ``__declspec(dllexport)`` or
``__declspec(dllimport)`` depending on what is being compiled.
``ARGN``
``.proto`` files
#]]
# Function to generate C++ files from .proto files.
# This function is a modified version of the function PROTOBUF_GENERATE_CPP() copied from https://github.com/Kitware/CMake/blob/master/Modules/FindProtobuf.cmake.
function(PROTOBUF_GENERATE_GRPC_CPP SRCS HDRS)
cmake_parse_arguments(protobuf_generate_grpc_cpp "" "EXPORT_MACRO;DESCRIPTORS" "" ${ARGN})
set(_proto_files "${protobuf_generate_grpc_cpp_UNPARSED_ARGUMENTS}")
if(NOT _proto_files)
message(SEND_ERROR "Error: PROTOBUF_GENERATE_GRPC_CPP() called without any proto files")
return()
endif()
if(PROTOBUF_GENERATE_GRPC_CPP_APPEND_PATH)
set(_append_arg APPEND_PATH)
endif()
if(protobuf_generate_grpc_cpp_DESCRIPTORS)
set(_descriptors DESCRIPTORS)
endif()
if(DEFINED PROTOBUF_IMPORT_DIRS AND NOT DEFINED Protobuf_IMPORT_DIRS)
set(Protobuf_IMPORT_DIRS "${PROTOBUF_IMPORT_DIRS}")
endif()
if(DEFINED Protobuf_IMPORT_DIRS)
set(_import_arg IMPORT_DIRS ${Protobuf_IMPORT_DIRS})
endif()
set(_outvar)
protobuf_generate_grpc(${_append_arg} ${_descriptors} LANGUAGE cpp EXPORT_MACRO ${protobuf_generate_grpc_cpp_EXPORT_MACRO} OUT_VAR _outvar ${_import_arg} PROTOS ${_proto_files})
set(${SRCS})
set(${HDRS})
if(protobuf_generate_grpc_cpp_DESCRIPTORS)
set(${protobuf_generate_grpc_cpp_DESCRIPTORS})
endif()
foreach(_file ${_outvar})
if(_file MATCHES "cc$")
list(APPEND ${SRCS} ${_file})
elseif(_file MATCHES "desc$")
list(APPEND ${protobuf_generate_grpc_cpp_DESCRIPTORS} ${_file})
else()
list(APPEND ${HDRS} ${_file})
endif()
endforeach()
set(${SRCS} ${${SRCS}} PARENT_SCOPE)
set(${HDRS} ${${HDRS}} PARENT_SCOPE)
if(protobuf_generate_grpc_cpp_DESCRIPTORS)
set(${protobuf_generate_grpc_cpp_DESCRIPTORS} "${${protobuf_generate_grpc_cpp_DESCRIPTORS}}" PARENT_SCOPE)
endif()
endfunction()
# Helper function.
# This function is a modified version of the function protobuf_generate() copied from https://github.com/Kitware/CMake/blob/master/Modules/FindProtobuf.cmake.
function(protobuf_generate_grpc)
set(_options APPEND_PATH DESCRIPTORS)
set(_singleargs LANGUAGE OUT_VAR EXPORT_MACRO PROTOC_OUT_DIR)
if(COMMAND target_sources)
list(APPEND _singleargs TARGET)
endif()
set(_multiargs PROTOS IMPORT_DIRS GENERATE_EXTENSIONS)
cmake_parse_arguments(protobuf_generate_grpc "${_options}" "${_singleargs}" "${_multiargs}" "${ARGN}")
if(NOT protobuf_generate_grpc_PROTOS AND NOT protobuf_generate_grpc_TARGET)
message(SEND_ERROR "Error: protobuf_generate_grpc called without any targets or source files")
return()
endif()
if(NOT protobuf_generate_grpc_OUT_VAR AND NOT protobuf_generate_grpc_TARGET)
message(SEND_ERROR "Error: protobuf_generate_grpc called without a target or output variable")
return()
endif()
if(NOT protobuf_generate_grpc_LANGUAGE)
set(protobuf_generate_grpc_LANGUAGE cpp)
endif()
string(TOLOWER ${protobuf_generate_grpc_LANGUAGE} protobuf_generate_grpc_LANGUAGE)
if(NOT protobuf_generate_grpc_PROTOC_OUT_DIR)
set(protobuf_generate_grpc_PROTOC_OUT_DIR ${CMAKE_CURRENT_BINARY_DIR})
endif()
if(protobuf_generate_grpc_EXPORT_MACRO AND protobuf_generate_grpc_LANGUAGE STREQUAL cpp)
set(_dll_export_decl "dllexport_decl=${protobuf_generate_grpc_EXPORT_MACRO}:")
endif()
if(NOT protobuf_generate_grpc_GENERATE_EXTENSIONS)
if(protobuf_generate_grpc_LANGUAGE STREQUAL cpp)
set(protobuf_generate_grpc_GENERATE_EXTENSIONS .pb.h .pb.cc .grpc.pb.h .grpc.pb.cc)
elseif(protobuf_generate_grpc_LANGUAGE STREQUAL python)
set(protobuf_generate_grpc_GENERATE_EXTENSIONS _pb2.py)
else()
message(SEND_ERROR "Error: protobuf_generate_grpc given unknown Language ${LANGUAGE}, please provide a value for GENERATE_EXTENSIONS")
return()
endif()
endif()
if(NOT protobuf_generate_grpc_PLUGIN)
if(protobuf_generate_grpc_LANGUAGE STREQUAL cpp)
set(protobuf_generate_grpc_PLUGIN "grpc_cpp_plugin")
elseif(protobuf_generate_grpc_LANGUAGE STREQUAL python)
set(protobuf_generate_grpc_PLUGIN "grpc_python_plugin")
else()
message(SEND_ERROR "Error: protobuf_generate_grpc given unknown Language ${LANGUAGE}, please provide a value for PLUGIN")
return()
endif()
endif()
if(protobuf_generate_grpc_TARGET)
get_target_property(_source_list ${protobuf_generate_grpc_TARGET} SOURCES)
foreach(_file ${_source_list})
if(_file MATCHES "proto$")
list(APPEND protobuf_generate_grpc_PROTOS ${_file})
endif()
endforeach()
endif()
if(NOT protobuf_generate_grpc_PROTOS)
message(SEND_ERROR "Error: protobuf_generate_grpc could not find any .proto files")
return()
endif()
if(protobuf_generate_grpc_APPEND_PATH)
# Create an include path for each file specified
foreach(_file ${protobuf_generate_grpc_PROTOS})
get_filename_component(_abs_file ${_file} ABSOLUTE)
get_filename_component(_abs_path ${_abs_file} PATH)
list(FIND _protobuf_include_path ${_abs_path} _contains_already)
if(${_contains_already} EQUAL -1)
list(APPEND _protobuf_include_path -I ${_abs_path})
endif()
endforeach()
else()
set(_protobuf_include_path -I ${CMAKE_CURRENT_SOURCE_DIR})
endif()
foreach(DIR ${protobuf_generate_grpc_IMPORT_DIRS})
get_filename_component(ABS_PATH ${DIR} ABSOLUTE)
list(FIND _protobuf_include_path ${ABS_PATH} _contains_already)
if(${_contains_already} EQUAL -1)
list(APPEND _protobuf_include_path -I ${ABS_PATH})
endif()
endforeach()
set(_generated_srcs_all)
foreach(_proto ${protobuf_generate_grpc_PROTOS})
get_filename_component(_abs_file ${_proto} ABSOLUTE)
get_filename_component(_abs_dir ${_abs_file} DIRECTORY)
get_filename_component(_basename ${_proto} NAME_WE)
file(RELATIVE_PATH _rel_dir ${CMAKE_CURRENT_SOURCE_DIR} ${_abs_dir})
set(_possible_rel_dir)
if(NOT protobuf_generate_grpc_APPEND_PATH)
set(_possible_rel_dir ${_rel_dir}/)
endif()
set(_generated_srcs)
foreach(_ext ${protobuf_generate_grpc_GENERATE_EXTENSIONS})
list(APPEND _generated_srcs "${protobuf_generate_grpc_PROTOC_OUT_DIR}/${_possible_rel_dir}${_basename}${_ext}")
endforeach()
if(protobuf_generate_grpc_DESCRIPTORS AND protobuf_generate_grpc_LANGUAGE STREQUAL cpp)
set(_descriptor_file "${CMAKE_CURRENT_BINARY_DIR}/${_basename}.desc")
set(_dll_desc_out "--descriptor_set_out=${_descriptor_file}")
list(APPEND _generated_srcs ${_descriptor_file})
endif()
list(APPEND _generated_srcs_all ${_generated_srcs})
add_custom_command(
OUTPUT ${_generated_srcs}
COMMAND protobuf::protoc
ARGS --${protobuf_generate_grpc_LANGUAGE}_out ${_dll_export_decl}${protobuf_generate_grpc_PROTOC_OUT_DIR}
--grpc_out ${_dll_export_decl}${protobuf_generate_grpc_PROTOC_OUT_DIR}
--plugin=protoc-gen-grpc=$<TARGET_FILE:${protobuf_generate_grpc_PLUGIN}>
${_dll_desc_out} ${_protobuf_include_path} ${_abs_file}
DEPENDS ${_abs_file} protobuf::protoc ${protobuf_generate_grpc_PLUGIN}
COMMENT "Running ${protobuf_generate_grpc_LANGUAGE} protocol buffer compiler on ${_proto}"
VERBATIM)
endforeach()
set_source_files_properties(${_generated_srcs_all} PROPERTIES GENERATED TRUE)
if(protobuf_generate_grpc_OUT_VAR)
set(${protobuf_generate_grpc_OUT_VAR} ${_generated_srcs_all} PARENT_SCOPE)
endif()
if(protobuf_generate_grpc_TARGET)
target_sources(${protobuf_generate_grpc_TARGET} PRIVATE ${_generated_srcs_all})
endif()
endfunction()
# Find the libraries.
if(gRPC_USE_STATIC_LIBS)
# Support preference of static libs by adjusting CMAKE_FIND_LIBRARY_SUFFIXES
set(_gRPC_ORIG_CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES})
if(WIN32)
set(CMAKE_FIND_LIBRARY_SUFFIXES .lib .a ${CMAKE_FIND_LIBRARY_SUFFIXES})
else()
set(CMAKE_FIND_LIBRARY_SUFFIXES .a)
endif()
endif()
find_library(gRPC_LIBRARY NAMES grpc)
find_library(gRPC_CPP_LIBRARY NAMES grpc++)
find_library(gRPC_UNSECURE_LIBRARY NAMES grpc_unsecure)
find_library(gRPC_CPP_UNSECURE_LIBRARY NAMES grpc++_unsecure)
set(gRPC_LIBRARIES)
if(gRPC_USE_UNSECURE_LIBRARIES)
if(gRPC_UNSECURE_LIBRARY)
set(gRPC_LIBRARIES ${gRPC_LIBRARIES} ${gRPC_UNSECURE_LIBRARY})
endif()
if(gRPC_CPP_UNSECURE_LIBRARY)
set(gRPC_LIBRARIES ${gRPC_LIBRARIES} ${gRPC_CPP_UNSECURE_LIBRARY})
endif()
else()
if(gRPC_LIBRARY)
set(gRPC_LIBRARIES ${gRPC_LIBRARIES} ${gRPC_LIBRARY})
endif()
if(gRPC_CPP_LIBRARY)
set(gRPC_LIBRARIES ${gRPC_LIBRARIES} ${gRPC_CPP_LIBRARY})
endif()
endif()
# Restore the original find library ordering.
if(gRPC_USE_STATIC_LIBS)
set(CMAKE_FIND_LIBRARY_SUFFIXES ${_gRPC_ORIG_CMAKE_FIND_LIBRARY_SUFFIXES})
endif()
# Find the include directories.
find_path(gRPC_INCLUDE_DIR grpc/grpc.h)
find_path(gRPC_CPP_INCLUDE_DIR grpc++/grpc++.h)
if(gRPC_INCLUDE_DIR AND gRPC_CPP_INCLUDE_DIR AND NOT(gRPC_INCLUDE_DIR STREQUAL gRPC_CPP_INCLUDE_DIR))
set(gRPC_INCLUDE_DIRS ${gRPC_INCLUDE_DIR} ${gRPC_CPP_INCLUDE_DIR})
elseif(gRPC_INCLUDE_DIR)
set(gRPC_INCLUDE_DIRS ${gRPC_INCLUDE_DIR})
else()
set(gRPC_INCLUDE_DIRS ${gRPC_CPP_INCLUDE_DIR})
endif()
# Get full path to plugin.
find_program(_gRPC_CPP_PLUGIN
NAMES grpc_cpp_plugin
DOC "The plugin for generating gRPC client and server C++ stubs from `.proto` files")
find_program(_gRPC_PYTHON_PLUGIN
NAMES grpc_python_plugin
DOC "The plugin for generating gRPC client and server Python stubs from `.proto` files")
# Add imported targets.
if(gRPC_CPP_LIBRARY AND NOT TARGET grpc++)
add_library(grpc++ UNKNOWN IMPORTED)
set_target_properties(grpc++ PROPERTIES
IMPORTED_LOCATION "${gRPC_CPP_LIBRARY}")
set_target_properties(grpc++ PROPERTIES
INTERFACE_INCLUDE_DIRECTORIES ${gRPC_INCLUDE_DIRS})
endif()
if(gRPC_CPP_UNSECURE_LIBRARY AND NOT TARGET grpc++_unsecure)
add_library(grpc++_unsecure UNKNOWN IMPORTED)
set_target_properties(grpc++_unsecure PROPERTIES
IMPORTED_LOCATION "${gRPC_CPP_UNSECURE_LIBRARY}")
set_target_properties(grpc++_unsecure PROPERTIES
INTERFACE_INCLUDE_DIRECTORIES ${gRPC_INCLUDE_DIRS})
endif()
if(_gRPC_CPP_PLUGIN AND NOT TARGET grpc_cpp_plugin)
add_executable(grpc_cpp_plugin IMPORTED)
set_target_properties(grpc_cpp_plugin PROPERTIES
IMPORTED_LOCATION "${_gRPC_CPP_PLUGIN}")
endif()
if(_gRPC_PYTHON_PLUGIN AND NOT TARGET grpc_python_plugin)
add_executable(grpc_python_plugin IMPORTED)
set_target_properties(grpc_python_plugin PROPERTIES
IMPORTED_LOCATION "${_gRPC_PYTHON_PLUGIN}")
endif()
#include(FindPackageHandleStandardArgs.cmake)
FIND_PACKAGE_HANDLE_STANDARD_ARGS(gRPC
REQUIRED_VARS gRPC_LIBRARY gRPC_CPP_LIBRARY gRPC_UNSECURE_LIBRARY gRPC_CPP_UNSECURE_LIBRARY
gRPC_INCLUDE_DIR gRPC_CPP_INCLUDE_DIR _gRPC_CPP_PLUGIN _gRPC_PYTHON_PLUGIN)
if(gRPC_FOUND)
if(gRPC_DEBUG)
message(STATUS "gRPC: INCLUDE_DIRS=${gRPC_INCLUDE_DIRS}")
message(STATUS "gRPC: LIBRARIES=${gRPC_LIBRARIES}")
message(STATUS "gRPC: CPP_PLUGIN=${_gRPC_CPP_PLUGIN}")
message(STATUS "gRPC: PYTHON_PLUGIN=${_gRPC_PYTHON_PLUGIN}")
endif()
endif()


@@ -6,7 +6,7 @@ if (ENABLE_CLANG_TIDY)
         message(FATAL_ERROR "clang-tidy requires CMake version at least 3.6.")
     endif()
 
-    find_program (CLANG_TIDY_PATH NAMES "clang-tidy" "clang-tidy-10" "clang-tidy-9" "clang-tidy-8")
+    find_program (CLANG_TIDY_PATH NAMES "clang-tidy" "clang-tidy-11" "clang-tidy-10" "clang-tidy-9" "clang-tidy-8")
     if (CLANG_TIDY_PATH)
         message(STATUS


@@ -7,39 +7,59 @@ if (NOT ENABLE_GRPC)
     return()
 endif()
 
-option (USE_INTERNAL_GRPC_LIBRARY
-    "Set to FALSE to use system gRPC library instead of bundled. (Experimental. Set to OFF on your own risk)"
-    ${NOT_UNBUNDLED})
+if(NOT USE_PROTOBUF)
+    message(WARNING "Cannot use gRPC library without protobuf")
+endif()
+
+# Normally we use the internal gRPC framework.
+# You can set USE_INTERNAL_GRPC_LIBRARY to OFF to force using the external gRPC framework, which should be installed in the system in this case.
+# The external gRPC framework can be installed in the system by running
+# sudo apt-get install libgrpc++-dev protobuf-compiler-grpc
+option(USE_INTERNAL_GRPC_LIBRARY "Set to FALSE to use system gRPC library instead of bundled. (Experimental. Set to OFF on your own risk)" ${NOT_UNBUNDLED})
+
+if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/grpc/CMakeLists.txt")
+    if(USE_INTERNAL_GRPC_LIBRARY)
+        message(WARNING "submodule contrib/grpc is missing. to fix try run: \n git submodule update --init --recursive")
+        message(${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal grpc")
+        set(USE_INTERNAL_GRPC_LIBRARY 0)
+    endif()
+    set(MISSING_INTERNAL_GRPC_LIBRARY 1)
+endif()
+
+if(USE_SSL)
+    set(gRPC_USE_UNSECURE_LIBRARIES FALSE)
+else()
+    set(gRPC_USE_UNSECURE_LIBRARIES TRUE)
+endif()
 
 if(NOT USE_INTERNAL_GRPC_LIBRARY)
-    find_package(grpc)
-    if (NOT GRPC_FOUND)
-        find_path(GRPC_INCLUDE_DIR grpcpp/grpcpp.h)
-        find_library(GRPC_LIBRARY grpc++)
-    endif ()
-
-    if (GRPC_INCLUDE_DIR AND GRPC_LIBRARY)
-        set (USE_GRPC ON)
+    find_package(gRPC)
+    if(NOT gRPC_INCLUDE_DIRS OR NOT gRPC_LIBRARIES)
+        message(${RECONFIGURE_MESSAGE_LEVEL} "Can't find system gRPC library")
+        set(EXTERNAL_GRPC_LIBRARY_FOUND 0)
+    elseif(NOT _gRPC_CPP_PLUGIN)
+        message(${RECONFIGURE_MESSAGE_LEVEL} "Can't find system grpc_cpp_plugin")
+        set(EXTERNAL_GRPC_LIBRARY_FOUND 0)
     else()
-        message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system gRPC")
+        set(EXTERNAL_GRPC_LIBRARY_FOUND 1)
+        set(USE_GRPC 1)
     endif()
 endif()
 
-if (NOT USE_GRPC)
-    if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/grpc/include/grpc++/grpc++.h")
-        message (WARNING "submodule contrib/grpc is missing. To fix try run: \n git submodule update --init --recursive")
-        message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal gRPC")
-        set (USE_INTERNAL_GRPC_LIBRARY OFF)
-    elseif (NOT USE_PROTOBUF)
-        message (WARNING "gRPC requires protobuf which is disabled")
-        message (${RECONFIGURE_MESSAGE_LEVEL} "Will not use internal gRPC without protobuf")
-        set (USE_INTERNAL_GRPC_LIBRARY OFF)
+if(NOT EXTERNAL_GRPC_LIBRARY_FOUND AND NOT MISSING_INTERNAL_GRPC_LIBRARY)
+    set(gRPC_INCLUDE_DIRS "${ClickHouse_SOURCE_DIR}/contrib/grpc/include")
+    if(gRPC_USE_UNSECURE_LIBRARIES)
+        set(gRPC_LIBRARIES grpc_unsecure grpc++_unsecure)
     else()
-        set (GRPC_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/grpc/include")
-        set (GRPC_LIBRARY "libgrpc++")
-        set (USE_GRPC ON)
-        set (USE_INTERNAL_GRPC_LIBRARY ON)
+        set(gRPC_LIBRARIES grpc grpc++)
     endif()
+    set(_gRPC_CPP_PLUGIN $<TARGET_FILE:grpc_cpp_plugin>)
+    set(_gRPC_PROTOC_EXECUTABLE $<TARGET_FILE:protobuf::protoc>)
+
+    include("${ClickHouse_SOURCE_DIR}/contrib/grpc-cmake/protobuf_generate_grpc.cmake")
+
+    set(USE_INTERNAL_GRPC_LIBRARY 1)
+    set(USE_GRPC 1)
 endif()
 
-message (STATUS "Using gRPC=${USE_GRPC}: ${GRPC_INCLUDE_DIR} : ${GRPC_LIBRARY}")
+message(STATUS "Using gRPC=${USE_GRPC}: ${gRPC_INCLUDE_DIRS} : ${gRPC_LIBRARIES} : ${_gRPC_CPP_PLUGIN}")


@@ -26,7 +26,7 @@ endif ()
 if (NOT USE_INTERNAL_LLVM_LIBRARY)
     set (LLVM_PATHS "/usr/local/lib/llvm")
 
-    foreach(llvm_v 9 8)
+    foreach(llvm_v 10 9 8)
         if (NOT LLVM_FOUND)
             find_package (LLVM ${llvm_v} CONFIG PATHS ${LLVM_PATHS})
         endif ()


@@ -7,7 +7,11 @@ if(NOT ENABLE_PROTOBUF)
     return()
 endif()
 
-option(USE_INTERNAL_PROTOBUF_LIBRARY "Set to FALSE to use system protobuf instead of bundled" ${NOT_UNBUNDLED})
+# Normally we use the internal protobuf library.
+# You can set USE_INTERNAL_PROTOBUF_LIBRARY to OFF to force using the external protobuf library, which should be installed in the system in this case.
+# The external protobuf library can be installed in the system by running
+# sudo apt-get install libprotobuf-dev protobuf-compiler libprotoc-dev
+option(USE_INTERNAL_PROTOBUF_LIBRARY "Set to FALSE to use system protobuf instead of bundled. (Experimental. Set to OFF on your own risk)" ${NOT_UNBUNDLED})
 
 if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/protobuf/cmake/CMakeLists.txt")
     if(USE_INTERNAL_PROTOBUF_LIBRARY)
@@ -20,25 +24,28 @@ endif()
 if(NOT USE_INTERNAL_PROTOBUF_LIBRARY)
     find_package(Protobuf)
-    if (Protobuf_LIBRARY AND Protobuf_INCLUDE_DIR AND Protobuf_PROTOC_EXECUTABLE)
+    if(NOT Protobuf_INCLUDE_DIR OR NOT Protobuf_LIBRARY)
+        message(${RECONFIGURE_MESSAGE_LEVEL} "Can't find system protobuf library")
+        set(EXTERNAL_PROTOBUF_LIBRARY_FOUND 0)
+    elseif(NOT Protobuf_PROTOC_EXECUTABLE)
+        message(${RECONFIGURE_MESSAGE_LEVEL} "Can't find system protobuf compiler")
+        set(EXTERNAL_PROTOBUF_LIBRARY_FOUND 0)
+    else()
         set(EXTERNAL_PROTOBUF_LIBRARY_FOUND 1)
         set(USE_PROTOBUF 1)
-    else()
-        message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system protobuf")
-        set(EXTERNAL_PROTOBUF_LIBRARY_FOUND 0)
     endif()
 endif()
 
 if(NOT EXTERNAL_PROTOBUF_LIBRARY_FOUND AND NOT MISSING_INTERNAL_PROTOBUF_LIBRARY)
     set(Protobuf_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/protobuf/src")
-    set(USE_PROTOBUF 1)
-    set(USE_INTERNAL_PROTOBUF_LIBRARY 1)
     set(Protobuf_LIBRARY libprotobuf)
-    set(Protobuf_PROTOC_LIBRARY libprotoc)
-    set(Protobuf_LITE_LIBRARY libprotobuf-lite)
     set(Protobuf_PROTOC_EXECUTABLE "$<TARGET_FILE:protoc>")
+    set(Protobuf_PROTOC_LIBRARY libprotoc)
+
+    include("${ClickHouse_SOURCE_DIR}/contrib/protobuf-cmake/protobuf_generate.cmake")
+
+    set(USE_INTERNAL_PROTOBUF_LIBRARY 1)
+    set(USE_PROTOBUF 1)
 endif()
 
 if(OS_FREEBSD AND SANITIZE STREQUAL "address")
@@ -52,6 +59,4 @@ if(OS_FREEBSD AND SANITIZE STREQUAL "address")
     endif()
 endif()
 
-include ("${ClickHouse_SOURCE_DIR}/cmake/protobuf_generate_cpp.cmake")
-
-message(STATUS "Using protobuf=${USE_PROTOBUF}: ${Protobuf_INCLUDE_DIR} : ${Protobuf_LIBRARY} : ${Protobuf_PROTOC_EXECUTABLE}")
+message(STATUS "Using protobuf=${USE_PROTOBUF}: ${Protobuf_INCLUDE_DIR} : ${Protobuf_LIBRARY} : ${Protobuf_PROTOC_EXECUTABLE} : ${Protobuf_PROTOC_LIBRARY}")


@@ -14,10 +14,10 @@ if (NOT ENABLE_RDKAFKA)
     return()
 endif()
 
-if (NOT ARCH_ARM AND USE_LIBGSASL)
+if (NOT ARCH_ARM)
     option (USE_INTERNAL_RDKAFKA_LIBRARY "Set to FALSE to use system librdkafka instead of the bundled" ${NOT_UNBUNDLED})
 elseif(USE_INTERNAL_RDKAFKA_LIBRARY)
-    message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal librdkafka with ARCH_ARM=${ARCH_ARM} AND USE_LIBGSASL=${USE_LIBGSASL}")
+    message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal librdkafka with ARCH_ARM=${ARCH_ARM}")
 endif ()
 
 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/cppkafka/CMakeLists.txt")


cmake/protobuf_generate_cpp.cmake (file deleted)

@@ -1,172 +0,0 @@
# This file declares functions adding custom commands for generating C++ files from *.proto files:
# function (protobuf_generate_cpp SRCS HDRS)
# function (protobuf_generate_grpc_cpp SRCS HDRS)
if (NOT USE_PROTOBUF)
message (WARNING "Could not use protobuf_generate_cpp() without the protobuf library")
return()
endif()
if (NOT DEFINED PROTOBUF_PROTOC_EXECUTABLE)
set (PROTOBUF_PROTOC_EXECUTABLE "$<TARGET_FILE:protoc>")
endif()
if (NOT DEFINED GRPC_CPP_PLUGIN_EXECUTABLE)
set (GRPC_CPP_PLUGIN_EXECUTABLE $<TARGET_FILE:grpc_cpp_plugin>)
endif()
if (NOT DEFINED PROTOBUF_GENERATE_CPP_APPEND_PATH)
set (PROTOBUF_GENERATE_CPP_APPEND_PATH TRUE)
endif()
function(protobuf_generate_cpp_impl SRCS HDRS MODES OUTPUT_FILE_EXTS PLUGIN)
if(NOT ARGN)
message(SEND_ERROR "Error: protobuf_generate_cpp() called without any proto files")
return()
endif()
if(PROTOBUF_GENERATE_CPP_APPEND_PATH)
# Create an include path for each file specified
foreach(FIL ${ARGN})
get_filename_component(ABS_FIL ${FIL} ABSOLUTE)
get_filename_component(ABS_PATH ${ABS_FIL} PATH)
list(FIND protobuf_include_path ${ABS_PATH} _contains_already)
if(${_contains_already} EQUAL -1)
list(APPEND protobuf_include_path -I ${ABS_PATH})
endif()
endforeach()
else()
set(protobuf_include_path -I ${CMAKE_CURRENT_SOURCE_DIR})
endif()
if(DEFINED PROTOBUF_IMPORT_DIRS AND NOT DEFINED Protobuf_IMPORT_DIRS)
set(Protobuf_IMPORT_DIRS "${PROTOBUF_IMPORT_DIRS}")
endif()
if(DEFINED Protobuf_IMPORT_DIRS)
foreach(DIR ${Protobuf_IMPORT_DIRS})
get_filename_component(ABS_PATH ${DIR} ABSOLUTE)
list(FIND protobuf_include_path ${ABS_PATH} _contains_already)
if(${_contains_already} EQUAL -1)
list(APPEND protobuf_include_path -I ${ABS_PATH})
endif()
endforeach()
endif()
set (intermediate_dir ${CMAKE_CURRENT_BINARY_DIR}/intermediate)
file (MAKE_DIRECTORY ${intermediate_dir})
set (protoc_args)
foreach (mode ${MODES})
list (APPEND protoc_args "--${mode}_out" ${intermediate_dir})
endforeach()
if (PLUGIN)
list (APPEND protoc_args "--plugin=${PLUGIN}")
endif()
set(srcs)
set(hdrs)
set(all_intermediate_outputs)
foreach(input_name ${ARGN})
get_filename_component(abs_name ${input_name} ABSOLUTE)
get_filename_component(name ${input_name} NAME_WE)
set (intermediate_outputs)
foreach (ext ${OUTPUT_FILE_EXTS})
set (filename "${name}${ext}")
set (output "${CMAKE_CURRENT_BINARY_DIR}/${filename}")
set (intermediate_output "${intermediate_dir}/${filename}")
list (APPEND intermediate_outputs "${intermediate_output}")
list (APPEND all_intermediate_outputs "${intermediate_output}")
if (${ext} MATCHES ".*\\.h")
list(APPEND hdrs "${output}")
else()
list(APPEND srcs "${output}")
endif()
add_custom_command(
OUTPUT ${output}
COMMAND ${CMAKE_COMMAND} -DPROTOBUF_GENERATE_CPP_SCRIPT_MODE=1 -DUSE_PROTOBUF=1 -DDIR=${CMAKE_CURRENT_BINARY_DIR} -DFILENAME=${filename} -DCOMPILER_ID=${CMAKE_CXX_COMPILER_ID} -P ${ClickHouse_SOURCE_DIR}/cmake/protobuf_generate_cpp.cmake
DEPENDS ${intermediate_output})
endforeach()
add_custom_command(
OUTPUT ${intermediate_outputs}
COMMAND ${Protobuf_PROTOC_EXECUTABLE}
ARGS ${protobuf_include_path} ${protoc_args} ${abs_name}
DEPENDS ${abs_name} ${Protobuf_PROTOC_EXECUTABLE} ${PLUGIN}
COMMENT "Running C++ protocol buffer compiler on ${name}"
VERBATIM )
endforeach()
set_source_files_properties(${srcs} ${hdrs} ${all_intermediate_outputs} PROPERTIES GENERATED TRUE)
set(${SRCS} ${srcs} PARENT_SCOPE)
set(${HDRS} ${hdrs} PARENT_SCOPE)
endfunction()
if (PROTOBUF_GENERATE_CPP_SCRIPT_MODE)
set (output "${DIR}/${FILENAME}")
set (intermediate_dir ${DIR}/intermediate)
set (intermediate_output "${intermediate_dir}/${FILENAME}")
if (COMPILER_ID MATCHES "Clang")
set (pragma_push "#pragma clang diagnostic push\n")
set (pragma_pop "#pragma clang diagnostic pop\n")
set (pragma_disable_warnings "#pragma clang diagnostic ignored \"-Weverything\"\n")
elseif (COMPILER_ID MATCHES "GNU")
set (pragma_push "#pragma GCC diagnostic push\n")
set (pragma_pop "#pragma GCC diagnostic pop\n")
set (pragma_disable_warnings "#pragma GCC diagnostic ignored \"-Wall\"\n"
"#pragma GCC diagnostic ignored \"-Wextra\"\n"
"#pragma GCC diagnostic ignored \"-Warray-bounds\"\n"
"#pragma GCC diagnostic ignored \"-Wold-style-cast\"\n"
"#pragma GCC diagnostic ignored \"-Wshadow\"\n"
"#pragma GCC diagnostic ignored \"-Wsuggest-override\"\n"
"#pragma GCC diagnostic ignored \"-Wcast-qual\"\n"
"#pragma GCC diagnostic ignored \"-Wunused-parameter\"\n")
endif()
if (${FILENAME} MATCHES ".*\\.h")
file(WRITE "${output}"
"#pragma once\n"
${pragma_push}
${pragma_disable_warnings}
"#include \"${intermediate_output}\"\n"
${pragma_pop}
)
else()
file(WRITE "${output}"
${pragma_disable_warnings}
"#include \"${intermediate_output}\"\n"
)
endif()
return()
endif()
function(protobuf_generate_cpp SRCS HDRS)
set (modes cpp)
set (output_file_exts ".pb.cc" ".pb.h")
set (plugin)
protobuf_generate_cpp_impl(srcs hdrs "${modes}" "${output_file_exts}" "${plugin}" ${ARGN})
set(${SRCS} ${srcs} PARENT_SCOPE)
set(${HDRS} ${hdrs} PARENT_SCOPE)
endfunction()
function(protobuf_generate_grpc_cpp SRCS HDRS)
set (modes cpp grpc)
set (output_file_exts ".pb.cc" ".pb.h" ".grpc.pb.cc" ".grpc.pb.h")
set (plugin "protoc-gen-grpc=${GRPC_CPP_PLUGIN_EXECUTABLE}")
protobuf_generate_cpp_impl(srcs hdrs "${modes}" "${output_file_exts}" "${plugin}" ${ARGN})
set(${SRCS} ${srcs} PARENT_SCOPE)
set(${HDRS} ${hdrs} PARENT_SCOPE)
endfunction()


@@ -1,4 +1,4 @@
-#include <FastMemcpy.h>
+#include "FastMemcpy.h"
 
 void * memcpy(void * __restrict destination, const void * __restrict source, size_t size)
 {

contrib/cyrus-sasl (submodule update)

@@ -1 +1 @@
-Subproject commit 6054630889fd1cd8d0659573d69badcee1e23a00
+Subproject commit 9995bf9d8e14f58934d9313ac64f13780d6dd3c9

contrib/grpc (submodule update)

@@ -1 +1 @@
-Subproject commit 8aea4e168e78f3eb9828080740fc8cb73d53bf79
+Subproject commit a6570b863cf76c9699580ba51c7827d5bffaac43

(File diff suppressed because it is too large.)


contrib/grpc-cmake/protobuf_generate_grpc.cmake (new file, +207)

@@ -0,0 +1,207 @@
#[[
Add custom commands to process ``.proto`` files to C++::
protobuf_generate_grpc_cpp(<SRCS> <HDRS>
[DESCRIPTORS <DESC>] [EXPORT_MACRO <MACRO>] [<ARGN>...])
``SRCS``
Variable to define with autogenerated source files
``HDRS``
Variable to define with autogenerated header files
``DESCRIPTORS``
Variable to define with autogenerated descriptor files, if requested.
``EXPORT_MACRO``
is a macro which should expand to ``__declspec(dllexport)`` or
``__declspec(dllimport)`` depending on what is being compiled.
``ARGN``
``.proto`` files
#]]
# Function to generate C++ files from .proto files.
# This function is a modified version of the function PROTOBUF_GENERATE_CPP() copied from https://github.com/Kitware/CMake/blob/master/Modules/FindProtobuf.cmake.
function(PROTOBUF_GENERATE_GRPC_CPP SRCS HDRS)
cmake_parse_arguments(protobuf_generate_grpc_cpp "" "EXPORT_MACRO;DESCRIPTORS" "" ${ARGN})
set(_proto_files "${protobuf_generate_grpc_cpp_UNPARSED_ARGUMENTS}")
if(NOT _proto_files)
message(SEND_ERROR "Error: PROTOBUF_GENERATE_GRPC_CPP() called without any proto files")
return()
endif()
if(PROTOBUF_GENERATE_GRPC_CPP_APPEND_PATH)
set(_append_arg APPEND_PATH)
endif()
if(protobuf_generate_grpc_cpp_DESCRIPTORS)
set(_descriptors DESCRIPTORS)
endif()
if(DEFINED PROTOBUF_IMPORT_DIRS AND NOT DEFINED Protobuf_IMPORT_DIRS)
set(Protobuf_IMPORT_DIRS "${PROTOBUF_IMPORT_DIRS}")
endif()
if(DEFINED Protobuf_IMPORT_DIRS)
set(_import_arg IMPORT_DIRS ${Protobuf_IMPORT_DIRS})
endif()
set(_outvar)
protobuf_generate_grpc(${_append_arg} ${_descriptors} LANGUAGE cpp EXPORT_MACRO ${protobuf_generate_grpc_cpp_EXPORT_MACRO} OUT_VAR _outvar ${_import_arg} PROTOS ${_proto_files})
set(${SRCS})
set(${HDRS})
if(protobuf_generate_grpc_cpp_DESCRIPTORS)
set(${protobuf_generate_grpc_cpp_DESCRIPTORS})
endif()
foreach(_file ${_outvar})
if(_file MATCHES "cc$")
list(APPEND ${SRCS} ${_file})
elseif(_file MATCHES "desc$")
list(APPEND ${protobuf_generate_grpc_cpp_DESCRIPTORS} ${_file})
else()
list(APPEND ${HDRS} ${_file})
endif()
endforeach()
set(${SRCS} ${${SRCS}} PARENT_SCOPE)
set(${HDRS} ${${HDRS}} PARENT_SCOPE)
if(protobuf_generate_grpc_cpp_DESCRIPTORS)
set(${protobuf_generate_grpc_cpp_DESCRIPTORS} "${${protobuf_generate_grpc_cpp_DESCRIPTORS}}" PARENT_SCOPE)
endif()
endfunction()
# Helper function.
# This function is a modified version of the function protobuf_generate() copied from https://github.com/Kitware/CMake/blob/master/Modules/FindProtobuf.cmake.
function(protobuf_generate_grpc)
set(_options APPEND_PATH DESCRIPTORS)
set(_singleargs LANGUAGE OUT_VAR EXPORT_MACRO PROTOC_OUT_DIR)
if(COMMAND target_sources)
list(APPEND _singleargs TARGET)
endif()
set(_multiargs PROTOS IMPORT_DIRS GENERATE_EXTENSIONS)
cmake_parse_arguments(protobuf_generate_grpc "${_options}" "${_singleargs}" "${_multiargs}" "${ARGN}")
if(NOT protobuf_generate_grpc_PROTOS AND NOT protobuf_generate_grpc_TARGET)
message(SEND_ERROR "Error: protobuf_generate_grpc called without any targets or source files")
return()
endif()
if(NOT protobuf_generate_grpc_OUT_VAR AND NOT protobuf_generate_grpc_TARGET)
message(SEND_ERROR "Error: protobuf_generate_grpc called without a target or output variable")
return()
endif()
if(NOT protobuf_generate_grpc_LANGUAGE)
set(protobuf_generate_grpc_LANGUAGE cpp)
endif()
string(TOLOWER ${protobuf_generate_grpc_LANGUAGE} protobuf_generate_grpc_LANGUAGE)
if(NOT protobuf_generate_grpc_PROTOC_OUT_DIR)
set(protobuf_generate_grpc_PROTOC_OUT_DIR ${CMAKE_CURRENT_BINARY_DIR})
endif()
if(protobuf_generate_grpc_EXPORT_MACRO AND protobuf_generate_grpc_LANGUAGE STREQUAL cpp)
set(_dll_export_decl "dllexport_decl=${protobuf_generate_grpc_EXPORT_MACRO}:")
endif()
if(NOT protobuf_generate_grpc_GENERATE_EXTENSIONS)
if(protobuf_generate_grpc_LANGUAGE STREQUAL cpp)
set(protobuf_generate_grpc_GENERATE_EXTENSIONS .pb.h .pb.cc .grpc.pb.h .grpc.pb.cc)
elseif(protobuf_generate_grpc_LANGUAGE STREQUAL python)
set(protobuf_generate_grpc_GENERATE_EXTENSIONS _pb2.py)
else()
message(SEND_ERROR "Error: protobuf_generate_grpc given unknown Language ${LANGUAGE}, please provide a value for GENERATE_EXTENSIONS")
return()
endif()
endif()
if(NOT protobuf_generate_grpc_PLUGIN)
if(protobuf_generate_grpc_LANGUAGE STREQUAL cpp)
set(protobuf_generate_grpc_PLUGIN "grpc_cpp_plugin")
elseif(protobuf_generate_grpc_LANGUAGE STREQUAL python)
set(protobuf_generate_grpc_PLUGIN "grpc_python_plugin")
else()
message(SEND_ERROR "Error: protobuf_generate_grpc given unknown Language ${LANGUAGE}, please provide a value for PLUGIN")
return()
endif()
endif()
if(protobuf_generate_grpc_TARGET)
get_target_property(_source_list ${protobuf_generate_grpc_TARGET} SOURCES)
foreach(_file ${_source_list})
if(_file MATCHES "proto$")
list(APPEND protobuf_generate_grpc_PROTOS ${_file})
endif()
endforeach()
endif()
if(NOT protobuf_generate_grpc_PROTOS)
message(SEND_ERROR "Error: protobuf_generate_grpc could not find any .proto files")
return()
endif()
if(protobuf_generate_grpc_APPEND_PATH)
# Create an include path for each file specified
foreach(_file ${protobuf_generate_grpc_PROTOS})
get_filename_component(_abs_file ${_file} ABSOLUTE)
get_filename_component(_abs_path ${_abs_file} PATH)
list(FIND _protobuf_include_path ${_abs_path} _contains_already)
if(${_contains_already} EQUAL -1)
list(APPEND _protobuf_include_path -I ${_abs_path})
endif()
endforeach()
else()
set(_protobuf_include_path -I ${CMAKE_CURRENT_SOURCE_DIR})
endif()
foreach(DIR ${protobuf_generate_grpc_IMPORT_DIRS})
get_filename_component(ABS_PATH ${DIR} ABSOLUTE)
list(FIND _protobuf_include_path ${ABS_PATH} _contains_already)
if(${_contains_already} EQUAL -1)
list(APPEND _protobuf_include_path -I ${ABS_PATH})
endif()
endforeach()
set(_generated_srcs_all)
foreach(_proto ${protobuf_generate_grpc_PROTOS})
get_filename_component(_abs_file ${_proto} ABSOLUTE)
get_filename_component(_abs_dir ${_abs_file} DIRECTORY)
get_filename_component(_basename ${_proto} NAME_WE)
file(RELATIVE_PATH _rel_dir ${CMAKE_CURRENT_SOURCE_DIR} ${_abs_dir})
set(_possible_rel_dir)
if(NOT protobuf_generate_grpc_APPEND_PATH)
set(_possible_rel_dir ${_rel_dir}/)
endif()
set(_generated_srcs)
foreach(_ext ${protobuf_generate_grpc_GENERATE_EXTENSIONS})
list(APPEND _generated_srcs "${protobuf_generate_grpc_PROTOC_OUT_DIR}/${_possible_rel_dir}${_basename}${_ext}")
endforeach()
if(protobuf_generate_grpc_DESCRIPTORS AND protobuf_generate_grpc_LANGUAGE STREQUAL cpp)
set(_descriptor_file "${CMAKE_CURRENT_BINARY_DIR}/${_basename}.desc")
set(_dll_desc_out "--descriptor_set_out=${_descriptor_file}")
list(APPEND _generated_srcs ${_descriptor_file})
endif()
list(APPEND _generated_srcs_all ${_generated_srcs})
add_custom_command(
OUTPUT ${_generated_srcs}
COMMAND protobuf::protoc
ARGS --${protobuf_generate_grpc_LANGUAGE}_out ${_dll_export_decl}${protobuf_generate_grpc_PROTOC_OUT_DIR}
--grpc_out ${_dll_export_decl}${protobuf_generate_grpc_PROTOC_OUT_DIR}
--plugin=protoc-gen-grpc=$<TARGET_FILE:${protobuf_generate_grpc_PLUGIN}>
${_dll_desc_out} ${_protobuf_include_path} ${_abs_file}
DEPENDS ${_abs_file} protobuf::protoc ${protobuf_generate_grpc_PLUGIN}
COMMENT "Running ${protobuf_generate_grpc_LANGUAGE} protocol buffer compiler on ${_proto}"
VERBATIM)
endforeach()
set_source_files_properties(${_generated_srcs_all} PROPERTIES GENERATED TRUE)
if(protobuf_generate_grpc_OUT_VAR)
set(${protobuf_generate_grpc_OUT_VAR} ${_generated_srcs_all} PARENT_SCOPE)
endif()
if(protobuf_generate_grpc_TARGET)
target_sources(${protobuf_generate_grpc_TARGET} PRIVATE ${_generated_srcs_all})
endif()
endfunction()

contrib/poco (submodule update)

@@ -1 +1 @@
-Subproject commit 297fc905e166392156f83b96aaa5f44e8a6a35c4
+Subproject commit 757d947235b307675cff964f29b19d388140a9eb


contrib/protobuf-cmake/protobuf_generate.cmake (new file, +198)

@@ -0,0 +1,198 @@
# The code in this file was copied from https://github.com/Kitware/CMake/blob/master/Modules/FindProtobuf.cmake
#[[
Add custom commands to process ``.proto`` files to C++::
protobuf_generate_cpp (<SRCS> <HDRS>
[DESCRIPTORS <DESC>] [EXPORT_MACRO <MACRO>] [<ARGN>...])
``SRCS``
Variable to define with autogenerated source files
``HDRS``
Variable to define with autogenerated header files
``DESCRIPTORS``
Variable to define with autogenerated descriptor files, if requested.
``EXPORT_MACRO``
is a macro which should expand to ``__declspec(dllexport)`` or
``__declspec(dllimport)`` depending on what is being compiled.
``ARGN``
``.proto`` files
#]]
function(PROTOBUF_GENERATE_CPP SRCS HDRS)
cmake_parse_arguments(protobuf_generate_cpp "" "EXPORT_MACRO;DESCRIPTORS" "" ${ARGN})
set(_proto_files "${protobuf_generate_cpp_UNPARSED_ARGUMENTS}")
if(NOT _proto_files)
message(SEND_ERROR "Error: PROTOBUF_GENERATE_CPP() called without any proto files")
return()
endif()
if(PROTOBUF_GENERATE_CPP_APPEND_PATH)
set(_append_arg APPEND_PATH)
endif()
if(protobuf_generate_cpp_DESCRIPTORS)
set(_descriptors DESCRIPTORS)
endif()
if(DEFINED PROTOBUF_IMPORT_DIRS AND NOT DEFINED Protobuf_IMPORT_DIRS)
set(Protobuf_IMPORT_DIRS "${PROTOBUF_IMPORT_DIRS}")
endif()
if(DEFINED Protobuf_IMPORT_DIRS)
set(_import_arg IMPORT_DIRS ${Protobuf_IMPORT_DIRS})
endif()
set(_outvar)
protobuf_generate(${_append_arg} ${_descriptors} LANGUAGE cpp EXPORT_MACRO ${protobuf_generate_cpp_EXPORT_MACRO} OUT_VAR _outvar ${_import_arg} PROTOS ${_proto_files})
set(${SRCS})
set(${HDRS})
if(protobuf_generate_cpp_DESCRIPTORS)
set(${protobuf_generate_cpp_DESCRIPTORS})
endif()
foreach(_file ${_outvar})
if(_file MATCHES "cc$")
list(APPEND ${SRCS} ${_file})
elseif(_file MATCHES "desc$")
list(APPEND ${protobuf_generate_cpp_DESCRIPTORS} ${_file})
else()
list(APPEND ${HDRS} ${_file})
endif()
endforeach()
set(${SRCS} ${${SRCS}} PARENT_SCOPE)
set(${HDRS} ${${HDRS}} PARENT_SCOPE)
if(protobuf_generate_cpp_DESCRIPTORS)
set(${protobuf_generate_cpp_DESCRIPTORS} "${${protobuf_generate_cpp_DESCRIPTORS}}" PARENT_SCOPE)
endif()
endfunction()
# By default have PROTOBUF_GENERATE_CPP macro pass -I to protoc
# for each directory where a proto file is referenced.
if(NOT DEFINED PROTOBUF_GENERATE_CPP_APPEND_PATH)
set(PROTOBUF_GENERATE_CPP_APPEND_PATH TRUE)
endif()
function(protobuf_generate)
set(_options APPEND_PATH DESCRIPTORS)
set(_singleargs LANGUAGE OUT_VAR EXPORT_MACRO PROTOC_OUT_DIR)
if(COMMAND target_sources)
list(APPEND _singleargs TARGET)
endif()
set(_multiargs PROTOS IMPORT_DIRS GENERATE_EXTENSIONS)
cmake_parse_arguments(protobuf_generate "${_options}" "${_singleargs}" "${_multiargs}" "${ARGN}")
if(NOT protobuf_generate_PROTOS AND NOT protobuf_generate_TARGET)
message(SEND_ERROR "Error: protobuf_generate called without any targets or source files")
return()
endif()
if(NOT protobuf_generate_OUT_VAR AND NOT protobuf_generate_TARGET)
message(SEND_ERROR "Error: protobuf_generate called without a target or output variable")
return()
endif()
if(NOT protobuf_generate_LANGUAGE)
set(protobuf_generate_LANGUAGE cpp)
endif()
string(TOLOWER ${protobuf_generate_LANGUAGE} protobuf_generate_LANGUAGE)
if(NOT protobuf_generate_PROTOC_OUT_DIR)
set(protobuf_generate_PROTOC_OUT_DIR ${CMAKE_CURRENT_BINARY_DIR})
endif()
if(protobuf_generate_EXPORT_MACRO AND protobuf_generate_LANGUAGE STREQUAL cpp)
set(_dll_export_decl "dllexport_decl=${protobuf_generate_EXPORT_MACRO}:")
endif()
if(NOT protobuf_generate_GENERATE_EXTENSIONS)
if(protobuf_generate_LANGUAGE STREQUAL cpp)
set(protobuf_generate_GENERATE_EXTENSIONS .pb.h .pb.cc)
elseif(protobuf_generate_LANGUAGE STREQUAL python)
set(protobuf_generate_GENERATE_EXTENSIONS _pb2.py)
else()
message(SEND_ERROR "Error: protobuf_generate given unknown Language ${LANGUAGE}, please provide a value for GENERATE_EXTENSIONS")
return()
endif()
endif()
if(protobuf_generate_TARGET)
get_target_property(_source_list ${protobuf_generate_TARGET} SOURCES)
foreach(_file ${_source_list})
if(_file MATCHES "proto$")
list(APPEND protobuf_generate_PROTOS ${_file})
endif()
endforeach()
endif()
if(NOT protobuf_generate_PROTOS)
message(SEND_ERROR "Error: protobuf_generate could not find any .proto files")
return()
endif()
if(protobuf_generate_APPEND_PATH)
# Create an include path for each file specified
foreach(_file ${protobuf_generate_PROTOS})
get_filename_component(_abs_file ${_file} ABSOLUTE)
get_filename_component(_abs_path ${_abs_file} PATH)
list(FIND _protobuf_include_path ${_abs_path} _contains_already)
if(${_contains_already} EQUAL -1)
list(APPEND _protobuf_include_path -I ${_abs_path})
endif()
endforeach()
else()
set(_protobuf_include_path -I ${CMAKE_CURRENT_SOURCE_DIR})
endif()
foreach(DIR ${protobuf_generate_IMPORT_DIRS})
get_filename_component(ABS_PATH ${DIR} ABSOLUTE)
list(FIND _protobuf_include_path ${ABS_PATH} _contains_already)
if(${_contains_already} EQUAL -1)
list(APPEND _protobuf_include_path -I ${ABS_PATH})
endif()
endforeach()
set(_generated_srcs_all)
foreach(_proto ${protobuf_generate_PROTOS})
get_filename_component(_abs_file ${_proto} ABSOLUTE)
get_filename_component(_abs_dir ${_abs_file} DIRECTORY)
get_filename_component(_basename ${_proto} NAME_WE)
file(RELATIVE_PATH _rel_dir ${CMAKE_CURRENT_SOURCE_DIR} ${_abs_dir})
set(_possible_rel_dir)
if (NOT protobuf_generate_APPEND_PATH)
set(_possible_rel_dir ${_rel_dir}/)
endif()
set(_generated_srcs)
foreach(_ext ${protobuf_generate_GENERATE_EXTENSIONS})
list(APPEND _generated_srcs "${protobuf_generate_PROTOC_OUT_DIR}/${_possible_rel_dir}${_basename}${_ext}")
endforeach()
if(protobuf_generate_DESCRIPTORS AND protobuf_generate_LANGUAGE STREQUAL cpp)
set(_descriptor_file "${CMAKE_CURRENT_BINARY_DIR}/${_basename}.desc")
set(_dll_desc_out "--descriptor_set_out=${_descriptor_file}")
list(APPEND _generated_srcs ${_descriptor_file})
endif()
list(APPEND _generated_srcs_all ${_generated_srcs})
add_custom_command(
OUTPUT ${_generated_srcs}
COMMAND protobuf::protoc
ARGS --${protobuf_generate_LANGUAGE}_out ${_dll_export_decl}${protobuf_generate_PROTOC_OUT_DIR} ${_dll_desc_out} ${_protobuf_include_path} ${_abs_file}
DEPENDS ${_abs_file} protobuf::protoc
COMMENT "Running ${protobuf_generate_LANGUAGE} protocol buffer compiler on ${_proto}"
VERBATIM )
endforeach()
set_source_files_properties(${_generated_srcs_all} PROPERTIES GENERATED TRUE)
if(protobuf_generate_OUT_VAR)
set(${protobuf_generate_OUT_VAR} ${_generated_srcs_all} PARENT_SCOPE)
endif()
if(protobuf_generate_TARGET)
target_sources(${protobuf_generate_TARGET} PRIVATE ${_generated_srcs_all})
endif()
endfunction()
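A minimal usage sketch of the function above (the example.proto file and target names are hypothetical, not part of this commit):

# Generate C++ sources from a hypothetical example.proto and attach them to a
# library; generated files land in CMAKE_CURRENT_BINARY_DIR by default.
protobuf_generate(
    LANGUAGE cpp
    OUT_VAR EXAMPLE_PROTO_SRCS
    PROTOS example.proto)
add_library(example_messages ${EXAMPLE_PROTO_SRCS})
target_link_libraries(example_messages PUBLIC protobuf::libprotobuf)
target_include_directories(example_messages PUBLIC ${CMAKE_CURRENT_BINARY_DIR})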

2
debian/control vendored
View File

@ -62,5 +62,5 @@ Description: debugging symbols for clickhouse-common-static
Package: clickhouse-test Package: clickhouse-test
Priority: optional Priority: optional
Architecture: all Architecture: all
Depends: ${shlibs:Depends}, ${misc:Depends}, clickhouse-client, bash, expect, python, python-lxml, python-termcolor, python-requests, curl, perl, sudo, openssl, netcat-openbsd, telnet, brotli, bsdutils Depends: ${shlibs:Depends}, ${misc:Depends}, clickhouse-client, bash, expect, python3, python3-lxml, python3-termcolor, python3-requests, curl, perl, sudo, openssl, netcat-openbsd, telnet, brotli, bsdutils
Description: ClickHouse tests Description: ClickHouse tests

View File

@ -25,10 +25,10 @@ RUN apt-get update \
ninja-build \ ninja-build \
perl \ perl \
pkg-config \ pkg-config \
python \ python3 \
python-lxml \ python3-lxml \
python-requests \ python3-requests \
python-termcolor \ python3-termcolor \
tzdata \ tzdata \
llvm-${LLVM_VERSION} \ llvm-${LLVM_VERSION} \
clang-${LLVM_VERSION} \ clang-${LLVM_VERSION} \

View File

@ -1,9 +1,10 @@
#!/usr/bin/env bash #!/usr/bin/env bash
set -e
#ccache -s # uncomment to display CCache statistics #ccache -s # uncomment to display CCache statistics
mkdir -p /server/build_docker mkdir -p /server/build_docker
cd /server/build_docker cd /server/build_docker
cmake -G Ninja /server -DCMAKE_C_COMPILER=`which gcc-9` -DCMAKE_CXX_COMPILER=`which g++-9` cmake -G Ninja /server "-DCMAKE_C_COMPILER=$(command -v gcc-9)" "-DCMAKE_CXX_COMPILER=$(command -v g++-9)"
# Set the number of build jobs to the half of number of virtual CPU cores (rounded up). # Set the number of build jobs to the half of number of virtual CPU cores (rounded up).
# By default, ninja use all virtual CPU cores, that leads to very high memory consumption without much improvement in build time. # By default, ninja use all virtual CPU cores, that leads to very high memory consumption without much improvement in build time.
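A sketch of the job-count arithmetic that comment describes (variable name illustrative):

# Half of the virtual CPU cores, rounded up; nproc counts virtual cores.
NUM_JOBS=$(( ($(nproc) + 1) / 2 ))
ninja -j "$NUM_JOBS"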

View File

@ -9,7 +9,8 @@
"name": "yandex/clickhouse-binary-builder", "name": "yandex/clickhouse-binary-builder",
"dependent": [ "dependent": [
"docker/test/split_build_smoke_test", "docker/test/split_build_smoke_test",
"docker/test/pvs" "docker/test/pvs",
"docker/test/codebrowser"
] ]
}, },
"docker/packager/unbundled": { "docker/packager/unbundled": {
@ -133,6 +134,10 @@
"name": "yandex/clickhouse-postgresql-java-client", "name": "yandex/clickhouse-postgresql-java-client",
"dependent": [] "dependent": []
}, },
"docker/test/integration/kerberos_kdc": {
"name": "yandex/clickhouse-kerberos-kdc",
"dependent": []
},
"docker/test/base": { "docker/test/base": {
"name": "yandex/clickhouse-test-base", "name": "yandex/clickhouse-test-base",
"dependent": [ "dependent": [

View File

@ -1,7 +1,7 @@
# docker build -t yandex/clickhouse-binary-builder . # docker build -t yandex/clickhouse-binary-builder .
FROM ubuntu:20.04 FROM ubuntu:20.04
ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=10 ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=11
RUN apt-get update \ RUN apt-get update \
&& apt-get install ca-certificates lsb-release wget gnupg apt-transport-https \ && apt-get install ca-certificates lsb-release wget gnupg apt-transport-https \
@ -11,7 +11,7 @@ RUN apt-get update \
&& echo "${LLVM_PUBKEY_HASH} /tmp/llvm-snapshot.gpg.key" | sha384sum -c \ && echo "${LLVM_PUBKEY_HASH} /tmp/llvm-snapshot.gpg.key" | sha384sum -c \
&& apt-key add /tmp/llvm-snapshot.gpg.key \ && apt-key add /tmp/llvm-snapshot.gpg.key \
&& export CODENAME="$(lsb_release --codename --short | tr 'A-Z' 'a-z')" \ && export CODENAME="$(lsb_release --codename --short | tr 'A-Z' 'a-z')" \
&& echo "deb [trusted=yes] http://apt.llvm.org/${CODENAME}/ llvm-toolchain-${CODENAME}-11 main" >> \ && echo "deb [trusted=yes] http://apt.llvm.org/${CODENAME}/ llvm-toolchain-${CODENAME}-${LLVM_VERSION} main" >> \
/etc/apt/sources.list /etc/apt/sources.list
# initial packages # initial packages
@ -32,10 +32,11 @@ RUN apt-get update \
curl \ curl \
gcc-9 \ gcc-9 \
g++-9 \ g++-9 \
llvm-${LLVM_VERSION} \ clang-10 \
clang-${LLVM_VERSION} \ clang-tidy-10 \
lld-${LLVM_VERSION} \ lld-10 \
clang-tidy-${LLVM_VERSION} \ llvm-10 \
llvm-10-dev \
clang-11 \ clang-11 \
clang-tidy-11 \ clang-tidy-11 \
lld-11 \ lld-11 \

View File

@ -17,7 +17,10 @@ ccache --show-stats ||:
ccache --zero-stats ||: ccache --zero-stats ||:
ln -s /usr/lib/x86_64-linux-gnu/libOpenCL.so.1.0.0 /usr/lib/libOpenCL.so ||: ln -s /usr/lib/x86_64-linux-gnu/libOpenCL.so.1.0.0 /usr/lib/libOpenCL.so ||:
rm -f CMakeCache.txt rm -f CMakeCache.txt
cmake --debug-trycompile --verbose=1 -DCMAKE_VERBOSE_MAKEFILE=1 -LA -DCMAKE_BUILD_TYPE=$BUILD_TYPE -DSANITIZE=$SANITIZER -DENABLE_CHECK_HEAVY_BUILDS=1 $CMAKE_FLAGS .. # Read cmake arguments into array (possibly empty)
read -ra CMAKE_FLAGS <<< "${CMAKE_FLAGS:-}"
cmake --debug-trycompile --verbose=1 -DCMAKE_VERBOSE_MAKEFILE=1 -LA "-DCMAKE_BUILD_TYPE=$BUILD_TYPE" "-DSANITIZE=$SANITIZER" -DENABLE_CHECK_HEAVY_BUILDS=1 "${CMAKE_FLAGS[@]}" ..
# shellcheck disable=SC2086 # No quotes because I want it to expand to nothing if empty.
ninja $NINJA_FLAGS clickhouse-bundle ninja $NINJA_FLAGS clickhouse-bundle
mv ./programs/clickhouse* /output mv ./programs/clickhouse* /output
mv ./src/unit_tests_dbms /output ||: # may not exist for some binary builds mv ./src/unit_tests_dbms /output ||: # may not exist for some binary builds
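For context, a sketch of the array pattern introduced above, assuming CMAKE_FLAGS arrives as one whitespace-separated string:

CMAKE_FLAGS='-DENABLE_TESTS=0 -DUSE_UNWIND=1'   # hypothetical value
read -ra FLAGS_ARRAY <<< "${CMAKE_FLAGS:-}"     # unset/empty input -> empty array
cmake "${FLAGS_ARRAY[@]}" .                     # quoted expansion, one word per flag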

View File

@ -1,7 +1,7 @@
# docker build -t yandex/clickhouse-deb-builder . # docker build -t yandex/clickhouse-deb-builder .
FROM ubuntu:20.04 FROM ubuntu:20.04
ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=10 ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=11
RUN apt-get update \ RUN apt-get update \
&& apt-get install ca-certificates lsb-release wget gnupg apt-transport-https \ && apt-get install ca-certificates lsb-release wget gnupg apt-transport-https \
@ -11,7 +11,7 @@ RUN apt-get update \
&& echo "${LLVM_PUBKEY_HASH} /tmp/llvm-snapshot.gpg.key" | sha384sum -c \ && echo "${LLVM_PUBKEY_HASH} /tmp/llvm-snapshot.gpg.key" | sha384sum -c \
&& apt-key add /tmp/llvm-snapshot.gpg.key \ && apt-key add /tmp/llvm-snapshot.gpg.key \
&& export CODENAME="$(lsb_release --codename --short | tr 'A-Z' 'a-z')" \ && export CODENAME="$(lsb_release --codename --short | tr 'A-Z' 'a-z')" \
&& echo "deb [trusted=yes] http://apt.llvm.org/${CODENAME}/ llvm-toolchain-${CODENAME}-11 main" >> \ && echo "deb [trusted=yes] http://apt.llvm.org/${CODENAME}/ llvm-toolchain-${CODENAME}-${LLVM_VERSION} main" >> \
/etc/apt/sources.list /etc/apt/sources.list
# initial packages # initial packages
@ -49,15 +49,11 @@ RUN apt-get update \
lld-11 \ lld-11 \
llvm-11 \ llvm-11 \
llvm-11-dev \ llvm-11-dev \
clang-${LLVM_VERSION} \ clang-10 \
clang-tidy-${LLVM_VERSION} \ clang-tidy-10 \
lld-${LLVM_VERSION} \ lld-10 \
llvm-${LLVM_VERSION} \ llvm-10 \
llvm-${LLVM_VERSION}-dev \ llvm-10-dev \
llvm-9-dev \
lld-9 \
clang-9 \
clang-tidy-9 \
ninja-build \ ninja-build \
perl \ perl \
pkg-config \ pkg-config \

View File

@ -4,16 +4,17 @@ set -x -e
ccache --show-stats ||: ccache --show-stats ||:
ccache --zero-stats ||: ccache --zero-stats ||:
build/release --no-pbuilder $ALIEN_PKGS | ts '%Y-%m-%d %H:%M:%S' read -ra ALIEN_PKGS <<< "${ALIEN_PKGS:-}"
build/release --no-pbuilder "${ALIEN_PKGS[@]}" | ts '%Y-%m-%d %H:%M:%S'
mv /*.deb /output mv /*.deb /output
mv *.changes /output mv -- *.changes /output
mv *.buildinfo /output mv -- *.buildinfo /output
mv /*.rpm /output ||: # if exists mv /*.rpm /output ||: # if exists
mv /*.tgz /output ||: # if exists mv /*.tgz /output ||: # if exists
if [ -n "$BINARY_OUTPUT" ] && { [ "$BINARY_OUTPUT" = "programs" ] || [ "$BINARY_OUTPUT" = "tests" ] ;} if [ -n "$BINARY_OUTPUT" ] && { [ "$BINARY_OUTPUT" = "programs" ] || [ "$BINARY_OUTPUT" = "tests" ] ;}
then then
echo Place $BINARY_OUTPUT to output echo "Place $BINARY_OUTPUT to output"
mkdir /output/binary ||: # if exists mkdir /output/binary ||: # if exists
mv /build/obj-*/programs/clickhouse* /output/binary mv /build/obj-*/programs/clickhouse* /output/binary
if [ "$BINARY_OUTPUT" = "tests" ] if [ "$BINARY_OUTPUT" = "tests" ]

View File

@ -1,4 +1,4 @@
#!/usr/bin/env python #!/usr/bin/env python3
#-*- coding: utf-8 -*- #-*- coding: utf-8 -*-
import subprocess import subprocess
import os import os

View File

@ -51,6 +51,7 @@ RUN apt-get update \
protobuf-compiler \ protobuf-compiler \
libprotoc-dev \ libprotoc-dev \
libgrpc++-dev \ libgrpc++-dev \
protobuf-compiler-grpc \
rapidjson-dev \ rapidjson-dev \
libsnappy-dev \ libsnappy-dev \
libparquet-dev \ libparquet-dev \

View File

@ -4,10 +4,11 @@ set -x -e
ccache --show-stats ||: ccache --show-stats ||:
ccache --zero-stats ||: ccache --zero-stats ||:
build/release --no-pbuilder $ALIEN_PKGS | ts '%Y-%m-%d %H:%M:%S' read -ra ALIEN_PKGS <<< "${ALIEN_PKGS:-}"
build/release --no-pbuilder "${ALIEN_PKGS[@]}" | ts '%Y-%m-%d %H:%M:%S'
mv /*.deb /output mv /*.deb /output
mv *.changes /output mv -- *.changes /output
mv *.buildinfo /output mv -- *.buildinfo /output
mv /*.rpm /output ||: # if exists mv /*.rpm /output ||: # if exists
mv /*.tgz /output ||: # if exists mv /*.tgz /output ||: # if exists

View File

@ -48,10 +48,15 @@ RUN apt-get update \
tzdata \ tzdata \
--yes --no-install-recommends --yes --no-install-recommends
# Sanitizer options # Sanitizer options for services (clickhouse-server)
RUN echo "TSAN_OPTIONS='verbosity=1000 halt_on_error=1 history_size=7'" >> /etc/environment; \ RUN echo "TSAN_OPTIONS='verbosity=1000 halt_on_error=1 history_size=7'" >> /etc/environment; \
echo "UBSAN_OPTIONS='print_stacktrace=1'" >> /etc/environment; \ echo "UBSAN_OPTIONS='print_stacktrace=1'" >> /etc/environment; \
echo "MSAN_OPTIONS='abort_on_error=1'" >> /etc/environment; \ echo "MSAN_OPTIONS='abort_on_error=1'" >> /etc/environment; \
ln -s /usr/lib/llvm-${LLVM_VERSION}/bin/llvm-symbolizer /usr/bin/llvm-symbolizer; ln -s /usr/lib/llvm-${LLVM_VERSION}/bin/llvm-symbolizer /usr/bin/llvm-symbolizer;
# Sanitizer options for current shell (not current, but the one that will be spawned on "docker run")
# (but w/o verbosity for TSAN, otherwise test.reference will not match)
ENV TSAN_OPTIONS='halt_on_error=1 history_size=7'
ENV UBSAN_OPTIONS='print_stacktrace=1'
ENV MSAN_OPTIONS='abort_on_error=1'
CMD sleep 1 CMD sleep 1
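A hedged illustration of the two mechanisms above (image name hypothetical): /etc/environment covers services started through PAM, while the ENV instructions cover shells spawned on "docker run":

docker run --rm some-sanitizer-test-image bash -c 'echo "$TSAN_OPTIONS"'
# expected: halt_on_error=1 history_size=7  (no verbosity, so test reference
# output stays stable, as the comment above notes)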

View File

@ -1,33 +1,15 @@
# docker build --network=host -t yandex/clickhouse-codebrowser . # docker build --network=host -t yandex/clickhouse-codebrowser .
# docker run --volume=path_to_repo:/repo_folder --volume=path_to_result:/test_output yandex/clickhouse-codebrowser # docker run --volume=path_to_repo:/repo_folder --volume=path_to_result:/test_output yandex/clickhouse-codebrowser
FROM ubuntu:18.04 FROM yandex/clickhouse-binary-builder
RUN apt-get --allow-unauthenticated update -y \ RUN apt-get update && apt-get --yes --allow-unauthenticated install clang-9 libllvm9 libclang-9-dev
&& env DEBIAN_FRONTEND=noninteractive \
apt-get --allow-unauthenticated install --yes --no-install-recommends \
bash \
sudo \
wget \
software-properties-common \
ca-certificates \
apt-transport-https \
build-essential \
gpg-agent \
git
RUN wget -nv -O - https://apt.kitware.com/keys/kitware-archive-latest.asc | sudo apt-key add -
RUN sudo apt-add-repository 'deb https://apt.kitware.com/ubuntu/ bionic main'
RUN sudo echo "deb [trusted=yes] http://apt.llvm.org/bionic/ llvm-toolchain-bionic-8 main" >> /etc/apt/sources.list
RUN sudo apt-get --yes --allow-unauthenticated update
# To build woboq
RUN sudo apt-get --yes --allow-unauthenticated install cmake clang-8 libllvm8 libclang-8-dev
# repo versions don't work correctly with C++17 # repo versions don't work correctly with C++17
# also we push reports to s3, so we add index.html to subfolder urls # also we push reports to s3, so we add index.html to subfolder urls
# https://github.com/ClickHouse-Extras/woboq_codebrowser/commit/37e15eaf377b920acb0b48dbe82471be9203f76b # https://github.com/ClickHouse-Extras/woboq_codebrowser/commit/37e15eaf377b920acb0b48dbe82471be9203f76b
RUN git clone https://github.com/ClickHouse-Extras/woboq_codebrowser RUN git clone https://github.com/ClickHouse-Extras/woboq_codebrowser
RUN cd woboq_codebrowser && cmake . -DCMAKE_BUILD_TYPE=Release && make -j
RUN cd woboq_codebrowser && cmake . -DCMAKE_BUILD_TYPE=Release -DCMAKE_CXX_COMPILER=clang\+\+-9 -DCMAKE_C_COMPILER=clang-9 && make -j
ENV CODEGEN=/woboq_codebrowser/generator/codebrowser_generator ENV CODEGEN=/woboq_codebrowser/generator/codebrowser_generator
ENV CODEINDEX=/woboq_codebrowser/indexgenerator/codebrowser_indexgenerator ENV CODEINDEX=/woboq_codebrowser/indexgenerator/codebrowser_indexgenerator
@ -40,7 +22,7 @@ ENV SHA=nosha
ENV DATA="data" ENV DATA="data"
CMD mkdir -p $BUILD_DIRECTORY && cd $BUILD_DIRECTORY && \ CMD mkdir -p $BUILD_DIRECTORY && cd $BUILD_DIRECTORY && \
cmake $SOURCE_DIRECTORY -DCMAKE_CXX_COMPILER=/usr/bin/clang\+\+-8 -DCMAKE_C_COMPILER=/usr/bin/clang-8 -DCMAKE_EXPORT_COMPILE_COMMANDS=ON && \ cmake $SOURCE_DIRECTORY -DCMAKE_CXX_COMPILER=/usr/bin/clang\+\+-11 -DCMAKE_C_COMPILER=/usr/bin/clang-11 -DCMAKE_EXPORT_COMPILE_COMMANDS=ON && \
mkdir -p $HTML_RESULT_DIRECTORY && \ mkdir -p $HTML_RESULT_DIRECTORY && \
$CODEGEN -b $BUILD_DIRECTORY -a -o $HTML_RESULT_DIRECTORY -p ClickHouse:$SOURCE_DIRECTORY:$SHA -d $DATA && \ $CODEGEN -b $BUILD_DIRECTORY -a -o $HTML_RESULT_DIRECTORY -p ClickHouse:$SOURCE_DIRECTORY:$SHA -d $DATA && \
cp -r $STATIC_DATA $HTML_RESULT_DIRECTORY/ &&\ cp -r $STATIC_DATA $HTML_RESULT_DIRECTORY/ &&\

View File

@ -52,10 +52,10 @@ RUN apt-get update \
moreutils \ moreutils \
ninja-build \ ninja-build \
psmisc \ psmisc \
python \ python3 \
python-lxml \ python3-lxml \
python-requests \ python3-requests \
python-termcolor \ python3-termcolor \
qemu-user-static \ qemu-user-static \
rename \ rename \
software-properties-common \ software-properties-common \

View File

@ -15,29 +15,59 @@ stage=${stage:-}
# empty parameter. # empty parameter.
read -ra FASTTEST_CMAKE_FLAGS <<< "${FASTTEST_CMAKE_FLAGS:-}" read -ra FASTTEST_CMAKE_FLAGS <<< "${FASTTEST_CMAKE_FLAGS:-}"
FASTTEST_WORKSPACE=$(readlink -f "${FASTTEST_WORKSPACE:-.}")
FASTTEST_SOURCE=$(readlink -f "${FASTTEST_SOURCE:-$FASTTEST_WORKSPACE/ch}")
FASTTEST_BUILD=$(readlink -f "${FASTTEST_BUILD:-${BUILD:-$FASTTEST_WORKSPACE/build}}")
FASTTEST_DATA=$(readlink -f "${FASTTEST_DATA:-$FASTTEST_WORKSPACE/db-fasttest}")
FASTTEST_OUTPUT=$(readlink -f "${FASTTEST_OUTPUT:-$FASTTEST_WORKSPACE}")
function kill_clickhouse # Export these variables, so that all subsequent invocations of the script
# use them, and not try to guess them anew, which leads to weird effects.
export FASTTEST_WORKSPACE
export FASTTEST_SOURCE
export FASTTEST_BUILD
export FASTTEST_DATA
export FASTTEST_OUT
server_pid=none
function stop_server
{ {
for _ in {1..60} for _ in {1..60}
do do
if ! pkill -f clickhouse-server ; then break ; fi if ! pkill -f "clickhouse-server" && ! kill -- "$server_pid" ; then break ; fi
sleep 1 sleep 1
done done
if pgrep -f clickhouse-server if kill -0 -- "$server_pid"
then then
pstree -apgT pstree -apgT
jobs jobs
echo "Failed to kill the ClickHouse server $(pgrep -f clickhouse-server)" echo "Failed to kill the ClickHouse server pid '$server_pid'"
return 1 return 1
fi fi
server_pid=none
} }
function wait_for_server_start function start_server
{ {
set -m # Spawn server in its own process groups
clickhouse-server --config-file="$FASTTEST_DATA/config.xml" -- --path "$FASTTEST_DATA" --user_files_path "$FASTTEST_DATA/user_files" &>> "$FASTTEST_OUTPUT/server.log" &
server_pid=$!
set +m
if [ "$server_pid" == "0" ]
then
echo "Failed to start ClickHouse server"
# Avoid zero PID because `kill` treats it as our process group PID.
server_pid="none"
return 1
fi
for _ in {1..60} for _ in {1..60}
do do
if clickhouse-client --query "select 1" || ! pgrep -f clickhouse-server if clickhouse-client --query "select 1" || ! kill -0 -- "$server_pid"
then then
break break
fi fi
@ -47,20 +77,26 @@ function wait_for_server_start
if ! clickhouse-client --query "select 1" if ! clickhouse-client --query "select 1"
then then
echo "Failed to wait until ClickHouse server starts." echo "Failed to wait until ClickHouse server starts."
server_pid="none"
return 1 return 1
fi fi
echo "ClickHouse server pid '$(pgrep -f clickhouse-server)' started and responded" if ! kill -0 -- "$server_pid"
then
echo "Wrong clickhouse server started: PID '$server_pid' we started is not running, but '$(pgrep -f clickhouse-server)' is running"
server_pid="none"
return 1
fi
echo "ClickHouse server pid '$server_pid' started and responded"
} }
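A sketch of what "set -m" buys in start_server above: job control places the background child in its own process group, so the whole group can later be signalled with a negative PID (commands illustrative):

set -m                # enable job control: background jobs get their own process group
sleep 1000 &
child=$!
set +m
kill -- "-$child"     # the leading '-' signals the child's entire process group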
function clone_root function clone_root
{ {
git clone https://github.com/ClickHouse/ClickHouse.git | ts '%Y-%m-%d %H:%M:%S' | tee /test_output/clone_log.txt git clone https://github.com/ClickHouse/ClickHouse.git -- "$FASTTEST_SOURCE" | ts '%Y-%m-%d %H:%M:%S' | tee "$FASTTEST_OUTPUT/clone_log.txt"
cd ClickHouse
CLICKHOUSE_DIR=$(pwd)
export CLICKHOUSE_DIR
(
cd "$FASTTEST_SOURCE"
if [ "$PULL_REQUEST_NUMBER" != "0" ]; then if [ "$PULL_REQUEST_NUMBER" != "0" ]; then
if git fetch origin "+refs/pull/$PULL_REQUEST_NUMBER/merge"; then if git fetch origin "+refs/pull/$PULL_REQUEST_NUMBER/merge"; then
git checkout FETCH_HEAD git checkout FETCH_HEAD
@ -71,22 +107,36 @@ if [ "$PULL_REQUEST_NUMBER" != "0" ]; then
echo 'Checked out to commit' echo 'Checked out to commit'
fi fi
else else
if [ "$COMMIT_SHA" != "" ]; then if [ -v COMMIT_SHA ]; then
git checkout "$COMMIT_SHA" git checkout "$COMMIT_SHA"
fi fi
fi fi
)
} }
function run function clone_submodules
{ {
(
cd "$FASTTEST_SOURCE"
SUBMODULES_TO_UPDATE=(contrib/boost contrib/zlib-ng contrib/libxml2 contrib/poco contrib/libunwind contrib/ryu contrib/fmtlib contrib/base64 contrib/cctz contrib/libcpuid contrib/double-conversion contrib/libcxx contrib/libcxxabi contrib/libc-headers contrib/lz4 contrib/zstd contrib/fastops contrib/rapidjson contrib/re2 contrib/sparsehash-c11) SUBMODULES_TO_UPDATE=(contrib/boost contrib/zlib-ng contrib/libxml2 contrib/poco contrib/libunwind contrib/ryu contrib/fmtlib contrib/base64 contrib/cctz contrib/libcpuid contrib/double-conversion contrib/libcxx contrib/libcxxabi contrib/libc-headers contrib/lz4 contrib/zstd contrib/fastops contrib/rapidjson contrib/re2 contrib/sparsehash-c11)
git submodule update --init --recursive "${SUBMODULES_TO_UPDATE[@]}" | ts '%Y-%m-%d %H:%M:%S' | tee /test_output/submodule_log.txt git submodule sync
git submodule update --init --recursive "${SUBMODULES_TO_UPDATE[@]}"
git submodule foreach git reset --hard
git submodule foreach git checkout @ -f
git submodule foreach git clean -xfd
)
}
CMAKE_LIBS_CONFIG=(-DENABLE_LIBRARIES=0 -DENABLE_TESTS=0 -DENABLE_UTILS=0 -DENABLE_EMBEDDED_COMPILER=0 -DENABLE_THINLTO=0 -DUSE_UNWIND=1) function run_cmake
{
CMAKE_LIBS_CONFIG=("-DENABLE_LIBRARIES=0" "-DENABLE_TESTS=0" "-DENABLE_UTILS=0" "-DENABLE_EMBEDDED_COMPILER=0" "-DENABLE_THINLTO=0" "-DUSE_UNWIND=1")
export CCACHE_DIR=/ccache # TODO remove this? we don't use ccache anyway. An option would be to download it
export CCACHE_BASEDIR=/ClickHouse # from S3 simultaneously with cloning.
export CCACHE_DIR="$FASTTEST_WORKSPACE/ccache"
export CCACHE_BASEDIR="$FASTTEST_SOURCE"
export CCACHE_NOHASHDIR=true export CCACHE_NOHASHDIR=true
export CCACHE_COMPILERCHECK=content export CCACHE_COMPILERCHECK=content
export CCACHE_MAXSIZE=15G export CCACHE_MAXSIZE=15G
@ -94,34 +144,45 @@ export CCACHE_MAXSIZE=15G
ccache --show-stats ||: ccache --show-stats ||:
ccache --zero-stats ||: ccache --zero-stats ||:
mkdir build mkdir "$FASTTEST_BUILD" ||:
cd build
cmake .. -DCMAKE_INSTALL_PREFIX=/usr -DCMAKE_CXX_COMPILER=clang++-10 -DCMAKE_C_COMPILER=clang-10 "${CMAKE_LIBS_CONFIG[@]}" "${FASTTEST_CMAKE_FLAGS[@]}" | ts '%Y-%m-%d %H:%M:%S' | tee /test_output/cmake_log.txt
time ninja clickhouse-bundle | ts '%Y-%m-%d %H:%M:%S' | tee /test_output/build_log.txt
ninja install | ts '%Y-%m-%d %H:%M:%S' | tee /test_output/install_log.txt
(
cd "$FASTTEST_BUILD"
cmake "$FASTTEST_SOURCE" -DCMAKE_CXX_COMPILER=clang++-10 -DCMAKE_C_COMPILER=clang-10 "${CMAKE_LIBS_CONFIG[@]}" "${FASTTEST_CMAKE_FLAGS[@]}" | ts '%Y-%m-%d %H:%M:%S' | tee "$FASTTEST_OUTPUT/cmake_log.txt"
)
}
function build
{
(
cd "$FASTTEST_BUILD"
time ninja clickhouse-bundle | ts '%Y-%m-%d %H:%M:%S' | tee "$FASTTEST_OUTPUT/build_log.txt"
ccache --show-stats ||: ccache --show-stats ||:
)
}
mkdir -p /etc/clickhouse-server function configure
mkdir -p /etc/clickhouse-client {
mkdir -p /etc/clickhouse-server/config.d clickhouse-client --version
mkdir -p /etc/clickhouse-server/users.d clickhouse-test --help
ln -s /test_output /var/log/clickhouse-server
cp "$CLICKHOUSE_DIR/programs/server/config.xml" /etc/clickhouse-server/
cp "$CLICKHOUSE_DIR/programs/server/users.xml" /etc/clickhouse-server/
# install tests config mkdir -p "$FASTTEST_DATA"{,/client-config}
$CLICKHOUSE_DIR/tests/config/install.sh cp -a "$FASTTEST_SOURCE/programs/server/"{config,users}.xml "$FASTTEST_DATA"
cp -a "$FASTTEST_SOURCE/programs/server/"{config,users}.xml "$FASTTEST_DATA"
"$FASTTEST_SOURCE/tests/config/install.sh" "$FASTTEST_DATA" "$FASTTEST_DATA/client-config"
# doesn't support SSL # doesn't support SSL
rm -f /etc/clickhouse-server/config.d/secure_ports.xml rm -f "$FASTTEST_DATA/config.d/secure_ports.xml"
}
function run_tests
{
clickhouse-server --version
clickhouse-test --help
# Kill the server in case we are running locally and not in docker # Kill the server in case we are running locally and not in docker
kill_clickhouse stop_server ||:
clickhouse-server --config /etc/clickhouse-server/config.xml --daemon start_server
wait_for_server_start
TESTS_TO_SKIP=( TESTS_TO_SKIP=(
parquet parquet
@ -191,11 +252,10 @@ TESTS_TO_SKIP=(
01460_DistributedFilesToInsert 01460_DistributedFilesToInsert
) )
time clickhouse-test -j 8 --no-long --testname --shard --zookeeper --skip "${TESTS_TO_SKIP[@]}" 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee /test_output/test_log.txt time clickhouse-test -j 8 --no-long --testname --shard --zookeeper --skip "${TESTS_TO_SKIP[@]}" 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee "$FASTTEST_OUTPUT/test_log.txt"
# substr is to remove semicolon after test name # substr is to remove semicolon after test name
readarray -t FAILED_TESTS < <(awk '/FAIL|TIMEOUT|ERROR/ { print substr($3, 1, length($3)-1) }' /test_output/test_log.txt | tee /test_output/failed-parallel-tests.txt) readarray -t FAILED_TESTS < <(awk '/FAIL|TIMEOUT|ERROR/ { print substr($3, 1, length($3)-1) }' "$FASTTEST_OUTPUT/test_log.txt" | tee "$FASTTEST_OUTPUT/failed-parallel-tests.txt")
# We will rerun sequentially any tests that have failed during parallel run. # We will rerun sequentially any tests that have failed during parallel run.
# They might have failed because there was some interference from other tests # They might have failed because there was some interference from other tests
@ -206,19 +266,16 @@ readarray -t FAILED_TESTS < <(awk '/FAIL|TIMEOUT|ERROR/ { print substr($3, 1, le
# explicit instead of guessing. # explicit instead of guessing.
if [[ -n "${FAILED_TESTS[*]}" ]] if [[ -n "${FAILED_TESTS[*]}" ]]
then then
kill_clickhouse stop_server ||:
# Clean the data so that there is no interference from the previous test run. # Clean the data so that there is no interference from the previous test run.
rm -rf /var/lib/clickhouse ||: rm -rf "$FASTTEST_DATA"/{meta,}data ||:
mkdir /var/lib/clickhouse
clickhouse-server --config /etc/clickhouse-server/config.xml --daemon start_server
wait_for_server_start
echo "Going to run again: ${FAILED_TESTS[*]}" echo "Going to run again: ${FAILED_TESTS[*]}"
clickhouse-test --no-long --testname --shard --zookeeper "${FAILED_TESTS[@]}" 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee -a /test_output/test_log.txt clickhouse-test --no-long --testname --shard --zookeeper "${FAILED_TESTS[@]}" 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee -a "$FASTTEST_OUTPUT/test_log.txt"
else else
echo "No failed tests" echo "No failed tests"
fi fi
@ -228,20 +285,50 @@ case "$stage" in
"") "")
ls -la ls -la
;& ;&
"clone_root") "clone_root")
clone_root clone_root
# Pass control to the script from cloned sources, unless asked otherwise. # Pass control to the script from cloned sources, unless asked otherwise.
if ! [ -v FASTTEST_LOCAL_SCRIPT ] if ! [ -v FASTTEST_LOCAL_SCRIPT ]
then then
stage=run "$CLICKHOUSE_DIR/docker/test/fasttest/run.sh" # 'run' stage is deprecated, used for compatibility with old scripts.
# Replace with 'clone_submodules' after Nov 1, 2020.
# cd and CLICKHOUSE_DIR are also a setup for old scripts, remove as well.
# In modern script we undo it by changing back into workspace dir right
# away, see below. Remove that as well.
cd "$FASTTEST_SOURCE"
CLICKHOUSE_DIR=$(pwd)
export CLICKHOUSE_DIR
stage=run "$FASTTEST_SOURCE/docker/test/fasttest/run.sh"
exit $? exit $?
fi fi
;& ;&
"run") "run")
run # A deprecated stage that is called by old script and equivalent to everything
# after cloning root, starting with cloning submodules.
;&
"clone_submodules")
# Recover after being called from the old script that changes into source directory.
# See the compatibility hacks in `clone_root` stage above. Remove at the same time,
# after Nov 1, 2020.
cd "$FASTTEST_WORKSPACE"
clone_submodules | ts '%Y-%m-%d %H:%M:%S' | tee "$FASTTEST_OUTPUT/submodule_log.txt"
;&
"run_cmake")
run_cmake
;&
"build")
build
PATH="$FASTTEST_BUILD/programs:$FASTTEST_SOURCE/tests:$PATH"
export PATH
;&
"configure")
# The `install_log.txt` is also needed for compatibility with old CI task --
# if there is no log, it will decide that build failed.
configure | ts '%Y-%m-%d %H:%M:%S' | tee "$FASTTEST_OUTPUT/install_log.txt"
;&
"run_tests")
run_tests
;& ;&
esac esac

View File

@ -58,7 +58,7 @@ function watchdog
echo "Fuzzing run has timed out" echo "Fuzzing run has timed out"
killall clickhouse-client ||: killall clickhouse-client ||:
for x in {1..10} for _ in {1..10}
do do
if ! pgrep -f clickhouse-client if ! pgrep -f clickhouse-client
then then
@ -81,6 +81,9 @@ function fuzz
echo Server started echo Server started
fuzzer_exit_code=0 fuzzer_exit_code=0
# SC2012: Use find instead of ls to better handle non-alphanumeric filenames.
# They are all alphanumeric.
# shellcheck disable=SC2012
./clickhouse-client --query-fuzzer-runs=1000 \ ./clickhouse-client --query-fuzzer-runs=1000 \
< <(for f in $(ls ch/tests/queries/0_stateless/*.sql | sort -R); do cat "$f"; echo ';'; done) \ < <(for f in $(ls ch/tests/queries/0_stateless/*.sql | sort -R); do cat "$f"; echo ';'; done) \
> >(tail -10000 > fuzzer.log) \ > >(tail -10000 > fuzzer.log) \

View File

@ -4,7 +4,7 @@ FROM yandex/clickhouse-test-base
RUN apt-get update \ RUN apt-get update \
&& env DEBIAN_FRONTEND=noninteractive apt-get -y install \ && env DEBIAN_FRONTEND=noninteractive apt-get -y install \
tzdata \ tzdata \
python \ python3 \
libreadline-dev \ libreadline-dev \
libicu-dev \ libicu-dev \
bsdutils \ bsdutils \
@ -16,7 +16,8 @@ RUN apt-get update \
odbc-postgresql \ odbc-postgresql \
sqlite3 \ sqlite3 \
curl \ curl \
tar tar \
krb5-user
RUN rm -rf \ RUN rm -rf \
/var/lib/apt/lists/* \ /var/lib/apt/lists/* \
/var/cache/debconf \ /var/cache/debconf \

View File

@ -0,0 +1,15 @@
# docker build -t yandex/clickhouse-kerberos-kdc .
FROM centos:6.6
# old OS to make it faster and smaller
RUN yum install -y krb5-server krb5-libs krb5-auth-dialog krb5-workstation
EXPOSE 88 749
RUN touch /config.sh
# should be overwritten e.g. via docker_compose volumes
# volumes: /some_path/my_kerberos_config.sh:/config.sh:ro
ENTRYPOINT ["/bin/bash", "/config.sh"]

View File

@ -16,13 +16,13 @@ RUN apt-get update \
iproute2 \ iproute2 \
module-init-tools \ module-init-tools \
cgroupfs-mount \ cgroupfs-mount \
python-pip \ python3-pip \
tzdata \ tzdata \
libreadline-dev \ libreadline-dev \
libicu-dev \ libicu-dev \
bsdutils \ bsdutils \
curl \ curl \
python-pika \ python3-pika \
liblua5.1-dev \ liblua5.1-dev \
luajit \ luajit \
libssl-dev \ libssl-dev \
@ -37,7 +37,7 @@ RUN apt-get update \
ENV TZ=Europe/Moscow ENV TZ=Europe/Moscow
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
RUN pip install urllib3==1.23 pytest docker-compose==1.22.0 docker dicttoxml kazoo PyMySQL psycopg2==2.7.5 pymongo tzlocal kafka-python protobuf redis aerospike pytest-timeout minio rpm-confluent-schemaregistry grpcio grpcio-tools cassandra-driver RUN python3 -m pip install urllib3==1.23 pytest docker-compose==1.22.0 docker dicttoxml kazoo PyMySQL psycopg2==2.7.5 pymongo tzlocal kafka-python protobuf redis aerospike pytest-timeout minio grpcio grpcio-tools cassandra-driver confluent-kafka avro
ENV DOCKER_CHANNEL stable ENV DOCKER_CHANNEL stable
ENV DOCKER_VERSION 17.09.1-ce ENV DOCKER_VERSION 17.09.1-ce

View File

@ -0,0 +1,59 @@
version: '2.3'
services:
kafka_kerberized_zookeeper:
image: confluentinc/cp-zookeeper:5.2.0
# restart: always
hostname: kafka_kerberized_zookeeper
environment:
ZOOKEEPER_SERVER_ID: 1
ZOOKEEPER_CLIENT_PORT: 2181
ZOOKEEPER_SERVERS: "kafka_kerberized_zookeeper:2888:3888"
KAFKA_OPTS: "-Djava.security.auth.login.config=/etc/kafka/secrets/zookeeper_jaas.conf -Djava.security.krb5.conf=/etc/kafka/secrets/krb.conf -Dzookeeper.authProvider.1=org.apache.zookeeper.server.auth.SASLAuthenticationProvider -Dsun.security.krb5.debug=true"
volumes:
- ${KERBERIZED_KAFKA_DIR}/secrets:/etc/kafka/secrets
- /dev/urandom:/dev/random
depends_on:
- kafka_kerberos
security_opt:
- label:disable
kerberized_kafka1:
image: confluentinc/cp-kafka:5.2.0
# restart: always
hostname: kerberized_kafka1
ports:
- "9092:9092"
- "9093:9093"
environment:
KAFKA_LISTENERS: OUTSIDE://:19092,UNSECURED_OUTSIDE://:19093,UNSECURED_INSIDE://:9093
KAFKA_ADVERTISED_LISTENERS: OUTSIDE://kerberized_kafka1:19092,UNSECURED_OUTSIDE://kerberized_kafka1:19093,UNSECURED_INSIDE://localhost:9093
# KAFKA_LISTENERS: INSIDE://kerberized_kafka1:9092,OUTSIDE://kerberized_kafka1:19092
# KAFKA_ADVERTISED_LISTENERS: INSIDE://localhost:9092,OUTSIDE://kerberized_kafka1:19092
KAFKA_SASL_MECHANISM_INTER_BROKER_PROTOCOL: GSSAPI
KAFKA_SASL_ENABLED_MECHANISMS: GSSAPI
KAFKA_SASL_KERBEROS_SERVICE_NAME: kafka
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: OUTSIDE:SASL_PLAINTEXT,UNSECURED_OUTSIDE:PLAINTEXT,UNSECURED_INSIDE:PLAINTEXT,
KAFKA_INTER_BROKER_LISTENER_NAME: OUTSIDE
KAFKA_BROKER_ID: 1
KAFKA_ZOOKEEPER_CONNECT: "kafka_kerberized_zookeeper:2181"
KAFKA_LOG4J_LOGGERS: "kafka.controller=INFO,kafka.producer.async.DefaultEventHandler=INFO,state.change.logger=INFO"
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
KAFKA_OPTS: "-Djava.security.auth.login.config=/etc/kafka/secrets/broker_jaas.conf -Djava.security.krb5.conf=/etc/kafka/secrets/krb.conf -Dsun.security.krb5.debug=true"
volumes:
- ${KERBERIZED_KAFKA_DIR}/secrets:/etc/kafka/secrets
- /dev/urandom:/dev/random
depends_on:
- kafka_kerberized_zookeeper
- kafka_kerberos
security_opt:
- label:disable
kafka_kerberos:
image: yandex/clickhouse-kerberos-kdc:${DOCKER_KERBEROS_KDC_TAG}
hostname: kafka_kerberos
volumes:
- ${KERBERIZED_KAFKA_DIR}/secrets:/tmp/keytab
- ${KERBERIZED_KAFKA_DIR}/../../kerberos_image_config.sh:/config.sh
- /dev/urandom:/dev/random
ports: [88, 749]

View File

@ -7,7 +7,7 @@ set +e
reties=0 reties=0
while true; do while true; do
docker info &>/dev/null && break docker info &>/dev/null && break
reties=$[$reties+1] reties=$((reties+1))
if [[ $reties -ge 100 ]]; then # 10 sec max if [[ $reties -ge 100 ]]; then # 10 sec max
echo "Can't start docker daemon, timeout exceeded." >&2 echo "Can't start docker daemon, timeout exceeded." >&2
exit 1; exit 1;
@ -27,6 +27,7 @@ export DOCKER_MYSQL_JAVA_CLIENT_TAG=${DOCKER_MYSQL_JAVA_CLIENT_TAG:=latest}
export DOCKER_MYSQL_JS_CLIENT_TAG=${DOCKER_MYSQL_JS_CLIENT_TAG:=latest} export DOCKER_MYSQL_JS_CLIENT_TAG=${DOCKER_MYSQL_JS_CLIENT_TAG:=latest}
export DOCKER_MYSQL_PHP_CLIENT_TAG=${DOCKER_MYSQL_PHP_CLIENT_TAG:=latest} export DOCKER_MYSQL_PHP_CLIENT_TAG=${DOCKER_MYSQL_PHP_CLIENT_TAG:=latest}
export DOCKER_POSTGRESQL_JAVA_CLIENT_TAG=${DOCKER_POSTGRESQL_JAVA_CLIENT_TAG:=latest} export DOCKER_POSTGRESQL_JAVA_CLIENT_TAG=${DOCKER_POSTGRESQL_JAVA_CLIENT_TAG:=latest}
export DOCKER_KERBEROS_KDC_TAG=${DOCKER_KERBEROS_KDC_TAG:=latest}
cd /ClickHouse/tests/integration cd /ClickHouse/tests/integration
exec "$@" exec "$@"

View File

@ -9,7 +9,7 @@ right_version=${2}
if [ "$left_version" == "" ] || [ "$right_version" == "" ] if [ "$left_version" == "" ] || [ "$right_version" == "" ]
then then
>&2 echo Usage: $(basename "$0") left_version right_version >&2 echo "Usage: $(basename "$0") left_version right_version"
exit 1 exit 1
fi fi

View File

@ -181,6 +181,9 @@ function run_tests
# Randomize test order. # Randomize test order.
test_files=$(for f in $test_files; do echo "$f"; done | sort -R) test_files=$(for f in $test_files; do echo "$f"; done | sort -R)
# Limit profiling time to 10 minutes, so that it does not run for too long.
profile_seconds_left=600
# Run the tests. # Run the tests.
test_name="<none>" test_name="<none>"
for test in $test_files for test in $test_files
@ -194,15 +197,24 @@ function run_tests
test_name=$(basename "$test" ".xml") test_name=$(basename "$test" ".xml")
echo test "$test_name" echo test "$test_name"
# Don't profile if we're past the time limit.
# Use awk because bash doesn't support floating point arithmetic.
profile_seconds=$(awk "BEGIN { print ($profile_seconds_left > 0 ? 10 : 0) }")
TIMEFORMAT=$(printf "$test_name\t%%3R\t%%3U\t%%3S\n") TIMEFORMAT=$(printf "$test_name\t%%3R\t%%3U\t%%3S\n")
# The grep is to filter out set -x output and keep only time output. # The grep is to filter out set -x output and keep only time output.
# The '2>&1 >/dev/null' redirects stderr to stdout, and discards stdout. # The '2>&1 >/dev/null' redirects stderr to stdout, and discards stdout.
{ \ { \
time "$script_dir/perf.py" --host localhost localhost --port 9001 9002 \ time "$script_dir/perf.py" --host localhost localhost --port 9001 9002 \
--runs "$CHPC_RUNS" --max-queries "$CHPC_MAX_QUERIES" \ --runs "$CHPC_RUNS" --max-queries "$CHPC_MAX_QUERIES" \
--profile-seconds "$profile_seconds" \
-- "$test" > "$test_name-raw.tsv" 2> "$test_name-err.log" ; \ -- "$test" > "$test_name-raw.tsv" 2> "$test_name-err.log" ; \
} 2>&1 >/dev/null | tee >(grep -v ^+ >> "wall-clock-times.tsv") \ } 2>&1 >/dev/null | tee >(grep -v ^+ >> "wall-clock-times.tsv") \
|| echo "Test $test_name failed with error code $?" >> "$test_name-err.log" || echo "Test $test_name failed with error code $?" >> "$test_name-err.log"
profile_seconds_left=$(awk -F' ' \
'BEGIN { s = '$profile_seconds_left'; } /^profile-total/ { s -= $2 } END { print s }' \
"$test_name-raw.tsv")
done done
unset TIMEFORMAT unset TIMEFORMAT
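A sketch of the awk idiom used above, since bash arithmetic is integer-only (values hypothetical):

profile_seconds_left=3.5
profile_seconds=$(awk "BEGIN { print ($profile_seconds_left > 0 ? 10 : 0) }")
echo "$profile_seconds"   # prints 10; with a non-positive remainder it prints 0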
@ -294,6 +306,7 @@ for test_file in *-raw.tsv
do do
test_name=$(basename "$test_file" "-raw.tsv") test_name=$(basename "$test_file" "-raw.tsv")
sed -n "s/^query\t/$test_name\t/p" < "$test_file" >> "analyze/query-runs.tsv" sed -n "s/^query\t/$test_name\t/p" < "$test_file" >> "analyze/query-runs.tsv"
sed -n "s/^profile\t/$test_name\t/p" < "$test_file" >> "analyze/query-profiles.tsv"
sed -n "s/^client-time\t/$test_name\t/p" < "$test_file" >> "analyze/client-times.tsv" sed -n "s/^client-time\t/$test_name\t/p" < "$test_file" >> "analyze/client-times.tsv"
sed -n "s/^report-threshold\t/$test_name\t/p" < "$test_file" >> "analyze/report-thresholds.tsv" sed -n "s/^report-threshold\t/$test_name\t/p" < "$test_file" >> "analyze/report-thresholds.tsv"
sed -n "s/^skipped\t/$test_name\t/p" < "$test_file" >> "analyze/skipped-tests.tsv" sed -n "s/^skipped\t/$test_name\t/p" < "$test_file" >> "analyze/skipped-tests.tsv"
@ -658,13 +671,15 @@ create view test_runs as
group by test group by test
; ;
create table test_times_report engine File(TSV, 'report/test-times.tsv') as create view test_times_view as
select wall_clock_time_per_test.test, real, select
toDecimal64(total_client_time, 3), wall_clock_time_per_test.test test,
real,
total_client_time,
queries, queries,
toDecimal64(query_max, 3), query_max,
toDecimal64(real / queries, 3) avg_real_per_query, real / queries avg_real_per_query,
toDecimal64(query_min, 3), query_min,
runs runs
from test_time from test_time
-- wall clock times are also measured for skipped tests, so don't -- wall clock times are also measured for skipped tests, so don't
@ -673,7 +688,43 @@ create table test_times_report engine File(TSV, 'report/test-times.tsv') as
on wall_clock_time_per_test.test = test_time.test on wall_clock_time_per_test.test = test_time.test
full join test_runs full join test_runs
on test_runs.test = test_time.test on test_runs.test = test_time.test
order by avg_real_per_query desc; ;
-- WITH TOTALS doesn't work with INSERT SELECT, so we have to jump through these
-- hoops: https://github.com/ClickHouse/ClickHouse/issues/15227
create view test_times_view_total as
select
'Total' test,
sum(real),
sum(total_client_time),
sum(queries),
max(query_max),
sum(real) / sum(queries) avg_real_per_query,
min(query_min),
-- Totaling the number of runs doesn't make sense, but use the max so
-- that the reporting script doesn't complain about queries being too
-- long.
max(runs)
from test_times_view
;
create table test_times_report engine File(TSV, 'report/test-times.tsv') as
select
test,
toDecimal64(real, 3),
toDecimal64(total_client_time, 3),
queries,
toDecimal64(query_max, 3),
toDecimal64(avg_real_per_query, 3),
toDecimal64(query_min, 3),
runs
from (
select * from test_times_view
union all
select * from test_times_view_total
)
order by test = 'Total' desc, avg_real_per_query desc
;
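The WITH TOTALS workaround above reduces to this pattern (table and column names hypothetical): since WITH TOTALS cannot feed an INSERT SELECT, the totals row is computed explicitly and appended with UNION ALL.

select test, metric from (
    select test, metric from results
    union all
    select 'Total' test, sum(metric) metric from results
)
order by test = 'Total' desc, metric desc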
-- report for all queries page, only main metric -- report for all queries page, only main metric
create table all_tests_report engine File(TSV, 'report/all-queries.tsv') as create table all_tests_report engine File(TSV, 'report/all-queries.tsv') as
@ -694,13 +745,12 @@ create table all_tests_report engine File(TSV, 'report/all-queries.tsv') as
test, query_index, query_display_name test, query_index, query_display_name
from queries order by test, query_index; from queries order by test, query_index;
-- queries for which we will build flamegraphs (see below)
create table queries_for_flamegraph engine File(TSVWithNamesAndTypes,
'report/queries-for-flamegraph.tsv') as
select test, query_index from queries where unstable_show or changed_show
;
-- Report of queries that have inconsistent 'short' markings:
-- 1) have short duration, but are not marked as 'short'
-- 2) the reverse -- marked 'short' but take too long.
-- The threshold for 2) is significantly larger than the threshold for 1), to
-- avoid jitter.
create view shortness create view shortness
as select as select
(test, query_index) in (test, query_index) in
@ -718,11 +768,6 @@ create view shortness
and times.query_index = query_display_names.query_index and times.query_index = query_display_names.query_index
; ;
-- Report of queries that have inconsistent 'short' markings:
-- 1) have short duration, but are not marked as 'short'
-- 2) the reverse -- marked 'short' but take too long.
-- The threshold for 2) is significantly larger than the threshold for 1), to
-- avoid jitter.
create table inconsistent_short_marking_report create table inconsistent_short_marking_report
engine File(TSV, 'report/unexpected-query-duration.tsv') engine File(TSV, 'report/unexpected-query-duration.tsv')
as select as select
@ -759,18 +804,15 @@ create table all_query_metrics_tsv engine File(TSV, 'report/all-query-metrics.ts
" 2> >(tee -a report/errors.log 1>&2) " 2> >(tee -a report/errors.log 1>&2)
# Prepare source data for metrics and flamegraphs for unstable queries. # Prepare source data for metrics and flamegraphs for queries that were profiled
# by perf.py.
for version in {right,left} for version in {right,left}
do do
rm -rf data rm -rf data
clickhouse-local --query " clickhouse-local --query "
create view queries_for_flamegraph as create view query_profiles as
select * from file('report/queries-for-flamegraph.tsv', TSVWithNamesAndTypes,
'test text, query_index int');
create view query_runs as
with 0 as left, 1 as right with 0 as left, 1 as right
select * from file('analyze/query-runs.tsv', TSV, select * from file('analyze/query-profiles.tsv', TSV,
'test text, query_index int, query_id text, version UInt8, time float') 'test text, query_index int, query_id text, version UInt8, time float')
where version = $version where version = $version
; ;
@ -782,15 +824,12 @@ create view query_display_names as select * from
create table unstable_query_runs engine File(TSVWithNamesAndTypes, create table unstable_query_runs engine File(TSVWithNamesAndTypes,
'unstable-query-runs.$version.rep') as 'unstable-query-runs.$version.rep') as
select query_runs.test test, query_runs.query_index query_index, select query_profiles.test test, query_profiles.query_index query_index,
query_display_name, query_id query_display_name, query_id
from query_runs from query_profiles
join queries_for_flamegraph on
query_runs.test = queries_for_flamegraph.test
and query_runs.query_index = queries_for_flamegraph.query_index
left join query_display_names on left join query_display_names on
query_runs.test = query_display_names.test query_profiles.test = query_display_names.test
and query_runs.query_index = query_display_names.query_index and query_profiles.query_index = query_display_names.query_index
; ;
create view query_log as select * create view query_log as select *

View File

@ -10,7 +10,7 @@ mkdir left ||:
left_pr=$1 left_pr=$1
left_sha=$2 left_sha=$2
right_pr=$3 # right_pr=$3 not used for now
right_sha=$4 right_sha=$4
datasets=${CHPC_DATASETS:-"hits1 hits10 hits100 values"} datasets=${CHPC_DATASETS:-"hits1 hits10 hits100 values"}

View File

@ -18,9 +18,22 @@ import xml.etree.ElementTree as et
from threading import Thread from threading import Thread
from scipy import stats from scipy import stats
total_start_seconds = time.perf_counter()
stage_start_seconds = total_start_seconds
def reportStageEnd(stage):
global stage_start_seconds, total_start_seconds
current = time.perf_counter()
print(f'stage\t{stage}\t{current - stage_start_seconds:.3f}\t{current - total_start_seconds:.3f}')
stage_start_seconds = current
def tsv_escape(s): def tsv_escape(s):
return s.replace('\\', '\\\\').replace('\t', '\\t').replace('\n', '\\n').replace('\r','') return s.replace('\\', '\\\\').replace('\t', '\\t').replace('\n', '\\n').replace('\r','')
parser = argparse.ArgumentParser(description='Run performance test.') parser = argparse.ArgumentParser(description='Run performance test.')
# Explicitly decode files as UTF-8 because sometimes we have Russian characters in queries, and LANG=C is set. # Explicitly decode files as UTF-8 because sometimes we have Russian characters in queries, and LANG=C is set.
parser.add_argument('file', metavar='FILE', type=argparse.FileType('r', encoding='utf-8'), nargs=1, help='test description file') parser.add_argument('file', metavar='FILE', type=argparse.FileType('r', encoding='utf-8'), nargs=1, help='test description file')
@ -29,16 +42,21 @@ parser.add_argument('--port', nargs='*', default=[9000], help="Space-separated l
parser.add_argument('--runs', type=int, default=1, help='Number of query runs per server.') parser.add_argument('--runs', type=int, default=1, help='Number of query runs per server.')
parser.add_argument('--max-queries', type=int, default=None, help='Test no more than this number of queries, chosen at random.') parser.add_argument('--max-queries', type=int, default=None, help='Test no more than this number of queries, chosen at random.')
parser.add_argument('--queries-to-run', nargs='*', type=int, default=None, help='Space-separated list of indexes of queries to test.') parser.add_argument('--queries-to-run', nargs='*', type=int, default=None, help='Space-separated list of indexes of queries to test.')
parser.add_argument('--profile-seconds', type=int, default=0, help='For how many seconds to profile a query for which the performance has changed.')
parser.add_argument('--long', action='store_true', help='Do not skip the tests tagged as long.') parser.add_argument('--long', action='store_true', help='Do not skip the tests tagged as long.')
parser.add_argument('--print-queries', action='store_true', help='Print test queries and exit.') parser.add_argument('--print-queries', action='store_true', help='Print test queries and exit.')
parser.add_argument('--print-settings', action='store_true', help='Print test settings and exit.') parser.add_argument('--print-settings', action='store_true', help='Print test settings and exit.')
args = parser.parse_args() args = parser.parse_args()
reportStageEnd('start')
test_name = os.path.splitext(os.path.basename(args.file[0].name))[0] test_name = os.path.splitext(os.path.basename(args.file[0].name))[0]
tree = et.parse(args.file[0]) tree = et.parse(args.file[0])
root = tree.getroot() root = tree.getroot()
reportStageEnd('parse')
# Process query parameters # Process query parameters
subst_elems = root.findall('substitutions/substitution') subst_elems = root.findall('substitutions/substitution')
available_parameters = {} # { 'table': ['hits_10m', 'hits_100m'], ... } available_parameters = {} # { 'table': ['hits_10m', 'hits_100m'], ... }
@ -78,11 +96,16 @@ for e in root.findall('query'):
assert(len(test_queries) == len(is_short)) assert(len(test_queries) == len(is_short))
# If we're given a list of queries to run, check that it makes sense.
for i in args.queries_to_run or []:
if i < 0 or i >= len(test_queries):
print(f'There is no query no. {i} in this test, only [{0}-{len(test_queries) - 1}] are present')
exit(1)
# If we're only asked to print the queries, do that and exit # If we're only asked to print the queries, do that and exit.
if args.print_queries: if args.print_queries:
for q in test_queries: for i in args.queries_to_run or range(0, len(test_queries)):
print(q) print(test_queries[i])
exit(0) exit(0)
# Print short queries # Print short queries
@ -107,15 +130,21 @@ if not args.long:
sys.exit(0) sys.exit(0)
# Print report threshold for the test if it is set. # Print report threshold for the test if it is set.
ignored_relative_change = 0.05
if 'max_ignored_relative_change' in root.attrib: if 'max_ignored_relative_change' in root.attrib:
print(f'report-threshold\t{root.attrib["max_ignored_relative_change"]}') ignored_relative_change = float(root.attrib["max_ignored_relative_change"])
print(f'report-threshold\t{ignored_relative_change}')
reportStageEnd('before-connect')
# Open connections # Open connections
servers = [{'host': host, 'port': port} for (host, port) in zip(args.host, args.port)] servers = [{'host': host or args.host[0], 'port': port or args.port[0]} for (host, port) in itertools.zip_longest(args.host, args.port)]
all_connections = [clickhouse_driver.Client(**server) for server in servers] all_connections = [clickhouse_driver.Client(**server) for server in servers]
for s in servers: for i, s in enumerate(servers):
print('server\t{}\t{}'.format(s['host'], s['port'])) print(f'server\t{i}\t{s["host"]}\t{s["port"]}')
reportStageEnd('connect')
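A sketch of the zip_longest pairing introduced above: a shorter --host or --port list is padded, and missing entries fall back to the first value (inputs hypothetical):

import itertools
hosts, ports = ['localhost'], [9001, 9002]
servers = [{'host': h or hosts[0], 'port': p or ports[0]}
           for (h, p) in itertools.zip_longest(hosts, ports)]
# [{'host': 'localhost', 'port': 9001}, {'host': 'localhost', 'port': 9002}]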
# Run drop queries, ignoring errors. Do this before all other activity, because # Run drop queries, ignoring errors. Do this before all other activity, because
# clickhouse_driver disconnects on error (this is not configurable), and the new # clickhouse_driver disconnects on error (this is not configurable), and the new
@ -130,6 +159,8 @@ for conn_index, c in enumerate(all_connections):
except: except:
pass pass
reportStageEnd('drop-1')
# Apply settings. # Apply settings.
# If there are errors, report them and continue -- maybe a new test uses a setting # If there are errors, report them and continue -- maybe a new test uses a setting
# that is not in master, but the queries can still run. If we have multiple # that is not in master, but the queries can still run. If we have multiple
@ -147,6 +178,8 @@ for conn_index, c in enumerate(all_connections):
except: except:
print(traceback.format_exc(), file=sys.stderr) print(traceback.format_exc(), file=sys.stderr)
reportStageEnd('settings')
# Check tables that should exist. If they don't exist, just skip this test. # Check tables that should exist. If they don't exist, just skip this test.
tables = [e.text for e in root.findall('preconditions/table_exists')] tables = [e.text for e in root.findall('preconditions/table_exists')]
for t in tables: for t in tables:
@ -159,6 +192,8 @@ for t in tables:
print(f'skipped\t{tsv_escape(skipped_message)}') print(f'skipped\t{tsv_escape(skipped_message)}')
sys.exit(0) sys.exit(0)
reportStageEnd('preconditions')
# Run create and fill queries. We will run them simultaneously for both servers, # Run create and fill queries. We will run them simultaneously for both servers,
# to save time. # to save time.
# The weird search is to keep the relative order of elements, which matters, and # The weird search is to keep the relative order of elements, which matters, and
@ -189,6 +224,9 @@ for t in threads:
for t in threads: for t in threads:
t.join() t.join()
reportStageEnd('create')
# By default, test all queries.
queries_to_run = range(0, len(test_queries)) queries_to_run = range(0, len(test_queries))
if args.max_queries: if args.max_queries:
@ -196,15 +234,11 @@ if args.max_queries:
queries_to_run = random.sample(range(0, len(test_queries)), min(len(test_queries), args.max_queries)) queries_to_run = random.sample(range(0, len(test_queries)), min(len(test_queries), args.max_queries))
if args.queries_to_run: if args.queries_to_run:
# Run the specified queries, with some sanity check. # Run the specified queries.
for i in args.queries_to_run:
if i < 0 or i >= len(test_queries):
print(f'There is no query no. "{i}" in this test, only [{0}-{len(test_queries) - 1}] are present')
exit(1)
queries_to_run = args.queries_to_run queries_to_run = args.queries_to_run
# Run test queries. # Run test queries.
profile_total_seconds = 0
for query_index in queries_to_run: for query_index in queries_to_run:
q = test_queries[query_index] q = test_queries[query_index]
query_prefix = f'{test_name}.query{query_index}' query_prefix = f'{test_name}.query{query_index}'
@ -324,14 +358,30 @@ for query_index in queries_to_run:
client_seconds = time.perf_counter() - start_seconds client_seconds = time.perf_counter() - start_seconds
print(f'client-time\t{query_index}\t{client_seconds}\t{server_seconds}') print(f'client-time\t{query_index}\t{client_seconds}\t{server_seconds}')
#print(all_server_times)
#print(stats.ttest_ind(all_server_times[0], all_server_times[1], equal_var = False).pvalue)
# Run additional profiling queries to collect profile data, but only if test times appeared to be different. # Run additional profiling queries to collect profile data, but only if test times appeared to be different.
# We have to do it after normal runs because otherwise it will affect test statistics too much # We have to do it after normal runs because otherwise it will affect test statistics too much
if len(all_server_times) == 2 and stats.ttest_ind(all_server_times[0], all_server_times[1], equal_var = False).pvalue < 0.1: if len(all_server_times) != 2:
continue
if len(all_server_times[0]) < 3:
# Don't fail if for some reason there are not enough measurements.
continue
pvalue = stats.ttest_ind(all_server_times[0], all_server_times[1], equal_var = False).pvalue
median = [statistics.median(t) for t in all_server_times]
# Keep this consistent with the value used in report. Should eventually move
# to (median[1] - median[0]) / min(median), which is compatible with "times"
# difference we use in report (max(median) / min(median)).
relative_diff = (median[1] - median[0]) / median[0]
print(f'diff\t{query_index}\t{median[0]}\t{median[1]}\t{relative_diff}\t{pvalue}')
if abs(relative_diff) < ignored_relative_change or pvalue > 0.05:
continue
# Perform profile runs for fixed amount of time. Don't limit the number
# of runs, because we also have short queries.
profile_start_seconds = time.perf_counter()
run = 0 run = 0
while True: while time.perf_counter() - profile_start_seconds < args.profile_seconds:
run_id = f'{query_prefix}.profile{run}' run_id = f'{query_prefix}.profile{run}'
for conn_index, c in enumerate(this_query_connections): for conn_index, c in enumerate(this_query_connections):
@ -344,14 +394,13 @@ for query_index in queries_to_run:
e.message = run_id + ': ' + e.message e.message = run_id + ': ' + e.message
raise raise
elapsed = c.last_query.elapsed
profile_seconds += elapsed
run += 1 run += 1
# Don't spend too much time for profile runs
if run > args.runs or profile_seconds > 10: profile_total_seconds += time.perf_counter() - profile_start_seconds
break
# And don't bother with short queries print(f'profile-total\t{profile_total_seconds}')
reportStageEnd('run')
# Run drop queries # Run drop queries
drop_queries = substitute_parameters(drop_query_templates) drop_queries = substitute_parameters(drop_query_templates)
@ -359,3 +408,5 @@ for conn_index, c in enumerate(all_connections):
for q in drop_queries: for q in drop_queries:
c.execute(q) c.execute(q)
print(f'drop\t{conn_index}\t{c.last_query.elapsed}\t{tsv_escape(q)}') print(f'drop\t{conn_index}\t{c.last_query.elapsed}\t{tsv_escape(q)}')
reportStageEnd('drop-2')
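For reference, the significance gate added in this file boils down to a Welch's t-test plus a relative-median threshold; a self-contained sketch with made-up timings:

import statistics
from scipy import stats

a = [0.100, 0.102, 0.101]          # server 0 timings (hypothetical)
b = [0.140, 0.142, 0.139]          # server 1 timings (hypothetical)
pvalue = stats.ttest_ind(a, b, equal_var=False).pvalue
median = [statistics.median(t) for t in (a, b)]
relative_diff = (median[1] - median[0]) / median[0]
# profile only when the change is both large and statistically significant
should_profile = abs(relative_diff) >= 0.05 and pvalue <= 0.05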

View File

@ -312,7 +312,7 @@ def add_errors_explained():
if args.report == 'main': if args.report == 'main':
print(header_template.format()) print((header_template.format()))
add_tested_commits() add_tested_commits()
@ -487,7 +487,7 @@ if args.report == 'main':
for r in rows: for r in rows:
anchor = f'{currentTableAnchor()}.{r[0]}' anchor = f'{currentTableAnchor()}.{r[0]}'
total_runs = (int(r[7]) + 1) * 2 # one prewarm run, two servers total_runs = (int(r[7]) + 1) * 2 # one prewarm run, two servers
if float(r[5]) > allowed_average_run_time * total_runs: if r[0] != 'Total' and float(r[5]) > allowed_average_run_time * total_runs:
# FIXME should be 15s max -- investigate parallel_insert # FIXME should be 15s max -- investigate parallel_insert
slow_average_tests += 1 slow_average_tests += 1
attrs[5] = f'style="background: {color_bad}"' attrs[5] = f'style="background: {color_bad}"'
@ -495,7 +495,7 @@ if args.report == 'main':
else: else:
attrs[5] = '' attrs[5] = ''
if float(r[4]) > allowed_single_run_time * total_runs: if r[0] != 'Total' and float(r[4]) > allowed_single_run_time * total_runs:
slow_average_tests += 1 slow_average_tests += 1
attrs[4] = f'style="background: {color_bad}"' attrs[4] = f'style="background: {color_bad}"'
errors_explained.append([f'<a href="./all-queries.html#all-query-times.{r[0]}.0">Some query of the test \'{r[0]}\' is too slow to run. See the all queries report']) errors_explained.append([f'<a href="./all-queries.html#all-query-times.{r[0]}.0">Some query of the test \'{r[0]}\' is too slow to run. See the all queries report'])
@ -571,14 +571,14 @@ if args.report == 'main':
status = 'failure' status = 'failure'
message = 'Errors while building the report.' message = 'Errors while building the report.'
print(""" print(("""
<!--status: {status}--> <!--status: {status}-->
<!--message: {message}--> <!--message: {message}-->
""".format(status=status, message=message)) """.format(status=status, message=message)))
elif args.report == 'all-queries': elif args.report == 'all-queries':
print(header_template.format()) print((header_template.format()))
add_tested_commits() add_tested_commits()

View File

@ -4,7 +4,7 @@ FROM yandex/clickhouse-stateless-test
RUN apt-get update -y \ RUN apt-get update -y \
&& env DEBIAN_FRONTEND=noninteractive \ && env DEBIAN_FRONTEND=noninteractive \
apt-get install --yes --no-install-recommends \ apt-get install --yes --no-install-recommends \
python-requests \ python3-requests \
llvm-9 llvm-9
COPY s3downloader /s3downloader COPY s3downloader /s3downloader

View File

@@ -26,11 +26,12 @@ function start()
         fi
         timeout 120 service clickhouse-server start
         sleep 0.5
-        counter=$(($counter + 1))
+        counter=$((counter + 1))
     done
 }

 start
+# shellcheck disable=SC2086 # No quotes because I want to split it into words.
 /s3downloader --dataset-names $DATASETS
 chmod 777 -R /var/lib/clickhouse
 clickhouse-client --query "SHOW DATABASES"
@@ -43,8 +44,8 @@ clickhouse-client --query "RENAME TABLE datasets.hits_v1 TO test.hits"
 clickhouse-client --query "RENAME TABLE datasets.visits_v1 TO test.visits"
 clickhouse-client --query "SHOW TABLES FROM test"

-if cat /usr/bin/clickhouse-test | grep -q -- "--use-skip-list"; then
+if grep -q -- "--use-skip-list" /usr/bin/clickhouse-test ; then
     SKIP_LIST_OPT="--use-skip-list"
 fi

-clickhouse-test --testname --shard --zookeeper --no-stateless "$SKIP_LIST_OPT" $ADDITIONAL_OPTIONS $SKIP_TESTS_OPTION 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee test_output/test_result.txt
+clickhouse-test --testname --shard --zookeeper --no-stateless "$SKIP_LIST_OPT" "$ADDITIONAL_OPTIONS" "$SKIP_TESTS_OPTION" 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee test_output/test_result.txt
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 # -*- coding: utf-8 -*-
 import os
 import sys
@@ -29,7 +29,7 @@ def dowload_with_progress(url, path):
     logging.info("Downloading from %s to temp path %s", url, path)
     for i in range(RETRIES_COUNT):
         try:
-            with open(path, 'w') as f:
+            with open(path, 'wb') as f:
                 response = requests.get(url, stream=True)
                 response.raise_for_status()
                 total_length = response.headers.get('content-length')
@@ -74,7 +74,7 @@ if __name__ == "__main__":
     parser = argparse.ArgumentParser(
         description="Simple tool for dowloading datasets for clickhouse from S3")
-    parser.add_argument('--dataset-names', required=True, nargs='+', choices=AVAILABLE_DATASETS.keys())
+    parser.add_argument('--dataset-names', required=True, nargs='+', choices=list(AVAILABLE_DATASETS.keys()))
     parser.add_argument('--url-prefix', default=DEFAULT_URL)
     parser.add_argument('--clickhouse-data-path', default='/var/lib/clickhouse/')
@@ -6,7 +6,7 @@ RUN echo "deb [trusted=yes] http://apt.llvm.org/bionic/ llvm-toolchain-bionic-9
 RUN apt-get update -y \
     && env DEBIAN_FRONTEND=noninteractive \
         apt-get install --yes --no-install-recommends \
-            python-requests
+            python3-requests

 COPY s3downloader /s3downloader
 COPY run.sh /run.sh
@@ -1,15 +1,15 @@
 #!/bin/bash

 kill_clickhouse () {
-    kill `pgrep -u clickhouse` 2>/dev/null
+    kill "$(pgrep -u clickhouse)" 2>/dev/null

-    for i in {1..10}
+    for _ in {1..10}
     do
-        if ! kill -0 `pgrep -u clickhouse`; then
+        if ! kill -0 "$(pgrep -u clickhouse)"; then
             echo "No clickhouse process"
             break
         else
-            echo "Process" `pgrep -u clickhouse` "still alive"
+            echo "Process $(pgrep -u clickhouse) still alive"
             sleep 10
         fi
     done
@@ -20,19 +20,19 @@ start_clickhouse () {
 }

 wait_llvm_profdata () {
-    while kill -0 `pgrep llvm-profdata-10`;
+    while kill -0 "$(pgrep llvm-profdata-10)"
     do
-        echo "Waiting for profdata" `pgrep llvm-profdata-10` "still alive"
+        echo "Waiting for profdata $(pgrep llvm-profdata-10) still alive"
         sleep 3
     done
 }

 merge_client_files_in_background () {
-    client_files=`ls /client_*profraw 2>/dev/null`
+    client_files=$(ls /client_*profraw 2>/dev/null)

-    if [ ! -z "$client_files" ]
+    if [ -n "$client_files" ]
     then
-        llvm-profdata-10 merge -sparse $client_files -o merged_client_`date +%s`.profraw
+        llvm-profdata-10 merge -sparse "$client_files" -o "merged_client_$(date +%s).profraw"
-        rm $client_files
+        rm "$client_files"
     fi
 }
@@ -66,12 +66,13 @@ function start()
         fi
         timeout 120 service clickhouse-server start
         sleep 0.5
-        counter=$(($counter + 1))
+        counter=$((counter + 1))
     done
 }

 start

+# shellcheck disable=SC2086 # No quotes because I want to split it into words.
 if ! /s3downloader --dataset-names $DATASETS; then
     echo "Cannot download datatsets"
     exit 1
@@ -100,11 +101,11 @@ LLVM_PROFILE_FILE='client_%h_%p_%m.profraw' clickhouse-client --query "RENAME TA
 LLVM_PROFILE_FILE='client_%h_%p_%m.profraw' clickhouse-client --query "RENAME TABLE datasets.visits_v1 TO test.visits"
 LLVM_PROFILE_FILE='client_%h_%p_%m.profraw' clickhouse-client --query "SHOW TABLES FROM test"

-if cat /usr/bin/clickhouse-test | grep -q -- "--use-skip-list"; then
+if grep -q -- "--use-skip-list" /usr/bin/clickhouse-test; then
     SKIP_LIST_OPT="--use-skip-list"
 fi

-LLVM_PROFILE_FILE='client_%h_%p_%m.profraw' clickhouse-test --testname --shard --zookeeper --no-stateless "$SKIP_LIST_OPT" $ADDITIONAL_OPTIONS $SKIP_TESTS_OPTION 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee test_output/test_result.txt
+LLVM_PROFILE_FILE='client_%h_%p_%m.profraw' clickhouse-test --testname --shard --zookeeper --no-stateless "$SKIP_LIST_OPT" "$ADDITIONAL_OPTIONS" "$SKIP_TESTS_OPTION" 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee test_output/test_result.txt

 kill_clickhouse
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 # -*- coding: utf-8 -*-
 import os
 import sys
@@ -74,7 +74,7 @@ if __name__ == "__main__":
     parser = argparse.ArgumentParser(
         description="Simple tool for dowloading datasets for clickhouse from S3")
-    parser.add_argument('--dataset-names', required=True, nargs='+', choices=AVAILABLE_DATASETS.keys())
+    parser.add_argument('--dataset-names', required=True, nargs='+', choices=list(AVAILABLE_DATASETS.keys()))
     parser.add_argument('--url-prefix', default=DEFAULT_URL)
     parser.add_argument('--clickhouse-data-path', default='/var/lib/clickhouse/')
@@ -12,10 +12,10 @@ RUN apt-get update -y \
     ncdu \
     netcat-openbsd \
     openssl \
-    python \
-    python-lxml \
-    python-requests \
-    python-termcolor \
+    python3 \
+    python3-lxml \
+    python3-requests \
+    python3-termcolor \
     qemu-user-static \
     sudo \
     telnet \
@@ -13,8 +13,8 @@ dpkg -i package_folder/clickhouse-test_*.deb

 service clickhouse-server start && sleep 5

-if cat /usr/bin/clickhouse-test | grep -q -- "--use-skip-list"; then
+if grep -q -- "--use-skip-list" /usr/bin/clickhouse-test; then
     SKIP_LIST_OPT="--use-skip-list"
 fi

-clickhouse-test --testname --shard --zookeeper "$SKIP_LIST_OPT" $ADDITIONAL_OPTIONS $SKIP_TESTS_OPTION 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee test_output/test_result.txt
+clickhouse-test --testname --shard --zookeeper "$SKIP_LIST_OPT" "$ADDITIONAL_OPTIONS" "$SKIP_TESTS_OPTION" 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee test_output/test_result.txt
@@ -3,10 +3,10 @@ FROM yandex/clickhouse-test-base

 RUN apt-get update -y && \
     apt-get install -y --no-install-recommends \
-        python-pip \
-        python-setuptools
+        python3-pip \
+        python3-setuptools

-RUN pip install \
+RUN python3 -m pip install \
     pytest \
     pytest-html \
     pytest-timeout \
@@ -17,4 +17,4 @@ CMD dpkg -i package_folder/clickhouse-common-static_*.deb; \
     dpkg -i package_folder/clickhouse-server_*.deb; \
     dpkg -i package_folder/clickhouse-client_*.deb; \
     dpkg -i package_folder/clickhouse-test_*.deb; \
-    python -m pytest /usr/share/clickhouse-test/queries -n $(nproc) --html=test_output/report.html --self-contained-html
+    python3 -m pytest /usr/share/clickhouse-test/queries -n $(nproc) --html=test_output/report.html --self-contained-html
@@ -54,10 +54,10 @@ RUN apt-get --allow-unauthenticated update -y \
     perl \
     pigz \
     pkg-config \
-    python \
-    python-lxml \
-    python-requests \
-    python-termcolor \
+    python3 \
+    python3-lxml \
+    python3-requests \
+    python3-termcolor \
     qemu-user-static \
     sudo \
     telnet \
@@ -13,8 +13,8 @@ dpkg -i package_folder/clickhouse-test_*.deb

 service clickhouse-server start && sleep 5

-if cat /usr/bin/clickhouse-test | grep -q -- "--use-skip-list"; then
+if grep -q -- "--use-skip-list" /usr/bin/clickhouse-test; then
     SKIP_LIST_OPT="--use-skip-list"
 fi

-clickhouse-test --testname --shard --zookeeper "$SKIP_LIST_OPT" $ADDITIONAL_OPTIONS $SKIP_TESTS_OPTION 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee test_output/test_result.txt
+clickhouse-test --testname --shard --zookeeper "$SKIP_LIST_OPT" "$ADDITIONAL_OPTIONS" "$SKIP_TESTS_OPTION" 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee test_output/test_result.txt
@@ -12,10 +12,10 @@ RUN apt-get update -y \
     fakeroot \
     debhelper \
     expect \
-    python \
-    python-lxml \
-    python-termcolor \
-    python-requests \
+    python3 \
+    python3-lxml \
+    python3-termcolor \
+    python3-requests \
     sudo \
     openssl \
     ncdu \
@@ -1,24 +1,24 @@
 #!/bin/bash

 kill_clickhouse () {
-    echo "clickhouse pids" `ps aux | grep clickhouse` | ts '%Y-%m-%d %H:%M:%S'
-    kill `pgrep -u clickhouse` 2>/dev/null
+    echo "clickhouse pids $(pgrep -u clickhouse)" | ts '%Y-%m-%d %H:%M:%S'
+    kill "$(pgrep -u clickhouse)" 2>/dev/null

-    for i in {1..10}
+    for _ in {1..10}
     do
-        if ! kill -0 `pgrep -u clickhouse`; then
+        if ! kill -0 "$(pgrep -u clickhouse)"; then
             echo "No clickhouse process" | ts '%Y-%m-%d %H:%M:%S'
             break
         else
-            echo "Process" `pgrep -u clickhouse` "still alive" | ts '%Y-%m-%d %H:%M:%S'
+            echo "Process $(pgrep -u clickhouse) still alive" | ts '%Y-%m-%d %H:%M:%S'
             sleep 10
         fi
     done

     echo "Will try to send second kill signal for sure"
-    kill `pgrep -u clickhouse` 2>/dev/null
+    kill "$(pgrep -u clickhouse)" 2>/dev/null
     sleep 5
-    echo "clickhouse pids" `ps aux | grep clickhouse` | ts '%Y-%m-%d %H:%M:%S'
+    echo "clickhouse pids $(pgrep -u clickhouse)" | ts '%Y-%m-%d %H:%M:%S'
 }

 start_clickhouse () {
@@ -47,11 +47,11 @@ start_clickhouse

 sleep 10

-if cat /usr/bin/clickhouse-test | grep -q -- "--use-skip-list"; then
+if grep -q -- "--use-skip-list" /usr/bin/clickhouse-test; then
     SKIP_LIST_OPT="--use-skip-list"
 fi

-LLVM_PROFILE_FILE='client_coverage.profraw' clickhouse-test --testname --shard --zookeeper "$SKIP_LIST_OPT" $ADDITIONAL_OPTIONS $SKIP_TESTS_OPTION 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee test_output/test_result.txt
+LLVM_PROFILE_FILE='client_coverage.profraw' clickhouse-test --testname --shard --zookeeper "$SKIP_LIST_OPT" "$ADDITIONAL_OPTIONS" "$SKIP_TESTS_OPTION" 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee test_output/test_result.txt

 kill_clickhouse
@@ -10,10 +10,10 @@ RUN apt-get update -y \
     debhelper \
     parallel \
     expect \
-    python \
-    python-lxml \
-    python-termcolor \
-    python-requests \
+    python3 \
+    python3-lxml \
+    python3-termcolor \
+    python3-requests \
     curl \
     sudo \
     openssl \
@@ -13,7 +13,7 @@ function stop()
     timeout 120 service clickhouse-server stop

     # Wait for process to disappear from processlist and also try to kill zombies.
-    while kill -9 $(pidof clickhouse-server)
+    while kill -9 "$(pidof clickhouse-server)"
     do
         echo "Killed clickhouse-server"
         sleep 0.5
@@ -35,17 +35,21 @@ function start()
         fi
         timeout 120 service clickhouse-server start
         sleep 0.5
-        counter=$(($counter + 1))
+        counter=$((counter + 1))
     done
 }

 # install test configs
 /usr/share/clickhouse-test/config/install.sh

+# for clickhouse-server (via service)
 echo "ASAN_OPTIONS='malloc_context_size=10 verbosity=1 allocator_release_to_os_interval_ms=10000'" >> /etc/environment
+# for clickhouse-client
+export ASAN_OPTIONS='malloc_context_size=10 verbosity=1 allocator_release_to_os_interval_ms=10000'

 start

+# shellcheck disable=SC2086 # No quotes because I want to split it into words.
 /s3downloader --dataset-names $DATASETS
 chmod 777 -R /var/lib/clickhouse
 clickhouse-client --query "ATTACH DATABASE IF NOT EXISTS datasets ENGINE = Ordinary"
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 # -*- coding: utf-8 -*-
 from multiprocessing import cpu_count
 from subprocess import Popen, check_call
@@ -2,17 +2,19 @@
 set -e -x

+# Not sure why shellcheck complains that rc is not assigned before it is referenced.
+# shellcheck disable=SC2154
 trap 'rc=$?; echo EXITED WITH: $rc; exit $rc' EXIT

 # CLI option to prevent rebuilding images, just re-run tests with images leftover from previuos time
 readonly NO_REBUILD_FLAG="--no-rebuild"

-readonly CLICKHOUSE_DOCKER_DIR="$(realpath ${1})"
+readonly CLICKHOUSE_DOCKER_DIR="$(realpath "${1}")"
 readonly CLICKHOUSE_PACKAGES_ARG="${2}"
 CLICKHOUSE_SERVER_IMAGE="${3}"

-if [ ${CLICKHOUSE_PACKAGES_ARG} != ${NO_REBUILD_FLAG} ]; then
+if [ "${CLICKHOUSE_PACKAGES_ARG}" != "${NO_REBUILD_FLAG}" ]; then
-    readonly CLICKHOUSE_PACKAGES_DIR="$(realpath ${2})" # or --no-rebuild
+    readonly CLICKHOUSE_PACKAGES_DIR="$(realpath "${2}")" # or --no-rebuild
 fi

@@ -25,7 +27,7 @@ fi
 # TODO: optionally mount most recent clickhouse-test and queries directory from local machine

-if [ ${CLICKHOUSE_PACKAGES_ARG} != ${NO_REBUILD_FLAG} ]; then
+if [ "${CLICKHOUSE_PACKAGES_ARG}" != "${NO_REBUILD_FLAG}" ]; then
     docker build --network=host \
         -f "${CLICKHOUSE_DOCKER_DIR}/test/stateless/clickhouse-statelest-test-runner.Dockerfile" \
         --target clickhouse-test-runner-base \
@@ -49,7 +51,7 @@ fi
 if [ -z "${CLICKHOUSE_SERVER_IMAGE}" ]; then
     CLICKHOUSE_SERVER_IMAGE="yandex/clickhouse-server:local"

-    if [ ${CLICKHOUSE_PACKAGES_ARG} != ${NO_REBUILD_FLAG} ]; then
+    if [ "${CLICKHOUSE_PACKAGES_ARG}" != "${NO_REBUILD_FLAG}" ]; then
         docker build --network=host \
             -f "${CLICKHOUSE_DOCKER_DIR}/server/local.Dockerfile" \
             --target clickhouse-server-base \
@@ -7,7 +7,7 @@ set +e
 reties=0
 while true; do
     docker info &>/dev/null && break
-    reties=$[$reties+1]
+    reties=$((reties+1))
     if [[ $reties -ge 100 ]]; then # 10 sec max
         echo "Can't start docker daemon, timeout exceeded." >&2
         exit 1;
@@ -116,7 +116,7 @@ ninja
 Example for Fedora Rawhide:

 ``` bash
 sudo yum update
-yum --nogpg install git cmake make gcc-c++ python2
+yum --nogpg install git cmake make gcc-c++ python3
 git clone --recursive https://github.com/ClickHouse/ClickHouse.git
 mkdir build && cd build
 cmake ../ClickHouse
@@ -165,6 +165,22 @@ Similar to GraphiteMergeTree, the Kafka engine supports extended configuration u
 For a list of possible configuration options, see the [librdkafka configuration reference](https://github.com/edenhill/librdkafka/blob/master/CONFIGURATION.md). Use the underscore (`_`) instead of a dot in the ClickHouse configuration. For example, `check.crcs=true` will be `<check_crcs>true</check_crcs>`.

+### Kerberos support {#kafka-kerberos-support}
+
+To deal with Kerberos-aware Kafka, add the `security_protocol` child element with the `sasl_plaintext` value. It is sufficient if a Kerberos ticket-granting ticket has been obtained and cached by OS facilities.
+ClickHouse is able to maintain Kerberos credentials using a keytab file. Consider the `sasl_kerberos_service_name`, `sasl_kerberos_keytab`, `sasl_kerberos_principal` and `sasl_kerberos_kinit_cmd` child elements.
+
+Example:
+
+``` xml
+  <!-- Kerberos-aware Kafka -->
+  <kafka>
+    <security_protocol>SASL_PLAINTEXT</security_protocol>
+    <sasl_kerberos_keytab>/home/kafkauser/kafkauser.keytab</sasl_kerberos_keytab>
+    <sasl_kerberos_principal>kafkauser/kafkahost@EXAMPLE.COM</sasl_kerberos_principal>
+  </kafka>
+```
+
 ## Virtual Columns {#virtual-columns}

 - `_topic` — Kafka topic.
@@ -66,6 +66,32 @@ If no conditions met for a data part, ClickHouse uses the `lz4` compression.
 </compression>
 ```

+## custom_settings_prefixes {#custom_settings_prefixes}
+
+List of prefixes for [custom settings](../../operations/settings/index.md#custom_settings). The prefixes must be separated with commas.
+
+**Example**
+
+```xml
+<custom_settings_prefixes>custom_</custom_settings_prefixes>
+```
+
+**See Also**
+
+- [Custom settings](../../operations/settings/index.md#custom_settings)
+
+## core_dump
+
+Configures the soft limit for the core dump file size; one gigabyte by default.
+
+```xml
+<core_dump>
+    <size_limit>1073741824</size_limit>
+</core_dump>
+```
+
+(The hard limit is configured via system tools.)
+
 ## default\_database {#default-database}

 The default database.
@@ -405,7 +431,7 @@ Limits total RAM usage by the ClickHouse server.
 Possible values:

 - Positive integer.
-- 0 — Unlimited.
+- 0 (auto).

 Default value: `0`.
@@ -28,4 +28,30 @@ Ways to configure settings, in order of priority:

 Settings that can only be made in the server config file are not covered in this section.

+## Custom Settings {#custom_settings}
+
+In addition to the common [settings](../../operations/settings/settings.md), users can define custom settings.
+
+A custom setting name must begin with one of the predefined prefixes. The list of these prefixes must be declared in the [custom_settings_prefixes](../../operations/server-configuration-parameters/settings.md#custom_settings_prefixes) parameter in the server configuration file.
+
+```xml
+<custom_settings_prefixes>custom_</custom_settings_prefixes>
+```
+
+To define a custom setting, use the `SET` command:
+
+```sql
+SET custom_a = 123;
+```
+
+To get the current value of a custom setting, use the `getSetting()` function:
+
+```sql
+SELECT getSetting('custom_a');
+```
+
+**See Also**
+
+- [Server Configuration Settings](../../operations/server-configuration-parameters/settings.md)
+
 [Original article](https://clickhouse.tech/docs/en/operations/settings/) <!--hide-->
@@ -1144,9 +1144,9 @@ See also:

 ## insert\_quorum\_timeout {#settings-insert_quorum_timeout}

-Write to quorum timeout in seconds. If the timeout has passed and no write has taken place yet, ClickHouse will generate an exception and the client must repeat the query to write the same block to the same or any other replica.
+Write to quorum timeout in milliseconds. If the timeout has passed and no write has taken place yet, ClickHouse will generate an exception and the client must repeat the query to write the same block to the same or any other replica.

-Default value: 60 seconds.
+Default value: 600000 milliseconds (ten minutes).
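
Since this is a query-level setting, it can also be changed for a single session; a minimal sketch (the value shown is simply the new default, not a recommendation):

``` sql
SET insert_quorum_timeout = 600000; -- milliseconds
```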

 See also:
@@ -33,6 +33,7 @@ Columns:
     - `'ExceptionWhileProcessing' = 4` — Exception during the query execution.
 - `event_date` ([Date](../../sql-reference/data-types/date.md)) — Query starting date.
 - `event_time` ([DateTime](../../sql-reference/data-types/datetime.md)) — Query starting time.
+- `event_time_microseconds` ([DateTime](../../sql-reference/data-types/datetime.md)) — Query starting time with microseconds precision.
 - `query_start_time` ([DateTime](../../sql-reference/data-types/datetime.md)) — Start time of query execution.
 - `query_start_time_microseconds` ([DateTime64](../../sql-reference/data-types/datetime64.md)) — Start time of query execution with microsecond precision.
 - `query_duration_ms` ([UInt64](../../sql-reference/data-types/int-uint.md#uint-ranges)) — Duration of query execution in milliseconds.
@@ -84,16 +85,18 @@ Columns:
 **Example**

 ``` sql
-SELECT * FROM system.query_log LIMIT 1 FORMAT Vertical;
+SELECT * FROM system.query_log LIMIT 1 \G
 ```

 ``` text
 Row 1:
 ──────
 type: QueryStart
-event_date: 2020-05-13
-event_time: 2020-05-13 14:02:28
-query_start_time: 2020-05-13 14:02:28
+event_date: 2020-09-11
+event_time: 2020-09-11 10:08:17
+event_time_microseconds: 2020-09-11 10:08:17.063321
+query_start_time: 2020-09-11 10:08:17
+query_start_time_microseconds: 2020-09-11 10:08:17.063321
 query_duration_ms: 0
 read_rows: 0
 read_bytes: 0
@@ -102,36 +105,37 @@ written_bytes: 0
 result_rows: 0
 result_bytes: 0
 memory_usage: 0
-query: SELECT 1
+current_database: default
+query: INSERT INTO test1 VALUES
 exception_code: 0
 exception:
 stack_trace:
 is_initial_query: 1
 user: default
-query_id: 5e834082-6f6d-4e34-b47b-cd1934f4002a
+query_id: 50a320fd-85a8-49b8-8761-98a86bcbacef
 address: ::ffff:127.0.0.1
-port: 57720
+port: 33452
 initial_user: default
-initial_query_id: 5e834082-6f6d-4e34-b47b-cd1934f4002a
+initial_query_id: 50a320fd-85a8-49b8-8761-98a86bcbacef
 initial_address: ::ffff:127.0.0.1
-initial_port: 57720
+initial_port: 33452
 interface: 1
-os_user: bayonet
-client_hostname: clickhouse.ru-central1.internal
-client_name: ClickHouse client
-client_revision: 54434
+os_user: bharatnc
+client_hostname: tower
+client_name: ClickHouse
+client_revision: 54437
 client_version_major: 20
-client_version_minor: 4
-client_version_patch: 1
+client_version_minor: 7
+client_version_patch: 2
 http_method: 0
 http_user_agent:
 quota_key:
-revision: 54434
+revision: 54440
 thread_ids: []
 ProfileEvents.Names: []
 ProfileEvents.Values: []
-Settings.Names: ['use_uncompressed_cache','load_balancing','log_queries','max_memory_usage']
-Settings.Values: ['0','random','1','10000000000']
+Settings.Names: ['use_uncompressed_cache','load_balancing','log_queries','max_memory_usage','allow_introspection_functions']
+Settings.Values: ['0','random','1','10000000000','1']
 ```
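
For instance, the documented columns make it easy to pull the slowest recent queries; a sketch:

``` sql
SELECT query, query_duration_ms
FROM system.query_log
WHERE type = 'QueryFinish' AND event_date = today()
ORDER BY query_duration_ms DESC
LIMIT 5;
```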

 **See Also**
@@ -15,6 +15,7 @@ Columns:
 - `event_date` ([Date](../../sql-reference/data-types/date.md)) — The date when the thread has finished execution of the query.
 - `event_time` ([DateTime](../../sql-reference/data-types/datetime.md)) — The date and time when the thread has finished execution of the query.
+- `event_time_microseconds` ([DateTime](../../sql-reference/data-types/datetime.md)) — The date and time when the thread has finished execution of the query, with microseconds precision.
 - `query_start_time` ([DateTime](../../sql-reference/data-types/datetime.md)) — Start time of query execution.
 - `query_start_time_microseconds` ([DateTime64](../../sql-reference/data-types/datetime64.md)) — Start time of query execution with microsecond precision.
 - `query_duration_ms` ([UInt64](../../sql-reference/data-types/int-uint.md#uint-ranges)) — Duration of query execution.
@@ -63,50 +64,51 @@ Columns:
 **Example**

 ``` sql
-SELECT * FROM system.query_thread_log LIMIT 1 FORMAT Vertical
+SELECT * FROM system.query_thread_log LIMIT 1 \G
 ```

 ``` text
 Row 1:
 ──────
-event_date: 2020-05-13
-event_time: 2020-05-13 14:02:28
-query_start_time: 2020-05-13 14:02:28
-query_duration_ms: 0
-read_rows: 1
-read_bytes: 1
-written_rows: 0
-written_bytes: 0
-memory_usage: 0
-peak_memory_usage: 0
-thread_name: QueryPipelineEx
-thread_id: 28952
-master_thread_id: 28924
-query: SELECT 1
+event_date: 2020-09-11
+event_time: 2020-09-11 10:08:17
+event_time_microseconds: 2020-09-11 10:08:17.134042
+query_start_time: 2020-09-11 10:08:17
+query_start_time_microseconds: 2020-09-11 10:08:17.063150
+query_duration_ms: 70
+read_rows: 0
+read_bytes: 0
+written_rows: 1
+written_bytes: 12
+memory_usage: 4300844
+peak_memory_usage: 4300844
+thread_name: TCPHandler
+thread_id: 638133
+master_thread_id: 638133
+query: INSERT INTO test1 VALUES
 is_initial_query: 1
 user: default
-query_id: 5e834082-6f6d-4e34-b47b-cd1934f4002a
+query_id: 50a320fd-85a8-49b8-8761-98a86bcbacef
 address: ::ffff:127.0.0.1
-port: 57720
+port: 33452
 initial_user: default
-initial_query_id: 5e834082-6f6d-4e34-b47b-cd1934f4002a
+initial_query_id: 50a320fd-85a8-49b8-8761-98a86bcbacef
 initial_address: ::ffff:127.0.0.1
-initial_port: 57720
+initial_port: 33452
 interface: 1
-os_user: bayonet
-client_hostname: clickhouse.ru-central1.internal
-client_name: ClickHouse client
-client_revision: 54434
+os_user: bharatnc
+client_hostname: tower
+client_name: ClickHouse
+client_revision: 54437
 client_version_major: 20
-client_version_minor: 4
-client_version_patch: 1
+client_version_minor: 7
+client_version_patch: 2
 http_method: 0
 http_user_agent:
 quota_key:
-revision: 54434
+revision: 54440
-ProfileEvents.Names: ['ContextLock','RealTimeMicroseconds','UserTimeMicroseconds','OSCPUWaitMicroseconds','OSCPUVirtualTimeMicroseconds']
-ProfileEvents.Values: [1,97,81,5,81]
+ProfileEvents.Names: ['Query','InsertQuery','FileOpen','WriteBufferFromFileDescriptorWrite','WriteBufferFromFileDescriptorWriteBytes','ReadCompressedBytes','CompressedReadBufferBlocks','CompressedReadBufferBytes','IOBufferAllocs','IOBufferAllocBytes','FunctionExecute','CreatedWriteBufferOrdinary','DiskWriteElapsedMicroseconds','NetworkReceiveElapsedMicroseconds','NetworkSendElapsedMicroseconds','InsertedRows','InsertedBytes','SelectedRows','SelectedBytes','MergeTreeDataWriterRows','MergeTreeDataWriterUncompressedBytes','MergeTreeDataWriterCompressedBytes','MergeTreeDataWriterBlocks','MergeTreeDataWriterBlocksAlreadySorted','ContextLock','RWLockAcquiredReadLocks','RealTimeMicroseconds','UserTimeMicroseconds','SoftPageFaults','OSCPUVirtualTimeMicroseconds','OSWriteBytes','OSReadChars','OSWriteChars']
+ProfileEvents.Values: [1,1,11,11,591,148,3,71,29,6533808,1,11,72,18,47,1,12,1,12,1,12,189,1,1,10,2,70853,2748,49,2747,45056,422,1520]
+...
 ```

 **See Also**
@@ -6,6 +6,7 @@ Columns:

 - `event_date` (Date) — Date of the entry.
 - `event_time` (DateTime) — Time of the entry.
+- `event_time_microseconds` (DateTime) — Time of the entry with microseconds precision.
 - `microseconds` (UInt32) — Microseconds of the entry.
 - `thread_name` (String) — Name of the thread from which the logging was done.
 - `thread_id` (UInt64) — OS thread ID.
@@ -25,4 +26,28 @@ Columns:
 - `source_file` (LowCardinality(String)) — Source file from which the logging was done.
 - `source_line` (UInt64) — Source line from which the logging was done.

+**Example**
+
+``` sql
+SELECT * FROM system.text_log LIMIT 1 \G
+```
+
+``` text
+Row 1:
+──────
+event_date: 2020-09-10
+event_time: 2020-09-10 11:23:07
+event_time_microseconds: 2020-09-10 11:23:07.871397
+microseconds: 871397
+thread_name: clickhouse-serv
+thread_id: 564917
+level: Information
+query_id:
+logger_name: DNSCacheUpdater
+message: Update period 15 seconds
+revision: 54440
+source_file: /ClickHouse/src/Interpreters/DNSCacheUpdater.cpp; void DB::DNSCacheUpdater::start()
+source_line: 45
+```
+
 [Original article](https://clickhouse.tech/docs/en/operations/system_tables/text_log) <!--hide-->
@@ -12,6 +12,8 @@ Columns:

 - `event_time` ([DateTime](../../sql-reference/data-types/datetime.md)) — Timestamp of the sampling moment.

+- `event_time_microseconds` ([DateTime](../../sql-reference/data-types/datetime.md)) — Timestamp of the sampling moment with microseconds precision.
+
 - `timestamp_ns` ([UInt64](../../sql-reference/data-types/int-uint.md)) — Timestamp of the sampling moment in nanoseconds.

 - `revision` ([UInt32](../../sql-reference/data-types/int-uint.md)) — ClickHouse server build revision.
@@ -38,13 +40,16 @@ SELECT * FROM system.trace_log LIMIT 1 \G
 ``` text
 Row 1:
 ──────
-event_date: 2019-11-15
-event_time: 2019-11-15 15:09:38
-revision: 54428
-timer_type: Real
-thread_number: 48
-query_id: acc4d61f-5bd1-4a3e-bc91-2180be37c915
-trace: [94222141367858,94222152240175,94222152325351,94222152329944,94222152330796,94222151449980,94222144088167,94222151682763,94222144088167,94222151682763,94222144088167,94222144058283,94222144059248,94222091840750,94222091842302,94222091831228,94222189631488,140509950166747,140509942945935]
+event_date: 2020-09-10
+event_time: 2020-09-10 11:23:09
+event_time_microseconds: 2020-09-10 11:23:09.872924
+timestamp_ns: 1599762189872924510
+revision: 54440
+trace_type: Memory
+thread_id: 564963
+query_id:
+trace: [371912858,371912789,371798468,371799717,371801313,371790250,624462773,566365041,566440261,566445834,566460071,566459914,566459842,566459580,566459469,566459389,566459341,566455774,371993941,371988245,372158848,372187428,372187309,372187093,372185478,140222123165193,140222122205443]
+size: 5244400
 ```

 [Original article](https://clickhouse.tech/docs/en/operations/system_tables/trace_log) <!--hide-->
@@ -3,25 +3,27 @@ toc_priority: 42
 toc_title: Decimal
 ---

-# Decimal(P, S), Decimal32(S), Decimal64(S), Decimal128(S) {#decimalp-s-decimal32s-decimal64s-decimal128s}
+# Decimal(P, S), Decimal32(S), Decimal64(S), Decimal128(S), Decimal256(S) {#decimalp-s-decimal32s-decimal64s-decimal128s}

 Signed fixed-point numbers that keep precision during add, subtract and multiply operations. For division least significant digits are discarded (not rounded).

 ## Parameters {#parameters}

-- P - precision. Valid range: \[ 1 : 38 \]. Determines how many decimal digits number can have (including fraction).
+- P - precision. Valid range: \[ 1 : 76 \]. Determines how many decimal digits number can have (including fraction).
 - S - scale. Valid range: \[ 0 : P \]. Determines how many decimal digits fraction can have.

 Depending on P parameter value Decimal(P, S) is a synonym for:
 - P from \[ 1 : 9 \] - for Decimal32(S)
 - P from \[ 10 : 18 \] - for Decimal64(S)
 - P from \[ 19 : 38 \] - for Decimal128(S)
+- P from \[ 39 : 76 \] - for Decimal256(S)

 ## Decimal Value Ranges {#decimal-value-ranges}

 - Decimal32(S) - ( -1 \* 10^(9 - S), 1 \* 10^(9 - S) )
 - Decimal64(S) - ( -1 \* 10^(18 - S), 1 \* 10^(18 - S) )
 - Decimal128(S) - ( -1 \* 10^(38 - S), 1 \* 10^(38 - S) )
+- Decimal256(S) - ( -1 \* 10^(76 - S), 1 \* 10^(76 - S) )

 For example, Decimal32(4) can contain numbers from -99999.9999 to 99999.9999 with 0.0001 step.
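
Decimal256 behaves the same way over a wider range; a quick sketch (on 20.x-era servers the big-integer types were still experimental, so `SET allow_experimental_bigint_types = 1` may be required first):

``` sql
SELECT toDecimal256('-99999.9999', 4) AS d, toTypeName(d);
```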
@@ -38,6 +40,7 @@ Binary operations on Decimal result in wider result type (with any order of argu
 - `Decimal64(S1) <op> Decimal32(S2) -> Decimal64(S)`
 - `Decimal128(S1) <op> Decimal32(S2) -> Decimal128(S)`
 - `Decimal128(S1) <op> Decimal64(S2) -> Decimal128(S)`
+- `Decimal256(S1) <op> Decimal<32|64|128>(S2) -> Decimal256(S)`

 Rules for scale:
@@ -1,9 +1,9 @@
 ---
 toc_priority: 40
-toc_title: UInt8, UInt16, UInt32, UInt64, Int8, Int16, Int32, Int64
+toc_title: UInt8, UInt16, UInt32, UInt64, UInt256, Int8, Int16, Int32, Int64, Int128, Int256
 ---

-# UInt8, UInt16, UInt32, UInt64, Int8, Int16, Int32, Int64 {#uint8-uint16-uint32-uint64-int8-int16-int32-int64}
+# UInt8, UInt16, UInt32, UInt64, UInt256, Int8, Int16, Int32, Int64, Int128, Int256 {#uint8-uint16-uint32-uint64-int8-int16-int32-int64}

 Fixed-length integers, with or without a sign.
@@ -13,6 +13,8 @@ Fixed-length integers, with or without a sign.
 - Int16 - \[-32768 : 32767\]
 - Int32 - \[-2147483648 : 2147483647\]
 - Int64 - \[-9223372036854775808 : 9223372036854775807\]
+- Int128 - \[-170141183460469231731687303715884105728 : 170141183460469231731687303715884105727\]
+- Int256 - \[-57896044618658097711785492504343953926634992332820282019728792003956564819968 : 57896044618658097711785492504343953926634992332820282019728792003956564819967\]

 ## Uint Ranges {#uint-ranges}

@@ -20,5 +22,8 @@ Fixed-length integers, with or without a sign.
 - UInt16 - \[0 : 65535\]
 - UInt32 - \[0 : 4294967295\]
 - UInt64 - \[0 : 18446744073709551615\]
+- UInt256 - \[0 : 115792089237316195423570985008687907853269984665640564039457584007913129639935\]

+UInt128 is not supported yet.
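
A quick way to see the wide types in action (same caveat as above: on 20.x-era builds `SET allow_experimental_bigint_types = 1` may be needed first):

``` sql
SELECT toInt256(-1) AS i, toUInt256(1) AS u, toTypeName(i), toTypeName(u);
```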

 [Original article](https://clickhouse.tech/docs/en/data_types/int_uint/) <!--hide-->
@@ -357,7 +357,7 @@ SELECT date_trunc('hour', now())

 ## now {#now}

-Accepts zero arguments and returns the current time at one of the moments of request execution.
+Accepts zero or one argument (the timezone) and returns the current time at one of the moments of request execution; if the `timezone` argument is provided, it returns the current time in that timezone.
 This function returns a constant, even if the request took a long time to complete.
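
An illustrative call (the timezone name is only an example):

``` sql
SELECT now(), now('Europe/Moscow');
```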

 ## today {#today}
@@ -16,3 +16,82 @@ The [stochasticLinearRegression](../../sql-reference/aggregate-functions/referen
 ## stochasticLogisticRegression {#stochastic-logistic-regression}

 The [stochasticLogisticRegression](../../sql-reference/aggregate-functions/reference/stochasticlogisticregression.md#agg_functions-stochasticlogisticregression) aggregate function implements stochastic gradient descent method for binary classification problem. Uses `evalMLMethod` to predict on new data.

+## bayesAB {#bayesab}
+
+Compares test groups (variants) and calculates for each group the probability to be the best one. The first group is used as a control group.
+
+**Syntax**
+
+``` sql
+bayesAB(distribution_name, higher_is_better, variant_names, x, y)
+```
+
+**Parameters**
+
+- `distribution_name` — Name of the probability distribution. [String](../../sql-reference/data-types/string.md). Possible values:
+    - `beta` for [Beta distribution](https://en.wikipedia.org/wiki/Beta_distribution)
+    - `gamma` for [Gamma distribution](https://en.wikipedia.org/wiki/Gamma_distribution)
+- `higher_is_better` — Boolean flag. [Boolean](../../sql-reference/data-types/boolean.md). Possible values:
+    - `0` - lower values are considered to be better than higher
+    - `1` - higher values are considered to be better than lower
+- `variant_names` - Variant names. [Array](../../sql-reference/data-types/array.md)([String](../../sql-reference/data-types/string.md)).
+- `x` - Numbers of tests for the corresponding variants. [Array](../../sql-reference/data-types/array.md)([Float64](../../sql-reference/data-types/float.md)).
+- `y` - Numbers of successful tests for the corresponding variants. [Array](../../sql-reference/data-types/array.md)([Float64](../../sql-reference/data-types/float.md)).
+
+!!! note "Note"
+    All three arrays must have the same size. All `x` and `y` values must be non-negative constant numbers. `y` cannot be larger than `x`.
+
+**Returned values**
+
+For each variant the function calculates:
+
+- `beats_control` - long-term probability to out-perform the first (control) variant
+- `to_be_best` - long-term probability to out-perform all other variants
+
+Type: JSON.
+
+**Example**
+
+Query:
+
+``` sql
+SELECT bayesAB('beta', 1, ['Control', 'A', 'B'], [3000., 3000., 3000.], [100., 90., 110.]) FORMAT PrettySpace;
+```
+
+Result:
+
+``` text
+{
+   "data":[
+      {
+         "variant_name":"Control",
+         "x":3000,
+         "y":100,
+         "beats_control":0,
+         "to_be_best":0.22619
+      },
+      {
+         "variant_name":"A",
+         "x":3000,
+         "y":90,
+         "beats_control":0.23469,
+         "to_be_best":0.04671
+      },
+      {
+         "variant_name":"B",
+         "x":3000,
+         "y":110,
+         "beats_control":0.7580899999999999,
+         "to_be_best":0.7271
+      }
+   ]
+}
+```
+
 [Original article](https://clickhouse.tech/docs/en/query_language/functions/machine-learning-functions/) <!--hide-->
@@ -1491,4 +1491,40 @@ Result:
 ```

+## getSetting {#getSetting}
+
+Returns the current value of a [custom setting](../../operations/settings/index.md#custom_settings).
+
+**Syntax**
+
+```sql
+getSetting('custom_setting');
+```
+
+**Parameter**
+
+- `custom_setting` — The setting name. [String](../../sql-reference/data-types/string.md).
+
+**Returned value**
+
+- The current value of the setting.
+
+**Example**
+
+```sql
+SET custom_a = 123;
+SELECT getSetting('custom_a');
+```
+
+**Result**
+
+```
+123
+```
+
+**See Also**
+
+- [Custom Settings](../../operations/settings/index.md#custom_settings)
+
 [Original article](https://clickhouse.tech/docs/en/query_language/functions/other_functions/) <!--hide-->
@@ -487,4 +487,75 @@ Returns the CRC64 checksum of a string, using CRC-64-ECMA polynomial.

 The result type is UInt64.

+## normalizeQuery {#normalized-query}
+
+Replaces literals, sequences of literals and complex aliases with placeholders.
+
+**Syntax**
+
+``` sql
+normalizeQuery(x)
+```
+
+**Parameters**
+
+- `x` — Sequence of characters. [String](../../sql-reference/data-types/string.md).
+
+**Returned value**
+
+- Sequence of characters with placeholders.
+
+Type: [String](../../sql-reference/data-types/string.md).
+
+**Example**
+
+Query:
+
+``` sql
+SELECT normalizeQuery('[1, 2, 3, x]') AS query;
+```
+
+Result:
+
+``` text
+┌─query────┐
+│ [?.., x] │
+└──────────┘
+```
+
+## normalizedQueryHash {#normalized-query-hash}
+
+Returns identical 64-bit hash values for similar queries, ignoring the values of literals. Helps to analyze the query log.
+
+**Syntax**
+
+``` sql
+normalizedQueryHash(x)
+```
+
+**Parameters**
+
+- `x` — Sequence of characters. [String](../../sql-reference/data-types/string.md).
+
+**Returned value**
+
+- Hash value.
+
+Type: [UInt64](../../sql-reference/data-types/int-uint.md#uint-ranges).
+
+**Example**
+
+Query:
+
+``` sql
+SELECT normalizedQueryHash('SELECT 1 AS `xyz`') != normalizedQueryHash('SELECT 1 AS `abc`') AS res;
+```
+
+Result:
+
+``` text
+┌─res─┐
+│   1 │
+└─────┘
+```
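
Because the hash ignores literal values, it is convenient for grouping the query log by query shape; a sketch over `system.query_log`:

``` sql
SELECT normalizedQueryHash(query) AS h, any(query) AS sample, count() AS cnt
FROM system.query_log
GROUP BY h
ORDER BY cnt DESC
LIMIT 10;
```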

 [Original article](https://clickhouse.tech/docs/en/query_language/functions/string_functions/) <!--hide-->
@@ -11,7 +11,7 @@ When you convert a value from one to another data type, you should remember that

 ClickHouse has the [same behavior as C++ programs](https://en.cppreference.com/w/cpp/language/implicit_conversion).

-## toInt(8\|16\|32\|64) {#toint8163264}
+## toInt(8\|16\|32\|64\|128\|256) {#toint8163264}

 Converts an input value to the [Int](../../sql-reference/data-types/int-uint.md) data type. This function family includes:
@@ -19,6 +19,8 @@ Converts an input value to the [Int](../../sql-reference/data-types/int-uint.md)
 - `toInt16(expr)` — Results in the `Int16` data type.
 - `toInt32(expr)` — Results in the `Int32` data type.
 - `toInt64(expr)` — Results in the `Int64` data type.
+- `toInt128(expr)` — Results in the `Int128` data type.
+- `toInt256(expr)` — Results in the `Int256` data type.

 **Parameters**
@@ -26,7 +28,7 @@ Converts an input value to the [Int](../../sql-reference/data-types/int-uint.md)

 **Returned value**

-Integer value in the `Int8`, `Int16`, `Int32`, or `Int64` data type.
+Integer value in the `Int8`, `Int16`, `Int32`, `Int64`, `Int128` or `Int256` data type.

 Functions use [rounding towards zero](https://en.wikipedia.org/wiki/Rounding#Rounding_towards_zero), meaning they truncate fractional digits of numbers.
@@ -44,9 +46,9 @@ SELECT toInt64(nan), toInt32(32), toInt16('16'), toInt8(8.8)
 └──────────────────────┴─────────────┴───────────────┴─────────────┘
 ```

-## toInt(8\|16\|32\|64)OrZero {#toint8163264orzero}
+## toInt(8\|16\|32\|64\|128\|256)OrZero {#toint8163264orzero}

-It takes an argument of type String and tries to parse it into Int (8 \| 16 \| 32 \| 64). If failed, returns 0.
+It takes an argument of type String and tries to parse it into Int (8 \| 16 \| 32 \| 64 \| 128 \| 256). If it fails, it returns 0.

 **Example**
@@ -60,9 +62,9 @@ select toInt64OrZero('123123'), toInt8OrZero('123qwe123')
 └─────────────────────────┴───────────────────────────┘
 ```

-## toInt(8\|16\|32\|64)OrNull {#toint8163264ornull}
+## toInt(8\|16\|32\|64\|128\|256)OrNull {#toint8163264ornull}

-It takes an argument of type String and tries to parse it into Int (8 \| 16 \| 32 \| 64). If failed, returns NULL.
+It takes an argument of type String and tries to parse it into Int (8 \| 16 \| 32 \| 64 \| 128 \| 256). If it fails, it returns NULL.

 **Example**
@@ -76,7 +78,7 @@ select toInt64OrNull('123123'), toInt8OrNull('123qwe123')
 └─────────────────────────┴───────────────────────────┘
 ```

-## toUInt(8\|16\|32\|64) {#touint8163264}
+## toUInt(8\|16\|32\|64\|256) {#touint8163264}

 Converts an input value to the [UInt](../../sql-reference/data-types/int-uint.md) data type. This function family includes:
@@ -84,6 +86,7 @@ Converts an input value to the [UInt](../../sql-reference/data-types/int-uint.md
 - `toUInt16(expr)` — Results in the `UInt16` data type.
 - `toUInt32(expr)` — Results in the `UInt32` data type.
 - `toUInt64(expr)` — Results in the `UInt64` data type.
+- `toUInt256(expr)` — Results in the `UInt256` data type.

 **Parameters**
@@ -91,7 +94,7 @@ Converts an input value to the [UInt](../../sql-reference/data-types/int-uint.md

 **Returned value**

-Integer value in the `UInt8`, `UInt16`, `UInt32`, or `UInt64` data type.
+Integer value in the `UInt8`, `UInt16`, `UInt32`, `UInt64` or `UInt256` data type.

 Functions use [rounding towards zero](https://en.wikipedia.org/wiki/Rounding#Rounding_towards_zero), meaning they truncate fractional digits of numbers.
@@ -109,9 +112,9 @@ SELECT toUInt64(nan), toUInt32(-32), toUInt16('16'), toUInt8(8.8)
 └─────────────────────┴───────────────┴────────────────┴──────────────┘
 ```

-## toUInt(8\|16\|32\|64)OrZero {#touint8163264orzero}
+## toUInt(8\|16\|32\|64\|256)OrZero {#touint8163264orzero}

-## toUInt(8\|16\|32\|64)OrNull {#touint8163264ornull}
+## toUInt(8\|16\|32\|64\|256)OrNull {#touint8163264ornull}

 ## toFloat(32\|64) {#tofloat3264}
@@ -131,21 +134,23 @@ SELECT toUInt64(nan), toUInt32(-32), toUInt16('16'), toUInt8(8.8)

 ## toDateTimeOrNull {#todatetimeornull}

-## toDecimal(32\|64\|128) {#todecimal3264128}
+## toDecimal(32\|64\|128\|256) {#todecimal3264128}

 Converts `value` to the [Decimal](../../sql-reference/data-types/decimal.md) data type with precision of `S`. The `value` can be a number or a string. The `S` (scale) parameter specifies the number of decimal places.

 - `toDecimal32(value, S)`
 - `toDecimal64(value, S)`
 - `toDecimal128(value, S)`
+- `toDecimal256(value, S)`

-## toDecimal(32\|64\|128)OrNull {#todecimal3264128ornull}
+## toDecimal(32\|64\|128\|256)OrNull {#todecimal3264128ornull}

 Converts an input string to a [Nullable(Decimal(P,S))](../../sql-reference/data-types/decimal.md) data type value. This family of functions include:

 - `toDecimal32OrNull(expr, S)` — Results in `Nullable(Decimal32(S))` data type.
 - `toDecimal64OrNull(expr, S)` — Results in `Nullable(Decimal64(S))` data type.
 - `toDecimal128OrNull(expr, S)` — Results in `Nullable(Decimal128(S))` data type.
+- `toDecimal256OrNull(expr, S)` — Results in `Nullable(Decimal256(S))` data type.

 These functions should be used instead of `toDecimal*()` functions, if you prefer to get a `NULL` value instead of an exception in the event of an input value parsing error.
@@ -183,13 +188,14 @@ SELECT toDecimal32OrNull(toString(-1.111), 2) AS val, toTypeName(val)
 └──────┴────────────────────────────────────────────────────┘
 ```

-## toDecimal(32\|64\|128)OrZero {#todecimal3264128orzero}
+## toDecimal(32\|64\|128\|256)OrZero {#todecimal3264128orzero}

 Converts an input value to the [Decimal(P,S)](../../sql-reference/data-types/decimal.md) data type. This family of functions include:

 - `toDecimal32OrZero( expr, S)` — Results in `Decimal32(S)` data type.
 - `toDecimal64OrZero( expr, S)` — Results in `Decimal64(S)` data type.
 - `toDecimal128OrZero( expr, S)` — Results in `Decimal128(S)` data type.
+- `toDecimal256OrZero( expr, S)` — Results in `Decimal256(S)` data type.

 These functions should be used instead of `toDecimal*()` functions, if you prefer to get a `0` value instead of an exception in the event of an input value parsing error.
@@ -729,4 +735,45 @@ SELECT fromUnixTimestamp64Milli(i64, 'UTC')
 └──────────────────────────────────────┘
 ```

+## formatRow {#formatrow}
+
+Converts arbitrary expressions into a string via the given format.
+
+**Syntax**
+
+``` sql
+formatRow(format, x, y, ...)
+```
+
+**Parameters**
+
+- `format` — Text format. For example, [CSV](../../interfaces/formats.md#csv), [TSV](../../interfaces/formats.md#tabseparated).
+- `x`,`y`, ... — Expressions.
+
+**Returned value**
+
+- A formatted string (for text formats it's usually terminated with the new line character).
+
+**Example**
+
+Query:
+
+``` sql
+SELECT formatRow('CSV', number, 'good')
+FROM numbers(3)
+```
+
+Result:
+
+``` text
+┌─formatRow('CSV', number, 'good')─┐
+│ 0,"good"
+│ 1,"good"
+│ 2,"good"
+└──────────────────────────────────┘
+```
+
 [Original article](https://clickhouse.tech/docs/en/query_language/functions/type_conversion_functions/) <!--hide-->
@@ -102,7 +102,7 @@ Ejemplo de OpenSUSE Tumbleweed:
 Ejemplo de Fedora Rawhide:

     sudo yum update
-    yum --nogpg install git cmake make gcc-c++ python2
+    yum --nogpg install git cmake make gcc-c++ python3
     git clone --recursive https://github.com/ClickHouse/ClickHouse.git
     mkdir build && cd build
     cmake ../ClickHouse
@@ -1,15 +1,15 @@
 ---
-machine_translated: true
+machine_translated: false
-machine_translated_rev: 72537a2d527c63c07aa5d2361a8829f3895cf2bd
+machine_translated_rev:
 toc_priority: 0
-toc_title: "Descripci\xF3n"
+toc_title: "Descripción"
 ---

 # ¿Qué es ClickHouse? {#what-is-clickhouse}

-ClickHouse es un sistema de gestión de bases de datos orientado a columnas (DBMS) para el procesamiento analítico en línea de consultas (OLAP).
+ClickHouse es un sistema de gestión de bases de datos (DBMS), orientado a columnas, para el procesamiento analítico de consultas en línea (OLAP).

-En un “normal” DBMS orientado a filas, los datos se almacenan en este orden:
+En un DBMS “normal”, orientado a filas, los datos se almacenan en este orden:

 | Fila | Argumento | JavaEnable | Titular | GoodEvent | EventTime |
 |------|-------------|------------|---------------------------|-----------|---------------------|
@@ -36,7 +36,7 @@ Estos ejemplos solo muestran el orden en el que se organizan los datos. Los valo

 Ejemplos de un DBMS orientado a columnas: Vertica, Paraccel (Actian Matrix y Amazon Redshift), Sybase IQ, Exasol, Infobright, InfiniDB, MonetDB (VectorWise y Actian Vector), LucidDB, SAP HANA, Google Dremel, Google PowerDrill, Druid y kdb+.

-Different orders for storing data are better suited to different scenarios. The data access scenario refers to what queries are made, how often, and in what proportion; how much data is read for each type of query rows, columns, and bytes; the relationship between reading and updating data; the working size of the data and how locally it is used; whether transactions are used, and how isolated they are; requirements for data replication and logical integrity; requirements for latency and throughput for each type of query, and so on.
+Los diferentes modos de ordenar los datos al guardarlos se adecúan mejor a diferentes escenarios. El escenario de acceso a los datos se refiere a qué consultas se hacen, con qué frecuencia y en qué proporción; cuántos datos se leen para cada tipo de consulta - filas, columnas y bytes; la relación entre lectura y actualización de datos; el tamaño de trabajo de los datos y qué tan localmente son usados; si se usan transacciones y qué tan aisladas están; requerimientos de replicación de los datos y de integridad lógica; requerimientos de latencia y caudal (throughput) para cada tipo de consulta, y cosas por el estilo.

 Cuanto mayor sea la carga en el sistema, más importante es personalizar el sistema configurado para que coincida con los requisitos del escenario de uso, y más fino será esta personalización. No existe un sistema que sea igualmente adecuado para escenarios significativamente diferentes. Si un sistema es adaptable a un amplio conjunto de escenarios, bajo una carga alta, el sistema manejará todos los escenarios igualmente mal, o funcionará bien para solo uno o algunos de los escenarios posibles.


@@ -103,7 +103,7 @@ $ cd ..
Example for Fedora Rawhide:

    sudo yum update
    yum --nogpg install git cmake make gcc-c++ python3
    git clone --recursive https://github.com/ClickHouse/ClickHouse.git
    mkdir build && cd build
    cmake ../ClickHouse


@@ -102,7 +102,7 @@ Example for openSUSE Tumbleweed:
Example for Fedora Rawhide:

    sudo yum update
    yum --nogpg install git cmake make gcc-c++ python3
    git clone --recursive https://github.com/ClickHouse/ClickHouse.git
    mkdir build && cd build
    cmake ../ClickHouse


@@ -102,7 +102,7 @@ Example for openSUSE Tumbleweed:
Example for Fedora Rawhide:

    sudo yum update
    yum --nogpg install git cmake make gcc-c++ python3
    git clone --recursive https://github.com/ClickHouse/ClickHouse.git
    mkdir build && cd build
    cmake ../ClickHouse


@@ -1,22 +1,24 @@
# Decimal(P, S), Decimal32(S), Decimal64(S), Decimal128(S), Decimal256(S) {#decimalp-s-decimal32s-decimal64s-decimal128s}
Signed fixed-point numbers that keep precision during addition, subtraction, and multiplication. For division, the least significant digits are discarded (not rounded).

## Parameters {#parametry}

- P - precision. Valid range: \[ 1 : 76 \]. Determines how many decimal digits (including the fractional part) the number can hold.
- S - scale. Valid range: \[ 0 : P \]. Determines how many decimal digits the fractional part contains.

Depending on the P parameter, Decimal(P, S) is a synonym for:

- P from \[ 1 : 9 \] - Decimal32(S)
- P from \[ 10 : 18 \] - Decimal64(S)
- P from \[ 19 : 38 \] - Decimal128(S)
- P from \[ 39 : 76 \] - Decimal256(S)

## Decimal ranges {#diapazony-decimal}

- Decimal32(S) - ( -1 \* 10^(9 - S), 1 \* 10^(9 - S) )
- Decimal64(S) - ( -1 \* 10^(18 - S), 1 \* 10^(18 - S) )
- Decimal128(S) - ( -1 \* 10^(38 - S), 1 \* 10^(38 - S) )
- Decimal256(S) - ( -1 \* 10^(76 - S), 1 \* 10^(76 - S) )

For example, Decimal32(4) can hold numbers from -99999.9999 to 99999.9999 with a step of 0.0001.
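A minimal sketch of declaring a value of the new 256-bit type (not from the patch itself; assumes a server where Decimal256 is available, which at the time of this change was gated behind the `allow_experimental_bigint_types` setting):

``` sql
SET allow_experimental_bigint_types = 1;

SELECT
    toDecimal256('-99999.9999', 4) AS d,
    toTypeName(d) AS type; -- reports Decimal(76, 4)
```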
@@ -32,6 +34,7 @@
- `Decimal64(S1) <op> Decimal32(S2) -> Decimal64(S)`
- `Decimal128(S1) <op> Decimal32(S2) -> Decimal128(S)`
- `Decimal128(S1) <op> Decimal64(S2) -> Decimal128(S)`
- `Decimal256(S1) <op> Decimal<32|64|128>(S2) -> Decimal256(S)`
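As a quick illustration of the new widest promotion rule just listed (again a sketch, under the same assumption that Decimal256 support is enabled):

``` sql
-- The wider operand determines the result type:
-- Decimal256 <op> Decimal32 promotes to Decimal256.
SELECT toTypeName(toDecimal256('1', 2) + toDecimal32(1, 2)); -- a 76-digit Decimal type
```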
The following rules apply to the scale of the result:


@@ -1,4 +1,4 @@
# UInt8, UInt16, UInt32, UInt64, UInt256, Int8, Int16, Int32, Int64, Int128, Int256 {#uint8-uint16-uint32-uint64-int8-int16-int32-int64}

Fixed-length integers, signed or unsigned.
@@ -8,6 +8,8 @@
- Int16 - \[-32768 : 32767\]
- Int32 - \[-2147483648 : 2147483647\]
- Int64 - \[-9223372036854775808 : 9223372036854775807\]
- Int128 - \[-170141183460469231731687303715884105728 : 170141183460469231731687303715884105727\]
- Int256 - \[-57896044618658097711785492504343953926634992332820282019728792003956564819968 : 57896044618658097711785492504343953926634992332820282019728792003956564819967\]
## UInt ranges {#uint-ranges}

@@ -15,5 +17,8 @@
- UInt16 - \[0 : 65535\]
- UInt32 - \[0 : 4294967295\]
- UInt64 - \[0 : 18446744073709551615\]
- UInt256 - \[0 : 115792089237316195423570985008687907853269984665640564039457584007913129639935\]
UInt128 is not implemented yet.
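A small sketch using the boundary values from the ranges above (illustrative only; assumes the extended integer types are enabled via `allow_experimental_bigint_types`):

``` sql
SET allow_experimental_bigint_types = 1;

SELECT
    toInt128('-170141183460469231731687303715884105728') AS min_int128,
    toUInt256('115792089237316195423570985008687907853269984665640564039457584007913129639935') AS max_uint256;
```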
[Original article](https://clickhouse.tech/docs/ru/data_types/int_uint/) <!--hide-->


@@ -479,4 +479,75 @@ SELECT trimBoth(' Hello, world! ')
The result type is UInt64.

## normalizeQuery {#normalized-query}

Replaces literals, sequences of literals, and complex aliases with placeholders.

**Syntax**
``` sql
normalizeQuery(x)
```
**Parameters**

- `x` — Sequence of characters. [String](../../sql-reference/data-types/string.md).

**Returned value**

- Sequence of characters with placeholders.

Type: [String](../../sql-reference/data-types/string.md).

**Example**

Query:
``` sql
SELECT normalizeQuery('[1, 2, 3, x]') AS query;
```
Result:
``` text
┌─query────┐
│ [?.., x] │
└──────────┘
```
## normalizedQueryHash {#normalized-query-hash}

Returns identical 64-bit hash values, with the values of literals removed, for similar queries. This helps to analyze the query log.

**Syntax**
``` sql
normalizedQueryHash(x)
```
**Parameters**

- `x` — Sequence of characters. [String](../../sql-reference/data-types/string.md).

**Returned value**

- Hash sum.

Type: [UInt64](../../sql-reference/data-types/int-uint.md#uint-ranges).

**Example**

Query:
``` sql
SELECT normalizedQueryHash('SELECT 1 AS `xyz`') != normalizedQueryHash('SELECT 1 AS `abc`') AS res;
```
Result:
``` text
┌─res─┐
│ 1 │
└─────┘
```
[Original article](https://clickhouse.tech/docs/ru/query_language/functions/string_functions/) <!--hide-->


@@ -6,7 +6,7 @@
ClickHouse's conversion behavior is similar to the [behavior of C++ programs](https://en.cppreference.com/w/cpp/language/implicit_conversion).

## toInt(8\|16\|32\|64\|128\|256) {#toint8163264}

Converts an input value to a type from the [Int](../../sql-reference/functions/type-conversion-functions.md) family. The family includes:
@@ -14,6 +14,8 @@
- `toInt16(expr)` — returns a value of type `Int16`.
- `toInt32(expr)` — returns a value of type `Int32`.
- `toInt64(expr)` — returns a value of type `Int64`.
- `toInt128(expr)` — returns a value of type `Int128`.
- `toInt256(expr)` — returns a value of type `Int256`.
**Parameters**

@@ -21,7 +23,7 @@
**Returned value**

An integer of type `Int8`, `Int16`, `Int32`, `Int64`, `Int128`, or `Int256`.

The functions use [rounding towards zero](https://en.wikipedia.org/wiki/Rounding#Rounding_towards_zero), meaning they truncate the fractional part of the number.
@@ -39,9 +41,9 @@ SELECT toInt64(nan), toInt32(32), toInt16('16'), toInt8(8.8)
└──────────────────────┴─────────────┴───────────────┴─────────────┘
```
## toInt(8\|16\|32\|64\|128\|256)OrZero {#toint8163264orzero}

Takes an argument of type String and tries to parse it into an Int(8\|16\|32\|64\|128\|256). If it fails, returns 0.

**Example**
@@ -55,9 +57,9 @@ select toInt64OrZero('123123'), toInt8OrZero('123qwe123')
└─────────────────────────┴───────────────────────────┘
```
## toInt(8\|16\|32\|64\|128\|256)OrNull {#toint8163264ornull}

Takes an argument of type String and tries to parse it into an Int(8\|16\|32\|64\|128\|256). If it fails, returns NULL.

**Example**
@@ -71,7 +73,7 @@ select toInt64OrNull('123123'), toInt8OrNull('123qwe123')
└─────────────────────────┴───────────────────────────┘
```
## toUInt(8\|16\|32\|64\|256) {#touint8163264}

Converts an input value to a type from the [UInt](../../sql-reference/functions/type-conversion-functions.md) family. The family includes:
@@ -79,6 +81,7 @@ select toInt64OrNull('123123'), toInt8OrNull('123qwe123')
- `toUInt16(expr)` — returns a value of type `UInt16`.
- `toUInt32(expr)` — returns a value of type `UInt32`.
- `toUInt64(expr)` — returns a value of type `UInt64`.
- `toUInt256(expr)` — returns a value of type `UInt256`.

**Parameters**
@@ -86,7 +89,7 @@ select toInt64OrNull('123123'), toInt8OrNull('123qwe123')
**Returned value**

An integer of type `UInt8`, `UInt16`, `UInt32`, `UInt64`, or `UInt256`.

The functions use [rounding towards zero](https://en.wikipedia.org/wiki/Rounding#Rounding_towards_zero), meaning they truncate the fractional part of the number.
@@ -104,9 +107,9 @@ SELECT toUInt64(nan), toUInt32(-32), toUInt16('16'), toUInt8(8.8)
└─────────────────────┴───────────────┴────────────────┴──────────────┘
```
## toUInt(8\|16\|32\|64\|256)OrZero {#touint8163264orzero}

## toUInt(8\|16\|32\|64\|256)OrNull {#touint8163264ornull}
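These mirror their `toInt*OrZero` and `toInt*OrNull` counterparts; a brief sketch of the failure behavior (illustrative, not from the patch):

``` sql
SELECT
    toUInt64OrZero('123qwe123') AS or_zero, -- 0 on a parse failure
    toUInt64OrNull('123qwe123') AS or_null; -- NULL on a parse failure
```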
## toFloat(32\|64) {#tofloat3264}

@@ -126,21 +129,23 @@ SELECT toUInt64(nan), toUInt32(-32), toUInt16('16'), toUInt8(8.8)
## toDateTimeOrNull {#todatetimeornull}

## toDecimal(32\|64\|128\|256) {#todecimal3264128}
Converts `value` to the [Decimal](../../sql-reference/functions/type-conversion-functions.md) data type with `S` decimal places. `value` can be a number or a string. The `S` (scale) parameter specifies the number of digits in the fractional part.

- `toDecimal32(value, S)`
- `toDecimal64(value, S)`
- `toDecimal128(value, S)`
- `toDecimal256(value, S)`
## toDecimal(32\|64\|128\|256)OrNull {#todecimal3264128ornull}

Converts an input string to a value of the [Nullable(Decimal(P, S))](../../sql-reference/functions/type-conversion-functions.md) data type. The family includes:

- `toDecimal32OrNull(expr, S)` — returns a value of type `Nullable(Decimal32(S))`.
- `toDecimal64OrNull(expr, S)` — returns a value of type `Nullable(Decimal64(S))`.
- `toDecimal128OrNull(expr, S)` — returns a value of type `Nullable(Decimal128(S))`.
- `toDecimal256OrNull(expr, S)` — returns a value of type `Nullable(Decimal256(S))`.

These functions should be used instead of `toDecimal*()` functions if you prefer to get `NULL` instead of an exception when the input value cannot be parsed.
@@ -178,13 +183,14 @@ SELECT toDecimal32OrNull(toString(-1.111), 2) AS val, toTypeName(val)
└──────┴────────────────────────────────────────────────────┘
```
## toDecimal(32\|64\|128\|256)OrZero {#todecimal3264128orzero}

Converts an input value to the [Decimal(P, S)](../../sql-reference/functions/type-conversion-functions.md) data type. The family includes:

- `toDecimal32OrZero(expr, S)` — returns a value of type `Decimal32(S)`.
- `toDecimal64OrZero(expr, S)` — returns a value of type `Decimal64(S)`.
- `toDecimal128OrZero(expr, S)` — returns a value of type `Decimal128(S)`.
- `toDecimal256OrZero(expr, S)` — returns a value of type `Decimal256(S)`.

These functions should be used instead of `toDecimal*()` functions if you prefer to get `0` instead of an exception when the input value cannot be parsed.
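A short sketch of the zero-on-failure behavior with the new 256-bit variant (illustrative; assumes Decimal256 support is enabled):

``` sql
SELECT
    toDecimal256OrZero('bad input', 2) AS val,
    toTypeName(val) AS type; -- val is 0, type is Decimal(76, 2)
```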
@@ -717,4 +723,44 @@ SELECT toLowCardinality('1')
└───────────────────────┘
```
## formatRow {#formatrow}
Converts arbitrary expressions into a string via the given format.

**Syntax**
``` sql
formatRow(format, x, y, ...)
```
**Parameters**

- `format` — Text format. For example, [CSV](../../interfaces/formats.md#csv), [TSV](../../interfaces/formats.md#tabseparated).
- `x`,`y`, ... — Expressions.

**Returned value**

- A formatted string (for text formats it is usually terminated by a newline character).

**Example**

Query:
``` sql
SELECT formatRow('CSV', number, 'good')
FROM numbers(3)
```
Result:
``` text
┌─formatRow('CSV', number, 'good')─┐
│ 0,"good"
│ 1,"good"
│ 2,"good"
└──────────────────────────────────┘
```
[Original article](https://clickhouse.tech/docs/ru/query_language/functions/type_conversion_functions/) <!--hide-->


@@ -1,6 +1,6 @@
# INTO OUTFILE Clause {#into-outfile-clause}

To redirect the output of a `SELECT` query into the specified file on the client side, add an `INTO OUTFILE filename` clause to it (where filename is a string literal).
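A minimal sketch of the clause in `clickhouse-client` (the file name and format here are illustrative):

``` sql
SELECT number
FROM system.numbers
LIMIT 5
INTO OUTFILE 'numbers.tsv'
FORMAT TabSeparated
```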
## Implementation Details {#implementation-details}


@@ -185,7 +185,7 @@ def build(args):
    test.test_templates(args.website_dir)

    if not args.skip_docs:
        generate_cmake_flags_files()
        build_docs(args)

    from github import build_releases

Some files were not shown because too many files have changed in this diff.