Merge branch 'master' of github.com:ClickHouse/ClickHouse into copier-backquotes

nikitamikhaylov 2020-10-09 16:26:31 +03:00
commit 88a5302759
618 changed files with 9698 additions and 5590 deletions

.github/codecov.yml (new file)

@@ -0,0 +1,17 @@
codecov:
max_report_age: off
strict_yaml_branch: "master"
ignore:
- "contrib"
- "docs"
- "benchmark"
- "tests"
- "docker"
- "debian"
- "cmake"
comment: false
github_checks:
annotations: false

.gitmodules

@@ -107,7 +107,6 @@
[submodule "contrib/grpc"]
path = contrib/grpc
url = https://github.com/ClickHouse-Extras/grpc.git
branch = v1.25.0
[submodule "contrib/aws"]
path = contrib/aws
url = https://github.com/ClickHouse-Extras/aws-sdk-cpp.git
@@ -159,7 +158,7 @@
url = https://github.com/openldap/openldap.git
[submodule "contrib/AMQP-CPP"]
path = contrib/AMQP-CPP
url = https://github.com/CopernicaMarketingSoftware/AMQP-CPP.git
url = https://github.com/ClickHouse-Extras/AMQP-CPP.git
[submodule "contrib/cassandra"]
path = contrib/cassandra
url = https://github.com/ClickHouse-Extras/cpp-driver.git
@@ -186,3 +185,4 @@
[submodule "contrib/cyrus-sasl"]
path = contrib/cyrus-sasl
url = https://github.com/cyrusimap/cyrus-sasl
branch = cyrus-sasl-2.1


@@ -300,6 +300,11 @@ if (COMPILER_CLANG)
option(ENABLE_THINLTO "Clang-specific link time optimization" ON)
endif()
# Set new experimental pass manager, it's a performance, build time and binary size win.
# Can be removed after https://reviews.llvm.org/D66490 merged and released to at least two versions of clang.
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fexperimental-new-pass-manager")
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fexperimental-new-pass-manager")
# We cannot afford to use LTO when compiling unit tests, and it's not enough
# to only supply -fno-lto at the final linking stage. So we disable it
# completely.
@@ -513,7 +518,13 @@ endif ()
macro (add_executable target)
# invoke built-in add_executable
# explicitly acquire and interpose malloc symbols by clickhouse_malloc
# If GLIBC_COMPATIBILITY is ON and ENABLE_THINLTO is on, then provide the memcpy symbol explicitly to neutralize ThinLTO's libcall generation.
if (GLIBC_COMPATIBILITY AND ENABLE_THINLTO)
_add_executable (${ARGV} $<TARGET_OBJECTS:clickhouse_malloc> $<TARGET_OBJECTS:clickhouse_memcpy>)
else ()
_add_executable (${ARGV} $<TARGET_OBJECTS:clickhouse_malloc>)
endif ()
get_target_property (type ${target} TYPE)
if (${type} STREQUAL EXECUTABLE)
# operator::new/delete for executables (MemoryTracker stuff)
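The interposition above relies on link order: an object file that supplies a strong definition of a libc symbol wins over the libc version, so every call, including the memcpy libcalls that ThinLTO synthesizes, resolves to the bundled implementation. A minimal standalone C++ sketch of the idea (illustrative only, not the actual FastMemcpy code):

// interpose.cpp: link this object into the executable and all memcpy calls
// resolve here instead of in libc. Sketch only; the implementation is naive.
#include <cstddef>

extern "C" void * memcpy(void * __restrict dst, const void * __restrict src, size_t n)
{
    auto * d = static_cast<unsigned char *>(dst);
    const auto * s = static_cast<const unsigned char *>(src);
    // A real implementation uses wide, vectorized copies. Note also that
    // compilers can pattern-match a byte loop back into a memcpy call, so
    // files like this are built with -fno-builtin to avoid self-recursion.
    for (size_t i = 0; i < n; ++i)
        d[i] = s[i];
    return dst;
}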


@@ -17,5 +17,4 @@ ClickHouse is an open-source column-oriented database management system that all
## Upcoming Events
* [ClickHouse for Edge Analytics](https://ones2020.sched.com/event/bWPs) on September 29, 2020.
* [ClickHouse online meetup (in Russian)](https://clck.ru/R2zB9) on October 1, 2020.


@@ -10,11 +10,14 @@ currently being supported with security updates:
| 1.x | :x: |
| 18.x | :x: |
| 19.x | :x: |
| 19.14 | :white_check_mark: |
| 20.1 | :x: |
| 20.3 | :white_check_mark: |
| 20.4 | :white_check_mark: |
| 20.5 | :white_check_mark: |
| 20.4 | :x: |
| 20.5 | :x: |
| 20.6 | :x: |
| 20.7 | :white_check_mark: |
| 20.8 | :white_check_mark: |
| 20.9 | :white_check_mark: |
## Reporting a Vulnerability


@@ -38,10 +38,10 @@ bool hasInputData()
}
LineReader::Suggest::WordsRange LineReader::Suggest::getCompletions(const String & prefix, size_t prefix_length) const
std::optional<LineReader::Suggest::WordsRange> LineReader::Suggest::getCompletions(const String & prefix, size_t prefix_length) const
{
if (!ready)
return std::make_pair(words.end(), words.end());
return std::nullopt;
std::string_view last_word;


@@ -4,6 +4,7 @@
#include <atomic>
#include <vector>
#include <optional>
class LineReader
{
@@ -18,7 +19,7 @@ public:
std::atomic<bool> ready{false};
/// Get iterators for the matched range of words if any.
WordsRange getCompletions(const String & prefix, size_t prefix_length) const;
std::optional<WordsRange> getCompletions(const String & prefix, size_t prefix_length) const;
};
using Patterns = std::vector<const char *>;


@@ -30,7 +30,8 @@ static LineReader::Suggest::Words::const_iterator end;
static void findRange(const char * prefix, size_t prefix_length)
{
std::string prefix_str(prefix);
std::tie(pos, end) = suggest->getCompletions(prefix_str, prefix_length);
if (auto completions = suggest->getCompletions(prefix_str, prefix_length))
std::tie(pos, end) = *completions;
}
/// Iterates through matched range.


@@ -70,8 +70,9 @@ ReplxxLineReader::ReplxxLineReader(
auto callback = [&suggest] (const String & context, size_t context_size)
{
auto range = suggest.getCompletions(context, context_size);
return Replxx::completions_t(range.first, range.second);
if (auto range = suggest.getCompletions(context, context_size))
return Replxx::completions_t(range->first, range->second);
return Replxx::completions_t();
};
rx.set_completion_callback(callback);
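Taken together, the hunks above change the contract of getCompletions: an empty iterator range used to mean either "no matches" or "suggestions not ready yet", while std::optional makes the second case a distinct, explicit state that every caller must handle. A condensed, self-contained sketch of the pattern with stand-in types (not the actual ClickHouse classes):

#include <iostream>
#include <optional>
#include <string>
#include <utility>
#include <vector>

using Words = std::vector<std::string>;
using WordsRange = std::pair<Words::const_iterator, Words::const_iterator>;

// Before: returned {words.end(), words.end()} when not ready, an empty range
// indistinguishable from "no matches". After: absence is its own state.
std::optional<WordsRange> getCompletions(const Words & words, bool ready)
{
    if (!ready)
        return std::nullopt; /// Suggestions are still being loaded.
    return std::make_pair(words.begin(), words.end());
}

int main()
{
    Words words{"SELECT", "SHOW"};
    if (auto range = getCompletions(words, true))
        for (auto it = range->first; it != range->second; ++it)
            std::cout << *it << '\n';
}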


@@ -436,7 +436,54 @@ private:
}
template <typename T>
constexpr static auto multiply(const integer<Bits, Signed> & lhs, const T & rhs)
constexpr static integer<Bits, Signed>
multiply(const integer<Bits, Signed> & lhs, const T & rhs)
{
if constexpr (Bits == 256 && sizeof(base_type) == 8)
{
/// @sa https://github.com/abseil/abseil-cpp/blob/master/absl/numeric/int128.h
using HalfType = unsigned __int128;
HalfType a01 = (HalfType(lhs.items[little(1)]) << 64) + lhs.items[little(0)];
HalfType a23 = (HalfType(lhs.items[little(3)]) << 64) + lhs.items[little(2)];
HalfType a0 = lhs.items[little(0)];
HalfType a1 = lhs.items[little(1)];
HalfType b01 = rhs;
uint64_t b0 = b01;
uint64_t b1 = 0;
HalfType b23 = 0;
if constexpr (sizeof(T) > 8)
b1 = b01 >> 64;
if constexpr (sizeof(T) > 16)
b23 = (HalfType(rhs.items[little(3)]) << 64) + rhs.items[little(2)];
HalfType r23 = a23 * b01 + a01 * b23 + a1 * b1;
HalfType r01 = a0 * b0;
HalfType r12 = (r01 >> 64) + (r23 << 64);
HalfType r12_x = a1 * b0;
integer<Bits, Signed> res;
res.items[little(0)] = r01;
res.items[little(3)] = r23 >> 64;
if constexpr (sizeof(T) > 8)
{
HalfType r12_y = a0 * b1;
r12_x += r12_y;
if (r12_x < r12_y)
++res.items[little(3)];
}
r12 += r12_x;
if (r12 < r12_x)
++res.items[little(3)];
res.items[little(1)] = r12;
res.items[little(2)] = r12 >> 64;
return res;
}
else
{
integer<Bits, Signed> res{};
#if 1
@@ -477,6 +524,7 @@ private:
return res;
}
}
public:
constexpr static integer<Bits, Signed> operator_unary_tilda(const integer<Bits, Signed> & lhs) noexcept

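The 256-bit fast path above is classic schoolbook multiplication over half-width limbs: form the partial products, then propagate carries detected with the unsigned wraparound test (sum < addend), the same test used in the `if (r12 < r12_x)` lines. The identical scheme one level down, multiplying two 64-bit values into a 128-bit result using only 64-bit arithmetic, as a standalone sketch (not the library code):

#include <cassert>
#include <cstdint>

struct UInt128 { uint64_t hi, lo; };

// Split each operand into 32-bit halves, multiply the four combinations,
// and recombine; carries are detected via unsigned wraparound.
UInt128 mul64x64(uint64_t a, uint64_t b)
{
    uint64_t a0 = a & 0xFFFFFFFF, a1 = a >> 32;
    uint64_t b0 = b & 0xFFFFFFFF, b1 = b >> 32;

    uint64_t p00 = a0 * b0;  // contributes to bits 0..63
    uint64_t p01 = a0 * b1;  // contributes to bits 32..95
    uint64_t p10 = a1 * b0;  // contributes to bits 32..95
    uint64_t p11 = a1 * b1;  // contributes to bits 64..127

    // Middle column: if the sum wrapped, carry one unit into bit 96.
    uint64_t mid = p01 + p10;
    uint64_t mid_carry = (mid < p01) ? 1 : 0;

    uint64_t lo = p00 + (mid << 32);
    uint64_t lo_carry = (lo < p00) ? 1 : 0;

    uint64_t hi = p11 + (mid >> 32) + (mid_carry << 32) + lo_carry;
    return {hi, lo};
}

int main()
{
    UInt128 r = mul64x64(0xFFFFFFFFFFFFFFFFull, 0xFFFFFFFFFFFFFFFFull);
    assert(r.hi == 0xFFFFFFFFFFFFFFFEull && r.lo == 1);  // (2^64-1)^2 = 2^128 - 2^65 + 1
}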

@@ -27,6 +27,10 @@ if (GLIBC_COMPATIBILITY)
list(APPEND glibc_compatibility_sources musl/getentropy.c)
endif()
add_library (clickhouse_memcpy OBJECT
${ClickHouse_SOURCE_DIR}/contrib/FastMemcpy/memcpy_wrapper.c
)
# Need to omit frame pointers to match the performance of glibc
set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fomit-frame-pointer")


@@ -23,6 +23,7 @@ ExtendedLogMessage ExtendedLogMessage::getFrom(const Poco::Message & base)
msg_ext.time_seconds = static_cast<UInt32>(tv.tv_sec);
msg_ext.time_microseconds = static_cast<UInt32>(tv.tv_usec);
msg_ext.time_in_microseconds = static_cast<UInt64>((tv.tv_sec) * 1000000U + (tv.tv_usec));
if (current_thread)
{

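The new time_in_microseconds field carries the full microsecond-resolution epoch value next to the existing split into whole seconds and the fractional microsecond part. A standalone sketch of how the three fields relate (plain POSIX gettimeofday, no ClickHouse types; the multiplication has to happen in 64 bits to avoid overflow):

#include <cstdint>
#include <cstdio>
#include <sys/time.h>

int main()
{
    timeval tv{};
    gettimeofday(&tv, nullptr);

    uint32_t time_seconds = static_cast<uint32_t>(tv.tv_sec);        // whole seconds
    uint32_t time_microseconds = static_cast<uint32_t>(tv.tv_usec);  // fractional part, 0..999999
    uint64_t time_in_microseconds =
        static_cast<uint64_t>(tv.tv_sec) * 1000000ULL + static_cast<uint64_t>(tv.tv_usec);

    std::printf("%u.%06u s = %llu us\n", time_seconds, time_microseconds,
                static_cast<unsigned long long>(time_in_microseconds));
}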

@@ -23,6 +23,7 @@ public:
uint32_t time_seconds = 0;
uint32_t time_microseconds = 0;
uint64_t time_in_microseconds = 0;
uint64_t thread_id = 0;
std::string query_id;


@@ -76,6 +76,7 @@ void OwnSplitChannel::logSplit(const Poco::Message & msg)
TextLogElement elem;
elem.event_time = msg_ext.time_seconds;
elem.event_time_microseconds = msg_ext.time_in_microseconds;
elem.microseconds = msg_ext.time_microseconds;
elem.thread_name = getThreadName();


@@ -21,8 +21,8 @@ void Pool::Entry::incrementRefCount()
{
if (!data)
return;
++data->ref_count;
if (data->ref_count == 1)
/// First reference, initialize thread
if (data->ref_count.fetch_add(1) == 0)
mysql_thread_init();
}
@@ -30,13 +30,11 @@
{
if (!data)
return;
if (data->ref_count > 0)
{
--data->ref_count;
if (data->ref_count == 0)
/// We were the last user of this thread, deinitialize it
if (data->ref_count.fetch_sub(1) == 1)
mysql_thread_end();
}
}
Pool::Pool(const Poco::Util::AbstractConfiguration & cfg, const std::string & config_name,


@@ -3,6 +3,7 @@
#include <list>
#include <memory>
#include <mutex>
#include <atomic>
#include <Poco/Exception.h>
#include <mysqlxx/Connection.h>
@@ -35,7 +36,9 @@ protected:
struct Connection
{
mysqlxx::Connection conn;
int ref_count = 0;
/// Ref count is modified in the constructor/destructor of Entry,
/// but is also read in pool code.
std::atomic<int> ref_count = 0;
};
public:

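The change above replaces a checked, non-atomic ++/-- with single atomic read-modify-write operations. fetch_add and fetch_sub return the previous value, so exactly one thread can observe the 0 -> 1 transition (and run the one-time initialization) and exactly one can observe 1 -> 0 (and deinitialize). A minimal sketch of the idiom, with hypothetical hooks standing in for mysql_thread_init/mysql_thread_end:

#include <atomic>
#include <cstdio>

std::atomic<int> ref_count{0};

void thread_init() { std::puts("init"); }    // stands in for mysql_thread_init()
void thread_end()  { std::puts("deinit"); }  // stands in for mysql_thread_end()

void increment()
{
    // fetch_add returns the value *before* the increment, so only the
    // creator of the first reference performs the initialization.
    if (ref_count.fetch_add(1) == 0)
        thread_init();
}

void decrement()
{
    // Symmetrically, only the thread that drops the last reference
    // performs the deinitialization.
    if (ref_count.fetch_sub(1) == 1)
        thread_end();
}

int main()
{
    increment();  // prints "init"
    increment();
    decrement();
    decrement();  // prints "deinit"
}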

@@ -1,6 +1,6 @@
#!/usr/bin/env python
#!/usr/bin/env python3
from __future__ import print_function
import sys
import json
@@ -99,7 +99,7 @@ def gen_html_json(options, arguments):
tuples = read_stats_file(options, arguments[1])
print('{')
print('"system": "GreenPlum(x2)",')
print('"version": "%s",' % '4.3.9.1')
print(('"version": "%s",' % '4.3.9.1'))
print('"data_size": 10000000,')
print('"time": "",')
print('"comments": "",')


@@ -0,0 +1,330 @@
#[[
Defines the following variables:
``gRPC_FOUND``
Whether the gRPC framework is found
``gRPC_INCLUDE_DIRS``
The include directories of the gRPC framework, including the include directories of the C++ wrapper.
``gRPC_LIBRARIES``
The libraries of the gRPC framework.
``gRPC_UNSECURE_LIBRARIES``
The libraries of the gRPC framework without SSL.
``_gRPC_CPP_PLUGIN``
The plugin for generating gRPC client and server C++ stubs from `.proto` files
``_gRPC_PYTHON_PLUGIN``
The plugin for generating gRPC client and server Python stubs from `.proto` files
The following :prop_tgt:`IMPORTED` targets are also defined:
``grpc++``
``grpc++_unsecure``
``grpc_cpp_plugin``
``grpc_python_plugin``
Add custom commands to process ``.proto`` files to C++::
protobuf_generate_grpc_cpp(<SRCS> <HDRS>
[DESCRIPTORS <DESC>] [EXPORT_MACRO <MACRO>] [<ARGN>...])
``SRCS``
Variable to define with autogenerated source files
``HDRS``
Variable to define with autogenerated header files
``DESCRIPTORS``
Variable to define with autogenerated descriptor files, if requested.
``EXPORT_MACRO``
is a macro which should expand to ``__declspec(dllexport)`` or
``__declspec(dllimport)`` depending on what is being compiled.
``ARGN``
``.proto`` files
#]]
# Function to generate C++ files from .proto files.
# This function is a modified version of the function PROTOBUF_GENERATE_CPP() copied from https://github.com/Kitware/CMake/blob/master/Modules/FindProtobuf.cmake.
function(PROTOBUF_GENERATE_GRPC_CPP SRCS HDRS)
cmake_parse_arguments(protobuf_generate_grpc_cpp "" "EXPORT_MACRO;DESCRIPTORS" "" ${ARGN})
set(_proto_files "${protobuf_generate_grpc_cpp_UNPARSED_ARGUMENTS}")
if(NOT _proto_files)
message(SEND_ERROR "Error: PROTOBUF_GENERATE_GRPC_CPP() called without any proto files")
return()
endif()
if(PROTOBUF_GENERATE_GRPC_CPP_APPEND_PATH)
set(_append_arg APPEND_PATH)
endif()
if(protobuf_generate_grpc_cpp_DESCRIPTORS)
set(_descriptors DESCRIPTORS)
endif()
if(DEFINED PROTOBUF_IMPORT_DIRS AND NOT DEFINED Protobuf_IMPORT_DIRS)
set(Protobuf_IMPORT_DIRS "${PROTOBUF_IMPORT_DIRS}")
endif()
if(DEFINED Protobuf_IMPORT_DIRS)
set(_import_arg IMPORT_DIRS ${Protobuf_IMPORT_DIRS})
endif()
set(_outvar)
protobuf_generate_grpc(${_append_arg} ${_descriptors} LANGUAGE cpp EXPORT_MACRO ${protobuf_generate_cpp_EXPORT_MACRO} OUT_VAR _outvar ${_import_arg} PROTOS ${_proto_files})
set(${SRCS})
set(${HDRS})
if(protobuf_generate_grpc_cpp_DESCRIPTORS)
set(${protobuf_generate_grpc_cpp_DESCRIPTORS})
endif()
foreach(_file ${_outvar})
if(_file MATCHES "cc$")
list(APPEND ${SRCS} ${_file})
elseif(_file MATCHES "desc$")
list(APPEND ${protobuf_generate_grpc_cpp_DESCRIPTORS} ${_file})
else()
list(APPEND ${HDRS} ${_file})
endif()
endforeach()
set(${SRCS} ${${SRCS}} PARENT_SCOPE)
set(${HDRS} ${${HDRS}} PARENT_SCOPE)
if(protobuf_generate_grpc_cpp_DESCRIPTORS)
set(${protobuf_generate_grpc_cpp_DESCRIPTORS} "${${protobuf_generate_grpc_cpp_DESCRIPTORS}}" PARENT_SCOPE)
endif()
endfunction()
# Helper function.
# This function is a modified version of the function protobuf_generate() copied from https://github.com/Kitware/CMake/blob/master/Modules/FindProtobuf.cmake.
function(protobuf_generate_grpc)
set(_options APPEND_PATH DESCRIPTORS)
set(_singleargs LANGUAGE OUT_VAR EXPORT_MACRO PROTOC_OUT_DIR)
if(COMMAND target_sources)
list(APPEND _singleargs TARGET)
endif()
set(_multiargs PROTOS IMPORT_DIRS GENERATE_EXTENSIONS)
cmake_parse_arguments(protobuf_generate_grpc "${_options}" "${_singleargs}" "${_multiargs}" "${ARGN}")
if(NOT protobuf_generate_grpc_PROTOS AND NOT protobuf_generate_grpc_TARGET)
message(SEND_ERROR "Error: protobuf_generate_grpc called without any targets or source files")
return()
endif()
if(NOT protobuf_generate_grpc_OUT_VAR AND NOT protobuf_generate_grpc_TARGET)
message(SEND_ERROR "Error: protobuf_generate_grpc called without a target or output variable")
return()
endif()
if(NOT protobuf_generate_grpc_LANGUAGE)
set(protobuf_generate_grpc_LANGUAGE cpp)
endif()
string(TOLOWER ${protobuf_generate_grpc_LANGUAGE} protobuf_generate_grpc_LANGUAGE)
if(NOT protobuf_generate_grpc_PROTOC_OUT_DIR)
set(protobuf_generate_grpc_PROTOC_OUT_DIR ${CMAKE_CURRENT_BINARY_DIR})
endif()
if(protobuf_generate_grpc_EXPORT_MACRO AND protobuf_generate_grpc_LANGUAGE STREQUAL cpp)
set(_dll_export_decl "dllexport_decl=${protobuf_generate_grpc_EXPORT_MACRO}:")
endif()
if(NOT protobuf_generate_grpc_GENERATE_EXTENSIONS)
if(protobuf_generate_grpc_LANGUAGE STREQUAL cpp)
set(protobuf_generate_grpc_GENERATE_EXTENSIONS .pb.h .pb.cc .grpc.pb.h .grpc.pb.cc)
elseif(protobuf_generate_grpc_LANGUAGE STREQUAL python)
set(protobuf_generate_grpc_GENERATE_EXTENSIONS _pb2.py)
else()
message(SEND_ERROR "Error: protobuf_generate_grpc given unknown Language ${LANGUAGE}, please provide a value for GENERATE_EXTENSIONS")
return()
endif()
endif()
if(NOT protobuf_generate_grpc_PLUGIN)
if(protobuf_generate_grpc_LANGUAGE STREQUAL cpp)
set(protobuf_generate_grpc_PLUGIN "grpc_cpp_plugin")
elseif(protobuf_generate_grpc_LANGUAGE STREQUAL python)
set(protobuf_generate_grpc_PLUGIN "grpc_python_plugin")
else()
message(SEND_ERROR "Error: protobuf_generate_grpc given unknown Language ${LANGUAGE}, please provide a value for PLUGIN")
return()
endif()
endif()
if(protobuf_generate_grpc_TARGET)
get_target_property(_source_list ${protobuf_generate_grpc_TARGET} SOURCES)
foreach(_file ${_source_list})
if(_file MATCHES "proto$")
list(APPEND protobuf_generate_grpc_PROTOS ${_file})
endif()
endforeach()
endif()
if(NOT protobuf_generate_grpc_PROTOS)
message(SEND_ERROR "Error: protobuf_generate_grpc could not find any .proto files")
return()
endif()
if(protobuf_generate_grpc_APPEND_PATH)
# Create an include path for each file specified
foreach(_file ${protobuf_generate_grpc_PROTOS})
get_filename_component(_abs_file ${_file} ABSOLUTE)
get_filename_component(_abs_path ${_abs_file} PATH)
list(FIND _protobuf_include_path ${_abs_path} _contains_already)
if(${_contains_already} EQUAL -1)
list(APPEND _protobuf_include_path -I ${_abs_path})
endif()
endforeach()
else()
set(_protobuf_include_path -I ${CMAKE_CURRENT_SOURCE_DIR})
endif()
foreach(DIR ${protobuf_generate_grpc_IMPORT_DIRS})
get_filename_component(ABS_PATH ${DIR} ABSOLUTE)
list(FIND _protobuf_include_path ${ABS_PATH} _contains_already)
if(${_contains_already} EQUAL -1)
list(APPEND _protobuf_include_path -I ${ABS_PATH})
endif()
endforeach()
set(_generated_srcs_all)
foreach(_proto ${protobuf_generate_grpc_PROTOS})
get_filename_component(_abs_file ${_proto} ABSOLUTE)
get_filename_component(_abs_dir ${_abs_file} DIRECTORY)
get_filename_component(_basename ${_proto} NAME_WE)
file(RELATIVE_PATH _rel_dir ${CMAKE_CURRENT_SOURCE_DIR} ${_abs_dir})
set(_possible_rel_dir)
if(NOT protobuf_generate_grpc_APPEND_PATH)
set(_possible_rel_dir ${_rel_dir}/)
endif()
set(_generated_srcs)
foreach(_ext ${protobuf_generate_grpc_GENERATE_EXTENSIONS})
list(APPEND _generated_srcs "${protobuf_generate_grpc_PROTOC_OUT_DIR}/${_possible_rel_dir}${_basename}${_ext}")
endforeach()
if(protobuf_generate_grpc_DESCRIPTORS AND protobuf_generate_grpc_LANGUAGE STREQUAL cpp)
set(_descriptor_file "${CMAKE_CURRENT_BINARY_DIR}/${_basename}.desc")
set(_dll_desc_out "--descriptor_set_out=${_descriptor_file}")
list(APPEND _generated_srcs ${_descriptor_file})
endif()
list(APPEND _generated_srcs_all ${_generated_srcs})
add_custom_command(
OUTPUT ${_generated_srcs}
COMMAND protobuf::protoc
ARGS --${protobuf_generate_grpc_LANGUAGE}_out ${_dll_export_decl}${protobuf_generate_grpc_PROTOC_OUT_DIR}
--grpc_out ${_dll_export_decl}${protobuf_generate_grpc_PROTOC_OUT_DIR}
--plugin=protoc-gen-grpc=$<TARGET_FILE:${protobuf_generate_grpc_PLUGIN}>
${_dll_desc_out} ${_protobuf_include_path} ${_abs_file}
DEPENDS ${_abs_file} protobuf::protoc ${protobuf_generate_grpc_PLUGIN}
COMMENT "Running ${protobuf_generate_grpc_LANGUAGE} protocol buffer compiler on ${_proto}"
VERBATIM)
endforeach()
set_source_files_properties(${_generated_srcs_all} PROPERTIES GENERATED TRUE)
if(protobuf_generate_grpc_OUT_VAR)
set(${protobuf_generate_grpc_OUT_VAR} ${_generated_srcs_all} PARENT_SCOPE)
endif()
if(protobuf_generate_grpc_TARGET)
target_sources(${protobuf_generate_grpc_TARGET} PRIVATE ${_generated_srcs_all})
endif()
endfunction()
# Find the libraries.
if(gRPC_USE_STATIC_LIBS)
# Support preference of static libs by adjusting CMAKE_FIND_LIBRARY_SUFFIXES
set(_gRPC_ORIG_CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES})
if(WIN32)
set(CMAKE_FIND_LIBRARY_SUFFIXES .lib .a ${CMAKE_FIND_LIBRARY_SUFFIXES})
else()
set(CMAKE_FIND_LIBRARY_SUFFIXES .a)
endif()
endif()
find_library(gRPC_LIBRARY NAMES grpc)
find_library(gRPC_CPP_LIBRARY NAMES grpc++)
find_library(gRPC_UNSECURE_LIBRARY NAMES grpc_unsecure)
find_library(gRPC_CPP_UNSECURE_LIBRARY NAMES grpc++_unsecure)
set(gRPC_LIBRARIES)
if(gRPC_USE_UNSECURE_LIBRARIES)
if(gRPC_UNSECURE_LIBRARY)
set(gRPC_LIBRARIES ${gRPC_LIBRARIES} ${gRPC_UNSECURE_LIBRARY})
endif()
if(gRPC_CPP_UNSECURE_LIBRARY)
set(gRPC_LIBRARIES ${gRPC_LIBRARIES} ${gRPC_CPP_UNSECURE_LIBRARY})
endif()
else()
if(gRPC_LIBRARY)
set(gRPC_LIBRARIES ${gRPC_LIBRARIES} ${gRPC_LIBRARY})
endif()
if(gRPC_CPP_LIBRARY)
set(gRPC_LIBRARIES ${gRPC_LIBRARIES} ${gRPC_CPP_LIBRARY})
endif()
endif()
# Restore the original find library ordering.
if(gRPC_USE_STATIC_LIBS)
set(CMAKE_FIND_LIBRARY_SUFFIXES ${_gRPC_ORIG_CMAKE_FIND_LIBRARY_SUFFIXES})
endif()
# Find the include directories.
find_path(gRPC_INCLUDE_DIR grpc/grpc.h)
find_path(gRPC_CPP_INCLUDE_DIR grpc++/grpc++.h)
if(gRPC_INCLUDE_DIR AND gRPC_CPP_INCLUDE_DIR AND NOT(gRPC_INCLUDE_DIR STREQUAL gRPC_CPP_INCLUDE_DIR))
set(gRPC_INCLUDE_DIRS ${gRPC_INCLUDE_DIR} ${gRPC_CPP_INCLUDE_DIR})
elseif(gRPC_INCLUDE_DIR)
set(gRPC_INCLUDE_DIRS ${gRPC_INCLUDE_DIR})
else()
set(gRPC_INCLUDE_DIRS ${gRPC_CPP_INCLUDE_DIR})
endif()
# Get full path to plugin.
find_program(_gRPC_CPP_PLUGIN
NAMES grpc_cpp_plugin
DOC "The plugin for generating gRPC client and server C++ stubs from `.proto` files")
find_program(_gRPC_PYTHON_PLUGIN
NAMES grpc_python_plugin
DOC "The plugin for generating gRPC client and server Python stubs from `.proto` files")
# Add imported targets.
if(gRPC_CPP_LIBRARY AND NOT TARGET grpc++)
add_library(grpc++ UNKNOWN IMPORTED)
set_target_properties(grpc++ PROPERTIES
IMPORTED_LOCATION "${gRPC_CPP_LIBRARY}")
set_target_properties(grpc++ PROPERTIES
INTERFACE_INCLUDE_DIRECTORIES ${gRPC_INCLUDE_DIRS})
endif()
if(gRPC_CPP_UNSECURE_LIBRARY AND NOT TARGET grpc++_unsecure)
add_library(grpc++_unsecure UNKNOWN IMPORTED)
set_target_properties(grpc++_unsecure PROPERTIES
IMPORTED_LOCATION "${gRPC_CPP_UNSECURE_LIBRARY}")
set_target_properties(grpc++_unsecure PROPERTIES
INTERFACE_INCLUDE_DIRECTORIES ${gRPC_INCLUDE_DIRS})
endif()
if(_gRPC_CPP_PLUGIN AND NOT TARGET grpc_cpp_plugin)
add_executable(grpc_cpp_plugin IMPORTED)
set_target_properties(grpc_cpp_plugin PROPERTIES
IMPORTED_LOCATION "${_gRPC_CPP_PLUGIN}")
endif()
if(_gRPC_PYTHON_PLUGIN AND NOT TARGET grpc_python_plugin)
add_executable(grpc_python_plugin IMPORTED)
set_target_properties(grpc_python_plugin PROPERTIES
IMPORTED_LOCATION "${_gRPC_PYTHON_PLUGIN}")
endif()
#include(FindPackageHandleStandardArgs.cmake)
FIND_PACKAGE_HANDLE_STANDARD_ARGS(gRPC
REQUIRED_VARS gRPC_LIBRARY gRPC_CPP_LIBRARY gRPC_UNSECURE_LIBRARY gRPC_CPP_UNSECURE_LIBRARY
gRPC_INCLUDE_DIR gRPC_CPP_INCLUDE_DIR _gRPC_CPP_PLUGIN _gRPC_PYTHON_PLUGIN)
if(gRPC_FOUND)
if(gRPC_DEBUG)
message(STATUS "gRPC: INCLUDE_DIRS=${gRPC_INCLUDE_DIRS}")
message(STATUS "gRPC: LIBRARIES=${gRPC_LIBRARIES}")
message(STATUS "gRPC: CPP_PLUGIN=${_gRPC_CPP_PLUGIN}")
message(STATUS "gRPC: PYTHON_PLUGIN=${_gRPC_PYTHON_PLUGIN}")
endif()
endif()


@@ -6,7 +6,7 @@ if (ENABLE_CLANG_TIDY)
message(FATAL_ERROR "clang-tidy requires CMake version at least 3.6.")
endif()
find_program (CLANG_TIDY_PATH NAMES "clang-tidy" "clang-tidy-10" "clang-tidy-9" "clang-tidy-8")
find_program (CLANG_TIDY_PATH NAMES "clang-tidy" "clang-tidy-11" "clang-tidy-10" "clang-tidy-9" "clang-tidy-8")
if (CLANG_TIDY_PATH)
message(STATUS


@@ -7,39 +7,59 @@ if (NOT ENABLE_GRPC)
return()
endif()
option (USE_INTERNAL_GRPC_LIBRARY
"Set to FALSE to use system gRPC library instead of bundled. (Experimental. Set to OFF at your own risk.)"
${NOT_UNBUNDLED})
if(NOT USE_PROTOBUF)
message(WARNING "Cannot use gRPC library without protobuf")
endif()
# Normally we use the internal gRPC framework.
# You can set USE_INTERNAL_GRPC_LIBRARY to OFF to force using the external gRPC framework, which should be installed in the system in this case.
# The external gRPC framework can be installed in the system by running
# sudo apt-get install libgrpc++-dev protobuf-compiler-grpc
option(USE_INTERNAL_GRPC_LIBRARY "Set to FALSE to use system gRPC library instead of bundled. (Experimental. Set to OFF at your own risk.)" ${NOT_UNBUNDLED})
if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/grpc/CMakeLists.txt")
if(USE_INTERNAL_GRPC_LIBRARY)
message(WARNING "submodule contrib/grpc is missing. to fix try run: \n git submodule update --init --recursive")
message(${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal grpc")
set(USE_INTERNAL_GRPC_LIBRARY 0)
endif()
set(MISSING_INTERNAL_GRPC_LIBRARY 1)
endif()
if(USE_SSL)
set(gRPC_USE_UNSECURE_LIBRARIES FALSE)
else()
set(gRPC_USE_UNSECURE_LIBRARIES TRUE)
endif()
if(NOT USE_INTERNAL_GRPC_LIBRARY)
find_package(grpc)
if (NOT GRPC_FOUND)
find_path(GRPC_INCLUDE_DIR grpcpp/grpcpp.h)
find_library(GRPC_LIBRARY grpc++)
endif ()
if (GRPC_INCLUDE_DIR AND GRPC_LIBRARY)
set (USE_GRPC ON)
find_package(gRPC)
if(NOT gRPC_INCLUDE_DIRS OR NOT gRPC_LIBRARIES)
message(${RECONFIGURE_MESSAGE_LEVEL} "Can't find system gRPC library")
set(EXTERNAL_GRPC_LIBRARY_FOUND 0)
elseif(NOT _gRPC_CPP_PLUGIN)
message(${RECONFIGURE_MESSAGE_LEVEL} "Can't find system grpc_cpp_plugin")
set(EXTERNAL_GRPC_LIBRARY_FOUND 0)
else()
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system gRPC")
set(EXTERNAL_GRPC_LIBRARY_FOUND 1)
set(USE_GRPC 1)
endif()
endif()
if (NOT USE_GRPC)
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/grpc/include/grpc++/grpc++.h")
message (WARNING "submodule contrib/grpc is missing. To fix try run: \n git submodule update --init --recursive")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal gRPC")
set (USE_INTERNAL_GRPC_LIBRARY OFF)
elseif (NOT USE_PROTOBUF)
message (WARNING "gRPC requires protobuf which is disabled")
message (${RECONFIGURE_MESSAGE_LEVEL} "Will not use internal gRPC without protobuf")
set (USE_INTERNAL_GRPC_LIBRARY OFF)
if(NOT EXTERNAL_GRPC_LIBRARY_FOUND AND NOT MISSING_INTERNAL_GRPC_LIBRARY)
set(gRPC_INCLUDE_DIRS "${ClickHouse_SOURCE_DIR}/contrib/grpc/include")
if(gRPC_USE_UNSECURE_LIBRARIES)
set(gRPC_LIBRARIES grpc_unsecure grpc++_unsecure)
else()
set (GRPC_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/grpc/include")
set (GRPC_LIBRARY "libgrpc++")
set (USE_GRPC ON)
set (USE_INTERNAL_GRPC_LIBRARY ON)
set(gRPC_LIBRARIES grpc grpc++)
endif()
set(_gRPC_CPP_PLUGIN $<TARGET_FILE:grpc_cpp_plugin>)
set(_gRPC_PROTOC_EXECUTABLE $<TARGET_FILE:protobuf::protoc>)
include("${ClickHouse_SOURCE_DIR}/contrib/grpc-cmake/protobuf_generate_grpc.cmake")
set(USE_INTERNAL_GRPC_LIBRARY 1)
set(USE_GRPC 1)
endif()
message (STATUS "Using gRPC=${USE_GRPC}: ${GRPC_INCLUDE_DIR} : ${GRPC_LIBRARY}")
message(STATUS "Using gRPC=${USE_GRPC}: ${gRPC_INCLUDE_DIRS} : ${gRPC_LIBRARIES} : ${_gRPC_CPP_PLUGIN}")


@@ -26,7 +26,7 @@ endif ()
if (NOT USE_INTERNAL_LLVM_LIBRARY)
set (LLVM_PATHS "/usr/local/lib/llvm")
foreach(llvm_v 9 8)
foreach(llvm_v 10 9 8)
if (NOT LLVM_FOUND)
find_package (LLVM ${llvm_v} CONFIG PATHS ${LLVM_PATHS})
endif ()


@@ -7,7 +7,11 @@ if(NOT ENABLE_PROTOBUF)
return()
endif()
option(USE_INTERNAL_PROTOBUF_LIBRARY "Set to FALSE to use system protobuf instead of bundled" ${NOT_UNBUNDLED})
# Normally we use the internal protobuf library.
# You can set USE_INTERNAL_PROTOBUF_LIBRARY to OFF to force using the external protobuf library, which should be installed in the system in this case.
# The external protobuf library can be installed in the system by running
# sudo apt-get install libprotobuf-dev protobuf-compiler libprotoc-dev
option(USE_INTERNAL_PROTOBUF_LIBRARY "Set to FALSE to use system protobuf instead of bundled. (Experimental. Set to OFF at your own risk.)" ${NOT_UNBUNDLED})
if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/protobuf/cmake/CMakeLists.txt")
if(USE_INTERNAL_PROTOBUF_LIBRARY)
@@ -20,25 +24,28 @@ endif()
if(NOT USE_INTERNAL_PROTOBUF_LIBRARY)
find_package(Protobuf)
if (Protobuf_LIBRARY AND Protobuf_INCLUDE_DIR AND Protobuf_PROTOC_EXECUTABLE)
if(NOT Protobuf_INCLUDE_DIR OR NOT Protobuf_LIBRARY)
message(${RECONFIGURE_MESSAGE_LEVEL} "Can't find system protobuf library")
set(EXTERNAL_PROTOBUF_LIBRARY_FOUND 0)
elseif(NOT Protobuf_PROTOC_EXECUTABLE)
message(${RECONFIGURE_MESSAGE_LEVEL} "Can't find system protobuf compiler")
set(EXTERNAL_PROTOBUF_LIBRARY_FOUND 0)
else()
set(EXTERNAL_PROTOBUF_LIBRARY_FOUND 1)
set(USE_PROTOBUF 1)
else()
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system protobuf")
set(EXTERNAL_PROTOBUF_LIBRARY_FOUND 0)
endif()
endif()
if(NOT EXTERNAL_PROTOBUF_LIBRARY_FOUND AND NOT MISSING_INTERNAL_PROTOBUF_LIBRARY)
set(Protobuf_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/protobuf/src")
set(USE_PROTOBUF 1)
set(USE_INTERNAL_PROTOBUF_LIBRARY 1)
set(Protobuf_LIBRARY libprotobuf)
set(Protobuf_PROTOC_LIBRARY libprotoc)
set(Protobuf_LITE_LIBRARY libprotobuf-lite)
set(Protobuf_PROTOC_EXECUTABLE "$<TARGET_FILE:protoc>")
set(Protobuf_PROTOC_LIBRARY libprotoc)
include("${ClickHouse_SOURCE_DIR}/contrib/protobuf-cmake/protobuf_generate.cmake")
set(USE_INTERNAL_PROTOBUF_LIBRARY 1)
set(USE_PROTOBUF 1)
endif()
if(OS_FREEBSD AND SANITIZE STREQUAL "address")
@@ -52,6 +59,4 @@ if(OS_FREEBSD AND SANITIZE STREQUAL "address")
endif()
endif()
include ("${ClickHouse_SOURCE_DIR}/cmake/protobuf_generate_cpp.cmake")
message(STATUS "Using protobuf=${USE_PROTOBUF}: ${Protobuf_INCLUDE_DIR} : ${Protobuf_LIBRARY} : ${Protobuf_PROTOC_EXECUTABLE}")
message(STATUS "Using protobuf=${USE_PROTOBUF}: ${Protobuf_INCLUDE_DIR} : ${Protobuf_LIBRARY} : ${Protobuf_PROTOC_EXECUTABLE} : ${Protobuf_PROTOC_LIBRARY}")


@@ -14,10 +14,10 @@ if (NOT ENABLE_RDKAFKA)
return()
endif()
if (NOT ARCH_ARM AND USE_LIBGSASL)
if (NOT ARCH_ARM)
option (USE_INTERNAL_RDKAFKA_LIBRARY "Set to FALSE to use system librdkafka instead of the bundled" ${NOT_UNBUNDLED})
elseif(USE_INTERNAL_RDKAFKA_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal librdkafka with ARCH_ARM=${ARCH_ARM} AND USE_LIBGSASL=${USE_LIBGSASL}")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal librdkafka with ARCH_ARM=${ARCH_ARM}")
endif ()
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/cppkafka/CMakeLists.txt")


@@ -1,172 +0,0 @@
# This file declares functions adding custom commands for generating C++ files from *.proto files:
# function (protobuf_generate_cpp SRCS HDRS)
# function (protobuf_generate_grpc_cpp SRCS HDRS)
if (NOT USE_PROTOBUF)
message (WARNING "Could not use protobuf_generate_cpp() without the protobuf library")
return()
endif()
if (NOT DEFINED PROTOBUF_PROTOC_EXECUTABLE)
set (PROTOBUF_PROTOC_EXECUTABLE "$<TARGET_FILE:protoc>")
endif()
if (NOT DEFINED GRPC_CPP_PLUGIN_EXECUTABLE)
set (GRPC_CPP_PLUGIN_EXECUTABLE $<TARGET_FILE:grpc_cpp_plugin>)
endif()
if (NOT DEFINED PROTOBUF_GENERATE_CPP_APPEND_PATH)
set (PROTOBUF_GENERATE_CPP_APPEND_PATH TRUE)
endif()
function(protobuf_generate_cpp_impl SRCS HDRS MODES OUTPUT_FILE_EXTS PLUGIN)
if(NOT ARGN)
message(SEND_ERROR "Error: protobuf_generate_cpp() called without any proto files")
return()
endif()
if(PROTOBUF_GENERATE_CPP_APPEND_PATH)
# Create an include path for each file specified
foreach(FIL ${ARGN})
get_filename_component(ABS_FIL ${FIL} ABSOLUTE)
get_filename_component(ABS_PATH ${ABS_FIL} PATH)
list(FIND protobuf_include_path ${ABS_PATH} _contains_already)
if(${_contains_already} EQUAL -1)
list(APPEND protobuf_include_path -I ${ABS_PATH})
endif()
endforeach()
else()
set(protobuf_include_path -I ${CMAKE_CURRENT_SOURCE_DIR})
endif()
if(DEFINED PROTOBUF_IMPORT_DIRS AND NOT DEFINED Protobuf_IMPORT_DIRS)
set(Protobuf_IMPORT_DIRS "${PROTOBUF_IMPORT_DIRS}")
endif()
if(DEFINED Protobuf_IMPORT_DIRS)
foreach(DIR ${Protobuf_IMPORT_DIRS})
get_filename_component(ABS_PATH ${DIR} ABSOLUTE)
list(FIND protobuf_include_path ${ABS_PATH} _contains_already)
if(${_contains_already} EQUAL -1)
list(APPEND protobuf_include_path -I ${ABS_PATH})
endif()
endforeach()
endif()
set (intermediate_dir ${CMAKE_CURRENT_BINARY_DIR}/intermediate)
file (MAKE_DIRECTORY ${intermediate_dir})
set (protoc_args)
foreach (mode ${MODES})
list (APPEND protoc_args "--${mode}_out" ${intermediate_dir})
endforeach()
if (PLUGIN)
list (APPEND protoc_args "--plugin=${PLUGIN}")
endif()
set(srcs)
set(hdrs)
set(all_intermediate_outputs)
foreach(input_name ${ARGN})
get_filename_component(abs_name ${input_name} ABSOLUTE)
get_filename_component(name ${input_name} NAME_WE)
set (intermediate_outputs)
foreach (ext ${OUTPUT_FILE_EXTS})
set (filename "${name}${ext}")
set (output "${CMAKE_CURRENT_BINARY_DIR}/${filename}")
set (intermediate_output "${intermediate_dir}/${filename}")
list (APPEND intermediate_outputs "${intermediate_output}")
list (APPEND all_intermediate_outputs "${intermediate_output}")
if (${ext} MATCHES ".*\\.h")
list(APPEND hdrs "${output}")
else()
list(APPEND srcs "${output}")
endif()
add_custom_command(
OUTPUT ${output}
COMMAND ${CMAKE_COMMAND} -DPROTOBUF_GENERATE_CPP_SCRIPT_MODE=1 -DUSE_PROTOBUF=1 -DDIR=${CMAKE_CURRENT_BINARY_DIR} -DFILENAME=${filename} -DCOMPILER_ID=${CMAKE_CXX_COMPILER_ID} -P ${ClickHouse_SOURCE_DIR}/cmake/protobuf_generate_cpp.cmake
DEPENDS ${intermediate_output})
endforeach()
add_custom_command(
OUTPUT ${intermediate_outputs}
COMMAND ${Protobuf_PROTOC_EXECUTABLE}
ARGS ${protobuf_include_path} ${protoc_args} ${abs_name}
DEPENDS ${abs_name} ${Protobuf_PROTOC_EXECUTABLE} ${PLUGIN}
COMMENT "Running C++ protocol buffer compiler on ${name}"
VERBATIM )
endforeach()
set_source_files_properties(${srcs} ${hdrs} ${all_intermediate_outputs} PROPERTIES GENERATED TRUE)
set(${SRCS} ${srcs} PARENT_SCOPE)
set(${HDRS} ${hdrs} PARENT_SCOPE)
endfunction()
if (PROTOBUF_GENERATE_CPP_SCRIPT_MODE)
set (output "${DIR}/${FILENAME}")
set (intermediate_dir ${DIR}/intermediate)
set (intermediate_output "${intermediate_dir}/${FILENAME}")
if (COMPILER_ID MATCHES "Clang")
set (pragma_push "#pragma clang diagnostic push\n")
set (pragma_pop "#pragma clang diagnostic pop\n")
set (pragma_disable_warnings "#pragma clang diagnostic ignored \"-Weverything\"\n")
elseif (COMPILER_ID MATCHES "GNU")
set (pragma_push "#pragma GCC diagnostic push\n")
set (pragma_pop "#pragma GCC diagnostic pop\n")
set (pragma_disable_warnings "#pragma GCC diagnostic ignored \"-Wall\"\n"
"#pragma GCC diagnostic ignored \"-Wextra\"\n"
"#pragma GCC diagnostic ignored \"-Warray-bounds\"\n"
"#pragma GCC diagnostic ignored \"-Wold-style-cast\"\n"
"#pragma GCC diagnostic ignored \"-Wshadow\"\n"
"#pragma GCC diagnostic ignored \"-Wsuggest-override\"\n"
"#pragma GCC diagnostic ignored \"-Wcast-qual\"\n"
"#pragma GCC diagnostic ignored \"-Wunused-parameter\"\n")
endif()
if (${FILENAME} MATCHES ".*\\.h")
file(WRITE "${output}"
"#pragma once\n"
${pragma_push}
${pragma_disable_warnings}
"#include \"${intermediate_output}\"\n"
${pragma_pop}
)
else()
file(WRITE "${output}"
${pragma_disable_warnings}
"#include \"${intermediate_output}\"\n"
)
endif()
return()
endif()
function(protobuf_generate_cpp SRCS HDRS)
set (modes cpp)
set (output_file_exts ".pb.cc" ".pb.h")
set (plugin)
protobuf_generate_cpp_impl(srcs hdrs "${modes}" "${output_file_exts}" "${plugin}" ${ARGN})
set(${SRCS} ${srcs} PARENT_SCOPE)
set(${HDRS} ${hdrs} PARENT_SCOPE)
endfunction()
function(protobuf_generate_grpc_cpp SRCS HDRS)
set (modes cpp grpc)
set (output_file_exts ".pb.cc" ".pb.h" ".grpc.pb.cc" ".grpc.pb.h")
set (plugin "protoc-gen-grpc=${GRPC_CPP_PLUGIN_EXECUTABLE}")
protobuf_generate_cpp_impl(srcs hdrs "${modes}" "${output_file_exts}" "${plugin}" ${ARGN})
set(${SRCS} ${srcs} PARENT_SCOPE)
set(${HDRS} ${hdrs} PARENT_SCOPE)
endfunction()


@@ -31,6 +31,7 @@ if (COMPILER_CLANG)
add_warning(pedantic)
no_warning(vla-extension)
no_warning(zero-length-array)
no_warning(c11-extensions)
add_warning(comma)
add_warning(conditional-uninitialized)

contrib/AMQP-CPP

@@ -1 +1 @@
Subproject commit 1c08399ab0ab9e4042ef8e2bbe9e208e5dcbc13b
Subproject commit d63e1f016582e9faaaf279aa24513087a07bc6e7


@@ -1,4 +1,4 @@
#include <FastMemcpy.h>
#include "FastMemcpy.h"
void * memcpy(void * __restrict destination, const void * __restrict source, size_t size)
{


@@ -16,6 +16,7 @@ set (SRCS
${LIBRARY_DIR}/src/flags.cpp
${LIBRARY_DIR}/src/linux_tcp/openssl.cpp
${LIBRARY_DIR}/src/linux_tcp/tcpconnection.cpp
${LIBRARY_DIR}/src/inbuffer.cpp
${LIBRARY_DIR}/src/receivedframe.cpp
${LIBRARY_DIR}/src/table.cpp
${LIBRARY_DIR}/src/watchable.cpp

contrib/cyrus-sasl

@@ -1 +1 @@
Subproject commit 6054630889fd1cd8d0659573d69badcee1e23a00
Subproject commit 9995bf9d8e14f58934d9313ac64f13780d6dd3c9

contrib/grpc

@@ -1 +1 @@
Subproject commit 8aea4e168e78f3eb9828080740fc8cb73d53bf79
Subproject commit a6570b863cf76c9699580ba51c7827d5bffaac43

File diff suppressed because it is too large.


@@ -0,0 +1,207 @@
#[[
Add custom commands to process ``.proto`` files to C++::
protobuf_generate_grpc_cpp(<SRCS> <HDRS>
[DESCRIPTORS <DESC>] [EXPORT_MACRO <MACRO>] [<ARGN>...])
``SRCS``
Variable to define with autogenerated source files
``HDRS``
Variable to define with autogenerated header files
``DESCRIPTORS``
Variable to define with autogenerated descriptor files, if requested.
``EXPORT_MACRO``
is a macro which should expand to ``__declspec(dllexport)`` or
``__declspec(dllimport)`` depending on what is being compiled.
``ARGN``
``.proto`` files
#]]
# Function to generate C++ files from .proto files.
# This function is a modified version of the function PROTOBUF_GENERATE_CPP() copied from https://github.com/Kitware/CMake/blob/master/Modules/FindProtobuf.cmake.
function(PROTOBUF_GENERATE_GRPC_CPP SRCS HDRS)
cmake_parse_arguments(protobuf_generate_grpc_cpp "" "EXPORT_MACRO;DESCRIPTORS" "" ${ARGN})
set(_proto_files "${protobuf_generate_grpc_cpp_UNPARSED_ARGUMENTS}")
if(NOT _proto_files)
message(SEND_ERROR "Error: PROTOBUF_GENERATE_GRPC_CPP() called without any proto files")
return()
endif()
if(PROTOBUF_GENERATE_GRPC_CPP_APPEND_PATH)
set(_append_arg APPEND_PATH)
endif()
if(protobuf_generate_grpc_cpp_DESCRIPTORS)
set(_descriptors DESCRIPTORS)
endif()
if(DEFINED PROTOBUF_IMPORT_DIRS AND NOT DEFINED Protobuf_IMPORT_DIRS)
set(Protobuf_IMPORT_DIRS "${PROTOBUF_IMPORT_DIRS}")
endif()
if(DEFINED Protobuf_IMPORT_DIRS)
set(_import_arg IMPORT_DIRS ${Protobuf_IMPORT_DIRS})
endif()
set(_outvar)
protobuf_generate_grpc(${_append_arg} ${_descriptors} LANGUAGE cpp EXPORT_MACRO ${protobuf_generate_cpp_EXPORT_MACRO} OUT_VAR _outvar ${_import_arg} PROTOS ${_proto_files})
set(${SRCS})
set(${HDRS})
if(protobuf_generate_grpc_cpp_DESCRIPTORS)
set(${protobuf_generate_grpc_cpp_DESCRIPTORS})
endif()
foreach(_file ${_outvar})
if(_file MATCHES "cc$")
list(APPEND ${SRCS} ${_file})
elseif(_file MATCHES "desc$")
list(APPEND ${protobuf_generate_grpc_cpp_DESCRIPTORS} ${_file})
else()
list(APPEND ${HDRS} ${_file})
endif()
endforeach()
set(${SRCS} ${${SRCS}} PARENT_SCOPE)
set(${HDRS} ${${HDRS}} PARENT_SCOPE)
if(protobuf_generate_grpc_cpp_DESCRIPTORS)
set(${protobuf_generate_grpc_cpp_DESCRIPTORS} "${${protobuf_generate_grpc_cpp_DESCRIPTORS}}" PARENT_SCOPE)
endif()
endfunction()
# Helper function.
# This function is a modified version of the function protobuf_generate() copied from https://github.com/Kitware/CMake/blob/master/Modules/FindProtobuf.cmake.
function(protobuf_generate_grpc)
set(_options APPEND_PATH DESCRIPTORS)
set(_singleargs LANGUAGE OUT_VAR EXPORT_MACRO PROTOC_OUT_DIR)
if(COMMAND target_sources)
list(APPEND _singleargs TARGET)
endif()
set(_multiargs PROTOS IMPORT_DIRS GENERATE_EXTENSIONS)
cmake_parse_arguments(protobuf_generate_grpc "${_options}" "${_singleargs}" "${_multiargs}" "${ARGN}")
if(NOT protobuf_generate_grpc_PROTOS AND NOT protobuf_generate_grpc_TARGET)
message(SEND_ERROR "Error: protobuf_generate_grpc called without any targets or source files")
return()
endif()
if(NOT protobuf_generate_grpc_OUT_VAR AND NOT protobuf_generate_grpc_TARGET)
message(SEND_ERROR "Error: protobuf_generate_grpc called without a target or output variable")
return()
endif()
if(NOT protobuf_generate_grpc_LANGUAGE)
set(protobuf_generate_grpc_LANGUAGE cpp)
endif()
string(TOLOWER ${protobuf_generate_grpc_LANGUAGE} protobuf_generate_grpc_LANGUAGE)
if(NOT protobuf_generate_grpc_PROTOC_OUT_DIR)
set(protobuf_generate_grpc_PROTOC_OUT_DIR ${CMAKE_CURRENT_BINARY_DIR})
endif()
if(protobuf_generate_grpc_EXPORT_MACRO AND protobuf_generate_grpc_LANGUAGE STREQUAL cpp)
set(_dll_export_decl "dllexport_decl=${protobuf_generate_grpc_EXPORT_MACRO}:")
endif()
if(NOT protobuf_generate_grpc_GENERATE_EXTENSIONS)
if(protobuf_generate_grpc_LANGUAGE STREQUAL cpp)
set(protobuf_generate_grpc_GENERATE_EXTENSIONS .pb.h .pb.cc .grpc.pb.h .grpc.pb.cc)
elseif(protobuf_generate_grpc_LANGUAGE STREQUAL python)
set(protobuf_generate_grpc_GENERATE_EXTENSIONS _pb2.py)
else()
message(SEND_ERROR "Error: protobuf_generate_grpc given unknown Language ${LANGUAGE}, please provide a value for GENERATE_EXTENSIONS")
return()
endif()
endif()
if(NOT protobuf_generate_grpc_PLUGIN)
if(protobuf_generate_grpc_LANGUAGE STREQUAL cpp)
set(protobuf_generate_grpc_PLUGIN "grpc_cpp_plugin")
elseif(protobuf_generate_grpc_LANGUAGE STREQUAL python)
set(protobuf_generate_grpc_PLUGIN "grpc_python_plugin")
else()
message(SEND_ERROR "Error: protobuf_generate_grpc given unknown Language ${LANGUAGE}, please provide a value for PLUGIN")
return()
endif()
endif()
if(protobuf_generate_grpc_TARGET)
get_target_property(_source_list ${protobuf_generate_grpc_TARGET} SOURCES)
foreach(_file ${_source_list})
if(_file MATCHES "proto$")
list(APPEND protobuf_generate_grpc_PROTOS ${_file})
endif()
endforeach()
endif()
if(NOT protobuf_generate_grpc_PROTOS)
message(SEND_ERROR "Error: protobuf_generate_grpc could not find any .proto files")
return()
endif()
if(protobuf_generate_grpc_APPEND_PATH)
# Create an include path for each file specified
foreach(_file ${protobuf_generate_grpc_PROTOS})
get_filename_component(_abs_file ${_file} ABSOLUTE)
get_filename_component(_abs_path ${_abs_file} PATH)
list(FIND _protobuf_include_path ${_abs_path} _contains_already)
if(${_contains_already} EQUAL -1)
list(APPEND _protobuf_include_path -I ${_abs_path})
endif()
endforeach()
else()
set(_protobuf_include_path -I ${CMAKE_CURRENT_SOURCE_DIR})
endif()
foreach(DIR ${protobuf_generate_grpc_IMPORT_DIRS})
get_filename_component(ABS_PATH ${DIR} ABSOLUTE)
list(FIND _protobuf_include_path ${ABS_PATH} _contains_already)
if(${_contains_already} EQUAL -1)
list(APPEND _protobuf_include_path -I ${ABS_PATH})
endif()
endforeach()
set(_generated_srcs_all)
foreach(_proto ${protobuf_generate_grpc_PROTOS})
get_filename_component(_abs_file ${_proto} ABSOLUTE)
get_filename_component(_abs_dir ${_abs_file} DIRECTORY)
get_filename_component(_basename ${_proto} NAME_WE)
file(RELATIVE_PATH _rel_dir ${CMAKE_CURRENT_SOURCE_DIR} ${_abs_dir})
set(_possible_rel_dir)
if(NOT protobuf_generate_grpc_APPEND_PATH)
set(_possible_rel_dir ${_rel_dir}/)
endif()
set(_generated_srcs)
foreach(_ext ${protobuf_generate_grpc_GENERATE_EXTENSIONS})
list(APPEND _generated_srcs "${protobuf_generate_grpc_PROTOC_OUT_DIR}/${_possible_rel_dir}${_basename}${_ext}")
endforeach()
if(protobuf_generate_grpc_DESCRIPTORS AND protobuf_generate_grpc_LANGUAGE STREQUAL cpp)
set(_descriptor_file "${CMAKE_CURRENT_BINARY_DIR}/${_basename}.desc")
set(_dll_desc_out "--descriptor_set_out=${_descriptor_file}")
list(APPEND _generated_srcs ${_descriptor_file})
endif()
list(APPEND _generated_srcs_all ${_generated_srcs})
add_custom_command(
OUTPUT ${_generated_srcs}
COMMAND protobuf::protoc
ARGS --${protobuf_generate_grpc_LANGUAGE}_out ${_dll_export_decl}${protobuf_generate_grpc_PROTOC_OUT_DIR}
--grpc_out ${_dll_export_decl}${protobuf_generate_grpc_PROTOC_OUT_DIR}
--plugin=protoc-gen-grpc=$<TARGET_FILE:${protobuf_generate_grpc_PLUGIN}>
${_dll_desc_out} ${_protobuf_include_path} ${_abs_file}
DEPENDS ${_abs_file} protobuf::protoc ${protobuf_generate_grpc_PLUGIN}
COMMENT "Running ${protobuf_generate_grpc_LANGUAGE} protocol buffer compiler on ${_proto}"
VERBATIM)
endforeach()
set_source_files_properties(${_generated_srcs_all} PROPERTIES GENERATED TRUE)
if(protobuf_generate_grpc_OUT_VAR)
set(${protobuf_generate_grpc_OUT_VAR} ${_generated_srcs_all} PARENT_SCOPE)
endif()
if(protobuf_generate_grpc_TARGET)
target_sources(${protobuf_generate_grpc_TARGET} PRIVATE ${_generated_srcs_all})
endif()
endfunction()

contrib/libhdfs3

@@ -1 +1 @@
Subproject commit 1b666578c85094306b061352078022f6350bfab8
Subproject commit 24b058c356794ef6cc2d31323dc9adf0386652ff

@@ -1 +1 @@
Subproject commit 3f512fedf0ba0f769a1b4852b4bac542d92c5b20
Subproject commit f5638e954a79f50bac7c7a5deaa5a241e0ce8b5f

contrib/poco

@@ -1 +1 @@
Subproject commit 297fc905e166392156f83b96aaa5f44e8a6a35c4
Subproject commit 757d947235b307675cff964f29b19d388140a9eb


@@ -0,0 +1,198 @@
# The code in this file was copied from https://github.com/Kitware/CMake/blob/master/Modules/FindProtobuf.cmake
#[[
Add custom commands to process ``.proto`` files to C++::
protobuf_generate_cpp (<SRCS> <HDRS>
[DESCRIPTORS <DESC>] [EXPORT_MACRO <MACRO>] [<ARGN>...])
``SRCS``
Variable to define with autogenerated source files
``HDRS``
Variable to define with autogenerated header files
``DESCRIPTORS``
Variable to define with autogenerated descriptor files, if requested.
``EXPORT_MACRO``
is a macro which should expand to ``__declspec(dllexport)`` or
``__declspec(dllimport)`` depending on what is being compiled.
``ARGN``
``.proto`` files
#]]
function(PROTOBUF_GENERATE_CPP SRCS HDRS)
cmake_parse_arguments(protobuf_generate_cpp "" "EXPORT_MACRO;DESCRIPTORS" "" ${ARGN})
set(_proto_files "${protobuf_generate_cpp_UNPARSED_ARGUMENTS}")
if(NOT _proto_files)
message(SEND_ERROR "Error: PROTOBUF_GENERATE_CPP() called without any proto files")
return()
endif()
if(PROTOBUF_GENERATE_CPP_APPEND_PATH)
set(_append_arg APPEND_PATH)
endif()
if(protobuf_generate_cpp_DESCRIPTORS)
set(_descriptors DESCRIPTORS)
endif()
if(DEFINED PROTOBUF_IMPORT_DIRS AND NOT DEFINED Protobuf_IMPORT_DIRS)
set(Protobuf_IMPORT_DIRS "${PROTOBUF_IMPORT_DIRS}")
endif()
if(DEFINED Protobuf_IMPORT_DIRS)
set(_import_arg IMPORT_DIRS ${Protobuf_IMPORT_DIRS})
endif()
set(_outvar)
protobuf_generate(${_append_arg} ${_descriptors} LANGUAGE cpp EXPORT_MACRO ${protobuf_generate_cpp_EXPORT_MACRO} OUT_VAR _outvar ${_import_arg} PROTOS ${_proto_files})
set(${SRCS})
set(${HDRS})
if(protobuf_generate_cpp_DESCRIPTORS)
set(${protobuf_generate_cpp_DESCRIPTORS})
endif()
foreach(_file ${_outvar})
if(_file MATCHES "cc$")
list(APPEND ${SRCS} ${_file})
elseif(_file MATCHES "desc$")
list(APPEND ${protobuf_generate_cpp_DESCRIPTORS} ${_file})
else()
list(APPEND ${HDRS} ${_file})
endif()
endforeach()
set(${SRCS} ${${SRCS}} PARENT_SCOPE)
set(${HDRS} ${${HDRS}} PARENT_SCOPE)
if(protobuf_generate_cpp_DESCRIPTORS)
set(${protobuf_generate_cpp_DESCRIPTORS} "${${protobuf_generate_cpp_DESCRIPTORS}}" PARENT_SCOPE)
endif()
endfunction()
# By default have PROTOBUF_GENERATE_CPP macro pass -I to protoc
# for each directory where a proto file is referenced.
if(NOT DEFINED PROTOBUF_GENERATE_CPP_APPEND_PATH)
set(PROTOBUF_GENERATE_CPP_APPEND_PATH TRUE)
endif()
function(protobuf_generate)
set(_options APPEND_PATH DESCRIPTORS)
set(_singleargs LANGUAGE OUT_VAR EXPORT_MACRO PROTOC_OUT_DIR)
if(COMMAND target_sources)
list(APPEND _singleargs TARGET)
endif()
set(_multiargs PROTOS IMPORT_DIRS GENERATE_EXTENSIONS)
cmake_parse_arguments(protobuf_generate "${_options}" "${_singleargs}" "${_multiargs}" "${ARGN}")
if(NOT protobuf_generate_PROTOS AND NOT protobuf_generate_TARGET)
message(SEND_ERROR "Error: protobuf_generate called without any targets or source files")
return()
endif()
if(NOT protobuf_generate_OUT_VAR AND NOT protobuf_generate_TARGET)
message(SEND_ERROR "Error: protobuf_generate called without a target or output variable")
return()
endif()
if(NOT protobuf_generate_LANGUAGE)
set(protobuf_generate_LANGUAGE cpp)
endif()
string(TOLOWER ${protobuf_generate_LANGUAGE} protobuf_generate_LANGUAGE)
if(NOT protobuf_generate_PROTOC_OUT_DIR)
set(protobuf_generate_PROTOC_OUT_DIR ${CMAKE_CURRENT_BINARY_DIR})
endif()
if(protobuf_generate_EXPORT_MACRO AND protobuf_generate_LANGUAGE STREQUAL cpp)
set(_dll_export_decl "dllexport_decl=${protobuf_generate_EXPORT_MACRO}:")
endif()
if(NOT protobuf_generate_GENERATE_EXTENSIONS)
if(protobuf_generate_LANGUAGE STREQUAL cpp)
set(protobuf_generate_GENERATE_EXTENSIONS .pb.h .pb.cc)
elseif(protobuf_generate_LANGUAGE STREQUAL python)
set(protobuf_generate_GENERATE_EXTENSIONS _pb2.py)
else()
message(SEND_ERROR "Error: protobuf_generate given unknown Language ${LANGUAGE}, please provide a value for GENERATE_EXTENSIONS")
return()
endif()
endif()
if(protobuf_generate_TARGET)
get_target_property(_source_list ${protobuf_generate_TARGET} SOURCES)
foreach(_file ${_source_list})
if(_file MATCHES "proto$")
list(APPEND protobuf_generate_PROTOS ${_file})
endif()
endforeach()
endif()
if(NOT protobuf_generate_PROTOS)
message(SEND_ERROR "Error: protobuf_generate could not find any .proto files")
return()
endif()
if(protobuf_generate_APPEND_PATH)
# Create an include path for each file specified
foreach(_file ${protobuf_generate_PROTOS})
get_filename_component(_abs_file ${_file} ABSOLUTE)
get_filename_component(_abs_path ${_abs_file} PATH)
list(FIND _protobuf_include_path ${_abs_path} _contains_already)
if(${_contains_already} EQUAL -1)
list(APPEND _protobuf_include_path -I ${_abs_path})
endif()
endforeach()
else()
set(_protobuf_include_path -I ${CMAKE_CURRENT_SOURCE_DIR})
endif()
foreach(DIR ${protobuf_generate_IMPORT_DIRS})
get_filename_component(ABS_PATH ${DIR} ABSOLUTE)
list(FIND _protobuf_include_path ${ABS_PATH} _contains_already)
if(${_contains_already} EQUAL -1)
list(APPEND _protobuf_include_path -I ${ABS_PATH})
endif()
endforeach()
set(_generated_srcs_all)
foreach(_proto ${protobuf_generate_PROTOS})
get_filename_component(_abs_file ${_proto} ABSOLUTE)
get_filename_component(_abs_dir ${_abs_file} DIRECTORY)
get_filename_component(_basename ${_proto} NAME_WE)
file(RELATIVE_PATH _rel_dir ${CMAKE_CURRENT_SOURCE_DIR} ${_abs_dir})
set(_possible_rel_dir)
if (NOT protobuf_generate_APPEND_PATH)
set(_possible_rel_dir ${_rel_dir}/)
endif()
set(_generated_srcs)
foreach(_ext ${protobuf_generate_GENERATE_EXTENSIONS})
list(APPEND _generated_srcs "${protobuf_generate_PROTOC_OUT_DIR}/${_possible_rel_dir}${_basename}${_ext}")
endforeach()
if(protobuf_generate_DESCRIPTORS AND protobuf_generate_LANGUAGE STREQUAL cpp)
set(_descriptor_file "${CMAKE_CURRENT_BINARY_DIR}/${_basename}.desc")
set(_dll_desc_out "--descriptor_set_out=${_descriptor_file}")
list(APPEND _generated_srcs ${_descriptor_file})
endif()
list(APPEND _generated_srcs_all ${_generated_srcs})
add_custom_command(
OUTPUT ${_generated_srcs}
COMMAND protobuf::protoc
ARGS --${protobuf_generate_LANGUAGE}_out ${_dll_export_decl}${protobuf_generate_PROTOC_OUT_DIR} ${_dll_desc_out} ${_protobuf_include_path} ${_abs_file}
DEPENDS ${_abs_file} protobuf::protoc
COMMENT "Running ${protobuf_generate_LANGUAGE} protocol buffer compiler on ${_proto}"
VERBATIM )
endforeach()
set_source_files_properties(${_generated_srcs_all} PROPERTIES GENERATED TRUE)
if(protobuf_generate_OUT_VAR)
set(${protobuf_generate_OUT_VAR} ${_generated_srcs_all} PARENT_SCOPE)
endif()
if(protobuf_generate_TARGET)
target_sources(${protobuf_generate_TARGET} PRIVATE ${_generated_srcs_all})
endif()
endfunction()

debian/control

@@ -62,5 +62,5 @@ Description: debugging symbols for clickhouse-common-static
Package: clickhouse-test
Priority: optional
Architecture: all
Depends: ${shlibs:Depends}, ${misc:Depends}, clickhouse-client, bash, expect, python, python-lxml, python-termcolor, python-requests, curl, perl, sudo, openssl, netcat-openbsd, telnet, brotli, bsdutils
Depends: ${shlibs:Depends}, ${misc:Depends}, clickhouse-client, bash, expect, python3, python3-lxml, python3-termcolor, python3-requests, curl, perl, sudo, openssl, netcat-openbsd, telnet, brotli, bsdutils
Description: ClickHouse tests


@@ -25,10 +25,10 @@ RUN apt-get update \
ninja-build \
perl \
pkg-config \
python \
python-lxml \
python-requests \
python-termcolor \
python3 \
python3-lxml \
python3-requests \
python3-termcolor \
tzdata \
llvm-${LLVM_VERSION} \
clang-${LLVM_VERSION} \


@@ -1,9 +1,10 @@
#!/usr/bin/env bash
set -e
#ccache -s # uncomment to display CCache statistics
mkdir -p /server/build_docker
cd /server/build_docker
cmake -G Ninja /server -DCMAKE_C_COMPILER=`which gcc-9` -DCMAKE_CXX_COMPILER=`which g++-9`
cmake -G Ninja /server "-DCMAKE_C_COMPILER=$(command -v gcc-9)" "-DCMAKE_CXX_COMPILER=$(command -v g++-9)"
# Set the number of build jobs to the half of number of virtual CPU cores (rounded up).
# By default, Ninja uses all virtual CPU cores, which leads to very high memory consumption without much improvement in build time.


@@ -9,7 +9,8 @@
"name": "yandex/clickhouse-binary-builder",
"dependent": [
"docker/test/split_build_smoke_test",
"docker/test/pvs"
"docker/test/pvs",
"docker/test/codebrowser"
]
},
"docker/packager/unbundled": {
@@ -133,6 +134,10 @@
"name": "yandex/clickhouse-postgresql-java-client",
"dependent": []
},
"docker/test/integration/kerberos_kdc": {
"name": "yandex/clickhouse-kerberos-kdc",
"dependent": []
},
"docker/test/base": {
"name": "yandex/clickhouse-test-base",
"dependent": [


@@ -1,7 +1,7 @@
# docker build -t yandex/clickhouse-binary-builder .
FROM ubuntu:20.04
ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=10
ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=11
RUN apt-get update \
&& apt-get install ca-certificates lsb-release wget gnupg apt-transport-https \
@@ -11,7 +11,7 @@ RUN apt-get update \
&& echo "${LLVM_PUBKEY_HASH} /tmp/llvm-snapshot.gpg.key" | sha384sum -c \
&& apt-key add /tmp/llvm-snapshot.gpg.key \
&& export CODENAME="$(lsb_release --codename --short | tr 'A-Z' 'a-z')" \
&& echo "deb [trusted=yes] http://apt.llvm.org/${CODENAME}/ llvm-toolchain-${CODENAME}-11 main" >> \
&& echo "deb [trusted=yes] http://apt.llvm.org/${CODENAME}/ llvm-toolchain-${CODENAME}-${LLVM_VERSION} main" >> \
/etc/apt/sources.list
# initial packages
@@ -32,10 +32,11 @@ RUN apt-get update \
curl \
gcc-9 \
g++-9 \
llvm-${LLVM_VERSION} \
clang-${LLVM_VERSION} \
lld-${LLVM_VERSION} \
clang-tidy-${LLVM_VERSION} \
clang-10 \
clang-tidy-10 \
lld-10 \
llvm-10 \
llvm-10-dev \
clang-11 \
clang-tidy-11 \
lld-11 \


@@ -17,7 +17,10 @@ ccache --show-stats ||:
ccache --zero-stats ||:
ln -s /usr/lib/x86_64-linux-gnu/libOpenCL.so.1.0.0 /usr/lib/libOpenCL.so ||:
rm -f CMakeCache.txt
cmake --debug-trycompile --verbose=1 -DCMAKE_VERBOSE_MAKEFILE=1 -LA -DCMAKE_BUILD_TYPE=$BUILD_TYPE -DSANITIZE=$SANITIZER -DENABLE_CHECK_HEAVY_BUILDS=1 $CMAKE_FLAGS ..
# Read cmake arguments into array (possibly empty)
read -ra CMAKE_FLAGS <<< "${CMAKE_FLAGS:-}"
cmake --debug-trycompile --verbose=1 -DCMAKE_VERBOSE_MAKEFILE=1 -LA "-DCMAKE_BUILD_TYPE=$BUILD_TYPE" "-DSANITIZE=$SANITIZER" -DENABLE_CHECK_HEAVY_BUILDS=1 "${CMAKE_FLAGS[@]}" ..
# shellcheck disable=SC2086 # No quotes because I want it to expand to nothing if empty.
ninja $NINJA_FLAGS clickhouse-bundle
mv ./programs/clickhouse* /output
mv ./src/unit_tests_dbms /output ||: # may not exist for some binary builds


@@ -1,7 +1,7 @@
# docker build -t yandex/clickhouse-deb-builder .
FROM ubuntu:20.04
ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=10
ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=11
RUN apt-get update \
&& apt-get install ca-certificates lsb-release wget gnupg apt-transport-https \
@@ -11,7 +11,7 @@ RUN apt-get update \
&& echo "${LLVM_PUBKEY_HASH} /tmp/llvm-snapshot.gpg.key" | sha384sum -c \
&& apt-key add /tmp/llvm-snapshot.gpg.key \
&& export CODENAME="$(lsb_release --codename --short | tr 'A-Z' 'a-z')" \
&& echo "deb [trusted=yes] http://apt.llvm.org/${CODENAME}/ llvm-toolchain-${CODENAME}-11 main" >> \
&& echo "deb [trusted=yes] http://apt.llvm.org/${CODENAME}/ llvm-toolchain-${CODENAME}-${LLVM_VERSION} main" >> \
/etc/apt/sources.list
# initial packages
@@ -49,15 +49,11 @@ RUN apt-get update \
lld-11 \
llvm-11 \
llvm-11-dev \
clang-${LLVM_VERSION} \
clang-tidy-${LLVM_VERSION} \
lld-${LLVM_VERSION} \
llvm-${LLVM_VERSION} \
llvm-${LLVM_VERSION}-dev \
llvm-9-dev \
lld-9 \
clang-9 \
clang-tidy-9 \
clang-10 \
clang-tidy-10 \
lld-10 \
llvm-10 \
llvm-10-dev \
ninja-build \
perl \
pkg-config \


@@ -4,16 +4,17 @@ set -x -e
ccache --show-stats ||:
ccache --zero-stats ||:
build/release --no-pbuilder $ALIEN_PKGS | ts '%Y-%m-%d %H:%M:%S'
read -ra ALIEN_PKGS <<< "${ALIEN_PKGS:-}"
build/release --no-pbuilder "${ALIEN_PKGS[@]}" | ts '%Y-%m-%d %H:%M:%S'
mv /*.deb /output
mv *.changes /output
mv *.buildinfo /output
mv -- *.changes /output
mv -- *.buildinfo /output
mv /*.rpm /output ||: # if exists
mv /*.tgz /output ||: # if exists
if [ -n "$BINARY_OUTPUT" ] && { [ "$BINARY_OUTPUT" = "programs" ] || [ "$BINARY_OUTPUT" = "tests" ] ;}
then
echo Place $BINARY_OUTPUT to output
echo "Place $BINARY_OUTPUT to output"
mkdir /output/binary ||: # if exists
mv /build/obj-*/programs/clickhouse* /output/binary
if [ "$BINARY_OUTPUT" = "tests" ]


@@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env python3
#-*- coding: utf-8 -*-
import subprocess
import os


@@ -51,6 +51,7 @@ RUN apt-get update \
protobuf-compiler \
libprotoc-dev \
libgrpc++-dev \
protobuf-compiler-grpc \
rapidjson-dev \
libsnappy-dev \
libparquet-dev \


@@ -4,10 +4,11 @@ set -x -e
ccache --show-stats ||:
ccache --zero-stats ||:
build/release --no-pbuilder $ALIEN_PKGS | ts '%Y-%m-%d %H:%M:%S'
read -ra ALIEN_PKGS <<< "${ALIEN_PKGS:-}"
build/release --no-pbuilder "${ALIEN_PKGS[@]}" | ts '%Y-%m-%d %H:%M:%S'
mv /*.deb /output
mv *.changes /output
mv *.buildinfo /output
mv -- *.changes /output
mv -- *.buildinfo /output
mv /*.rpm /output ||: # if exists
mv /*.tgz /output ||: # if exists


@@ -48,10 +48,15 @@ RUN apt-get update \
tzdata \
--yes --no-install-recommends
# Sanitizer options
# Sanitizer options for services (clickhouse-server)
RUN echo "TSAN_OPTIONS='verbosity=1000 halt_on_error=1 history_size=7'" >> /etc/environment; \
echo "UBSAN_OPTIONS='print_stacktrace=1'" >> /etc/environment; \
echo "MSAN_OPTIONS='abort_on_error=1'" >> /etc/environment; \
ln -s /usr/lib/llvm-${LLVM_VERSION}/bin/llvm-symbolizer /usr/bin/llvm-symbolizer;
# Sanitizer options for the current shell (not this one, but the shell that will be spawned on "docker run")
# (but w/o verbosity for TSAN, otherwise test.reference will not match)
ENV TSAN_OPTIONS='halt_on_error=1 history_size=7'
ENV UBSAN_OPTIONS='print_stacktrace=1'
ENV MSAN_OPTIONS='abort_on_error=1'
CMD sleep 1


@@ -1,33 +1,15 @@
# docker build --network=host -t yandex/clickhouse-codebrowser .
# docker run --volume=path_to_repo:/repo_folder --volume=path_to_result:/test_output yandex/clickhouse-codebrowser
FROM ubuntu:18.04
FROM yandex/clickhouse-binary-builder
RUN apt-get --allow-unauthenticated update -y \
&& env DEBIAN_FRONTEND=noninteractive \
apt-get --allow-unauthenticated install --yes --no-install-recommends \
bash \
sudo \
wget \
software-properties-common \
ca-certificates \
apt-transport-https \
build-essential \
gpg-agent \
git
RUN wget -nv -O - https://apt.kitware.com/keys/kitware-archive-latest.asc | sudo apt-key add -
RUN sudo apt-add-repository 'deb https://apt.kitware.com/ubuntu/ bionic main'
RUN sudo echo "deb [trusted=yes] http://apt.llvm.org/bionic/ llvm-toolchain-bionic-8 main" >> /etc/apt/sources.list
RUN sudo apt-get --yes --allow-unauthenticated update
# To build woboq
RUN sudo apt-get --yes --allow-unauthenticated install cmake clang-8 libllvm8 libclang-8-dev
RUN apt-get update && apt-get --yes --allow-unauthenticated install clang-9 libllvm9 libclang-9-dev
# repo versions don't work correctly with C++17
# also we push reports to s3, so we add index.html to subfolder urls
# https://github.com/ClickHouse-Extras/woboq_codebrowser/commit/37e15eaf377b920acb0b48dbe82471be9203f76b
RUN git clone https://github.com/ClickHouse-Extras/woboq_codebrowser
RUN cd woboq_codebrowser && cmake . -DCMAKE_BUILD_TYPE=Release && make -j
RUN cd woboq_codebrowser && cmake . -DCMAKE_BUILD_TYPE=Release -DCMAKE_CXX_COMPILER=clang\+\+-9 -DCMAKE_C_COMPILER=clang-9 && make -j
ENV CODEGEN=/woboq_codebrowser/generator/codebrowser_generator
ENV CODEINDEX=/woboq_codebrowser/indexgenerator/codebrowser_indexgenerator
@ -40,7 +22,7 @@ ENV SHA=nosha
ENV DATA="data"
CMD mkdir -p $BUILD_DIRECTORY && cd $BUILD_DIRECTORY && \
cmake $SOURCE_DIRECTORY -DCMAKE_CXX_COMPILER=/usr/bin/clang\+\+-8 -DCMAKE_C_COMPILER=/usr/bin/clang-8 -DCMAKE_EXPORT_COMPILE_COMMANDS=ON && \
cmake $SOURCE_DIRECTORY -DCMAKE_CXX_COMPILER=/usr/bin/clang\+\+-11 -DCMAKE_C_COMPILER=/usr/bin/clang-11 -DCMAKE_EXPORT_COMPILE_COMMANDS=ON && \
mkdir -p $HTML_RESULT_DIRECTORY && \
$CODEGEN -b $BUILD_DIRECTORY -a -o $HTML_RESULT_DIRECTORY -p ClickHouse:$SOURCE_DIRECTORY:$SHA -d $DATA && \
cp -r $STATIC_DATA $HTML_RESULT_DIRECTORY/ &&\


@ -11,7 +11,7 @@ RUN apt-get update \
&& echo "${LLVM_PUBKEY_HASH} /tmp/llvm-snapshot.gpg.key" | sha384sum -c \
&& apt-key add /tmp/llvm-snapshot.gpg.key \
&& export CODENAME="$(lsb_release --codename --short | tr 'A-Z' 'a-z')" \
&& echo "deb [trusted=yes] http://apt.llvm.org/${CODENAME}/ llvm-toolchain-${CODENAME}-${LLVM_VERSION} main" >> \
&& echo "deb [trusted=yes] https://apt.llvm.org/${CODENAME}/ llvm-toolchain-${CODENAME}-${LLVM_VERSION} main" >> \
/etc/apt/sources.list
# initial packages
@ -52,10 +52,10 @@ RUN apt-get update \
moreutils \
ninja-build \
psmisc \
python \
python-lxml \
python-requests \
python-termcolor \
python3 \
python3-lxml \
python3-requests \
python3-termcolor \
qemu-user-static \
rename \
software-properties-common \


@ -15,29 +15,59 @@ stage=${stage:-}
# empty parameter.
read -ra FASTTEST_CMAKE_FLAGS <<< "${FASTTEST_CMAKE_FLAGS:-}"
FASTTEST_WORKSPACE=$(readlink -f "${FASTTEST_WORKSPACE:-.}")
FASTTEST_SOURCE=$(readlink -f "${FASTTEST_SOURCE:-$FASTTEST_WORKSPACE/ch}")
FASTTEST_BUILD=$(readlink -f "${FASTTEST_BUILD:-${BUILD:-$FASTTEST_WORKSPACE/build}}")
FASTTEST_DATA=$(readlink -f "${FASTTEST_DATA:-$FASTTEST_WORKSPACE/db-fasttest}")
FASTTEST_OUTPUT=$(readlink -f "${FASTTEST_OUTPUT:-$FASTTEST_WORKSPACE}")
function kill_clickhouse
# Export these variables, so that all subsequent invocations of the script
# use them, and not try to guess them anew, which leads to weird effects.
export FASTTEST_WORKSPACE
export FASTTEST_SOURCE
export FASTTEST_BUILD
export FASTTEST_DATA
export FASTTEST_OUTPUT
server_pid=none
function stop_server
{
for _ in {1..60}
do
if ! pkill -f clickhouse-server ; then break ; fi
if ! pkill -f "clickhouse-server" && ! kill -- "$server_pid" ; then break ; fi
sleep 1
done
if pgrep -f clickhouse-server
if kill -0 -- "$server_pid"
then
pstree -apgT
jobs
echo "Failed to kill the ClickHouse server $(pgrep -f clickhouse-server)"
echo "Failed to kill the ClickHouse server pid '$server_pid'"
return 1
fi
server_pid=none
}
function wait_for_server_start
function start_server
{
set -m # Spawn server in its own process groups
clickhouse-server --config-file="$FASTTEST_DATA/config.xml" -- --path "$FASTTEST_DATA" --user_files_path "$FASTTEST_DATA/user_files" &>> "$FASTTEST_OUTPUT/server.log" &
server_pid=$!
set +m
if [ "$server_pid" == "0" ]
then
echo "Failed to start ClickHouse server"
# Avoid zero PID because `kill` treats it as our process group PID.
server_pid="none"
return 1
fi
for _ in {1..60}
do
if clickhouse-client --query "select 1" || ! pgrep -f clickhouse-server
if clickhouse-client --query "select 1" || ! kill -0 -- "$server_pid"
then
break
fi
@ -47,20 +77,26 @@ function wait_for_server_start
if ! clickhouse-client --query "select 1"
then
echo "Failed to wait until ClickHouse server starts."
server_pid="none"
return 1
fi
echo "ClickHouse server pid '$(pgrep -f clickhouse-server)' started and responded"
if ! kill -0 -- "$server_pid"
then
echo "Wrong clickhouse server started: PID '$server_pid' we started is not running, but '$(pgrep -f clickhouse-server)' is running"
server_pid="none"
return 1
fi
echo "ClickHouse server pid '$server_pid' started and responded"
}
function clone_root
{
git clone https://github.com/ClickHouse/ClickHouse.git | ts '%Y-%m-%d %H:%M:%S' | tee /test_output/clone_log.txt
cd ClickHouse
CLICKHOUSE_DIR=$(pwd)
export CLICKHOUSE_DIR
git clone https://github.com/ClickHouse/ClickHouse.git -- "$FASTTEST_SOURCE" | ts '%Y-%m-%d %H:%M:%S' | tee "$FASTTEST_OUTPUT/clone_log.txt"
(
cd "$FASTTEST_SOURCE"
if [ "$PULL_REQUEST_NUMBER" != "0" ]; then
if git fetch origin "+refs/pull/$PULL_REQUEST_NUMBER/merge"; then
git checkout FETCH_HEAD
@ -71,22 +107,36 @@ if [ "$PULL_REQUEST_NUMBER" != "0" ]; then
echo 'Checked out to commit'
fi
else
if [ "$COMMIT_SHA" != "" ]; then
if [ -v COMMIT_SHA ]; then
git checkout "$COMMIT_SHA"
fi
fi
)
}
function run
function clone_submodules
{
(
cd "$FASTTEST_SOURCE"
SUBMODULES_TO_UPDATE=(contrib/boost contrib/zlib-ng contrib/libxml2 contrib/poco contrib/libunwind contrib/ryu contrib/fmtlib contrib/base64 contrib/cctz contrib/libcpuid contrib/double-conversion contrib/libcxx contrib/libcxxabi contrib/libc-headers contrib/lz4 contrib/zstd contrib/fastops contrib/rapidjson contrib/re2 contrib/sparsehash-c11)
git submodule update --init --recursive "${SUBMODULES_TO_UPDATE[@]}" | ts '%Y-%m-%d %H:%M:%S' | tee /test_output/submodule_log.txt
git submodule sync
git submodule update --init --recursive "${SUBMODULES_TO_UPDATE[@]}"
git submodule foreach git reset --hard
git submodule foreach git checkout @ -f
git submodule foreach git clean -xfd
)
}
CMAKE_LIBS_CONFIG=(-DENABLE_LIBRARIES=0 -DENABLE_TESTS=0 -DENABLE_UTILS=0 -DENABLE_EMBEDDED_COMPILER=0 -DENABLE_THINLTO=0 -DUSE_UNWIND=1)
function run_cmake
{
CMAKE_LIBS_CONFIG=("-DENABLE_LIBRARIES=0" "-DENABLE_TESTS=0" "-DENABLE_UTILS=0" "-DENABLE_EMBEDDED_COMPILER=0" "-DENABLE_THINLTO=0" "-DUSE_UNWIND=1")
export CCACHE_DIR=/ccache
export CCACHE_BASEDIR=/ClickHouse
# TODO remove this? we don't use ccache anyway. An option would be to download it
# from S3 simultaneously with cloning.
export CCACHE_DIR="$FASTTEST_WORKSPACE/ccache"
export CCACHE_BASEDIR="$FASTTEST_SOURCE"
export CCACHE_NOHASHDIR=true
export CCACHE_COMPILERCHECK=content
export CCACHE_MAXSIZE=15G
@ -94,34 +144,45 @@ export CCACHE_MAXSIZE=15G
ccache --show-stats ||:
ccache --zero-stats ||:
mkdir build
cd build
cmake .. -DCMAKE_INSTALL_PREFIX=/usr -DCMAKE_CXX_COMPILER=clang++-10 -DCMAKE_C_COMPILER=clang-10 "${CMAKE_LIBS_CONFIG[@]}" "${FASTTEST_CMAKE_FLAGS[@]}" | ts '%Y-%m-%d %H:%M:%S' | tee /test_output/cmake_log.txt
time ninja clickhouse-bundle | ts '%Y-%m-%d %H:%M:%S' | tee /test_output/build_log.txt
ninja install | ts '%Y-%m-%d %H:%M:%S' | tee /test_output/install_log.txt
mkdir "$FASTTEST_BUILD" ||:
(
cd "$FASTTEST_BUILD"
cmake "$FASTTEST_SOURCE" -DCMAKE_CXX_COMPILER=clang++-10 -DCMAKE_C_COMPILER=clang-10 "${CMAKE_LIBS_CONFIG[@]}" "${FASTTEST_CMAKE_FLAGS[@]}" | ts '%Y-%m-%d %H:%M:%S' | tee "$FASTTEST_OUTPUT/cmake_log.txt"
)
}
function build
{
(
cd "$FASTTEST_BUILD"
time ninja clickhouse-bundle | ts '%Y-%m-%d %H:%M:%S' | tee "$FASTTEST_OUTPUT/build_log.txt"
ccache --show-stats ||:
)
}
mkdir -p /etc/clickhouse-server
mkdir -p /etc/clickhouse-client
mkdir -p /etc/clickhouse-server/config.d
mkdir -p /etc/clickhouse-server/users.d
ln -s /test_output /var/log/clickhouse-server
cp "$CLICKHOUSE_DIR/programs/server/config.xml" /etc/clickhouse-server/
cp "$CLICKHOUSE_DIR/programs/server/users.xml" /etc/clickhouse-server/
function configure
{
clickhouse-client --version
clickhouse-test --help
# install tests config
$CLICKHOUSE_DIR/tests/config/install.sh
mkdir -p "$FASTTEST_DATA"{,/client-config}
cp -a "$FASTTEST_SOURCE/programs/server/"{config,users}.xml "$FASTTEST_DATA"
cp -a "$FASTTEST_SOURCE/programs/server/"{config,users}.xml "$FASTTEST_DATA"
"$FASTTEST_SOURCE/tests/config/install.sh" "$FASTTEST_DATA" "$FASTTEST_DATA/client-config"
# doesn't support SSL
rm -f /etc/clickhouse-server/config.d/secure_ports.xml
rm -f "$FASTTEST_DATA/config.d/secure_ports.xml"
}
function run_tests
{
clickhouse-server --version
clickhouse-test --help
# Kill the server in case we are running locally and not in docker
kill_clickhouse
stop_server ||:
clickhouse-server --config /etc/clickhouse-server/config.xml --daemon
wait_for_server_start
start_server
TESTS_TO_SKIP=(
parquet
@ -191,11 +252,10 @@ TESTS_TO_SKIP=(
01460_DistributedFilesToInsert
)
time clickhouse-test -j 8 --no-long --testname --shard --zookeeper --skip "${TESTS_TO_SKIP[@]}" 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee /test_output/test_log.txt
time clickhouse-test -j 8 --no-long --testname --shard --zookeeper --skip "${TESTS_TO_SKIP[@]}" 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee "$FASTTEST_OUTPUT/test_log.txt"
# substr is to remove semicolon after test name
readarray -t FAILED_TESTS < <(awk '/FAIL|TIMEOUT|ERROR/ { print substr($3, 1, length($3)-1) }' /test_output/test_log.txt | tee /test_output/failed-parallel-tests.txt)
readarray -t FAILED_TESTS < <(awk '/FAIL|TIMEOUT|ERROR/ { print substr($3, 1, length($3)-1) }' "$FASTTEST_OUTPUT/test_log.txt" | tee "$FASTTEST_OUTPUT/failed-parallel-tests.txt")
# We will rerun sequentially any tests that have failed during parallel run.
# They might have failed because there was some interference from other tests
@ -206,19 +266,16 @@ readarray -t FAILED_TESTS < <(awk '/FAIL|TIMEOUT|ERROR/ { print substr($3, 1, le
# explicit instead of guessing.
if [[ -n "${FAILED_TESTS[*]}" ]]
then
kill_clickhouse
stop_server ||:
# Clean the data so that there is no interference from the previous test run.
rm -rf /var/lib/clickhouse ||:
mkdir /var/lib/clickhouse
rm -rf "$FASTTEST_DATA"/{meta,}data ||:
clickhouse-server --config /etc/clickhouse-server/config.xml --daemon
wait_for_server_start
start_server
echo "Going to run again: ${FAILED_TESTS[*]}"
clickhouse-test --no-long --testname --shard --zookeeper "${FAILED_TESTS[@]}" 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee -a /test_output/test_log.txt
clickhouse-test --no-long --testname --shard --zookeeper "${FAILED_TESTS[@]}" 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee -a "$FASTTEST_OUTPUT/test_log.txt"
else
echo "No failed tests"
fi
@ -228,20 +285,50 @@ case "$stage" in
"")
ls -la
;&
"clone_root")
clone_root
# Pass control to the script from cloned sources, unless asked otherwise.
if ! [ -v FASTTEST_LOCAL_SCRIPT ]
then
stage=run "$CLICKHOUSE_DIR/docker/test/fasttest/run.sh"
# 'run' stage is deprecated, used for compatibility with old scripts.
# Replace with 'clone_submodules' after Nov 1, 2020.
# cd and CLICKHOUSE_DIR are also a setup for old scripts, remove as well.
# In the modern script we undo it by changing back into the workspace dir right
# away; see below. Remove that as well.
cd "$FASTTEST_SOURCE"
CLICKHOUSE_DIR=$(pwd)
export CLICKHOUSE_DIR
stage=run "$FASTTEST_SOURCE/docker/test/fasttest/run.sh"
exit $?
fi
;&
"run")
run
# A deprecated stage that is called by old script and equivalent to everything
# after cloning root, starting with cloning submodules.
;&
"clone_submodules")
# Recover after being called from the old script that changes into source directory.
# See the compatibility hacks in `clone_root` stage above. Remove at the same time,
# after Nov 1, 2020.
cd "$FASTTEST_WORKSPACE"
clone_submodules | ts '%Y-%m-%d %H:%M:%S' | tee "$FASTTEST_OUTPUT/submodule_log.txt"
;&
"run_cmake")
run_cmake
;&
"build")
build
PATH="$FASTTEST_BUILD/programs:$FASTTEST_SOURCE/tests:$PATH"
export PATH
;&
"configure")
# The `install_log.txt` is also needed for compatibility with old CI task --
# if there is no log, it will decide that build failed.
configure | ts '%Y-%m-%d %H:%M:%S' | tee "$FASTTEST_OUTPUT/install_log.txt"
;&
"run_tests")
run_tests
;&
esac


@ -58,7 +58,7 @@ function watchdog
echo "Fuzzing run has timed out"
killall clickhouse-client ||:
for x in {1..10}
for _ in {1..10}
do
if ! pgrep -f clickhouse-client
then
@ -81,6 +81,9 @@ function fuzz
echo Server started
fuzzer_exit_code=0
# SC2012: Use find instead of ls to better handle non-alphanumeric filenames.
# They are all alphanumeric.
# shellcheck disable=SC2012
./clickhouse-client --query-fuzzer-runs=1000 \
< <(for f in $(ls ch/tests/queries/0_stateless/*.sql | sort -R); do cat "$f"; echo ';'; done) \
> >(tail -10000 > fuzzer.log) \


@ -4,7 +4,7 @@ FROM yandex/clickhouse-test-base
RUN apt-get update \
&& env DEBIAN_FRONTEND=noninteractive apt-get -y install \
tzdata \
python \
python3 \
libreadline-dev \
libicu-dev \
bsdutils \
@ -16,7 +16,8 @@ RUN apt-get update \
odbc-postgresql \
sqlite3 \
curl \
tar
tar \
krb5-user
RUN rm -rf \
/var/lib/apt/lists/* \
/var/cache/debconf \

View File

@ -0,0 +1,15 @@
# docker build -t yandex/clickhouse-kerberos-kdc .
FROM centos:6.6
# old OS to make it faster and smaller
RUN yum install -y krb5-server krb5-libs krb5-auth-dialog krb5-workstation
EXPOSE 88 749
RUN touch /config.sh
# should be overwritten e.g. via docker_compose volumes
# volumes: /some_path/my_kerberos_config.sh:/config.sh:ro
ENTRYPOINT ["/bin/bash", "/config.sh"]


@ -16,13 +16,13 @@ RUN apt-get update \
iproute2 \
module-init-tools \
cgroupfs-mount \
python-pip \
python3-pip \
tzdata \
libreadline-dev \
libicu-dev \
bsdutils \
curl \
python-pika \
python3-pika \
liblua5.1-dev \
luajit \
libssl-dev \
@ -37,7 +37,7 @@ RUN apt-get update \
ENV TZ=Europe/Moscow
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
RUN pip install urllib3==1.23 pytest docker-compose==1.22.0 docker dicttoxml kazoo PyMySQL psycopg2==2.7.5 pymongo tzlocal kafka-python protobuf redis aerospike pytest-timeout minio rpm-confluent-schemaregistry grpcio grpcio-tools cassandra-driver
RUN python3 -m pip install urllib3==1.23 pytest docker-compose==1.22.0 docker dicttoxml kazoo PyMySQL psycopg2==2.7.5 pymongo tzlocal kafka-python protobuf redis aerospike pytest-timeout minio grpcio grpcio-tools cassandra-driver confluent-kafka avro
ENV DOCKER_CHANNEL stable
ENV DOCKER_VERSION 17.09.1-ce


@ -0,0 +1,59 @@
version: '2.3'
services:
kafka_kerberized_zookeeper:
image: confluentinc/cp-zookeeper:5.2.0
# restart: always
hostname: kafka_kerberized_zookeeper
environment:
ZOOKEEPER_SERVER_ID: 1
ZOOKEEPER_CLIENT_PORT: 2181
ZOOKEEPER_SERVERS: "kafka_kerberized_zookeeper:2888:3888"
KAFKA_OPTS: "-Djava.security.auth.login.config=/etc/kafka/secrets/zookeeper_jaas.conf -Djava.security.krb5.conf=/etc/kafka/secrets/krb.conf -Dzookeeper.authProvider.1=org.apache.zookeeper.server.auth.SASLAuthenticationProvider -Dsun.security.krb5.debug=true"
volumes:
- ${KERBERIZED_KAFKA_DIR}/secrets:/etc/kafka/secrets
- /dev/urandom:/dev/random
depends_on:
- kafka_kerberos
security_opt:
- label:disable
kerberized_kafka1:
image: confluentinc/cp-kafka:5.2.0
# restart: always
hostname: kerberized_kafka1
ports:
- "9092:9092"
- "9093:9093"
environment:
KAFKA_LISTENERS: OUTSIDE://:19092,UNSECURED_OUTSIDE://:19093,UNSECURED_INSIDE://:9093
KAFKA_ADVERTISED_LISTENERS: OUTSIDE://kerberized_kafka1:19092,UNSECURED_OUTSIDE://kerberized_kafka1:19093,UNSECURED_INSIDE://localhost:9093
# KAFKA_LISTENERS: INSIDE://kerberized_kafka1:9092,OUTSIDE://kerberized_kafka1:19092
# KAFKA_ADVERTISED_LISTENERS: INSIDE://localhost:9092,OUTSIDE://kerberized_kafka1:19092
KAFKA_SASL_MECHANISM_INTER_BROKER_PROTOCOL: GSSAPI
KAFKA_SASL_ENABLED_MECHANISMS: GSSAPI
KAFKA_SASL_KERBEROS_SERVICE_NAME: kafka
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: OUTSIDE:SASL_PLAINTEXT,UNSECURED_OUTSIDE:PLAINTEXT,UNSECURED_INSIDE:PLAINTEXT,
KAFKA_INTER_BROKER_LISTENER_NAME: OUTSIDE
KAFKA_BROKER_ID: 1
KAFKA_ZOOKEEPER_CONNECT: "kafka_kerberized_zookeeper:2181"
KAFKA_LOG4J_LOGGERS: "kafka.controller=INFO,kafka.producer.async.DefaultEventHandler=INFO,state.change.logger=INFO"
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
KAFKA_OPTS: "-Djava.security.auth.login.config=/etc/kafka/secrets/broker_jaas.conf -Djava.security.krb5.conf=/etc/kafka/secrets/krb.conf -Dsun.security.krb5.debug=true"
volumes:
- ${KERBERIZED_KAFKA_DIR}/secrets:/etc/kafka/secrets
- /dev/urandom:/dev/random
depends_on:
- kafka_kerberized_zookeeper
- kafka_kerberos
security_opt:
- label:disable
kafka_kerberos:
image: yandex/clickhouse-kerberos-kdc:${DOCKER_KERBEROS_KDC_TAG}
hostname: kafka_kerberos
volumes:
- ${KERBERIZED_KAFKA_DIR}/secrets:/tmp/keytab
- ${KERBERIZED_KAFKA_DIR}/../../kerberos_image_config.sh:/config.sh
- /dev/urandom:/dev/random
ports: [88, 749]


@ -7,7 +7,7 @@ set +e
reties=0
while true; do
docker info &>/dev/null && break
reties=$[$reties+1]
reties=$((reties+1))
if [[ $reties -ge 100 ]]; then # 10 sec max
echo "Can't start docker daemon, timeout exceeded." >&2
exit 1;
@ -27,6 +27,7 @@ export DOCKER_MYSQL_JAVA_CLIENT_TAG=${DOCKER_MYSQL_JAVA_CLIENT_TAG:=latest}
export DOCKER_MYSQL_JS_CLIENT_TAG=${DOCKER_MYSQL_JS_CLIENT_TAG:=latest}
export DOCKER_MYSQL_PHP_CLIENT_TAG=${DOCKER_MYSQL_PHP_CLIENT_TAG:=latest}
export DOCKER_POSTGRESQL_JAVA_CLIENT_TAG=${DOCKER_POSTGRESQL_JAVA_CLIENT_TAG:=latest}
export DOCKER_KERBEROS_KDC_TAG=${DOCKER_KERBEROS_KDC_TAG:=latest}
cd /ClickHouse/tests/integration
exec "$@"


@ -9,7 +9,7 @@ right_version=${2}
if [ "$left_version" == "" ] || [ "$right_version" == "" ]
then
>&2 echo Usage: $(basename "$0") left_version right_version
>&2 echo "Usage: $(basename "$0") left_version right_version"
exit 1
fi


@ -181,6 +181,9 @@ function run_tests
# Randomize test order.
test_files=$(for f in $test_files; do echo "$f"; done | sort -R)
# Limit profiling time to 10 minutes, not to run for too long.
profile_seconds_left=600
# Run the tests.
test_name="<none>"
for test in $test_files
@ -194,15 +197,24 @@ function run_tests
test_name=$(basename "$test" ".xml")
echo test "$test_name"
# Don't profile if we're past the time limit.
# Use awk because bash doesn't support floating point arithmetic.
profile_seconds=$(awk "BEGIN { print ($profile_seconds_left > 0 ? 10 : 0) }")
TIMEFORMAT=$(printf "$test_name\t%%3R\t%%3U\t%%3S\n")
# The grep is to filter out set -x output and keep only time output.
# The '2>&1 >/dev/null' redirects stderr to stdout, and discards stdout.
{ \
time "$script_dir/perf.py" --host localhost localhost --port 9001 9002 \
--runs "$CHPC_RUNS" --max-queries "$CHPC_MAX_QUERIES" \
--profile-seconds "$profile_seconds" \
-- "$test" > "$test_name-raw.tsv" 2> "$test_name-err.log" ; \
} 2>&1 >/dev/null | tee >(grep -v ^+ >> "wall-clock-times.tsv") \
|| echo "Test $test_name failed with error code $?" >> "$test_name-err.log"
profile_seconds_left=$(awk -F' ' \
'BEGIN { s = '$profile_seconds_left'; } /^profile-total/ { s -= $2 } END { print s }' \
"$test_name-raw.tsv")
done
unset TIMEFORMAT
@ -294,6 +306,7 @@ for test_file in *-raw.tsv
do
test_name=$(basename "$test_file" "-raw.tsv")
sed -n "s/^query\t/$test_name\t/p" < "$test_file" >> "analyze/query-runs.tsv"
sed -n "s/^profile\t/$test_name\t/p" < "$test_file" >> "analyze/query-profiles.tsv"
sed -n "s/^client-time\t/$test_name\t/p" < "$test_file" >> "analyze/client-times.tsv"
sed -n "s/^report-threshold\t/$test_name\t/p" < "$test_file" >> "analyze/report-thresholds.tsv"
sed -n "s/^skipped\t/$test_name\t/p" < "$test_file" >> "analyze/skipped-tests.tsv"
@ -658,13 +671,15 @@ create view test_runs as
group by test
;
create table test_times_report engine File(TSV, 'report/test-times.tsv') as
select wall_clock_time_per_test.test, real,
toDecimal64(total_client_time, 3),
create view test_times_view as
select
wall_clock_time_per_test.test test,
real,
total_client_time,
queries,
toDecimal64(query_max, 3),
toDecimal64(real / queries, 3) avg_real_per_query,
toDecimal64(query_min, 3),
query_max,
real / queries avg_real_per_query,
query_min,
runs
from test_time
-- wall clock times are also measured for skipped tests, so don't
@ -673,7 +688,43 @@ create table test_times_report engine File(TSV, 'report/test-times.tsv') as
on wall_clock_time_per_test.test = test_time.test
full join test_runs
on test_runs.test = test_time.test
order by avg_real_per_query desc;
;
-- WITH TOTALS doesn't work with INSERT SELECT, so we have to jump through these
-- hoops: https://github.com/ClickHouse/ClickHouse/issues/15227
create view test_times_view_total as
select
'Total' test,
sum(real),
sum(total_client_time),
sum(queries),
max(query_max),
sum(real) / sum(queries) avg_real_per_query,
min(query_min),
-- Totaling the number of runs doesn't make sense, but use the max so
-- that the reporting script doesn't complain about queries being too
-- long.
max(runs)
from test_times_view
;
create table test_times_report engine File(TSV, 'report/test-times.tsv') as
select
test,
toDecimal64(real, 3),
toDecimal64(total_client_time, 3),
queries,
toDecimal64(query_max, 3),
toDecimal64(avg_real_per_query, 3),
toDecimal64(query_min, 3),
runs
from (
select * from test_times_view
union all
select * from test_times_view_total
)
order by test = 'Total' desc, avg_real_per_query desc
;
-- report for all queries page, only main metric
create table all_tests_report engine File(TSV, 'report/all-queries.tsv') as
@ -694,13 +745,12 @@ create table all_tests_report engine File(TSV, 'report/all-queries.tsv') as
test, query_index, query_display_name
from queries order by test, query_index;
-- queries for which we will build flamegraphs (see below)
create table queries_for_flamegraph engine File(TSVWithNamesAndTypes,
'report/queries-for-flamegraph.tsv') as
select test, query_index from queries where unstable_show or changed_show
;
-- Report of queries that have inconsistent 'short' markings:
-- 1) have short duration, but are not marked as 'short'
-- 2) the reverse -- marked 'short' but take too long.
-- The threshold for 2) is significantly larger than the threshold for 1), to
-- avoid jitter.
create view shortness
as select
(test, query_index) in
@ -718,11 +768,6 @@ create view shortness
and times.query_index = query_display_names.query_index
;
-- Report of queries that have inconsistent 'short' markings:
-- 1) have short duration, but are not marked as 'short'
-- 2) the reverse -- marked 'short' but take too long.
-- The threshold for 2) is significantly larger than the threshold for 1), to
-- avoid jitter.
create table inconsistent_short_marking_report
engine File(TSV, 'report/unexpected-query-duration.tsv')
as select
@ -759,18 +804,15 @@ create table all_query_metrics_tsv engine File(TSV, 'report/all-query-metrics.ts
" 2> >(tee -a report/errors.log 1>&2)
# Prepare source data for metrics and flamegraphs for unstable queries.
# Prepare source data for metrics and flamegraphs for queries that were profiled
# by perf.py.
for version in {right,left}
do
rm -rf data
clickhouse-local --query "
create view queries_for_flamegraph as
select * from file('report/queries-for-flamegraph.tsv', TSVWithNamesAndTypes,
'test text, query_index int');
create view query_runs as
create view query_profiles as
with 0 as left, 1 as right
select * from file('analyze/query-runs.tsv', TSV,
select * from file('analyze/query-profiles.tsv', TSV,
'test text, query_index int, query_id text, version UInt8, time float')
where version = $version
;
@ -782,15 +824,12 @@ create view query_display_names as select * from
create table unstable_query_runs engine File(TSVWithNamesAndTypes,
'unstable-query-runs.$version.rep') as
select query_runs.test test, query_runs.query_index query_index,
select query_profiles.test test, query_profiles.query_index query_index,
query_display_name, query_id
from query_runs
join queries_for_flamegraph on
query_runs.test = queries_for_flamegraph.test
and query_runs.query_index = queries_for_flamegraph.query_index
from query_profiles
left join query_display_names on
query_runs.test = query_display_names.test
and query_runs.query_index = query_display_names.query_index
query_profiles.test = query_display_names.test
and query_profiles.query_index = query_display_names.query_index
;
create view query_log as select *


@ -10,7 +10,7 @@ mkdir left ||:
left_pr=$1
left_sha=$2
right_pr=$3
# right_pr=$3 not used for now
right_sha=$4
datasets=${CHPC_DATASETS:-"hits1 hits10 hits100 values"}


@ -18,9 +18,22 @@ import xml.etree.ElementTree as et
from threading import Thread
from scipy import stats
total_start_seconds = time.perf_counter()
stage_start_seconds = total_start_seconds
def reportStageEnd(stage):
global stage_start_seconds, total_start_seconds
current = time.perf_counter()
print(f'stage\t{stage}\t{current - stage_start_seconds:.3f}\t{current - total_start_seconds:.3f}')
stage_start_seconds = current
def tsv_escape(s):
return s.replace('\\', '\\\\').replace('\t', '\\t').replace('\n', '\\n').replace('\r','')
parser = argparse.ArgumentParser(description='Run performance test.')
# Explicitly decode files as UTF-8 because sometimes we have Russian characters in queries, and LANG=C is set.
parser.add_argument('file', metavar='FILE', type=argparse.FileType('r', encoding='utf-8'), nargs=1, help='test description file')
@ -29,16 +42,21 @@ parser.add_argument('--port', nargs='*', default=[9000], help="Space-separated l
parser.add_argument('--runs', type=int, default=1, help='Number of query runs per server.')
parser.add_argument('--max-queries', type=int, default=None, help='Test no more than this number of queries, chosen at random.')
parser.add_argument('--queries-to-run', nargs='*', type=int, default=None, help='Space-separated list of indexes of queries to test.')
parser.add_argument('--profile-seconds', type=int, default=0, help='For how many seconds to profile a query for which the performance has changed.')
parser.add_argument('--long', action='store_true', help='Do not skip the tests tagged as long.')
parser.add_argument('--print-queries', action='store_true', help='Print test queries and exit.')
parser.add_argument('--print-settings', action='store_true', help='Print test settings and exit.')
args = parser.parse_args()
reportStageEnd('start')
test_name = os.path.splitext(os.path.basename(args.file[0].name))[0]
tree = et.parse(args.file[0])
root = tree.getroot()
reportStageEnd('parse')
# Process query parameters
subst_elems = root.findall('substitutions/substitution')
available_parameters = {} # { 'table': ['hits_10m', 'hits_100m'], ... }
@ -78,11 +96,16 @@ for e in root.findall('query'):
assert(len(test_queries) == len(is_short))
# If we're given a list of queries to run, check that it makes sense.
for i in args.queries_to_run or []:
if i < 0 or i >= len(test_queries):
print(f'There is no query no. {i} in this test, only [{0}-{len(test_queries) - 1}] are present')
exit(1)
# If we're only asked to print the queries, do that and exit
# If we're only asked to print the queries, do that and exit.
if args.print_queries:
for q in test_queries:
print(q)
for i in args.queries_to_run or range(0, len(test_queries)):
print(test_queries[i])
exit(0)
# Print short queries
@ -107,15 +130,21 @@ if not args.long:
sys.exit(0)
# Print report threshold for the test if it is set.
ignored_relative_change = 0.05
if 'max_ignored_relative_change' in root.attrib:
print(f'report-threshold\t{root.attrib["max_ignored_relative_change"]}')
ignored_relative_change = float(root.attrib["max_ignored_relative_change"])
print(f'report-threshold\t{ignored_relative_change}')
reportStageEnd('before-connect')
# Open connections
servers = [{'host': host, 'port': port} for (host, port) in zip(args.host, args.port)]
servers = [{'host': host or args.host[0], 'port': port or args.port[0]} for (host, port) in itertools.zip_longest(args.host, args.port)]
all_connections = [clickhouse_driver.Client(**server) for server in servers]
for s in servers:
print('server\t{}\t{}'.format(s['host'], s['port']))
for i, s in enumerate(servers):
print(f'server\t{i}\t{s["host"]}\t{s["port"]}')
reportStageEnd('connect')
# Run drop queries, ignoring errors. Do this before all other activity, because
# clickhouse_driver disconnects on error (this is not configurable), and the new
@ -130,6 +159,8 @@ for conn_index, c in enumerate(all_connections):
except:
pass
reportStageEnd('drop-1')
# Apply settings.
# If there are errors, report them and continue -- maybe a new test uses a setting
# that is not in master, but the queries can still run. If we have multiple
@ -147,6 +178,8 @@ for conn_index, c in enumerate(all_connections):
except:
print(traceback.format_exc(), file=sys.stderr)
reportStageEnd('settings')
# Check tables that should exist. If they don't exist, just skip this test.
tables = [e.text for e in root.findall('preconditions/table_exists')]
for t in tables:
@ -159,6 +192,8 @@ for t in tables:
print(f'skipped\t{tsv_escape(skipped_message)}')
sys.exit(0)
reportStageEnd('preconditions')
# Run create and fill queries. We will run them simultaneously for both servers,
# to save time.
# The weird search is to keep the relative order of elements, which matters, and
@ -189,6 +224,9 @@ for t in threads:
for t in threads:
t.join()
reportStageEnd('create')
# By default, test all queries.
queries_to_run = range(0, len(test_queries))
if args.max_queries:
@ -196,15 +234,11 @@ if args.max_queries:
queries_to_run = random.sample(range(0, len(test_queries)), min(len(test_queries), args.max_queries))
if args.queries_to_run:
# Run the specified queries, with some sanity check.
for i in args.queries_to_run:
if i < 0 or i >= len(test_queries):
print(f'There is no query no. "{i}" in this test, only [{0}-{len(test_queries) - 1}] are present')
exit(1)
# Run the specified queries.
queries_to_run = args.queries_to_run
# Run test queries.
profile_total_seconds = 0
for query_index in queries_to_run:
q = test_queries[query_index]
query_prefix = f'{test_name}.query{query_index}'
@ -324,14 +358,30 @@ for query_index in queries_to_run:
client_seconds = time.perf_counter() - start_seconds
print(f'client-time\t{query_index}\t{client_seconds}\t{server_seconds}')
#print(all_server_times)
#print(stats.ttest_ind(all_server_times[0], all_server_times[1], equal_var = False).pvalue)
# Run additional profiling queries to collect profile data, but only if test times appeared to be different.
# We have to do it after normal runs because otherwise it will affect test statistics too much
if len(all_server_times) == 2 and stats.ttest_ind(all_server_times[0], all_server_times[1], equal_var = False).pvalue < 0.1:
if len(all_server_times) != 2:
continue
if len(all_server_times[0]) < 3:
# Don't fail if for some reason there are not enough measurements.
continue
pvalue = stats.ttest_ind(all_server_times[0], all_server_times[1], equal_var = False).pvalue
median = [statistics.median(t) for t in all_server_times]
# Keep this consistent with the value used in report. Should eventually move
# to (median[1] - median[0]) / min(median), which is compatible with "times"
# difference we use in report (max(median) / min(median)).
relative_diff = (median[1] - median[0]) / median[0]
print(f'diff\t{query_index}\t{median[0]}\t{median[1]}\t{relative_diff}\t{pvalue}')
if abs(relative_diff) < ignored_relative_change or pvalue > 0.05:
continue
# Perform profile runs for fixed amount of time. Don't limit the number
# of runs, because we also have short queries.
profile_start_seconds = time.perf_counter()
run = 0
while True:
while time.perf_counter() - profile_start_seconds < args.profile_seconds:
run_id = f'{query_prefix}.profile{run}'
for conn_index, c in enumerate(this_query_connections):
@ -344,14 +394,13 @@ for query_index in queries_to_run:
e.message = run_id + ': ' + e.message
raise
elapsed = c.last_query.elapsed
profile_seconds += elapsed
run += 1
# Don't spend too much time for profile runs
if run > args.runs or profile_seconds > 10:
break
# And don't bother with short queries
profile_total_seconds += time.perf_counter() - profile_start_seconds
print(f'profile-total\t{profile_total_seconds}')
reportStageEnd('run')
# Run drop queries
drop_queries = substitute_parameters(drop_query_templates)
@ -359,3 +408,5 @@ for conn_index, c in enumerate(all_connections):
for q in drop_queries:
c.execute(q)
print(f'drop\t{conn_index}\t{c.last_query.elapsed}\t{tsv_escape(q)}')
reportStageEnd('drop-2')


@ -312,7 +312,7 @@ def add_errors_explained():
if args.report == 'main':
print(header_template.format())
print((header_template.format()))
add_tested_commits()
@ -487,7 +487,7 @@ if args.report == 'main':
for r in rows:
anchor = f'{currentTableAnchor()}.{r[0]}'
total_runs = (int(r[7]) + 1) * 2 # one prewarm run, two servers
if float(r[5]) > allowed_average_run_time * total_runs:
if r[0] != 'Total' and float(r[5]) > allowed_average_run_time * total_runs:
# FIXME should be 15s max -- investigate parallel_insert
slow_average_tests += 1
attrs[5] = f'style="background: {color_bad}"'
@ -495,7 +495,7 @@ if args.report == 'main':
else:
attrs[5] = ''
if float(r[4]) > allowed_single_run_time * total_runs:
if r[0] != 'Total' and float(r[4]) > allowed_single_run_time * total_runs:
slow_average_tests += 1
attrs[4] = f'style="background: {color_bad}"'
errors_explained.append([f'<a href="./all-queries.html#all-query-times.{r[0]}.0">Some query of the test \'{r[0]}\' is too slow to run. See the all queries report'])
@ -571,14 +571,14 @@ if args.report == 'main':
status = 'failure'
message = 'Errors while building the report.'
print("""
print(("""
<!--status: {status}-->
<!--message: {message}-->
""".format(status=status, message=message))
""".format(status=status, message=message)))
elif args.report == 'all-queries':
print(header_template.format())
print((header_template.format()))
add_tested_commits()


@ -4,7 +4,7 @@ FROM yandex/clickhouse-stateless-test
RUN apt-get update -y \
&& env DEBIAN_FRONTEND=noninteractive \
apt-get install --yes --no-install-recommends \
python-requests \
python3-requests \
llvm-9
COPY s3downloader /s3downloader


@ -26,11 +26,12 @@ function start()
fi
timeout 120 service clickhouse-server start
sleep 0.5
counter=$(($counter + 1))
counter=$((counter + 1))
done
}
start
# shellcheck disable=SC2086 # No quotes because I want to split it into words.
/s3downloader --dataset-names $DATASETS
chmod 777 -R /var/lib/clickhouse
clickhouse-client --query "SHOW DATABASES"
@ -43,8 +44,8 @@ clickhouse-client --query "RENAME TABLE datasets.hits_v1 TO test.hits"
clickhouse-client --query "RENAME TABLE datasets.visits_v1 TO test.visits"
clickhouse-client --query "SHOW TABLES FROM test"
if cat /usr/bin/clickhouse-test | grep -q -- "--use-skip-list"; then
if grep -q -- "--use-skip-list" /usr/bin/clickhouse-test ; then
SKIP_LIST_OPT="--use-skip-list"
fi
clickhouse-test --testname --shard --zookeeper --no-stateless "$SKIP_LIST_OPT" $ADDITIONAL_OPTIONS $SKIP_TESTS_OPTION 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee test_output/test_result.txt
clickhouse-test --testname --shard --zookeeper --no-stateless "$SKIP_LIST_OPT" "$ADDITIONAL_OPTIONS" "$SKIP_TESTS_OPTION" 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee test_output/test_result.txt


@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import sys
@ -29,7 +29,7 @@ def dowload_with_progress(url, path):
logging.info("Downloading from %s to temp path %s", url, path)
for i in range(RETRIES_COUNT):
try:
with open(path, 'w') as f:
with open(path, 'wb') as f:
response = requests.get(url, stream=True)
response.raise_for_status()
total_length = response.headers.get('content-length')
@ -74,7 +74,7 @@ if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="Simple tool for dowloading datasets for clickhouse from S3")
parser.add_argument('--dataset-names', required=True, nargs='+', choices=AVAILABLE_DATASETS.keys())
parser.add_argument('--dataset-names', required=True, nargs='+', choices=list(AVAILABLE_DATASETS.keys()))
parser.add_argument('--url-prefix', default=DEFAULT_URL)
parser.add_argument('--clickhouse-data-path', default='/var/lib/clickhouse/')


@ -6,7 +6,7 @@ RUN echo "deb [trusted=yes] http://apt.llvm.org/bionic/ llvm-toolchain-bionic-9
RUN apt-get update -y \
&& env DEBIAN_FRONTEND=noninteractive \
apt-get install --yes --no-install-recommends \
python-requests
python3-requests
COPY s3downloader /s3downloader
COPY run.sh /run.sh


@ -1,15 +1,15 @@
#!/bin/bash
kill_clickhouse () {
kill `pgrep -u clickhouse` 2>/dev/null
kill "$(pgrep -u clickhouse)" 2>/dev/null
for i in {1..10}
for _ in {1..10}
do
if ! kill -0 `pgrep -u clickhouse`; then
if ! kill -0 "$(pgrep -u clickhouse)"; then
echo "No clickhouse process"
break
else
echo "Process" `pgrep -u clickhouse` "still alive"
echo "Process $(pgrep -u clickhouse) still alive"
sleep 10
fi
done
@ -20,19 +20,19 @@ start_clickhouse () {
}
wait_llvm_profdata () {
while kill -0 `pgrep llvm-profdata-10`;
while kill -0 "$(pgrep llvm-profdata-10)"
do
echo "Waiting for profdata" `pgrep llvm-profdata-10` "still alive"
echo "Waiting for profdata $(pgrep llvm-profdata-10) still alive"
sleep 3
done
}
merge_client_files_in_background () {
client_files=`ls /client_*profraw 2>/dev/null`
if [ ! -z "$client_files" ]
client_files=$(ls /client_*profraw 2>/dev/null)
if [ -n "$client_files" ]
then
llvm-profdata-10 merge -sparse $client_files -o merged_client_`date +%s`.profraw
rm $client_files
llvm-profdata-10 merge -sparse "$client_files" -o "merged_client_$(date +%s).profraw"
rm "$client_files"
fi
}
@ -66,12 +66,13 @@ function start()
fi
timeout 120 service clickhouse-server start
sleep 0.5
counter=$(($counter + 1))
counter=$((counter + 1))
done
}
start
# shellcheck disable=SC2086 # No quotes because I want to split it into words.
if ! /s3downloader --dataset-names $DATASETS; then
echo "Cannot download datatsets"
exit 1
@ -100,11 +101,11 @@ LLVM_PROFILE_FILE='client_%h_%p_%m.profraw' clickhouse-client --query "RENAME TA
LLVM_PROFILE_FILE='client_%h_%p_%m.profraw' clickhouse-client --query "RENAME TABLE datasets.visits_v1 TO test.visits"
LLVM_PROFILE_FILE='client_%h_%p_%m.profraw' clickhouse-client --query "SHOW TABLES FROM test"
if cat /usr/bin/clickhouse-test | grep -q -- "--use-skip-list"; then
if grep -q -- "--use-skip-list" /usr/bin/clickhouse-test; then
SKIP_LIST_OPT="--use-skip-list"
fi
LLVM_PROFILE_FILE='client_%h_%p_%m.profraw' clickhouse-test --testname --shard --zookeeper --no-stateless "$SKIP_LIST_OPT" $ADDITIONAL_OPTIONS $SKIP_TESTS_OPTION 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee test_output/test_result.txt
LLVM_PROFILE_FILE='client_%h_%p_%m.profraw' clickhouse-test --testname --shard --zookeeper --no-stateless "$SKIP_LIST_OPT" "$ADDITIONAL_OPTIONS" "$SKIP_TESTS_OPTION" 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee test_output/test_result.txt
kill_clickhouse


@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import sys
@ -74,7 +74,7 @@ if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="Simple tool for dowloading datasets for clickhouse from S3")
parser.add_argument('--dataset-names', required=True, nargs='+', choices=AVAILABLE_DATASETS.keys())
parser.add_argument('--dataset-names', required=True, nargs='+', choices=list(AVAILABLE_DATASETS.keys()))
parser.add_argument('--url-prefix', default=DEFAULT_URL)
parser.add_argument('--clickhouse-data-path', default='/var/lib/clickhouse/')


@ -12,10 +12,10 @@ RUN apt-get update -y \
ncdu \
netcat-openbsd \
openssl \
python \
python-lxml \
python-requests \
python-termcolor \
python3 \
python3-lxml \
python3-requests \
python3-termcolor \
qemu-user-static \
sudo \
telnet \


@ -13,8 +13,8 @@ dpkg -i package_folder/clickhouse-test_*.deb
service clickhouse-server start && sleep 5
if cat /usr/bin/clickhouse-test | grep -q -- "--use-skip-list"; then
if grep -q -- "--use-skip-list" /usr/bin/clickhouse-test; then
SKIP_LIST_OPT="--use-skip-list"
fi
clickhouse-test --testname --shard --zookeeper "$SKIP_LIST_OPT" $ADDITIONAL_OPTIONS $SKIP_TESTS_OPTION 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee test_output/test_result.txt
clickhouse-test --testname --shard --zookeeper "$SKIP_LIST_OPT" "$ADDITIONAL_OPTIONS" "$SKIP_TESTS_OPTION" 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee test_output/test_result.txt


@ -3,10 +3,10 @@ FROM yandex/clickhouse-test-base
RUN apt-get update -y && \
apt-get install -y --no-install-recommends \
python-pip \
python-setuptools
python3-pip \
python3-setuptools
RUN pip install \
RUN python3 -m pip install \
pytest \
pytest-html \
pytest-timeout \
@ -17,4 +17,4 @@ CMD dpkg -i package_folder/clickhouse-common-static_*.deb; \
dpkg -i package_folder/clickhouse-server_*.deb; \
dpkg -i package_folder/clickhouse-client_*.deb; \
dpkg -i package_folder/clickhouse-test_*.deb; \
python -m pytest /usr/share/clickhouse-test/queries -n $(nproc) --html=test_output/report.html --self-contained-html
python3 -m pytest /usr/share/clickhouse-test/queries -n $(nproc) --html=test_output/report.html --self-contained-html


@ -54,10 +54,10 @@ RUN apt-get --allow-unauthenticated update -y \
perl \
pigz \
pkg-config \
python \
python-lxml \
python-requests \
python-termcolor \
python3 \
python3-lxml \
python3-requests \
python3-termcolor \
qemu-user-static \
sudo \
telnet \


@ -13,8 +13,8 @@ dpkg -i package_folder/clickhouse-test_*.deb
service clickhouse-server start && sleep 5
if cat /usr/bin/clickhouse-test | grep -q -- "--use-skip-list"; then
if grep -q -- "--use-skip-list" /usr/bin/clickhouse-test; then
SKIP_LIST_OPT="--use-skip-list"
fi
clickhouse-test --testname --shard --zookeeper "$SKIP_LIST_OPT" $ADDITIONAL_OPTIONS $SKIP_TESTS_OPTION 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee test_output/test_result.txt
clickhouse-test --testname --shard --zookeeper "$SKIP_LIST_OPT" "$ADDITIONAL_OPTIONS" "$SKIP_TESTS_OPTION" 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee test_output/test_result.txt


@ -12,10 +12,10 @@ RUN apt-get update -y \
fakeroot \
debhelper \
expect \
python \
python-lxml \
python-termcolor \
python-requests \
python3 \
python3-lxml \
python3-termcolor \
python3-requests \
sudo \
openssl \
ncdu \


@ -1,24 +1,24 @@
#!/bin/bash
kill_clickhouse () {
echo "clickhouse pids" `ps aux | grep clickhouse` | ts '%Y-%m-%d %H:%M:%S'
kill `pgrep -u clickhouse` 2>/dev/null
echo "clickhouse pids $(pgrep -u clickhouse)" | ts '%Y-%m-%d %H:%M:%S'
kill "$(pgrep -u clickhouse)" 2>/dev/null
for i in {1..10}
for _ in {1..10}
do
if ! kill -0 `pgrep -u clickhouse`; then
if ! kill -0 "$(pgrep -u clickhouse)"; then
echo "No clickhouse process" | ts '%Y-%m-%d %H:%M:%S'
break
else
echo "Process" `pgrep -u clickhouse` "still alive" | ts '%Y-%m-%d %H:%M:%S'
echo "Process $(pgrep -u clickhouse) still alive" | ts '%Y-%m-%d %H:%M:%S'
sleep 10
fi
done
echo "Will try to send second kill signal for sure"
kill `pgrep -u clickhouse` 2>/dev/null
kill "$(pgrep -u clickhouse)" 2>/dev/null
sleep 5
echo "clickhouse pids" `ps aux | grep clickhouse` | ts '%Y-%m-%d %H:%M:%S'
echo "clickhouse pids $(pgrep -u clickhouse)" | ts '%Y-%m-%d %H:%M:%S'
}
start_clickhouse () {
@ -47,11 +47,11 @@ start_clickhouse
sleep 10
if cat /usr/bin/clickhouse-test | grep -q -- "--use-skip-list"; then
if grep -q -- "--use-skip-list" /usr/bin/clickhouse-test; then
SKIP_LIST_OPT="--use-skip-list"
fi
LLVM_PROFILE_FILE='client_coverage.profraw' clickhouse-test --testname --shard --zookeeper "$SKIP_LIST_OPT" $ADDITIONAL_OPTIONS $SKIP_TESTS_OPTION 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee test_output/test_result.txt
LLVM_PROFILE_FILE='client_coverage.profraw' clickhouse-test --testname --shard --zookeeper "$SKIP_LIST_OPT" "$ADDITIONAL_OPTIONS" "$SKIP_TESTS_OPTION" 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee test_output/test_result.txt
kill_clickhouse


@ -10,10 +10,10 @@ RUN apt-get update -y \
debhelper \
parallel \
expect \
python \
python-lxml \
python-termcolor \
python-requests \
python3 \
python3-lxml \
python3-termcolor \
python3-requests \
curl \
sudo \
openssl \


@ -13,7 +13,7 @@ function stop()
timeout 120 service clickhouse-server stop
# Wait for process to disappear from processlist and also try to kill zombies.
while kill -9 $(pidof clickhouse-server)
while kill -9 "$(pidof clickhouse-server)"
do
echo "Killed clickhouse-server"
sleep 0.5
@ -35,17 +35,21 @@ function start()
fi
timeout 120 service clickhouse-server start
sleep 0.5
counter=$(($counter + 1))
counter=$((counter + 1))
done
}
# install test configs
/usr/share/clickhouse-test/config/install.sh
# for clickhouse-server (via service)
echo "ASAN_OPTIONS='malloc_context_size=10 verbosity=1 allocator_release_to_os_interval_ms=10000'" >> /etc/environment
# for clickhouse-client
export ASAN_OPTIONS='malloc_context_size=10 verbosity=1 allocator_release_to_os_interval_ms=10000'
start
# shellcheck disable=SC2086 # No quotes because I want to split it into words.
/s3downloader --dataset-names $DATASETS
chmod 777 -R /var/lib/clickhouse
clickhouse-client --query "ATTACH DATABASE IF NOT EXISTS datasets ENGINE = Ordinary"


@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from multiprocessing import cpu_count
from subprocess import Popen, check_call


@ -2,17 +2,19 @@
set -e -x
# Not sure why shellcheck complains that rc is not assigned before it is referenced.
# shellcheck disable=SC2154
trap 'rc=$?; echo EXITED WITH: $rc; exit $rc' EXIT
# CLI option to prevent rebuilding images, just re-run tests with images leftover from previous time
readonly NO_REBUILD_FLAG="--no-rebuild"
readonly CLICKHOUSE_DOCKER_DIR="$(realpath ${1})"
readonly CLICKHOUSE_DOCKER_DIR="$(realpath "${1}")"
readonly CLICKHOUSE_PACKAGES_ARG="${2}"
CLICKHOUSE_SERVER_IMAGE="${3}"
if [ ${CLICKHOUSE_PACKAGES_ARG} != ${NO_REBUILD_FLAG} ]; then
readonly CLICKHOUSE_PACKAGES_DIR="$(realpath ${2})" # or --no-rebuild
if [ "${CLICKHOUSE_PACKAGES_ARG}" != "${NO_REBUILD_FLAG}" ]; then
readonly CLICKHOUSE_PACKAGES_DIR="$(realpath "${2}")" # or --no-rebuild
fi
@ -25,7 +27,7 @@ fi
# TODO: optionally mount most recent clickhouse-test and queries directory from local machine
if [ ${CLICKHOUSE_PACKAGES_ARG} != ${NO_REBUILD_FLAG} ]; then
if [ "${CLICKHOUSE_PACKAGES_ARG}" != "${NO_REBUILD_FLAG}" ]; then
docker build --network=host \
-f "${CLICKHOUSE_DOCKER_DIR}/test/stateless/clickhouse-statelest-test-runner.Dockerfile" \
--target clickhouse-test-runner-base \
@ -49,7 +51,7 @@ fi
if [ -z "${CLICKHOUSE_SERVER_IMAGE}" ]; then
CLICKHOUSE_SERVER_IMAGE="yandex/clickhouse-server:local"
if [ ${CLICKHOUSE_PACKAGES_ARG} != ${NO_REBUILD_FLAG} ]; then
if [ "${CLICKHOUSE_PACKAGES_ARG}" != "${NO_REBUILD_FLAG}" ]; then
docker build --network=host \
-f "${CLICKHOUSE_DOCKER_DIR}/server/local.Dockerfile" \
--target clickhouse-server-base \


@ -7,7 +7,7 @@ set +e
reties=0
while true; do
docker info &>/dev/null && break
reties=$[$reties+1]
reties=$((reties+1))
if [[ $reties -ge 100 ]]; then # 10 sec max
echo "Can't start docker daemon, timeout exceeded." >&2
exit 1;


@ -116,7 +116,7 @@ ninja
Example for Fedora Rawhide:
``` bash
sudo yum update
yum --nogpg install git cmake make gcc-c++ python2
yum --nogpg install git cmake make gcc-c++ python3
git clone --recursive https://github.com/ClickHouse/ClickHouse.git
mkdir build && cd build
cmake ../ClickHouse


@ -165,6 +165,22 @@ Similar to GraphiteMergeTree, the Kafka engine supports extended configuration u
For a list of possible configuration options, see the [librdkafka configuration reference](https://github.com/edenhill/librdkafka/blob/master/CONFIGURATION.md). Use the underscore (`_`) instead of a dot in the ClickHouse configuration. For example, `check.crcs=true` will be `<check_crcs>true</check_crcs>`.
### Kerberos support {#kafka-kerberos-support}
To deal with Kerberos-aware Kafka, add a `security_protocol` child element with the `sasl_plaintext` value. This is enough if a Kerberos ticket-granting ticket has been obtained and cached by OS facilities.
ClickHouse can maintain Kerberos credentials using a keytab file; consider the `sasl_kerberos_service_name`, `sasl_kerberos_keytab`, `sasl_kerberos_principal` and `sasl_kerberos_kinit_cmd` child elements.
Example:
``` xml
<!-- Kerberos-aware Kafka -->
<kafka>
<security_protocol>SASL_PLAINTEXT</security_protocol>
<sasl_kerberos_keytab>/home/kafkauser/kafkauser.keytab</sasl_kerberos_keytab>
<sasl_kerberos_principal>kafkauser/kafkahost@EXAMPLE.COM</sasl_kerberos_principal>
</kafka>
```
## Virtual Columns {#virtual-columns}
- `_topic` — Kafka topic.


@ -20,6 +20,7 @@ toc_title: Client Libraries
- [simpod/clickhouse-client](https://packagist.org/packages/simpod/clickhouse-client)
- [seva-code/php-click-house-client](https://packagist.org/packages/seva-code/php-click-house-client)
- [SeasClick C++ client](https://github.com/SeasX/SeasClick)
- [one-ck](https://github.com/lizhichao/one-ck)
- Go
- [clickhouse](https://github.com/kshvakov/clickhouse/)
- [go-clickhouse](https://github.com/roistat/go-clickhouse)


@ -66,6 +66,32 @@ If no conditions met for a data part, ClickHouse uses the `lz4` compression.
</compression>
```
## custom_settings_prefixes {#custom_settings_prefixes}
List of prefixes for [custom settings](../../operations/settings/index.md#custom_settings). The prefixes must be separated with commas.
**Example**
```xml
<custom_settings_prefixes>custom_</custom_settings_prefixes>
```
**See Also**
- [Custom settings](../../operations/settings/index.md#custom_settings)
## core_dump
Configures the soft limit for the core dump file size; one gigabyte by default.
```xml
<core_dump>
<size_limit>1073741824</size_limit>
</core_dump>
```
(Hard limit is configured via system tools)
## default\_database {#default-database}
The default database.
@ -405,7 +431,7 @@ Limits total RAM usage by the ClickHouse server.
Possible values:
- Positive integer.
- 0 — Unlimited.
- 0 (auto).
Default value: `0`.


@ -28,4 +28,30 @@ Ways to configure settings, in order of priority:
Settings that can only be made in the server config file are not covered in this section.
## Custom Settings {#custom_settings}
In addition to the common [settings](../../operations/settings/settings.md), users can define custom settings.
A custom setting name must begin with one of the predefined prefixes. The list of these prefixes must be declared in the [custom_settings_prefixes](../../operations/server-configuration-parameters/settings.md#custom_settings_prefixes) parameter in the server configuration file.
```xml
<custom_settings_prefixes>custom_</custom_settings_prefixes>
```
To define a custom setting, use the `SET` command:
```sql
SET custom_a = 123;
```
To get the current value of a custom setting, use the `getSetting()` function:
```sql
SELECT getSetting('custom_a');
```
**See Also**
- [Server Configuration Settings](../../operations/server-configuration-parameters/settings.md)
[Original article](https://clickhouse.tech/docs/en/operations/settings/) <!--hide-->


@ -70,6 +70,35 @@ Works with tables in the MergeTree family.
If `force_primary_key=1`, ClickHouse checks to see if the query has a primary key condition that can be used for restricting data ranges. If there is no suitable condition, it throws an exception. However, it does not check whether the condition reduces the amount of data to read. For more information about data ranges in MergeTree tables, see [MergeTree](../../engines/table-engines/mergetree-family/mergetree.md).
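
For illustration, a hedged sketch reusing the `data_01515` table created in the next section; with the setting on, a query must constrain the primary key or it throws:

```sql
SET force_primary_key = 1;

SELECT * FROM data_01515;               -- throws: the primary key is not used
SELECT * FROM data_01515 WHERE key = 0; -- Ok: condition on the ORDER BY column
```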
## force\_data\_skipping\_indices {#settings-force_data_skipping_indices}
Disables query execution if the passed data-skipping indices weren't used.
Consider the following example:
```sql
CREATE TABLE data_01515
(
key Int,
d1 Int,
d1_null Nullable(Int),
INDEX d1_idx d1 TYPE minmax GRANULARITY 1,
INDEX d1_null_idx assumeNotNull(d1_null) TYPE minmax GRANULARITY 1
)
Engine=MergeTree()
ORDER BY key;
SELECT * FROM data_01515;
SELECT * FROM data_01515 SETTINGS force_data_skipping_indices=''; -- query will produce CANNOT_PARSE_TEXT error.
SELECT * FROM data_01515 SETTINGS force_data_skipping_indices='d1_idx'; -- query will produce INDEX_NOT_USED error.
SELECT * FROM data_01515 WHERE d1 = 0 SETTINGS force_data_skipping_indices='d1_idx'; -- Ok.
SELECT * FROM data_01515 WHERE d1 = 0 SETTINGS force_data_skipping_indices='`d1_idx`'; -- Ok (example of full featured parser).
SELECT * FROM data_01515 WHERE d1 = 0 SETTINGS force_data_skipping_indices='`d1_idx`, d1_null_idx'; -- query will produce INDEX_NOT_USED error, since d1_null_idx is not used.
SELECT * FROM data_01515 WHERE d1 = 0 AND assumeNotNull(d1_null) = 0 SETTINGS force_data_skipping_indices='`d1_idx`, d1_null_idx'; -- Ok.
```
Works with tables in the MergeTree family.
## format\_schema {#format-schema}
This parameter is useful when you are using formats that require a schema definition, such as [Capn Proto](https://capnproto.org/) or [Protobuf](https://developers.google.com/protocol-buffers/). The value depends on the format.
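
As an illustrative sketch (the schema file, message name and column layout are hypothetical; the `.proto` file is looked up in the server's `format_schemas` directory):

```sql
SET format_schema = 'schema.proto:MyMessage';
SELECT * FROM file('data.pb', 'Protobuf', 'id UInt32, name String');
```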
@ -1144,9 +1173,9 @@ See also:
## insert\_quorum\_timeout {#settings-insert_quorum_timeout}
Write to quorum timeout in seconds. If the timeout has passed and no write has taken place yet, ClickHouse will generate an exception and the client must repeat the query to write the same block to the same or any other replica.
Write to quorum timeout in milliseconds. If the timeout has passed and no write has taken place yet, ClickHouse will generate an exception and the client must repeat the query to write the same block to the same or any other replica.
Default value: 60 seconds.
Default value: 600000 milliseconds (ten minutes).
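
A hedged sketch under the new millisecond semantics (the replicated table is hypothetical and needs at least two live replicas):

```sql
SET insert_quorum = 2, insert_quorum_timeout = 60000; -- 60000 ms = 1 minute

-- Throws if two replicas do not acknowledge the write within the timeout:
INSERT INTO replicated_table_hypothetical VALUES (1);
```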
See also:
@ -1565,7 +1594,7 @@ See also:
## allow\_introspection\_functions {#settings-allow_introspection_functions}
Enables of disables [introspections functions](../../sql-reference/functions/introspection.md) for query profiling.
Enables or disables [introspection functions](../../sql-reference/functions/introspection.md) for query profiling.
Possible values:
@ -2027,3 +2056,14 @@ Result:
```
[Original article](https://clickhouse.tech/docs/en/operations/settings/settings/) <!-- hide -->
## allow_experimental_bigint_types {#allow_experimental_bigint_types}
Enables or disables integer values exceeding the range that is supported by the int data type.
Possible values:
- 1 — The bigint data type is enabled.
- 0 — The bigint data type is disabled.
Default value: `0`.
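
A minimal sketch of the setting in action (the table name is illustrative):

```sql
SET allow_experimental_bigint_types = 1;

CREATE TABLE bigint_demo (x UInt256) ENGINE = Memory;
INSERT INTO bigint_demo VALUES (340282366920938463463374607431768211456); -- 2^128, beyond UInt64
SELECT x FROM bigint_demo;
```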


@ -33,6 +33,7 @@ Columns:
- `'ExceptionWhileProcessing' = 4` — Exception during the query execution.
- `event_date` ([Date](../../sql-reference/data-types/date.md)) — Query starting date.
- `event_time` ([DateTime](../../sql-reference/data-types/datetime.md)) — Query starting time.
- `event_time_microseconds` ([DateTime](../../sql-reference/data-types/datetime.md)) — Query starting time with microsecond precision.
- `query_start_time` ([DateTime](../../sql-reference/data-types/datetime.md)) — Start time of query execution.
- `query_start_time_microseconds` ([DateTime64](../../sql-reference/data-types/datetime64.md)) — Start time of query execution with microsecond precision.
- `query_duration_ms` ([UInt64](../../sql-reference/data-types/int-uint.md#uint-ranges)) — Duration of query execution in milliseconds.
@ -84,16 +85,18 @@ Columns:
**Example**
``` sql
SELECT * FROM system.query_log LIMIT 1 FORMAT Vertical;
SELECT * FROM system.query_log LIMIT 1 \G
```
``` text
Row 1:
──────
type: QueryStart
event_date: 2020-05-13
event_time: 2020-05-13 14:02:28
query_start_time: 2020-05-13 14:02:28
event_date: 2020-09-11
event_time: 2020-09-11 10:08:17
event_time_microseconds: 2020-09-11 10:08:17.063321
query_start_time: 2020-09-11 10:08:17
query_start_time_microseconds: 2020-09-11 10:08:17.063321
query_duration_ms: 0
read_rows: 0
read_bytes: 0
@ -102,36 +105,37 @@ written_bytes: 0
result_rows: 0
result_bytes: 0
memory_usage: 0
query: SELECT 1
current_database: default
query: INSERT INTO test1 VALUES
exception_code: 0
exception:
stack_trace:
is_initial_query: 1
user: default
query_id: 5e834082-6f6d-4e34-b47b-cd1934f4002a
query_id: 50a320fd-85a8-49b8-8761-98a86bcbacef
address: ::ffff:127.0.0.1
port: 57720
port: 33452
initial_user: default
initial_query_id: 5e834082-6f6d-4e34-b47b-cd1934f4002a
initial_query_id: 50a320fd-85a8-49b8-8761-98a86bcbacef
initial_address: ::ffff:127.0.0.1
initial_port: 57720
initial_port: 33452
interface: 1
os_user: bayonet
client_hostname: clickhouse.ru-central1.internal
client_name: ClickHouse client
client_revision: 54434
os_user: bharatnc
client_hostname: tower
client_name: ClickHouse
client_revision: 54437
client_version_major: 20
client_version_minor: 4
client_version_patch: 1
client_version_minor: 7
client_version_patch: 2
http_method: 0
http_user_agent:
quota_key:
revision: 54434
revision: 54440
thread_ids: []
ProfileEvents.Names: []
ProfileEvents.Values: []
Settings.Names: ['use_uncompressed_cache','load_balancing','log_queries','max_memory_usage']
Settings.Values: ['0','random','1','10000000000']
Settings.Names: ['use_uncompressed_cache','load_balancing','log_queries','max_memory_usage','allow_introspection_functions']
Settings.Values: ['0','random','1','10000000000','1']
```
**See Also**

View File

@ -15,6 +15,7 @@ Columns:
- `event_date` ([Date](../../sql-reference/data-types/date.md)) — The date when the thread has finished execution of the query.
- `event_time` ([DateTime](../../sql-reference/data-types/datetime.md)) — The date and time when the thread has finished execution of the query.
- `event_time_microseconds` ([DateTime](../../sql-reference/data-types/datetime.md)) — The date and time when the thread has finished execution of the query with microseconds precision.
- `query_start_time` ([DateTime](../../sql-reference/data-types/datetime.md)) — Start time of query execution.
- `query_start_time_microseconds` ([DateTime64](../../sql-reference/data-types/datetime64.md)) — Start time of query execution with microsecond precision.
- `query_duration_ms` ([UInt64](../../sql-reference/data-types/int-uint.md#uint-ranges)) — Duration of query execution.
@ -63,50 +64,51 @@ Columns:
**Example**
``` sql
SELECT * FROM system.query_thread_log LIMIT 1 FORMAT Vertical
SELECT * FROM system.query_thread_log LIMIT 1 \G
```
``` text
Row 1:
──────
event_date: 2020-05-13
event_time: 2020-05-13 14:02:28
query_start_time: 2020-05-13 14:02:28
query_duration_ms: 0
read_rows: 1
read_bytes: 1
written_rows: 0
written_bytes: 0
memory_usage: 0
peak_memory_usage: 0
thread_name: QueryPipelineEx
thread_id: 28952
master_thread_id: 28924
query: SELECT 1
event_date: 2020-09-11
event_time: 2020-09-11 10:08:17
event_time_microseconds: 2020-09-11 10:08:17.134042
query_start_time: 2020-09-11 10:08:17
query_start_time_microseconds: 2020-09-11 10:08:17.063150
query_duration_ms: 70
read_rows: 0
read_bytes: 0
written_rows: 1
written_bytes: 12
memory_usage: 4300844
peak_memory_usage: 4300844
thread_name: TCPHandler
thread_id: 638133
master_thread_id: 638133
query: INSERT INTO test1 VALUES
is_initial_query: 1
user: default
query_id: 5e834082-6f6d-4e34-b47b-cd1934f4002a
query_id: 50a320fd-85a8-49b8-8761-98a86bcbacef
address: ::ffff:127.0.0.1
port: 57720
port: 33452
initial_user: default
initial_query_id: 5e834082-6f6d-4e34-b47b-cd1934f4002a
initial_query_id: 50a320fd-85a8-49b8-8761-98a86bcbacef
initial_address: ::ffff:127.0.0.1
initial_port: 57720
initial_port: 33452
interface: 1
os_user: bayonet
client_hostname: clickhouse.ru-central1.internal
client_name: ClickHouse client
client_revision: 54434
os_user: bharatnc
client_hostname: tower
client_name: ClickHouse
client_revision: 54437
client_version_major: 20
client_version_minor: 4
client_version_patch: 1
client_version_minor: 7
client_version_patch: 2
http_method: 0
http_user_agent:
quota_key:
revision: 54434
ProfileEvents.Names: ['ContextLock','RealTimeMicroseconds','UserTimeMicroseconds','OSCPUWaitMicroseconds','OSCPUVirtualTimeMicroseconds']
ProfileEvents.Values: [1,97,81,5,81]
...
revision: 54440
ProfileEvents.Names: ['Query','InsertQuery','FileOpen','WriteBufferFromFileDescriptorWrite','WriteBufferFromFileDescriptorWriteBytes','ReadCompressedBytes','CompressedReadBufferBlocks','CompressedReadBufferBytes','IOBufferAllocs','IOBufferAllocBytes','FunctionExecute','CreatedWriteBufferOrdinary','DiskWriteElapsedMicroseconds','NetworkReceiveElapsedMicroseconds','NetworkSendElapsedMicroseconds','InsertedRows','InsertedBytes','SelectedRows','SelectedBytes','MergeTreeDataWriterRows','MergeTreeDataWriterUncompressedBytes','MergeTreeDataWriterCompressedBytes','MergeTreeDataWriterBlocks','MergeTreeDataWriterBlocksAlreadySorted','ContextLock','RWLockAcquiredReadLocks','RealTimeMicroseconds','UserTimeMicroseconds','SoftPageFaults','OSCPUVirtualTimeMicroseconds','OSWriteBytes','OSReadChars','OSWriteChars']
ProfileEvents.Values: [1,1,11,11,591,148,3,71,29,6533808,1,11,72,18,47,1,12,1,12,1,12,189,1,1,10,2,70853,2748,49,2747,45056,422,1520]
```
**See Also**

View File

@ -6,6 +6,7 @@ Columns:
- `event_date` (Date) — Date of the entry.
- `event_time` (DateTime) — Time of the entry.
- `event_time_microseconds` (DateTime) — Time of the entry with microseconds precision.
- `microseconds` (UInt32) — Microseconds of the entry.
- `thread_name` (String) — Name of the thread from which the logging was done.
- `thread_id` (UInt64) — OS thread ID.
@ -25,4 +26,28 @@ Columns:
- `source_file` (LowCardinality(String)) — Source file from which the logging was done.
- `source_line` (UInt64) — Source line from which the logging was done.
**Example**
``` sql
SELECT * FROM system.text_log LIMIT 1 \G
```
``` text
Row 1:
──────
event_date: 2020-09-10
event_time: 2020-09-10 11:23:07
event_time_microseconds: 2020-09-10 11:23:07.871397
microseconds: 871397
thread_name: clickhouse-serv
thread_id: 564917
level: Information
query_id:
logger_name: DNSCacheUpdater
message: Update period 15 seconds
revision: 54440
source_file: /ClickHouse/src/Interpreters/DNSCacheUpdater.cpp; void DB::DNSCacheUpdater::start()
source_line: 45
```
[Original article](https://clickhouse.tech/docs/en/operations/system_tables/text_log) <!--hide-->

View File

@ -12,6 +12,8 @@ Columns:
- `event_time` ([DateTime](../../sql-reference/data-types/datetime.md)) — Timestamp of the sampling moment.
- `event_time_microseconds` ([DateTime](../../sql-reference/data-types/datetime.md)) — Timestamp of the sampling moment with microseconds precision.
- `timestamp_ns` ([UInt64](../../sql-reference/data-types/int-uint.md)) — Timestamp of the sampling moment in nanoseconds.
- `revision` ([UInt32](../../sql-reference/data-types/int-uint.md)) — ClickHouse server build revision.
@ -38,13 +40,16 @@ SELECT * FROM system.trace_log LIMIT 1 \G
``` text
Row 1:
──────
event_date: 2019-11-15
event_time: 2019-11-15 15:09:38
revision: 54428
timer_type: Real
thread_number: 48
query_id: acc4d61f-5bd1-4a3e-bc91-2180be37c915
trace: [94222141367858,94222152240175,94222152325351,94222152329944,94222152330796,94222151449980,94222144088167,94222151682763,94222144088167,94222151682763,94222144088167,94222144058283,94222144059248,94222091840750,94222091842302,94222091831228,94222189631488,140509950166747,140509942945935]
event_date: 2020-09-10
event_time: 2020-09-10 11:23:09
event_time_microseconds: 2020-09-10 11:23:09.872924
timestamp_ns: 1599762189872924510
revision: 54440
trace_type: Memory
thread_id: 564963
query_id:
trace: [371912858,371912789,371798468,371799717,371801313,371790250,624462773,566365041,566440261,566445834,566460071,566459914,566459842,566459580,566459469,566459389,566459341,566455774,371993941,371988245,372158848,372187428,372187309,372187093,372185478,140222123165193,140222122205443]
size: 5244400
```
[Original article](https://clickhouse.tech/docs/en/operations/system_tables/trace_log) <!--hide-->

View File

@ -3,25 +3,27 @@ toc_priority: 42
toc_title: Decimal
---
# Decimal(P, S), Decimal32(S), Decimal64(S), Decimal128(S) {#decimalp-s-decimal32s-decimal64s-decimal128s}
# Decimal(P, S), Decimal32(S), Decimal64(S), Decimal128(S), Decimal256(S) {#decimalp-s-decimal32s-decimal64s-decimal128s-decimal256s}
Signed fixed-point numbers that keep precision during addition, subtraction, and multiplication operations. For division, the least significant digits are discarded (not rounded).
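For instance, a minimal sketch of the truncating division (the literals are illustrative):

``` sql
SELECT toDecimal32(2, 4) / 3 AS q;
-- 0.6666: the least significant digits are discarded, not rounded
```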
## Parameters {#parameters}
- P - precision. Valid range: \[ 1 : 38 \]. Determines how many decimal digits number can have (including fraction).
- P - precision. Valid range: \[ 1 : 76 \]. Determines how many decimal digits number can have (including fraction).
- S - scale. Valid range: \[ 0 : P \]. Determines how many decimal digits fraction can have.
Depending on P parameter value Decimal(P, S) is a synonym for:
- P from \[ 1 : 9 \] - for Decimal32(S)
- P from \[ 10 : 18 \] - for Decimal64(S)
- P from \[ 19 : 38 \] - for Decimal128(S)
- P from \[ 39 : 76 \] - for Decimal256(S)
## Decimal Value Ranges {#decimal-value-ranges}
- Decimal32(S) - ( -1 \* 10^(9 - S), 1 \* 10^(9 - S) )
- Decimal64(S) - ( -1 \* 10^(18 - S), 1 \* 10^(18 - S) )
- Decimal128(S) - ( -1 \* 10^(38 - S), 1 \* 10^(38 - S) )
- Decimal256(S) - ( -1 \* 10^(76 - S), 1 \* 10^(76 - S) )
For example, Decimal32(4) can contain numbers from -99999.9999 to 99999.9999 with 0.0001 step.
@ -38,6 +40,7 @@ Binary operations on Decimal result in wider result type (with any order of argu
- `Decimal64(S1) <op> Decimal32(S2) -> Decimal64(S)`
- `Decimal128(S1) <op> Decimal32(S2) -> Decimal128(S)`
- `Decimal128(S1) <op> Decimal64(S2) -> Decimal128(S)`
- `Decimal256(S1) <op> Decimal<32|64|128>(S2) -> Decimal256(S)`
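A quick way to observe the widening is `toTypeName` (a sketch; the resulting scale follows the rules below):

``` sql
SELECT toTypeName(toDecimal32(1, 2) + toDecimal64(1, 4));
-- Decimal(18, 4)
```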
Rules for scale:
@ -104,4 +107,8 @@ SELECT toDecimal32(1, 8) < 100
DB::Exception: Can't compare.
```
**See also**
- [isDecimalOverflow](../../sql-reference/functions/other-functions.md#is-decimal-overflow)
- [countDigits](../../sql-reference/functions/other-functions.md#count-digits)
[Original article](https://clickhouse.tech/docs/en/data_types/decimal/) <!--hide-->

View File

@ -1,9 +1,9 @@
---
toc_priority: 40
toc_title: UInt8, UInt16, UInt32, UInt64, Int8, Int16, Int32, Int64
toc_title: UInt8, UInt16, UInt32, UInt64, UInt256, Int8, Int16, Int32, Int64, Int128, Int256
---
# UInt8, UInt16, UInt32, UInt64, Int8, Int16, Int32, Int64 {#uint8-uint16-uint32-uint64-int8-int16-int32-int64}
# UInt8, UInt16, UInt32, UInt64, UInt256, Int8, Int16, Int32, Int64, Int128, Int256 {#uint8-uint16-uint32-uint64-uint256-int8-int16-int32-int64-int128-int256}
Fixed-length integers, with or without a sign.
@ -13,6 +13,8 @@ Fixed-length integers, with or without a sign.
- Int16 - \[-32768 : 32767\]
- Int32 - \[-2147483648 : 2147483647\]
- Int64 - \[-9223372036854775808 : 9223372036854775807\]
- Int128 - \[-170141183460469231731687303715884105728 : 170141183460469231731687303715884105727\]
- Int256 - \[-57896044618658097711785492504343953926634992332820282019728792003956564819968 : 57896044618658097711785492504343953926634992332820282019728792003956564819967\]
## Uint Ranges {#uint-ranges}
@ -20,5 +22,8 @@ Fixed-length integers, with or without a sign.
- UInt16 - \[0 : 65535\]
- UInt32 - \[0 : 4294967295\]
- UInt64 - \[0 : 18446744073709551615\]
- UInt256 - \[0 : 115792089237316195423570985008687907853269984665640564039457584007913129639935\]
UInt128 is not supported yet.
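As a sketch, assuming the `allow_experimental_bigint_types` setting is enabled, the boundary values can be selected directly:

``` sql
SET allow_experimental_bigint_types = 1;
SELECT toInt256('-57896044618658097711785492504343953926634992332820282019728792003956564819968') AS min_int256,
       toUInt256('115792089237316195423570985008687907853269984665640564039457584007913129639935') AS max_uint256;
```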
[Original article](https://clickhouse.tech/docs/en/data_types/int_uint/) <!--hide-->

View File

@ -357,7 +357,7 @@ SELECT date_trunc('hour', now())
## now {#now}
Accepts zero arguments and returns the current time at one of the moments of request execution.
Accepts zero or one argument (a timezone name) and returns the current time at one of the moments of request execution. If the `timezone` argument is provided, the current time in that timezone is returned.
This function returns a constant, even if the request took a long time to complete.
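For example (a sketch; the actual output depends on the server time and timezone configuration):

``` sql
SELECT now(), now('Europe/Moscow');
```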
## today {#today}

View File

@ -16,3 +16,82 @@ The [stochasticLinearRegression](../../sql-reference/aggregate-functions/referen
## stochasticLogisticRegression {#stochastic-logistic-regression}
The [stochasticLogisticRegression](../../sql-reference/aggregate-functions/reference/stochasticlogisticregression.md#agg_functions-stochasticlogisticregression) aggregate function implements the stochastic gradient descent method for the binary classification problem. Uses `evalMLMethod` to predict on new data.
## bayesAB {#bayesab}
Compares test groups (variants) and calculates for each group the probability that it is the best one. The first group is used as a control group.
**Syntax**
``` sql
bayesAB(distribution_name, higher_is_better, variant_names, x, y)
```
**Parameters**
- `distribution_name` — Name of the probability distribution. [String](../../sql-reference/data-types/string.md). Possible values:
- `beta` for [Beta distribution](https://en.wikipedia.org/wiki/Beta_distribution)
- `gamma` for [Gamma distribution](https://en.wikipedia.org/wiki/Gamma_distribution)
- `higher_is_better` — Boolean flag. [Boolean](../../sql-reference/data-types/boolean.md). Possible values:
- `0` — Lower values are considered to be better than higher ones.
- `1` — Higher values are considered to be better than lower ones.
- `variant_names` — Variant names. [Array](../../sql-reference/data-types/array.md)([String](../../sql-reference/data-types/string.md)).
- `x` — Numbers of tests for the corresponding variants. [Array](../../sql-reference/data-types/array.md)([Float64](../../sql-reference/data-types/float.md)).
- `y` — Numbers of successful tests for the corresponding variants. [Array](../../sql-reference/data-types/array.md)([Float64](../../sql-reference/data-types/float.md)).
!!! note "Note"
All three arrays must have the same size. All `x` and `y` values must be non-negative constant numbers. `y` cannot be larger than `x`.
**Returned values**
For each variant the function calculates:
- `beats_control` — long-term probability to outperform the first (control) variant
- `to_be_best` — long-term probability to outperform all other variants
Type: JSON.
**Example**
Query:
``` sql
SELECT bayesAB('beta', 1, ['Control', 'A', 'B'], [3000., 3000., 3000.], [100., 90., 110.]) FORMAT PrettySpace;
```
Result:
``` text
{
"data":[
{
"variant_name":"Control",
"x":3000,
"y":100,
"beats_control":0,
"to_be_best":0.22619
},
{
"variant_name":"A",
"x":3000,
"y":90,
"beats_control":0.23469,
"to_be_best":0.04671
},
{
"variant_name":"B",
"x":3000,
"y":110,
"beats_control":0.7580899999999999,
"to_be_best":0.7271
}
]
}
```
[Original article](https://clickhouse.tech/docs/en/query_language/functions/machine-learning-functions/) <!--hide-->

View File

@ -1491,4 +1491,115 @@ Result:
```
## getSetting {#getSetting}
Returns the current value of a [custom setting](../../operations/settings/index.md#custom_settings).
**Syntax**
```sql
getSetting('custom_setting');
```
**Parameter**
- `custom_setting` — The setting name. [String](../../sql-reference/data-types/string.md).
**Returned value**
- The current value of the setting.
**Example**
```sql
SET custom_a = 123;
SELECT getSetting('custom_a');
```
**Result**
```
123
```
**See Also**
- [Custom Settings](../../operations/settings/index.md#custom_settings)
## isDecimalOverflow {#is-decimal-overflow}
Checks whether the [Decimal](../../sql-reference/data-types/decimal.md#decimalp-s-decimal32s-decimal64s-decimal128s-decimal256s) value is out of its (or the specified) precision.
**Syntax**
``` sql
isDecimalOverflow(d, [p])
```
**Parameters**
- `d` — value. [Decimal](../../sql-reference/data-types/decimal.md#decimalp-s-decimal32s-decimal64s-decimal128s-decimal256s).
- `p` — precision. Optional. If omitted, the initial precision of the first argument is used. Using this parameter can be helpful when extracting data to another DBMS or file. [UInt8](../../sql-reference/data-types/int-uint.md#uint-ranges).
**Returned values**
- `1` — Decimal value has more digits than its precision allows.
- `0` — Decimal value satisfies the specified precision.
**Example**
Query:
``` sql
SELECT isDecimalOverflow(toDecimal32(1000000000, 0), 9),
isDecimalOverflow(toDecimal32(1000000000, 0)),
isDecimalOverflow(toDecimal32(-1000000000, 0), 9),
isDecimalOverflow(toDecimal32(-1000000000, 0));
```
Result:
``` text
1 1 1 1
```
## countDigits {#count-digits}
Returns the number of decimal digits needed to represent a value.
**Syntax**
``` sql
countDigits(x)
```
**Parameters**
- `x` — [Int](../../sql-reference/data-types/int-uint.md#uint8-uint16-uint32-uint64-uint256-int8-int16-int32-int64-int128-int256) or [Decimal](../../sql-reference/data-types/decimal.md#decimalp-s-decimal32s-decimal64s-decimal128s-decimal256s) value.
**Returned value**
Number of digits.
Type: [UInt8](../../sql-reference/data-types/int-uint.md#uint-ranges).
!!! note "Note"
For `Decimal` values, the function takes their scale into account: it calculates the result over the underlying integer type, which is `(value * scale)`. For example: `countDigits(42) = 2`, `countDigits(42.000) = 5`, `countDigits(0.04200) = 4`. That is, you may check decimal overflow for `Decimal64` with `countDigits(x) > 18`. It is a slow variant of [isDecimalOverflow](#is-decimal-overflow).
**Example**
Query:
``` sql
SELECT countDigits(toDecimal32(1, 9)), countDigits(toDecimal32(-1, 9)),
countDigits(toDecimal64(1, 18)), countDigits(toDecimal64(-1, 18)),
countDigits(toDecimal128(1, 38)), countDigits(toDecimal128(-1, 38));
```
Result:
``` text
10 10 19 19 39 39
```
[Original article](https://clickhouse.tech/docs/en/query_language/functions/other_functions/) <!--hide-->

View File

@ -487,4 +487,75 @@ Returns the CRC64 checksum of a string, using CRC-64-ECMA polynomial.
The result type is UInt64.
## normalizeQuery {#normalized-query}
Replaces literals, sequences of literals and complex aliases with placeholders.
**Syntax**
``` sql
normalizeQuery(x)
```
**Parameters**
- `x` — Sequence of characters. [String](../../sql-reference/data-types/string.md).
**Returned value**
- Sequence of characters with placeholders.
Type: [String](../../sql-reference/data-types/string.md).
**Example**
Query:
``` sql
SELECT normalizeQuery('[1, 2, 3, x]') AS query;
```
Result:
``` text
┌─query────┐
│ [?.., x] │
└──────────┘
```
## normalizedQueryHash {#normalized-query-hash}
Returns identical 64-bit hash values for similar queries with the values of literals removed. It helps to analyze the query log.
**Syntax**
``` sql
normalizedQueryHash(x)
```
**Parameters**
- `x` — Sequence of characters. [String](../../sql-reference/data-types/string.md).
**Returned value**
- Hash value.
Type: [UInt64](../../sql-reference/data-types/int-uint.md#uint-ranges).
**Example**
Query:
``` sql
SELECT normalizedQueryHash('SELECT 1 AS `xyz`') != normalizedQueryHash('SELECT 1 AS `abc`') AS res;
```
Result:
``` text
┌─res─┐
│ 1 │
└─────┘
```
[Original article](https://clickhouse.tech/docs/en/query_language/functions/string_functions/) <!--hide-->

View File

@ -360,6 +360,89 @@ Extracts a fragment of a string using a regular expression. If haystack do
Extracts all the fragments of a string using a regular expression. If `haystack` doesn't match the `pattern` regex, an empty string is returned. Returns an array of strings consisting of all matches to the regex. In general, the behavior is the same as the `extract` function (it takes the first subpattern, or the entire expression if there isn't a subpattern).
## extractAllGroupsHorizontal {#extractallgroups-horizontal}
Matches all groups of the `haystack` string using the `pattern` regular expression. Returns an array of arrays, where the first array includes all fragments matching the first group, the second array - matching the second group, etc.
!!! note "Note"
`extractAllGroupsHorizontal` function is slower than [extractAllGroupsVertical](#extractallgroups-vertical).
**Syntax**
``` sql
extractAllGroupsHorizontal(haystack, pattern)
```
**Parameters**
- `haystack` — Input string. Type: [String](../../sql-reference/data-types/string.md).
- `pattern` — Regular expression with [re2 syntax](https://github.com/google/re2/wiki/Syntax). Must contain groups, each group enclosed in parentheses. If `pattern` contains no groups, an exception is thrown. Type: [String](../../sql-reference/data-types/string.md).
**Returned value**
- Type: [Array](../../sql-reference/data-types/array.md).
If `haystack` doesn't match the `pattern` regex, an array of empty arrays is returned.
**Example**
Query:
``` sql
SELECT extractAllGroupsHorizontal('abc=111, def=222, ghi=333', '("[^"]+"|\\w+)=("[^"]+"|\\w+)')
```
Result:
``` text
┌─extractAllGroupsHorizontal('abc=111, def=222, ghi=333', '("[^"]+"|\\w+)=("[^"]+"|\\w+)')─┐
│ [['abc','def','ghi'],['111','222','333']] │
└──────────────────────────────────────────────────────────────────────────────────────────┘
```
**See also**
- [extractAllGroupsVertical](#extractallgroups-vertical)
## extractAllGroupsVertical {#extractallgroups-vertical}
Matches all groups of the `haystack` string using the `pattern` regular expression. Returns an array of arrays, where each array includes matching fragments from every group. Fragments are grouped in order of appearance in the `haystack`.
**Syntax**
``` sql
extractAllGroupsVertical(haystack, pattern)
```
**Parameters**
- `haystack` — Input string. Type: [String](../../sql-reference/data-types/string.md).
- `pattern` — Regular expression with [re2 syntax](https://github.com/google/re2/wiki/Syntax). Must contain groups, each group enclosed in parentheses. If `pattern` contains no groups, an exception is thrown. Type: [String](../../sql-reference/data-types/string.md).
**Returned value**
- Type: [Array](../../sql-reference/data-types/array.md).
If `haystack` doesn't match the `pattern` regex, an empty array is returned.
**Example**
Query:
``` sql
SELECT extractAllGroupsVertical('abc=111, def=222, ghi=333', '("[^"]+"|\\w+)=("[^"]+"|\\w+)')
```
Result:
``` text
┌─extractAllGroupsVertical('abc=111, def=222, ghi=333', '("[^"]+"|\\w+)=("[^"]+"|\\w+)')─┐
│ [['abc','111'],['def','222'],['ghi','333']] │
└────────────────────────────────────────────────────────────────────────────────────────┘
```
**See also**
- [extractAllGroupsHorizontal](#extractallgroups-horizontal)
## like(haystack, pattern), haystack LIKE pattern operator {#function-like}
Checks whether a string matches a simple regular expression.

View File

@ -11,7 +11,8 @@ When you convert a value from one to another data type, you should remember that
ClickHouse has the [same behavior as C++ programs](https://en.cppreference.com/w/cpp/language/implicit_conversion).
## toInt(8\|16\|32\|64) {#toint8163264}
## toInt(8\|16\|32\|64\|128\|256) {#toint8163264128256}
Converts an input value to the [Int](../../sql-reference/data-types/int-uint.md) data type. This function family includes:
@ -19,6 +20,8 @@ Converts an input value to the [Int](../../sql-reference/data-types/int-uint.md)
- `toInt16(expr)` — Results in the `Int16` data type.
- `toInt32(expr)` — Results in the `Int32` data type.
- `toInt64(expr)` — Results in the `Int64` data type.
- `toInt128(expr)` — Results in the `Int128` data type.
- `toInt256(expr)` — Results in the `Int256` data type.
**Parameters**
@ -26,7 +29,7 @@ Converts an input value to the [Int](../../sql-reference/data-types/int-uint.md)
**Returned value**
Integer value in the `Int8`, `Int16`, `Int32`, or `Int64` data type.
Integer value in the `Int8`, `Int16`, `Int32`, `Int64`, `Int128` or `Int256` data type.
Functions use [rounding towards zero](https://en.wikipedia.org/wiki/Rounding#Rounding_towards_zero), meaning they truncate fractional digits of numbers.
@ -44,9 +47,9 @@ SELECT toInt64(nan), toInt32(32), toInt16('16'), toInt8(8.8)
└──────────────────────┴─────────────┴───────────────┴─────────────┘
```
## toInt(8\|16\|32\|64)OrZero {#toint8163264orzero}
## toInt(8\|16\|32\|64\|128\|256)OrZero {#toint8163264128256orzero}
It takes an argument of type String and tries to parse it into Int (8 \| 16 \| 32 \| 64). If failed, returns 0.
It takes an argument of type String and tries to parse it into Int (8 \| 16 \| 32 \| 64 \| 128 \| 256). If failed, returns 0.
**Example**
@ -60,9 +63,9 @@ select toInt64OrZero('123123'), toInt8OrZero('123qwe123')
└─────────────────────────┴───────────────────────────┘
```
## toInt(8\|16\|32\|64)OrNull {#toint8163264ornull}
## toInt(8\|16\|32\|64\|128\|256)OrNull {#toint8163264128256ornull}
It takes an argument of type String and tries to parse it into Int (8 \| 16 \| 32 \| 64). If failed, returns NULL.
It takes an argument of type String and tries to parse it into Int (8 \| 16 \| 32 \| 64 \| 128 \| 256). If failed, returns NULL.
**Example**
@ -76,7 +79,7 @@ select toInt64OrNull('123123'), toInt8OrNull('123qwe123')
└─────────────────────────┴───────────────────────────┘
```
## toUInt(8\|16\|32\|64) {#touint8163264}
## toUInt(8\|16\|32\|64\|256) {#touint8163264256}
Converts an input value to the [UInt](../../sql-reference/data-types/int-uint.md) data type. This function family includes:
@ -84,6 +87,7 @@ Converts an input value to the [UInt](../../sql-reference/data-types/int-uint.md
- `toUInt16(expr)` — Results in the `UInt16` data type.
- `toUInt32(expr)` — Results in the `UInt32` data type.
- `toUInt64(expr)` — Results in the `UInt64` data type.
- `toUInt256(expr)` — Results in the `UInt256` data type.
**Parameters**
@ -91,7 +95,7 @@ Converts an input value to the [UInt](../../sql-reference/data-types/int-uint.md
**Returned value**
Integer value in the `UInt8`, `UInt16`, `UInt32`, or `UInt64` data type.
Integer value in the `UInt8`, `UInt16`, `UInt32`, `UInt64` or `UInt256` data type.
Functions use [rounding towards zero](https://en.wikipedia.org/wiki/Rounding#Rounding_towards_zero), meaning they truncate fractional digits of numbers.
@ -109,9 +113,9 @@ SELECT toUInt64(nan), toUInt32(-32), toUInt16('16'), toUInt8(8.8)
└─────────────────────┴───────────────┴────────────────┴──────────────┘
```
## toUInt(8\|16\|32\|64)OrZero {#touint8163264orzero}
## toUInt(8\|16\|32\|64\|256)OrZero {#touint8163264256orzero}
## toUInt(8\|16\|32\|64)OrNull {#touint8163264ornull}
## toUInt(8\|16\|32\|64\|256)OrNull {#touint8163264256ornull}
## toFloat(32\|64) {#tofloat3264}
@ -131,21 +135,23 @@ SELECT toUInt64(nan), toUInt32(-32), toUInt16('16'), toUInt8(8.8)
## toDateTimeOrNull {#todatetimeornull}
## toDecimal(32\|64\|128) {#todecimal3264128}
## toDecimal(32\|64\|128\|256) {#todecimal3264128256}
Converts `value` to the [Decimal](../../sql-reference/data-types/decimal.md) data type with scale of `S`. The `value` can be a number or a string. The `S` (scale) parameter specifies the number of decimal places.
- `toDecimal32(value, S)`
- `toDecimal64(value, S)`
- `toDecimal128(value, S)`
- `toDecimal256(value, S)`
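For example, a minimal sketch (the input literal is illustrative):

``` sql
SELECT toDecimal64('42.42', 4) AS d, toTypeName(d);
-- type: Decimal(18, 4)
```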
## toDecimal(32\|64\|128)OrNull {#todecimal3264128ornull}
## toDecimal(32\|64\|128\|256)OrNull {#todecimal3264128256ornull}
Converts an input string to a [Nullable(Decimal(P,S))](../../sql-reference/data-types/decimal.md) data type value. This family of functions includes:
- `toDecimal32OrNull(expr, S)` — Results in `Nullable(Decimal32(S))` data type.
- `toDecimal64OrNull(expr, S)` — Results in `Nullable(Decimal64(S))` data type.
- `toDecimal128OrNull(expr, S)` — Results in `Nullable(Decimal128(S))` data type.
- `toDecimal256OrNull(expr, S)` — Results in `Nullable(Decimal256(S))` data type.
These functions should be used instead of `toDecimal*()` functions, if you prefer to get a `NULL` value instead of an exception in the event of an input value parsing error.
@ -183,13 +189,16 @@ SELECT toDecimal32OrNull(toString(-1.111), 2) AS val, toTypeName(val)
└──────┴────────────────────────────────────────────────────┘
```
## toDecimal(32\|64\|128)OrZero {#todecimal3264128orzero}
## toDecimal(32\|64\|128\|256)OrZero {#todecimal3264128256orzero}
Converts an input value to the [Decimal(P,S)](../../sql-reference/data-types/decimal.md) data type. This family of functions includes:
- `toDecimal32OrZero( expr, S)` — Results in `Decimal32(S)` data type.
- `toDecimal64OrZero( expr, S)` — Results in `Decimal64(S)` data type.
- `toDecimal128OrZero( expr, S)` — Results in `Decimal128(S)` data type.
- `toDecimal256OrZero( expr, S)` — Results in `Decimal256(S)` data type.
These functions should be used instead of `toDecimal*()` functions, if you prefer to get a `0` value instead of an exception in the event of an input value parsing error.
@ -729,4 +738,45 @@ SELECT fromUnixTimestamp64Milli(i64, 'UTC')
└──────────────────────────────────────┘
```
## formatRow {#formatrow}
Converts arbitrary expressions into a string via the given format.
**Syntax**
``` sql
formatRow(format, x, y, ...)
```
**Parameters**
- `format` — Text format. For example, [CSV](../../interfaces/formats.md#csv), [TSV](../../interfaces/formats.md#tabseparated).
- `x`,`y`, ... — Expressions.
**Returned value**
- A formatted string (for text formats it is usually terminated with a newline character).
**Example**
Query:
``` sql
SELECT formatRow('CSV', number, 'good')
FROM numbers(3)
```
Result:
``` text
┌─formatRow('CSV', number, 'good')─┐
│ 0,"good"
│ 1,"good"
│ 2,"good"
└──────────────────────────────────┘
```
[Original article](https://clickhouse.tech/docs/en/query_language/functions/type_conversion_functions/) <!--hide-->

View File

@ -102,7 +102,7 @@ Ejemplo de OpenSUSE Tumbleweed:
Ejemplo de Fedora Rawhide:
sudo yum update
yum --nogpg install git cmake make gcc-c++ python2
yum --nogpg install git cmake make gcc-c++ python3
git clone --recursive https://github.com/ClickHouse/ClickHouse.git
mkdir build && cd build
cmake ../ClickHouse

View File

@ -1,15 +1,15 @@
---
machine_translated: true
machine_translated_rev: 72537a2d527c63c07aa5d2361a8829f3895cf2bd
machine_translated: false
machine_translated_rev:
toc_priority: 0
toc_title: "Descripci\xF3n"
toc_title: "Descripción"
---
# ¿Qué es ClickHouse? {#what-is-clickhouse}
ClickHouse es un sistema de gestión de bases de datos orientado a columnas (DBMS) para el procesamiento analítico en línea de consultas (OLAP).
ClickHouse es un sistema de gestión de bases de datos (DBMS), orientado a columnas, para el procesamiento analítico de consultas en línea (OLAP).
En un “normal” DBMS orientado a filas, los datos se almacenan en este orden:
En un DBMS “normal”, orientado a filas, los datos se almacenan en este orden:
| Fila | Argumento | JavaEnable | Titular | GoodEvent | EventTime |
|------|-------------|------------|---------------------------|-----------|---------------------|
@ -36,7 +36,7 @@ Estos ejemplos solo muestran el orden en el que se organizan los datos. Los valo
Ejemplos de un DBMS orientado a columnas: Vertica, Paraccel (Actian Matrix y Amazon Redshift), Sybase IQ, Exasol, Infobright, InfiniDB, MonetDB (VectorWise y Actian Vector), LucidDB, SAP HANA, Google Dremel, Google PowerDrill, Druid y kdb+.
Different orders for storing data are better suited to different scenarios. The data access scenario refers to what queries are made, how often, and in what proportion; how much data is read for each type of query rows, columns, and bytes; the relationship between reading and updating data; the working size of the data and how locally it is used; whether transactions are used, and how isolated they are; requirements for data replication and logical integrity; requirements for latency and throughput for each type of query, and so on.
Los diferentes modos de ordenar los datos al guardarlos se adecúan mejor a diferentes escenarios. El escenario de acceso a los datos se refiere a qué consultas se hacen, con qué frecuencia y en qué proporción; cuántos datos se leen para cada tipo de consulta - filas, columnas y bytes; la relación entre lectura y actualización de datos; el tamaño de trabajo de los datos y qué tan localmente son usados; si se usan transacciones y qué tan aisladas están; requerimientos de replicación de los datos y de integridad lógica; requerimientos de latencia y caudal (throughput) para cada tipo de consulta, y cosas por el estilo.
Cuanto mayor sea la carga en el sistema, más importante es personalizar el sistema para que se ajuste a los requisitos del escenario de uso, y más fina será esta personalización. No existe un sistema que sea igualmente adecuado para escenarios significativamente diferentes. Si un sistema es adaptable a un amplio conjunto de escenarios, bajo una carga alta el sistema manejará todos los escenarios igualmente mal, o funcionará bien para solo uno o algunos de los escenarios posibles.

Some files were not shown because too many files have changed in this diff.