Mirror of https://github.com/ClickHouse/ClickHouse.git, synced 2024-11-21 23:21:59 +00:00

Commit 6200433e12: Merge origin/master into tmp (using imerge)

.gitmodules (vendored), 3 lines changed
@@ -107,7 +107,6 @@
 [submodule "contrib/grpc"]
 path = contrib/grpc
 url = https://github.com/ClickHouse-Extras/grpc.git
-branch = v1.25.0
 [submodule "contrib/aws"]
 path = contrib/aws
 url = https://github.com/ClickHouse-Extras/aws-sdk-cpp.git
@@ -159,7 +158,7 @@
 url = https://github.com/openldap/openldap.git
 [submodule "contrib/AMQP-CPP"]
 path = contrib/AMQP-CPP
-url = https://github.com/CopernicaMarketingSoftware/AMQP-CPP.git
+url = https://github.com/ClickHouse-Extras/AMQP-CPP.git
 [submodule "contrib/cassandra"]
 path = contrib/cassandra
 url = https://github.com/ClickHouse-Extras/cpp-driver.git
@@ -300,6 +300,11 @@ if (COMPILER_CLANG)
 option(ENABLE_THINLTO "Clang-specific link time optimization" ON)
 endif()
 
+# Set new experimental pass manager, it's a performance, build time and binary size win.
+# Can be removed after https://reviews.llvm.org/D66490 merged and released to at least two versions of clang.
+set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fexperimental-new-pass-manager")
+set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fexperimental-new-pass-manager")
+
 # We cannot afford to use LTO when compiling unit tests, and it's not enough
 # to only supply -fno-lto at the final linking stage. So we disable it
 # completely.

@@ -513,7 +518,13 @@ endif ()
 macro (add_executable target)
 # invoke built-in add_executable
 # explicitly acquire and interpose malloc symbols by clickhouse_malloc
-_add_executable (${ARGV} $<TARGET_OBJECTS:clickhouse_malloc>)
+# if GLIBC_COMPATIBILITY is ON and ENABLE_THINLTO is on than provide memcpy symbol explicitly to neutrialize thinlto's libcall generation.
+if (GLIBC_COMPATIBILITY AND ENABLE_THINLTO)
+_add_executable (${ARGV} $<TARGET_OBJECTS:clickhouse_malloc> $<TARGET_OBJECTS:clickhouse_memcpy>)
+else ()
+_add_executable (${ARGV} $<TARGET_OBJECTS:clickhouse_malloc>)
+endif ()
+
 get_target_property (type ${target} TYPE)
 if (${type} STREQUAL EXECUTABLE)
 # operator::new/delete for executables (MemoryTracker stuff)
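For orientation, a minimal hedged sketch (not part of this commit) of what a caller sees after this macro override; the target and source names are made up:

# Hypothetical consumer code: a plain add_executable() call now goes through the
# overridden macro above, so the clickhouse_malloc objects (and, when both
# GLIBC_COMPATIBILITY and ENABLE_THINLTO are ON, the clickhouse_memcpy objects)
# are linked in without the caller mentioning them.
add_executable (example_tool main.cpp)   # example_tool and main.cpp are hypothetical names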
@@ -1,3 +1,5 @@
+#pragma once
+
 #include <string>
 #include <common/types.h>
 
@@ -1,6 +1,9 @@
-/// Original is here https://github.com/cerevra/int
 #pragma once
 
+/// Original is here https://github.com/cerevra/int
+/// Distributed under the Boost Software License, Version 1.0.
+/// (See at http://www.boost.org/LICENSE_1_0.txt)
+
 #include "throwError.h"
 
 namespace wide
@@ -27,6 +27,10 @@ if (GLIBC_COMPATIBILITY)
 list(APPEND glibc_compatibility_sources musl/getentropy.c)
 endif()
 
+add_library (clickhouse_memcpy OBJECT
+${ClickHouse_SOURCE_DIR}/contrib/FastMemcpy/memcpy_wrapper.c
+)
+
 # Need to omit frame pointers to match the performance of glibc
 set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fomit-frame-pointer")
 
@@ -1,3 +1,5 @@
+#pragma once
+
 #include <optional>
 #include <string>
 #include <Poco/AutoPtr.h>
@@ -21,8 +21,8 @@ void Pool::Entry::incrementRefCount()
 {
     if (!data)
         return;
-    ++data->ref_count;
-    if (data->ref_count == 1)
+    /// First reference, initialize thread
+    if (data->ref_count.fetch_add(1) == 0)
         mysql_thread_init();
 }
 

@@ -30,12 +30,10 @@ void Pool::Entry::decrementRefCount()
 {
     if (!data)
         return;
-    if (data->ref_count > 0)
-    {
-        --data->ref_count;
-        if (data->ref_count == 0)
-            mysql_thread_end();
-    }
+    /// We were the last user of this thread, deinitialize it
+    if (data->ref_count.fetch_sub(1) == 1)
+        mysql_thread_end();
 }
 
 
@@ -3,6 +3,7 @@
 #include <list>
 #include <memory>
 #include <mutex>
+#include <atomic>
 
 #include <Poco/Exception.h>
 #include <mysqlxx/Connection.h>

@@ -35,7 +36,9 @@ protected:
 struct Connection
 {
     mysqlxx::Connection conn;
-    int ref_count = 0;
+    /// Ref count modified in constructor/descructor of Entry
+    /// but also read in pool code.
+    std::atomic<int> ref_count = 0;
 };
 
 public:
@@ -1,6 +1,6 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 
 
-from __future__ import print_function
+
 import sys
 import json

@@ -99,7 +99,7 @@ def gen_html_json(options, arguments):
     tuples = read_stats_file(options, arguments[1])
     print('{')
     print('"system: GreenPlum(x2),')
-    print('"version": "%s",' % '4.3.9.1')
+    print(('"version": "%s",' % '4.3.9.1'))
     print('"data_size": 10000000,')
     print('"time": "",')
     print('"comments": "",')
cmake/Modules/FindgRPC.cmake (new file, 330 lines)

#[[
Defines the following variables:
``gRPC_FOUND``
Whether the gRPC framework is found
``gRPC_INCLUDE_DIRS``
The include directories of the gRPC framework, including the include directories of the C++ wrapper.
``gRPC_LIBRARIES``
The libraries of the gRPC framework.
``gRPC_UNSECURE_LIBRARIES``
The libraries of the gRPC framework without SSL.
``_gRPC_CPP_PLUGIN``
The plugin for generating gRPC client and server C++ stubs from `.proto` files
``_gRPC_PYTHON_PLUGIN``
The plugin for generating gRPC client and server Python stubs from `.proto` files

The following :prop_tgt:`IMPORTED` targets are also defined:
``grpc++``
``grpc++_unsecure``
``grpc_cpp_plugin``
``grpc_python_plugin``

Add custom commands to process ``.proto`` files to C++::
protobuf_generate_grpc_cpp(<SRCS> <HDRS>
[DESCRIPTORS <DESC>] [EXPORT_MACRO <MACRO>] [<ARGN>...])

``SRCS``
Variable to define with autogenerated source files
``HDRS``
Variable to define with autogenerated header files
``DESCRIPTORS``
Variable to define with autogenerated descriptor files, if requested.
``EXPORT_MACRO``
is a macro which should expand to ``__declspec(dllexport)`` or
``__declspec(dllimport)`` depending on what is being compiled.
``ARGN``
``.proto`` files
#]]

# Function to generate C++ files from .proto files.
# This function is a modified version of the function PROTOBUF_GENERATE_CPP() copied from https://github.com/Kitware/CMake/blob/master/Modules/FindProtobuf.cmake.
function(PROTOBUF_GENERATE_GRPC_CPP SRCS HDRS)
cmake_parse_arguments(protobuf_generate_grpc_cpp "" "EXPORT_MACRO;DESCRIPTORS" "" ${ARGN})

set(_proto_files "${protobuf_generate_grpc_cpp_UNPARSED_ARGUMENTS}")
if(NOT _proto_files)
message(SEND_ERROR "Error: PROTOBUF_GENERATE_GRPC_CPP() called without any proto files")
return()
endif()

if(PROTOBUF_GENERATE_GRPC_CPP_APPEND_PATH)
set(_append_arg APPEND_PATH)
endif()

if(protobuf_generate_grpc_cpp_DESCRIPTORS)
set(_descriptors DESCRIPTORS)
endif()

if(DEFINED PROTOBUF_IMPORT_DIRS AND NOT DEFINED Protobuf_IMPORT_DIRS)
set(Protobuf_IMPORT_DIRS "${PROTOBUF_IMPORT_DIRS}")
endif()

if(DEFINED Protobuf_IMPORT_DIRS)
set(_import_arg IMPORT_DIRS ${Protobuf_IMPORT_DIRS})
endif()

set(_outvar)
protobuf_generate_grpc(${_append_arg} ${_descriptors} LANGUAGE cpp EXPORT_MACRO ${protobuf_generate_cpp_EXPORT_MACRO} OUT_VAR _outvar ${_import_arg} PROTOS ${_proto_files})

set(${SRCS})
set(${HDRS})
if(protobuf_generate_grpc_cpp_DESCRIPTORS)
set(${protobuf_generate_grpc_cpp_DESCRIPTORS})
endif()

foreach(_file ${_outvar})
if(_file MATCHES "cc$")
list(APPEND ${SRCS} ${_file})
elseif(_file MATCHES "desc$")
list(APPEND ${protobuf_generate_grpc_cpp_DESCRIPTORS} ${_file})
else()
list(APPEND ${HDRS} ${_file})
endif()
endforeach()
set(${SRCS} ${${SRCS}} PARENT_SCOPE)
set(${HDRS} ${${HDRS}} PARENT_SCOPE)
if(protobuf_generate_grpc_cpp_DESCRIPTORS)
set(${protobuf_generate_grpc_cpp_DESCRIPTORS} "${${protobuf_generate_grpc_cpp_DESCRIPTORS}}" PARENT_SCOPE)
endif()
endfunction()

# Helper function.
# This function is a modified version of the function protobuf_generate() copied from https://github.com/Kitware/CMake/blob/master/Modules/FindProtobuf.cmake.
function(protobuf_generate_grpc)
set(_options APPEND_PATH DESCRIPTORS)
set(_singleargs LANGUAGE OUT_VAR EXPORT_MACRO PROTOC_OUT_DIR)
if(COMMAND target_sources)
list(APPEND _singleargs TARGET)
endif()
set(_multiargs PROTOS IMPORT_DIRS GENERATE_EXTENSIONS)

cmake_parse_arguments(protobuf_generate_grpc "${_options}" "${_singleargs}" "${_multiargs}" "${ARGN}")

if(NOT protobuf_generate_grpc_PROTOS AND NOT protobuf_generate_grpc_TARGET)
message(SEND_ERROR "Error: protobuf_generate_grpc called without any targets or source files")
return()
endif()

if(NOT protobuf_generate_grpc_OUT_VAR AND NOT protobuf_generate_grpc_TARGET)
message(SEND_ERROR "Error: protobuf_generate_grpc called without a target or output variable")
return()
endif()

if(NOT protobuf_generate_grpc_LANGUAGE)
set(protobuf_generate_grpc_LANGUAGE cpp)
endif()
string(TOLOWER ${protobuf_generate_grpc_LANGUAGE} protobuf_generate_grpc_LANGUAGE)

if(NOT protobuf_generate_grpc_PROTOC_OUT_DIR)
set(protobuf_generate_grpc_PROTOC_OUT_DIR ${CMAKE_CURRENT_BINARY_DIR})
endif()

if(protobuf_generate_grpc_EXPORT_MACRO AND protobuf_generate_grpc_LANGUAGE STREQUAL cpp)
set(_dll_export_decl "dllexport_decl=${protobuf_generate_grpc_EXPORT_MACRO}:")
endif()

if(NOT protobuf_generate_grpc_GENERATE_EXTENSIONS)
if(protobuf_generate_grpc_LANGUAGE STREQUAL cpp)
set(protobuf_generate_grpc_GENERATE_EXTENSIONS .pb.h .pb.cc .grpc.pb.h .grpc.pb.cc)
elseif(protobuf_generate_grpc_LANGUAGE STREQUAL python)
set(protobuf_generate_grpc_GENERATE_EXTENSIONS _pb2.py)
else()
message(SEND_ERROR "Error: protobuf_generate_grpc given unknown Language ${LANGUAGE}, please provide a value for GENERATE_EXTENSIONS")
return()
endif()
endif()

if(NOT protobuf_generate_grpc_PLUGIN)
if(protobuf_generate_grpc_LANGUAGE STREQUAL cpp)
set(protobuf_generate_grpc_PLUGIN "grpc_cpp_plugin")
elseif(protobuf_generate_grpc_LANGUAGE STREQUAL python)
set(protobuf_generate_grpc_PLUGIN "grpc_python_plugin")
else()
message(SEND_ERROR "Error: protobuf_generate_grpc given unknown Language ${LANGUAGE}, please provide a value for PLUGIN")
return()
endif()
endif()

if(protobuf_generate_grpc_TARGET)
get_target_property(_source_list ${protobuf_generate_grpc_TARGET} SOURCES)
foreach(_file ${_source_list})
if(_file MATCHES "proto$")
list(APPEND protobuf_generate_grpc_PROTOS ${_file})
endif()
endforeach()
endif()

if(NOT protobuf_generate_grpc_PROTOS)
message(SEND_ERROR "Error: protobuf_generate_grpc could not find any .proto files")
return()
endif()

if(protobuf_generate_grpc_APPEND_PATH)
# Create an include path for each file specified
foreach(_file ${protobuf_generate_grpc_PROTOS})
get_filename_component(_abs_file ${_file} ABSOLUTE)
get_filename_component(_abs_path ${_abs_file} PATH)
list(FIND _protobuf_include_path ${_abs_path} _contains_already)
if(${_contains_already} EQUAL -1)
list(APPEND _protobuf_include_path -I ${_abs_path})
endif()
endforeach()
else()
set(_protobuf_include_path -I ${CMAKE_CURRENT_SOURCE_DIR})
endif()

foreach(DIR ${protobuf_generate_grpc_IMPORT_DIRS})
get_filename_component(ABS_PATH ${DIR} ABSOLUTE)
list(FIND _protobuf_include_path ${ABS_PATH} _contains_already)
if(${_contains_already} EQUAL -1)
list(APPEND _protobuf_include_path -I ${ABS_PATH})
endif()
endforeach()

set(_generated_srcs_all)
foreach(_proto ${protobuf_generate_grpc_PROTOS})
get_filename_component(_abs_file ${_proto} ABSOLUTE)
get_filename_component(_abs_dir ${_abs_file} DIRECTORY)
get_filename_component(_basename ${_proto} NAME_WE)
file(RELATIVE_PATH _rel_dir ${CMAKE_CURRENT_SOURCE_DIR} ${_abs_dir})

set(_possible_rel_dir)
if(NOT protobuf_generate_grpc_APPEND_PATH)
set(_possible_rel_dir ${_rel_dir}/)
endif()

set(_generated_srcs)
foreach(_ext ${protobuf_generate_grpc_GENERATE_EXTENSIONS})
list(APPEND _generated_srcs "${protobuf_generate_grpc_PROTOC_OUT_DIR}/${_possible_rel_dir}${_basename}${_ext}")
endforeach()

if(protobuf_generate_grpc_DESCRIPTORS AND protobuf_generate_grpc_LANGUAGE STREQUAL cpp)
set(_descriptor_file "${CMAKE_CURRENT_BINARY_DIR}/${_basename}.desc")
set(_dll_desc_out "--descriptor_set_out=${_descriptor_file}")
list(APPEND _generated_srcs ${_descriptor_file})
endif()
list(APPEND _generated_srcs_all ${_generated_srcs})

add_custom_command(
OUTPUT ${_generated_srcs}
COMMAND protobuf::protoc
ARGS --${protobuf_generate_grpc_LANGUAGE}_out ${_dll_export_decl}${protobuf_generate_grpc_PROTOC_OUT_DIR}
--grpc_out ${_dll_export_decl}${protobuf_generate_grpc_PROTOC_OUT_DIR}
--plugin=protoc-gen-grpc=$<TARGET_FILE:${protobuf_generate_grpc_PLUGIN}>
${_dll_desc_out} ${_protobuf_include_path} ${_abs_file}
DEPENDS ${_abs_file} protobuf::protoc ${protobuf_generate_grpc_PLUGIN}
COMMENT "Running ${protobuf_generate_grpc_LANGUAGE} protocol buffer compiler on ${_proto}"
VERBATIM)
endforeach()

set_source_files_properties(${_generated_srcs_all} PROPERTIES GENERATED TRUE)
if(protobuf_generate_grpc_OUT_VAR)
set(${protobuf_generate_grpc_OUT_VAR} ${_generated_srcs_all} PARENT_SCOPE)
endif()
if(protobuf_generate_grpc_TARGET)
target_sources(${protobuf_generate_grpc_TARGET} PRIVATE ${_generated_srcs_all})
endif()
endfunction()


# Find the libraries.
if(gRPC_USE_STATIC_LIBS)
# Support preference of static libs by adjusting CMAKE_FIND_LIBRARY_SUFFIXES
set(_gRPC_ORIG_CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES})
if(WIN32)
set(CMAKE_FIND_LIBRARY_SUFFIXES .lib .a ${CMAKE_FIND_LIBRARY_SUFFIXES})
else()
set(CMAKE_FIND_LIBRARY_SUFFIXES .a)
endif()
endif()

find_library(gRPC_LIBRARY NAMES grpc)
find_library(gRPC_CPP_LIBRARY NAMES grpc++)
find_library(gRPC_UNSECURE_LIBRARY NAMES grpc_unsecure)
find_library(gRPC_CPP_UNSECURE_LIBRARY NAMES grpc++_unsecure)

set(gRPC_LIBRARIES)
if(gRPC_USE_UNSECURE_LIBRARIES)
if(gRPC_UNSECURE_LIBRARY)
set(gRPC_LIBRARIES ${gRPC_LIBRARIES} ${gRPC_UNSECURE_LIBRARY})
endif()
if(gRPC_CPP_UNSECURE_LIBRARY)
set(gRPC_LIBRARIES ${gRPC_LIBRARIES} ${gRPC_CPP_UNSECURE_LIBRARY})
endif()
else()
if(gRPC_LIBRARY)
set(gRPC_LIBRARIES ${gRPC_LIBRARIES} ${gRPC_LIBRARY})
endif()
if(gRPC_CPP_UNSECURE_LIBRARY)
set(gRPC_LIBRARIES ${gRPC_LIBRARIES} ${gRPC_CPP_LIBRARY})
endif()
endif()

# Restore the original find library ordering.
if(gRPC_USE_STATIC_LIBS)
set(CMAKE_FIND_LIBRARY_SUFFIXES ${_gRPC_ORIG_CMAKE_FIND_LIBRARY_SUFFIXES})
endif()

# Find the include directories.
find_path(gRPC_INCLUDE_DIR grpc/grpc.h)
find_path(gRPC_CPP_INCLUDE_DIR grpc++/grpc++.h)

if(gRPC_INCLUDE_DIR AND gRPC_CPP_INCLUDE_DIR AND NOT(gRPC_INCLUDE_DIR STREQUAL gRPC_CPP_INCLUDE_DIR))
set(gRPC_INCLUDE_DIRS ${gRPC_INCLUDE_DIR} ${gRPC_CPP_INCLUDE_DIR})
elseif(gRPC_INCLUDE_DIR)
set(gRPC_INCLUDE_DIRS ${gRPC_INCLUDE_DIR})
else()
set(gRPC_INCLUDE_DIRS ${gRPC_CPP_INCLUDE_DIR})
endif()

# Get full path to plugin.
find_program(_gRPC_CPP_PLUGIN
NAMES grpc_cpp_plugin
DOC "The plugin for generating gRPC client and server C++ stubs from `.proto` files")

find_program(_gRPC_PYTHON_PLUGIN
NAMES grpc_python_plugin
DOC "The plugin for generating gRPC client and server Python stubs from `.proto` files")

# Add imported targets.
if(gRPC_CPP_LIBRARY AND NOT TARGET grpc++)
add_library(grpc++ UNKNOWN IMPORTED)
set_target_properties(grpc++ PROPERTIES
IMPORTED_LOCATION "${gRPC_CPP_LIBRARY}")
set_target_properties(grpc++ PROPERTIES
INTERFACE_INCLUDE_DIRECTORIES ${gRPC_INCLUDE_DIRS})
endif()

if(gRPC_CPP_UNSECURE_LIBRARY AND NOT TARGET grpc++_unsecure)
add_library(grpc++_unsecure UNKNOWN IMPORTED)
set_target_properties(grpc++_unsecure PROPERTIES
IMPORTED_LOCATION "${gRPC_CPP_UNSECURE_LIBRARY}")
set_target_properties(grpc++_unsecure PROPERTIES
INTERFACE_INCLUDE_DIRECTORIES ${gRPC_INCLUDE_DIRS})
endif()

if(gRPC_CPP_PLUGIN AND NOT TARGET grpc_cpp_plugin)
add_executable(grpc_cpp_plugin IMPORTED)
set_target_properties(grpc_cpp_plugin PROPERTIES
IMPORTED_LOCATION "${gRPC_CPP_PLUGIN}")
endif()

if(gRPC_PYTHON_PLUGIN AND NOT TARGET grpc_python_plugin)
add_executable(grpc_python_plugin IMPORTED)
set_target_properties(grpc_python_plugin PROPERTIES
IMPORTED_LOCATION "${gRPC_PYTHON_PLUGIN}")
endif()

#include(FindPackageHandleStandardArgs.cmake)
FIND_PACKAGE_HANDLE_STANDARD_ARGS(gRPC
REQUIRED_VARS gRPC_LIBRARY gRPC_CPP_LIBRARY gRPC_UNSECURE_LIBRARY gRPC_CPP_UNSECURE_LIBRARY
gRPC_INCLUDE_DIR gRPC_CPP_INCLUDE_DIR _gRPC_CPP_PLUGIN _gRPC_PYTHON_PLUGIN)

if(gRPC_FOUND)
if(gRPC_DEBUG)
message(STATUS "gRPC: INCLUDE_DIRS=${gRPC_INCLUDE_DIRS}")
message(STATUS "gRPC: LIBRARIES=${gRPC_LIBRARIES}")
message(STATUS "gRPC: CPP_PLUGIN=${_gRPC_CPP_PLUGIN}")
message(STATUS "gRPC: PYTHON_PLUGIN=${_gRPC_PYTHON_PLUGIN}")
endif()
endif()
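As a usage illustration of the module documented above, a hedged sketch that is not taken from the repository: it assumes protobuf has been found so that the protobuf::protoc target exists, and the proto file and target names are invented.

# Hypothetical usage of cmake/Modules/FindgRPC.cmake:
list(APPEND CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/cmake/Modules")
find_package(Protobuf REQUIRED)   # provides protobuf::protoc used by the generation helper
find_package(gRPC REQUIRED)       # sets gRPC_INCLUDE_DIRS, gRPC_LIBRARIES, _gRPC_CPP_PLUGIN

# Generate C++ sources and headers from a .proto file (file name is made up).
protobuf_generate_grpc_cpp(EXAMPLE_SRCS EXAMPLE_HDRS example_service.proto)

add_executable(example_server server.cpp ${EXAMPLE_SRCS} ${EXAMPLE_HDRS})
target_include_directories(example_server SYSTEM PRIVATE ${gRPC_INCLUDE_DIRS} ${CMAKE_CURRENT_BINARY_DIR})
target_link_libraries(example_server PRIVATE ${gRPC_LIBRARIES})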
@@ -1,9 +1,9 @@
 # This strings autochanged from release_lib.sh:
-SET(VERSION_REVISION 54441)
+SET(VERSION_REVISION 54442)
 SET(VERSION_MAJOR 20)
-SET(VERSION_MINOR 10)
+SET(VERSION_MINOR 11)
 SET(VERSION_PATCH 1)
-SET(VERSION_GITHASH 11a247d2f42010c1a17bf678c3e00a4bc89b23f8)
+SET(VERSION_GITHASH 76a04fb4b4f6cd27ad999baf6dc9a25e88851c42)
-SET(VERSION_DESCRIBE v20.10.1.1-prestable)
+SET(VERSION_DESCRIBE v20.11.1.1-prestable)
-SET(VERSION_STRING 20.10.1.1)
+SET(VERSION_STRING 20.11.1.1)
 # end of autochange
@@ -1,45 +1,65 @@
-option (ENABLE_GRPC "Use gRPC" ${ENABLE_LIBRARIES})
-
-if (NOT ENABLE_GRPC)
-if (USE_INTERNAL_GRPC_LIBRARY)
-message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal gRPC library with ENABLE_GRPC=OFF")
-endif()
-return()
-endif()
-
-option (USE_INTERNAL_GRPC_LIBRARY
-"Set to FALSE to use system gRPC library instead of bundled. (Experimental. Set to OFF on your own risk)"
-${NOT_UNBUNDLED})
-
-if (NOT USE_INTERNAL_GRPC_LIBRARY)
-find_package(grpc)
-if (NOT GRPC_FOUND)
-find_path(GRPC_INCLUDE_DIR grpcpp/grpcpp.h)
-find_library(GRPC_LIBRARY grpc++)
-endif ()
-
-if (GRPC_INCLUDE_DIR AND GRPC_LIBRARY)
-set (USE_GRPC ON)
-else()
-message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system gRPC")
-endif()
-endif()
-
-if (NOT USE_GRPC)
-if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/grpc/include/grpc++/grpc++.h")
-message (WARNING "submodule contrib/grpc is missing. To fix try run: \n git submodule update --init --recursive")
-message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal gRPC")
-set (USE_INTERNAL_GRPC_LIBRARY OFF)
-elseif (NOT USE_PROTOBUF)
-message (WARNING "gRPC requires protobuf which is disabled")
-message (${RECONFIGURE_MESSAGE_LEVEL} "Will not use internal gRPC without protobuf")
-set (USE_INTERNAL_GRPC_LIBRARY OFF)
-else()
-set (GRPC_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/grpc/include")
-set (GRPC_LIBRARY "libgrpc++")
-set (USE_GRPC ON)
-set (USE_INTERNAL_GRPC_LIBRARY ON)
-endif()
-endif()
-
-message (STATUS "Using gRPC=${USE_GRPC}: ${GRPC_INCLUDE_DIR} : ${GRPC_LIBRARY}")
+option(ENABLE_GRPC "Use gRPC" ${ENABLE_LIBRARIES})
+
+if(NOT ENABLE_GRPC)
+if(USE_INTERNAL_GRPC_LIBRARY)
+message(${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal gRPC library with ENABLE_GRPC=OFF")
+endif()
+return()
+endif()
+
+if(NOT USE_PROTOBUF)
+message(WARNING "Cannot use gRPC library without protobuf")
+endif()
+
+# Normally we use the internal gRPC framework.
+# You can set USE_INTERNAL_GRPC_LIBRARY to OFF to force using the external gRPC framework, which should be installed in the system in this case.
+# The external gRPC framework can be installed in the system by running
+# sudo apt-get install libgrpc++-dev protobuf-compiler-grpc
+option(USE_INTERNAL_GRPC_LIBRARY "Set to FALSE to use system gRPC library instead of bundled. (Experimental. Set to OFF on your own risk)" ${NOT_UNBUNDLED})
+
+if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/grpc/CMakeLists.txt")
+if(USE_INTERNAL_GRPC_LIBRARY)
+message(WARNING "submodule contrib/grpc is missing. to fix try run: \n git submodule update --init --recursive")
+message(${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal grpc")
+set(USE_INTERNAL_GRPC_LIBRARY 0)
+endif()
+set(MISSING_INTERNAL_GRPC_LIBRARY 1)
+endif()
+
+if(USE_SSL)
+set(gRPC_USE_UNSECURE_LIBRARIES FALSE)
+else()
+set(gRPC_USE_UNSECURE_LIBRARIES TRUE)
+endif()
+
+if(NOT USE_INTERNAL_GRPC_LIBRARY)
+find_package(gRPC)
+if(NOT gRPC_INCLUDE_DIRS OR NOT gRPC_LIBRARIES)
+message(${RECONFIGURE_MESSAGE_LEVEL} "Can't find system gRPC library")
+set(EXTERNAL_GRPC_LIBRARY_FOUND 0)
+elseif(NOT _gRPC_CPP_PLUGIN)
+message(${RECONFIGURE_MESSAGE_LEVEL} "Can't find system grcp_cpp_plugin")
+set(EXTERNAL_GRPC_LIBRARY_FOUND 0)
+else()
+set(EXTERNAL_GRPC_LIBRARY_FOUND 1)
+set(USE_GRPC 1)
+endif()
+endif()
+
+if(NOT EXTERNAL_GRPC_LIBRARY_FOUND AND NOT MISSING_INTERNAL_GRPC_LIBRARY)
+set(gRPC_INCLUDE_DIRS "${ClickHouse_SOURCE_DIR}/contrib/grpc/include")
+if(gRPC_USE_UNSECURE_LIBRARIES)
+set(gRPC_LIBRARIES grpc_unsecure grpc++_unsecure)
+else()
+set(gRPC_LIBRARIES grpc grpc++)
+endif()
+set(_gRPC_CPP_PLUGIN $<TARGET_FILE:grpc_cpp_plugin>)
+set(_gRPC_PROTOC_EXECUTABLE $<TARGET_FILE:protobuf::protoc>)
+
+include("${ClickHouse_SOURCE_DIR}/contrib/grpc-cmake/protobuf_generate_grpc.cmake")
+
+set(USE_INTERNAL_GRPC_LIBRARY 1)
+set(USE_GRPC 1)
+endif()
+
+message(STATUS "Using gRPC=${USE_GRPC}: ${gRPC_INCLUDE_DIRS} : ${gRPC_LIBRARIES} : ${_gRPC_CPP_PLUGIN}")
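A small hedged sketch of how a consumer could use the variables this script exports; the target name is hypothetical and the real targets in the repository are wired up through the build system's own plumbing.

if (USE_GRPC)
    target_include_directories(example_target SYSTEM PRIVATE ${gRPC_INCLUDE_DIRS})
    target_link_libraries(example_target PRIVATE ${gRPC_LIBRARIES})
endif()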
@@ -26,7 +26,7 @@ endif ()
 if (NOT USE_INTERNAL_LLVM_LIBRARY)
 set (LLVM_PATHS "/usr/local/lib/llvm")
 
-foreach(llvm_v 9 8)
+foreach(llvm_v 10 9 8)
 if (NOT LLVM_FOUND)
 find_package (LLVM ${llvm_v} CONFIG PATHS ${LLVM_PATHS})
 endif ()
@@ -26,8 +26,8 @@ if (NOT USE_INTERNAL_ODBC_LIBRARY)
 find_path (INCLUDE_ODBC sql.h)
 
 if(LIBRARY_ODBC AND INCLUDE_ODBC)
-add_library (unixodbc UNKNOWN IMPORTED)
-set_target_properties (unixodbc PROPERTIES IMPORTED_LOCATION ${LIBRARY_ODBC})
+add_library (unixodbc INTERFACE)
+set_target_properties (unixodbc PROPERTIES INTERFACE_LINK_LIBRARIES ${LIBRARY_ODBC})
 set_target_properties (unixodbc PROPERTIES INTERFACE_INCLUDE_DIRECTORIES ${INCLUDE_ODBC})
 set_target_properties (unixodbc PROPERTIES INTERFACE_COMPILE_DEFINITIONS USE_ODBC=1)
 
|
|||||||
add_warning(pedantic)
|
add_warning(pedantic)
|
||||||
no_warning(vla-extension)
|
no_warning(vla-extension)
|
||||||
no_warning(zero-length-array)
|
no_warning(zero-length-array)
|
||||||
|
no_warning(c11-extensions)
|
||||||
|
|
||||||
add_warning(comma)
|
add_warning(comma)
|
||||||
add_warning(conditional-uninitialized)
|
add_warning(conditional-uninitialized)
|
||||||
@ -57,7 +58,10 @@ if (COMPILER_CLANG)
|
|||||||
add_warning(unused-exception-parameter)
|
add_warning(unused-exception-parameter)
|
||||||
add_warning(unused-macros)
|
add_warning(unused-macros)
|
||||||
add_warning(unused-member-function)
|
add_warning(unused-member-function)
|
||||||
add_warning(zero-as-null-pointer-constant)
|
# XXX: libstdc++ has some of these for 3way compare
|
||||||
|
if (USE_LIBCXX)
|
||||||
|
add_warning(zero-as-null-pointer-constant)
|
||||||
|
endif()
|
||||||
|
|
||||||
if (WEVERYTHING)
|
if (WEVERYTHING)
|
||||||
add_warning(everything)
|
add_warning(everything)
|
||||||
@ -169,6 +173,11 @@ elseif (COMPILER_GCC)
|
|||||||
add_cxx_compile_options(-Wunused)
|
add_cxx_compile_options(-Wunused)
|
||||||
# Warn if vector operation is not implemented via SIMD capabilities of the architecture
|
# Warn if vector operation is not implemented via SIMD capabilities of the architecture
|
||||||
add_cxx_compile_options(-Wvector-operation-performance)
|
add_cxx_compile_options(-Wvector-operation-performance)
|
||||||
|
# XXX: libstdc++ has some of these for 3way compare
|
||||||
|
if (USE_LIBCXX)
|
||||||
|
# Warn when a literal 0 is used as null pointer constant.
|
||||||
|
add_cxx_compile_options(-Wzero-as-null-pointer-constant)
|
||||||
|
endif()
|
||||||
|
|
||||||
if (CMAKE_CXX_COMPILER_VERSION VERSION_GREATER_EQUAL 10)
|
if (CMAKE_CXX_COMPILER_VERSION VERSION_GREATER_EQUAL 10)
|
||||||
# XXX: gcc10 stuck with this option while compiling GatherUtils code
|
# XXX: gcc10 stuck with this option while compiling GatherUtils code
|
||||||
|
contrib/AMQP-CPP (vendored submodule)
@@ -1 +1 @@
-Subproject commit 1c08399ab0ab9e4042ef8e2bbe9e208e5dcbc13b
+Subproject commit d63e1f016582e9faaaf279aa24513087a07bc6e7
@@ -1,4 +1,4 @@
-#include <FastMemcpy.h>
+#include "FastMemcpy.h"
 
 void * memcpy(void * __restrict destination, const void * __restrict source, size_t size)
 {
@@ -16,6 +16,7 @@ set (SRCS
 ${LIBRARY_DIR}/src/flags.cpp
 ${LIBRARY_DIR}/src/linux_tcp/openssl.cpp
 ${LIBRARY_DIR}/src/linux_tcp/tcpconnection.cpp
+${LIBRARY_DIR}/src/inbuffer.cpp
 ${LIBRARY_DIR}/src/receivedframe.cpp
 ${LIBRARY_DIR}/src/table.cpp
 ${LIBRARY_DIR}/src/watchable.cpp
contrib/grpc (vendored submodule)
@@ -1 +1 @@
-Subproject commit 8aea4e168e78f3eb9828080740fc8cb73d53bf79
+Subproject commit a6570b863cf76c9699580ba51c7827d5bffaac43

(One file diff suppressed because it is too large.)
contrib/grpc-cmake/protobuf_generate_grpc.cmake (new file, 207 lines)

#[[
Add custom commands to process ``.proto`` files to C++::
protobuf_generate_grpc_cpp(<SRCS> <HDRS>
[DESCRIPTORS <DESC>] [EXPORT_MACRO <MACRO>] [<ARGN>...])

``SRCS``
Variable to define with autogenerated source files
``HDRS``
Variable to define with autogenerated header files
``DESCRIPTORS``
Variable to define with autogenerated descriptor files, if requested.
``EXPORT_MACRO``
is a macro which should expand to ``__declspec(dllexport)`` or
``__declspec(dllimport)`` depending on what is being compiled.
``ARGN``
``.proto`` files
#]]

# Function to generate C++ files from .proto files.
# This function is a modified version of the function PROTOBUF_GENERATE_CPP() copied from https://github.com/Kitware/CMake/blob/master/Modules/FindProtobuf.cmake.
function(PROTOBUF_GENERATE_GRPC_CPP SRCS HDRS)
cmake_parse_arguments(protobuf_generate_grpc_cpp "" "EXPORT_MACRO;DESCRIPTORS" "" ${ARGN})

set(_proto_files "${protobuf_generate_grpc_cpp_UNPARSED_ARGUMENTS}")
if(NOT _proto_files)
message(SEND_ERROR "Error: PROTOBUF_GENERATE_GRPC_CPP() called without any proto files")
return()
endif()

if(PROTOBUF_GENERATE_GRPC_CPP_APPEND_PATH)
set(_append_arg APPEND_PATH)
endif()

if(protobuf_generate_grpc_cpp_DESCRIPTORS)
set(_descriptors DESCRIPTORS)
endif()

if(DEFINED PROTOBUF_IMPORT_DIRS AND NOT DEFINED Protobuf_IMPORT_DIRS)
set(Protobuf_IMPORT_DIRS "${PROTOBUF_IMPORT_DIRS}")
endif()

if(DEFINED Protobuf_IMPORT_DIRS)
set(_import_arg IMPORT_DIRS ${Protobuf_IMPORT_DIRS})
endif()

set(_outvar)
protobuf_generate_grpc(${_append_arg} ${_descriptors} LANGUAGE cpp EXPORT_MACRO ${protobuf_generate_cpp_EXPORT_MACRO} OUT_VAR _outvar ${_import_arg} PROTOS ${_proto_files})

set(${SRCS})
set(${HDRS})
if(protobuf_generate_grpc_cpp_DESCRIPTORS)
set(${protobuf_generate_grpc_cpp_DESCRIPTORS})
endif()

foreach(_file ${_outvar})
if(_file MATCHES "cc$")
list(APPEND ${SRCS} ${_file})
elseif(_file MATCHES "desc$")
list(APPEND ${protobuf_generate_grpc_cpp_DESCRIPTORS} ${_file})
else()
list(APPEND ${HDRS} ${_file})
endif()
endforeach()
set(${SRCS} ${${SRCS}} PARENT_SCOPE)
set(${HDRS} ${${HDRS}} PARENT_SCOPE)
if(protobuf_generate_grpc_cpp_DESCRIPTORS)
set(${protobuf_generate_grpc_cpp_DESCRIPTORS} "${${protobuf_generate_grpc_cpp_DESCRIPTORS}}" PARENT_SCOPE)
endif()
endfunction()

# Helper function.
# This function is a modified version of the function protobuf_generate() copied from https://github.com/Kitware/CMake/blob/master/Modules/FindProtobuf.cmake.
function(protobuf_generate_grpc)
set(_options APPEND_PATH DESCRIPTORS)
set(_singleargs LANGUAGE OUT_VAR EXPORT_MACRO PROTOC_OUT_DIR)
if(COMMAND target_sources)
list(APPEND _singleargs TARGET)
endif()
set(_multiargs PROTOS IMPORT_DIRS GENERATE_EXTENSIONS)

cmake_parse_arguments(protobuf_generate_grpc "${_options}" "${_singleargs}" "${_multiargs}" "${ARGN}")

if(NOT protobuf_generate_grpc_PROTOS AND NOT protobuf_generate_grpc_TARGET)
message(SEND_ERROR "Error: protobuf_generate_grpc called without any targets or source files")
return()
endif()

if(NOT protobuf_generate_grpc_OUT_VAR AND NOT protobuf_generate_grpc_TARGET)
message(SEND_ERROR "Error: protobuf_generate_grpc called without a target or output variable")
return()
endif()

if(NOT protobuf_generate_grpc_LANGUAGE)
set(protobuf_generate_grpc_LANGUAGE cpp)
endif()
string(TOLOWER ${protobuf_generate_grpc_LANGUAGE} protobuf_generate_grpc_LANGUAGE)

if(NOT protobuf_generate_grpc_PROTOC_OUT_DIR)
set(protobuf_generate_grpc_PROTOC_OUT_DIR ${CMAKE_CURRENT_BINARY_DIR})
endif()

if(protobuf_generate_grpc_EXPORT_MACRO AND protobuf_generate_grpc_LANGUAGE STREQUAL cpp)
set(_dll_export_decl "dllexport_decl=${protobuf_generate_grpc_EXPORT_MACRO}:")
endif()

if(NOT protobuf_generate_grpc_GENERATE_EXTENSIONS)
if(protobuf_generate_grpc_LANGUAGE STREQUAL cpp)
set(protobuf_generate_grpc_GENERATE_EXTENSIONS .pb.h .pb.cc .grpc.pb.h .grpc.pb.cc)
elseif(protobuf_generate_grpc_LANGUAGE STREQUAL python)
set(protobuf_generate_grpc_GENERATE_EXTENSIONS _pb2.py)
else()
message(SEND_ERROR "Error: protobuf_generate_grpc given unknown Language ${LANGUAGE}, please provide a value for GENERATE_EXTENSIONS")
return()
endif()
endif()

if(NOT protobuf_generate_grpc_PLUGIN)
if(protobuf_generate_grpc_LANGUAGE STREQUAL cpp)
set(protobuf_generate_grpc_PLUGIN "grpc_cpp_plugin")
elseif(protobuf_generate_grpc_LANGUAGE STREQUAL python)
set(protobuf_generate_grpc_PLUGIN "grpc_python_plugin")
else()
message(SEND_ERROR "Error: protobuf_generate_grpc given unknown Language ${LANGUAGE}, please provide a value for PLUGIN")
return()
endif()
endif()

if(protobuf_generate_grpc_TARGET)
get_target_property(_source_list ${protobuf_generate_grpc_TARGET} SOURCES)
foreach(_file ${_source_list})
if(_file MATCHES "proto$")
list(APPEND protobuf_generate_grpc_PROTOS ${_file})
endif()
endforeach()
endif()

if(NOT protobuf_generate_grpc_PROTOS)
message(SEND_ERROR "Error: protobuf_generate_grpc could not find any .proto files")
return()
endif()

if(protobuf_generate_grpc_APPEND_PATH)
# Create an include path for each file specified
foreach(_file ${protobuf_generate_grpc_PROTOS})
get_filename_component(_abs_file ${_file} ABSOLUTE)
get_filename_component(_abs_path ${_abs_file} PATH)
list(FIND _protobuf_include_path ${_abs_path} _contains_already)
if(${_contains_already} EQUAL -1)
list(APPEND _protobuf_include_path -I ${_abs_path})
endif()
endforeach()
else()
set(_protobuf_include_path -I ${CMAKE_CURRENT_SOURCE_DIR})
endif()

foreach(DIR ${protobuf_generate_grpc_IMPORT_DIRS})
get_filename_component(ABS_PATH ${DIR} ABSOLUTE)
list(FIND _protobuf_include_path ${ABS_PATH} _contains_already)
if(${_contains_already} EQUAL -1)
list(APPEND _protobuf_include_path -I ${ABS_PATH})
endif()
endforeach()

set(_generated_srcs_all)
foreach(_proto ${protobuf_generate_grpc_PROTOS})
get_filename_component(_abs_file ${_proto} ABSOLUTE)
get_filename_component(_abs_dir ${_abs_file} DIRECTORY)
get_filename_component(_basename ${_proto} NAME_WE)
file(RELATIVE_PATH _rel_dir ${CMAKE_CURRENT_SOURCE_DIR} ${_abs_dir})

set(_possible_rel_dir)
if(NOT protobuf_generate_grpc_APPEND_PATH)
set(_possible_rel_dir ${_rel_dir}/)
endif()

set(_generated_srcs)
foreach(_ext ${protobuf_generate_grpc_GENERATE_EXTENSIONS})
list(APPEND _generated_srcs "${protobuf_generate_grpc_PROTOC_OUT_DIR}/${_possible_rel_dir}${_basename}${_ext}")
endforeach()

if(protobuf_generate_grpc_DESCRIPTORS AND protobuf_generate_grpc_LANGUAGE STREQUAL cpp)
set(_descriptor_file "${CMAKE_CURRENT_BINARY_DIR}/${_basename}.desc")
set(_dll_desc_out "--descriptor_set_out=${_descriptor_file}")
list(APPEND _generated_srcs ${_descriptor_file})
endif()
list(APPEND _generated_srcs_all ${_generated_srcs})

add_custom_command(
OUTPUT ${_generated_srcs}
COMMAND protobuf::protoc
ARGS --${protobuf_generate_grpc_LANGUAGE}_out ${_dll_export_decl}${protobuf_generate_grpc_PROTOC_OUT_DIR}
--grpc_out ${_dll_export_decl}${protobuf_generate_grpc_PROTOC_OUT_DIR}
--plugin=protoc-gen-grpc=$<TARGET_FILE:${protobuf_generate_grpc_PLUGIN}>
${_dll_desc_out} ${_protobuf_include_path} ${_abs_file}
DEPENDS ${_abs_file} protobuf::protoc ${protobuf_generate_grpc_PLUGIN}
COMMENT "Running ${protobuf_generate_grpc_LANGUAGE} protocol buffer compiler on ${_proto}"
VERBATIM)
endforeach()

set_source_files_properties(${_generated_srcs_all} PROPERTIES GENERATED TRUE)
if(protobuf_generate_grpc_OUT_VAR)
set(${protobuf_generate_grpc_OUT_VAR} ${_generated_srcs_all} PARENT_SCOPE)
endif()
if(protobuf_generate_grpc_TARGET)
target_sources(${protobuf_generate_grpc_TARGET} PRIVATE ${_generated_srcs_all})
endif()
endfunction()
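A hedged usage sketch for the TARGET form of protobuf_generate_grpc() defined above; it assumes the protobuf::protoc and grpc_cpp_plugin targets exist, and the target and file names are invented.

# Hypothetical: attach generated gRPC/protobuf sources directly to an existing target.
add_library(example_proto_lib STATIC example_api.proto)
protobuf_generate_grpc(TARGET example_proto_lib LANGUAGE cpp)
target_include_directories(example_proto_lib SYSTEM PUBLIC ${CMAKE_CURRENT_BINARY_DIR})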
@@ -26,8 +26,8 @@ if (NOT USE_INTERNAL_HYPERSCAN_LIBRARY)
 if (LIBRARY_HYPERSCAN AND INCLUDE_HYPERSCAN)
 set (EXTERNAL_HYPERSCAN_LIBRARY_FOUND 1)
 
-add_library (hyperscan UNKNOWN IMPORTED GLOBAL)
-set_target_properties (hyperscan PROPERTIES IMPORTED_LOCATION ${LIBRARY_HYPERSCAN})
+add_library (hyperscan INTERFACE)
+set_target_properties (hyperscan PROPERTIES INTERFACE_LINK_LIBRARIES ${LIBRARY_HYPERSCAN})
 set_target_properties (hyperscan PROPERTIES INTERFACE_INCLUDE_DIRECTORIES ${INCLUDE_HYPERSCAN})
 set_property(TARGET hyperscan APPEND PROPERTY INTERFACE_COMPILE_DEFINITIONS USE_HYPERSCAN=1)
 else ()
contrib/jemalloc (vendored submodule)
@@ -1 +1 @@
-Subproject commit 026764f19995c53583ab25a3b9c06a2fd74e4689
+Subproject commit 93e27e435cac846028da20cd9b0841fbc9110bd2
@@ -9,10 +9,6 @@ else()
 endif ()
 
 if (NOT ENABLE_JEMALLOC)
-if(USE_INTERNAL_JEMALLOC_LIBRARY)
-message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal jemalloc with ENABLE_JEMALLOC=OFF")
-endif()
-
 add_library(jemalloc INTERFACE)
 target_compile_definitions(jemalloc INTERFACE USE_JEMALLOC=0)
 

@@ -24,162 +20,116 @@ if (NOT OS_LINUX)
 message (WARNING "jemalloc support on non-linux is EXPERIMENTAL")
 endif()
 
-option (USE_INTERNAL_JEMALLOC_LIBRARY "Use internal jemalloc library" ${NOT_UNBUNDLED})
-
-if (NOT USE_INTERNAL_JEMALLOC_LIBRARY)
-find_library(LIBRARY_JEMALLOC jemalloc)
-find_path(INCLUDE_JEMALLOC jemalloc/jemalloc.h)
-
-if (LIBRARY_JEMALLOC AND INCLUDE_JEMALLOC)
-set(EXTERNAL_JEMALLOC_LIBRARY_FOUND 1)
-
-set(THREADS_PREFER_PTHREAD_FLAG ON)
-find_package(Threads)
-
-set (CMAKE_REQUIRED_LIBRARIES ${LIBRARY_JEMALLOC} Threads::Threads "dl")
-set (CMAKE_REQUIRED_INCLUDES ${INCLUDE_JEMALLOC})
-check_cxx_source_compiles (
-"
-#include <jemalloc/jemalloc.h>
-
-int main() {
-free(mallocx(1, 0));
-}
-"
-EXTERNAL_JEMALLOC_LIBRARY_WORKS
-)
-
-if (EXTERNAL_JEMALLOC_LIBRARY_WORKS)
-add_library (jemalloc STATIC IMPORTED)
-set_property (TARGET jemalloc PROPERTY IMPORTED_LOCATION ${LIBRARY_JEMALLOC})
-set_property (TARGET jemalloc PROPERTY INTERFACE_INCLUDE_DIRECTORIES ${INCLUDE_JEMALLOC})
-set_property (TARGET jemalloc PROPERTY INTERFACE_LINK_LIBRARIES Threads::Threads dl)
-else()
-message (${RECONFIGURE_MESSAGE_LEVEL} "External jemalloc is unusable: ${LIBRARY_JEMALLOC} ${INCLUDE_JEMALLOC}")
-endif ()
-else()
-set(EXTERNAL_JEMALLOC_LIBRARY_FOUND 0)
-message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system jemalloc")
-endif()
-endif ()
-
-if (NOT EXTERNAL_JEMALLOC_LIBRARY_FOUND OR NOT EXTERNAL_JEMALLOC_LIBRARY_WORKS)
-set(USE_INTERNAL_JEMALLOC_LIBRARY 1)
-
 if (OS_LINUX)
 # ThreadPool select job randomly, and there can be some threads that had been
 # performed some memory heavy task before and will be inactive for some time,
 # but until it will became active again, the memory will not be freed since by
 # default each thread has it's own arena, but there should be not more then
 # 4*CPU arenas (see opt.nareans description).
 #
 # By enabling percpu_arena number of arenas limited to number of CPUs and hence
 # this problem should go away.
 #
 # muzzy_decay_ms -- use MADV_FREE when available on newer Linuxes, to
 # avoid spurious latencies and additional work associated with
 # MADV_DONTNEED. See
 # https://github.com/ClickHouse/ClickHouse/issues/11121 for motivation.
 set (JEMALLOC_CONFIG_MALLOC_CONF "percpu_arena:percpu,oversize_threshold:0,muzzy_decay_ms:10000")
 else()
 set (JEMALLOC_CONFIG_MALLOC_CONF "oversize_threshold:0,muzzy_decay_ms:10000")
 endif()
 # CACHE variable is empty, to allow changing defaults without necessity
 # to purge cache
 set (JEMALLOC_CONFIG_MALLOC_CONF_OVERRIDE "" CACHE STRING "Change default configuration string of JEMalloc" )
 if (JEMALLOC_CONFIG_MALLOC_CONF_OVERRIDE)
 set (JEMALLOC_CONFIG_MALLOC_CONF "${JEMALLOC_CONFIG_MALLOC_CONF_OVERRIDE}")
 endif()
 message (STATUS "jemalloc malloc_conf: ${JEMALLOC_CONFIG_MALLOC_CONF}")
 
 set (LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/jemalloc")
 
 set (SRCS
 ${LIBRARY_DIR}/src/arena.c
 ${LIBRARY_DIR}/src/background_thread.c
 ${LIBRARY_DIR}/src/base.c
 ${LIBRARY_DIR}/src/bin.c
 ${LIBRARY_DIR}/src/bitmap.c
 ${LIBRARY_DIR}/src/ckh.c
 ${LIBRARY_DIR}/src/ctl.c
 ${LIBRARY_DIR}/src/div.c
 ${LIBRARY_DIR}/src/extent.c
 ${LIBRARY_DIR}/src/extent_dss.c
 ${LIBRARY_DIR}/src/extent_mmap.c
 ${LIBRARY_DIR}/src/hash.c
 ${LIBRARY_DIR}/src/hook.c
 ${LIBRARY_DIR}/src/jemalloc.c
 ${LIBRARY_DIR}/src/large.c
 ${LIBRARY_DIR}/src/log.c
 ${LIBRARY_DIR}/src/malloc_io.c
 ${LIBRARY_DIR}/src/mutex.c
 ${LIBRARY_DIR}/src/mutex_pool.c
 ${LIBRARY_DIR}/src/nstime.c
 ${LIBRARY_DIR}/src/pages.c
 ${LIBRARY_DIR}/src/prng.c
 ${LIBRARY_DIR}/src/prof.c
 ${LIBRARY_DIR}/src/rtree.c
 ${LIBRARY_DIR}/src/sc.c
 ${LIBRARY_DIR}/src/stats.c
 ${LIBRARY_DIR}/src/sz.c
 ${LIBRARY_DIR}/src/tcache.c
 ${LIBRARY_DIR}/src/test_hooks.c
 ${LIBRARY_DIR}/src/ticker.c
 ${LIBRARY_DIR}/src/tsd.c
 ${LIBRARY_DIR}/src/witness.c
 ${LIBRARY_DIR}/src/safety_check.c
 )
 if (OS_DARWIN)
 list(APPEND SRCS ${LIBRARY_DIR}/src/zone.c)
 endif ()
 
 add_library(jemalloc ${SRCS})
 target_include_directories(jemalloc PRIVATE ${LIBRARY_DIR}/include)
 target_include_directories(jemalloc SYSTEM PUBLIC include)
 
 set (JEMALLOC_INCLUDE_PREFIX)
 # OS_
 if (OS_LINUX)
 set (JEMALLOC_INCLUDE_PREFIX "include_linux")
 elseif (OS_FREEBSD)
 set (JEMALLOC_INCLUDE_PREFIX "include_freebsd")
 elseif (OS_DARWIN)
 set (JEMALLOC_INCLUDE_PREFIX "include_darwin")
 else ()
 message (FATAL_ERROR "internal jemalloc: This OS is not supported")
 endif ()
 # ARCH_
 if (ARCH_AMD64)
 set(JEMALLOC_INCLUDE_PREFIX "${JEMALLOC_INCLUDE_PREFIX}_x86_64")
 elseif (ARCH_ARM)
 set(JEMALLOC_INCLUDE_PREFIX "${JEMALLOC_INCLUDE_PREFIX}_aarch64")
 else ()
 message (FATAL_ERROR "internal jemalloc: This arch is not supported")
 endif ()
 
 configure_file(${JEMALLOC_INCLUDE_PREFIX}/jemalloc/internal/jemalloc_internal_defs.h.in
 ${JEMALLOC_INCLUDE_PREFIX}/jemalloc/internal/jemalloc_internal_defs.h)
 target_include_directories(jemalloc SYSTEM PRIVATE
 ${CMAKE_CURRENT_BINARY_DIR}/${JEMALLOC_INCLUDE_PREFIX}/jemalloc/internal)
 
 target_compile_definitions(jemalloc PRIVATE -DJEMALLOC_NO_PRIVATE_NAMESPACE)
 
 if (CMAKE_BUILD_TYPE_UC STREQUAL "DEBUG")
 target_compile_definitions(jemalloc PRIVATE -DJEMALLOC_DEBUG=1 -DJEMALLOC_PROF=1)
 
 if (USE_UNWIND)
 target_compile_definitions (jemalloc PRIVATE -DJEMALLOC_PROF_LIBUNWIND=1)
 target_link_libraries (jemalloc PRIVATE unwind)
 endif ()
 endif ()
 
 target_compile_options(jemalloc PRIVATE -Wno-redundant-decls)
 # for RTLD_NEXT
 target_compile_options(jemalloc PRIVATE -D_GNU_SOURCE)
 
-set (USE_INTERNAL_JEMALLOC_LIBRARY 1)
-endif ()
-
 set_property(TARGET jemalloc APPEND PROPERTY INTERFACE_COMPILE_DEFINITIONS USE_JEMALLOC=1)
 if (MAKE_STATIC_LIBRARIES)
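One practical knob in the block above is the JEMALLOC_CONFIG_MALLOC_CONF_OVERRIDE cache variable; a hedged example of overriding the baked-in malloc_conf at configure time, where the value shown is only an illustration of a valid jemalloc configuration string:

# Hypothetical configure-time override, e.g. via
# cmake -DJEMALLOC_CONFIG_MALLOC_CONF_OVERRIDE="percpu_arena:percpu,muzzy_decay_ms:5000" ..
set (JEMALLOC_CONFIG_MALLOC_CONF_OVERRIDE "percpu_arena:percpu,muzzy_decay_ms:5000" CACHE STRING "Change default configuration string of JEMalloc")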
contrib/libhdfs3 (vendored submodule)
@@ -1 +1 @@
-Subproject commit 1b666578c85094306b061352078022f6350bfab8
+Subproject commit 30552ac527f2c14070d834e171493b2e7f662375
@@ -6,8 +6,8 @@ if (NOT USE_INTERNAL_LZ4_LIBRARY)
 
 if (LIBRARY_LZ4 AND INCLUDE_LZ4)
 set(EXTERNAL_LZ4_LIBRARY_FOUND 1)
-add_library (lz4 UNKNOWN IMPORTED)
-set_property (TARGET lz4 PROPERTY IMPORTED_LOCATION ${LIBRARY_LZ4})
+add_library (lz4 INTERFACE)
+set_property (TARGET lz4 PROPERTY INTERFACE_LINK_LIBRARIES ${LIBRARY_LZ4})
 set_property (TARGET lz4 PROPERTY INTERFACE_INCLUDE_DIRECTORIES ${INCLUDE_LZ4})
 set_property (TARGET lz4 APPEND PROPERTY INTERFACE_COMPILE_DEFINITIONS USE_XXHASH=0)
 else()
2
contrib/mariadb-connector-c
vendored
2
contrib/mariadb-connector-c
vendored
@ -1 +1 @@
|
|||||||
Subproject commit 3f512fedf0ba0f769a1b4852b4bac542d92c5b20
|
Subproject commit f5638e954a79f50bac7c7a5deaa5a241e0ce8b5f
|
2
contrib/openssl
vendored
2
contrib/openssl
vendored
@ -1 +1 @@
|
|||||||
Subproject commit 07e9623064508d15dd61367f960ebe7fc9aecd77
|
Subproject commit 237260dd6a4bca5cb5a321d366a8a9c807957455
|
4
debian/changelog
vendored
4
debian/changelog
vendored
@ -1,5 +1,5 @@
|
|||||||
clickhouse (20.10.1.1) unstable; urgency=low
|
clickhouse (20.11.1.1) unstable; urgency=low
|
||||||
|
|
||||||
* Modified source code
|
* Modified source code
|
||||||
|
|
||||||
-- clickhouse-release <clickhouse-release@yandex-team.ru> Tue, 08 Sep 2020 17:04:39 +0300
|
-- clickhouse-release <clickhouse-release@yandex-team.ru> Sat, 10 Oct 2020 18:39:55 +0300
|
||||||
|
2
debian/control
vendored
2
debian/control
vendored
@ -62,5 +62,5 @@ Description: debugging symbols for clickhouse-common-static
|
|||||||
Package: clickhouse-test
|
Package: clickhouse-test
|
||||||
Priority: optional
|
Priority: optional
|
||||||
Architecture: all
|
Architecture: all
|
||||||
Depends: ${shlibs:Depends}, ${misc:Depends}, clickhouse-client, bash, expect, python, python-lxml, python-termcolor, python-requests, curl, perl, sudo, openssl, netcat-openbsd, telnet, brotli, bsdutils
|
Depends: ${shlibs:Depends}, ${misc:Depends}, clickhouse-client, bash, expect, python3, python3-lxml, python3-termcolor, python3-requests, curl, perl, sudo, openssl, netcat-openbsd, telnet, brotli, bsdutils
|
||||||
Description: ClickHouse tests
|
Description: ClickHouse tests
|
||||||
|
@ -25,10 +25,10 @@ RUN apt-get update \
ninja-build \
perl \
pkg-config \
-python \
-python-lxml \
-python-requests \
-python-termcolor \
+python3 \
+python3-lxml \
+python3-requests \
+python3-termcolor \
tzdata \
llvm-${LLVM_VERSION} \
clang-${LLVM_VERSION} \

@ -1,7 +1,7 @@
FROM ubuntu:18.04

ARG repository="deb https://repo.clickhouse.tech/deb/stable/ main/"
-ARG version=20.10.1.*
+ARG version=20.11.1.*

RUN apt-get update \
&& apt-get install --yes --no-install-recommends \

@ -9,7 +9,8 @@
"name": "yandex/clickhouse-binary-builder",
"dependent": [
"docker/test/split_build_smoke_test",
-"docker/test/pvs"
+"docker/test/pvs",
+"docker/test/codebrowser"
]
},
"docker/packager/unbundled": {
@ -1,7 +1,7 @@
# docker build -t yandex/clickhouse-binary-builder .
FROM ubuntu:20.04

-ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=10
+ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=11

RUN apt-get update \
&& apt-get install ca-certificates lsb-release wget gnupg apt-transport-https \

@ -11,7 +11,7 @@ RUN apt-get update \
&& echo "${LLVM_PUBKEY_HASH} /tmp/llvm-snapshot.gpg.key" | sha384sum -c \
&& apt-key add /tmp/llvm-snapshot.gpg.key \
&& export CODENAME="$(lsb_release --codename --short | tr 'A-Z' 'a-z')" \
-&& echo "deb [trusted=yes] http://apt.llvm.org/${CODENAME}/ llvm-toolchain-${CODENAME}-11 main" >> \
+&& echo "deb [trusted=yes] http://apt.llvm.org/${CODENAME}/ llvm-toolchain-${CODENAME}-${LLVM_VERSION} main" >> \
/etc/apt/sources.list

# initial packages

@ -32,10 +32,11 @@ RUN apt-get update \
curl \
gcc-9 \
g++-9 \
-llvm-${LLVM_VERSION} \
-clang-${LLVM_VERSION} \
-lld-${LLVM_VERSION} \
-clang-tidy-${LLVM_VERSION} \
+clang-10 \
+clang-tidy-10 \
+lld-10 \
+llvm-10 \
+llvm-10-dev \
clang-11 \
clang-tidy-11 \
lld-11 \
@ -17,7 +17,9 @@ ccache --show-stats ||:
ccache --zero-stats ||:
ln -s /usr/lib/x86_64-linux-gnu/libOpenCL.so.1.0.0 /usr/lib/libOpenCL.so ||:
rm -f CMakeCache.txt
-cmake --debug-trycompile --verbose=1 -DCMAKE_VERBOSE_MAKEFILE=1 -LA "-DCMAKE_BUILD_TYPE=$BUILD_TYPE" "-DSANITIZE=$SANITIZER" -DENABLE_CHECK_HEAVY_BUILDS=1 "$CMAKE_FLAGS" ..
+# Read cmake arguments into array (possibly empty)
+read -ra CMAKE_FLAGS <<< "${CMAKE_FLAGS:-}"
+cmake --debug-trycompile --verbose=1 -DCMAKE_VERBOSE_MAKEFILE=1 -LA "-DCMAKE_BUILD_TYPE=$BUILD_TYPE" "-DSANITIZE=$SANITIZER" -DENABLE_CHECK_HEAVY_BUILDS=1 "${CMAKE_FLAGS[@]}" ..
# shellcheck disable=SC2086 # No quotes because I want it to expand to nothing if empty.
ninja $NINJA_FLAGS clickhouse-bundle
mv ./programs/clickhouse* /output
@ -1,7 +1,7 @@
# docker build -t yandex/clickhouse-deb-builder .
FROM ubuntu:20.04

-ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=10
+ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=11

RUN apt-get update \
&& apt-get install ca-certificates lsb-release wget gnupg apt-transport-https \

@ -11,7 +11,7 @@ RUN apt-get update \
&& echo "${LLVM_PUBKEY_HASH} /tmp/llvm-snapshot.gpg.key" | sha384sum -c \
&& apt-key add /tmp/llvm-snapshot.gpg.key \
&& export CODENAME="$(lsb_release --codename --short | tr 'A-Z' 'a-z')" \
-&& echo "deb [trusted=yes] http://apt.llvm.org/${CODENAME}/ llvm-toolchain-${CODENAME}-11 main" >> \
+&& echo "deb [trusted=yes] http://apt.llvm.org/${CODENAME}/ llvm-toolchain-${CODENAME}-${LLVM_VERSION} main" >> \
/etc/apt/sources.list

# initial packages
@ -49,15 +49,11 @@ RUN apt-get update \
lld-11 \
llvm-11 \
llvm-11-dev \
-clang-${LLVM_VERSION} \
-clang-tidy-${LLVM_VERSION} \
-lld-${LLVM_VERSION} \
-llvm-${LLVM_VERSION} \
-llvm-${LLVM_VERSION}-dev \
-llvm-9-dev \
-lld-9 \
-clang-9 \
-clang-tidy-9 \
+clang-10 \
+clang-tidy-10 \
+lld-10 \
+llvm-10 \
+llvm-10-dev \
ninja-build \
perl \
pkg-config \
@ -4,7 +4,8 @@ set -x -e

ccache --show-stats ||:
ccache --zero-stats ||:
-build/release --no-pbuilder "$ALIEN_PKGS" | ts '%Y-%m-%d %H:%M:%S'
+read -ra ALIEN_PKGS <<< "${ALIEN_PKGS:-}"
+build/release --no-pbuilder "${ALIEN_PKGS[@]}" | ts '%Y-%m-%d %H:%M:%S'
mv /*.deb /output
mv -- *.changes /output
mv -- *.buildinfo /output
@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
#-*- coding: utf-8 -*-
import subprocess
import os
@ -51,6 +51,7 @@ RUN apt-get update \
protobuf-compiler \
libprotoc-dev \
libgrpc++-dev \
+protobuf-compiler-grpc \
rapidjson-dev \
libsnappy-dev \
libparquet-dev \
@ -4,7 +4,8 @@ set -x -e

ccache --show-stats ||:
ccache --zero-stats ||:
-build/release --no-pbuilder "$ALIEN_PKGS" | ts '%Y-%m-%d %H:%M:%S'
+read -ra ALIEN_PKGS <<< "${ALIEN_PKGS:-}"
+build/release --no-pbuilder "${ALIEN_PKGS[@]}" | ts '%Y-%m-%d %H:%M:%S'
mv /*.deb /output
mv -- *.changes /output
mv -- *.buildinfo /output
@ -1,7 +1,7 @@
FROM ubuntu:20.04

ARG repository="deb https://repo.clickhouse.tech/deb/stable/ main/"
-ARG version=20.10.1.*
+ARG version=20.11.1.*
ARG gosu_ver=1.10

RUN apt-get update \
@ -1,7 +1,7 @@
FROM ubuntu:18.04

ARG repository="deb https://repo.clickhouse.tech/deb/stable/ main/"
-ARG version=20.10.1.*
+ARG version=20.11.1.*

RUN apt-get update && \
apt-get install -y apt-transport-https dirmngr && \
@ -1,33 +1,15 @@
# docker build --network=host -t yandex/clickhouse-codebrowser .
# docker run --volume=path_to_repo:/repo_folder --volume=path_to_result:/test_output yandex/clickhouse-codebrowser
-FROM ubuntu:18.04
+FROM yandex/clickhouse-binary-builder

-RUN apt-get --allow-unauthenticated update -y \
-&& env DEBIAN_FRONTEND=noninteractive \
-apt-get --allow-unauthenticated install --yes --no-install-recommends \
-bash \
-sudo \
-wget \
-software-properties-common \
-ca-certificates \
-apt-transport-https \
-build-essential \
-gpg-agent \
-git
-
-RUN wget -nv -O - https://apt.kitware.com/keys/kitware-archive-latest.asc | sudo apt-key add -
-RUN sudo apt-add-repository 'deb https://apt.kitware.com/ubuntu/ bionic main'
-RUN sudo echo "deb [trusted=yes] http://apt.llvm.org/bionic/ llvm-toolchain-bionic-8 main" >> /etc/apt/sources.list
-
-RUN sudo apt-get --yes --allow-unauthenticated update
-# To build woboq
-RUN sudo apt-get --yes --allow-unauthenticated install cmake clang-8 libllvm8 libclang-8-dev
+RUN apt-get update && apt-get --yes --allow-unauthenticated install clang-9 libllvm9 libclang-9-dev

# repo versions doesn't work correctly with C++17
# also we push reports to s3, so we add index.html to subfolder urls
# https://github.com/ClickHouse-Extras/woboq_codebrowser/commit/37e15eaf377b920acb0b48dbe82471be9203f76b
RUN git clone https://github.com/ClickHouse-Extras/woboq_codebrowser
-RUN cd woboq_codebrowser && cmake . -DCMAKE_BUILD_TYPE=Release && make -j
+RUN cd woboq_codebrowser && cmake . -DCMAKE_BUILD_TYPE=Release -DCMAKE_CXX_COMPILER=clang\+\+-9 -DCMAKE_C_COMPILER=clang-9 && make -j

ENV CODEGEN=/woboq_codebrowser/generator/codebrowser_generator
ENV CODEINDEX=/woboq_codebrowser/indexgenerator/codebrowser_indexgenerator

@ -40,7 +22,7 @@ ENV SHA=nosha
ENV DATA="data"

CMD mkdir -p $BUILD_DIRECTORY && cd $BUILD_DIRECTORY && \
-cmake $SOURCE_DIRECTORY -DCMAKE_CXX_COMPILER=/usr/bin/clang\+\+-8 -DCMAKE_C_COMPILER=/usr/bin/clang-8 -DCMAKE_EXPORT_COMPILE_COMMANDS=ON && \
+cmake $SOURCE_DIRECTORY -DCMAKE_CXX_COMPILER=/usr/bin/clang\+\+-11 -DCMAKE_C_COMPILER=/usr/bin/clang-11 -DCMAKE_EXPORT_COMPILE_COMMANDS=ON && \
mkdir -p $HTML_RESULT_DIRECTORY && \
$CODEGEN -b $BUILD_DIRECTORY -a -o $HTML_RESULT_DIRECTORY -p ClickHouse:$SOURCE_DIRECTORY:$SHA -d $DATA && \
cp -r $STATIC_DATA $HTML_RESULT_DIRECTORY/ &&\
@ -11,7 +11,7 @@ RUN apt-get update \
&& echo "${LLVM_PUBKEY_HASH} /tmp/llvm-snapshot.gpg.key" | sha384sum -c \
&& apt-key add /tmp/llvm-snapshot.gpg.key \
&& export CODENAME="$(lsb_release --codename --short | tr 'A-Z' 'a-z')" \
-&& echo "deb [trusted=yes] http://apt.llvm.org/${CODENAME}/ llvm-toolchain-${CODENAME}-${LLVM_VERSION} main" >> \
+&& echo "deb [trusted=yes] https://apt.llvm.org/${CODENAME}/ llvm-toolchain-${CODENAME}-${LLVM_VERSION} main" >> \
/etc/apt/sources.list

# initial packages

@ -52,10 +52,10 @@ RUN apt-get update \
moreutils \
ninja-build \
psmisc \
-python \
-python-lxml \
-python-requests \
-python-termcolor \
+python3 \
+python3-lxml \
+python3-requests \
+python3-termcolor \
qemu-user-static \
rename \
software-properties-common \
@ -4,7 +4,7 @@ FROM yandex/clickhouse-test-base
RUN apt-get update \
&& env DEBIAN_FRONTEND=noninteractive apt-get -y install \
tzdata \
-python \
+python3 \
libreadline-dev \
libicu-dev \
bsdutils \
@ -16,13 +16,13 @@ RUN apt-get update \
iproute2 \
module-init-tools \
cgroupfs-mount \
-python-pip \
+python3-pip \
tzdata \
libreadline-dev \
libicu-dev \
bsdutils \
curl \
-python-pika \
+python3-pika \
liblua5.1-dev \
luajit \
libssl-dev \

@ -37,7 +37,7 @@ RUN apt-get update \
ENV TZ=Europe/Moscow
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone

-RUN pip install urllib3==1.23 pytest docker-compose==1.22.0 docker dicttoxml kazoo PyMySQL psycopg2==2.7.5 pymongo tzlocal kafka-python protobuf redis aerospike pytest-timeout minio rpm-confluent-schemaregistry grpcio grpcio-tools cassandra-driver
+RUN python3 -m pip install urllib3==1.23 pytest docker-compose==1.22.0 docker dicttoxml kazoo PyMySQL psycopg2==2.7.5 pymongo tzlocal kafka-python protobuf redis aerospike pytest-timeout minio grpcio grpcio-tools cassandra-driver confluent-kafka avro

ENV DOCKER_CHANNEL stable
ENV DOCKER_VERSION 17.09.1-ce
@ -312,7 +312,7 @@ def add_errors_explained():


if args.report == 'main':
-    print(header_template.format())
+    print((header_template.format()))

    add_tested_commits()

@ -571,14 +571,14 @@ if args.report == 'main':
        status = 'failure'
        message = 'Errors while building the report.'

-    print("""
+    print(("""
    <!--status: {status}-->
    <!--message: {message}-->
-    """.format(status=status, message=message))
+    """.format(status=status, message=message)))

elif args.report == 'all-queries':

-    print(header_template.format())
+    print((header_template.format()))

    add_tested_commits()
@ -4,7 +4,7 @@ FROM yandex/clickhouse-stateless-test
RUN apt-get update -y \
&& env DEBIAN_FRONTEND=noninteractive \
apt-get install --yes --no-install-recommends \
-python-requests \
+python3-requests \
llvm-9

COPY s3downloader /s3downloader
@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import sys
@ -29,7 +29,7 @@ def dowload_with_progress(url, path):
    logging.info("Downloading from %s to temp path %s", url, path)
    for i in range(RETRIES_COUNT):
        try:
-            with open(path, 'w') as f:
+            with open(path, 'wb') as f:
                response = requests.get(url, stream=True)
                response.raise_for_status()
                total_length = response.headers.get('content-length')
@ -74,7 +74,7 @@ if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="Simple tool for dowloading datasets for clickhouse from S3")

-    parser.add_argument('--dataset-names', required=True, nargs='+', choices=AVAILABLE_DATASETS.keys())
+    parser.add_argument('--dataset-names', required=True, nargs='+', choices=list(AVAILABLE_DATASETS.keys()))
    parser.add_argument('--url-prefix', default=DEFAULT_URL)
    parser.add_argument('--clickhouse-data-path', default='/var/lib/clickhouse/')
@ -6,7 +6,7 @@ RUN echo "deb [trusted=yes] http://apt.llvm.org/bionic/ llvm-toolchain-bionic-9
RUN apt-get update -y \
&& env DEBIAN_FRONTEND=noninteractive \
apt-get install --yes --no-install-recommends \
-python-requests
+python3-requests

COPY s3downloader /s3downloader
COPY run.sh /run.sh
@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import sys
@ -74,7 +74,7 @@ if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="Simple tool for dowloading datasets for clickhouse from S3")

-    parser.add_argument('--dataset-names', required=True, nargs='+', choices=AVAILABLE_DATASETS.keys())
+    parser.add_argument('--dataset-names', required=True, nargs='+', choices=list(AVAILABLE_DATASETS.keys()))
    parser.add_argument('--url-prefix', default=DEFAULT_URL)
    parser.add_argument('--clickhouse-data-path', default='/var/lib/clickhouse/')
@ -12,10 +12,10 @@ RUN apt-get update -y \
ncdu \
netcat-openbsd \
openssl \
-python \
-python-lxml \
-python-requests \
-python-termcolor \
+python3 \
+python3-lxml \
+python3-requests \
+python3-termcolor \
qemu-user-static \
sudo \
telnet \
@ -3,10 +3,10 @@ FROM yandex/clickhouse-test-base

RUN apt-get update -y && \
apt-get install -y --no-install-recommends \
-python-pip \
-python-setuptools
+python3-pip \
+python3-setuptools

-RUN pip install \
+RUN python3 -m pip install \
pytest \
pytest-html \
pytest-timeout \

@ -17,4 +17,4 @@ CMD dpkg -i package_folder/clickhouse-common-static_*.deb; \
dpkg -i package_folder/clickhouse-server_*.deb; \
dpkg -i package_folder/clickhouse-client_*.deb; \
dpkg -i package_folder/clickhouse-test_*.deb; \
-python -m pytest /usr/share/clickhouse-test/queries -n $(nproc) --html=test_output/report.html --self-contained-html
+python3 -m pytest /usr/share/clickhouse-test/queries -n $(nproc) --html=test_output/report.html --self-contained-html
@ -54,10 +54,10 @@ RUN apt-get --allow-unauthenticated update -y \
perl \
pigz \
pkg-config \
-python \
-python-lxml \
-python-requests \
-python-termcolor \
+python3 \
+python3-lxml \
+python3-requests \
+python3-termcolor \
qemu-user-static \
sudo \
telnet \
@ -12,10 +12,10 @@ RUN apt-get update -y \
fakeroot \
debhelper \
expect \
-python \
-python-lxml \
-python-termcolor \
-python-requests \
+python3 \
+python3-lxml \
+python3-termcolor \
+python3-requests \
sudo \
openssl \
ncdu \
@ -10,10 +10,10 @@ RUN apt-get update -y \
debhelper \
parallel \
expect \
-python \
-python-lxml \
-python-termcolor \
-python-requests \
+python3 \
+python3-lxml \
+python3-termcolor \
+python3-requests \
curl \
sudo \
openssl \
@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from multiprocessing import cpu_count
from subprocess import Popen, check_call
@ -116,7 +116,7 @@ ninja
Example for Fedora Rawhide:
``` bash
sudo yum update
-yum --nogpg install git cmake make gcc-c++ python2
+yum --nogpg install git cmake make gcc-c++ python3
git clone --recursive https://github.com/ClickHouse/ClickHouse.git
mkdir build && cd build
cmake ../ClickHouse
@ -52,9 +52,9 @@ Optional parameters:
- `rabbitmq_schema` – Parameter that must be used if the format requires a schema definition. For example, [Cap’n Proto](https://capnproto.org/) requires the path to the schema file and the name of the root `schema.capnp:Message` object.
- `rabbitmq_num_consumers` – The number of consumers per table. Default: `1`. Specify more consumers if the throughput of one consumer is insufficient.
- `rabbitmq_num_queues` – The number of queues per consumer. Default: `1`. Specify more queues if the capacity of one queue per consumer is insufficient.
-- `rabbitmq_queue_base` - Specify a base name for queues that will be declared. By default, queues are declared unique to tables based on db and table names.
+- `rabbitmq_queue_base` - Specify a hint for queue names. Use cases of this setting are described below.
- `rabbitmq_deadletter_exchange` - Specify name for a [dead letter exchange](https://www.rabbitmq.com/dlx.html). You can create another table with this exchange name and collect messages in cases when they are republished to dead letter exchange. By default dead letter exchange is not specified.
-- `persistent` - If set to 1 (true), in insert query delivery mode will be set to 2 (marks messages as 'persistent'). Default: `0`.
+- `rabbitmq_persistent` - If set to 1 (true), in insert query delivery mode will be set to 2 (marks messages as 'persistent'). Default: `0`.
- `rabbitmq_skip_broken_messages` – RabbitMQ message parser tolerance to schema-incompatible messages per block. Default: `0`. If `rabbitmq_skip_broken_messages = N` then the engine skips *N* RabbitMQ messages that cannot be parsed (a message equals a row of data).
- `rabbitmq_max_block_size`
- `rabbitmq_flush_interval_ms`

@ -102,12 +102,12 @@ Exchange type options:
- `fanout` - Routing to all tables (where exchange name is the same) regardless of the keys.
- `topic` - Routing is based on patterns with dot-separated keys. Examples: `*.logs`, `records.*.*.2020`, `*.2018,*.2019,*.2020`.
- `headers` - Routing is based on `key=value` matches with a setting `x-match=all` or `x-match=any`. Example table key list: `x-match=all,format=logs,type=report,year=2020`.
-- `consistent-hash` - Data is evenly distributed between all bound tables (where the exchange name is the same). Note that this exchange type must be enabled with RabbitMQ plugin: `rabbitmq-plugins enable rabbitmq_consistent_hash_exchange`.
+- `consistent_hash` - Data is evenly distributed between all bound tables (where the exchange name is the same). Note that this exchange type must be enabled with RabbitMQ plugin: `rabbitmq-plugins enable rabbitmq_consistent_hash_exchange`.

Setting `rabbitmq_queue_base` may be used for the following cases:
- to let different tables share queues, so that multiple consumers could be registered for the same queues, which makes a better performance. If using `rabbitmq_num_consumers` and/or `rabbitmq_num_queues` settings, the exact match of queues is achieved in case these parameters are the same.
-- to be able to restore reading from certain durable queues when not all messages were successfully consumed. To be able to resume consumption from one specific queue - set its name in `rabbitmq_queue_base` setting and do not specify `rabbitmq_num_consumers` and `rabbitmq_num_queues` (defaults to 1). To be able to resume consumption from all queues, which were declared for a specific table - just specify the same settings: `rabbitmq_queue_base`, `rabbitmq_num_consumers`, `rabbitmq_num_queues`. By default, queue names will be unique to tables. Note: it makes sence only if messages are sent with delivery mode 2 - marked 'persistent', durable.
+- to be able to restore reading from certain durable queues when not all messages were successfully consumed. To resume consumption from one specific queue - set its name in `rabbitmq_queue_base` setting and do not specify `rabbitmq_num_consumers` and `rabbitmq_num_queues` (defaults to 1). To resume consumption from all queues, which were declared for a specific table - just specify the same settings: `rabbitmq_queue_base`, `rabbitmq_num_consumers`, `rabbitmq_num_queues`. By default, queue names will be unique to tables.
-- to reuse queues as they are declared durable and not auto-deleted.
+- to reuse queues as they are declared durable and not auto-deleted. (Can be deleted via any of RabbitMQ CLI tools.)

To improve performance, received messages are grouped into blocks the size of [max\_insert\_block\_size](../../../operations/server-configuration-parameters/settings.md#settings-max_insert_block_size). If the block wasn’t formed within [stream\_flush\_interval\_ms](../../../operations/server-configuration-parameters/settings.md) milliseconds, the data will be flushed to the table regardless of the completeness of the block.
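To make the queue settings above concrete, here is a minimal sketch of a RabbitMQ-backed table that pins `rabbitmq_queue_base` so consumption can be resumed later. It is an illustration only and not part of this commit; the table, exchange and queue names are hypothetical.

```sql
-- Minimal sketch: a RabbitMQ engine table that shares/resumes durable queues.
-- All names here (rabbit_queue_resume, logs_exchange, shared_base) are hypothetical.
CREATE TABLE rabbit_queue_resume
(
    key UInt64,
    value String
)
ENGINE = RabbitMQ
SETTINGS
    rabbitmq_host_port = 'localhost:5672',
    rabbitmq_exchange_name = 'logs_exchange',
    rabbitmq_format = 'JSONEachRow',
    rabbitmq_queue_base = 'shared_base',  -- fixed hint, so another table (or a restart) can reattach to the same durable queues
    rabbitmq_num_consumers = 1,
    rabbitmq_num_queues = 1;
```

Declaring a second table with the same `rabbitmq_queue_base`, `rabbitmq_num_consumers` and `rabbitmq_num_queues` would attach to the same queues, which is the sharing/resuming behaviour described above.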
@ -20,6 +20,7 @@ toc_title: Client Libraries
- [simpod/clickhouse-client](https://packagist.org/packages/simpod/clickhouse-client)
- [seva-code/php-click-house-client](https://packagist.org/packages/seva-code/php-click-house-client)
- [SeasClick C++ client](https://github.com/SeasX/SeasClick)
+- [one-ck](https://github.com/lizhichao/one-ck)
- Go
    - [clickhouse](https://github.com/kshvakov/clickhouse/)
    - [go-clickhouse](https://github.com/roistat/go-clickhouse)
@ -66,6 +66,32 @@ If no conditions met for a data part, ClickHouse uses the `lz4` compression.
</compression>
```

+## custom_settings_prefixes {#custom_settings_prefixes}
+
+List of prefixes for [custom settings](../../operations/settings/index.md#custom_settings). The prefixes must be separated with commas.
+
+**Example**
+
+```xml
+<custom_settings_prefixes>custom_</custom_settings_prefixes>
+```
+
+**See Also**
+
+- [Custom settings](../../operations/settings/index.md#custom_settings)
+
+## core_dump
+
+Configures soft limit for core dump file size, one gigabyte by default.
+```xml
+<core_dump>
+    <size_limit>1073741824</size_limit>
+</core_dump>
+```
+
+(Hard limit is configured via system tools)
+
## default\_database {#default-database}

The default database.
@ -405,7 +431,7 @@ Limits total RAM usage by the ClickHouse server.
Possible values:

- Positive integer.
-- 0 — Unlimited.
+- 0 (auto).

Default value: `0`.
@ -28,4 +28,30 @@ Ways to configure settings, in order of priority:

Settings that can only be made in the server config file are not covered in this section.

+## Custom Settings {#custom_settings}
+
+In addition to the common [settings](../../operations/settings/settings.md), users can define custom settings.
+
+A custom setting name must begin with one of predefined prefixes. The list of these prefixes must be declared in the [custom_settings_prefixes](../../operations/server-configuration-parameters/settings.md#custom_settings_prefixes) parameter in the server configuration file.
+
+```xml
+<custom_settings_prefixes>custom_</custom_settings_prefixes>
+```
+
+To define a custom setting use `SET` command:
+
+```sql
+SET custom_a = 123;
+```
+
+To get the current value of a custom setting use `getSetting()` function:
+
+```sql
+SELECT getSetting('custom_a');
+```
+
+**See Also**
+
+- [Server Configuration Settings](../../operations/server-configuration-parameters/settings.md)
+
[Original article](https://clickhouse.tech/docs/en/operations/settings/) <!--hide-->
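Tying the two halves of the custom-settings documentation above together, the flow below is a minimal sketch; the setting name `custom_timeout_hint` is hypothetical and not part of this commit, and the exact error reported for a non-declared prefix may differ between server versions.

```sql
-- Assumes the server config declares: <custom_settings_prefixes>custom_</custom_settings_prefixes>
SET custom_timeout_hint = 30;              -- accepted: the name starts with a declared prefix
SELECT getSetting('custom_timeout_hint');  -- returns 30 for the current session

-- A name that does not start with a declared prefix is rejected by the server
-- (reported as an unknown setting):
-- SET other_timeout_hint = 30;
```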
@ -70,6 +70,35 @@ Works with tables in the MergeTree family.

If `force_primary_key=1`, ClickHouse checks to see if the query has a primary key condition that can be used for restricting data ranges. If there is no suitable condition, it throws an exception. However, it does not check whether the condition reduces the amount of data to read. For more information about data ranges in MergeTree tables, see [MergeTree](../../engines/table-engines/mergetree-family/mergetree.md).

+## force\_data\_skipping\_indices {#settings-force_data_skipping_indices}
+
+Disables query execution if the passed data skipping indices weren't used.
+
+Consider the following example:
+
+```sql
+CREATE TABLE data_01515
+(
+    key Int,
+    d1 Int,
+    d1_null Nullable(Int),
+    INDEX d1_idx d1 TYPE minmax GRANULARITY 1,
+    INDEX d1_null_idx assumeNotNull(d1_null) TYPE minmax GRANULARITY 1
+)
+Engine=MergeTree()
+ORDER BY key;
+
+SELECT * FROM data_01515;
+SELECT * FROM data_01515 SETTINGS force_data_skipping_indices=''; -- query will produce CANNOT_PARSE_TEXT error.
+SELECT * FROM data_01515 SETTINGS force_data_skipping_indices='d1_idx'; -- query will produce INDEX_NOT_USED error.
+SELECT * FROM data_01515 WHERE d1 = 0 SETTINGS force_data_skipping_indices='d1_idx'; -- Ok.
+SELECT * FROM data_01515 WHERE d1 = 0 SETTINGS force_data_skipping_indices='`d1_idx`'; -- Ok (example of full featured parser).
+SELECT * FROM data_01515 WHERE d1 = 0 SETTINGS force_data_skipping_indices='`d1_idx`, d1_null_idx'; -- query will produce INDEX_NOT_USED error, since d1_null_idx is not used.
+SELECT * FROM data_01515 WHERE d1 = 0 AND assumeNotNull(d1_null) = 0 SETTINGS force_data_skipping_indices='`d1_idx`, d1_null_idx'; -- Ok.
+```
+
+Works with tables in the MergeTree family.
+
## format\_schema {#format-schema}

This parameter is useful when you are using formats that require a schema definition, such as [Cap’n Proto](https://capnproto.org/) or [Protobuf](https://developers.google.com/protocol-buffers/). The value depends on the format.
@ -1144,9 +1173,9 @@ See also:

## insert\_quorum\_timeout {#settings-insert_quorum_timeout}

-Write to quorum timeout in seconds. If the timeout has passed and no write has taken place yet, ClickHouse will generate an exception and the client must repeat the query to write the same block to the same or any other replica.
+Write to quorum timeout in milliseconds. If the timeout has passed and no write has taken place yet, ClickHouse will generate an exception and the client must repeat the query to write the same block to the same or any other replica.

-Default value: 60 seconds.
+Default value: 600000 milliseconds (ten minutes).

See also:

@ -1565,7 +1594,7 @@ See also:

## allow\_introspection\_functions {#settings-allow_introspection_functions}

-Enables of disables [introspections functions](../../sql-reference/functions/introspection.md) for query profiling.
+Enables or disables [introspection functions](../../sql-reference/functions/introspection.md) for query profiling.

Possible values:

@ -2027,3 +2056,14 @@ Result:
```

[Original article](https://clickhouse.tech/docs/en/operations/settings/settings/) <!-- hide -->
+
+## allow_experimental_bigint_types {#allow_experimental_bigint_types}
+
+Enables or disables integer values exceeding the range that is supported by the int data type.
+
+Possible values:
+
+- 1 — The bigint data type is enabled.
+- 0 — The bigint data type is disabled.
+
+Default value: `0`.

@ -3,25 +3,27 @@ toc_priority: 42
toc_title: Decimal
---

-# Decimal(P, S), Decimal32(S), Decimal64(S), Decimal128(S) {#decimalp-s-decimal32s-decimal64s-decimal128s}
+# Decimal(P, S), Decimal32(S), Decimal64(S), Decimal128(S), Decimal256(S) {#decimal}

Signed fixed-point numbers that keep precision during add, subtract and multiply operations. For division least significant digits are discarded (not rounded).

## Parameters {#parameters}

-- P - precision. Valid range: \[ 1 : 38 \]. Determines how many decimal digits number can have (including fraction).
+- P - precision. Valid range: \[ 1 : 76 \]. Determines how many decimal digits number can have (including fraction).
- S - scale. Valid range: \[ 0 : P \]. Determines how many decimal digits fraction can have.

Depending on P parameter value Decimal(P, S) is a synonym for:
- P from \[ 1 : 9 \] - for Decimal32(S)
- P from \[ 10 : 18 \] - for Decimal64(S)
- P from \[ 19 : 38 \] - for Decimal128(S)
+- P from \[ 39 : 76 \] - for Decimal256(S)

## Decimal Value Ranges {#decimal-value-ranges}

- Decimal32(S) - ( -1 \* 10^(9 - S), 1 \* 10^(9 - S) )
- Decimal64(S) - ( -1 \* 10^(18 - S), 1 \* 10^(18 - S) )
- Decimal128(S) - ( -1 \* 10^(38 - S), 1 \* 10^(38 - S) )
+- Decimal256(S) - ( -1 \* 10^(76 - S), 1 \* 10^(76 - S) )

For example, Decimal32(4) can contain numbers from -99999.9999 to 99999.9999 with 0.0001 step.

@ -38,6 +40,7 @@ Binary operations on Decimal result in wider result type (with any order of argu
- `Decimal64(S1) <op> Decimal32(S2) -> Decimal64(S)`
- `Decimal128(S1) <op> Decimal32(S2) -> Decimal128(S)`
- `Decimal128(S1) <op> Decimal64(S2) -> Decimal128(S)`
+- `Decimal256(S1) <op> Decimal<32|64|128>(S2) -> Decimal256(S)`

Rules for scale:

@ -104,4 +107,8 @@ SELECT toDecimal32(1, 8) < 100
DB::Exception: Can't compare.
```

+**See also**
+- [isDecimalOverflow](../../sql-reference/functions/other-functions.md#is-decimal-overflow)
+- [countDigits](../../sql-reference/functions/other-functions.md#count-digits)
+
[Original article](https://clickhouse.tech/docs/en/data_types/decimal/) <!--hide-->
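Since the wide decimals documented above and the wide integers documented in the next file sit behind the same experimental switch, here is a small sketch of how they would be exercised; the table and column names are hypothetical and not part of this commit, and `allow_experimental_bigint_types` is the setting introduced here.

```sql
-- Hypothetical illustration of the 256-bit types; names are not from the commit.
SET allow_experimental_bigint_types = 1;

CREATE TABLE wide_numbers
(
    id UInt64,
    big_i Int256,
    big_d Decimal256(4)
)
ENGINE = MergeTree()
ORDER BY id;

-- Mixing decimal widths yields the wider type, per the rule Decimal256 <op> Decimal<32|64|128> -> Decimal256.
SELECT toDecimal256('123456789012345678901234567890.1234', 4) + toDecimal32(1.5, 4) AS sum_wide;
```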
@ -1,9 +1,9 @@
|
|||||||
---
|
---
|
||||||
toc_priority: 40
|
toc_priority: 40
|
||||||
toc_title: UInt8, UInt16, UInt32, UInt64, Int8, Int16, Int32, Int64
|
toc_title: UInt8, UInt16, UInt32, UInt64, UInt256, Int8, Int16, Int32, Int64, Int128, Int256
|
||||||
---
|
---
|
||||||
|
|
||||||
# UInt8, UInt16, UInt32, UInt64, Int8, Int16, Int32, Int64 {#uint8-uint16-uint32-uint64-int8-int16-int32-int64}
|
# UInt8, UInt16, UInt32, UInt64, UInt256, Int8, Int16, Int32, Int64, Int128, Int256 {#uint8-uint16-uint32-uint64-uint256-int8-int16-int32-int64-int128-int256}
|
||||||
|
|
||||||
Fixed-length integers, with or without a sign.
|
Fixed-length integers, with or without a sign.
|
||||||
|
|
||||||
@ -13,6 +13,8 @@ Fixed-length integers, with or without a sign.
|
|||||||
- Int16 - \[-32768 : 32767\]
|
- Int16 - \[-32768 : 32767\]
|
||||||
- Int32 - \[-2147483648 : 2147483647\]
|
- Int32 - \[-2147483648 : 2147483647\]
|
||||||
- Int64 - \[-9223372036854775808 : 9223372036854775807\]
|
- Int64 - \[-9223372036854775808 : 9223372036854775807\]
|
||||||
|
- Int128 - \[-170141183460469231731687303715884105728 : 170141183460469231731687303715884105727\]
|
||||||
|
- Int256 - \[-57896044618658097711785492504343953926634992332820282019728792003956564819968 : 57896044618658097711785492504343953926634992332820282019728792003956564819967\]
|
||||||
|
|
||||||
## Uint Ranges {#uint-ranges}
|
## Uint Ranges {#uint-ranges}
|
||||||
|
|
||||||
@ -20,5 +22,8 @@ Fixed-length integers, with or without a sign.
|
|||||||
- UInt16 - \[0 : 65535\]
|
- UInt16 - \[0 : 65535\]
|
||||||
- UInt32 - \[0 : 4294967295\]
|
- UInt32 - \[0 : 4294967295\]
|
||||||
- UInt64 - \[0 : 18446744073709551615\]
|
- UInt64 - \[0 : 18446744073709551615\]
|
||||||
|
- UInt256 - \[0 : 115792089237316195423570985008687907853269984665640564039457584007913129639935\]
|
||||||
|
|
||||||
|
UInt128 is not supported yet.
|
||||||
|
|
||||||
[Original article](https://clickhouse.tech/docs/en/data_types/int_uint/) <!--hide-->
|
[Original article](https://clickhouse.tech/docs/en/data_types/int_uint/) <!--hide-->
|
||||||
|
@ -16,3 +16,82 @@ The [stochasticLinearRegression](../../sql-reference/aggregate-functions/referen
|
|||||||
## stochasticLogisticRegression {#stochastic-logistic-regression}
|
## stochasticLogisticRegression {#stochastic-logistic-regression}
|
||||||
|
|
||||||
The [stochasticLogisticRegression](../../sql-reference/aggregate-functions/reference/stochasticlogisticregression.md#agg_functions-stochasticlogisticregression) aggregate function implements stochastic gradient descent method for binary classification problem. Uses `evalMLMethod` to predict on new data.
|
The [stochasticLogisticRegression](../../sql-reference/aggregate-functions/reference/stochasticlogisticregression.md#agg_functions-stochasticlogisticregression) aggregate function implements stochastic gradient descent method for binary classification problem. Uses `evalMLMethod` to predict on new data.
|
||||||
|
|
||||||
|
## bayesAB {#bayesab}
|
||||||
|
|
||||||
|
Compares test groups (variants) and calculates for each group the probability to be the best one. The first group is used as a control group.
|
||||||
|
|
||||||
|
**Syntax**
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
bayesAB(distribution_name, higher_is_better, variant_names, x, y)
|
||||||
|
```
|
||||||
|
|
||||||
|
**Parameters**
|
||||||
|
|
||||||
|
- `distribution_name` — Name of the probability distribution. [String](../../sql-reference/data-types/string.md). Possible values:
|
||||||
|
|
||||||
|
- `beta` for [Beta distribution](https://en.wikipedia.org/wiki/Beta_distribution)
|
||||||
|
- `gamma` for [Gamma distribution](https://en.wikipedia.org/wiki/Gamma_distribution)
|
||||||
|
|
||||||
|
- `higher_is_better` — Boolean flag. [Boolean](../../sql-reference/data-types/boolean.md). Possible values:
|
||||||
|
|
||||||
|
- `0` - lower values are considered to be better than higher
|
||||||
|
- `1` - higher values are considered to be better than lower
|
||||||
|
|
||||||
|
- `variant_names` - Variant names. [Array](../../sql-reference/data-types/array.md)([String](../../sql-reference/data-types/string.md)).
|
||||||
|
|
||||||
|
- `x` - Numbers of tests for the corresponding variants. [Array](../../sql-reference/data-types/array.md)([Float64](../../sql-reference/data-types/float.md)).
|
||||||
|
|
||||||
|
- `y` - Numbers of successful tests for the corresponding variants. [Array](../../sql-reference/data-types/array.md)([Float64](../../sql-reference/data-types/float.md)).
|
||||||
|
|
||||||
|
!!! note "Note"
|
||||||
|
All three arrays must have the same size. All `x` and `y` values must be non-negative constant numbers. `y` cannot be larger than `x`.
|
||||||
|
|
||||||
|
**Returned values**
|
||||||
|
|
||||||
|
For each variant the function calculates:
|
||||||
|
- `beats_control` - long-term probability to out-perform the first (control) variant
|
||||||
|
- `to_be_best` - long-term probability to out-perform all other variants
|
||||||
|
|
||||||
|
Type: JSON.
|
||||||
|
|
||||||
|
**Example**
|
||||||
|
|
||||||
|
Query:
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
SELECT bayesAB('beta', 1, ['Control', 'A', 'B'], [3000., 3000., 3000.], [100., 90., 110.]) FORMAT PrettySpace;
|
||||||
|
```
|
||||||
|
|
||||||
|
Result:
|
||||||
|
|
||||||
|
``` text
|
||||||
|
{
|
||||||
|
"data":[
|
||||||
|
{
|
||||||
|
"variant_name":"Control",
|
||||||
|
"x":3000,
|
||||||
|
"y":100,
|
||||||
|
"beats_control":0,
|
||||||
|
"to_be_best":0.22619
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"variant_name":"A",
|
||||||
|
"x":3000,
|
||||||
|
"y":90,
|
||||||
|
"beats_control":0.23469,
|
||||||
|
"to_be_best":0.04671
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"variant_name":"B",
|
||||||
|
"x":3000,
|
||||||
|
"y":110,
|
||||||
|
"beats_control":0.7580899999999999,
|
||||||
|
"to_be_best":0.7271
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
[Original article](https://clickhouse.tech/docs/en/query_language/functions/machine-learning-functions/) <!--hide-->
|
||||||
|
@ -1491,4 +1491,115 @@ Result:
|
|||||||
|
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## getSetting {#getSetting}
|
||||||
|
|
||||||
|
Returns the current value of a [custom setting](../../operations/settings/index.md#custom_settings).
|
||||||
|
|
||||||
|
**Syntax**
|
||||||
|
|
||||||
|
```sql
|
||||||
|
getSetting('custom_setting');
|
||||||
|
```
|
||||||
|
|
||||||
|
**Parameter**
|
||||||
|
|
||||||
|
- `custom_setting` — The setting name. [String](../../sql-reference/data-types/string.md).
|
||||||
|
|
||||||
|
**Returned value**
|
||||||
|
|
||||||
|
- The setting current value.
|
||||||
|
|
||||||
|
**Example**
|
||||||
|
|
||||||
|
```sql
|
||||||
|
SET custom_a = 123;
|
||||||
|
SELECT getSetting('custom_a');
|
||||||
|
```
|
||||||
|
|
||||||
|
**Result**
|
||||||
|
|
||||||
|
```
|
||||||
|
123
|
||||||
|
```
|
||||||
|
|
||||||
|
**See Also**
|
||||||
|
|
||||||
|
- [Custom Settings](../../operations/settings/index.md#custom_settings)
|
||||||
|
|
||||||
|
## isDecimalOverflow {#is-decimal-overflow}
|
||||||
|
|
||||||
|
Checks whether the [Decimal](../../sql-reference/data-types/decimal.md) value is out of its (or specified) precision.
|
||||||
|
|
||||||
|
**Syntax**
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
isDecimalOverflow(d, [p])
|
||||||
|
```
|
||||||
|
|
||||||
|
**Parameters**
|
||||||
|
|
||||||
|
- `d` — value. [Decimal](../../sql-reference/data-types/decimal.md).
|
||||||
|
- `p` — precision. Optional. If omitted, the initial presicion of the first argument is used. Using of this paratemer could be helpful for data extraction to another DBMS or file. [UInt8](../../sql-reference/data-types/int-uint.md#uint-ranges).
|
||||||
|
|
||||||
|
**Returned values**
|
||||||
|
|
||||||
|
- `1` — Decimal value has more digits then it's precision allow,
|
||||||
|
- `0` — Decimal value satisfies the specified precision.
|
||||||
|
|
||||||
|
**Example**
|
||||||
|
|
||||||
|
Query:
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
SELECT isDecimalOverflow(toDecimal32(1000000000, 0), 9),
|
||||||
|
isDecimalOverflow(toDecimal32(1000000000, 0)),
|
||||||
|
isDecimalOverflow(toDecimal32(-1000000000, 0), 9),
|
||||||
|
isDecimalOverflow(toDecimal32(-1000000000, 0));
|
||||||
|
```
|
||||||
|
|
||||||
|
Result:
|
||||||
|
|
||||||
|
``` text
|
||||||
|
1 1 1 1
|
||||||
|
```
|
||||||
|
|
||||||
|
## countDigits {#count-digits}
|
||||||
|
|
||||||
|
Returns number of decimal digits you need to represent the value.
|
||||||
|
|
||||||
|
**Syntax**
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
countDigits(x)
|
||||||
|
```
|
||||||
|
|
||||||
|
**Parameters**
|
||||||
|
|
||||||
|
- `x` — [Int](../../sql-reference/data-types/int-uint.md) or [Decimal](../../sql-reference/data-types/decimal.md) value.
|
||||||
|
|
||||||
|
**Returned value**
|
||||||
|
|
||||||
|
Number of digits.
|
||||||
|
|
||||||
|
Type: [UInt8](../../sql-reference/data-types/int-uint.md#uint-ranges).
|
||||||
|
|
||||||
|
!!! note "Note"
|
||||||
|
For `Decimal` values takes into account their scales: calculates result over underlying integer type which is `(value * scale)`. For example: `countDigits(42) = 2`, `countDigits(42.000) = 5`, `countDigits(0.04200) = 4`. I.e. you may check decimal overflow for `Decimal64` with `countDecimal(x) > 18`. It's a slow variant of [isDecimalOverflow](#is-decimal-overflow).
|
||||||
|
|
||||||
|
**Example**
|
||||||
|
|
||||||
|
Query:
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
SELECT countDigits(toDecimal32(1, 9)), countDigits(toDecimal32(-1, 9)),
|
||||||
|
countDigits(toDecimal64(1, 18)), countDigits(toDecimal64(-1, 18)),
|
||||||
|
countDigits(toDecimal128(1, 38)), countDigits(toDecimal128(-1, 38));
|
||||||
|
```
|
||||||
|
|
||||||
|
Result:
|
||||||
|
|
||||||
|
``` text
|
||||||
|
10 10 19 19 39 39
|
||||||
|
```
|
||||||
|
|
||||||
[Original article](https://clickhouse.tech/docs/en/query_language/functions/other_functions/) <!--hide-->
|
[Original article](https://clickhouse.tech/docs/en/query_language/functions/other_functions/) <!--hide-->
@ -487,4 +487,75 @@ Returns the CRC64 checksum of a string, using CRC-64-ECMA polynomial.

The result type is UInt64.

## normalizeQuery {#normalized-query}

Replaces literals, sequences of literals and complex aliases with placeholders.

**Syntax**

``` sql
normalizeQuery(x)
```

**Parameters**

- `x` — Sequence of characters. [String](../../sql-reference/data-types/string.md).

**Returned value**

- Sequence of characters with placeholders.

Type: [String](../../sql-reference/data-types/string.md).

**Example**

Query:

``` sql
SELECT normalizeQuery('[1, 2, 3, x]') AS query;
```

Result:

``` text
┌─query────┐
│ [?.., x] │
└──────────┘
```

## normalizedQueryHash {#normalized-query-hash}

Returns an identical 64-bit hash value for similar queries, with the values of literals left out. Helpful when analyzing the query log.

**Syntax**

``` sql
normalizedQueryHash(x)
```

**Parameters**

- `x` — Sequence of characters. [String](../../sql-reference/data-types/string.md).

**Returned value**

- Hash value.

Type: [UInt64](../../sql-reference/data-types/int-uint.md#uint-ranges).

**Example**

Query:

``` sql
SELECT normalizedQueryHash('SELECT 1 AS `xyz`') != normalizedQueryHash('SELECT 1 AS `abc`') AS res;
```

Result:

``` text
┌─res─┐
│   1 │
└─────┘
```
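A typical use, sketched here as an illustration rather than taken from the page itself: grouping the query log by the normalized query text. This assumes query logging is enabled, so the `system.query_log` table exists.

``` sql
-- Top query shapes by frequency; literals are replaced with placeholders,
-- so different parameter values collapse into one row.
SELECT
    normalizeQuery(query) AS normalized,
    count() AS times
FROM system.query_log
WHERE type = 'QueryFinish'
GROUP BY normalized
ORDER BY times DESC
LIMIT 10;
```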
[Original article](https://clickhouse.tech/docs/en/query_language/functions/string_functions/) <!--hide-->
@ -360,6 +360,89 @@ Extracts a fragment of a string using a regular expression. If ‘haystack’ do

Extracts all the fragments of a string using a regular expression. If ‘haystack’ doesn’t match the ‘pattern’ regex, an empty array is returned. Returns an array of strings consisting of all matches to the regex. In general, the behavior is the same as the ‘extract’ function (it takes the first subpattern, or the entire expression if there isn’t a subpattern).

## extractAllGroupsHorizontal {#extractallgroups-horizontal}

Matches all groups of the `haystack` string using the `pattern` regular expression. Returns an array of arrays, where the first array includes all fragments matching the first group, the second array - fragments matching the second group, and so on.

!!! note "Note"
    `extractAllGroupsHorizontal` is slower than [extractAllGroupsVertical](#extractallgroups-vertical).

**Syntax**

``` sql
extractAllGroupsHorizontal(haystack, pattern)
```

**Parameters**

- `haystack` — Input string. Type: [String](../../sql-reference/data-types/string.md).
- `pattern` — Regular expression with [re2 syntax](https://github.com/google/re2/wiki/Syntax). Must contain groups, each group enclosed in parentheses. If `pattern` contains no groups, an exception is thrown. Type: [String](../../sql-reference/data-types/string.md).

**Returned value**

- Type: [Array](../../sql-reference/data-types/array.md).

If `haystack` doesn’t match the `pattern` regex, an array of empty arrays is returned.

**Example**

Query:

``` sql
SELECT extractAllGroupsHorizontal('abc=111, def=222, ghi=333', '("[^"]+"|\\w+)=("[^"]+"|\\w+)')
```

Result:

``` text
┌─extractAllGroupsHorizontal('abc=111, def=222, ghi=333', '("[^"]+"|\\w+)=("[^"]+"|\\w+)')─┐
│ [['abc','def','ghi'],['111','222','333']]                                                │
└──────────────────────────────────────────────────────────────────────────────────────────┘
```

**See also**

- [extractAllGroupsVertical](#extractallgroups-vertical)

## extractAllGroupsVertical {#extractallgroups-vertical}

Matches all groups of the `haystack` string using the `pattern` regular expression. Returns an array of arrays, where each inner array holds one matching fragment from every group. Fragments are grouped in order of appearance in the `haystack`.

**Syntax**

``` sql
extractAllGroupsVertical(haystack, pattern)
```

**Parameters**

- `haystack` — Input string. Type: [String](../../sql-reference/data-types/string.md).
- `pattern` — Regular expression with [re2 syntax](https://github.com/google/re2/wiki/Syntax). Must contain groups, each group enclosed in parentheses. If `pattern` contains no groups, an exception is thrown. Type: [String](../../sql-reference/data-types/string.md).

**Returned value**

- Type: [Array](../../sql-reference/data-types/array.md).

If `haystack` doesn’t match the `pattern` regex, an empty array is returned.

**Example**

Query:

``` sql
SELECT extractAllGroupsVertical('abc=111, def=222, ghi=333', '("[^"]+"|\\w+)=("[^"]+"|\\w+)')
```

Result:

``` text
┌─extractAllGroupsVertical('abc=111, def=222, ghi=333', '("[^"]+"|\\w+)=("[^"]+"|\\w+)')─┐
│ [['abc','111'],['def','222'],['ghi','333']]                                            │
└────────────────────────────────────────────────────────────────────────────────────────┘
```
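For completeness, a small follow-up sketch of my own (not from the original page): the per-match arrays returned by `extractAllGroupsVertical` can be turned into key-value tuples with `arrayMap`.

``` sql
-- Each inner array is [key, value]; convert it to a tuple per match.
SELECT arrayMap(g -> (g[1], g[2]),
                extractAllGroupsVertical('abc=111, def=222, ghi=333', '(\\w+)=(\\w+)')) AS pairs;
```

This yields one `(key, value)` tuple per match.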
**See also**

- [extractAllGroupsHorizontal](#extractallgroups-horizontal)

## like(haystack, pattern), haystack LIKE pattern operator {#function-like}

Checks whether a string matches a simple regular expression.
@ -11,7 +11,7 @@ When you convert a value from one to another data type, you should remember that

ClickHouse has the [same behavior as C++ programs](https://en.cppreference.com/w/cpp/language/implicit_conversion).

## toInt(8\|16\|32\|64\|128\|256) {#toint8163264128256}

Converts an input value to the [Int](../../sql-reference/data-types/int-uint.md) data type. This function family includes:

@ -19,6 +19,8 @@ Converts an input value to the [Int](../../sql-reference/data-types/int-uint.md)
- `toInt16(expr)` — Results in the `Int16` data type.
- `toInt32(expr)` — Results in the `Int32` data type.
- `toInt64(expr)` — Results in the `Int64` data type.
- `toInt128(expr)` — Results in the `Int128` data type.
- `toInt256(expr)` — Results in the `Int256` data type.

**Parameters**

@ -26,7 +28,7 @@ Converts an input value to the [Int](../../sql-reference/data-types/int-uint.md)

**Returned value**

Integer value in the `Int8`, `Int16`, `Int32`, `Int64`, `Int128` or `Int256` data type.

Functions use [rounding towards zero](https://en.wikipedia.org/wiki/Rounding#Rounding_towards_zero), meaning they truncate fractional digits of numbers.

@ -44,9 +46,9 @@ SELECT toInt64(nan), toInt32(32), toInt16('16'), toInt8(8.8)
└──────────────────────┴─────────────┴───────────────┴─────────────┘
```

## toInt(8\|16\|32\|64\|128\|256)OrZero {#toint8163264orzero}

It takes an argument of type String and tries to parse it into Int (8 \| 16 \| 32 \| 64 \| 128 \| 256). If it fails, returns 0.

**Example**

@ -60,9 +62,9 @@ select toInt64OrZero('123123'), toInt8OrZero('123qwe123')
└─────────────────────────┴───────────────────────────┘
```

## toInt(8\|16\|32\|64\|128\|256)OrNull {#toint8163264128256ornull}

It takes an argument of type String and tries to parse it into Int (8 \| 16 \| 32 \| 64 \| 128 \| 256). If it fails, returns NULL.

**Example**

@ -76,7 +78,7 @@ select toInt64OrNull('123123'), toInt8OrNull('123qwe123')
└─────────────────────────┴───────────────────────────┘
```
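The 128- and 256-bit variants accept the same kinds of arguments as the narrower ones. A quick sketch of my own (not from the page; on releases of this era big-integer types were still experimental, so exact behavior may depend on the server version and settings):

``` sql
SELECT toInt128OrNull('170141183460469231731687303715884105727') AS max_int128,
       toInt256OrZero('123qwe123') AS not_a_number;   -- returns 0, as with toInt64OrZero
```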
## toUInt(8\|16\|32\|64\|256) {#touint8163264256}

Converts an input value to the [UInt](../../sql-reference/data-types/int-uint.md) data type. This function family includes:

@ -84,6 +86,7 @@ Converts an input value to the [UInt](../../sql-reference/data-types/int-uint.md
- `toUInt16(expr)` — Results in the `UInt16` data type.
- `toUInt32(expr)` — Results in the `UInt32` data type.
- `toUInt64(expr)` — Results in the `UInt64` data type.
- `toUInt256(expr)` — Results in the `UInt256` data type.

**Parameters**

@ -91,7 +94,7 @@ Converts an input value to the [UInt](../../sql-reference/data-types/int-uint.md

**Returned value**

Integer value in the `UInt8`, `UInt16`, `UInt32`, `UInt64` or `UInt256` data type.

Functions use [rounding towards zero](https://en.wikipedia.org/wiki/Rounding#Rounding_towards_zero), meaning they truncate fractional digits of numbers.

@ -109,9 +112,9 @@ SELECT toUInt64(nan), toUInt32(-32), toUInt16('16'), toUInt8(8.8)
└─────────────────────┴───────────────┴────────────────┴──────────────┘
```

## toUInt(8\|16\|32\|64\|256)OrZero {#touint8163264256orzero}

## toUInt(8\|16\|32\|64\|256)OrNull {#touint8163264256ornull}
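These two entries are stubs in the source; they mirror the signed variants, returning `0` or `NULL` respectively when the string cannot be parsed. A minimal sketch of my own, not taken from the page:

``` sql
SELECT toUInt64OrZero('123qwe123') AS zero_on_failure,   -- 0
       toUInt64OrNull('123qwe123') AS null_on_failure;   -- NULL
```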
## toFloat(32\|64) {#tofloat3264}

@ -131,21 +134,23 @@ SELECT toUInt64(nan), toUInt32(-32), toUInt16('16'), toUInt8(8.8)

## toDateTimeOrNull {#todatetimeornull}

## toDecimal(32\|64\|128\|256) {#todecimal3264128256}

Converts `value` to the [Decimal](../../sql-reference/data-types/decimal.md) data type with precision of `S`. The `value` can be a number or a string. The `S` (scale) parameter specifies the number of decimal places.

- `toDecimal32(value, S)`
- `toDecimal64(value, S)`
- `toDecimal128(value, S)`
- `toDecimal256(value, S)`

## toDecimal(32\|64\|128\|256)OrNull {#todecimal3264128256ornull}

Converts an input string to a [Nullable(Decimal(P,S))](../../sql-reference/data-types/decimal.md) data type value. This family of functions includes:

- `toDecimal32OrNull(expr, S)` — Results in `Nullable(Decimal32(S))` data type.
- `toDecimal64OrNull(expr, S)` — Results in `Nullable(Decimal64(S))` data type.
- `toDecimal128OrNull(expr, S)` — Results in `Nullable(Decimal128(S))` data type.
- `toDecimal256OrNull(expr, S)` — Results in `Nullable(Decimal256(S))` data type.

These functions should be used instead of `toDecimal*()` functions, if you prefer to get a `NULL` value instead of an exception in the event of an input value parsing error.

@ -183,13 +188,14 @@ SELECT toDecimal32OrNull(toString(-1.111), 2) AS val, toTypeName(val)
└──────┴────────────────────────────────────────────────────┘
```

## toDecimal(32\|64\|128\|256)OrZero {#todecimal3264128256orzero}

Converts an input value to the [Decimal(P,S)](../../sql-reference/data-types/decimal.md) data type. This family of functions includes:

- `toDecimal32OrZero( expr, S)` — Results in `Decimal32(S)` data type.
- `toDecimal64OrZero( expr, S)` — Results in `Decimal64(S)` data type.
- `toDecimal128OrZero( expr, S)` — Results in `Decimal128(S)` data type.
- `toDecimal256OrZero( expr, S)` — Results in `Decimal256(S)` data type.

These functions should be used instead of `toDecimal*()` functions, if you prefer to get a `0` value instead of an exception in the event of an input value parsing error.
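As a hedged aside (my own sketch, not from the page): the 256-bit variants follow the same pattern, though on releases from this period `Decimal256` was still experimental (gated by a setting such as `allow_experimental_bigint_types`), so behavior may depend on the server configuration.

``` sql
-- Parses into Nullable(Decimal256(5)); an unparsable string gives NULL instead of an exception.
SELECT toDecimal256OrNull('12345.6789', 5) AS val, toTypeName(val);
```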
@ -729,4 +735,45 @@ SELECT fromUnixTimestamp64Milli(i64, 'UTC')
└──────────────────────────────────────┘
```

## formatRow {#formatrow}

Converts arbitrary expressions into a string using the given format.

**Syntax**

``` sql
formatRow(format, x, y, ...)
```

**Parameters**

- `format` — Text format. For example, [CSV](../../interfaces/formats.md#csv), [TSV](../../interfaces/formats.md#tabseparated).
- `x`,`y`, ... — Expressions.

**Returned value**

- A formatted string (for text formats it is usually terminated with the newline character).

**Example**

Query:

``` sql
SELECT formatRow('CSV', number, 'good')
FROM numbers(3)
```

Result:

``` text
┌─formatRow('CSV', number, 'good')─┐
│ 0,"good"
│
│ 1,"good"
│
│ 2,"good"
│
└──────────────────────────────────┘
```

[Original article](https://clickhouse.tech/docs/en/query_language/functions/type_conversion_functions/) <!--hide-->
@ -121,7 +121,9 @@ Defines storage time for values. Can be specified only for MergeTree-family tabl

## Column Compression Codecs {#codecs}

By default, ClickHouse applies the `lz4` compression method. For the `MergeTree`-engine family you can change the default compression method in the [compression](../../../operations/server-configuration-parameters/settings.md#server-settings-compression) section of a server configuration.

You can also define the compression method for each individual column in the `CREATE TABLE` query.

``` sql
CREATE TABLE codec_example

@ -136,7 +138,18 @@ ENGINE = <Engine>
...
```

The `Default` codec can be specified to reference the default compression, which may depend on different settings (and properties of the data) at runtime.

Example: `value UInt64 CODEC(Default)` - the same as having no codec specified.

You can also remove the current codec from the column and use the default compression from config.xml:

``` sql
ALTER TABLE codec_example MODIFY COLUMN float_value CODEC(Default);
```

Codecs can be combined in a pipeline, for example, `CODEC(Delta, Default)`.

To select the best codec combination for your project, run benchmarks similar to those described in the Altinity [New Encodings to Improve ClickHouse Efficiency](https://www.altinity.com/blog/2019/7/new-encodings-to-improve-clickhouse) article. One thing to note is that codecs can't be applied to ALIAS columns.

!!! warning "Warning"
    You can’t decompress ClickHouse database files with external utilities like `lz4`. Instead, use the special [clickhouse-compressor](https://github.com/ClickHouse/ClickHouse/tree/master/programs/compressor) utility.
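To make the pipeline idea concrete, here is a sketch of my own (the column names and codec choices are illustrative, not from the original page). `Delta` stores differences between consecutive values and `ZSTD` then compresses them, which tends to work well for monotonically increasing data such as timestamps.

``` sql
CREATE TABLE codec_pipeline_example
(
    ts  DateTime CODEC(Delta, ZSTD),   -- delta-encode, then general-purpose compression
    val Float64  CODEC(Gorilla)        -- codec suited to slowly changing floating-point series
)
ENGINE = MergeTree
ORDER BY ts;
```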
@ -102,7 +102,7 @@ Ejemplo de OpenSUSE Tumbleweed:
Example for Fedora Rawhide:

    sudo yum update
    yum --nogpg install git cmake make gcc-c++ python3
    git clone --recursive https://github.com/ClickHouse/ClickHouse.git
    mkdir build && cd build
    cmake ../ClickHouse

@ -103,7 +103,7 @@ $ cd ..
Example for Fedora Rawhide:

    sudo yum update
    yum --nogpg install git cmake make gcc-c++ python3
    git clone --recursive https://github.com/ClickHouse/ClickHouse.git
    mkdir build && cd build
    cmake ../ClickHouse

@ -102,7 +102,7 @@ Exemple Pour openSUSE Tumbleweed:
Example for Fedora Rawhide:

    sudo yum update
    yum --nogpg install git cmake make gcc-c++ python3
    git clone --recursive https://github.com/ClickHouse/ClickHouse.git
    mkdir build && cd build
    cmake ../ClickHouse

@ -102,7 +102,7 @@ OpenSUSEタンブルウィードの例:
Example for Fedora Rawhide:

    sudo yum update
    yum --nogpg install git cmake make gcc-c++ python3
    git clone --recursive https://github.com/ClickHouse/ClickHouse.git
    mkdir build && cd build
    cmake ../ClickHouse
@ -1,8 +1,3 @@

# RabbitMQ {#rabbitmq-engine}

The engine works with [RabbitMQ](https://www.rabbitmq.com).

@ -27,9 +22,14 @@ CREATE TABLE [IF NOT EXISTS] [db.]table_name [ON CLUSTER cluster]
    [rabbitmq_exchange_type = 'exchange_type',]
    [rabbitmq_routing_key_list = 'key1,key2,...',]
    [rabbitmq_row_delimiter = 'delimiter_symbol',]
    [rabbitmq_schema = '',]
    [rabbitmq_num_consumers = N,]
    [rabbitmq_num_queues = N,]
    [rabbitmq_queue_base = 'queue',]
    [rabbitmq_persistent = 0,]
    [rabbitmq_skip_broken_messages = N,]
    [rabbitmq_max_block_size = N,]
    [rabbitmq_flush_interval_ms = N]
```

Required parameters:

@ -40,12 +40,17 @@ CREATE TABLE [IF NOT EXISTS] [db.]table_name [ON CLUSTER cluster]

Optional parameters:

- `rabbitmq_exchange_type` – the RabbitMQ exchange type: `direct`, `fanout`, `topic`, `headers`, `consistent_hash`. Default: `fanout`.
- `rabbitmq_routing_key_list` – a comma-separated list of routing keys.
- `rabbitmq_row_delimiter` – the delimiter character that marks the end of a message.
- `rabbitmq_schema` – an optional parameter that is required for formats that need a schema definition. For example, [Cap’n Proto](https://capnproto.org/) requires the path to the schema file and the name of the root object, `schema.capnp:Message`.
- `rabbitmq_num_consumers` – the number of consumers per table. Default: `1`. Specify more consumers if the throughput of one consumer is insufficient.
- `rabbitmq_num_queues` – the number of queues per consumer. Default: `1`. Specify more queues if the throughput of one queue per consumer is insufficient.
- `rabbitmq_queue_base` - a base for queue names. Usage scenarios are described below.
- `rabbitmq_persistent` - a flag that controls the `durable` setting for messages on `INSERT` queries. Default: `0`.
- `rabbitmq_skip_broken_messages` – the maximum number of malformed messages per block. If `rabbitmq_skip_broken_messages = N`, the engine skips `N` messages that cannot be parsed. One message corresponds to exactly one row. Default: 0.
- `rabbitmq_max_block_size`
- `rabbitmq_flush_interval_ms`

Required configuration:

@ -90,15 +95,23 @@ Example:
- `fanout` - routes to all tables bound to an exchange with the same name, regardless of keys.
- `topic` - routing is based on patterns with dot-separated keys. For example: `*.logs`, `records.*.*.2020`, `*.2018,*.2019,*.2020`.
- `headers` - routing is based on `key=value` matches with the `x-match=all` or `x-match=any` setting. Example table key list: `x-match=all,format=logs,type=report,year=2020`.
- `consistent_hash` - data is distributed evenly between all bound tables where the exchange name matches. Note that this exchange type has to be enabled with the RabbitMQ plugin: `rabbitmq-plugins enable rabbitmq_consistent_hash_exchange`.

The `rabbitmq_queue_base` setting can be used in the following cases:

1. to resume reading from previously created queues if reading stopped for some reason but the queues are not empty. To resume reading from one specific queue, put its name into the `rabbitmq_queue_base` setting and do not specify `rabbitmq_num_consumers` and `rabbitmq_num_queues`. To resume reading from all queues created for a particular table, the following settings must match: `rabbitmq_queue_base`, `rabbitmq_num_consumers`, `rabbitmq_num_queues`. By default, if `rabbitmq_queue_base` is not set, unique queue names are used for each table.
2. to declare the same queues for different tables, which makes it possible to create several parallel consumers for each queue, i.e. it provides better performance. In this case the `rabbitmq_num_consumers` and `rabbitmq_num_queues` settings must also match for these tables.
3. to reuse queues created with the `durable` setting, since they are not deleted automatically (although they can be deleted with any RabbitMQ CLI tool).

To improve performance, received messages are grouped into blocks of [max\_insert\_block\_size](../../../operations/settings/settings.md#settings-max_insert_block_size) size. If a block could not be formed within [stream\_flush\_interval\_ms](../../../operations/settings/settings.md#stream-flush-interval-ms) milliseconds, the data is flushed into the table regardless of block completeness.

If the `rabbitmq_num_consumers` and/or `rabbitmq_num_queues` settings are specified together with `rabbitmq_exchange_type`:

- the `rabbitmq-consistent-hash-exchange` plugin must be enabled.
- the `message_id` property must be defined (unique for every message/batch).

On `INSERT` queries, metadata is added to each published message: `messageID` and a `republished` flag, available via the message headers.

Do not use the same table for reading and inserting.

Example:

``` sql

@ -120,3 +133,11 @@ Example:

SELECT key, value FROM daily ORDER BY key;
```

## Virtual Columns {#virtual-columns}

- `_exchange_name` - the RabbitMQ exchange name.
- `_channel_id` - the `ChannelID` of the channel on which the message was received.
- `_delivery_tag` - the `DeliveryTag` of the received message. Unique within one channel.
- `_redelivered` - the `redelivered` flag. (Non-zero if the message may have been received by more than one channel.)
- `_message_id` - the `MessageID` of the received message. Non-empty if it was set when the message was published.
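For orientation, the example that the hunk above truncates (only its final `SELECT` is visible) usually follows the standard pattern of reading through a materialized view. The sketch below is my own reconstruction under that assumption, not the exact snippet from the file; the required settings `rabbitmq_host_port`, `rabbitmq_exchange_name` and `rabbitmq_format` are taken from the list of mandatory parameters above.

``` sql
CREATE TABLE queue (
    key UInt64,
    value UInt64
) ENGINE = RabbitMQ SETTINGS rabbitmq_host_port = 'localhost:5672',
                             rabbitmq_exchange_name = 'exchange1',
                             rabbitmq_format = 'JSONEachRow';

CREATE TABLE daily (key UInt64, value UInt64)
    ENGINE = MergeTree() ORDER BY key;

-- The materialized view moves messages from RabbitMQ into the MergeTree table in the background.
CREATE MATERIALIZED VIEW consumer TO daily
    AS SELECT key, value FROM queue;

SELECT key, value FROM daily ORDER BY key;
```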
@ -1,22 +1,24 @@
# Decimal(P, S), Decimal32(S), Decimal64(S), Decimal128(S), Decimal256(S) {#decimalp-s-decimal32s-decimal64s-decimal128s}

Signed fixed-point numbers that keep precision during addition, multiplication and subtraction. For division, the least significant digits are discarded (not rounded).

## Parameters {#parametry}

- P - precision. Valid range: \[ 1 : 76 \]. Determines how many decimal digits the number can have (including the fractional part).
- S - scale. Valid range: \[ 0 : P \]. Determines how many decimal digits the fractional part can have.

Depending on the P parameter, Decimal(P, S) is a synonym for:
- P from \[ 1 : 9 \] - for Decimal32(S)
- P from \[ 10 : 18 \] - for Decimal64(S)
- P from \[ 19 : 38 \] - for Decimal128(S)
- P from \[ 39 : 76 \] - for Decimal256(S)

## Decimal Value Ranges {#diapazony-decimal}

- Decimal32(S) - ( -1 \* 10^(9 - S), 1 \* 10^(9 - S) )
- Decimal64(S) - ( -1 \* 10^(18 - S), 1 \* 10^(18 - S) )
- Decimal128(S) - ( -1 \* 10^(38 - S), 1 \* 10^(38 - S) )
- Decimal256(S) - ( -1 \* 10^(76 - S), 1 \* 10^(76 - S) )

For example, Decimal32(4) can contain numbers from -99999.9999 to 99999.9999 with a step of 0.0001.

@ -32,6 +34,7 @@
- `Decimal64(S1) <op> Decimal32(S2) -> Decimal64(S)`
- `Decimal128(S1) <op> Decimal32(S2) -> Decimal128(S)`
- `Decimal128(S1) <op> Decimal64(S2) -> Decimal128(S)`
- `Decimal256(S1) <op> Decimal<32|64|128>(S2) -> Decimal256(S)`

The following rules apply to the scale of the result:

@ -99,4 +102,9 @@ SELECT toDecimal32(1, 8) < 100
DB::Exception: Can't compare.
```

**See also**
- [isDecimalOverflow](../../sql-reference/functions/other-functions.md#is-decimal-overflow)
- [countDigits](../../sql-reference/functions/other-functions.md#count-digits)

[Original article](https://clickhouse.tech/docs/ru/data_types/decimal/) <!--hide-->
@ -1,19 +1,24 @@
# UInt8, UInt16, UInt32, UInt64, UInt256, Int8, Int16, Int32, Int64, Int128, Int256 {#uint8-uint16-uint32-uint64-int8-int16-int32-int64}

Fixed-length integers, with or without a sign.

## Int Ranges {#int-ranges}

- Int8 - \[-128 : 127\]
- Int16 - \[-32768 : 32767\]
- Int32 - \[-2147483648 : 2147483647\]
- Int64 - \[-9223372036854775808 : 9223372036854775807\]
- Int128 - \[-170141183460469231731687303715884105728 : 170141183460469231731687303715884105727\]
- Int256 - \[-57896044618658097711785492504343953926634992332820282019728792003956564819968 : 57896044618658097711785492504343953926634992332820282019728792003956564819967\]

## UInt Ranges {#uint-ranges}

- UInt8 - \[0 : 255\]
- UInt16 - \[0 : 65535\]
- UInt32 - \[0 : 4294967295\]
- UInt64 - \[0 : 18446744073709551615\]
- UInt256 - \[0 : 115792089237316195423570985008687907853269984665640564039457584007913129639935\]

UInt128 is not implemented yet.

[Original article](https://clickhouse.tech/docs/ru/data_types/int_uint/) <!--hide-->
@ -1431,5 +1431,80 @@ SELECT randomStringUTF8(13)

```

## isDecimalOverflow {#is-decimal-overflow}

Checks whether a [Decimal](../../sql-reference/data-types/decimal.md#decimalp-s-decimal32s-decimal64s-decimal128s) value is out of its own (or the specified) precision.

**Syntax**

``` sql
isDecimalOverflow(d, [p])
```

**Parameters**

- `d` — value. [Decimal](../../sql-reference/data-types/decimal.md#decimalp-s-decimal32s-decimal64s-decimal128s).
- `p` — precision. Optional. If omitted, the initial precision of the first argument is used. This parameter can be helpful when extracting data to another DBMS or file. [UInt8](../../sql-reference/data-types/int-uint.md#uint-ranges).

**Returned value**

- `1` — the number has more digits than its precision allows,
- `0` — the number satisfies the specified precision.

**Example**

Query:

``` sql
SELECT isDecimalOverflow(toDecimal32(1000000000, 0), 9),
       isDecimalOverflow(toDecimal32(1000000000, 0)),
       isDecimalOverflow(toDecimal32(-1000000000, 0), 9),
       isDecimalOverflow(toDecimal32(-1000000000, 0));
```

Result:

``` text
1	1	1	1
```

## countDigits {#count-digits}

Returns the number of decimal digits needed to represent the value.

**Syntax**

``` sql
countDigits(x)
```

**Parameters**

- `x` — an [integer](../../sql-reference/data-types/int-uint.md#uint8-uint16-uint32-uint64-int8-int16-int32-int64) or [decimal](../../sql-reference/data-types/decimal.md#decimalp-s-decimal32s-decimal64s-decimal128s) value.

**Returned value**

Number of digits.

Type: [UInt8](../../sql-reference/data-types/int-uint.md#uint-ranges).

!!! note "Note"
    For `Decimal` values the function takes their scale into account: it calculates the result over the underlying integer type, which is `(value * scale)`. For example: `countDigits(42) = 2`, `countDigits(42.000) = 5`, `countDigits(0.04200) = 4`. So you can check decimal overflow for `Decimal64` with `countDigits(x) > 18`. It is a slow variant of [isDecimalOverflow](#is-decimal-overflow).

**Example**

Query:

``` sql
SELECT countDigits(toDecimal32(1, 9)), countDigits(toDecimal32(-1, 9)),
       countDigits(toDecimal64(1, 18)), countDigits(toDecimal64(-1, 18)),
       countDigits(toDecimal128(1, 38)), countDigits(toDecimal128(-1, 38));
```

Result:

``` text
10	10	19	19	39	39
```

[Original article](https://clickhouse.tech/docs/ru/query_language/functions/other_functions/) <!--hide-->
@ -479,4 +479,75 @@ SELECT trimBoth(' Hello, world! ')

The result type is UInt64.

## normalizeQuery {#normalized-query}

Replaces literals, sequences of literals and complex aliases with placeholders.

**Syntax**

``` sql
normalizeQuery(x)
```

**Parameters**

- `x` — Sequence of characters. [String](../../sql-reference/data-types/string.md).

**Returned value**

- Sequence of characters with placeholders.

Type: [String](../../sql-reference/data-types/string.md).

**Example**

Query:

``` sql
SELECT normalizeQuery('[1, 2, 3, x]') AS query;
```

Result:

``` text
┌─query────┐
│ [?.., x] │
└──────────┘
```

## normalizedQueryHash {#normalized-query-hash}

Returns an identical 64-bit hash value for similar queries, with the values of literals left out. Helpful when analyzing the query log.

**Syntax**

``` sql
normalizedQueryHash(x)
```

**Parameters**

- `x` — Sequence of characters. [String](../../sql-reference/data-types/string.md).

**Returned value**

- Hash value.

Type: [UInt64](../../sql-reference/data-types/int-uint.md#uint-ranges).

**Example**

Query:

``` sql
SELECT normalizedQueryHash('SELECT 1 AS `xyz`') != normalizedQueryHash('SELECT 1 AS `abc`') AS res;
```

Result:

``` text
┌─res─┐
│   1 │
└─────┘
```

[Original article](https://clickhouse.tech/docs/ru/query_language/functions/string_functions/) <!--hide-->
@ -341,6 +341,89 @@ Result:

Extracts all the fragments of a string using a regular expression. If ‘haystack’ doesn’t match the ‘pattern’ regex, an empty array is returned. Returns an array of strings consisting of all matches to the regex. Otherwise, the behavior is the same as the ‘extract’ function (it takes the first subpattern, or the entire expression if there isn’t a subpattern).

## extractAllGroupsHorizontal {#extractallgroups-horizontal}

Parses the `haystack` string into fragments matching the groups of the `pattern` regular expression. Returns an array of arrays, where the first array contains all fragments matching the first group of the regular expression, the second array - those matching the second group, and so on.

!!! note "Note"
    `extractAllGroupsHorizontal` is slower than [extractAllGroupsVertical](#extractallgroups-vertical).

**Syntax**

``` sql
extractAllGroupsHorizontal(haystack, pattern)
```

**Parameters**

- `haystack` — the string to parse. Type: [String](../../sql-reference/data-types/string.md).
- `pattern` — a regular expression with [re2 syntax](https://github.com/google/re2/wiki/Syntax). Must contain groups, each group enclosed in parentheses. If `pattern` contains no groups, an exception is thrown. Type: [String](../../sql-reference/data-types/string.md).

**Returned value**

- Type: [Array](../../sql-reference/data-types/array.md).

If `haystack` has no groups matching the `pattern` regex, an array of empty arrays is returned.

**Example**

Query:

``` sql
SELECT extractAllGroupsHorizontal('abc=111, def=222, ghi=333', '("[^"]+"|\\w+)=("[^"]+"|\\w+)')
```

Result:

``` text
┌─extractAllGroupsHorizontal('abc=111, def=222, ghi=333', '("[^"]+"|\\w+)=("[^"]+"|\\w+)')─┐
│ [['abc','def','ghi'],['111','222','333']]                                                │
└──────────────────────────────────────────────────────────────────────────────────────────┘
```

**See also**
- the [extractAllGroupsVertical](#extractallgroups-vertical) function

## extractAllGroupsVertical {#extractallgroups-vertical}

Parses the `haystack` string into fragments matching the groups of the `pattern` regular expression. Returns an array of arrays, where each inner array contains one fragment from every group. Fragments are grouped into arrays in the order in which they appear in the original string.

**Syntax**

``` sql
extractAllGroupsVertical(haystack, pattern)
```

**Parameters**

- `haystack` — the string to parse. Type: [String](../../sql-reference/data-types/string.md).
- `pattern` — a regular expression with [re2 syntax](https://github.com/google/re2/wiki/Syntax). Must contain groups, each group enclosed in parentheses. If `pattern` contains no groups, an exception is thrown. Type: [String](../../sql-reference/data-types/string.md).

**Returned value**

- Type: [Array](../../sql-reference/data-types/array.md).

If `haystack` has no groups matching the `pattern` regex, an empty array is returned.

**Example**

Query:

``` sql
SELECT extractAllGroupsVertical('abc=111, def=222, ghi=333', '("[^"]+"|\\w+)=("[^"]+"|\\w+)')
```

Result:

``` text
┌─extractAllGroupsVertical('abc=111, def=222, ghi=333', '("[^"]+"|\\w+)=("[^"]+"|\\w+)')─┐
│ [['abc','111'],['def','222'],['ghi','333']]                                            │
└────────────────────────────────────────────────────────────────────────────────────────┘
```

**See also**
- the [extractAllGroupsHorizontal](#extractallgroups-horizontal) function

## like(haystack, pattern), haystack LIKE pattern operator {#function-like}

Checks whether a string matches a simple regular expression.
|
@ -6,7 +6,7 @@
|
|||||||
|
|
||||||
Поведение ClickHouse при конвертировании похоже на [поведение C++ программ](https://en.cppreference.com/w/cpp/language/implicit_conversion).
|
Поведение ClickHouse при конвертировании похоже на [поведение C++ программ](https://en.cppreference.com/w/cpp/language/implicit_conversion).
|
||||||
|
|
||||||
## toInt(8\|16\|32\|64) {#toint8163264}
|
## toInt(8\|16\|32\|64\|128\|256) {#toint8163264}
|
||||||
|
|
||||||
Преобразует входное значение к типу [Int](../../sql-reference/functions/type-conversion-functions.md). Семейство функций включает:
|
Преобразует входное значение к типу [Int](../../sql-reference/functions/type-conversion-functions.md). Семейство функций включает:
|
||||||
|
|
||||||
@ -14,6 +14,8 @@
|
|||||||
- `toInt16(expr)` — возвращает значение типа `Int16`.
|
- `toInt16(expr)` — возвращает значение типа `Int16`.
|
||||||
- `toInt32(expr)` — возвращает значение типа `Int32`.
|
- `toInt32(expr)` — возвращает значение типа `Int32`.
|
||||||
- `toInt64(expr)` — возвращает значение типа `Int64`.
|
- `toInt64(expr)` — возвращает значение типа `Int64`.
|
||||||
|
- `toInt128(expr)` — возвращает значение типа `Int128`.
|
||||||
|
- `toInt256(expr)` — возвращает значение типа `Int256`.
|
||||||
|
|
||||||
**Параметры**
|
**Параметры**
|
||||||
|
|
||||||
@ -21,7 +23,7 @@
|
|||||||
|
|
||||||
**Возвращаемое значение**
|
**Возвращаемое значение**
|
||||||
|
|
||||||
Целое число типа `Int8`, `Int16`, `Int32` или `Int64`.
|
Целое число типа `Int8`, `Int16`, `Int32`, `Int64`, `Int128` или `Int256`.
|
||||||
|
|
||||||
Функции используют [округление к нулю](https://en.wikipedia.org/wiki/Rounding#Rounding_towards_zero), т.е. обрезают дробную часть числа.
|
Функции используют [округление к нулю](https://en.wikipedia.org/wiki/Rounding#Rounding_towards_zero), т.е. обрезают дробную часть числа.
|
||||||
|
|
||||||
@ -39,9 +41,9 @@ SELECT toInt64(nan), toInt32(32), toInt16('16'), toInt8(8.8)
|
|||||||
└──────────────────────┴─────────────┴───────────────┴─────────────┘
|
└──────────────────────┴─────────────┴───────────────┴─────────────┘
|
||||||
```
|
```
|
||||||
|
|
||||||
## toInt(8\|16\|32\|64)OrZero {#toint8163264orzero}
|
## toInt(8\|16\|32\|64\|128\|256)OrZero {#toint8163264orzero}
|
||||||
|
|
||||||
Принимает аргумент типа String и пытается его распарсить в Int(8\|16\|32\|64). Если не удалось - возвращает 0.
|
Принимает аргумент типа String и пытается его распарсить в Int(8\|16\|32\|64\|128\|256). Если не удалось - возвращает 0.
|
||||||
|
|
||||||
**Пример**
|
**Пример**
|
||||||
|
|
||||||
@ -55,9 +57,9 @@ select toInt64OrZero('123123'), toInt8OrZero('123qwe123')
|
|||||||
└─────────────────────────┴───────────────────────────┘
|
└─────────────────────────┴───────────────────────────┘
|
||||||
```
|
```
|
||||||
|
|
||||||
## toInt(8\|16\|32\|64)OrNull {#toint8163264ornull}
|
## toInt(8\|16\|32\|64\|128\|256)OrNull {#toint8163264ornull}
|
||||||
|
|
||||||
Принимает аргумент типа String и пытается его распарсить в Int(8\|16\|32\|64). Если не удалось - возвращает NULL.
|
Принимает аргумент типа String и пытается его распарсить в Int(8\|16\|32\|64\|128\|256). Если не удалось - возвращает NULL.
|
||||||
|
|
||||||
**Пример**
|
**Пример**
|
||||||
|
|
||||||
@ -71,7 +73,7 @@ select toInt64OrNull('123123'), toInt8OrNull('123qwe123')
|
|||||||
└─────────────────────────┴───────────────────────────┘
|
└─────────────────────────┴───────────────────────────┘
|
||||||
```
|
```
|
||||||
|
|
||||||
## toUInt(8\|16\|32\|64) {#touint8163264}
|
## toUInt(8\|16\|32\|64\|256) {#touint8163264}
|
||||||
|
|
||||||
Преобраует входное значение к типу [UInt](../../sql-reference/functions/type-conversion-functions.md). Семейство функций включает:
|
Преобраует входное значение к типу [UInt](../../sql-reference/functions/type-conversion-functions.md). Семейство функций включает:
|
||||||
|
|
||||||
@ -79,6 +81,7 @@ select toInt64OrNull('123123'), toInt8OrNull('123qwe123')
|
|||||||
- `toUInt16(expr)` — возвращает значение типа `UInt16`.
|
- `toUInt16(expr)` — возвращает значение типа `UInt16`.
|
||||||
- `toUInt32(expr)` — возвращает значение типа `UInt32`.
|
- `toUInt32(expr)` — возвращает значение типа `UInt32`.
|
||||||
- `toUInt64(expr)` — возвращает значение типа `UInt64`.
|
- `toUInt64(expr)` — возвращает значение типа `UInt64`.
|
||||||
|
- `toUInt256(expr)` — возвращает значение типа `UInt256`.
|
||||||
|
|
||||||
**Параметры**
|
**Параметры**
|
||||||
|
|
||||||
@ -86,7 +89,7 @@ select toInt64OrNull('123123'), toInt8OrNull('123qwe123')
|
|||||||
|
|
||||||
**Возвращаемое значение**
|
**Возвращаемое значение**
|
||||||
|
|
||||||
Целое число типа `UInt8`, `UInt16`, `UInt32` или `UInt64`.
|
Целое число типа `UInt8`, `UInt16`, `UInt32`, `UInt64` или `UInt256`.
|
||||||
|
|
||||||
Функции используют [округление к нулю](https://en.wikipedia.org/wiki/Rounding#Rounding_towards_zero), т.е. обрезают дробную часть числа.
|
Функции используют [округление к нулю](https://en.wikipedia.org/wiki/Rounding#Rounding_towards_zero), т.е. обрезают дробную часть числа.
|
||||||
|
|
||||||
@ -104,9 +107,9 @@ SELECT toUInt64(nan), toUInt32(-32), toUInt16('16'), toUInt8(8.8)
|
|||||||
└─────────────────────┴───────────────┴────────────────┴──────────────┘
|
└─────────────────────┴───────────────┴────────────────┴──────────────┘
|
||||||
```
|
```
|
||||||
|
|
||||||
## toUInt(8\|16\|32\|64)OrZero {#touint8163264orzero}
|
## toUInt(8\|16\|32\|64\|256)OrZero {#touint8163264orzero}
|
||||||
|
|
||||||
## toUInt(8\|16\|32\|64)OrNull {#touint8163264ornull}
|
## toUInt(8\|16\|32\|64\|256)OrNull {#touint8163264ornull}
|
||||||
|
|
||||||
## toFloat(32\|64) {#tofloat3264}
|
## toFloat(32\|64) {#tofloat3264}
|
||||||
|
|
||||||
@ -126,21 +129,23 @@ SELECT toUInt64(nan), toUInt32(-32), toUInt16('16'), toUInt8(8.8)
|
|||||||
|
|
||||||
## toDateTimeOrNull {#todatetimeornull}
|
## toDateTimeOrNull {#todatetimeornull}
|
||||||
|
|
||||||
## toDecimal(32\|64\|128) {#todecimal3264128}
|
## toDecimal(32\|64\|128\|256) {#todecimal3264128}
|
||||||
|
|
||||||
Преобразует `value` к типу данных [Decimal](../../sql-reference/functions/type-conversion-functions.md) с точностью `S`. `value` может быть числом или строкой. Параметр `S` (scale) задаёт число десятичных знаков.
|
Преобразует `value` к типу данных [Decimal](../../sql-reference/functions/type-conversion-functions.md) с точностью `S`. `value` может быть числом или строкой. Параметр `S` (scale) задаёт число десятичных знаков.
|
||||||
|
|
||||||
- `toDecimal32(value, S)`
|
- `toDecimal32(value, S)`
|
||||||
- `toDecimal64(value, S)`
|
- `toDecimal64(value, S)`
|
||||||
- `toDecimal128(value, S)`
|
- `toDecimal128(value, S)`
|
||||||
|
- `toDecimal256(value, S)`
|
||||||
|
|
||||||
-## toDecimal(32\|64\|128)OrNull {#todecimal3264128ornull}
+## toDecimal(32\|64\|128\|256)OrNull {#todecimal3264128ornull}

Converts an input string to a [Nullable (Decimal (P, S))](../../sql-reference/functions/type-conversion-functions.md) data type value. This family of functions includes:

- `toDecimal32OrNull(expr, S)` — returns a value of type `Nullable(Decimal32(S))`.
- `toDecimal64OrNull(expr, S)` — returns a value of type `Nullable(Decimal64(S))`.
- `toDecimal128OrNull(expr, S)` — returns a value of type `Nullable(Decimal128(S))`.
+- `toDecimal256OrNull(expr, S)` — returns a value of type `Nullable(Decimal256(S))`.

Use these functions instead of the `toDecimal*()` functions if you prefer to get `NULL` instead of an exception when the input value cannot be parsed.

@ -178,13 +183,14 @@ SELECT toDecimal32OrNull(toString(-1.111), 2) AS val, toTypeName(val)

└──────┴────────────────────────────────────────────────────┘
```

-## toDecimal(32\|64\|128)OrZero {#todecimal3264128orzero}
+## toDecimal(32\|64\|128\|256)OrZero {#todecimal3264128orzero}

Converts an input value to the [Decimal (P, S)](../../sql-reference/functions/type-conversion-functions.md) data type. This family of functions includes:

- `toDecimal32OrZero( expr, S)` — returns a value of type `Decimal32(S)`.
- `toDecimal64OrZero( expr, S)` — returns a value of type `Decimal64(S)`.
- `toDecimal128OrZero( expr, S)` — returns a value of type `Decimal128(S)`.
+- `toDecimal256OrZero( expr, S)` — returns a value of type `Decimal256(S)`.

Use these functions instead of the `toDecimal*()` functions if you prefer to get `0` instead of an exception when the input value cannot be parsed.
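A hedged sketch of how the new 256-bit variants behave on unparsable input, following the pattern of the `toDecimal32OrNull` example above (the query is illustrative and not part of the original page):

``` sql
SELECT
    toDecimal256OrNull('not a number', 2) AS null_on_error,
    toDecimal256OrZero('not a number', 2) AS zero_on_error;
```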
@ -717,4 +723,44 @@ SELECT toLowCardinality('1')

└───────────────────────┘
```

+## formatRow {#formatrow}
+
+Converts arbitrary expressions into a string in the given format.
+
+**Syntax**
+
+``` sql
+formatRow(format, x, y, ...)
+```
+
+**Parameters**
+
+- `format` — Text format. For example, [CSV](../../interfaces/formats.md#csv), [TSV](../../interfaces/formats.md#tabseparated).
+- `x`,`y`, ... — Expressions.
+
+**Returned value**
+
+- The formatted string (in text formats it usually ends with a trailing newline).
+
+**Example**
+
+Query:
+
+``` sql
+SELECT formatRow('CSV', number, 'good')
+FROM numbers(3)
+```
+
+Result:
+
+``` text
+┌─formatRow('CSV', number, 'good')─┐
+│ 0,"good"
+│
+│ 1,"good"
+│
+│ 2,"good"
+│
+└──────────────────────────────────┘
+```

[Original article](https://clickhouse.tech/docs/ru/query_language/functions/type_conversion_functions/) <!--hide-->

@ -24,14 +24,14 @@ toc_title: "\u0412\u0432\u0435\u0434\u0435\u043D\u0438\u0435"

| Function | Description |
|-----------------------|---------------------------------------------------------------------------------------------------------------------------------------|
-| [file](file.md) | Creates a table with the [File](../../sql-reference/table-functions/index.md) engine. |
+| [file](file.md) | Creates a table with the [File](../../engines/table-engines/special/file.md) engine. |
-| [merge](merge.md) | Creates a table with the [Merge](../../sql-reference/table-functions/index.md) engine. |
+| [merge](merge.md) | Creates a table with the [Merge](../../engines/table-engines/special/merge.md) engine. |
| [numbers](numbers.md) | Creates a table with a single column filled with integer numbers. |
-| [remote](remote.md) | Provides access to remote servers without creating a table with the [Distributed](../../sql-reference/table-functions/index.md) engine. |
+| [remote](remote.md) | Provides access to remote servers without creating a table with the [Distributed](../../engines/table-engines/special/distributed.md) engine. |
-| [url](url.md) | Creates a table with the [Url](../../sql-reference/table-functions/index.md) engine. |
+| [url](url.md) | Creates a table with the [Url](../../engines/table-engines/special/url.md) engine. |
-| [mysql](mysql.md) | Creates a table with the [MySQL](../../sql-reference/table-functions/index.md) engine. |
+| [mysql](mysql.md) | Creates a table with the [MySQL](../../engines/table-engines/integrations/mysql.md) engine. |
-| [jdbc](jdbc.md) | Creates a table with the [JDBC](../../sql-reference/table-functions/index.md) engine. |
+| [jdbc](jdbc.md) | Creates a table with the [JDBC](../../engines/table-engines/integrations/jdbc.md) engine. |
-| [odbc](odbc.md) | Creates a table with the [ODBC](../../sql-reference/table-functions/index.md) engine. |
+| [odbc](odbc.md) | Creates a table with the [ODBC](../../engines/table-engines/integrations/odbc.md) engine. |
-| [hdfs](hdfs.md) | Creates a table with the [HDFS](../../sql-reference/table-functions/index.md) engine. |
+| [hdfs](hdfs.md) | Creates a table with the [HDFS](../../engines/table-engines/integrations/hdfs.md) engine. |
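For orientation, a short sketch of how the table functions listed above are called directly in the `FROM` clause (the remote address below is a placeholder, not something from the original page):

``` sql
-- numbers() builds an ad-hoc table with a single UInt64 column named `number`.
SELECT number * 2 FROM numbers(5);

-- remote() reads a table on another server without declaring a Distributed table;
-- 'example-host:9000' is a hypothetical address.
SELECT count() FROM remote('example-host:9000', default.hits);
```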
[Original article](https://clickhouse.tech/docs/ru/query_language/table_functions/) <!--hide-->

@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import os, sys

@ -71,8 +71,8 @@ def choose_latest_releases(args):
logging.fatal('Unexpected GitHub response: %s', str(candidates))
sys.exit(1)

-logging.info('Found LTS releases: %s', ', '.join(seen_lts.keys()))
+logging.info('Found LTS releases: %s', ', '.join(list(seen_lts.keys())))
-logging.info('Found stable releases: %s', ', '.join(seen_stable.keys()))
+logging.info('Found stable releases: %s', ', '.join(list(seen_stable.keys())))
return sorted(list(seen_lts.items()) + list(seen_stable.items()))

@ -1,6 +1,6 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
# -*- coding: utf-8 -*-
-from __future__ import unicode_literals

import datetime
import os

@ -59,7 +59,7 @@ def build_docs_nav(lang, args):
_, _, nav = build_nav_entry(docs_dir, args)
result = []
index_key = None
-for key, value in nav.items():
+for key, value in list(nav.items()):
if key and value:
if value == 'index.md':
index_key = key

@ -59,7 +59,7 @@ def convert_to_dicts(changed_files, batch_size):
def post_data(prepared_batches, token):
headers = {"Authorization": "Bearer {}".format(token)}
for batch in prepared_batches:
-print("Pugring cache for", ", ".join(batch["files"]))
+print(("Pugring cache for", ", ".join(batch["files"])))
response = requests.post(CLOUDFLARE_URL, json=batch, headers=headers)
response.raise_for_status()
time.sleep(3)

@ -71,8 +71,8 @@ if __name__ == "__main__":
raise Exception("Env variable CLOUDFLARE_TOKEN is empty")
base_domain = os.getenv("BASE_DOMAIN", "https://content.clickhouse.tech/")
changed_files = collect_changed_files()
-print("Found", len(changed_files), "changed files")
+print(("Found", len(changed_files), "changed files"))
filtered_files = filter_and_transform_changed_files(changed_files, base_domain)
-print("Files rest after filtering", len(filtered_files))
+print(("Files rest after filtering", len(filtered_files)))
prepared_batches = convert_to_dicts(filtered_files, 25)
post_data(prepared_batches, token)

@ -15,7 +15,7 @@ import website

def recursive_values(item):
if isinstance(item, dict):
-for _, value in item.items():
+for _, value in list(item.items()):
yield from recursive_values(value)
elif isinstance(item, list):
for value in item:

@ -1,6 +1,5 @@
#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-from __future__ import unicode_literals
import logging
import os
import sys

@ -42,4 +42,4 @@ def typograph(text):

if __name__ == '__main__':
import sys
-print(typograph(sys.stdin.read()))
+print((typograph(sys.stdin.read())))

@ -102,7 +102,7 @@ Example for OpenSUSE Tumbleweed:
Example for Fedora Rawhide:

sudo yum update
-yum --nogpg install git cmake make gcc-c++ python2
+yum --nogpg install git cmake make gcc-c++ python3
git clone --recursive https://github.com/ClickHouse/ClickHouse.git
mkdir build && cd build
cmake ../ClickHouse

@ -468,7 +468,7 @@ clickhouse-client --query "INSERT INTO tutorial.hits_v1 FORMAT TSV" --max_insert
clickhouse-client --query "INSERT INTO tutorial.visits_v1 FORMAT TSV" --max_insert_block_size=100000 < visits_v1.tsv
```

-ClickHouse has many [settings to tune](../operations/settings/index.md). One way to specify them in the console client is via arguments, as we can see with `--max_insert_block_size`. The easiest way to figure out which settings are available, what they mean and what their defaults are is to query the `system.settings` table:
+ClickHouse has many [settings to tune](../operations/settings/index.md). One way to specify them in the console client is via arguments, as we saw with `--max_insert_block_size` in the statement above. The easiest way to figure out which settings are available, what they mean and what their defaults are is to query the `system.settings` table:

``` sql
SELECT name, value, changed, description

@ -479,7 +479,7 @@ FORMAT TSV
max_insert_block_size 1048576 0 "The maximum block size for insertion, if we control the creation of blocks for insertion."
```

-You can also [OPTIMIZE](../sql-reference/statements/misc.md#misc_operations-optimize) the tables after import. Tables configured with a MergeTree-family engine always merge data parts in the background to optimize data storage (or at least check whether it makes sense). These queries force the table engine to do the storage optimization right now instead of some time later:
+You can also [OPTIMIZE](../sql-reference/statements/misc.md#misc_operations-optimize) the tables after import. Tables configured with a MergeTree-family engine always merge data parts in the background to optimize data storage (or at least check whether it makes sense). These queries force the table engine to do the storage optimization right now instead of running it some time later:

``` bash
clickhouse-client --query "OPTIMIZE TABLE tutorial.hits_v1 FINAL"

@ -521,14 +521,14 @@ WHERE (CounterID = 912887) AND (toYYYYMM(StartDate) = 201403) AND (domain(StartU

A ClickHouse cluster is a homogeneous cluster. Steps to set it up:

-1. Install ClickHouse server on all computers of the cluster
+1. Install the ClickHouse server on all machines of the cluster
2. Set up cluster configs in the configuration files
3. Create local tables on each instance
4. Create a [Distributed table](../engines/table-engines/special/distributed.md)

-A [Distributed table](../engines/table-engines/special/distributed.md) is actually a kind of "view" onto the local tables of a ClickHouse cluster. A SELECT query from a distributed table executes using the resources of all of the cluster's shards. You may specify configs for multiple clusters and create multiple distributed tables that provide views into different clusters.
+A [Distributed table](../engines/table-engines/special/distributed.md) is actually a kind of "view" that maps onto the local tables of a ClickHouse cluster. A **SELECT** query from a distributed table executes using the resources of all of the cluster's shards. You may specify configs for multiple clusters and create multiple distributed tables that provide views into different clusters.

-Example config for a cluster with three shards, with one replica each:
+Example config for a cluster with three shards, one replica per shard:

``` xml
<remote_servers>

@ -555,7 +555,7 @@ A ClickHouse cluster is a homogeneous cluster. Steps to set it up:
</remote_servers>
```

-To go further with the demo, let's create a new local table with the `CREATE TABLE` we used for `hits_v1`, but a different table name:
+To go further with the demo, let's create a new local table using the same `CREATE TABLE` statement that we used for `hits_v1`, but with a different table name:

``` sql
CREATE TABLE tutorial.hits_local (...) ENGINE = MergeTree() ...

@ -570,14 +570,14 @@ ENGINE = Distributed(perftest_3shards_1replicas, tutorial, hits_local, rand());

A common practice is to create similar distributed tables on all machines of the cluster. This allows running distributed queries on any machine of the cluster. There is also an alternative option to create a temporary distributed table for a given SELECT query using the [remote](../sql-reference/table-functions/remote.md) table function.

-Here we go with [INSERT SELECT](../sql-reference/statements/insert-into.md) to spread the table to multiple servers.
+Let's run [INSERT SELECT](../sql-reference/statements/insert-into.md) to spread the table to multiple servers.

``` sql
INSERT INTO tutorial.hits_all SELECT * FROM tutorial.hits_v1;
```

-!!! warning "碌莽禄Notice:"
+!!! warning "Notice:"
-This approach is not suitable for sharding large tables. There is a separate tool [ツ环板-ョツ嘉ッツ偲](../operations/utilities/clickhouse-copier.md) that can re-shard arbitrarily large tables.
+This approach is not suitable for sharding large tables. There is a separate tool, [clickhouse-copier](../operations/utilities/clickhouse-copier.md), that can re-shard arbitrarily large tables.

As you would expect, computationally heavy queries run N times faster when they use 3 servers instead of one.
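To make the shard fan-out concrete, a small check one might run at this point in the tutorial (an illustrative sketch; the counts depend on how the insert above was distributed):

``` sql
-- Counts only the rows stored on the local shard.
SELECT count() FROM tutorial.hits_local;

-- Fans out to every shard behind the Distributed table and sums the results.
SELECT count() FROM tutorial.hits_all;
```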
@ -609,10 +609,10 @@ INSERT INTO tutorial.hits_all SELECT * FROM tutorial.hits_v1;
</remote_servers>
```

-To enable native replication, [动物园管理员](http://zookeeper.apache.org/) is required. ClickHouse takes care of data consistency on all replicas and runs the restore procedure after a failure automatically. It is recommended to deploy the ZooKeeper cluster on separate servers (where no other processes, including ClickHouse, are running).
+To enable native replication, [Zookeeper](http://zookeeper.apache.org/) is required. ClickHouse takes care of data consistency on all replicas and runs the restore procedure after a failure automatically. It is recommended to deploy the ZooKeeper cluster on separate servers (where no other processes, including a running ClickHouse, are present).

!!! note "Note"
-ZooKeeper is not a strict requirement: in some simple cases, you can duplicate the data by writing it into all the replicas from your application code. This approach is **not** recommended; in this case, ClickHouse won't be able to guarantee data consistency on all replicas, so it becomes the responsibility of your application.
+ZooKeeper is not a strict requirement: in some simple cases, you can duplicate the data by writing it into all the replicas from your application code. This approach is **not** recommended: in this case, ClickHouse won't be able to guarantee data consistency on all replicas, so ensuring it becomes your application's responsibility.

ZooKeeper locations are specified in the configuration file:
@ -134,7 +134,6 @@ private:
bool stdout_is_a_tty = false; /// stdout is a terminal.

std::unique_ptr<Connection> connection; /// Connection to DB.
-String query_id; /// Current query_id.
String full_query; /// Current query as it was given to the client.

// Current query as it will be sent to the server. It may differ from the

@ -219,6 +218,9 @@ private:
QueryFuzzer fuzzer;
int query_fuzzer_runs = 0;

+/// We will format query_id in interactive mode in various ways, the default is just to print Query id: ...
+std::vector<std::pair<String, String>> query_id_formats;

void initialize(Poco::Util::Application & self) override
{
Poco::Util::Application::initialize(self);

@ -243,6 +245,17 @@ private:
/// Set path for format schema files
if (config().has("format_schema_path"))
context.setFormatSchemaPath(Poco::Path(config().getString("format_schema_path")).toString());

+/// Initialize query_id_formats if any
+if (config().has("query_id_formats"))
+{
+Poco::Util::AbstractConfiguration::Keys keys;
+config().keys("query_id_formats", keys);
+for (const auto & name : keys)
+query_id_formats.emplace_back(name + ":", config().getString("query_id_formats." + name));
+}
+if (query_id_formats.empty())
+query_id_formats.emplace_back("Query id:", " {query_id}\n");
}

@ -559,7 +572,7 @@ private:

if (is_interactive)
{
-if (!query_id.empty())
+if (config().has("query_id"))
throw Exception("query_id could be specified only in non-interactive mode", ErrorCodes::BAD_ARGUMENTS);
if (print_time_to_stderr)
throw Exception("time option could be specified only in non-interactive mode", ErrorCodes::BAD_ARGUMENTS);

@ -665,7 +678,9 @@ private:
}
else
{
-query_id = config().getString("query_id", "");
+auto query_id = config().getString("query_id", "");
+if (!query_id.empty())
+context.setCurrentQueryId(query_id);
if (query_fuzzer_runs)
{
nonInteractiveWithFuzzing();

@ -1274,6 +1289,19 @@ private:
std_out.next();
}

+if (is_interactive)
+{
+// Generate a new query_id
+context.setCurrentQueryId("");
+for (const auto & query_id_format : query_id_formats)
+{
+writeString(query_id_format.first, std_out);
+writeString(fmt::format(query_id_format.second, fmt::arg("query_id", context.getCurrentQueryId())), std_out);
+writeChar('\n', std_out);
+std_out.next();
+}
+}

watch.restart();
processed_rows = 0;
progress.reset();

@ -1399,7 +1427,7 @@
connection->sendQuery(
connection_parameters.timeouts,
query_to_send,
-query_id,
+context.getCurrentQueryId(),
QueryProcessingStage::Complete,
&context.getSettingsRef(),
&context.getClientInfo(),

@ -1440,7 +1468,7 @@
connection->sendQuery(
connection_parameters.timeouts,
query_to_send,
-query_id,
+context.getCurrentQueryId(),
QueryProcessingStage::Complete,
&context.getSettingsRef(),
&context.getClientInfo(),

@ -1555,9 +1583,8 @@
BlockInputStreamPtr block_input = context.getInputFormat(
current_format, buf, sample, insert_format_max_block_size);

-const auto & column_defaults = columns_description.getDefaults();
-if (!column_defaults.empty())
-block_input = std::make_shared<AddingDefaultsBlockInputStream>(block_input, column_defaults, context);
+if (columns_description.hasDefaults())
+block_input = std::make_shared<AddingDefaultsBlockInputStream>(block_input, columns_description, context);

BlockInputStreamPtr async_block_input = std::make_shared<AsynchronousBlockInputStream>(block_input);
@ -168,6 +168,26 @@ ASTPtr extractOrderBy(const ASTPtr & storage_ast)
throw Exception("ORDER BY cannot be empty", ErrorCodes::BAD_ARGUMENTS);
}

+/// Wraps only identifiers with backticks.
+std::string wrapIdentifiersWithBackticks(const ASTPtr & root)
+{
+if (auto identifier = std::dynamic_pointer_cast<ASTIdentifier>(root))
+return backQuote(identifier->name);
+
+if (auto function = std::dynamic_pointer_cast<ASTFunction>(root))
+return function->name + '(' + wrapIdentifiersWithBackticks(function->arguments) + ')';
+
+if (auto expression_list = std::dynamic_pointer_cast<ASTExpressionList>(root))
+{
+Names function_arguments(expression_list->children.size());
+for (size_t i = 0; i < expression_list->children.size(); ++i)
+function_arguments[i] = wrapIdentifiersWithBackticks(expression_list->children[0]);
+return boost::algorithm::join(function_arguments, ", ");
+}
+
+throw Exception("Primary key could be represented only as columns or functions from columns.", ErrorCodes::BAD_ARGUMENTS);
+}

Names extractPrimaryKeyColumnNames(const ASTPtr & storage_ast)
{

@ -189,13 +209,14 @@ Names extractPrimaryKeyColumnNames(const ASTPtr & storage_ast)
ErrorCodes::BAD_ARGUMENTS);

Names primary_key_columns;
-Names sorting_key_columns;
NameSet primary_key_columns_set;

for (size_t i = 0; i < sorting_key_size; ++i)
{
+/// Column name could be represented as a f_1(f_2(...f_n(column_name))).
+/// Each f_i could take one or more parameters.
+/// We will wrap identifiers with backticks to allow non-standart identifier names.
String sorting_key_column = sorting_key_expr_list->children[i]->getColumnName();
-sorting_key_columns.push_back(sorting_key_column);

if (i < primary_key_size)
{

@ -208,7 +229,7 @@ Names extractPrimaryKeyColumnNames(const ASTPtr & storage_ast)
if (!primary_key_columns_set.emplace(pk_column).second)
throw Exception("Primary key contains duplicate columns", ErrorCodes::BAD_ARGUMENTS);

-primary_key_columns.push_back(pk_column);
+primary_key_columns.push_back(wrapIdentifiersWithBackticks(primary_key_expr_list->children[i]));
}
}

@ -6,6 +6,9 @@

#include <Core/Defines.h>

+#include <ext/map.h>
+#include <boost/algorithm/string/join.hpp>

namespace DB
{

@ -269,7 +272,7 @@ inline TaskTable::TaskTable(TaskCluster & parent, const Poco::Util::AbstractConf
ParserStorage parser_storage;
engine_push_ast = parseQuery(parser_storage, engine_push_str, 0, DBMS_DEFAULT_MAX_PARSER_DEPTH);
engine_push_partition_key_ast = extractPartitionKey(engine_push_ast);
-primary_key_comma_separated = Nested::createCommaSeparatedStringFrom(extractPrimaryKeyColumnNames(engine_push_ast));
+primary_key_comma_separated = boost::algorithm::join(extractPrimaryKeyColumnNames(engine_push_ast), ", ");
is_replicated_table = isReplicatedTableEngine(engine_push_ast);
}
@ -2,10 +2,15 @@
#include <setjmp.h>
#include <unistd.h>

+#ifdef __linux__
+#include <sys/mman.h>
+#endif

#include <new>
#include <iostream>
#include <vector>
#include <string>
+#include <tuple>
#include <utility> /// pair

#if !defined(ARCADIA_BUILD)

@ -57,6 +62,7 @@ int mainEntryClickHouseStatus(int argc, char ** argv);
int mainEntryClickHouseRestart(int argc, char ** argv);
#endif

+#define ARRAY_SIZE(a) (sizeof(a)/sizeof((a)[0]))

namespace
{

@ -150,28 +156,29 @@ enum class InstructionFail
AVX512 = 8
};

-const char * instructionFailToString(InstructionFail fail)
+std::pair<const char *, size_t> instructionFailToString(InstructionFail fail)
{
switch (fail)
{
+#define ret(x) return std::make_pair(x, ARRAY_SIZE(x) - 1)
case InstructionFail::NONE:
-return "NONE";
+ret("NONE");
case InstructionFail::SSE3:
-return "SSE3";
+ret("SSE3");
case InstructionFail::SSSE3:
-return "SSSE3";
+ret("SSSE3");
case InstructionFail::SSE4_1:
-return "SSE4.1";
+ret("SSE4.1");
case InstructionFail::SSE4_2:
-return "SSE4.2";
+ret("SSE4.2");
case InstructionFail::POPCNT:
-return "POPCNT";
+ret("POPCNT");
case InstructionFail::AVX:
-return "AVX";
+ret("AVX");
case InstructionFail::AVX2:
-return "AVX2";
+ret("AVX2");
case InstructionFail::AVX512:
-return "AVX512";
+ret("AVX512");
}
__builtin_unreachable();
}

@ -238,7 +245,7 @@ void checkRequiredInstructionsImpl(volatile InstructionFail & fail)
}

/// This function is safe to use in static initializers.
-void writeError(const char * data, size_t size)
+void writeErrorLen(const char * data, size_t size)
{
while (size != 0)
{

@ -254,6 +261,12 @@ void writeError(const char * data, size_t size)
}
}
}
+/// Macros to avoid using strlen(), since it may fail if SSE is not supported.
+#define writeError(data) do \
+{ \
+static_assert(__builtin_constant_p(data)); \
+writeErrorLen(data, ARRAY_SIZE(data) - 1); \
+} while (false)

/// Check SSE and others instructions availability. Calls exit on fail.
/// This function must be called as early as possible, even before main, because static initializers may use unavailable instructions.

@ -272,8 +285,7 @@ void checkRequiredInstructions()
/// Typical implementation of strlen is using SSE4.2 or AVX2.
/// But this is not the case because it's compiler builtin and is executed at compile time.

-const char * msg = "Can not set signal handler\n";
-writeError(msg, strlen(msg));
+writeError("Can not set signal handler\n");
_Exit(1);
}

@ -281,12 +293,9 @@ void checkRequiredInstructions()

if (sigsetjmp(jmpbuf, 1))
{
-const char * msg1 = "Instruction check fail. The CPU does not support ";
-writeError(msg1, strlen(msg1));
-const char * msg2 = instructionFailToString(fail);
-writeError(msg2, strlen(msg2));
-const char * msg3 = " instruction set.\n";
-writeError(msg3, strlen(msg3));
+writeError("Instruction check fail. The CPU does not support ");
+std::apply(writeErrorLen, instructionFailToString(fail));
+writeError(" instruction set.\n");
_Exit(1);
}

@ -294,13 +303,60 @@ void checkRequiredInstructions()

if (sigaction(signal, &sa_old, nullptr))
{
-const char * msg = "Can not set signal handler\n";
-writeError(msg, strlen(msg));
+writeError("Can not set signal handler\n");
_Exit(1);
}
}

-struct Checker { Checker() { checkRequiredInstructions(); } } checker;
+#ifdef __linux__
+/// clickhouse uses jemalloc as a production allocator
+/// and jemalloc relies on working MADV_DONTNEED,
+/// which doesn't work under qemu
+///
+/// but do this only under for linux, since only it return zeroed pages after MADV_DONTNEED
+/// (and jemalloc assumes this too, see contrib/jemalloc-cmake/include_linux_x86_64/jemalloc/internal/jemalloc_internal_defs.h.in)
+void checkRequiredMadviseFlags()
+{
+size_t size = 1 << 16;
+void * addr = mmap(nullptr, size, PROT_READ|PROT_WRITE, MAP_PRIVATE|MAP_ANONYMOUS, -1, 0);
+if (addr == MAP_FAILED)
+{
+writeError("Can not mmap pages for MADV_DONTNEED check\n");
+_Exit(1);
+}
+memset(addr, 'A', size);
+
+if (!madvise(addr, size, MADV_DONTNEED))
+{
+/// Suboptimal, but should be simple.
+for (size_t i = 0; i < size; ++i)
+{
+if (reinterpret_cast<unsigned char *>(addr)[i] != 0)
+{
+writeError("MADV_DONTNEED does not zeroed page. jemalloc will be broken\n");
+_Exit(1);
+}
+}
+}
+
+if (munmap(addr, size))
+{
+writeError("Can not munmap pages for MADV_DONTNEED check\n");
+_Exit(1);
+}
+}
+#endif
+
+struct Checker
+{
+Checker()
+{
+checkRequiredInstructions();
+#ifdef __linux__
+checkRequiredMadviseFlags();
+#endif
+}
+} checker;

}
@ -267,12 +267,6 @@ int Server::main(const std::vector<std::string> & /*args*/)
registerDictionaries();
registerDisks();

-#if !defined(ARCADIA_BUILD)
-#if USE_OPENCL
-BitonicSort::getInstance().configure();
-#endif
-#endif

CurrentMetrics::set(CurrentMetrics::Revision, ClickHouseRevision::getVersionRevision());
CurrentMetrics::set(CurrentMetrics::VersionInteger, ClickHouseRevision::getVersionInteger());

@ -66,7 +66,7 @@
shift
elif [[ $1 == '--fast' ]]; then
# Wrong but fast pbuilder mode: create base package with all depends
-EXTRAPACKAGES="$EXTRAPACKAGES debhelper cmake ninja-build gcc-8 g++-8 libc6-dev libicu-dev libreadline-dev psmisc bash expect python python-lxml python-termcolor python-requests curl perl sudo openssl netcat-openbsd"
+EXTRAPACKAGES="$EXTRAPACKAGES debhelper cmake ninja-build gcc-8 g++-8 libc6-dev libicu-dev libreadline-dev psmisc bash expect python3 python3-lxml python3-termcolor python3-requests curl perl sudo openssl netcat-openbsd"
shift
elif [[ $1 == '--rpm' ]]; then
MAKE_RPM=1

@ -143,13 +143,13 @@ void LinearModelData::updateState()

void LinearModelData::predict(
ColumnVector<Float64>::Container & container,
-Block & block,
+ColumnsWithTypeAndName & columns,
size_t offset,
size_t limit,
const ColumnNumbers & arguments,
const Context & context) const
{
-gradient_computer->predict(container, block, offset, limit, arguments, weights, bias, context);
+gradient_computer->predict(container, columns, offset, limit, arguments, weights, bias, context);
}

void LinearModelData::returnWeights(IColumn & to) const

@ -449,7 +449,7 @@ void IWeightsUpdater::addToBatch(

void LogisticRegression::predict(
ColumnVector<Float64>::Container & container,
-Block & block,
+ColumnsWithTypeAndName & columns,
size_t offset,
size_t limit,
const ColumnNumbers & arguments,

@ -457,7 +457,7 @@ void LogisticRegression::predict(
Float64 bias,
const Context & /*context*/) const
{
-size_t rows_num = block.rows();
+size_t rows_num = columns[arguments.front()].column->size();

if (offset > rows_num || offset + limit > rows_num)
throw Exception("Invalid offset and limit for LogisticRegression::predict. "

@ -468,7 +468,7 @@ void LogisticRegression::predict(

for (size_t i = 1; i < arguments.size(); ++i)
{
-const ColumnWithTypeAndName & cur_col = block.getByPosition(arguments[i]);
+const ColumnWithTypeAndName & cur_col = columns[arguments[i]];

if (!isNativeNumber(cur_col.type))
throw Exception("Prediction arguments must have numeric type", ErrorCodes::BAD_ARGUMENTS);

@ -518,7 +518,7 @@ void LogisticRegression::compute(

void LinearRegression::predict(
ColumnVector<Float64>::Container & container,
-Block & block,
+ColumnsWithTypeAndName & columns,
size_t offset,
size_t limit,
const ColumnNumbers & arguments,

@ -531,7 +531,7 @@ void LinearRegression::predict(
throw Exception("In predict function number of arguments differs from the size of weights vector", ErrorCodes::LOGICAL_ERROR);
}

-size_t rows_num = block.rows();
+size_t rows_num = columns[arguments.front()].column->size();

if (offset > rows_num || offset + limit > rows_num)
throw Exception("Invalid offset and limit for LogisticRegression::predict. "

@ -542,7 +542,7 @@ void LinearRegression::predict(

for (size_t i = 1; i < arguments.size(); ++i)
{
-const ColumnWithTypeAndName & cur_col = block.getByPosition(arguments[i]);
+const ColumnWithTypeAndName & cur_col = columns[arguments[i]];

if (!isNativeNumber(cur_col.type))
throw Exception("Prediction arguments must have numeric type", ErrorCodes::BAD_ARGUMENTS);