Merge remote-tracking branch 'origin/master' into 16414-totimezone-throw

Jordi Villar 2023-04-13 14:03:45 +02:00
commit eb32b710f0
226 changed files with 2703 additions and 1027 deletions

View File

@ -58,7 +58,7 @@ if (ENABLE_CHECK_HEAVY_BUILDS)
set (RLIMIT_CPU 1000)
# gcc10/gcc10/clang -fsanitize=memory is too heavy
if (SANITIZE STREQUAL "memory" OR COMPILER_GCC)
if (SANITIZE STREQUAL "memory")
set (RLIMIT_DATA 10000000000) # 10G
endif()
@ -286,48 +286,31 @@ set (CMAKE_C_STANDARD 11)
set (CMAKE_C_EXTENSIONS ON) # required by most contribs written in C
set (CMAKE_C_STANDARD_REQUIRED ON)
if (COMPILER_GCC OR COMPILER_CLANG)
# Enable C++14 sized global deallocation functions. It should be enabled by setting -std=c++14 but I'm not sure.
# See https://reviews.llvm.org/D112921
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fsized-deallocation")
endif ()
# falign-functions=32 prevents from random performance regressions with the code change. Thus, providing more stable
# benchmarks.
if (COMPILER_GCC OR COMPILER_CLANG)
set(COMPILER_FLAGS "${COMPILER_FLAGS} -falign-functions=32")
endif ()
if (ARCH_AMD64)
# align branches within a 32-Byte boundary to avoid the potential performance loss when code layout change,
# which makes benchmark results more stable.
set(BRANCHES_WITHIN_32B_BOUNDARIES "-mbranches-within-32B-boundaries")
if (COMPILER_GCC)
# gcc is in assembler, need to add "-Wa," prefix
set(BRANCHES_WITHIN_32B_BOUNDARIES "-Wa,${BRANCHES_WITHIN_32B_BOUNDARIES}")
endif()
set(COMPILER_FLAGS "${COMPILER_FLAGS} ${BRANCHES_WITHIN_32B_BOUNDARIES}")
endif()
if (COMPILER_GCC)
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fcoroutines")
endif ()
# Compiler-specific coverage flags e.g. -fcoverage-mapping for gcc
option(WITH_COVERAGE "Profile the resulting binary/binaries" OFF)
if (WITH_COVERAGE AND COMPILER_CLANG)
set(COMPILER_FLAGS "${COMPILER_FLAGS} -fprofile-instr-generate -fcoverage-mapping")
# If we want to disable coverage for specific translation units
set(WITHOUT_COVERAGE "-fno-profile-instr-generate -fno-coverage-mapping")
endif()
if (COMPILER_CLANG)
# Enable C++14 sized global deallocation functions. It should be enabled by setting -std=c++14 but I'm not sure.
# See https://reviews.llvm.org/D112921
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fsized-deallocation")
if (WITH_COVERAGE AND COMPILER_GCC)
set(COMPILER_FLAGS "${COMPILER_FLAGS} -fprofile-arcs -ftest-coverage")
set(COVERAGE_OPTION "-lgcov")
set(WITHOUT_COVERAGE "-fno-profile-arcs -fno-test-coverage")
endif()
# falign-functions=32 prevents from random performance regressions with the code change. Thus, providing more stable
# benchmarks.
set(COMPILER_FLAGS "${COMPILER_FLAGS} -falign-functions=32")
if (ARCH_AMD64)
# align branches within a 32-Byte boundary to avoid the potential performance loss when code layout change,
# which makes benchmark results more stable.
set(BRANCHES_WITHIN_32B_BOUNDARIES "-mbranches-within-32B-boundaries")
set(COMPILER_FLAGS "${COMPILER_FLAGS} ${BRANCHES_WITHIN_32B_BOUNDARIES}")
endif()
if (WITH_COVERAGE)
set(COMPILER_FLAGS "${COMPILER_FLAGS} -fprofile-instr-generate -fcoverage-mapping")
# If we want to disable coverage for specific translation units
set(WITHOUT_COVERAGE "-fno-profile-instr-generate -fno-coverage-mapping")
endif()
endif ()
set (COMPILER_FLAGS "${COMPILER_FLAGS}")

View File

@ -2,13 +2,6 @@ set (DEFAULT_LIBS "-nodefaultlibs")
set (DEFAULT_LIBS "${DEFAULT_LIBS} ${COVERAGE_OPTION} -lc -lm -lpthread -ldl")
if (COMPILER_GCC)
set (DEFAULT_LIBS "${DEFAULT_LIBS} -lgcc_eh")
if (ARCH_AARCH64)
set (DEFAULT_LIBS "${DEFAULT_LIBS} -lgcc")
endif ()
endif ()
message(STATUS "Default libraries: ${DEFAULT_LIBS}")
set(CMAKE_CXX_STANDARD_LIBRARIES ${DEFAULT_LIBS})

View File

@ -1,9 +1,5 @@
set (DEFAULT_LIBS "-nodefaultlibs")
if (NOT COMPILER_CLANG)
message (FATAL_ERROR "FreeBSD build is supported only for Clang")
endif ()
if (${CMAKE_SYSTEM_PROCESSOR} STREQUAL "amd64")
execute_process (COMMAND ${CMAKE_CXX_COMPILER} --print-file-name=libclang_rt.builtins-x86_64.a OUTPUT_VARIABLE BUILTINS_LIBRARY OUTPUT_STRIP_TRAILING_WHITESPACE)
else ()

View File

@ -11,8 +11,6 @@ if (COMPILER_CLANG)
if (NOT EXISTS "${BUILTINS_LIBRARY}")
set (BUILTINS_LIBRARY "-lgcc")
endif ()
else ()
set (BUILTINS_LIBRARY "-lgcc")
endif ()
if (OS_ANDROID)

View File

@ -8,12 +8,6 @@ option (SANITIZE "Enable one of the code sanitizers" "")
set (SAN_FLAGS "${SAN_FLAGS} -g -fno-omit-frame-pointer -DSANITIZER")
# gcc with -nodefaultlibs does not add sanitizer libraries
# with -static-libasan and similar
macro(add_explicit_sanitizer_library lib)
target_link_libraries(global-libs INTERFACE "-Wl,-static -l${lib} -Wl,-Bdynamic")
endmacro()
if (SANITIZE)
if (SANITIZE STREQUAL "address")
# LLVM-15 has a bug in Address Sanitizer, preventing the usage of 'sanitize-address-use-after-scope',
@ -28,9 +22,6 @@ if (SANITIZE)
if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -static-libasan")
endif ()
if (COMPILER_GCC)
add_explicit_sanitizer_library(asan)
endif()
elseif (SANITIZE STREQUAL "memory")
# MemorySanitizer flags are set according to the official documentation:
@ -58,11 +49,6 @@ if (SANITIZE)
set (TSAN_FLAGS "-fsanitize=thread")
if (COMPILER_CLANG)
set (TSAN_FLAGS "${TSAN_FLAGS} -fsanitize-blacklist=${CMAKE_SOURCE_DIR}/tests/tsan_suppressions.txt")
else()
set (MESSAGE "TSAN suppressions was not passed to the compiler (since the compiler is not clang)\n")
set (MESSAGE "${MESSAGE}Use the following command to pass them manually:\n")
set (MESSAGE "${MESSAGE} export TSAN_OPTIONS=\"$TSAN_OPTIONS suppressions=${CMAKE_SOURCE_DIR}/tests/tsan_suppressions.txt\"")
message (WARNING "${MESSAGE}")
endif()
@ -74,9 +60,6 @@ if (SANITIZE)
if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -static-libtsan")
endif ()
if (COMPILER_GCC)
add_explicit_sanitizer_library(tsan)
endif()
elseif (SANITIZE STREQUAL "undefined")
set (UBSAN_FLAGS "-fsanitize=undefined -fno-sanitize-recover=all -fno-sanitize=float-divide-by-zero")
@ -91,11 +74,6 @@ if (SANITIZE)
endif()
if (COMPILER_CLANG)
set (UBSAN_FLAGS "${UBSAN_FLAGS} -fsanitize-blacklist=${CMAKE_SOURCE_DIR}/tests/ubsan_suppressions.txt")
else()
set (MESSAGE "UBSAN suppressions was not passed to the compiler (since the compiler is not clang)\n")
set (MESSAGE "${MESSAGE}Use the following command to pass them manually:\n")
set (MESSAGE "${MESSAGE} export UBSAN_OPTIONS=\"$UBSAN_OPTIONS suppressions=${CMAKE_SOURCE_DIR}/tests/ubsan_suppressions.txt\"")
message (WARNING "${MESSAGE}")
endif()
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${SAN_FLAGS} ${UBSAN_FLAGS}")
@ -106,9 +84,6 @@ if (SANITIZE)
if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -static-libubsan")
endif ()
if (COMPILER_GCC)
add_explicit_sanitizer_library(ubsan)
endif()
# llvm-tblgen, that is used during LLVM build, doesn't work with UBSan.
set (ENABLE_EMBEDDED_COMPILER 0 CACHE BOOL "")

View File

@ -1,8 +1,6 @@
# Compiler
if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
set (COMPILER_GCC 1)
elseif (CMAKE_CXX_COMPILER_ID MATCHES "AppleClang")
if (CMAKE_CXX_COMPILER_ID MATCHES "AppleClang")
set (COMPILER_CLANG 1) # Safe to treat AppleClang as a regular Clang, in general.
elseif (CMAKE_CXX_COMPILER_ID MATCHES "Clang")
set (COMPILER_CLANG 1)
@ -18,16 +16,8 @@ message (STATUS "Using compiler:\n${COMPILER_SELF_IDENTIFICATION}")
set (CLANG_MINIMUM_VERSION 15)
set (XCODE_MINIMUM_VERSION 12.0)
set (APPLE_CLANG_MINIMUM_VERSION 12.0.0)
set (GCC_MINIMUM_VERSION 11)
if (COMPILER_GCC)
message (FATAL_ERROR "Compilation with GCC is unsupported. Please use Clang instead.")
if (CMAKE_CXX_COMPILER_VERSION VERSION_LESS ${GCC_MINIMUM_VERSION})
message (FATAL_ERROR "Compilation with GCC version ${CMAKE_CXX_COMPILER_VERSION} is unsupported, the minimum required version is ${GCC_MINIMUM_VERSION}.")
endif ()
elseif (COMPILER_CLANG)
if (COMPILER_CLANG)
if (CMAKE_CXX_COMPILER_ID MATCHES "AppleClang")
# (Experimental!) Specify "-DALLOW_APPLECLANG=ON" when running CMake configuration step, if you want to experiment with using it.
if (NOT ALLOW_APPLECLANG AND NOT DEFINED ENV{ALLOW_APPLECLANG})
@ -58,9 +48,7 @@ if (LINKER_NAME MATCHES "gold")
endif ()
if (NOT LINKER_NAME)
if (COMPILER_GCC)
find_program (LLD_PATH NAMES "ld.lld")
elseif (COMPILER_CLANG)
if (COMPILER_CLANG)
if (OS_LINUX)
if (NOT ARCH_S390X) # s390x doesnt support lld
find_program (LLD_PATH NAMES "ld.lld-${COMPILER_VERSION_MAJOR}" "ld.lld")
@ -69,10 +57,7 @@ if (NOT LINKER_NAME)
endif ()
if (OS_LINUX)
if (LLD_PATH)
if (COMPILER_GCC)
# GCC driver requires one of supported linker names like "lld".
set (LINKER_NAME "lld")
else ()
if (COMPILER_CLANG)
# Clang driver simply allows full linker path.
set (LINKER_NAME ${LLD_PATH})
endif ()
@ -91,8 +76,6 @@ if (LINKER_NAME)
configure_file ("${CMAKE_CURRENT_SOURCE_DIR}/cmake/ld.lld.in" "${LLD_WRAPPER}" @ONLY)
set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} --ld-path=${LLD_WRAPPER}")
else ()
set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fuse-ld=${LINKER_NAME}")
endif ()
endif ()
@ -105,9 +88,7 @@ endif()
# Archiver
if (COMPILER_GCC)
find_program (LLVM_AR_PATH NAMES "llvm-ar" "llvm-ar-15" "llvm-ar-14" "llvm-ar-13" "llvm-ar-12")
else ()
if (COMPILER_CLANG)
find_program (LLVM_AR_PATH NAMES "llvm-ar-${COMPILER_VERSION_MAJOR}" "llvm-ar")
endif ()
@ -119,9 +100,7 @@ message(STATUS "Using archiver: ${CMAKE_AR}")
# Ranlib
if (COMPILER_GCC)
find_program (LLVM_RANLIB_PATH NAMES "llvm-ranlib" "llvm-ranlib-15" "llvm-ranlib-14" "llvm-ranlib-13" "llvm-ranlib-12")
else ()
if (COMPILER_CLANG)
find_program (LLVM_RANLIB_PATH NAMES "llvm-ranlib-${COMPILER_VERSION_MAJOR}" "llvm-ranlib")
endif ()
@ -133,9 +112,7 @@ message(STATUS "Using ranlib: ${CMAKE_RANLIB}")
# Install Name Tool
if (COMPILER_GCC)
find_program (LLVM_INSTALL_NAME_TOOL_PATH NAMES "llvm-install-name-tool" "llvm-install-name-tool-15" "llvm-install-name-tool-14" "llvm-install-name-tool-13" "llvm-install-name-tool-12")
else ()
if (COMPILER_CLANG)
find_program (LLVM_INSTALL_NAME_TOOL_PATH NAMES "llvm-install-name-tool-${COMPILER_VERSION_MAJOR}" "llvm-install-name-tool")
endif ()
@ -147,9 +124,7 @@ message(STATUS "Using install-name-tool: ${CMAKE_INSTALL_NAME_TOOL}")
# Objcopy
if (COMPILER_GCC)
find_program (OBJCOPY_PATH NAMES "llvm-objcopy" "llvm-objcopy-15" "llvm-objcopy-14" "llvm-objcopy-13" "llvm-objcopy-12" "objcopy")
else ()
if (COMPILER_CLANG)
find_program (OBJCOPY_PATH NAMES "llvm-objcopy-${COMPILER_VERSION_MAJOR}" "llvm-objcopy" "objcopy")
endif ()
@ -161,9 +136,7 @@ endif ()
# Strip
if (COMPILER_GCC)
find_program (STRIP_PATH NAMES "llvm-strip" "llvm-strip-15" "llvm-strip-14" "llvm-strip-13" "llvm-strip-12" "strip")
else ()
if (COMPILER_CLANG)
find_program (STRIP_PATH NAMES "llvm-strip-${COMPILER_VERSION_MAJOR}" "llvm-strip" "strip")
endif ()

View File

@ -47,115 +47,4 @@ if (COMPILER_CLANG)
no_warning(enum-constexpr-conversion) # breaks magic-enum library in clang-16
no_warning(unsafe-buffer-usage) # too aggressive
# TODO Enable conversion, sign-conversion, double-promotion warnings.
elseif (COMPILER_GCC)
# Add compiler options only to c++ compiler
function(add_cxx_compile_options option)
add_compile_options("$<$<STREQUAL:$<TARGET_PROPERTY:LINKER_LANGUAGE>,CXX>:${option}>")
endfunction()
# Warn about boolean expression compared with an integer value different from true/false
add_cxx_compile_options(-Wbool-compare)
# Warn whenever a pointer is cast such that the required alignment of the target is increased.
add_cxx_compile_options(-Wcast-align)
# Warn whenever a pointer is cast so as to remove a type qualifier from the target type.
add_cxx_compile_options(-Wcast-qual)
# Warn when deleting a pointer to incomplete type, which may cause undefined behavior at runtime
add_cxx_compile_options(-Wdelete-incomplete)
# Warn if a requested optimization pass is disabled. Code is too big or too complex
add_cxx_compile_options(-Wdisabled-optimization)
# Warn about duplicated conditions in an if-else-if chain
add_cxx_compile_options(-Wduplicated-cond)
# Warn about a comparison between values of different enumerated types
add_cxx_compile_options(-Wenum-compare)
# Warn about uninitialized variables that are initialized with themselves
add_cxx_compile_options(-Winit-self)
# Warn about logical not used on the left hand side operand of a comparison
add_cxx_compile_options(-Wlogical-not-parentheses)
# Warn about suspicious uses of logical operators in expressions
add_cxx_compile_options(-Wlogical-op)
# Warn if there exists a path from the function entry to a use of the variable that is uninitialized.
add_cxx_compile_options(-Wmaybe-uninitialized)
# Warn when the indentation of the code does not reflect the block structure
add_cxx_compile_options(-Wmisleading-indentation)
# Warn if a global function is defined without a previous declaration - disabled because of build times
# add_cxx_compile_options(-Wmissing-declarations)
# Warn if a user-supplied include directory does not exist
add_cxx_compile_options(-Wmissing-include-dirs)
# Obvious
add_cxx_compile_options(-Wnon-virtual-dtor)
# Obvious
add_cxx_compile_options(-Wno-return-local-addr)
# This warning is disabled due to false positives if compiled with libc++: https://gcc.gnu.org/bugzilla/show_bug.cgi?id=90037
#add_cxx_compile_options(-Wnull-dereference)
# Obvious
add_cxx_compile_options(-Wodr)
# Obvious
add_cxx_compile_options(-Wold-style-cast)
# Warn when a function declaration hides virtual functions from a base class
# add_cxx_compile_options(-Woverloaded-virtual)
# Warn about placement new expressions with undefined behavior
add_cxx_compile_options(-Wplacement-new=2)
# Warn about anything that depends on the size of a function type or of void
add_cxx_compile_options(-Wpointer-arith)
# Warn if anything is declared more than once in the same scope
add_cxx_compile_options(-Wredundant-decls)
# Member initialization reordering
add_cxx_compile_options(-Wreorder)
# Obvious
add_cxx_compile_options(-Wshadow)
# Warn if left shifting a negative value
add_cxx_compile_options(-Wshift-negative-value)
# Warn about a definition of an unsized deallocation function
add_cxx_compile_options(-Wsized-deallocation)
# Warn when the sizeof operator is applied to a parameter that is declared as an array in a function definition
add_cxx_compile_options(-Wsizeof-array-argument)
# Warn for suspicious length parameters to certain string and memory built-in functions if the argument uses sizeof
add_cxx_compile_options(-Wsizeof-pointer-memaccess)
# Warn about overriding virtual functions that are not marked with the override keyword
add_cxx_compile_options(-Wsuggest-override)
# Warn whenever a switch statement has an index of boolean type and the case values are outside the range of a boolean type
add_cxx_compile_options(-Wswitch-bool)
# Warn if a self-comparison always evaluates to true or false
add_cxx_compile_options(-Wtautological-compare)
# Warn about trampolines generated for pointers to nested functions
add_cxx_compile_options(-Wtrampolines)
# Obvious
add_cxx_compile_options(-Wunused)
add_cxx_compile_options(-Wundef)
# Warn if vector operation is not implemented via SIMD capabilities of the architecture
add_cxx_compile_options(-Wvector-operation-performance)
# Warn when a literal 0 is used as null pointer constant.
add_cxx_compile_options(-Wzero-as-null-pointer-constant)
# The following warnings are generally useful but had to be disabled because of compiler bugs with older GCCs.
# XXX: We should try again on more recent GCCs (--> see CMake variable GCC_MINIMUM_VERSION).
# gcc10 stuck with this option while compiling GatherUtils code, anyway there are builds with clang that will warn
add_cxx_compile_options(-Wno-sequence-point)
# gcc10 false positive with this warning in MergeTreePartition.cpp
# inlined from 'void writeHexByteLowercase(UInt8, void*)' at ../src/Common/hex.h:39:11,
# inlined from 'DB::String DB::MergeTreePartition::getID(const DB::Block&) const' at ../src/Storages/MergeTree/MergeTreePartition.cpp:85:30:
# ../contrib/libc-headers/x86_64-linux-gnu/bits/string_fortified.h:34:33: error: writing 2 bytes into a region of size 0 [-Werror=stringop-overflow=]
# 34 | return __builtin___memcpy_chk (__dest, __src, __len, __bos0 (__dest));
# For some reason (bug in gcc?) macro 'GCC diagnostic ignored "-Wstringop-overflow"' doesn't help.
add_cxx_compile_options(-Wno-stringop-overflow)
# reinterpretAs.cpp:182:31: error: void* memcpy(void*, const void*, size_t) copying an object of non-trivial type
# using ToFieldType = using FieldType = using UUID = struct StrongTypedef<wide::integer<128, unsigned int>, DB::UUIDTag>
# {aka struct StrongTypedef<wide::integer<128, unsigned int>, DB::UUIDTag>} from an array of const char8_t
add_cxx_compile_options(-Wno-error=class-memaccess)
# Maybe false positive...
# In file included from /home/jakalletti/ClickHouse/ClickHouse/contrib/libcxx/include/memory:673,
# In function void std::__1::__libcpp_operator_delete(_Args ...) [with _Args = {void*, long unsigned int}],
# inlined from void std::__1::__do_deallocate_handle_size(void*, size_t, _Args ...) [with _Args = {}] at /home/jakalletti/ClickHouse/ClickHouse/contrib/libcxx/include/new:271:34,
# inlined from void std::__1::__libcpp_deallocate(void*, size_t, size_t) at /home/jakalletti/ClickHouse/ClickHouse/contrib/libcxx/include/new:285:41,
# inlined from constexpr void std::__1::allocator<_Tp>::deallocate(_Tp*, size_t) [with _Tp = char] at /home/jakalletti/ClickHouse/ClickHouse/contrib/libcxx/include/memory:849:39,
# inlined from static constexpr void std::__1::allocator_traits<_Alloc>::deallocate(std::__1::allocator_traits<_Alloc>::allocator_type&, std::__1::allocator_traits<_Alloc>::pointer, std::__1::allocator_traits<_Alloc>::size_type) [with _Alloc = std::__1::allocator<char>] at /home/jakalletti/ClickHouse/ClickHouse/contrib/libcxx/include/__memory/allocator_traits.h:476:24,
# inlined from std::__1::basic_string<_CharT, _Traits, _Allocator>::~basic_string() [with _CharT = char; _Traits = std::__1::char_traits<char>; _Allocator = std::__1::allocator<char>] at /home/jakalletti/ClickHouse/ClickHouse/contrib/libcxx/include/string:2219:35,
# inlined from std::__1::basic_string<_CharT, _Traits, _Allocator>::~basic_string() [with _CharT = char; _Traits = std::__1::char_traits<char>; _Allocator = std::__1::allocator<char>] at /home/jakalletti/ClickHouse/ClickHouse/contrib/libcxx/include/string:2213:1,
# inlined from DB::JSONBuilder::JSONMap::Pair::~Pair() at /home/jakalletti/ClickHouse/ClickHouse/src/Common/JSONBuilder.h:90:12,
# inlined from void DB::JSONBuilder::JSONMap::add(std::__1::string, DB::JSONBuilder::ItemPtr) at /home/jakalletti/ClickHouse/ClickHouse/src/Common/JSONBuilder.h:97:68,
# inlined from virtual void DB::ExpressionStep::describeActions(DB::JSONBuilder::JSONMap&) const at /home/jakalletti/ClickHouse/ClickHouse/src/Processors/QueryPlan/ExpressionStep.cpp:102:12:
# /home/jakalletti/ClickHouse/ClickHouse/contrib/libcxx/include/new:247:20: error: void operator delete(void*, size_t) called on a pointer to an unallocated object 7598543875853023301 [-Werror=free-nonheap-object]
add_cxx_compile_options(-Wno-error=free-nonheap-object)
# AggregateFunctionAvg.h:203:100: error: this pointer is null [-Werror=nonnull]
add_cxx_compile_options(-Wno-error=nonnull)
endif ()

View File

@ -9,8 +9,6 @@ if (WITH_COVERAGE)
# disable coverage for contib files and build with optimisations
if (COMPILER_CLANG)
add_compile_options(-O3 -DNDEBUG -finline-functions -finline-hint-functions ${WITHOUT_COVERAGE_LIST})
else()
add_compile_options(-O3 -DNDEBUG -finline-functions ${WITHOUT_COVERAGE_LIST})
endif()
endif()

View File

@ -1,9 +1,4 @@
# disable grpc due to conflicts of abseil (required by grpc) dynamic annotations with libtsan.a
if (SANITIZE STREQUAL "thread" AND COMPILER_GCC)
set(ENABLE_GRPC_DEFAULT OFF)
else()
set(ENABLE_GRPC_DEFAULT ${ENABLE_LIBRARIES})
endif()
set(ENABLE_GRPC_DEFAULT ${ENABLE_LIBRARIES})
option(ENABLE_GRPC "Use gRPC" ${ENABLE_GRPC_DEFAULT})
if(NOT ENABLE_GRPC)

View File

@ -69,11 +69,6 @@ if (USE_MUSL)
target_compile_definitions(cxx PUBLIC -D_LIBCPP_HAS_MUSL_LIBC=1)
endif ()
# Override the deduced attribute support that causes error.
if (OS_DARWIN AND COMPILER_GCC)
add_compile_definitions(_LIBCPP_INIT_PRIORITY_MAX)
endif ()
target_compile_options(cxx PUBLIC $<$<COMPILE_LANGUAGE:CXX>:-nostdinc++>)
# Third party library may have substandard code.
@ -84,11 +79,6 @@ target_compile_definitions(cxx PUBLIC -D_LIBCPP_ENABLE_THREAD_SAFETY_ANNOTATIONS
target_link_libraries(cxx PUBLIC cxxabi)
# For __udivmodti4, __divmodti4.
if (OS_DARWIN AND COMPILER_GCC)
target_link_libraries(cxx PRIVATE gcc)
endif ()
install(
TARGETS cxx
EXPORT global

contrib/libhdfs3 vendored

@ -1 +1 @@
Subproject commit 9ee3ce77215fca83b7fdfcfe2186a3db0d0bdb74
Subproject commit 3c91d96ff29fe5928f055519c6d979c4b104db9e

View File

@ -1,9 +1,3 @@
# once fixed, please remove similar places in CMakeLists of libuv users (search "ch_contrib::uv")
if (OS_DARWIN AND COMPILER_GCC)
message (WARNING "libuv cannot be built with GCC in macOS due to a bug: https://gcc.gnu.org/bugzilla/show_bug.cgi?id=93082")
return()
endif()
# This file is a modified version of contrib/libuv/CMakeLists.txt
set (SOURCE_DIR "${CMAKE_SOURCE_DIR}/contrib/libuv")

View File

@ -18,13 +18,13 @@ RUN apt-get update \
# and MEMORY_LIMIT_EXCEEDED exceptions in Functional tests (total memory limit in Functional tests is ~55.24 GiB).
# TSAN will flush shadow memory when reaching this limit.
# It may cause false-negatives, but it's better than OOM.
RUN echo "TSAN_OPTIONS='verbosity=1000 halt_on_error=1 history_size=7 memory_limit_mb=46080'" >> /etc/environment
RUN echo "TSAN_OPTIONS='verbosity=1000 halt_on_error=1 history_size=7 memory_limit_mb=46080 second_deadlock_stack=1'" >> /etc/environment
RUN echo "UBSAN_OPTIONS='print_stacktrace=1'" >> /etc/environment
RUN echo "MSAN_OPTIONS='abort_on_error=1 poison_in_dtor=1'" >> /etc/environment
RUN echo "LSAN_OPTIONS='suppressions=/usr/share/clickhouse-test/config/lsan_suppressions.txt'" >> /etc/environment
# Sanitizer options for current shell (not current, but the one that will be spawned on "docker run")
# (but w/o verbosity for TSAN, otherwise test.reference will not match)
ENV TSAN_OPTIONS='halt_on_error=1 history_size=7 memory_limit_mb=46080'
ENV TSAN_OPTIONS='halt_on_error=1 history_size=7 memory_limit_mb=46080 second_deadlock_stack=1'
ENV UBSAN_OPTIONS='print_stacktrace=1'
ENV MSAN_OPTIONS='abort_on_error=1 poison_in_dtor=1'

View File

@ -108,6 +108,12 @@ RUN set -x \
&& echo 'dockremap:165536:65536' >> /etc/subuid \
&& echo 'dockremap:165536:65536' >> /etc/subgid
# Same options as in test/base/Dockerfile
# (in case you need to override them in tests)
ENV TSAN_OPTIONS='halt_on_error=1 history_size=7 memory_limit_mb=46080 second_deadlock_stack=1'
ENV UBSAN_OPTIONS='print_stacktrace=1'
ENV MSAN_OPTIONS='abort_on_error=1 poison_in_dtor=1'
EXPOSE 2375
ENTRYPOINT ["dockerd-entrypoint.sh"]
CMD ["sh", "-c", "pytest $PYTEST_OPTS"]

View File

@ -13,7 +13,7 @@ You can install pre-built ClickHouse as described in [Quick Start](https://click
The build works on x86_64 (Intel) and arm64 (Apple Silicon) based on macOS 10.15 (Catalina) or higher with Homebrew's vanilla Clang.
:::note
It is also possible to compile with Apple's XCode `apple-clang` or Homebrew's `gcc`, but it's strongly discouraged.
It is also possible to compile with Apple's XCode `apple-clang`, but it's strongly discouraged.
:::
## Install Homebrew {#install-homebrew}
@ -75,20 +75,6 @@ cmake --open .
# The resulting binary will be created at: ./programs/Debug/clickhouse
```
To build using Homebrew's vanilla GCC compiler (this option is only for development experiments, and is **absolutely not recommended** unless you really know what you are doing):
``` bash
cd ClickHouse
mkdir build
export PATH=$(brew --prefix binutils)/bin:$PATH
export PATH=$(brew --prefix gcc)/bin:$PATH
export CC=$(brew --prefix gcc)/bin/gcc-11
export CXX=$(brew --prefix gcc)/bin/g++-11
cmake -G Ninja -DCMAKE_BUILD_TYPE=RelWithDebInfo -S . -B build
cmake --build build
# The resulting binary will be created at: build/programs/clickhouse
```
## Caveats {#caveats}
If you intend to run `clickhouse-server`, make sure to increase the systems `maxfiles` variable.

View File

@ -140,3 +140,4 @@ DESCRIBE TABLE test_database.test_table;
## Related content
- Blog: [ClickHouse and PostgreSQL - a match made in data heaven - part 1](https://clickhouse.com/blog/migrating-data-between-clickhouse-postgres)
- Blog: [ClickHouse and PostgreSQL - a Match Made in Data Heaven - part 2](https://clickhouse.com/blog/migrating-data-between-clickhouse-postgres-part-2)

View File

@ -177,4 +177,6 @@ CREATE TABLE pg_table_schema_with_dots (a UInt32)
- [Using PostgreSQL as a dictionary source](../../../sql-reference/dictionaries/index.md#dictionary-sources#dicts-external_dicts_dict_sources-postgresql)
## Related content
- Blog: [ClickHouse and PostgreSQL - a match made in data heaven - part 1](https://clickhouse.com/blog/migrating-data-between-clickhouse-postgres)
- Blog: [ClickHouse and PostgreSQL - a Match Made in Data Heaven - part 2](https://clickhouse.com/blog/migrating-data-between-clickhouse-postgres-part-2)

View File

@ -122,3 +122,7 @@ FROM test.mv_visits
GROUP BY StartDate
ORDER BY StartDate;
```
## Related Content
- Blog: [Using Aggregate Combinators in ClickHouse](https://clickhouse.com/blog/aggregate-functions-combinators-in-clickhouse-for-arrays-maps-and-states)

View File

@ -191,3 +191,7 @@ is performance. In practice, users often search for multiple terms at once. For
'%big%'` can be evaluated directly using an inverted index by forming the union of the row id lists for terms "little" and "big". This also
means that the parameter `GRANULARITY` supplied to index creation has no meaning (it may be removed from the syntax in the future).
:::
## Related Content
- Blog: [Introducing Inverted Indices in ClickHouse](https://clickhouse.com/blog/clickhouse-search-with-inverted-indices)

View File

@ -186,3 +186,7 @@ ARRAY JOIN
When requesting data, use the [sumMap(key, value)](../../../sql-reference/aggregate-functions/reference/summap.md) function for aggregation of `Map`.
For nested data structure, you do not need to specify its columns in the tuple of columns for summation.
## Related Content
- Blog: [Using Aggregate Combinators in ClickHouse](https://clickhouse.com/blog/aggregate-functions-combinators-in-clickhouse-for-arrays-maps-and-states)

View File

@ -112,3 +112,7 @@ If setting `keeper_map_strict_mode` is set to `true`, fetching and updating data
```sql
ALTER TABLE keeper_map_table UPDATE v1 = v1 * 10 + 2 WHERE key LIKE 'some%' AND v3 > 3.1;
```
## Related content
- Blog: [Building a Real-time Analytics App with ClickHouse and Hex](https://clickhouse.com/blog/building-real-time-applications-with-clickhouse-and-hex-notebook-keeper-engine)

View File

@ -2499,7 +2499,9 @@ LIMIT 20
We welcome exact and improved solutions here.
# Related Content
## Related Content
- [Git commits and our community](https://clickhouse.com/blog/clickhouse-git-community-commits)
- [Window and array functions for Git commit sequences](https://clickhouse.com/blog/clickhouse-window-array-functions-git-commits)
- Blog: [Git commits and our community](https://clickhouse.com/blog/clickhouse-git-community-commits)
- Blog: [Window and array functions for Git commit sequences](https://clickhouse.com/blog/clickhouse-window-array-functions-git-commits)
- Blog: [Building a Real-time Analytics App with ClickHouse and Hex](https://clickhouse.com/blog/building-real-time-applications-with-clickhouse-and-hex-notebook-keeper-engine)
- Blog: [A Story of Open-source GitHub Activity using ClickHouse + Grafana](https://clickhouse.com/blog/introduction-to-clickhouse-and-grafana-webinar)

View File

@ -61,3 +61,7 @@ FROM system.opentelemetry_span_log
```
In case of any errors, the part of the log data for which the error has occurred will be silently lost. Check the server log for error messages if the data does not arrive.
## Related Content
- Blog: [Building an Observability Solution with ClickHouse - Part 2 - Traces](https://clickhouse.com/blog/storing-traces-and-spans-open-telemetry-in-clickhouse)

View File

@ -124,3 +124,7 @@ Finally, entries in the query cache are not shared between users due to security
row policy on a table by running the same query as another user B for whom no such policy exists. However, if necessary, cache entries can
be marked accessible by other users (i.e. shared) by supplying setting
[query_cache_share_between_users](settings/settings.md#query-cache-share-between-users).
## Related Content
- Blog: [Introducing the ClickHouse Query Cache](https://clickhouse.com/blog/introduction-to-the-clickhouse-query-cache-and-design)

View File

@ -1045,7 +1045,7 @@ Default value: `0`.
## background_pool_size {#background_pool_size}
Sets the number of threads performing background merges and mutations for tables with MergeTree engines. This setting can also be applied at server startup from the `default` profile configuration for backward compatibility. You can only increase the number of threads at runtime. To lower the number of threads you have to restart the server. By adjusting this setting, you manage CPU and disk load. A smaller pool size utilizes less CPU and disk resources, but background processes advance slower, which might eventually impact query performance.
Before changing it, please also take a look at related MergeTree settings, such as [number_of_free_entries_in_pool_to_lower_max_size_of_merge](../../operations/settings/merge-tree-settings.md#number-of-free-entries-in-pool-to-lower-max-size-of-merge) and [number_of_free_entries_in_pool_to_execute_mutation](../../operations/settings/merge-tree-settings.md#number-of-free-entries-in-pool-to-execute-mutation).
@ -1063,8 +1063,8 @@ Default value: 16.
## background_merges_mutations_concurrency_ratio {#background_merges_mutations_concurrency_ratio}
Sets a ratio between the number of threads and the number of background merges and mutations that can be executed concurrently. For example if the ratio equals to 2 and
`background_pool_size` is set to 16 then ClickHouse can execute 32 background merges concurrently. This is possible, because background operation could be suspended and postponed. This is needed to give small merges more execution priority. You can only increase this ratio at runtime. To lower it you have to restart the server.
Sets a ratio between the number of threads and the number of background merges and mutations that can be executed concurrently. For example, if the ratio equals to 2 and
`background_pool_size` is set to 16 then ClickHouse can execute 32 background merges concurrently. This is possible, because background operations could be suspended and postponed. This is needed to give small merges more execution priority. You can only increase this ratio at runtime. To lower it you have to restart the server.
The same as for `background_pool_size` setting `background_merges_mutations_concurrency_ratio` could be applied from the `default` profile for backward compatibility.
Possible values:
@ -1079,6 +1079,33 @@ Default value: 2.
<background_merges_mutations_concurrency_ratio>3</background_merges_mutations_concurrency_ratio>
```
## merges_mutations_memory_usage_soft_limit {#merges_mutations_memory_usage_soft_limit}
Sets the limit on how much RAM is allowed to be used for performing merge and mutation operations.
Zero means unlimited.
If ClickHouse reaches this limit, it won't schedule any new background merge or mutation operations but will continue to execute already scheduled tasks.
Possible values:
- Any positive integer.
**Example**
```xml
<merges_mutations_memory_usage_soft_limit>0</merges_mutations_memory_usage_soft_limit>
```
## merges_mutations_memory_usage_to_ram_ratio {#merges_mutations_memory_usage_to_ram_ratio}
The default `merges_mutations_memory_usage_soft_limit` value is calculated as `memory_amount * merges_mutations_memory_usage_to_ram_ratio`.
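For example, on a server with 64 GiB of RAM, the default ratio of `0.5` gives a soft limit of 32 GiB.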
Default value: `0.5`.
**See also**
- [max_memory_usage](../../operations/settings/query-complexity.md#settings_max_memory_usage)
- [merges_mutations_memory_usage_soft_limit](#merges_mutations_memory_usage_soft_limit)
## background_merges_mutations_scheduling_policy {#background_merges_mutations_scheduling_policy}
Algorithm used to select the next merge or mutation to be executed by the background thread pool. The policy may be changed at runtime without a server restart.

View File

@ -40,6 +40,39 @@ SETTINGS additional_table_filters = (('table_1', 'x != 2'))
└───┴──────┘
```
## additional_result_filter
An additional filter expression to apply to the result of a `SELECT` query.
This setting is not applied to any subquery.
Default value: `''`.
**Example**
``` sql
insert into table_1 values (1, 'a'), (2, 'bb'), (3, 'ccc'), (4, 'dddd');
```
```response
┌─x─┬─y────┐
│ 1 │ a │
│ 2 │ bb │
│ 3 │ ccc │
│ 4 │ dddd │
└───┴──────┘
```
```sql
SELECT *
FROM table_1
SETTINGS additional_result_filter = 'x != 2'
```
```response
┌─x─┬─y────┐
│ 1 │ a │
│ 3 │ ccc │
│ 4 │ dddd │
└───┴──────┘
```
## allow_nondeterministic_mutations {#allow_nondeterministic_mutations}
User-level setting that allows mutations on replicated tables to make use of non-deterministic functions such as `dictGet`.

View File

@ -6,6 +6,10 @@ sidebar_label: clickhouse-local
# clickhouse-local
## Related Content
- Blog: [Extracting, Converting, and Querying Data in Local Files using clickhouse-local](https://clickhouse.com/blog/extracting-converting-querying-local-files-with-sql-clickhouse-local)
## When to use clickhouse-local vs. ClickHouse
`clickhouse-local` is an easy-to-use version of ClickHouse that is ideal for developers who need to perform fast processing on local and remote files using SQL without having to install a full database server. With `clickhouse-local`, developers can use SQL commands (using the [ClickHouse SQL dialect](../../sql-reference/index.md)) directly from the command line, providing a simple and efficient way to access ClickHouse features without the need for a full ClickHouse installation. One of the main benefits of `clickhouse-local` is that it is already included when installing [clickhouse-client](https://clickhouse.com/docs/en/integrations/sql-clients/clickhouse-client-local). This means that developers can get started with `clickhouse-local` quickly, without the need for a complex installation process.

View File

@ -285,3 +285,8 @@ FROM people
│ [3,2] │ [11.5,12.949999809265137] │
└────────┴───────────────────────────┘
```
## Related Content
- Blog: [Using Aggregate Combinators in ClickHouse](https://clickhouse.com/blog/aggregate-functions-combinators-in-clickhouse-for-arrays-maps-and-states)

View File

@ -63,3 +63,8 @@ SELECT uniqMerge(state) FROM (SELECT uniqState(UserID) AS state FROM table GROUP
## Usage Example
See [AggregatingMergeTree](../../engines/table-engines/mergetree-family/aggregatingmergetree.md) engine description.
## Related Content
- Blog: [Using Aggregate Combinators in ClickHouse](https://clickhouse.com/blog/aggregate-functions-combinators-in-clickhouse-for-arrays-maps-and-states)

View File

@ -108,3 +108,8 @@ Result:
- [map()](../../sql-reference/functions/tuple-map-functions.md#function-map) function
- [CAST()](../../sql-reference/functions/type-conversion-functions.md#type_conversion_function-cast) function
## Related content
- Blog: [Building an Observability Solution with ClickHouse - Part 2 - Traces](https://clickhouse.com/blog/storing-traces-and-spans-open-telemetry-in-clickhouse)

View File

@ -645,7 +645,7 @@ For an alternative to `date\_diff`, see function `age`.
date_diff('unit', startdate, enddate, [timezone])
```
Aliases: `dateDiff`, `DATE_DIFF`.
Aliases: `dateDiff`, `DATE_DIFF`, `timestampDiff`, `timestamp_diff`, `TIMESTAMP_DIFF`.
**Arguments**

View File

@ -194,7 +194,14 @@ Accepts a number. If the number is less than one, it returns 0. Otherwise, it ro
## roundAge(num)
Accepts a number. If the number is less than 18, it returns 0. Otherwise, it rounds the number down to a number from the set: 18, 25, 35, 45, 55.
Accepts a number. If the number is
- smaller than 1, it returns 0,
- between 1 and 17, it returns 17,
- between 18 and 24, it returns 18,
- between 25 and 34, it returns 25,
- between 35 and 44, it returns 35,
- between 45 and 54, it returns 45,
- 55 or larger, it returns 55.
## roundDown(num, arr)

View File

@ -6,7 +6,7 @@ sidebar_label: Strings
# Functions for Working with Strings
:::note
Functions for [searching](../../sql-reference/functions/string-search-functions.md) and [replacing](../../sql-reference/functions/string-replace-functions.md) in strings are described separately.
:::
@ -1193,6 +1193,42 @@ Result:
```
## concatWithSeparatorAssumeInjective
Same as concatWithSeparator, except that you need to ensure that concatWithSeparator(sep, expr1, expr2, expr3...) → result is injective; this property is used for the optimization of GROUP BY.
A function is called “injective” if it always returns a different result for different values of its arguments. In other words: different arguments never yield an identical result.
## soundex
Returns the [Soundex code](https://en.wikipedia.org/wiki/Soundex) of a string.
**Syntax**
``` sql
soundex(val)
```
**Arguments**
- `val` - Input value. [String](../data-types/string.md)
**Returned value**
- The Soundex code of the input value. [String](../data-types/string.md)
**Example**
Query:
``` sql
select soundex('aksel');
```
Result:
``` text
┌─soundex('aksel')─┐
│ A240 │
└──────────────────┘
```

View File

@ -28,3 +28,7 @@ The synchronicity of the query processing is defined by the [mutations_sync](/do
- [Mutations](/docs/en/sql-reference/statements/alter/index.md#mutations)
- [Synchronicity of ALTER Queries](/docs/en/sql-reference/statements/alter/index.md#synchronicity-of-alter-queries)
- [mutations_sync](/docs/en/operations/settings/settings.md/#mutations_sync) setting
## Related content
- Blog: [Handling Updates and Deletes in ClickHouse](https://clickhouse.com/blog/handling-updates-and-deletes-in-clickhouse)

View File

@ -61,3 +61,7 @@ For all `ALTER` queries, if `alter_sync = 2` and some replicas are not active fo
:::
For `ALTER TABLE ... UPDATE|DELETE` queries the synchronicity is defined by the [mutations_sync](/docs/en/operations/settings/settings.md/#mutations_sync) setting.
## Related content
- Blog: [Handling Updates and Deletes in ClickHouse](https://clickhouse.com/blog/handling-updates-and-deletes-in-clickhouse)

View File

@ -27,3 +27,8 @@ The synchronicity of the query processing is defined by the [mutations_sync](/do
- [Mutations](/docs/en/sql-reference/statements/alter/index.md#mutations)
- [Synchronicity of ALTER Queries](/docs/en/sql-reference/statements/alter/index.md#synchronicity-of-alter-queries)
- [mutations_sync](/docs/en/operations/settings/settings.md/#mutations_sync) setting
## Related content
- Blog: [Handling Updates and Deletes in ClickHouse](https://clickhouse.com/blog/handling-updates-and-deletes-in-clickhouse)

View File

@ -364,3 +364,4 @@ The window view is useful in the following scenarios:
## Related Content
- Blog: [Working with time series data in ClickHouse](https://clickhouse.com/blog/working-with-time-series-data-and-functions-ClickHouse)
- Blog: [Building an Observability Solution with ClickHouse - Part 2 - Traces](https://clickhouse.com/blog/storing-traces-and-spans-open-telemetry-in-clickhouse)

View File

@ -55,3 +55,7 @@ With the described implementation now we can see what can negatively affect 'DEL
- Table having a very large number of data parts
- Having a lot of data in Compact parts—in a Compact part, all columns are stored in one file.
## Related content
- Blog: [Handling Updates and Deletes in ClickHouse](https://clickhouse.com/blog/handling-updates-and-deletes-in-clickhouse)

View File

@ -18,6 +18,10 @@ FROM <left_table>
Expressions from `ON` clause and columns from `USING` clause are called “join keys”. Unless otherwise stated, join produces a [Cartesian product](https://en.wikipedia.org/wiki/Cartesian_product) from rows with matching “join keys”, which might produce results with much more rows than the source tables.
## Related Content
- Blog: [ClickHouse: A Blazingly Fast DBMS with Full SQL Join Support - Part 1](https://clickhouse.com/blog/clickhouse-fully-supports-joins)
## Supported Types of JOIN
All standard [SQL JOIN](https://en.wikipedia.org/wiki/Join_(SQL)) types are supported:

View File

@ -133,4 +133,6 @@ CREATE TABLE pg_table_schema_with_dots (a UInt32)
- [Using PostgreSQL as a dictionary source](../../sql-reference/dictionaries/index.md#dictionary-sources#dicts-external_dicts_dict_sources-postgresql)
## Related content
- Blog: [ClickHouse and PostgreSQL - a match made in data heaven - part 1](https://clickhouse.com/blog/migrating-data-between-clickhouse-postgres)
- Blog: [ClickHouse and PostgreSQL - a Match Made in Data Heaven - part 2](https://clickhouse.com/blog/migrating-data-between-clickhouse-postgres-part-2)

View File

@ -168,3 +168,15 @@ SELECT format('{} {}', 'Hello', 'World')
## trimBoth(s) {#trimboths}
Returns a string with the whitespace trimmed from both sides.
## soundex(s)
Returns the Soundex value of a string. The output type is FixedString. Example:
``` sql
select soundex('aksql');
┌─soundex('aksql')─┐
│ A240             │
└──────────────────┘
```

View File

@ -135,6 +135,7 @@ namespace CurrentMetrics
extern const Metric Revision;
extern const Metric VersionInteger;
extern const Metric MemoryTracking;
extern const Metric MergesMutationsMemoryTracking;
extern const Metric MaxDDLEntryID;
extern const Metric MaxPushedDDLEntryID;
}
@ -1225,6 +1226,25 @@ try
total_memory_tracker.setDescription("(total)");
total_memory_tracker.setMetric(CurrentMetrics::MemoryTracking);
size_t merges_mutations_memory_usage_soft_limit = server_settings.merges_mutations_memory_usage_soft_limit;
size_t default_merges_mutations_server_memory_usage = static_cast<size_t>(memory_amount * server_settings.merges_mutations_memory_usage_to_ram_ratio);
if (merges_mutations_memory_usage_soft_limit == 0 || merges_mutations_memory_usage_soft_limit > default_merges_mutations_server_memory_usage)
{
merges_mutations_memory_usage_soft_limit = default_merges_mutations_server_memory_usage;
LOG_WARNING(log, "Setting merges_mutations_memory_usage_soft_limit was set to {}"
" ({} available * {:.2f} merges_mutations_memory_usage_to_ram_ratio)",
formatReadableSizeWithBinarySuffix(merges_mutations_memory_usage_soft_limit),
formatReadableSizeWithBinarySuffix(memory_amount),
server_settings.merges_mutations_memory_usage_to_ram_ratio);
}
LOG_INFO(log, "Merges and mutations memory limit is set to {}",
formatReadableSizeWithBinarySuffix(merges_mutations_memory_usage_soft_limit));
background_memory_tracker.setSoftLimit(merges_mutations_memory_usage_soft_limit);
background_memory_tracker.setDescription("(background)");
background_memory_tracker.setMetric(CurrentMetrics::MergesMutationsMemoryTracking);
total_memory_tracker.setAllowUseJemallocMemory(server_settings.allow_use_jemalloc_memory);
auto * global_overcommit_tracker = global_context->getGlobalOvercommitTracker();

View File

@ -491,7 +491,7 @@ std::vector<std::pair<ASTPtr, StoragePtr>> BackupEntriesCollector::findTablesInD
{
/// Database or table could be replicated - so may use ZooKeeper. We need to retry.
auto zookeeper_retries_info = global_zookeeper_retries_info;
ZooKeeperRetriesControl retries_ctl("getTablesForBackup", zookeeper_retries_info);
ZooKeeperRetriesControl retries_ctl("getTablesForBackup", zookeeper_retries_info, nullptr);
retries_ctl.retryLoop([&](){ db_tables = database->getTablesForBackup(filter_by_table_name, context); });
}
catch (Exception & e)

View File

@ -20,7 +20,7 @@ WithRetries::WithRetries(Poco::Logger * log_, zkutil::GetZooKeeper get_zookeeper
WithRetries::RetriesControlHolder::RetriesControlHolder(const WithRetries * parent, const String & name)
: info(parent->global_zookeeper_retries_info)
, retries_ctl(name, info)
, retries_ctl(name, info, nullptr)
, faulty_zookeeper(parent->getFaultyZooKeeper())
{}

View File

@ -200,14 +200,10 @@ add_library(clickhouse_common_io ${clickhouse_common_io_headers} ${clickhouse_co
add_library (clickhouse_malloc OBJECT Common/malloc.cpp)
set_source_files_properties(Common/malloc.cpp PROPERTIES COMPILE_FLAGS "-fno-builtin")
if (((SANITIZE STREQUAL "thread") OR (SANITIZE STREQUAL "address")) AND COMPILER_GCC)
message(WARNING "Memory tracking is disabled, due to gcc sanitizers")
else()
add_library (clickhouse_new_delete STATIC Common/new_delete.cpp)
target_link_libraries (clickhouse_new_delete PRIVATE clickhouse_common_io)
if (TARGET ch_contrib::jemalloc)
target_link_libraries (clickhouse_new_delete PRIVATE ch_contrib::jemalloc)
endif()
add_library (clickhouse_new_delete STATIC Common/new_delete.cpp)
target_link_libraries (clickhouse_new_delete PRIVATE clickhouse_common_io)
if (TARGET ch_contrib::jemalloc)
target_link_libraries (clickhouse_new_delete PRIVATE ch_contrib::jemalloc)
endif()
if (TARGET ch_contrib::jemalloc)
@ -338,9 +334,9 @@ set_source_files_properties(
PROPERTIES COMPILE_FLAGS "${X86_INTRINSICS_FLAGS}")
if (ENABLE_QPL)
set_source_files_properties(
Compression/CompressionCodecDeflateQpl.cpp
PROPERTIES COMPILE_FLAGS "-mwaitpkg")
set_source_files_properties(
Compression/CompressionCodecDeflateQpl.cpp
PROPERTIES COMPILE_FLAGS "-mwaitpkg")
endif ()
target_link_libraries(clickhouse_common_io

View File

@ -1,6 +1,6 @@
#include <Columns/ColumnCompressed.h>
#pragma GCC diagnostic ignored "-Wold-style-cast"
#pragma clang diagnostic ignored "-Wold-style-cast"
#include <lz4.h>

View File

@ -910,9 +910,6 @@ void ColumnVector<T>::getExtremes(Field & min, Field & max) const
max = NearestFieldType<T>(cur_max);
}
#pragma GCC diagnostic ignored "-Wold-style-cast"
template <typename T>
ColumnPtr ColumnVector<T>::compress() const
{

View File

@ -53,6 +53,7 @@
M(QueryThread, "Number of query processing threads") \
M(ReadonlyReplica, "Number of Replicated tables that are currently in readonly state due to re-initialization after ZooKeeper session loss or due to startup without ZooKeeper configured.") \
M(MemoryTracking, "Total amount of memory (bytes) allocated by the server.") \
M(MergesMutationsMemoryTracking, "Total amount of memory (bytes) allocated by background tasks (merges and mutations).") \
M(EphemeralNode, "Number of ephemeral nodes hold in ZooKeeper.") \
M(ZooKeeperSession, "Number of sessions (connections) to ZooKeeper. Should be no more than one, because using more than one connection to ZooKeeper may lead to bugs due to lack of linearizability (stale reads) that ZooKeeper consistency model allows.") \
M(ZooKeeperWatch, "Number of watches (event subscriptions) in ZooKeeper.") \
@ -138,6 +139,8 @@
M(SystemReplicasThreadsActive, "Number of threads in the system.replicas thread pool running a task.") \
M(RestartReplicaThreads, "Number of threads in the RESTART REPLICA thread pool.") \
M(RestartReplicaThreadsActive, "Number of threads in the RESTART REPLICA thread pool running a task.") \
M(QueryPipelineExecutorThreads, "Number of threads in the PipelineExecutor thread pool.") \
M(QueryPipelineExecutorThreadsActive, "Number of threads in the PipelineExecutor thread pool running a task.") \
M(DistributedFilesToInsert, "Number of pending files to process for asynchronous insertion into Distributed tables. Number of files for every shard is summed.") \
M(BrokenDistributedFilesToInsert, "Number of files for asynchronous insertion into Distributed tables that has been marked as broken. This metric will starts from 0 on start. Number of files for every shard is summed.") \
M(TablesToDropQueueSize, "Number of dropped tables, that are waiting for background data removal.") \

View File

@ -96,6 +96,7 @@ using namespace std::chrono_literals;
static constexpr size_t log_peak_memory_usage_every = 1ULL << 30;
MemoryTracker total_memory_tracker(nullptr, VariableContext::Global);
MemoryTracker background_memory_tracker(&total_memory_tracker, VariableContext::User);
std::atomic<Int64> MemoryTracker::free_memory_in_allocator_arenas;
@ -528,3 +529,10 @@ void MemoryTracker::setOrRaiseProfilerLimit(Int64 value)
while ((value == 0 || old_value < value) && !profiler_limit.compare_exchange_weak(old_value, value))
;
}
bool canEnqueueBackgroundTask()
{
auto limit = background_memory_tracker.getSoftLimit();
auto amount = background_memory_tracker.get();
return limit == 0 || amount < limit;
}
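A minimal standalone sketch (not part of this commit; all names here are illustrative) of the gate that `canEnqueueBackgroundTask()` expresses and that the new `merges_mutations_memory_usage_soft_limit` documentation describes: already-scheduled merges and mutations keep running, but no new background task is admitted once the tracked amount reaches the soft limit, and a limit of zero disables the check.

```cpp
#include <atomic>
#include <cstdint>
#include <iostream>

// Illustrative stand-ins for the soft limit and current usage kept by
// background_memory_tracker in the real code.
std::atomic<int64_t> background_soft_limit{0};
std::atomic<int64_t> background_amount{0};

// Same predicate as canEnqueueBackgroundTask(): a zero limit means "unlimited".
bool can_enqueue_background_task()
{
    const int64_t limit = background_soft_limit.load(std::memory_order_relaxed);
    const int64_t amount = background_amount.load(std::memory_order_relaxed);
    return limit == 0 || amount < limit;
}

int main()
{
    background_soft_limit = 32LL << 30;   // e.g. a 32 GiB soft limit
    background_amount = 30LL << 30;       // 30 GiB already attributed to merges/mutations
    std::cout << std::boolalpha << can_enqueue_background_task() << '\n';  // true: below the limit

    background_amount = 33LL << 30;       // limit exceeded: no new tasks are admitted
    std::cout << can_enqueue_background_task() << '\n';                    // false
}
```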

View File

@ -110,6 +110,22 @@ public:
return amount.load(std::memory_order_relaxed);
}
// Merges and mutations may pass memory ownership to other threads thus in the end of execution
// MemoryTracker for background task may have a non-zero counter.
// This method is intended to fix the counter inside of background_memory_tracker.
// NOTE: We can't use alloc/free methods to do it, because they also will change the value inside
// of total_memory_tracker.
void adjustOnBackgroundTaskEnd(const MemoryTracker * child)
{
auto background_memory_consumption = child->amount.load(std::memory_order_relaxed);
amount.fetch_sub(background_memory_consumption, std::memory_order_relaxed);
// Also fix CurrentMetrics::MergesMutationsMemoryTracking
auto metric_loaded = metric.load(std::memory_order_relaxed);
if (metric_loaded != CurrentMetrics::end())
CurrentMetrics::sub(metric_loaded, background_memory_consumption);
}
Int64 getPeak() const
{
return peak.load(std::memory_order_relaxed);
@ -220,3 +236,6 @@ public:
};
extern MemoryTracker total_memory_tracker;
extern MemoryTracker background_memory_tracker;
bool canEnqueueBackgroundTask();
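A hedged, self-contained illustration (toy types only, not ClickHouse code) of why `adjustOnBackgroundTaskEnd()` subtracts the child's residual amount: when a merge hands buffer ownership to another thread, the per-task tracker finishes with a non-zero counter that has already been propagated to the background tracker, which would otherwise stay inflated.

```cpp
#include <atomic>
#include <cstdint>
#include <iostream>

// Toy tracker (illustrative only). In ClickHouse the per-task tracker's allocations
// are also propagated to background_memory_tracker; here a single counter stands in
// for that aggregated amount.
struct ToyTracker
{
    std::atomic<int64_t> amount{0};

    // Analogue of adjustOnBackgroundTaskEnd(): drop whatever the finished task still
    // holds on paper, without going through alloc/free (which in the real
    // implementation would also change the total tracker).
    void adjust_on_background_task_end(const ToyTracker & task)
    {
        amount.fetch_sub(task.amount.load(std::memory_order_relaxed), std::memory_order_relaxed);
    }
};

int main()
{
    ToyTracker background;    // stand-in for background_memory_tracker
    ToyTracker task;          // stand-in for the finished merge's tracker

    background.amount += 100; // all allocations made during the merge were propagated here
    task.amount += 40;        // ...of which 40 had ownership passed to another thread

    background.adjust_on_background_task_end(task);
    std::cout << background.amount.load() << '\n';  // 60: the handed-off part is no longer attributed here
}
```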

View File

@ -8,7 +8,6 @@
namespace DB
{
#pragma GCC diagnostic warning "-Wold-style-cast"
std::string encodeSHA256(std::string_view text)
{

View File

@ -80,9 +80,6 @@ extern const char empty_pod_array[empty_pod_array_size];
/** Base class that depend only on size of element, not on element itself.
* You can static_cast to this class if you want to insert some data regardless to the actual type T.
*/
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wnull-dereference"
template <size_t ELEMENT_SIZE, size_t initial_bytes, typename TAllocator, size_t pad_right_, size_t pad_left_>
class PODArrayBase : private boost::noncopyable, private TAllocator /// empty base optimization
{
@ -774,7 +771,6 @@ void swap(PODArray<T, initial_bytes, TAllocator, pad_right_, pad_left_> & lhs, P
{
lhs.swap(rhs);
}
#pragma GCC diagnostic pop
/// Prevent implicit template instantiation of PODArray for common numeric types

View File

@ -97,8 +97,8 @@ public:
void write(WriteBuffer & wb) const
{
writeBinary(key, wb);
writeVarUIntOverflow(count, wb);
writeVarUIntOverflow(error, wb);
writeVarUInt(count, wb);
writeVarUInt(error, wb);
}
void read(ReadBuffer & rb)

View File

@ -195,13 +195,13 @@ static void renameNoReplaceFallback(const std::string & old_path, const std::str
}
/// Do not use [[noreturn]] to avoid warnings like "code will never be executed" in other places
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wmissing-noreturn"
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wmissing-noreturn"
static void renameExchangeFallback(const std::string &, const std::string &)
{
throw Exception(ErrorCodes::UNSUPPORTED_METHOD, "System call renameat2() is not supported");
}
#pragma GCC diagnostic pop
#pragma clang diagnostic pop
void renameNoReplace(const std::string & old_path, const std::string & new_path)
{

View File

@ -10,9 +10,6 @@
#include <Common/Arena.h>
#include <Common/Stopwatch.h>
#pragma GCC diagnostic ignored "-Wframe-larger-than="
/** This test program evaluates different solutions for a simple degenerate task:
* Aggregate data by UInt8 key, calculate "avg" function on Float values.
*

View File

@ -6,13 +6,10 @@
#include <iostream>
#include <dlfcn.h>
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wunused-function"
static NO_INLINE const void * getAddress()
[[maybe_unused]] static NO_INLINE const void * getAddress()
{
return __builtin_return_address(0);
}
#pragma GCC diagnostic pop
int main(int argc, char ** argv)
{

View File

@ -5,8 +5,6 @@
/// dedicated object (namely clickhouse_malloc.o), and it will show earlier in the link command
/// than malloc libs like libjemalloc.a. As a result, these symbols get picked in time right after.
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wredundant-decls"
extern "C"
{
void *malloc(size_t size);
@ -21,7 +19,6 @@ extern "C"
void *pvalloc(size_t size);
#endif
}
#pragma GCC diagnostic pop
template<typename T>
inline void ignore(T x __attribute__((unused)))

View File

@ -1,6 +1,4 @@
#pragma once
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wold-style-cast"
#include <new>
#include <base/defines.h>
@ -219,5 +217,3 @@ inline ALWAYS_INLINE void untrackMemory(void * ptr [[maybe_unused]], std::size_t
}
}
#pragma GCC diagnostic pop

View File

@ -4,11 +4,9 @@
#include <Poco/Util/XMLConfiguration.h>
#include <Poco/XML/XMLException.h>
#pragma GCC diagnostic ignored "-Wsign-compare"
#ifdef __clang__
# pragma clang diagnostic ignored "-Wzero-as-null-pointer-constant"
# pragma clang diagnostic ignored "-Wundef"
#endif
#pragma clang diagnostic ignored "-Wsign-compare"
#pragma clang diagnostic ignored "-Wzero-as-null-pointer-constant"
#pragma clang diagnostic ignored "-Wundef"
#include <gtest/gtest.h>
#include <chrono>

View File

@ -10,8 +10,8 @@
#include <sys/wait.h>
#include <unistd.h>
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wgnu-statement-expression"
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wgnu-statement-expression"
#define HANDLE_EINTR(x) ({ \
decltype(x) eintr_wrapper_result; \
do { \
@ -202,4 +202,4 @@ bool waitForPid(pid_t pid, size_t timeout_in_seconds)
}
}
#pragma GCC diagnostic pop
#pragma clang diagnostic pop

View File

@ -13,7 +13,7 @@
#include <IO/WriteHelpers.h>
#include <IO/BufferWithOwnMemory.h>
#pragma GCC diagnostic ignored "-Wold-style-cast"
#pragma clang diagnostic ignored "-Wold-style-cast"
namespace DB

View File

@ -28,12 +28,6 @@
#include <cstring>
/// For the expansion of gtest macros.
#if defined(__clang__)
#pragma clang diagnostic ignored "-Wdeprecated"
#elif defined (__GNUC__) && __GNUC__ >= 9
#pragma GCC diagnostic ignored "-Wdeprecated-copy"
#endif
#include <gtest/gtest.h>
using namespace DB;

View File

@ -19,6 +19,7 @@
#include <Core/ExternalTable.h>
#include <Poco/Net/MessageHeader.h>
#include <base/find_symbols.h>
#include <base/scope_guard.h>
namespace DB

View File

@ -147,10 +147,7 @@ void Sha256Password::authenticate(
throw Exception(ErrorCodes::OPENSSL_ERROR, "Failed to write public key to memory. Error: {}", getOpenSSLErrors());
}
char * pem_buf = nullptr;
# pragma GCC diagnostic push
# pragma GCC diagnostic ignored "-Wold-style-cast"
int64_t pem_size = BIO_get_mem_data(mem, &pem_buf);
# pragma GCC diagnostic pop
String pem(pem_buf, pem_size);
LOG_TRACE(log, "Key: {}", pem);

View File

@ -40,6 +40,8 @@ namespace DB
M(String, temporary_data_in_cache, "", "Cache disk name for temporary data.", 0) \
M(UInt64, max_server_memory_usage, 0, "Limit on total memory usage. Zero means Unlimited.", 0) \
M(Double, max_server_memory_usage_to_ram_ratio, 0.9, "Same as max_server_memory_usage but in to ram ratio. Allows to lower max memory on low-memory systems.", 0) \
M(UInt64, merges_mutations_memory_usage_soft_limit, 0, "Limit on total memory usage for merges and mutations. Zero means Unlimited.", 0) \
M(Double, merges_mutations_memory_usage_to_ram_ratio, 0.5, "Same as merges_mutations_memory_usage_soft_limit but in to ram ratio. Allows to lower memory limit on low-memory systems.", 0) \
M(Bool, allow_use_jemalloc_memory, true, "Allows to use jemalloc memory.", 0) \
\
M(UInt64, max_concurrent_queries, 0, "Limit on total number of concurrently executed queries. Zero means Unlimited.", 0) \

View File

@ -453,8 +453,8 @@ struct SettingFieldMultiEnum
explicit operator StorageType() const { return value.getValue(); }
explicit operator Field() const { return toString(); }
SettingFieldMultiEnum & operator= (StorageType x) { changed = x != value.getValue(); value.setValue(x); return *this; }
SettingFieldMultiEnum & operator= (ValueType x) { changed = !(x == value); value = x; return *this; }
SettingFieldMultiEnum & operator= (StorageType x) { changed = true; value.setValue(x); return *this; }
SettingFieldMultiEnum & operator= (ValueType x) { changed = true; value = x; return *this; }
SettingFieldMultiEnum & operator= (const Field & x) { parseFromString(x.safeGet<const String &>()); return *this; }
String toString() const
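The two replaced assignment operators previously set the changed flag only when the new value differed from the old one; after this change they set it unconditionally, which is what the updated unit tests later in this diff now assert. A minimal stand-alone sketch of the new semantics, where ToySetting is a stand-in and not the real SettingFieldMultiEnum:

#include <cassert>

struct ToySetting
{
    int value = 0;
    bool changed = false;
    /// New behaviour: any assignment marks the setting as changed,
    /// even if the assigned value equals the current one.
    ToySetting & operator=(int x) { changed = true; value = x; return *this; }
};

int main()
{
    ToySetting s;
    s = 0;              /// same value as the default
    assert(s.changed);  /// still reported as changed
}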

View File

@ -1,4 +1,3 @@
#pragma GCC diagnostic ignored "-Wmissing-declarations"
#include <gtest/gtest.h>
#include <Core/DecimalFunctions.h>

View File

@ -122,7 +122,7 @@ GTEST_TEST(SettingMySQLDataTypesSupport, SetString)
// comma with spaces
setting = " datetime64 , decimal ";
ASSERT_FALSE(setting.changed); // false since value is the same as previous one.
ASSERT_TRUE(setting.changed);
ASSERT_TRUE(setting.value.isSet(MySQLDataTypesSupport::DECIMAL));
ASSERT_TRUE(setting.value.isSet(MySQLDataTypesSupport::DATETIME64));
ASSERT_EQ("decimal,datetime64", setting.toString());
@ -136,7 +136,7 @@ GTEST_TEST(SettingMySQLDataTypesSupport, SetString)
ASSERT_EQ(Field("decimal"), setting);
setting = String(",decimal,decimal,decimal,decimal,decimal,decimal,decimal,decimal,decimal,");
ASSERT_FALSE(setting.changed); //since previous value was DECIMAL
ASSERT_TRUE(setting.changed); //since previous value was DECIMAL
ASSERT_TRUE(setting.value.isSet(MySQLDataTypesSupport::DECIMAL));
ASSERT_FALSE(setting.value.isSet(MySQLDataTypesSupport::DATETIME64));
ASSERT_EQ("decimal", setting.toString());
@ -163,7 +163,7 @@ GTEST_TEST(SettingMySQLDataTypesSupport, SetInvalidString)
ASSERT_EQ(0, setting.value.getValue());
EXPECT_NO_THROW(setting = String(", "));
ASSERT_FALSE(setting.changed);
ASSERT_TRUE(setting.changed);
ASSERT_EQ(0, setting.value.getValue());
}

View File

@ -63,7 +63,7 @@
#include "config_version.h"
#if defined(OS_DARWIN)
# pragma GCC diagnostic ignored "-Wunused-macros"
# pragma clang diagnostic ignored "-Wunused-macros"
// NOLINTNEXTLINE(bugprone-reserved-identifier)
# define _XOPEN_SOURCE 700 // ucontext is not available without _XOPEN_SOURCE
#endif

View File

@ -12,6 +12,7 @@ namespace DB
namespace ErrorCodes
{
extern const int ARGUMENT_OUT_OF_BOUND;
extern const int LOGICAL_ERROR;
}
static constexpr UInt32 max_scale = 9;
@ -56,4 +57,14 @@ SerializationPtr DataTypeDateTime64::doGetDefaultSerialization() const
return std::make_shared<SerializationDateTime64>(scale, *this);
}
std::string getDateTimeTimezone(const IDataType & data_type)
{
if (const auto * type = typeid_cast<const DataTypeDateTime *>(&data_type))
return type->hasExplicitTimeZone() ? type->getTimeZone().getTimeZone() : std::string();
if (const auto * type = typeid_cast<const DataTypeDateTime64 *>(&data_type))
return type->hasExplicitTimeZone() ? type->getTimeZone().getTimeZone() : std::string();
throw Exception(ErrorCodes::LOGICAL_ERROR, "Cannot get time zone from type {}", data_type.getName());
}
}
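A hedged usage sketch for the new getDateTimeTimezone helper, assuming the ClickHouse source tree is on the include path and the usual DataTypeDateTime/DataTypeDateTime64 constructor signatures:

#include <string>
#include <DataTypes/DataTypeDateTime.h>
#include <DataTypes/DataTypeDateTime64.h>

std::string timezoneExample()
{
    DB::DataTypeDateTime64 with_tz(3, "Europe/Madrid");   /// DateTime64(3, 'Europe/Madrid')
    DB::DataTypeDateTime without_tz;                      /// DateTime without an explicit time zone

    /// "Europe/Madrid" for the first type, an empty string for the second:
    /// only explicitly specified time zones are reported.
    return DB::getDateTimeTimezone(with_tz) + " | " + DB::getDateTimeTimezone(without_tz);
}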

View File

@ -41,5 +41,7 @@ protected:
SerializationPtr doGetDefaultSerialization() const override;
};
std::string getDateTimeTimezone(const IDataType & data_type);
}

View File

@ -556,6 +556,7 @@ inline bool isNullableOrLowCardinalityNullable(const DataTypePtr & data_type)
template <typename DataType> constexpr bool IsDataTypeDecimal = false;
template <typename DataType> constexpr bool IsDataTypeNumber = false;
template <typename DataType> constexpr bool IsDataTypeDateOrDateTime = false;
template <typename DataType> constexpr bool IsDataTypeDate = false;
template <typename DataType> constexpr bool IsDataTypeEnum = false;
template <typename DataType> constexpr bool IsDataTypeDecimalOrNumber = IsDataTypeDecimal<DataType> || IsDataTypeNumber<DataType>;
@ -576,6 +577,9 @@ template <> inline constexpr bool IsDataTypeDecimal<DataTypeDateTime64> = true;
template <typename T> constexpr bool IsDataTypeNumber<DataTypeNumber<T>> = true;
template <> inline constexpr bool IsDataTypeDate<DataTypeDate> = true;
template <> inline constexpr bool IsDataTypeDate<DataTypeDate32> = true;
template <> inline constexpr bool IsDataTypeDateOrDateTime<DataTypeDate> = true;
template <> inline constexpr bool IsDataTypeDateOrDateTime<DataTypeDate32> = true;
template <> inline constexpr bool IsDataTypeDateOrDateTime<DataTypeDateTime> = true;
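The new IsDataTypeDate trait and the extended IsDataTypeDateOrDateTime specializations are variable templates used for if constexpr dispatch, as in the array-aggregate changes later in this diff. A self-contained illustration of the pattern, using local stand-in types rather than the real headers:

#include <type_traits>

struct Date {};
struct DateTime {};
struct Float64Type {};

template <typename T> constexpr bool IsDate = false;
template <typename T> constexpr bool IsDateOrDateTime = false;
template <> inline constexpr bool IsDate<Date> = true;
template <> inline constexpr bool IsDateOrDateTime<Date> = true;
template <> inline constexpr bool IsDateOrDateTime<DateTime> = true;

/// Pick a behaviour at compile time depending on the trait.
template <typename T>
constexpr const char * kindOf()
{
    if constexpr (IsDate<T>)
        return "date";
    else if constexpr (IsDateOrDateTime<T>)
        return "date or time";
    else
        return "other";
}

static_assert(IsDateOrDateTime<Date> && !IsDateOrDateTime<Float64Type>);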

View File

@ -9,13 +9,8 @@
# include <DataTypes/DataTypeNullable.h>
# include <Columns/ColumnConst.h>
# include <Columns/ColumnNullable.h>
# pragma GCC diagnostic push
# pragma GCC diagnostic ignored "-Wunused-parameter"
# include <llvm/IR/IRBuilder.h>
# pragma GCC diagnostic pop
namespace DB
{

View File

@ -88,7 +88,7 @@ DataTypePtr getNumericType(const TypeIndexSet & types)
maximize(max_bits_of_unsigned_integer, 8);
else if (type == TypeIndex::UInt16)
maximize(max_bits_of_unsigned_integer, 16);
else if (type == TypeIndex::UInt32)
else if (type == TypeIndex::UInt32 || type == TypeIndex::IPv4)
maximize(max_bits_of_unsigned_integer, 32);
else if (type == TypeIndex::UInt64)
maximize(max_bits_of_unsigned_integer, 64);

View File

@ -7,7 +7,6 @@
#include <Formats/FormatSettings.h>
#include <IO/ReadBuffer.h>
#pragma GCC diagnostic ignored "-Wmissing-declarations"
#include <gtest/gtest.h>
#include <string>

View File

@ -3,7 +3,6 @@
#include <DataTypes/getMostSubtype.h>
#include <sstream>
#pragma GCC diagnostic ignored "-Wmissing-declarations"
#include <gtest/gtest.h>
namespace DB

View File

@ -115,10 +115,13 @@ void DDLLoadingDependencyVisitor::visit(const ASTStorage & storage, Data & data)
{
if (!storage.engine)
return;
if (storage.engine->name != "Dictionary")
return;
extractTableNameFromArgument(*storage.engine, data, 0);
if (storage.engine->name == "Distributed")
/// Checks whether a dict* expression was used as the sharding_key and builds a dependency between the dictionary and the current table.
/// Distributed(logs, default, hits[, sharding_key[, policy_name]])
extractTableNameFromArgument(*storage.engine, data, 3);
else if (storage.engine->name == "Dictionary")
extractTableNameFromArgument(*storage.engine, data, 0);
}
@ -131,7 +134,29 @@ void DDLLoadingDependencyVisitor::extractTableNameFromArgument(const ASTFunction
QualifiedTableName qualified_name;
const auto * arg = function.arguments->as<ASTExpressionList>()->children[arg_idx].get();
if (const auto * literal = arg->as<ASTLiteral>())
if (const auto * dict_function = arg->as<ASTFunction>())
{
if (!functionIsDictGet(dict_function->name))
return;
/// Get the dictionary name from `dict*` function.
const auto * literal_arg = dict_function->arguments->as<ASTExpressionList>()->children[0].get();
const auto * dictionary_name = literal_arg->as<ASTLiteral>();
if (!dictionary_name)
return;
if (dictionary_name->value.getType() != Field::Types::String)
return;
auto maybe_qualified_name = QualifiedTableName::tryParseFromString(dictionary_name->value.get<String>());
if (!maybe_qualified_name)
return;
qualified_name = std::move(*maybe_qualified_name);
}
else if (const auto * literal = arg->as<ASTLiteral>())
{
if (literal->value.getType() != Field::Types::String)
return;
@ -167,5 +192,4 @@ void DDLLoadingDependencyVisitor::extractTableNameFromArgument(const ASTFunction
}
data.dependencies.emplace(std::move(qualified_name));
}
}
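With this change the visitor also follows dict*(...) calls, for example in a Distributed sharding key, and records the referenced dictionary as a dependency. The dictionary name taken from the first dict* argument is split into a database and a table; the sketch below is a simplified stand-in for QualifiedTableName::tryParseFromString, which handles more cases, just to show the idea:

#include <optional>
#include <string>

struct QualifiedNameSketch
{
    std::string database;   /// empty means "current database"
    std::string table;
};

/// Split "db.dict" into {db, dict}; a bare "dict" keeps the database empty.
/// Simplified: the real implementation also rejects malformed names.
inline std::optional<QualifiedNameSketch> tryParseQualifiedName(const std::string & s)
{
    const auto dot = s.find('.');
    if (dot == std::string::npos)
        return QualifiedNameSketch{"", s};
    if (dot == 0 || dot + 1 == s.size())
        return std::nullopt;
    return QualifiedNameSketch{s.substr(0, dot), s.substr(dot + 1)};
}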

View File

@ -16,10 +16,8 @@
#include <Interpreters/IExternalLoadable.h>
#if defined(__GNUC__)
/// GCC mistakenly warns about the names in enum class.
#pragma GCC diagnostic ignored "-Wshadow"
#endif
/// Clang mistakenly warns about the names in enum class.
#pragma clang diagnostic ignored "-Wshadow"
namespace DB
{

View File

@ -271,14 +271,16 @@ void RegExpTreeDictionary::initGraph()
for (const auto & [id, value]: regex_nodes)
if (value->parent_id == 0) // this is root node.
initTopologyOrder(id, visited, topology_id);
/// If there is a cycle and all nodes have a parent, this condition will be met.
if (topology_order.size() != regex_nodes.size())
throw Exception(ErrorCodes::LOGICAL_ERROR, "The topology order cannot match the number of regex nodes. This is likely an internal bug.");
throw Exception(ErrorCodes::INCORRECT_DICTIONARY_DEFINITION, "The regexp tree is cyclical. Please check your config.");
}
void RegExpTreeDictionary::initTopologyOrder(UInt64 node_idx, std::set<UInt64> & visited, UInt64 & topology_id)
{
visited.insert(node_idx);
for (UInt64 child_idx : regex_nodes[node_idx]->children)
/// There is a cycle if a node is reached again while doing DFS over the graph.
if (visited.contains(child_idx))
throw Exception(ErrorCodes::INCORRECT_DICTIONARY_DEFINITION, "The regexp tree is cyclical. Please check your config.");
else
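The cycle check boils down to a DFS that refuses to re-enter a node it has already visited, which is what turns a malformed (cyclic) regexp tree config into INCORRECT_DICTIONARY_DEFINITION instead of a LOGICAL_ERROR. A minimal stand-alone sketch of the same idea, with node storage simplified compared to regex_nodes:

#include <map>
#include <set>
#include <stdexcept>
#include <vector>

using NodeId = unsigned long long;

/// Depth-first traversal; throws as soon as a child has already been visited.
inline void checkAcyclic(NodeId node,
                         const std::map<NodeId, std::vector<NodeId>> & children,
                         std::set<NodeId> & visited)
{
    visited.insert(node);
    const auto it = children.find(node);
    if (it == children.end())
        return;
    for (NodeId child : it->second)
    {
        if (visited.count(child))
            throw std::runtime_error("The regexp tree is cyclical. Please check your config.");
        checkAcyclic(child, children, visited);
    }
}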

View File

@ -15,9 +15,9 @@
using namespace DB;
static bool registered = false;
/// For debug
#pragma GCC diagnostic ignored "-Wunused-function"
static std::string configurationToString(const DictionaryConfigurationPtr & config)
[[maybe_unused]] static std::string configurationToString(const DictionaryConfigurationPtr & config)
{
const Poco::Util::XMLConfiguration & xml_config = dynamic_cast<const Poco::Util::XMLConfiguration &>(*config);
std::ostringstream oss; // STYLE_CHECK_ALLOW_STD_STRING_STREAM

View File

@ -617,6 +617,10 @@ void CachedOnDiskReadBufferFromFile::predownload(FileSegmentPtr & file_segment)
continue_predownload = false;
}
}
else
{
file_segment->completeWithState(FileSegment::State::PARTIALLY_DOWNLOADED_NO_CONTINUATION);
}
if (!continue_predownload)
{
@ -636,8 +640,8 @@ void CachedOnDiskReadBufferFromFile::predownload(FileSegmentPtr & file_segment)
/// TODO: allow seek more than once with seek avoiding.
bytes_to_predownload = 0;
file_segment->completeWithState(FileSegment::State::PARTIALLY_DOWNLOADED_NO_CONTINUATION);
chassert(file_segment->state() == FileSegment::State::PARTIALLY_DOWNLOADED_NO_CONTINUATION);
LOG_TEST(log, "Bypassing cache because for {}", file_segment->getInfoForLog());
read_type = ReadType::REMOTE_FS_READ_BYPASS_CACHE;

View File

@ -17,9 +17,6 @@ namespace ErrorCodes
extern const int ILLEGAL_DIVISION;
}
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wsign-compare"
template <typename A, typename B>
inline void throwIfDivisionLeadsToFPE(A a, B b)
{
@ -63,8 +60,6 @@ inline auto checkedDivision(A a, B b)
}
#pragma GCC diagnostic pop
template <typename A, typename B>
struct DivideIntegralImpl
{

View File

@ -45,10 +45,7 @@
#include <Interpreters/Context.h>
#if USE_EMBEDDED_COMPILER
# pragma GCC diagnostic push
# pragma GCC diagnostic ignored "-Wunused-parameter"
# include <llvm/IR/IRBuilder.h>
# pragma GCC diagnostic pop
#endif
#include <cassert>

View File

@ -17,10 +17,7 @@
#include <Common/TargetSpecific.h>
#if USE_EMBEDDED_COMPILER
# pragma GCC diagnostic push
# pragma GCC diagnostic ignored "-Wunused-parameter"
# include <llvm/IR/IRBuilder.h>
# pragma GCC diagnostic pop
#endif

View File

@ -42,12 +42,8 @@
#include <type_traits>
#if USE_EMBEDDED_COMPILER
#include <DataTypes/Native.h>
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wunused-parameter"
#include <llvm/IR/IRBuilder.h>
#pragma GCC diagnostic pop
# include <DataTypes/Native.h>
# include <llvm/IR/IRBuilder.h>
#endif

View File

@ -11,12 +11,8 @@
#if USE_EMBEDDED_COMPILER
#include <DataTypes/Native.h>
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wunused-parameter"
#include <llvm/IR/IRBuilder.h>
#pragma GCC diagnostic pop
# include <DataTypes/Native.h>
# include <llvm/IR/IRBuilder.h>
#endif

View File

@ -4,7 +4,6 @@
namespace DB::GatherUtils
{
#pragma GCC visibility push(hidden)
template <typename T>
struct NumericArraySink;
@ -32,5 +31,4 @@ protected:
~ArraySinkVisitorImpl() = default;
};
#pragma GCC visibility pop
}

View File

@ -4,7 +4,6 @@
namespace DB::GatherUtils
{
#pragma GCC visibility push(hidden)
template <typename T>
struct NumericArraySource;
@ -33,5 +32,4 @@ protected:
~ArraySourceVisitorImpl() = default;
};
#pragma GCC visibility pop
}

View File

@ -24,12 +24,9 @@ struct IArraySink
}
};
#pragma GCC visibility push(hidden)
template <typename Derived>
class ArraySinkImpl : public Visitable<Derived, IArraySink, ArraySinkVisitor> {};
#pragma GCC visibility pop
}
}

View File

@ -30,12 +30,9 @@ struct IArraySource
}
};
#pragma GCC visibility push(hidden)
template <typename Derived>
class ArraySourceImpl : public Visitable<Derived, IArraySource, ArraySourceVisitor> {};
#pragma GCC visibility pop
}
}

View File

@ -26,12 +26,9 @@ struct IValueSource
virtual bool isConst() const { return false; }
};
#pragma GCC visibility push(hidden)
template <typename Derived>
class ValueSourceImpl : public Visitable<Derived, IValueSource, ValueSourceVisitor> {};
#pragma GCC visibility pop
}
}

View File

@ -17,7 +17,6 @@ namespace ErrorCodes
namespace GatherUtils
{
#pragma GCC visibility push(hidden)
/// Base classes which select the template function implementation for a concrete ArraySource or ArraySink
/// Derived classes should implement selectImpl for ArraySourceSelector and ArraySinkSelector,
@ -165,7 +164,6 @@ struct ArrayAndValueSourceSelectorBySink : public ArraySinkSelector<ArrayAndValu
}
};
#pragma GCC visibility pop
}
}

View File

@ -15,7 +15,6 @@
namespace DB::GatherUtils
{
#pragma GCC visibility push(hidden)
template <typename T>
struct NumericArraySource;
@ -215,5 +214,4 @@ struct NullableArraySink : public ArraySink
}
};
#pragma GCC visibility pop
}

View File

@ -4,7 +4,6 @@
namespace DB::GatherUtils
{
#pragma GCC visibility push(hidden)
template <typename T>
struct NumericArraySlice
@ -43,6 +42,5 @@ struct GenericValueSlice
static constexpr size_t size = 1;
};
#pragma GCC visibility pop
}

View File

@ -28,8 +28,6 @@ namespace ErrorCodes
namespace GatherUtils
{
#pragma GCC visibility push(hidden)
template <typename T> struct NumericArraySink;
struct StringSink;
struct FixedStringSink;
@ -828,6 +826,4 @@ struct NullableValueSource : public ValueSource
};
}
#pragma GCC visibility pop
}

View File

@ -4,7 +4,6 @@
namespace DB::GatherUtils
{
#pragma GCC visibility push(hidden)
template <typename T>
struct NumericValueSource;
@ -37,5 +36,4 @@ protected:
~ValueSourceVisitorImpl() = default;
};
#pragma GCC visibility pop
}

View File

@ -20,10 +20,7 @@
#include "config.h"
#if USE_EMBEDDED_COMPILER
# pragma GCC diagnostic push
# pragma GCC diagnostic ignored "-Wunused-parameter"
# include <llvm/IR/IRBuilder.h>
# pragma GCC diagnostic pop
#endif

View File

@ -11,12 +11,10 @@
#include <base/range.h>
/// Warning in boost::geometry during template strategy substitution.
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wunused-parameter"
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wunused-parameter"
#include <boost/geometry.hpp>
#pragma GCC diagnostic pop
#pragma clang diagnostic pop
#include <boost/geometry/geometries/point_xy.hpp>
#include <boost/geometry/geometries/polygon.hpp>

View File

@ -5,6 +5,9 @@
#include <Columns/ColumnsNumber.h>
#include <DataTypes/DataTypeArray.h>
#include <DataTypes/DataTypeDate.h>
#include <DataTypes/DataTypeDate32.h>
#include <DataTypes/DataTypeDateTime.h>
#include <DataTypes/DataTypeDateTime64.h>
#include <DataTypes/DataTypesDecimal.h>
#include <DataTypes/DataTypesNumber.h>
@ -81,9 +84,10 @@ struct ArrayAggregateResultImpl<ArrayElement, AggregateOperation::sum>
std::conditional_t<std::is_same_v<ArrayElement, Decimal64>, Decimal128,
std::conditional_t<std::is_same_v<ArrayElement, Decimal128>, Decimal128,
std::conditional_t<std::is_same_v<ArrayElement, Decimal256>, Decimal256,
std::conditional_t<std::is_same_v<ArrayElement, DateTime64>, Decimal128,
std::conditional_t<std::is_floating_point_v<ArrayElement>, Float64,
std::conditional_t<std::is_signed_v<ArrayElement>, Int64,
UInt64>>>>>>>>>>;
UInt64>>>>>>>>>>>;
};
template <typename ArrayElement, AggregateOperation operation>
@ -108,26 +112,53 @@ struct ArrayAggregateImpl
using Types = std::decay_t<decltype(types)>;
using DataType = typename Types::LeftType;
if constexpr (aggregate_operation == AggregateOperation::average || aggregate_operation == AggregateOperation::product)
if constexpr (!IsDataTypeDateOrDateTime<DataType>)
{
result = std::make_shared<DataTypeFloat64>();
if constexpr (aggregate_operation == AggregateOperation::average || aggregate_operation == AggregateOperation::product)
{
result = std::make_shared<DataTypeFloat64>();
return true;
return true;
}
else if constexpr (IsDataTypeNumber<DataType>)
{
using NumberReturnType = ArrayAggregateResult<typename DataType::FieldType, aggregate_operation>;
result = std::make_shared<DataTypeNumber<NumberReturnType>>();
return true;
}
else if constexpr (IsDataTypeDecimal<DataType>)
{
using DecimalReturnType = ArrayAggregateResult<typename DataType::FieldType, aggregate_operation>;
UInt32 scale = getDecimalScale(*expression_return);
result = std::make_shared<DataTypeDecimal<DecimalReturnType>>(DecimalUtils::max_precision<DecimalReturnType>, scale);
return true;
}
}
else if constexpr (IsDataTypeNumber<DataType>)
else if constexpr (aggregate_operation == AggregateOperation::max || aggregate_operation == AggregateOperation::min)
{
using NumberReturnType = ArrayAggregateResult<typename DataType::FieldType, aggregate_operation>;
result = std::make_shared<DataTypeNumber<NumberReturnType>>();
if constexpr (IsDataTypeDate<DataType>)
{
result = std::make_shared<DataType>();
return true;
}
else if constexpr (IsDataTypeDecimal<DataType> && !IsDataTypeDateOrDateTime<DataType>)
{
using DecimalReturnType = ArrayAggregateResult<typename DataType::FieldType, aggregate_operation>;
UInt32 scale = getDecimalScale(*expression_return);
result = std::make_shared<DataTypeDecimal<DecimalReturnType>>(DecimalUtils::max_precision<DecimalReturnType>, scale);
return true;
}
else if constexpr (!IsDataTypeDecimal<DataType>)
{
std::string timezone = getDateTimeTimezone(*expression_return);
result = std::make_shared<DataTypeDateTime>(timezone);
return true;
return true;
}
else
{
std::string timezone = getDateTimeTimezone(*expression_return);
UInt32 scale = getDecimalScale(*expression_return);
result = std::make_shared<DataTypeDateTime64>(scale, timezone);
return true;
}
}
return false;
@ -370,7 +401,8 @@ struct ArrayAggregateImpl
executeType<Decimal32>(mapped, offsets, res) ||
executeType<Decimal64>(mapped, offsets, res) ||
executeType<Decimal128>(mapped, offsets, res) ||
executeType<Decimal256>(mapped, offsets, res))
executeType<Decimal256>(mapped, offsets, res) ||
executeType<DateTime64>(mapped, offsets, res))
{
return res;
}
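For min/max the aggregate now keeps the input date/time type (Date, Date32, DateTime, or DateTime64 with its scale and time zone), while sums of DateTime64 values are accumulated as Decimal128, extending the existing conditional_t chain. A tiny sketch of that accumulator rule using stand-in types:

#include <type_traits>

struct DateTime64Tag {};
struct Decimal64Tag {};
struct Decimal128Tag {};

/// Sum accumulator: DateTime64 and Decimal64 both widen to Decimal128,
/// mirroring the branch added to ArrayAggregateResultImpl above.
template <typename Element>
using SumAccumulator = std::conditional_t<
    std::is_same_v<Element, DateTime64Tag>, Decimal128Tag,
    std::conditional_t<std::is_same_v<Element, Decimal64Tag>, Decimal128Tag, Element>>;

static_assert(std::is_same_v<SumAccumulator<DateTime64Tag>, Decimal128Tag>);
static_assert(std::is_same_v<SumAccumulator<Decimal64Tag>, Decimal128Tag>);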

View File

@ -35,10 +35,10 @@ struct ArrayDifferenceImpl
if (which.isUInt8() || which.isInt8())
return std::make_shared<DataTypeArray>(std::make_shared<DataTypeInt16>());
if (which.isUInt16() || which.isInt16())
if (which.isUInt16() || which.isInt16() || which.isDate())
return std::make_shared<DataTypeArray>(std::make_shared<DataTypeInt32>());
if (which.isUInt32() || which.isUInt64() || which.isInt32() || which.isInt64())
if (which.isUInt32() || which.isUInt64() || which.isInt32() || which.isInt64() || which.isDate32() || which.isDateTime())
return std::make_shared<DataTypeArray>(std::make_shared<DataTypeInt64>());
if (which.isFloat32() || which.isFloat64())
@ -47,6 +47,14 @@ struct ArrayDifferenceImpl
if (which.isDecimal())
return std::make_shared<DataTypeArray>(expression_return);
if (which.isDateTime64())
{
UInt32 scale = getDecimalScale(*expression_return);
UInt32 precision = getDecimalPrecision(*expression_return);
return std::make_shared<DataTypeArray>(std::make_shared<DataTypeDecimal<Decimal64>>(precision, scale));
}
throw Exception(ErrorCodes::ILLEGAL_TYPE_OF_ARGUMENT, "arrayDifference cannot process values of type {}", expression_return->getName());
}
@ -146,7 +154,8 @@ struct ArrayDifferenceImpl
executeType<Decimal32, Decimal32>(mapped, array, res) ||
executeType<Decimal64, Decimal64>(mapped, array, res) ||
executeType<Decimal128, Decimal128>(mapped, array, res) ||
executeType<Decimal256, Decimal256>(mapped, array, res))
executeType<Decimal256, Decimal256>(mapped, array, res) ||
executeType<DateTime64, Decimal64>(mapped, array, res))
return res;
else
throw Exception(ErrorCodes::ILLEGAL_COLUMN, "Unexpected column for arrayDifference: {}", mapped->getName());

Some files were not shown because too many files have changed in this diff.