Mirror of https://github.com/ClickHouse/ClickHouse.git (synced 2024-12-11 17:02:25 +00:00)

Commit 10355ae555: Merge remote-tracking branch 'upstream/master' into nikvas0/index

@@ -1,48 +0,0 @@
-# Check prereqs
-FIND_PROGRAM(GCOV_PATH gcov)
-FIND_PROGRAM(LCOV_PATH lcov)
-FIND_PROGRAM(GENHTML_PATH genhtml)
-
-IF(NOT GCOV_PATH)
-    MESSAGE(FATAL_ERROR "gcov not found! Aborting...")
-ENDIF(NOT GCOV_PATH)
-
-IF(NOT CMAKE_BUILD_TYPE STREQUAL Debug)
-    MESSAGE(WARNING "Code coverage results with an optimised (non-Debug) build may be misleading")
-ENDIF(NOT CMAKE_BUILD_TYPE STREQUAL Debug)
-
-#Setup compiler options
-ADD_DEFINITIONS(-fprofile-arcs -ftest-coverage)
-
-SET(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fprofile-arcs ")
-SET(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -fprofile-arcs ")
-
-IF(NOT LCOV_PATH)
-    MESSAGE(FATAL_ERROR "lcov not found! Aborting...")
-ENDIF(NOT LCOV_PATH)
-
-IF(NOT GENHTML_PATH)
-    MESSAGE(FATAL_ERROR "genhtml not found! Aborting...")
-ENDIF(NOT GENHTML_PATH)
-
-#Setup target
-ADD_CUSTOM_TARGET(ShowCoverage
-    #Capturing lcov counters and generating report
-    COMMAND ${LCOV_PATH} --directory . --capture --output-file CodeCoverage.info
-    COMMAND ${LCOV_PATH} --remove CodeCoverage.info '${CMAKE_CURRENT_BINARY_DIR}/*' 'test/*' 'mock/*' '/usr/*' '/opt/*' '*ext/rhel5_x86_64*' '*ext/osx*' --output-file CodeCoverage.info.cleaned
-    COMMAND ${GENHTML_PATH} -o CodeCoverageReport CodeCoverage.info.cleaned
-)
-
-ADD_CUSTOM_TARGET(ShowAllCoverage
-    #Capturing lcov counters and generating report
-    COMMAND ${LCOV_PATH} -a CodeCoverage.info.cleaned -a CodeCoverage.info.cleaned_withoutHA -o AllCodeCoverage.info
-    COMMAND sed -e 's|/.*/src|${CMAKE_SOURCE_DIR}/src|' -ig AllCodeCoverage.info
-    COMMAND ${GENHTML_PATH} -o AllCodeCoverageReport AllCodeCoverage.info
-)
-
-ADD_CUSTOM_TARGET(ResetCoverage
-    #Cleanup lcov
-    COMMAND ${LCOV_PATH} --directory . --zerocounters
-)

(File diff suppressed because it is too large.)
@@ -1,38 +1,15 @@
-OPTION(ENABLE_COVERAGE "enable code coverage" OFF)
-OPTION(ENABLE_DEBUG "enable debug build" OFF)
 OPTION(ENABLE_SSE "enable SSE4.2 buildin function" ON)
-OPTION(ENABLE_FRAME_POINTER "enable frame pointer on 64bit system with flag -fno-omit-frame-pointer, on 32bit system, it is always enabled" ON)
-OPTION(ENABLE_LIBCPP "using libc++ instead of libstdc++, only valid for clang compiler" OFF)
-OPTION(ENABLE_BOOST "using boost instead of native compiler c++0x support" OFF)
-
 INCLUDE (CheckFunctionExists)
 CHECK_FUNCTION_EXISTS(dladdr HAVE_DLADDR)
 CHECK_FUNCTION_EXISTS(nanosleep HAVE_NANOSLEEP)
 
-IF(ENABLE_DEBUG STREQUAL ON)
-    SET(CMAKE_BUILD_TYPE Debug CACHE
-        STRING "Choose the type of build, options are: None Debug Release RelWithDebInfo MinSizeRel." FORCE)
-    SET(CMAKE_CXX_FLAGS_DEBUG "-g -O0" CACHE STRING "compiler flags for debug" FORCE)
-    SET(CMAKE_C_FLAGS_DEBUG "-g -O0" CACHE STRING "compiler flags for debug" FORCE)
-ELSE(ENABLE_DEBUG STREQUAL ON)
-    SET(CMAKE_BUILD_TYPE RelWithDebInfo CACHE
-        STRING "Choose the type of build, options are: None Debug Release RelWithDebInfo MinSizeRel." FORCE)
-ENDIF(ENABLE_DEBUG STREQUAL ON)
-
 SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-strict-aliasing")
 SET(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fno-strict-aliasing")
 
-IF(ENABLE_COVERAGE STREQUAL ON)
-    INCLUDE(CodeCoverage)
-ENDIF(ENABLE_COVERAGE STREQUAL ON)
-
-IF(ENABLE_FRAME_POINTER STREQUAL ON)
-    SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-omit-frame-pointer")
-ENDIF(ENABLE_FRAME_POINTER STREQUAL ON)
-
 IF(ENABLE_SSE STREQUAL ON)
     SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -msse4.2")
 ENDIF(ENABLE_SSE STREQUAL ON)
 
 IF(NOT TEST_HDFS_PREFIX)
     SET(TEST_HDFS_PREFIX "./" CACHE STRING "default directory prefix used for test." FORCE)

@@ -41,76 +18,7 @@ ENDIF(NOT TEST_HDFS_PREFIX)
 ADD_DEFINITIONS(-DTEST_HDFS_PREFIX="${TEST_HDFS_PREFIX}")
 ADD_DEFINITIONS(-D__STDC_FORMAT_MACROS)
 ADD_DEFINITIONS(-D_GNU_SOURCE)
+ADD_DEFINITIONS(-D_GLIBCXX_USE_NANOSLEEP)
-
-IF(OS_MACOSX AND CMAKE_COMPILER_IS_GNUCXX)
-    SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wl,-bind_at_load")
-ENDIF(OS_MACOSX AND CMAKE_COMPILER_IS_GNUCXX)
-
-IF(OS_LINUX)
-    SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wl,--export-dynamic")
-ENDIF(OS_LINUX)
-
-SET(BOOST_ROOT ${CMAKE_PREFIX_PATH})
-IF(ENABLE_BOOST STREQUAL ON)
-    MESSAGE(STATUS "using boost instead of native compiler c++0x support.")
-    FIND_PACKAGE(Boost 1.50 REQUIRED)
-    SET(NEED_BOOST true CACHE INTERNAL "boost is required")
-ELSE(ENABLE_BOOST STREQUAL ON)
-    SET(NEED_BOOST false CACHE INTERNAL "boost is required")
-ENDIF(ENABLE_BOOST STREQUAL ON)
-
-IF(CMAKE_COMPILER_IS_GNUCXX)
-    IF(ENABLE_LIBCPP STREQUAL ON)
-        MESSAGE(FATAL_ERROR "Unsupport using GCC compiler with libc++")
-    ENDIF(ENABLE_LIBCPP STREQUAL ON)
-
-    IF((GCC_COMPILER_VERSION_MAJOR EQUAL 4) AND (GCC_COMPILER_VERSION_MINOR EQUAL 4) AND OS_MACOSX)
-        SET(NEED_GCCEH true CACHE INTERNAL "Explicitly link with gcc_eh")
-        MESSAGE(STATUS "link with -lgcc_eh for TLS")
-    ENDIF((GCC_COMPILER_VERSION_MAJOR EQUAL 4) AND (GCC_COMPILER_VERSION_MINOR EQUAL 4) AND OS_MACOSX)
-
-    IF((GCC_COMPILER_VERSION_MAJOR LESS 4) OR ((GCC_COMPILER_VERSION_MAJOR EQUAL 4) AND (GCC_COMPILER_VERSION_MINOR LESS 4)))
-        SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall")
-        IF(NOT ENABLE_BOOST STREQUAL ON)
-            MESSAGE(STATUS "gcc version is older than 4.6.0, boost is required.")
-            FIND_PACKAGE(Boost 1.50 REQUIRED)
-            SET(NEED_BOOST true CACHE INTERNAL "boost is required")
-        ENDIF(NOT ENABLE_BOOST STREQUAL ON)
-    ELSEIF((GCC_COMPILER_VERSION_MAJOR EQUAL 4) AND (GCC_COMPILER_VERSION_MINOR LESS 7))
-        IF(NOT ENABLE_BOOST STREQUAL ON)
-            MESSAGE(STATUS "gcc version is older than 4.6.0, boost is required.")
-            FIND_PACKAGE(Boost 1.50 REQUIRED)
-            SET(NEED_BOOST true CACHE INTERNAL "boost is required")
-        ENDIF(NOT ENABLE_BOOST STREQUAL ON)
-        MESSAGE(STATUS "adding c++0x support for gcc compiler")
-        SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++0x")
-    ELSE((GCC_COMPILER_VERSION_MAJOR LESS 4) OR ((GCC_COMPILER_VERSION_MAJOR EQUAL 4) AND (GCC_COMPILER_VERSION_MINOR LESS 4)))
-        MESSAGE(STATUS "adding c++0x support for gcc compiler")
-        SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++0x")
-    ENDIF((GCC_COMPILER_VERSION_MAJOR LESS 4) OR ((GCC_COMPILER_VERSION_MAJOR EQUAL 4) AND (GCC_COMPILER_VERSION_MINOR LESS 4)))
-
-    IF(NEED_BOOST)
-        IF((Boost_MAJOR_VERSION LESS 1) OR ((Boost_MAJOR_VERSION EQUAL 1) AND (Boost_MINOR_VERSION LESS 50)))
-            MESSAGE(FATAL_ERROR "boost 1.50+ is required")
-        ENDIF()
-    ELSE(NEED_BOOST)
-        IF(HAVE_NANOSLEEP)
-            SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -D_GLIBCXX_USE_NANOSLEEP")
-        ELSE(HAVE_NANOSLEEP)
-            MESSAGE(FATAL_ERROR "nanosleep() is required")
-        ENDIF(HAVE_NANOSLEEP)
-    ENDIF(NEED_BOOST)
-    SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall")
-ELSEIF(CMAKE_COMPILER_IS_CLANG)
-    MESSAGE(STATUS "adding c++0x support for clang compiler")
-    SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++0x")
-    SET(CMAKE_XCODE_ATTRIBUTE_CLANG_CXX_LANGUAGE_STANDARD "c++0x")
-    IF(ENABLE_LIBCPP STREQUAL ON)
-        MESSAGE(STATUS "using libc++ instead of libstdc++")
-        SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -stdlib=libc++")
-        SET(CMAKE_XCODE_ATTRIBUTE_CLANG_CXX_LIBRARY "libc++")
-    ENDIF(ENABLE_LIBCPP STREQUAL ON)
-ENDIF(CMAKE_COMPILER_IS_GNUCXX)
-
 TRY_COMPILE(STRERROR_R_RETURN_INT
     ${CMAKE_CURRENT_BINARY_DIR}

@@ -138,32 +46,8 @@ TRY_COMPILE(HAVE_NESTED_EXCEPTION
     CMAKE_FLAGS "-DCMAKE_CXX_LINK_EXECUTABLE='echo not linking now...'"
     OUTPUT_VARIABLE OUTPUT)
 
-FILE(WRITE ${CMAKE_CURRENT_BINARY_DIR}/test.cpp "#include <boost/chrono.hpp>")
-TRY_COMPILE(HAVE_BOOST_CHRONO
-    ${CMAKE_CURRENT_BINARY_DIR}
-    ${CMAKE_CURRENT_BINARY_DIR}/test.cpp
-    CMAKE_FLAGS "-DCMAKE_CXX_LINK_EXECUTABLE='echo not linking now...'"
-    -DINCLUDE_DIRECTORIES=${Boost_INCLUDE_DIR}
-    OUTPUT_VARIABLE OUTPUT)
+SET(HAVE_BOOST_CHRONO 0)
+SET(HAVE_BOOST_ATOMIC 0)
-
-FILE(WRITE ${CMAKE_CURRENT_BINARY_DIR}/test.cpp "#include <chrono>")
-TRY_COMPILE(HAVE_STD_CHRONO
-    ${CMAKE_CURRENT_BINARY_DIR}
-    ${CMAKE_CURRENT_BINARY_DIR}/test.cpp
-    CMAKE_FLAGS "-DCMAKE_CXX_LINK_EXECUTABLE='echo not linking now...'"
-    OUTPUT_VARIABLE OUTPUT)
+SET(HAVE_STD_CHRONO 1)
+SET(HAVE_STD_ATOMIC 1)
-
-FILE(WRITE ${CMAKE_CURRENT_BINARY_DIR}/test.cpp "#include <boost/atomic.hpp>")
-TRY_COMPILE(HAVE_BOOST_ATOMIC
-    ${CMAKE_CURRENT_BINARY_DIR}
-    ${CMAKE_CURRENT_BINARY_DIR}/test.cpp
-    CMAKE_FLAGS "-DCMAKE_CXX_LINK_EXECUTABLE='echo not linking now...'"
-    -DINCLUDE_DIRECTORIES=${Boost_INCLUDE_DIR}
-    OUTPUT_VARIABLE OUTPUT)
-
-FILE(WRITE ${CMAKE_CURRENT_BINARY_DIR}/test.cpp "#include <atomic>")
-TRY_COMPILE(HAVE_STD_ATOMIC
-    ${CMAKE_CURRENT_BINARY_DIR}
-    ${CMAKE_CURRENT_BINARY_DIR}/test.cpp
-    CMAKE_FLAGS "-DCMAKE_CXX_LINK_EXECUTABLE='echo not linking now...'"
-    OUTPUT_VARIABLE OUTPUT)
@@ -36,7 +36,7 @@ option (WEVERYTHING "Enables -Weverything option with some exceptions. This is i
 if (CMAKE_CXX_COMPILER_ID STREQUAL "Clang")
     set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wpedantic -Wno-vla-extension -Wno-zero-length-array -Wno-gnu-anonymous-struct -Wno-nested-anon-types")
 
-    set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wextra-semi -Wcomma -Winconsistent-missing-destructor-override -Wunused-exception-parameter -Wshadow-uncaptured-local -Wcovered-switch-default -Wshadow -Wold-style-cast -Wrange-loop-analysis -Wunused-member-function -Wunreachable-code -Wunreachable-code-return -Wnewline-eof -Wembedded-directive -Wgnu-case-range -Wunused-macros -Wconditional-uninitialized -Wdeprecated -Wundef -Wreserved-id-macro -Wredundant-parens -Wzero-as-null-pointer-constant")
+    set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wshadow -Wshadow-uncaptured-local -Wextra-semi -Wcomma -Winconsistent-missing-destructor-override -Wunused-exception-parameter -Wcovered-switch-default -Wold-style-cast -Wrange-loop-analysis -Wunused-member-function -Wunreachable-code -Wunreachable-code-return -Wnewline-eof -Wembedded-directive -Wgnu-case-range -Wunused-macros -Wconditional-uninitialized -Wdeprecated -Wundef -Wreserved-id-macro -Wredundant-parens -Wzero-as-null-pointer-constant")
 
     if (WEVERYTHING)
         set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Weverything -Wno-c++98-compat -Wno-c++98-compat-pedantic -Wno-missing-noreturn -Wno-padded -Wno-switch-enum -Wno-shadow-field-in-constructor -Wno-deprecated-dynamic-exception-spec -Wno-float-equal -Wno-weak-vtables -Wno-shift-sign-overflow -Wno-sign-conversion -Wno-conversion -Wno-exit-time-destructors -Wno-undefined-func-template -Wno-documentation-unknown-command -Wno-missing-variable-declarations -Wno-unused-template -Wno-global-constructors -Wno-c99-extensions -Wno-missing-prototypes -Wno-weak-template-vtables -Wno-zero-length-array -Wno-gnu-anonymous-struct -Wno-nested-anon-types -Wno-double-promotion -Wno-disabled-macro-expansion -Wno-used-but-marked-unused -Wno-vla-extension -Wno-vla -Wno-packed")

@@ -53,10 +53,10 @@ if (CMAKE_CXX_COMPILER_ID STREQUAL "Clang")
     endif ()
 
     if (NOT CMAKE_CXX_COMPILER_VERSION VERSION_LESS 8)
-        set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wextra-semi-stmt")
+        set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wextra-semi-stmt -Wshadow-field -Wstring-plus-int")
 
         if (WEVERYTHING)
-            set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-shadow-field") # TODO Enable
+            set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
         endif ()
     endif ()
 endif ()
@@ -231,7 +231,7 @@ public:
             nested_state += nested_size_of_data;
         }
 
-        offsets_to.push_back(offsets_to.empty() ? state.dynamic_array_size : offsets_to.back() + state.dynamic_array_size);
+        offsets_to.push_back(offsets_to.back() + state.dynamic_array_size);
     }
 
     bool allocatesMemoryInArena() const override

@@ -203,7 +203,7 @@ public:
         for (size_t i = arr.size(); i < result_array_size; ++i)
             to_data.insert(default_value);
 
-        to_offsets.push_back((to_offsets.empty() ? 0 : to_offsets.back()) + result_array_size);
+        to_offsets.push_back(to_offsets.back() + result_array_size);
     }
 
     const char * getHeaderFilePath() const override { return __FILE__; }

@@ -187,8 +187,8 @@ template <bool result_is_nullable>
 class AggregateFunctionNullUnary final : public AggregateFunctionNullBase<result_is_nullable, AggregateFunctionNullUnary<result_is_nullable>>
 {
 public:
-    AggregateFunctionNullUnary(AggregateFunctionPtr nested_function)
-        : AggregateFunctionNullBase<result_is_nullable, AggregateFunctionNullUnary<result_is_nullable>>(nested_function)
+    AggregateFunctionNullUnary(AggregateFunctionPtr nested_function_)
+        : AggregateFunctionNullBase<result_is_nullable, AggregateFunctionNullUnary<result_is_nullable>>(std::move(nested_function_))
     {
     }
 

@@ -209,8 +209,8 @@ template <bool result_is_nullable>
 class AggregateFunctionNullVariadic final : public AggregateFunctionNullBase<result_is_nullable, AggregateFunctionNullVariadic<result_is_nullable>>
 {
 public:
-    AggregateFunctionNullVariadic(AggregateFunctionPtr nested_function, const DataTypes & arguments)
-        : AggregateFunctionNullBase<result_is_nullable, AggregateFunctionNullVariadic<result_is_nullable>>(nested_function),
+    AggregateFunctionNullVariadic(AggregateFunctionPtr nested_function_, const DataTypes & arguments)
+        : AggregateFunctionNullBase<result_is_nullable, AggregateFunctionNullVariadic<result_is_nullable>>(std::move(nested_function_)),
         number_of_arguments(arguments.size())
     {
         if (number_of_arguments == 1)

@@ -100,9 +100,12 @@ public:
         return res;
     }
 
-    void add(AggregateDataPtr place, const IColumn ** columns, size_t row_num, Arena *) const override
+    void NO_SANITIZE_UNDEFINED add(AggregateDataPtr place, const IColumn ** columns, size_t row_num, Arena *) const override
     {
+        /// Out of range conversion may occur. This is Ok.
 
         const auto & column = static_cast<const ColVecType &>(*columns[0]);
 
         if constexpr (has_second_arg)
             this->data(place).add(
                 column.getData()[row_num],

@@ -226,14 +226,14 @@ public:
 
         // Advance column offsets
         auto & to_keys_offsets = to_keys_arr.getOffsets();
-        to_keys_offsets.push_back((to_keys_offsets.empty() ? 0 : to_keys_offsets.back()) + size);
+        to_keys_offsets.push_back(to_keys_offsets.back() + size);
         to_keys_col.reserve(size);
 
         for (size_t col = 0; col < values_types.size(); ++col)
         {
             auto & to_values_arr = static_cast<ColumnArray &>(to_tuple.getColumn(col + 1));
             auto & to_values_offsets = to_values_arr.getOffsets();
-            to_values_offsets.push_back((to_values_offsets.empty() ? 0 : to_values_offsets.back()) + size);
+            to_values_offsets.push_back(to_values_offsets.back() + size);
             to_values_arr.getData().reserve(size);
         }
 
@@ -233,7 +233,10 @@ void ColumnArray::insertFrom(const IColumn & src_, size_t n)
 
 void ColumnArray::insertDefault()
 {
-    getOffsets().push_back(getOffsets().back());
+    /// NOTE 1: We can use back() even if the array is empty (due to zero -1th element in PODArray).
+    /// NOTE 2: We cannot pass a reference to push_back, because the reference gets invalidated if the array is reallocated.
+    auto last_offset = getOffsets().back();
+    getOffsets().push_back(last_offset);
 }
 
 
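Aside (not part of the commit): the two NOTE comments above describe a general C++ hazard. If push_back takes its argument by reference and that reference points into the container's own storage, a reallocation inside push_back can invalidate the reference before the value is read. The toy container below is illustrative only; it is not ClickHouse's PODArray, and every name in it is made up. It shows why the patch copies the offset into a local variable first.

```cpp
#include <cstdio>
#include <cstdlib>
#include <cstring>

// Toy growable array, a stand-in for a PODArray-like container (not the real one).
struct TinyArray
{
    int * data = nullptr;
    size_t size = 0, capacity = 0;

    void push_back(const int & x)          // takes a reference, like many containers
    {
        if (size == capacity)
        {
            size_t new_capacity = capacity ? capacity * 2 : 4;
            int * fresh = static_cast<int *>(std::malloc(new_capacity * sizeof(int)));
            if (size)
                std::memcpy(fresh, data, size * sizeof(int));
            std::free(data);               // if `x` pointed into `data`, it now dangles
            data = fresh;
            capacity = new_capacity;
        }
        data[size++] = x;                  // reading a dangling `x` here would be undefined behaviour
    }

    int & back() { return data[size - 1]; }
};

int main()
{
    TinyArray a;
    for (int i = 0; i < 4; ++i)
        a.push_back(i);                    // array is now full, so the next push reallocates

    int last = a.back();                   // copy first, as the patched insertDefault() does
    a.push_back(last);                     // safe: the argument no longer aliases the storage

    // a.push_back(a.back());              // would pass a reference into the buffer being freed

    std::printf("%d\n", a.back());
    std::free(a.data);
    return 0;
}
```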
@@ -20,7 +20,7 @@ private:
 
 public:
     const char * getFamilyName() const override { return "Nothing"; }
-    MutableColumnPtr cloneDummy(size_t s) const override { return ColumnNothing::create(s); }
+    MutableColumnPtr cloneDummy(size_t s_) const override { return ColumnNothing::create(s_); }
 
     bool canBeInsideNullable() const override { return true; }
 };

@@ -127,9 +127,6 @@ protected:
 
         c_end = c_start + end_diff;
         c_end_of_storage = c_start + bytes - pad_right - pad_left;
-
-        if (pad_left) /// TODO Do we need it?
-            memset(c_start - ELEMENT_SIZE, 0, ELEMENT_SIZE);
     }
 
     bool isInitialized() const
@@ -312,13 +309,13 @@ public:
         this->c_end = this->c_start + this->byte_size(n);
     }
 
-    template <typename ... TAllocatorParams>
-    void push_back(const T & x, TAllocatorParams &&... allocator_params)
+    template <typename U, typename ... TAllocatorParams>
+    void push_back(U && x, TAllocatorParams &&... allocator_params)
     {
         if (unlikely(this->c_end == this->c_end_of_storage))
             this->reserveForNextSize(std::forward<TAllocatorParams>(allocator_params)...);
 
-        *t_end() = x;
+        new (t_end()) T(std::forward<U>(x));
         this->c_end += this->byte_size(1);
     }
 
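Aside (not part of the commit): the push_back change above replaces assignment through `*t_end()` with a forwarding reference plus placement new, so a single overload can copy lvalues and move rvalues directly into uninitialized storage. The sketch below shows the same pattern in isolation; `SmallVec` is a hypothetical stand-in under simplified assumptions, not the real PODArray.

```cpp
#include <cstdio>
#include <new>
#include <string>
#include <utility>

// Minimal fixed-capacity buffer illustrating a forwarding push_back with placement new.
template <typename T, size_t Capacity>
struct SmallVec
{
    alignas(T) unsigned char storage[Capacity * sizeof(T)];
    size_t count = 0;

    T * end_ptr() { return reinterpret_cast<T *>(storage) + count; }

    template <typename U>
    void push_back(U && x)
    {
        // Construct the element in place, preserving the value category of x.
        new (end_ptr()) T(std::forward<U>(x));
        ++count;
    }

    ~SmallVec()
    {
        for (size_t i = 0; i < count; ++i)
            (reinterpret_cast<T *>(storage) + i)->~T();
    }
};

int main()
{
    SmallVec<std::string, 4> v;
    std::string s = "copied";
    v.push_back(s);                         // U = std::string &  -> copy constructed in place
    v.push_back(std::string("moved"));      // U = std::string    -> move constructed in place
    std::printf("%zu elements\n", v.count);
    return 0;
}
```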
@@ -150,8 +150,8 @@ std::unique_ptr<ShellCommand> ShellCommand::execute(const std::string & command,
 {
     /// Arguments in non-constant chunks of memory (as required for `execv`).
     /// Moreover, their copying must be done before calling `vfork`, so after `vfork` do a minimum of things.
-    std::vector<char> argv0("sh", "sh" + strlen("sh") + 1);
-    std::vector<char> argv1("-c", "-c" + strlen("-c") + 1);
+    std::vector<char> argv0("sh", &("sh"[3]));
+    std::vector<char> argv1("-c", &("-c"[3]));
     std::vector<char> argv2(command.data(), command.data() + command.size() + 1);
 
     char * const argv[] = { argv0.data(), argv1.data(), argv2.data(), nullptr };

@@ -194,9 +194,9 @@ template <bool CaseSensitive, bool ASCII> struct VolnitskyImpl;
 /// Case sensitive comparison
 template <bool ASCII> struct VolnitskyImpl<true, ASCII> : VolnitskyBase<VolnitskyImpl<true, ASCII>>
 {
-    VolnitskyImpl(const char * const needle, const size_t needle_size, const size_t haystack_size_hint = 0)
-        : VolnitskyBase<VolnitskyImpl<true, ASCII>>{needle, needle_size, haystack_size_hint},
-        fallback_searcher{needle, needle_size}
+    VolnitskyImpl(const char * const needle_, const size_t needle_size_, const size_t haystack_size_hint = 0)
+        : VolnitskyBase<VolnitskyImpl<true, ASCII>>{needle_, needle_size_, haystack_size_hint},
+        fallback_searcher{needle_, needle_size_}
     {
     }
 
@@ -222,8 +222,8 @@ template <bool ASCII> struct VolnitskyImpl<true, ASCII> : VolnitskyBase<Volnitsk
 /// Case-insensitive ASCII
 template <> struct VolnitskyImpl<false, true> : VolnitskyBase<VolnitskyImpl<false, true>>
 {
-    VolnitskyImpl(const char * const needle, const size_t needle_size, const size_t haystack_size_hint = 0)
-        : VolnitskyBase{needle, needle_size, haystack_size_hint}, fallback_searcher{needle, needle_size}
+    VolnitskyImpl(const char * const needle_, const size_t needle_size_, const size_t haystack_size_hint = 0)
+        : VolnitskyBase{needle_, needle_size_, haystack_size_hint}, fallback_searcher{needle_, needle_size_}
     {
     }
 
@@ -248,8 +248,8 @@ template <> struct VolnitskyImpl<false, true> : VolnitskyBase<VolnitskyImpl<fals
 /// Case-sensitive UTF-8
 template <> struct VolnitskyImpl<false, false> : VolnitskyBase<VolnitskyImpl<false, false>>
 {
-    VolnitskyImpl(const char * const needle, const size_t needle_size, const size_t haystack_size_hint = 0)
-        : VolnitskyBase{needle, needle_size, haystack_size_hint}, fallback_searcher{needle, needle_size}
+    VolnitskyImpl(const char * const needle_, const size_t needle_size_, const size_t haystack_size_hint = 0)
+        : VolnitskyBase{needle_, needle_size_, haystack_size_hint}, fallback_searcher{needle_, needle_size_}
     {
     }
 
@@ -839,7 +839,7 @@ int32_t ZooKeeper::tryMultiNoThrow(const Coordination::Requests & requests, Coor
 }
 
 
-size_t KeeperMultiException::getFailedOpIndex(int32_t code, const Coordination::Responses & responses)
+size_t KeeperMultiException::getFailedOpIndex(int32_t exception_code, const Coordination::Responses & responses)
 {
     if (responses.empty())
         throw DB::Exception("Responses for multi transaction is empty", DB::ErrorCodes::LOGICAL_ERROR);

@@ -848,17 +848,17 @@ size_t KeeperMultiException::getFailedOpIndex(int32_t code, const Coordination::
         if (responses[index]->error)
             return index;
 
-    if (!Coordination::isUserError(code))
-        throw DB::Exception("There are no failed OPs because '" + ZooKeeper::error2string(code) + "' is not valid response code for that",
+    if (!Coordination::isUserError(exception_code))
+        throw DB::Exception("There are no failed OPs because '" + ZooKeeper::error2string(exception_code) + "' is not valid response code for that",
                             DB::ErrorCodes::LOGICAL_ERROR);
 
     throw DB::Exception("There is no failed OpResult", DB::ErrorCodes::LOGICAL_ERROR);
 }
 
 
-KeeperMultiException::KeeperMultiException(int32_t code, const Coordination::Requests & requests, const Coordination::Responses & responses)
-    : KeeperException("Transaction failed", code),
-    requests(requests), responses(responses), failed_op_index(getFailedOpIndex(code, responses))
+KeeperMultiException::KeeperMultiException(int32_t exception_code, const Coordination::Requests & requests, const Coordination::Responses & responses)
+    : KeeperException("Transaction failed", exception_code),
+    requests(requests), responses(responses), failed_op_index(getFailedOpIndex(exception_code, responses))
 {
     addMessage("Op #" + std::to_string(failed_op_index) + ", path: " + getPathForFirstFailedOp());
 }

@@ -869,15 +869,15 @@ std::string KeeperMultiException::getPathForFirstFailedOp() const
     return requests[failed_op_index]->getPath();
 }
 
-void KeeperMultiException::check(int32_t code, const Coordination::Requests & requests, const Coordination::Responses & responses)
+void KeeperMultiException::check(int32_t exception_code, const Coordination::Requests & requests, const Coordination::Responses & responses)
 {
-    if (!code)
+    if (!exception_code)
         return;
 
-    if (Coordination::isUserError(code))
-        throw KeeperMultiException(code, requests, responses);
+    if (Coordination::isUserError(exception_code))
+        throw KeeperMultiException(exception_code, requests, responses);
     else
-        throw KeeperException(code);
+        throw KeeperException(exception_code);
 }
 
 
@@ -263,7 +263,8 @@ inline bool_if_not_safe_conversion<A, B> equalsOp(A a, B b)
 template <typename A, typename B>
 inline bool_if_safe_conversion<A, B> equalsOp(A a, B b)
 {
-    return a == b;
+    using LargestType = std::conditional_t<sizeof(A) >= sizeof(B), A, B>;
+    return static_cast<LargestType>(a) == static_cast<LargestType>(b);
 }
 
 template <>
@@ -94,7 +94,7 @@ void DataTypeArray::deserializeBinary(IColumn & column, ReadBuffer & istr) const
         throw;
     }
 
-    offsets.push_back((offsets.empty() ? 0 : offsets.back()) + size);
+    offsets.push_back(offsets.back() + size);
 }
 
 
@@ -255,7 +255,7 @@ void DataTypeArray::deserializeBinaryBulkWithMultipleStreams(
     IColumn & nested_column = column_array.getData();
 
     /// Number of values corresponding with `offset_values` must be read.
-    size_t last_offset = (offset_values.empty() ? 0 : offset_values.back());
+    size_t last_offset = offset_values.back();
     if (last_offset < nested_column.size())
         throw Exception("Nested column is longer than last offset", ErrorCodes::LOGICAL_ERROR);
     size_t nested_limit = last_offset - nested_column.size();

@@ -341,7 +341,7 @@ static void deserializeTextImpl(IColumn & column, ReadBuffer & istr, Reader && r
         throw;
     }
 
-    offsets.push_back((offsets.empty() ? 0 : offsets.back()) + size);
+    offsets.push_back(offsets.back() + size);
 }
 
 
@@ -507,13 +507,13 @@ void DatabaseOrdinary::shutdown()
 
 void DatabaseOrdinary::alterTable(
     const Context & context,
-    const String & name,
+    const String & table_name,
     const ColumnsDescription & columns,
     const ASTModifier & storage_modifier)
 {
     /// Read the definition of the table and replace the necessary parts with new ones.
 
-    String table_name_escaped = escapeForFileName(name);
+    String table_name_escaped = escapeForFileName(table_name);
     String table_metadata_tmp_path = metadata_path + "/" + table_name_escaped + ".sql.tmp";
     String table_metadata_path = metadata_path + "/" + table_name_escaped + ".sql";
     String statement;

@@ -23,8 +23,8 @@ namespace
 class ShellCommandOwningBlockInputStream : public OwningBlockInputStream<ShellCommand>
 {
 public:
-    ShellCommandOwningBlockInputStream(const BlockInputStreamPtr & stream, std::unique_ptr<ShellCommand> own)
-        : OwningBlockInputStream(std::move(stream), std::move(own))
+    ShellCommandOwningBlockInputStream(const BlockInputStreamPtr & impl, std::unique_ptr<ShellCommand> own_)
+        : OwningBlockInputStream(std::move(impl), std::move(own_))
     {
     }
 
@@ -88,7 +88,7 @@ Block BlockInputStreamFromRowInputStream::readImpl()
                 throw;
 
             ++num_errors;
-            Float64 current_error_ratio = static_cast<Float64>(num_errors) / total_rows;
+            Float32 current_error_ratio = static_cast<Float32>(num_errors) / total_rows;
 
             if (num_errors > allow_errors_num
                 && current_error_ratio > allow_errors_ratio)

@@ -45,7 +45,7 @@ private:
     BlockMissingValues block_missing_values;
 
     UInt64 allow_errors_num;
-    Float64 allow_errors_ratio;
+    Float32 allow_errors_ratio;
 
     size_t total_rows = 0;
     size_t num_errors = 0;

@@ -60,7 +60,7 @@ struct FormatSettings
     DateTimeInputFormat date_time_input_format = DateTimeInputFormat::Basic;
 
     UInt64 input_allow_errors_num = 0;
-    Float64 input_allow_errors_ratio = 0;
+    Float32 input_allow_errors_ratio = 0;
 };
 
 }
@@ -8,8 +8,8 @@
 namespace DB
 {
 
-JSONCompactRowOutputStream::JSONCompactRowOutputStream(WriteBuffer & ostr_, const Block & sample_, const FormatSettings & settings)
-    : JSONRowOutputStream(ostr_, sample_, settings)
+JSONCompactRowOutputStream::JSONCompactRowOutputStream(WriteBuffer & ostr_, const Block & sample_, const FormatSettings & settings_)
+    : JSONRowOutputStream(ostr_, sample_, settings_)
 {
 }
 

@@ -11,8 +11,8 @@ namespace DB
 class PrettyCompactBlockOutputStream : public PrettyBlockOutputStream
 {
 public:
-    PrettyCompactBlockOutputStream(WriteBuffer & ostr_, const Block & header_, const FormatSettings & format_settings)
-        : PrettyBlockOutputStream(ostr_, header_, format_settings) {}
+    PrettyCompactBlockOutputStream(WriteBuffer & ostr_, const Block & header_, const FormatSettings & format_settings_)
+        : PrettyBlockOutputStream(ostr_, header_, format_settings_) {}
 
     void write(const Block & block) override;
 

@@ -11,8 +11,8 @@ namespace DB
 class PrettySpaceBlockOutputStream : public PrettyBlockOutputStream
 {
 public:
-    PrettySpaceBlockOutputStream(WriteBuffer & ostr_, const Block & header_, const FormatSettings & format_settings)
-        : PrettyBlockOutputStream(ostr_, header_, format_settings) {}
+    PrettySpaceBlockOutputStream(WriteBuffer & ostr_, const Block & header_, const FormatSettings & format_settings_)
+        : PrettyBlockOutputStream(ostr_, header_, format_settings_) {}
 
     void write(const Block & block) override;
     void writeSuffix() override;

@@ -9,8 +9,8 @@
 namespace DB
 {
 
-TSKVRowOutputStream::TSKVRowOutputStream(WriteBuffer & ostr_, const Block & sample_, const FormatSettings & format_settings)
-    : TabSeparatedRowOutputStream(ostr_, sample_, false, false, format_settings)
+TSKVRowOutputStream::TSKVRowOutputStream(WriteBuffer & ostr_, const Block & sample_, const FormatSettings & format_settings_)
+    : TabSeparatedRowOutputStream(ostr_, sample_, false, false, format_settings_)
 {
     NamesAndTypesList columns(sample_.getNamesAndTypesList());
     fields.assign(columns.begin(), columns.end());

@@ -13,8 +13,8 @@ namespace DB
 class TabSeparatedRawRowOutputStream : public TabSeparatedRowOutputStream
 {
 public:
-    TabSeparatedRawRowOutputStream(WriteBuffer & ostr_, const Block & sample_, bool with_names_, bool with_types_, const FormatSettings & format_settings)
-        : TabSeparatedRowOutputStream(ostr_, sample_, with_names_, with_types_, format_settings) {}
+    TabSeparatedRawRowOutputStream(WriteBuffer & ostr_, const Block & sample_, bool with_names_, bool with_types_, const FormatSettings & format_settings_)
+        : TabSeparatedRowOutputStream(ostr_, sample_, with_names_, with_types_, format_settings_) {}
 
     void writeField(const IColumn & column, const IDataType & type, size_t row_num) override
     {
@@ -183,7 +183,7 @@ private:
 
         for (size_t i = 0; i < size; ++i)
         {
-            formatReadableSizeWithBinarySuffix(vec_from[i], buf_to);
+            formatReadableSizeWithBinarySuffix(static_cast<double>(vec_from[i]), buf_to);
             writeChar(0, buf_to);
             offsets_to[i] = buf_to.count();
         }
@@ -28,6 +28,7 @@ void registerFunctionsHashing(FunctionFactory & factory)
     factory.registerFunction<FunctionMurmurHash3_32>();
     factory.registerFunction<FunctionMurmurHash3_64>();
     factory.registerFunction<FunctionMurmurHash3_128>();
+    factory.registerFunction<FunctionGccMurmurHash>();
 
 #if USE_XXHASH
     factory.registerFunction<FunctionXxHash32>();

@@ -257,6 +257,25 @@ struct MurmurHash2Impl64
     static constexpr bool use_int_hash_for_pods = false;
 };
 
+/// To be compatible with gcc: https://github.com/gcc-mirror/gcc/blob/41d6b10e96a1de98e90a7c0378437c3255814b16/libstdc%2B%2B-v3/include/bits/functional_hash.h#L191
+struct GccMurmurHashImpl
+{
+    static constexpr auto name = "gccMurmurHash";
+    using ReturnType = UInt64;
+
+    static UInt64 apply(const char * data, const size_t size)
+    {
+        return MurmurHash64A(data, size, 0xc70f6907UL);
+    }
+
+    static UInt64 combineHashes(UInt64 h1, UInt64 h2)
+    {
+        return IntHash64Impl::apply(h1) ^ h2;
+    }
+
+    static constexpr bool use_int_hash_for_pods = false;
+};
+
 struct MurmurHash3Impl32
 {
     static constexpr auto name = "murmurHash3_32";

@@ -1070,6 +1089,7 @@ using FunctionFarmHash64 = FunctionAnyHash<ImplFarmHash64>;
 using FunctionMetroHash64 = FunctionAnyHash<ImplMetroHash64>;
 using FunctionMurmurHash2_32 = FunctionAnyHash<MurmurHash2Impl32>;
 using FunctionMurmurHash2_64 = FunctionAnyHash<MurmurHash2Impl64>;
+using FunctionGccMurmurHash = FunctionAnyHash<GccMurmurHashImpl>;
 using FunctionMurmurHash3_32 = FunctionAnyHash<MurmurHash3Impl32>;
 using FunctionMurmurHash3_64 = FunctionAnyHash<MurmurHash3Impl64>;
 using FunctionMurmurHash3_128 = FunctionStringHashFixedString<MurmurHash3Impl128>;
@@ -4,9 +4,11 @@
 
 #include <Functions/GatherUtils/Sources.h>
 #include <Functions/GatherUtils/Sinks.h>
+#include <Core/AccurateComparison.h>
 
 #include <ext/range.h>
 
 
 namespace DB::ErrorCodes
 {
     extern const int LOGICAL_ERROR;

@@ -31,7 +33,7 @@ void writeSlice(const NumericArraySlice<T> & slice, NumericArraySink<U> & sink)
     sink.elements.resize(sink.current_offset + slice.size);
     for (size_t i = 0; i < slice.size; ++i)
     {
-        sink.elements[sink.current_offset] = slice.data[i];
+        sink.elements[sink.current_offset] = static_cast<U>(slice.data[i]);
         ++sink.current_offset;
     }
 }

@@ -421,17 +423,12 @@ bool sliceHasImpl(const FirstSliceType & first, const SecondSliceType & second,
     return all;
 }
 
-#pragma GCC diagnostic push
-#pragma GCC diagnostic ignored "-Wsign-compare"
-
 template <typename T, typename U>
 bool sliceEqualElements(const NumericArraySlice<T> & first, const NumericArraySlice<U> & second, size_t first_ind, size_t second_ind)
 {
-    return first.data[first_ind] == second.data[second_ind];
+    return accurate::equalsOp(first.data[first_ind], second.data[second_ind]);
 }
 
-#pragma GCC diagnostic pop
-
 template <typename T>
 bool sliceEqualElements(const NumericArraySlice<T> &, const GenericArraySlice &, size_t, size_t)
 {
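Aside (not part of the commit): the `-Wsign-compare` pragmas can be dropped here because `accurate::equalsOp` compares mixed signed/unsigned values by value, instead of relying on the built-in `==`, which silently converts the signed operand to unsigned. The self-contained illustration below uses a simplified hand-rolled `safe_equals`; it is a stand-in for the idea, not the ClickHouse implementation.

```cpp
#include <cstdint>
#include <cstdio>
#include <type_traits>

// Hand-rolled sign-aware equality (simplified sketch in the spirit of accurate::equalsOp).
template <typename A, typename B>
bool safe_equals(A a, B b)
{
    if constexpr (std::is_signed_v<A> && std::is_unsigned_v<B>)
        return a >= 0 && static_cast<std::make_unsigned_t<A>>(a) == b;
    else if constexpr (std::is_unsigned_v<A> && std::is_signed_v<B>)
        return b >= 0 && a == static_cast<std::make_unsigned_t<B>>(b);
    else
        return a == b;
}

int main()
{
    int32_t  s = -1;
    uint32_t u = 0xFFFFFFFFu;

    // Built-in ==: s is converted to unsigned, so the comparison is (surprisingly) true.
    std::printf("builtin: %d\n", s == u);            // prints 1
    // Sign-aware comparison: -1 can never equal 4294967295.
    std::printf("safe:    %d\n", safe_equals(s, u)); // prints 0
    return 0;
}
```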
@@ -118,18 +118,18 @@ struct ConstSource : public Base
     size_t total_rows;
     size_t row_num = 0;
 
-    explicit ConstSource(const ColumnConst & col)
-        : Base(static_cast<const typename Base::Column &>(col.getDataColumn())), total_rows(col.size())
+    explicit ConstSource(const ColumnConst & col_)
+        : Base(static_cast<const typename Base::Column &>(col_.getDataColumn())), total_rows(col_.size())
     {
     }
 
     template <typename ColumnType>
-    ConstSource(const ColumnType & col, size_t total_rows) : Base(col), total_rows(total_rows)
+    ConstSource(const ColumnType & col_, size_t total_rows_) : Base(col_), total_rows(total_rows_)
     {
     }
 
     template <typename ColumnType>
-    ConstSource(const ColumnType & col, const NullMap & null_map, size_t total_rows) : Base(col, null_map), total_rows(total_rows)
+    ConstSource(const ColumnType & col_, const NullMap & null_map_, size_t total_rows_) : Base(col_, null_map_), total_rows(total_rows_)
     {
     }
 

@@ -34,6 +34,7 @@ struct ArrayFirstImpl
             const auto & offsets = array.getOffsets();
             const auto & data = array.getData();
             auto out = data.cloneEmpty();
+            out->reserve(data.size());
 
             size_t pos{};
             for (size_t i = 0; i < offsets.size(); ++i)

@@ -60,6+61,7 @@ struct ArrayFirstImpl
         const auto & offsets = array.getOffsets();
         const auto & data = array.getData();
         auto out = data.cloneEmpty();
+        out->reserve(data.size());
 
         size_t pos{};
         for (size_t i = 0; i < offsets.size(); ++i)
@@ -12,8 +12,8 @@ namespace DB
 class HashingReadBuffer : public IHashingBuffer<ReadBuffer>
 {
 public:
-    HashingReadBuffer(ReadBuffer & in_, size_t block_size = DBMS_DEFAULT_HASHING_BLOCK_SIZE) :
-        IHashingBuffer<ReadBuffer>(block_size), in(in_)
+    HashingReadBuffer(ReadBuffer & in_, size_t block_size_ = DBMS_DEFAULT_HASHING_BLOCK_SIZE) :
+        IHashingBuffer<ReadBuffer>(block_size_), in(in_)
     {
         working_buffer = in.buffer();
         pos = in.position();

@@ -53,14 +53,14 @@ ReadBufferFromFile::ReadBufferFromFile(
 
 
 ReadBufferFromFile::ReadBufferFromFile(
-    int fd,
+    int fd_,
     const std::string & original_file_name,
     size_t buf_size,
     char * existing_memory,
     size_t alignment)
     :
-    ReadBufferFromFileDescriptor(fd, buf_size, existing_memory, alignment),
-    file_name(original_file_name.empty() ? "(fd = " + toString(fd) + ")" : original_file_name)
+    ReadBufferFromFileDescriptor(fd_, buf_size, existing_memory, alignment),
+    file_name(original_file_name.empty() ? "(fd = " + toString(fd_) + ")" : original_file_name)
 {
 }
 
@@ -96,13 +96,13 @@ class ReadWriteBufferFromHTTP : public detail::ReadWriteBufferFromHTTPBase<HTTPS
     using Parent = detail::ReadWriteBufferFromHTTPBase<HTTPSessionPtr>;
 
 public:
-    explicit ReadWriteBufferFromHTTP(Poco::URI uri,
-        const std::string & method = {},
+    explicit ReadWriteBufferFromHTTP(Poco::URI uri_,
+        const std::string & method_ = {},
         OutStreamCallback out_stream_callback = {},
         const ConnectionTimeouts & timeouts = {},
         const Poco::Net::HTTPBasicCredentials & credentials = {},
         size_t buffer_size_ = DBMS_DEFAULT_BUFFER_SIZE)
-        : Parent(makeHTTPSession(uri, timeouts), uri, method, out_stream_callback, credentials, buffer_size_)
+        : Parent(makeHTTPSession(uri_, timeouts), uri_, method_, out_stream_callback, credentials, buffer_size_)
     {
     }
 };

@@ -111,16 +111,16 @@ class PooledReadWriteBufferFromHTTP : public detail::ReadWriteBufferFromHTTPBase
     using Parent = detail::ReadWriteBufferFromHTTPBase<PooledHTTPSessionPtr>;
 
 public:
-    explicit PooledReadWriteBufferFromHTTP(Poco::URI uri,
-        const std::string & method = {},
+    explicit PooledReadWriteBufferFromHTTP(Poco::URI uri_,
+        const std::string & method_ = {},
         OutStreamCallback out_stream_callback = {},
         const ConnectionTimeouts & timeouts = {},
         const Poco::Net::HTTPBasicCredentials & credentials = {},
         size_t buffer_size_ = DBMS_DEFAULT_BUFFER_SIZE,
         size_t max_connections_per_endpoint = DEFAULT_COUNT_OF_HTTP_CONNECTIONS_PER_ENDPOINT)
-        : Parent(makePooledHTTPSession(uri, timeouts, max_connections_per_endpoint),
-            uri,
-            method,
+        : Parent(makePooledHTTPSession(uri_, timeouts, max_connections_per_endpoint),
+            uri_,
+            method_,
             out_stream_callback,
             credentials,
             buffer_size_)
@@ -58,14 +58,14 @@ WriteBufferFromFile::WriteBufferFromFile(
 
 /// Use pre-opened file descriptor.
 WriteBufferFromFile::WriteBufferFromFile(
-    int fd,
+    int fd_,
     const std::string & original_file_name,
     size_t buf_size,
     char * existing_memory,
    size_t alignment)
     :
-    WriteBufferFromFileDescriptor(fd, buf_size, existing_memory, alignment),
-    file_name(original_file_name.empty() ? "(fd = " + toString(fd) + ")" : original_file_name)
+    WriteBufferFromFileDescriptor(fd_, buf_size, existing_memory, alignment),
+    file_name(original_file_name.empty() ? "(fd = " + toString(fd_) + ")" : original_file_name)
 {
 }
 

@@ -16,9 +16,7 @@ namespace ErrorCodes
 
 
 WriteBufferFromTemporaryFile::WriteBufferFromTemporaryFile(std::unique_ptr<Poco::TemporaryFile> && tmp_file_)
-    :
-    WriteBufferFromFile(tmp_file_->path(), DBMS_DEFAULT_BUFFER_SIZE, O_RDWR | O_TRUNC | O_CREAT, 0600),
-    tmp_file(std::move(tmp_file_))
+    : WriteBufferFromFile(tmp_file_->path(), DBMS_DEFAULT_BUFFER_SIZE, O_RDWR | O_TRUNC | O_CREAT, 0600), tmp_file(std::move(tmp_file_))
 {}
 
 

@@ -34,7 +32,6 @@ WriteBufferFromTemporaryFile::Ptr WriteBufferFromTemporaryFile::create(const std
 class ReadBufferFromTemporaryWriteBuffer : public ReadBufferFromFile
 {
 public:
-
     static ReadBufferPtr createFrom(WriteBufferFromTemporaryFile * origin)
     {
         int fd = origin->getFD();

@@ -47,8 +44,8 @@ public:
         return std::make_shared<ReadBufferFromTemporaryWriteBuffer>(fd, file_name, std::move(origin->tmp_file));
     }
 
-    ReadBufferFromTemporaryWriteBuffer(int fd, const std::string & file_name, std::unique_ptr<Poco::TemporaryFile> && tmp_file_)
-        : ReadBufferFromFile(fd, file_name), tmp_file(std::move(tmp_file_))
+    ReadBufferFromTemporaryWriteBuffer(int fd_, const std::string & file_name_, std::unique_ptr<Poco::TemporaryFile> && tmp_file_)
+        : ReadBufferFromFile(fd_, file_name_), tmp_file(std::move(tmp_file_))
     {}
 
     std::unique_ptr<Poco::TemporaryFile> tmp_file;
@@ -273,7 +273,7 @@ void Compiler::compile(
         << " 2>&1"
         ") || echo Return code: $?";
 
-#if !NDEBUG
+#ifndef NDEBUG
     LOG_TRACE(log, "Compile command: " << command.str());
 #endif
 
@@ -93,12 +93,12 @@ void PartLogElement::appendToBlock(Block & block) const
 }
 
 
-bool PartLog::addNewPart(Context & context, const MutableDataPartPtr & part, UInt64 elapsed_ns, const ExecutionStatus & execution_status)
+bool PartLog::addNewPart(Context & current_context, const MutableDataPartPtr & part, UInt64 elapsed_ns, const ExecutionStatus & execution_status)
 {
-    return addNewParts(context, {part}, elapsed_ns, execution_status);
+    return addNewParts(current_context, {part}, elapsed_ns, execution_status);
 }
 
-bool PartLog::addNewParts(Context & context, const PartLog::MutableDataPartsVector & parts, UInt64 elapsed_ns,
+bool PartLog::addNewParts(Context & current_context, const PartLog::MutableDataPartsVector & parts, UInt64 elapsed_ns,
                           const ExecutionStatus & execution_status)
 {
     if (parts.empty())

@@ -108,7 +108,7 @@ bool PartLog::addNewParts(Context & context, const PartLog::MutableDataPartsVect
 
     try
     {
-        part_log = context.getPartLog(parts.front()->storage.getDatabaseName()); // assume parts belong to the same table
+        part_log = current_context.getPartLog(parts.front()->storage.getDatabaseName()); // assume parts belong to the same table
         if (!part_log)
             return false;
 
@ -41,7 +41,7 @@ void dumpToArrayColumns(const Counters & counters, DB::IColumn * column_names_,
|
|||||||
if (column_names)
|
if (column_names)
|
||||||
{
|
{
|
||||||
auto & offsets = column_names->getOffsets();
|
auto & offsets = column_names->getOffsets();
|
||||||
offsets.push_back((offsets.empty() ? 0 : offsets.back()) + size);
|
offsets.push_back(offsets.back() + size);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (column_values)
|
if (column_values)
|
||||||
@ -51,7 +51,7 @@ void dumpToArrayColumns(const Counters & counters, DB::IColumn * column_names_,
|
|||||||
if (!the_same_offsets)
|
if (!the_same_offsets)
|
||||||
{
|
{
|
||||||
auto & offsets = column_values->getOffsets();
|
auto & offsets = column_values->getOffsets();
|
||||||
offsets.push_back((offsets.empty() ? 0 : offsets.back()) + size);
|
offsets.push_back(offsets.back() + size);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@@ -202,7 +202,7 @@ void Settings::dumpToArrayColumns(IColumn * column_names_, IColumn * column_valu
     if (column_names)
    {
         auto & offsets = column_names->getOffsets();
-        offsets.push_back((offsets.empty() ? 0 : offsets.back()) + size);
+        offsets.push_back(offsets.back() + size);
     }

     /// Nested columns case
@@ -211,7 +211,7 @@ void Settings::dumpToArrayColumns(IColumn * column_names_, IColumn * column_valu
     if (column_values && !the_same_offsets)
     {
         auto & offsets = column_values->getOffsets();
-        offsets.push_back((offsets.empty() ? 0 : offsets.back()) + size);
+        offsets.push_back(offsets.back() + size);
     }
 }

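For context on the `offsets.push_back(...)` simplifications above: the offsets of an Array column are cumulative end positions, so appending a row with `size` nested elements always means pushing `previous_end + size`. The explicit empty check can apparently be dropped because ClickHouse's offsets container is a padded array where reading one element before the beginning yields zero, so `offsets.back()` is well defined even when the column is empty; with an ordinary `std::vector` the guarded form would still be required. A sketch of the invariant, assuming a plain vector as a stand-in for the real offsets column:

```
// Sketch of the cumulative-offsets invariant, using std::vector as a stand-in
// for the real IColumn offsets container (an assumption for illustration).
#include <cassert>
#include <cstdint>
#include <vector>

using UInt64 = std::uint64_t;

// offsets[i] is the total number of nested elements in rows 0..i,
// so row i spans the half-open range [offsets[i - 1], offsets[i]).
void appendRow(std::vector<UInt64> & offsets, UInt64 size)
{
    UInt64 previous_end = offsets.empty() ? 0 : offsets.back();  // guard needed for std::vector
    offsets.push_back(previous_end + size);
}

int main()
{
    std::vector<UInt64> offsets;
    appendRow(offsets, 3);  // row 0 holds 3 elements
    appendRow(offsets, 2);  // row 1 holds 2 elements
    assert(offsets[0] == 3 && offsets[1] == 5);
    return 0;
}
```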
@@ -308,6 +308,17 @@ const ASTTablesInSelectQueryElement * ASTSelectQuery::join() const
     return getFirstTableJoin(*this);
 }

+static String getTableExpressionAlias(const ASTTableExpression * table_expression)
+{
+    if (table_expression->subquery)
+        return table_expression->subquery->tryGetAlias();
+    else if (table_expression->table_function)
+        return table_expression->table_function->tryGetAlias();
+    else if (table_expression->database_and_table_name)
+        return table_expression->database_and_table_name->tryGetAlias();
+
+    return String();
+}

 void ASTSelectQuery::replaceDatabaseAndTable(const String & database_name, const String & table_name)
 {
@@ -326,7 +337,11 @@ void ASTSelectQuery::replaceDatabaseAndTable(const String & database_name, const
         table_expression = table_expr.get();
     }

+    String table_alias = getTableExpressionAlias(table_expression);
     table_expression->database_and_table_name = createDatabaseAndTableNode(database_name, table_name);
+
+    if (!table_alias.empty())
+        table_expression->database_and_table_name->setAlias(table_alias);
 }


@@ -347,8 +362,13 @@ void ASTSelectQuery::addTableFunction(ASTPtr & table_function_ptr)
         table_expression = table_expr.get();
     }

-    table_expression->table_function = table_function_ptr;
+    String table_alias = getTableExpressionAlias(table_expression);
+    /// Maybe need to modify the alias, so we should clone new table_function node
+    table_expression->table_function = table_function_ptr->clone();
     table_expression->database_and_table_name = nullptr;
+
+    if (table_alias.empty())
+        table_expression->table_function->setAlias(table_alias);
 }

 }
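For context on the `getTableExpressionAlias` additions above: when a query rewrite swaps the target of a table expression (for example, replacing `db.table` with a table function, or the other way around), any alias the original expression carried has to be copied onto the new node, otherwise qualified references such as `T1.column` elsewhere in the query stop resolving. A rough sketch of the carry-over idea, with plain structs standing in for the real AST classes (the names here are illustrative, not the actual ClickHouse API):

```
// Rough sketch of alias carry-over when a table expression's target is replaced.
// Plain structs stand in for the real AST nodes; this is illustrative only.
#include <cassert>
#include <memory>
#include <string>

struct Node
{
    std::string alias;
    void setAlias(const std::string & value) { alias = value; }
    std::string tryGetAlias() const { return alias; }
};

struct TableExpression
{
    std::shared_ptr<Node> database_and_table_name;
    std::shared_ptr<Node> table_function;
};

// Whichever form the expression currently has, remember its alias before replacing it.
std::string getAlias(const TableExpression & expr)
{
    if (expr.table_function)
        return expr.table_function->tryGetAlias();
    if (expr.database_and_table_name)
        return expr.database_and_table_name->tryGetAlias();
    return {};
}

int main()
{
    TableExpression expr;
    expr.database_and_table_name = std::make_shared<Node>();
    expr.database_and_table_name->setAlias("T1");

    std::string saved_alias = getAlias(expr);

    // Replace the target with a new node, then restore the alias.
    expr.database_and_table_name = std::make_shared<Node>();
    if (!saved_alias.empty())
        expr.database_and_table_name->setAlias(saved_alias);

    assert(expr.database_and_table_name->tryGetAlias() == "T1");
    return 0;
}
```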
@@ -513,8 +513,7 @@ void MergeTreeReader::fillMissingColumns(Block & res, bool & should_reorder, boo
            {
                ColumnPtr offsets_column = offset_columns[offsets_name];
                DataTypePtr nested_type = typeid_cast<const DataTypeArray &>(*column_to_add.type).getNestedType();
-                size_t nested_rows = offsets_column->empty() ? 0
-                    : typeid_cast<const ColumnUInt64 &>(*offsets_column).getData().back();
+                size_t nested_rows = typeid_cast<const ColumnUInt64 &>(*offsets_column).getData().back();

                ColumnPtr nested_column = nested_type->createColumnConstWithDefaultValue(nested_rows)->convertToFullColumnIfConst();

@@ -22,18 +22,18 @@ MergeTreeSelectBlockInputStream::MergeTreeSelectBlockInputStream(
     Names column_names,
     const MarkRanges & mark_ranges_,
     bool use_uncompressed_cache_,
-    const PrewhereInfoPtr & prewhere_info,
+    const PrewhereInfoPtr & prewhere_info_,
     bool check_columns,
     size_t min_bytes_to_use_direct_io_,
     size_t max_read_buffer_size_,
     bool save_marks_in_cache_,
-    const Names & virt_column_names,
+    const Names & virt_column_names_,
     size_t part_index_in_query_,
     bool quiet)
     :
-    MergeTreeBaseSelectBlockInputStream{storage_, prewhere_info, max_block_size_rows_,
+    MergeTreeBaseSelectBlockInputStream{storage_, prewhere_info_, max_block_size_rows_,
     preferred_block_size_bytes_, preferred_max_column_in_block_size_bytes_, min_bytes_to_use_direct_io_,
-    max_read_buffer_size_, use_uncompressed_cache_, save_marks_in_cache_, virt_column_names},
+    max_read_buffer_size_, use_uncompressed_cache_, save_marks_in_cache_, virt_column_names_},
     required_columns{column_names},
     data_part{owned_data_part_},
     part_columns_lock(data_part->columns_lock),
@@ -11,18 +11,18 @@ MergeTreeThreadSelectBlockInputStream::MergeTreeThreadSelectBlockInputStream(
     const size_t thread,
     const MergeTreeReadPoolPtr & pool,
     const size_t min_marks_to_read_,
-    const size_t max_block_size_rows,
-    size_t preferred_block_size_bytes,
-    size_t preferred_max_column_in_block_size_bytes,
-    const MergeTreeData & storage,
-    const bool use_uncompressed_cache,
-    const PrewhereInfoPtr & prewhere_info,
+    const size_t max_block_size_rows_,
+    size_t preferred_block_size_bytes_,
+    size_t preferred_max_column_in_block_size_bytes_,
+    const MergeTreeData & storage_,
+    const bool use_uncompressed_cache_,
+    const PrewhereInfoPtr & prewhere_info_,
     const Settings & settings,
-    const Names & virt_column_names)
+    const Names & virt_column_names_)
     :
-    MergeTreeBaseSelectBlockInputStream{storage, prewhere_info, max_block_size_rows,
-    preferred_block_size_bytes, preferred_max_column_in_block_size_bytes, settings.min_bytes_to_use_direct_io,
-    settings.max_read_buffer_size, use_uncompressed_cache, true, virt_column_names},
+    MergeTreeBaseSelectBlockInputStream{storage_, prewhere_info_, max_block_size_rows_,
+    preferred_block_size_bytes_, preferred_max_column_in_block_size_bytes_, settings.min_bytes_to_use_direct_io,
+    settings.max_read_buffer_size, use_uncompressed_cache_, true, virt_column_names_},
     thread{thread},
     pool{pool}
 {
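For context on the renamed constructor parameters above (`storage` to `storage_`, `prewhere_info` to `prewhere_info_`, and so on): suffixing a parameter with an underscore keeps it from shadowing the member it initializes, which silences `-Wshadow` and makes the initializer list unambiguous. A small illustration, assuming a simplified stand-in class rather than the actual MergeTree stream interface:

```
// Small illustration of why constructor parameters get a trailing underscore.
// Simplified stand-in class; not the actual MergeTree stream interface.
struct BlockStream
{
    int max_block_size_rows;
    bool use_uncompressed_cache;

    BlockStream(int max_block_size_rows_, bool use_uncompressed_cache_)
        : max_block_size_rows(max_block_size_rows_)          // member <- parameter, unambiguous
        , use_uncompressed_cache(use_uncompressed_cache_)    // no shadowing, so -Wshadow stays quiet
    {
    }
};

int main()
{
    BlockStream stream(8192, true);
    return (stream.max_block_size_rows == 8192 && stream.use_uncompressed_cache) ? 0 : 1;
}
```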
@@ -47,13 +47,13 @@ sudo -u clickhouse TSAN_OPTIONS='halt_on_error=1,suppressions=../dbms/tests/tsan
 # How to use Undefined Behaviour Sanitizer

 ```
-CC=clang CXX=clang++ mkdir build_ubsan && cd build_ubsan
+mkdir build_ubsan && cd build_ubsan
 ```

 Note: clang is mandatory, because gcc (in version 8) has false positives due to devirtualization and it has less amount of checks.

 ```
-cmake -D SANITIZE=undefined ..
+CC=clang CXX=clang++ cmake -D SANITIZE=undefined ..
 ninja
 ```

@@ -22,5 +22,8 @@
 11303473983767132390
 956517343494314387
 956517343494314387
+9631199822919835226
+4334672815104069193
+4334672815104069193
 6145F501578671E2877DBA2BE487AF7E
 16FE7483905CCE7A85670E43E4678877
@@ -28,5 +28,10 @@ SELECT murmurHash3_64('foo');
 SELECT murmurHash3_64('\x01');
 SELECT murmurHash3_64(1);
+
+SELECT gccMurmurHash('foo');
+SELECT gccMurmurHash('\x01');
+SELECT gccMurmurHash(1);
+
 SELECT hex(murmurHash3_128('foo'));
 SELECT hex(murmurHash3_128('\x01'));

@@ -13,3 +13,6 @@
 8163029322371165472
 8788309436660676487
 236561483980029756
+12384823029245979431
+4507350192761038840
+1188926775431157506
@@ -17,3 +17,7 @@ SELECT murmurHash2_64(('a', [1, 2, 3], 4, (4, ['foo', 'bar'], 1, (1, 2))));
 SELECT murmurHash3_64(1, 2, 3);
 SELECT murmurHash3_64(1, 3, 2);
 SELECT murmurHash3_64(('a', [1, 2, 3], 4, (4, ['foo', 'bar'], 1, (1, 2))));
+
+SELECT gccMurmurHash(1, 2, 3);
+SELECT gccMurmurHash(1, 3, 2);
+SELECT gccMurmurHash(('a', [1, 2, 3], 4, (4, ['foo', 'bar'], 1, (1, 2))));
@@ -0,0 +1,3 @@
+1
+1
+1
@@ -0,0 +1,12 @@
+DROP TABLE IF EXISTS test.table1;
+DROP TABLE IF EXISTS test.table2;
+
+CREATE TABLE test.table1 AS system.columns ENGINE = Distributed('test_shard_localhost', system, columns);
+CREATE TABLE test.table2 AS system.tables ENGINE = Distributed('test_shard_localhost', system, tables);
+
+SELECT 1 FROM test.table1 T1 ALL INNER JOIN test.table2 T2 ON T1.table = T2.name LIMIT 1;
+SELECT 1 FROM cluster('test_shard_localhost', system.columns) T1 ALL INNER JOIN cluster('test_shard_localhost', system.tables) T2 ON T1.table = T2.name LIMIT 1;
+SELECT 1 FROM (SELECT * FROM test.table1) T1 ALL INNER JOIN (SELECT * FROM test.table2) T2 ON T1.table = T2.name LIMIT 1;
+
+DROP TABLE IF EXISTS test.table1;
+DROP TABLE IF EXISTS test.table2;
@@ -0,0 +1 @@
+0
@@ -0,0 +1 @@
+SELECT sum(ignore(*)) FROM (SELECT arrayFirst(x -> empty(x), [[number]]) FROM numbers(10000000));