From c44398212d9b5f3385289443e823cd74a000ac8f Mon Sep 17 00:00:00 2001 From: Karl Pietrzak Date: Fri, 14 Sep 2018 14:26:43 -0400 Subject: [PATCH 01/31] WIP: basic rate() function --- .../AggregateFunctionRate.cpp | 53 ++++++ .../AggregateFunctionRate.h | 158 ++++++++++++++++++ .../registerAggregateFunctions.cpp | 2 + .../00715_aggregation_rate.reference | 2 + .../0_stateless/00715_aggregation_rate.sql | 16 ++ 5 files changed, 231 insertions(+) create mode 100644 dbms/src/AggregateFunctions/AggregateFunctionRate.cpp create mode 100644 dbms/src/AggregateFunctions/AggregateFunctionRate.h create mode 100644 dbms/tests/queries/0_stateless/00715_aggregation_rate.reference create mode 100644 dbms/tests/queries/0_stateless/00715_aggregation_rate.sql diff --git a/dbms/src/AggregateFunctions/AggregateFunctionRate.cpp b/dbms/src/AggregateFunctions/AggregateFunctionRate.cpp new file mode 100644 index 00000000000..e2d6e0f1aff --- /dev/null +++ b/dbms/src/AggregateFunctions/AggregateFunctionRate.cpp @@ -0,0 +1,53 @@ +#include +#include + +#include +#include + +#include +#include + +#include +#include +#include + +#include +#include +#include +#include +#include +#include + + +namespace DB +{ +namespace ErrorCodes +{ + extern const int NUMBER_OF_ARGUMENTS_DOESNT_MATCH; + extern const int LOGICAL_ERROR; + extern const int ILLEGAL_TYPE_OF_ARGUMENT; +} + +namespace +{ + +AggregateFunctionPtr createAggregateFunctionRate(const std::string & name, const DataTypes & argument_types, const Array & parameters) +{ + assertNoParameters(name, parameters); + + if (argument_types.size() < 2) + throw Exception("Aggregate function " + name + " requires at least two arguments, with the first being a timestamp", + ErrorCodes::NUMBER_OF_ARGUMENTS_DOESNT_MATCH); + + + return std::make_shared(argument_types, parameters); +} + +} + +void registerAggregateFunctionRate(AggregateFunctionFactory & factory) +{ + factory.registerFunction("rate", createAggregateFunctionRate, AggregateFunctionFactory::CaseInsensitive); +} + +} diff --git a/dbms/src/AggregateFunctions/AggregateFunctionRate.h b/dbms/src/AggregateFunctions/AggregateFunctionRate.h new file mode 100644 index 00000000000..ededb0f0cb3 --- /dev/null +++ b/dbms/src/AggregateFunctions/AggregateFunctionRate.h @@ -0,0 +1,158 @@ +#pragma once + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include + + +namespace DB +{ + +struct AggregateFunctionRateData +{ + using TimestampEvent = std::pair; + + bool is_first = false; + + TimestampEvent first_event; + TimestampEvent last_event; + + void add(UInt32 timestamp, Float64 f) + { + if(this->is_first) { + first_event = TimestampEvent{timestamp, f}; + is_first = true; + } else { + last_event = TimestampEvent{timestamp, f}; + } + } + + void merge(const AggregateFunctionRateData & other) + { + // if the arg is earlier than us, replace us with them + if(other.first_event.first < first_event.first) { + first_event = other.first_event; + } + // if the arg is _later_ than us, replace us with them + if(other.last_event.first > last_event.second) { + last_event = other.last_event; + } + + } + void serialize(WriteBuffer & buf) const + { + writeBinary(is_first, buf); + writeBinary(first_event.first, buf); + writeBinary(first_event.second, buf); + + writeBinary(last_event.first, buf); + writeBinary(last_event.second, buf); + } + + void deserialize(ReadBuffer & buf) + { + readBinary(is_first, buf); + + readBinary(first_event.first, buf); + 
readBinary(first_event.second, buf); + + readBinary(last_event.first, buf); + readBinary(last_event.second, buf); + } +}; + +class AggregateFunctionRate final + : public IAggregateFunctionDataHelper +{ +private: + /* + * implements a basic derivative function + * + * (y2 - y1) / (x2 - x1) + */ + Float64 getRate(const AggregateFunctionRateData & data) const + { + if (data.first_event.first == 0) + return 0; + if(data.last_event.first == 0) + return 0; + // void divide by zero in denominator + if(data.last_event.first == data.first_event.first) + return 0; + + return (data.last_event.second - data.first_event.second) / (data.last_event.first - data.first_event.first); + } + +public: + String getName() const override + { + return "rate"; + } + + AggregateFunctionRate(const DataTypes & arguments, const Array & params) + { + const auto time_arg = arguments.front().get(); + if (!typeid_cast(time_arg) && !typeid_cast(time_arg)) + throw Exception{"Illegal type " + time_arg->getName() + " of first argument of aggregate function " + getName() + + ", must be DateTime or UInt32"}; + + const auto number_arg = arguments.at(1).get(); + if (!number_arg->isNumber()) + throw Exception{"Illegal type " + number_arg->getName() + " of argument " + toString(1) + " of aggregate function " + + getName() + ", must be a Number", + ErrorCodes::ILLEGAL_TYPE_OF_ARGUMENT}; + + } + + + DataTypePtr getReturnType() const override + { + return std::make_shared(); + } + + void add(AggregateDataPtr place, const IColumn ** columns, const size_t row_num, Arena *) const override + { + const auto timestamp = static_cast *>(columns[0])->getData()[row_num]; + const auto value = static_cast *>(columns[1])->getData()[row_num]; + this->data(place).add(timestamp, value); + } + + void merge(AggregateDataPtr place, ConstAggregateDataPtr rhs, Arena *) const override + { + this->data(place).merge(this->data(rhs)); + } + + void serialize(ConstAggregateDataPtr place, WriteBuffer & buf) const override + { + this->data(place).serialize(buf); + } + + void deserialize(AggregateDataPtr place, ReadBuffer & buf, Arena *) const override + { + this->data(place).deserialize(buf); + } + + void insertResultInto(ConstAggregateDataPtr place, IColumn & to) const override + { + static_cast(to).getData().push_back(getRate(this->data(place))); + } + + const char * getHeaderFilePath() const override + { + return __FILE__; + } +}; + +} diff --git a/dbms/src/AggregateFunctions/registerAggregateFunctions.cpp b/dbms/src/AggregateFunctions/registerAggregateFunctions.cpp index 3517ad57a73..211ce7e93be 100644 --- a/dbms/src/AggregateFunctions/registerAggregateFunctions.cpp +++ b/dbms/src/AggregateFunctions/registerAggregateFunctions.cpp @@ -15,6 +15,7 @@ void registerAggregateFunctionGroupArrayInsertAt(AggregateFunctionFactory &); void registerAggregateFunctionsQuantile(AggregateFunctionFactory &); void registerAggregateFunctionsSequenceMatch(AggregateFunctionFactory &); void registerAggregateFunctionWindowFunnel(AggregateFunctionFactory &); +void registerAggregateFunctionRate(AggregateFunctionFactory &); void registerAggregateFunctionsMinMaxAny(AggregateFunctionFactory &); void registerAggregateFunctionsStatisticsStable(AggregateFunctionFactory &); void registerAggregateFunctionsStatisticsSimple(AggregateFunctionFactory &); @@ -49,6 +50,7 @@ void registerAggregateFunctions() registerAggregateFunctionsQuantile(factory); registerAggregateFunctionsSequenceMatch(factory); registerAggregateFunctionWindowFunnel(factory); + registerAggregateFunctionRate(factory); 
registerAggregateFunctionsMinMaxAny(factory); registerAggregateFunctionsStatisticsStable(factory); registerAggregateFunctionsStatisticsSimple(factory); diff --git a/dbms/tests/queries/0_stateless/00715_aggregation_rate.reference b/dbms/tests/queries/0_stateless/00715_aggregation_rate.reference new file mode 100644 index 00000000000..6ed281c757a --- /dev/null +++ b/dbms/tests/queries/0_stateless/00715_aggregation_rate.reference @@ -0,0 +1,2 @@ +1 +1 diff --git a/dbms/tests/queries/0_stateless/00715_aggregation_rate.sql b/dbms/tests/queries/0_stateless/00715_aggregation_rate.sql new file mode 100644 index 00000000000..8cea70a09b1 --- /dev/null +++ b/dbms/tests/queries/0_stateless/00715_aggregation_rate.sql @@ -0,0 +1,16 @@ +drop table if exists rate_test; + +create table rate_test (timestamp UInt32, event UInt32) engine=Memory; +insert into rate_test values (0,1000),(1,1001),(2,1002),(3,1003),(4,1004),(5,1005),(6,1006),(7,1007),(8,1008); + +select 1.0 = rate(timestamp, event) from rate_test; + +drop table if exists rate_test2; +create table rate_test2 (uid UInt32 default 1,timestamp DateTime, event UInt32) engine=Memory; +insert into rate_test2(timestamp, event) values ('2018-01-01 01:01:01',1001),('2018-01-01 01:01:02',1002),('2018-01-01 01:01:03',1003),('2018-01-01 01:01:04',1004),('2018-01-01 01:01:05',1005),('2018-01-01 01:01:06',1006),('2018-01-01 01:01:07',1007),('2018-01-01 01:01:08',1008); + + +select 1.0 = rate(timestamp, event ) from rate_test2; + +drop table rate_test; +drop table rate_test2; From dd3f57dd5db87730e7525ad03ace6e66ce47580a Mon Sep 17 00:00:00 2001 From: Alexey Milovidov Date: Thu, 1 Nov 2018 20:54:53 +0300 Subject: [PATCH 02/31] Fixed some code (incomplete) #3139 --- ...gregateFunctionRate.cpp => AggregateFunctionBoundingRatio.cpp} | 0 .../{AggregateFunctionRate.h => AggregateFunctionBoundingRatio.h} | 0 ..._aggregation_rate.reference => 00715_bounding_ratio.reference} | 0 .../{00715_aggregation_rate.sql => 00715_bounding_ratio.sql} | 0 4 files changed, 0 insertions(+), 0 deletions(-) rename dbms/src/AggregateFunctions/{AggregateFunctionRate.cpp => AggregateFunctionBoundingRatio.cpp} (100%) rename dbms/src/AggregateFunctions/{AggregateFunctionRate.h => AggregateFunctionBoundingRatio.h} (100%) rename dbms/tests/queries/0_stateless/{00715_aggregation_rate.reference => 00715_bounding_ratio.reference} (100%) rename dbms/tests/queries/0_stateless/{00715_aggregation_rate.sql => 00715_bounding_ratio.sql} (100%) diff --git a/dbms/src/AggregateFunctions/AggregateFunctionRate.cpp b/dbms/src/AggregateFunctions/AggregateFunctionBoundingRatio.cpp similarity index 100% rename from dbms/src/AggregateFunctions/AggregateFunctionRate.cpp rename to dbms/src/AggregateFunctions/AggregateFunctionBoundingRatio.cpp diff --git a/dbms/src/AggregateFunctions/AggregateFunctionRate.h b/dbms/src/AggregateFunctions/AggregateFunctionBoundingRatio.h similarity index 100% rename from dbms/src/AggregateFunctions/AggregateFunctionRate.h rename to dbms/src/AggregateFunctions/AggregateFunctionBoundingRatio.h diff --git a/dbms/tests/queries/0_stateless/00715_aggregation_rate.reference b/dbms/tests/queries/0_stateless/00715_bounding_ratio.reference similarity index 100% rename from dbms/tests/queries/0_stateless/00715_aggregation_rate.reference rename to dbms/tests/queries/0_stateless/00715_bounding_ratio.reference diff --git a/dbms/tests/queries/0_stateless/00715_aggregation_rate.sql b/dbms/tests/queries/0_stateless/00715_bounding_ratio.sql similarity index 100% rename from 
dbms/tests/queries/0_stateless/00715_aggregation_rate.sql rename to dbms/tests/queries/0_stateless/00715_bounding_ratio.sql From accda8bed2fde3699ab9b1a2391b773f949aae63 Mon Sep 17 00:00:00 2001 From: Alexey Milovidov Date: Thu, 1 Nov 2018 20:55:11 +0300 Subject: [PATCH 03/31] Fixed some code (incomplete) #3139 --- .../AggregateFunctionBoundingRatio.cpp | 29 +-- .../AggregateFunctionBoundingRatio.h | 221 +++++++++--------- .../AggregateFunctionHistogram.cpp | 9 +- 3 files changed, 125 insertions(+), 134 deletions(-) diff --git a/dbms/src/AggregateFunctions/AggregateFunctionBoundingRatio.cpp b/dbms/src/AggregateFunctions/AggregateFunctionBoundingRatio.cpp index e2d6e0f1aff..88dc5bda29d 100644 --- a/dbms/src/AggregateFunctions/AggregateFunctionBoundingRatio.cpp +++ b/dbms/src/AggregateFunctions/AggregateFunctionBoundingRatio.cpp @@ -1,31 +1,14 @@ -#include -#include - -#include -#include - -#include -#include - -#include -#include -#include - #include -#include -#include +#include #include -#include -#include namespace DB { + namespace ErrorCodes { extern const int NUMBER_OF_ARGUMENTS_DOESNT_MATCH; - extern const int LOGICAL_ERROR; - extern const int ILLEGAL_TYPE_OF_ARGUMENT; } namespace @@ -34,20 +17,20 @@ namespace AggregateFunctionPtr createAggregateFunctionRate(const std::string & name, const DataTypes & argument_types, const Array & parameters) { assertNoParameters(name, parameters); + assertBinary(name, argument_types); if (argument_types.size() < 2) - throw Exception("Aggregate function " + name + " requires at least two arguments, with the first being a timestamp", + throw Exception("Aggregate function " + name + " requires at least two arguments", ErrorCodes::NUMBER_OF_ARGUMENTS_DOESNT_MATCH); - - return std::make_shared(argument_types, parameters); + return std::make_shared(argument_types); } } void registerAggregateFunctionRate(AggregateFunctionFactory & factory) { - factory.registerFunction("rate", createAggregateFunctionRate, AggregateFunctionFactory::CaseInsensitive); + factory.registerFunction("boundingRatio", createAggregateFunctionRate, AggregateFunctionFactory::CaseInsensitive); } } diff --git a/dbms/src/AggregateFunctions/AggregateFunctionBoundingRatio.h b/dbms/src/AggregateFunctions/AggregateFunctionBoundingRatio.h index ededb0f0cb3..3cc6d92547b 100644 --- a/dbms/src/AggregateFunctions/AggregateFunctionBoundingRatio.h +++ b/dbms/src/AggregateFunctions/AggregateFunctionBoundingRatio.h @@ -13,146 +13,151 @@ #include #include -#include #include +#include namespace DB { -struct AggregateFunctionRateData +struct AggregateFunctionBoundingRatioData { - using TimestampEvent = std::pair; + using TimestampEvent = std::pair; - bool is_first = false; + bool is_first = false; - TimestampEvent first_event; - TimestampEvent last_event; + TimestampEvent first_event; + TimestampEvent last_event; - void add(UInt32 timestamp, Float64 f) - { - if(this->is_first) { - first_event = TimestampEvent{timestamp, f}; - is_first = true; - } else { - last_event = TimestampEvent{timestamp, f}; - } - } + void add(UInt32 timestamp, Float64 f) + { + if (is_first) + { + first_event = TimestampEvent{timestamp, f}; + is_first = true; + } + else + { + last_event = TimestampEvent{timestamp, f}; + } + } - void merge(const AggregateFunctionRateData & other) - { - // if the arg is earlier than us, replace us with them - if(other.first_event.first < first_event.first) { - first_event = other.first_event; - } - // if the arg is _later_ than us, replace us with them - if(other.last_event.first > last_event.second) { 
- last_event = other.last_event; - } + void merge(const AggregateFunctionBoundingRatioData & other) + { + // if the arg is earlier than us, replace us with them + if (other.first_event.first < first_event.first) + { + first_event = other.first_event; + } + // if the arg is _later_ than us, replace us with them + if (other.last_event.first > last_event.second) + { + last_event = other.last_event; + } + } - } - void serialize(WriteBuffer & buf) const - { - writeBinary(is_first, buf); - writeBinary(first_event.first, buf); - writeBinary(first_event.second, buf); + void serialize(WriteBuffer & buf) const + { + writeBinary(is_first, buf); + writeBinary(first_event.first, buf); + writeBinary(first_event.second, buf); - writeBinary(last_event.first, buf); - writeBinary(last_event.second, buf); - } + writeBinary(last_event.first, buf); + writeBinary(last_event.second, buf); + } - void deserialize(ReadBuffer & buf) - { - readBinary(is_first, buf); + void deserialize(ReadBuffer & buf) + { + readBinary(is_first, buf); - readBinary(first_event.first, buf); - readBinary(first_event.second, buf); + readBinary(first_event.first, buf); + readBinary(first_event.second, buf); - readBinary(last_event.first, buf); - readBinary(last_event.second, buf); - } + readBinary(last_event.first, buf); + readBinary(last_event.second, buf); + } }; -class AggregateFunctionRate final - : public IAggregateFunctionDataHelper + +class AggregateFunctionBoundingRatio final : public IAggregateFunctionDataHelper { private: - /* - * implements a basic derivative function - * - * (y2 - y1) / (x2 - x1) - */ - Float64 getRate(const AggregateFunctionRateData & data) const - { - if (data.first_event.first == 0) - return 0; - if(data.last_event.first == 0) - return 0; - // void divide by zero in denominator - if(data.last_event.first == data.first_event.first) - return 0; + /* implements a basic derivative function + * + * (y2 - y1) / (x2 - x1) + */ + Float64 getBoundingRatio(const AggregateFunctionBoundingRatioData & data) const + { + if (data.first_event.first == 0) + return 0; + if (data.last_event.first == 0) + return 0; + // void divide by zero in denominator + if (data.last_event.first == data.first_event.first) + return 0; - return (data.last_event.second - data.first_event.second) / (data.last_event.first - data.first_event.first); - } + return (data.last_event.second - data.first_event.second) / (data.last_event.first - data.first_event.first); + } public: - String getName() const override - { - return "rate"; - } + String getName() const override + { + return "boundingRatio"; + } - AggregateFunctionRate(const DataTypes & arguments, const Array & params) - { - const auto time_arg = arguments.front().get(); - if (!typeid_cast(time_arg) && !typeid_cast(time_arg)) - throw Exception{"Illegal type " + time_arg->getName() + " of first argument of aggregate function " + getName() - + ", must be DateTime or UInt32"}; - - const auto number_arg = arguments.at(1).get(); - if (!number_arg->isNumber()) - throw Exception{"Illegal type " + number_arg->getName() + " of argument " + toString(1) + " of aggregate function " - + getName() + ", must be a Number", - ErrorCodes::ILLEGAL_TYPE_OF_ARGUMENT}; - - } + AggregateFunctionBoundingRatio(const DataTypes & arguments) + { - DataTypePtr getReturnType() const override - { - return std::make_shared(); - } + const auto time_arg = arguments.at(0).get(); + if (!typeid_cast(time_arg) && !typeid_cast(time_arg)) + throw Exception {"Illegal type " + time_arg->getName() + " of first argument of aggregate 
function " + getName() + + ", must be DateTime or UInt32"}; - void add(AggregateDataPtr place, const IColumn ** columns, const size_t row_num, Arena *) const override - { - const auto timestamp = static_cast *>(columns[0])->getData()[row_num]; - const auto value = static_cast *>(columns[1])->getData()[row_num]; - this->data(place).add(timestamp, value); - } + const auto number_arg = arguments.at(1).get(); + if (!number_arg->isNumber()) + throw Exception {"Illegal type " + number_arg->getName() + " of argument " + toString(1) + " of aggregate function " + getName() + + ", must be a Number", + ErrorCodes::ILLEGAL_TYPE_OF_ARGUMENT}; + } - void merge(AggregateDataPtr place, ConstAggregateDataPtr rhs, Arena *) const override - { - this->data(place).merge(this->data(rhs)); - } - void serialize(ConstAggregateDataPtr place, WriteBuffer & buf) const override - { - this->data(place).serialize(buf); - } + DataTypePtr getReturnType() const override + { + return std::make_shared(); + } - void deserialize(AggregateDataPtr place, ReadBuffer & buf, Arena *) const override - { - this->data(place).deserialize(buf); - } + void add(AggregateDataPtr place, const IColumn ** columns, const size_t row_num, Arena *) const override + { + const auto timestamp = static_cast *>(columns[0])->getData()[row_num]; + const auto value = static_cast *>(columns[1])->getData()[row_num]; + data(place).add(timestamp, value); + } - void insertResultInto(ConstAggregateDataPtr place, IColumn & to) const override - { - static_cast(to).getData().push_back(getRate(this->data(place))); - } + void merge(AggregateDataPtr place, ConstAggregateDataPtr rhs, Arena *) const override + { + data(place).merge(data(rhs)); + } - const char * getHeaderFilePath() const override - { - return __FILE__; - } + void serialize(ConstAggregateDataPtr place, WriteBuffer & buf) const override + { + data(place).serialize(buf); + } + + void deserialize(AggregateDataPtr place, ReadBuffer & buf, Arena *) const override + { + data(place).deserialize(buf); + } + + void insertResultInto(ConstAggregateDataPtr place, IColumn & to) const override + { + static_cast(to).getData().push_back(getBoundingRatio(data(place))); + } + + const char * getHeaderFilePath() const override + { + return __FILE__; + } }; } diff --git a/dbms/src/AggregateFunctions/AggregateFunctionHistogram.cpp b/dbms/src/AggregateFunctions/AggregateFunctionHistogram.cpp index de58d7a36d3..bf46bccd9c7 100644 --- a/dbms/src/AggregateFunctions/AggregateFunctionHistogram.cpp +++ b/dbms/src/AggregateFunctions/AggregateFunctionHistogram.cpp @@ -17,12 +17,13 @@ namespace ErrorCodes extern const int PARAMETER_OUT_OF_BOUND; } + namespace { -AggregateFunctionPtr createAggregateFunctionHistogram(const std::string & name, const DataTypes & arguments, const Array & params) +AggregateFunctionPtr createAggregateFunctionHistogram(const std::string &, const DataTypes &, const Array &) { - if (params.size() != 1) +/* if (params.size() != 1) throw Exception("Function " + name + " requires single parameter: bins count", ErrorCodes::NUMBER_OF_ARGUMENTS_DOESNT_MATCH); if (params[0].getType() != Field::Types::UInt64) @@ -43,7 +44,9 @@ AggregateFunctionPtr createAggregateFunctionHistogram(const std::string & name, if (!res) throw Exception("Illegal type " + arguments[0]->getName() + " of argument for aggregate function " + name, ErrorCodes::ILLEGAL_TYPE_OF_ARGUMENT); - return res; + return res;*/ + + return nullptr; } } From 63014df313b0cad0b2eabf9532e45a36f010796a Mon Sep 17 00:00:00 2001 From: proller Date: Tue, 18 Dec 2018 
14:36:55 +0300 Subject: [PATCH 04/31] Fix macos build --- cmake/find_hdfs3.cmake | 9 ++++++--- cmake/find_libgsasl.cmake | 4 ++-- cmake/find_rdkafka.cmake | 2 +- cmake/find_ssl.cmake | 6 +++--- cmake/find_zlib.cmake | 2 +- dbms/src/Storages/MergeTree/DiskSpaceMonitor.cpp | 2 +- debian/.pbuilderrc | 2 ++ libs/libcommon/cmake/find_jemalloc.cmake | 2 +- 8 files changed, 17 insertions(+), 12 deletions(-) diff --git a/cmake/find_hdfs3.cmake b/cmake/find_hdfs3.cmake index a6fdec20291..a30409b50d5 100644 --- a/cmake/find_hdfs3.cmake +++ b/cmake/find_hdfs3.cmake @@ -15,12 +15,15 @@ if (NOT USE_INTERNAL_HDFS3_LIBRARY) endif () if (HDFS3_LIBRARY AND HDFS3_INCLUDE_DIR) -else () + set(USE_HDFS 1) +elseif (LIBGSASL_LIBRARY) set(HDFS3_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/libhdfs3/include") set(HDFS3_LIBRARY hdfs3) + set(USE_HDFS 1) +else() + set(USE_INTERNAL_HDFS3_LIBRARY 0) endif() -set (USE_HDFS 1) endif() -message (STATUS "Using hdfs3: ${HDFS3_INCLUDE_DIR} : ${HDFS3_LIBRARY}") +message (STATUS "Using hdfs3=${USE_HDFS}: ${HDFS3_INCLUDE_DIR} : ${HDFS3_LIBRARY}") diff --git a/cmake/find_libgsasl.cmake b/cmake/find_libgsasl.cmake index b686bb1df7f..0c88bd2c24a 100644 --- a/cmake/find_libgsasl.cmake +++ b/cmake/find_libgsasl.cmake @@ -1,4 +1,4 @@ -if (NOT APPLE) +if (NOT APPLE AND NOT ARCH_32) option (USE_INTERNAL_LIBGSASL_LIBRARY "Set to FALSE to use system libgsasl library instead of bundled" ${NOT_UNBUNDLED}) endif () @@ -13,7 +13,7 @@ if (NOT USE_INTERNAL_LIBGSASL_LIBRARY) endif () if (LIBGSASL_LIBRARY AND LIBGSASL_INCLUDE_DIR) -else () +elseif (NOT APPLE AND NOT ARCH_32) set (LIBGSASL_INCLUDE_DIR ${ClickHouse_SOURCE_DIR}/contrib/libgsasl/src ${ClickHouse_SOURCE_DIR}/contrib/libgsasl/linux_x86_64/include) set (USE_INTERNAL_LIBGSASL_LIBRARY 1) set (LIBGSASL_LIBRARY libgsasl) diff --git a/cmake/find_rdkafka.cmake b/cmake/find_rdkafka.cmake index 1d2674ea1a3..9ba48cadfcd 100644 --- a/cmake/find_rdkafka.cmake +++ b/cmake/find_rdkafka.cmake @@ -1,4 +1,4 @@ -if (NOT ARCH_ARM) +if (NOT ARCH_ARM AND NOT ARCH_32) option (ENABLE_RDKAFKA "Enable kafka" ON) endif () diff --git a/cmake/find_ssl.cmake b/cmake/find_ssl.cmake index 6d630c7198c..d159bcfba23 100644 --- a/cmake/find_ssl.cmake +++ b/cmake/find_ssl.cmake @@ -1,6 +1,6 @@ -#if (OS_LINUX) -option (USE_INTERNAL_SSL_LIBRARY "Set to FALSE to use system *ssl library instead of bundled" ${NOT_UNBUNDLED}) -#endif () +if(NOT ARCH_32) + option(USE_INTERNAL_SSL_LIBRARY "Set to FALSE to use system *ssl library instead of bundled" ${NOT_UNBUNDLED}) +endif() set (OPENSSL_USE_STATIC_LIBS ${USE_STATIC_LIBRARIES}) diff --git a/cmake/find_zlib.cmake b/cmake/find_zlib.cmake index f6b2d268291..90fe674ab36 100644 --- a/cmake/find_zlib.cmake +++ b/cmake/find_zlib.cmake @@ -1,4 +1,4 @@ -if (NOT OS_FREEBSD) +if (NOT OS_FREEBSD AND NOT ARCH_32) option (USE_INTERNAL_ZLIB_LIBRARY "Set to FALSE to use system zlib library instead of bundled" ${NOT_UNBUNDLED}) endif () diff --git a/dbms/src/Storages/MergeTree/DiskSpaceMonitor.cpp b/dbms/src/Storages/MergeTree/DiskSpaceMonitor.cpp index e5bb80df138..507ac8b7459 100644 --- a/dbms/src/Storages/MergeTree/DiskSpaceMonitor.cpp +++ b/dbms/src/Storages/MergeTree/DiskSpaceMonitor.cpp @@ -4,7 +4,7 @@ namespace DB { UInt64 DiskSpaceMonitor::reserved_bytes; -size_t DiskSpaceMonitor::reservation_count; +UInt64 DiskSpaceMonitor::reservation_count; std::mutex DiskSpaceMonitor::mutex; } diff --git a/debian/.pbuilderrc b/debian/.pbuilderrc index 4e5722fe9e5..12e275836c2 100644 --- a/debian/.pbuilderrc +++ b/debian/.pbuilderrc @@ -192,6 +192,8 
@@ EXTRAPACKAGES+=" psmisc " [[ $CCACHE_PREFIX == 'distcc' ]] && EXTRAPACKAGES+=" $CCACHE_PREFIX " && USENETWORK=yes && export DISTCC_DIR=/var/cache/pbuilder/distcc +[[ $ARCH == 'i386' ]] && EXTRAPACKAGES+=" libssl-dev " + export DEB_BUILD_OPTIONS=parallel=`nproc` # Floating bug with permissions: diff --git a/libs/libcommon/cmake/find_jemalloc.cmake b/libs/libcommon/cmake/find_jemalloc.cmake index f3fa138e5cc..d9bc37f9d6c 100644 --- a/libs/libcommon/cmake/find_jemalloc.cmake +++ b/libs/libcommon/cmake/find_jemalloc.cmake @@ -1,4 +1,4 @@ -if (OS_LINUX AND NOT SANITIZE AND NOT ARCH_ARM) +if (OS_LINUX AND NOT SANITIZE AND NOT ARCH_ARM AND NOT ARCH_32) set(ENABLE_JEMALLOC_DEFAULT 1) else () set(ENABLE_JEMALLOC_DEFAULT 0) From 9ca9f600ad9c21ddcf561565fa2963f47d80a619 Mon Sep 17 00:00:00 2001 From: proller Date: Tue, 18 Dec 2018 17:31:20 +0300 Subject: [PATCH 05/31] Fix macos build --- dbms/src/Storages/MergeTree/MergeTreeDataMergerMutator.h | 4 ++-- dbms/src/Storages/MergeTree/ReplicatedMergeTreeQueue.cpp | 2 +- dbms/src/Storages/StorageMergeTree.cpp | 2 +- dbms/src/Storages/StorageReplicatedMergeTree.cpp | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/dbms/src/Storages/MergeTree/MergeTreeDataMergerMutator.h b/dbms/src/Storages/MergeTree/MergeTreeDataMergerMutator.h index e09ac52a701..0d6cdd3f557 100644 --- a/dbms/src/Storages/MergeTree/MergeTreeDataMergerMutator.h +++ b/dbms/src/Storages/MergeTree/MergeTreeDataMergerMutator.h @@ -45,12 +45,12 @@ public: /** Get maximum total size of parts to do merge, at current moment of time. * It depends on number of free threads in background_pool and amount of free space in disk. */ - size_t getMaxSourcePartsSize(); + UInt64 getMaxSourcePartsSize(); /** For explicitly passed size of pool and number of used tasks. * This method could be used to calculate threshold depending on number of tasks in replication queue. */ - size_t getMaxSourcePartsSize(size_t pool_size, size_t pool_used); + UInt64 getMaxSourcePartsSize(size_t pool_size, size_t pool_used); /** Selects which parts to merge. Uses a lot of heuristics. * diff --git a/dbms/src/Storages/MergeTree/ReplicatedMergeTreeQueue.cpp b/dbms/src/Storages/MergeTree/ReplicatedMergeTreeQueue.cpp index 41a8f8ee224..2499b3239e7 100644 --- a/dbms/src/Storages/MergeTree/ReplicatedMergeTreeQueue.cpp +++ b/dbms/src/Storages/MergeTree/ReplicatedMergeTreeQueue.cpp @@ -900,7 +900,7 @@ bool ReplicatedMergeTreeQueue::shouldExecuteLogEntry( * But if all threads are free (maximal size of merge is allowed) then execute any merge, * (because it may be ordered by OPTIMIZE or early with differrent settings). 
*/ - size_t max_source_parts_size = merger_mutator.getMaxSourcePartsSize(); + UInt64 max_source_parts_size = merger_mutator.getMaxSourcePartsSize(); if (max_source_parts_size != data.settings.max_bytes_to_merge_at_max_space_in_pool && sum_parts_size_in_bytes > max_source_parts_size) { diff --git a/dbms/src/Storages/StorageMergeTree.cpp b/dbms/src/Storages/StorageMergeTree.cpp index d034fe67bd8..f4f69e3ac87 100644 --- a/dbms/src/Storages/StorageMergeTree.cpp +++ b/dbms/src/Storages/StorageMergeTree.cpp @@ -418,7 +418,7 @@ bool StorageMergeTree::merge( if (partition_id.empty()) { - size_t max_source_parts_size = merger_mutator.getMaxSourcePartsSize(); + UInt64 max_source_parts_size = merger_mutator.getMaxSourcePartsSize(); if (max_source_parts_size > 0) selected = merger_mutator.selectPartsToMerge(future_part, aggressive, max_source_parts_size, can_merge, out_disable_reason); } diff --git a/dbms/src/Storages/StorageReplicatedMergeTree.cpp b/dbms/src/Storages/StorageReplicatedMergeTree.cpp index e4bfb4c90d5..10981823b66 100644 --- a/dbms/src/Storages/StorageReplicatedMergeTree.cpp +++ b/dbms/src/Storages/StorageReplicatedMergeTree.cpp @@ -2160,7 +2160,7 @@ void StorageReplicatedMergeTree::mergeSelectingTask() } else { - size_t max_source_parts_size = merger_mutator.getMaxSourcePartsSize( + UInt64 max_source_parts_size = merger_mutator.getMaxSourcePartsSize( data.settings.max_replicated_merges_in_queue, merges_and_mutations_queued); if (max_source_parts_size > 0) From 52eb11a3100c69b4569faec2eb6ff643755fcf39 Mon Sep 17 00:00:00 2001 From: Ilya Skrypitsa Date: Wed, 19 Dec 2018 14:38:52 +0300 Subject: [PATCH 06/31] Typo in Creating a Table example (#3872) --- docs/en/operations/table_engines/summingmergetree.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/en/operations/table_engines/summingmergetree.md b/docs/en/operations/table_engines/summingmergetree.md index 587613c33d7..286507d8b9c 100644 --- a/docs/en/operations/table_engines/summingmergetree.md +++ b/docs/en/operations/table_engines/summingmergetree.md @@ -13,7 +13,7 @@ CREATE TABLE [IF NOT EXISTS] [db.]table_name [ON CLUSTER cluster] name1 [type1] [DEFAULT|MATERIALIZED|ALIAS expr1], name2 [type2] [DEFAULT|MATERIALIZED|ALIAS expr2], ... -) ENGINE = MergeTree() +) ENGINE = SummingMergeTree() [PARTITION BY expr] [ORDER BY expr] [SAMPLE BY expr] From 1ae355a8e431a0aea87ebe99de8965671133e2f5 Mon Sep 17 00:00:00 2001 From: Alexey Milovidov Date: Wed, 19 Dec 2018 15:56:58 +0300 Subject: [PATCH 07/31] Fixed error in changelog [#CLICKHOUSE-2] --- CHANGELOG_RU.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG_RU.md b/CHANGELOG_RU.md index 0f31a6a3ab3..a03d064cf4b 100644 --- a/CHANGELOG_RU.md +++ b/CHANGELOG_RU.md @@ -31,7 +31,7 @@ * Исправлена работа некоторых случаев `VIEW` и подзапросов без указания базы данных. [Winter Zhang](https://github.com/yandex/ClickHouse/pull/3521) * Исправлен race condition при одновременном чтении из `MATERIALIZED VIEW` и удалением `MATERIALIZED VIEW` из-за отсутствия блокировки внутренней таблицы `MATERIALIZED VIEW`. [#3404](https://github.com/yandex/ClickHouse/pull/3404) [#3694](https://github.com/yandex/ClickHouse/pull/3694) * Исправлена ошибка `Lock handler cannot be nullptr.` [#3689](https://github.com/yandex/ClickHouse/pull/3689) -* Исправления выполнения запросов при включенной настройке `compile_expressions` (выключена по-умолчанию) - убрана свёртка недетерминированных константных выражений, как например, функции `now`. 
[#3457](https://github.com/yandex/ClickHouse/pull/3457) +* Исправления выполнения запросов при включенной настройке `compile_expressions` (включена по-умолчанию) - убрана свёртка недетерминированных константных выражений, как например, функции `now`. [#3457](https://github.com/yandex/ClickHouse/pull/3457) * Исправлено падение при указании неконстантного аргумента scale в функциях `toDecimal32/64/128`. * Исправлена ошибка при попытке вставки в формате `Values` массива с `NULL` элементами в столбец типа `Array` без `Nullable` (в случае `input_format_values_interpret_expressions` = 1). [#3487](https://github.com/yandex/ClickHouse/pull/3487) [#3503](https://github.com/yandex/ClickHouse/pull/3503) * Исправлено непрерывное логгирование ошибок в `DDLWorker`, если ZooKeeper недоступен. [8f50c620](https://github.com/yandex/ClickHouse/commit/8f50c620334988b28018213ec0092fe6423847e2) From b7ce79c423fc4b1f668b4566ed77ee9d35397b0c Mon Sep 17 00:00:00 2001 From: Alexey Milovidov Date: Wed, 19 Dec 2018 16:11:06 +0300 Subject: [PATCH 08/31] Miscellaneous [#CLICKHOUSE-2] --- dbms/src/Common/Config/ConfigProcessor.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dbms/src/Common/Config/ConfigProcessor.cpp b/dbms/src/Common/Config/ConfigProcessor.cpp index 081bcd7c995..7e9579366b1 100644 --- a/dbms/src/Common/Config/ConfigProcessor.cpp +++ b/dbms/src/Common/Config/ConfigProcessor.cpp @@ -24,7 +24,7 @@ namespace DB { /// For cutting prerpocessed path to this base -std::string main_config_path; +static std::string main_config_path; /// Extracts from a string the first encountered number consisting of at least two digits. static std::string numberFromHost(const std::string & s) From cbf3c34cf3eeb6222c7201bd88a410a982a88f3d Mon Sep 17 00:00:00 2001 From: chertus Date: Wed, 19 Dec 2018 16:13:51 +0300 Subject: [PATCH 09/31] minor improvement in ExpressionAnalyzer + array join --- dbms/src/Interpreters/ExpressionAnalyzer.cpp | 28 +++++++++++--------- dbms/src/Interpreters/ExpressionAnalyzer.h | 2 +- dbms/src/Interpreters/SyntaxAnalyzer.cpp | 8 ++++-- dbms/src/Parsers/ASTSelectQuery.cpp | 12 ++++----- dbms/src/Parsers/ASTSelectQuery.h | 2 +- 5 files changed, 29 insertions(+), 23 deletions(-) diff --git a/dbms/src/Interpreters/ExpressionAnalyzer.cpp b/dbms/src/Interpreters/ExpressionAnalyzer.cpp index dda46a008ad..e488aa5d81b 100644 --- a/dbms/src/Interpreters/ExpressionAnalyzer.cpp +++ b/dbms/src/Interpreters/ExpressionAnalyzer.cpp @@ -143,15 +143,17 @@ void ExpressionAnalyzer::analyzeAggregation() ExpressionActionsPtr temp_actions = std::make_shared(source_columns, context); - if (select_query && select_query->array_join_expression_list()) - { - getRootActions(select_query->array_join_expression_list(), true, temp_actions); - addMultipleArrayJoinAction(temp_actions); - array_join_columns = temp_actions->getSampleBlock().getNamesAndTypesList(); - } - if (select_query) { + bool is_array_join_left; + ASTPtr array_join_expression_list = select_query->array_join_expression_list(is_array_join_left); + if (array_join_expression_list) + { + getRootActions(array_join_expression_list, true, temp_actions); + addMultipleArrayJoinAction(temp_actions, is_array_join_left); + array_join_columns = temp_actions->getSampleBlock().getNamesAndTypesList(); + } + const ASTTablesInSelectQueryElement * join = select_query->join(); if (join) { @@ -512,7 +514,7 @@ void ExpressionAnalyzer::initChain(ExpressionActionsChain & chain, const NamesAn } /// "Big" ARRAY JOIN. 
-void ExpressionAnalyzer::addMultipleArrayJoinAction(ExpressionActionsPtr & actions) const +void ExpressionAnalyzer::addMultipleArrayJoinAction(ExpressionActionsPtr & actions, bool array_join_is_left) const { NameSet result_columns; for (const auto & result_source : syntax->array_join_result_to_source) @@ -525,22 +527,24 @@ void ExpressionAnalyzer::addMultipleArrayJoinAction(ExpressionActionsPtr & actio result_columns.insert(result_source.first); } - actions->add(ExpressionAction::arrayJoin(result_columns, select_query->array_join_is_left(), context)); + actions->add(ExpressionAction::arrayJoin(result_columns, array_join_is_left, context)); } bool ExpressionAnalyzer::appendArrayJoin(ExpressionActionsChain & chain, bool only_types) { assertSelect(); - if (!select_query->array_join_expression_list()) + bool is_array_join_left; + ASTPtr array_join_expression_list = select_query->array_join_expression_list(is_array_join_left); + if (!array_join_expression_list) return false; initChain(chain, source_columns); ExpressionActionsChain::Step & step = chain.steps.back(); - getRootActions(select_query->array_join_expression_list(), only_types, step.actions); + getRootActions(array_join_expression_list, only_types, step.actions); - addMultipleArrayJoinAction(step.actions); + addMultipleArrayJoinAction(step.actions, is_array_join_left); return true; } diff --git a/dbms/src/Interpreters/ExpressionAnalyzer.h b/dbms/src/Interpreters/ExpressionAnalyzer.h index 502bd7198c3..ba1e3a252d0 100644 --- a/dbms/src/Interpreters/ExpressionAnalyzer.h +++ b/dbms/src/Interpreters/ExpressionAnalyzer.h @@ -240,7 +240,7 @@ private: /// Find global subqueries in the GLOBAL IN/JOIN sections. Fills in external_tables. void initGlobalSubqueriesAndExternalTables(); - void addMultipleArrayJoinAction(ExpressionActionsPtr & actions) const; + void addMultipleArrayJoinAction(ExpressionActionsPtr & actions, bool is_left) const; void addJoinAction(ExpressionActionsPtr & actions, bool only_types) const; diff --git a/dbms/src/Interpreters/SyntaxAnalyzer.cpp b/dbms/src/Interpreters/SyntaxAnalyzer.cpp index 78c90a0be8c..3d9a7f55df3 100644 --- a/dbms/src/Interpreters/SyntaxAnalyzer.cpp +++ b/dbms/src/Interpreters/SyntaxAnalyzer.cpp @@ -676,9 +676,13 @@ void optimizeUsing(const ASTSelectQuery * select_query) void getArrayJoinedColumns(ASTPtr & query, SyntaxAnalyzerResult & result, const ASTSelectQuery * select_query, const Names & source_columns, const NameSet & source_columns_set) { - if (select_query && select_query->array_join_expression_list()) + if (!select_query) + return; + + ASTPtr array_join_expression_list = select_query->array_join_expression_list(); + if (array_join_expression_list) { - ASTs & array_join_asts = select_query->array_join_expression_list()->children; + ASTs & array_join_asts = array_join_expression_list->children; for (const auto & ast : array_join_asts) { const String nested_table_name = ast->getColumnName(); diff --git a/dbms/src/Parsers/ASTSelectQuery.cpp b/dbms/src/Parsers/ASTSelectQuery.cpp index 35e9ed75a4e..4a42585b3c1 100644 --- a/dbms/src/Parsers/ASTSelectQuery.cpp +++ b/dbms/src/Parsers/ASTSelectQuery.cpp @@ -283,23 +283,21 @@ bool ASTSelectQuery::final() const } -ASTPtr ASTSelectQuery::array_join_expression_list() const +ASTPtr ASTSelectQuery::array_join_expression_list(bool & is_left) const { const ASTArrayJoin * array_join = getFirstArrayJoin(*this); if (!array_join) return {}; + is_left = (array_join->kind == ASTArrayJoin::Kind::Left); return array_join->expression_list; } -bool 
ASTSelectQuery::array_join_is_left() const +ASTPtr ASTSelectQuery::array_join_expression_list() const { - const ASTArrayJoin * array_join = getFirstArrayJoin(*this); - if (!array_join) - return {}; - - return array_join->kind == ASTArrayJoin::Kind::Left; + bool is_left; + return array_join_expression_list(is_left); } diff --git a/dbms/src/Parsers/ASTSelectQuery.h b/dbms/src/Parsers/ASTSelectQuery.h index d9bb3f11be4..bf16e4133de 100644 --- a/dbms/src/Parsers/ASTSelectQuery.h +++ b/dbms/src/Parsers/ASTSelectQuery.h @@ -41,9 +41,9 @@ public: /// Compatibility with old parser of tables list. TODO remove ASTPtr sample_size() const; ASTPtr sample_offset() const; + ASTPtr array_join_expression_list(bool & is_left) const; ASTPtr array_join_expression_list() const; const ASTTablesInSelectQueryElement * join() const; - bool array_join_is_left() const; bool final() const; void replaceDatabaseAndTable(const String & database_name, const String & table_name); void addTableFunction(ASTPtr & table_function_ptr); From ea27ec463e047a0b7e64c6959177a28779d00baa Mon Sep 17 00:00:00 2001 From: Sabyanin Maxim Date: Wed, 19 Dec 2018 00:18:54 +0300 Subject: [PATCH 10/31] add SettingLogsLevel --- dbms/src/Common/ErrorCodes.cpp | 1 + dbms/src/Interpreters/Settings.h | 2 +- dbms/src/Interpreters/SettingsCommon.cpp | 55 ++++++++++++++++++++++++ dbms/src/Interpreters/SettingsCommon.h | 21 +++++++++ 4 files changed, 78 insertions(+), 1 deletion(-) diff --git a/dbms/src/Common/ErrorCodes.cpp b/dbms/src/Common/ErrorCodes.cpp index e5b6028594b..0b7d089361c 100644 --- a/dbms/src/Common/ErrorCodes.cpp +++ b/dbms/src/Common/ErrorCodes.cpp @@ -402,6 +402,7 @@ namespace ErrorCodes extern const int SYSTEM_ERROR = 425; extern const int NULL_POINTER_DEREFERENCE = 426; extern const int CANNOT_COMPILE_REGEXP = 427; + extern const int UNKNOWN_LOG_LEVEL = 428; extern const int KEEPER_EXCEPTION = 999; extern const int POCO_EXCEPTION = 1000; diff --git a/dbms/src/Interpreters/Settings.h b/dbms/src/Interpreters/Settings.h index d5775dd3945..fc8ea2c4630 100644 --- a/dbms/src/Interpreters/Settings.h +++ b/dbms/src/Interpreters/Settings.h @@ -277,7 +277,7 @@ struct Settings M(SettingBool, log_profile_events, true, "Log query performance statistics into the query_log and query_thread_log.") \ M(SettingBool, log_query_settings, true, "Log query settings into the query_log.") \ M(SettingBool, log_query_threads, true, "Log query threads into system.query_thread_log table. This setting have effect only when 'log_queries' is true.") \ - M(SettingString, send_logs_level, "none", "Send server text logs with specified minumum level to client. Valid values: 'trace', 'debug', 'information', 'warning', 'error', 'none'") \ + M(SettingLogsLevel, send_logs_level, "none", "Send server text logs with specified minumum level to client. 
Valid values: 'trace', 'debug', 'information', 'warning', 'error', 'none'") \ M(SettingBool, enable_optimize_predicate_expression, 0, "If it is set to true, optimize predicates to subqueries.") \ \ M(SettingUInt64, low_cardinality_max_dictionary_size, 8192, "Maximum size (in rows) of shared global dictionary for LowCardinality type.") \ diff --git a/dbms/src/Interpreters/SettingsCommon.cpp b/dbms/src/Interpreters/SettingsCommon.cpp index 08e5d1b1781..b65097a0f42 100644 --- a/dbms/src/Interpreters/SettingsCommon.cpp +++ b/dbms/src/Interpreters/SettingsCommon.cpp @@ -23,6 +23,7 @@ namespace ErrorCodes extern const int UNKNOWN_DISTRIBUTED_PRODUCT_MODE; extern const int UNKNOWN_GLOBAL_SUBQUERIES_METHOD; extern const int UNKNOWN_JOIN_STRICTNESS; + extern const int UNKNOWN_LOG_LEVEL; extern const int SIZE_OF_FIXED_STRING_DOESNT_MATCH; extern const int BAD_ARGUMENTS; } @@ -674,4 +675,58 @@ void SettingDateTimeInputFormat::write(WriteBuffer & buf) const writeBinary(toString(), buf); } + +const std::vector SettingLogsLevel::log_levels = +{ + "none", + "trace", + "debug", + "information", + "warning", + "error" +}; + + +SettingLogsLevel::SettingLogsLevel(const String & level) +{ + set(level); +} + + +void SettingLogsLevel::set(const String & level) +{ + auto it = std::find(log_levels.begin(), log_levels.end(), level); + if (it == log_levels.end()) + throw Exception("Log level '" + level + "' not allowed.", ErrorCodes::UNKNOWN_LOG_LEVEL); + + value = *it; + changed = true; +} + + +void SettingLogsLevel::set(const Field & level) +{ + set(safeGet(level)); +} + + +void SettingLogsLevel::set(ReadBuffer & buf) +{ + String x; + readBinary(x, buf); + set(x); +} + + +String SettingLogsLevel::toString() const +{ + return value; +} + + +void SettingLogsLevel::write(WriteBuffer & buf) const +{ + writeBinary(toString(), buf); +} + } diff --git a/dbms/src/Interpreters/SettingsCommon.h b/dbms/src/Interpreters/SettingsCommon.h index 667912d01be..bfc0f30f8e5 100644 --- a/dbms/src/Interpreters/SettingsCommon.h +++ b/dbms/src/Interpreters/SettingsCommon.h @@ -404,4 +404,25 @@ struct SettingDateTimeInputFormat void write(WriteBuffer & buf) const; }; + +class SettingLogsLevel +{ +public: + + String value; + bool changed = false; + static const std::vector log_levels; + + SettingLogsLevel(const String & level); + operator String() const { return value; } + void set(const String & level); + void set(const Field & level); + void set(ReadBuffer & buf); + + String toString() const; + void write(WriteBuffer & buf) const; +}; + + + } From 937c0fb4a47bfb62f5723a19b47fd131f2996a33 Mon Sep 17 00:00:00 2001 From: Ivan Lezhankin Date: Wed, 19 Dec 2018 17:02:26 +0300 Subject: [PATCH 11/31] Treat Enum* types as Int* inside If() function. 
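
The patch below extends the TypeIndex dispatch in callOnTypeIndex.h so that Enum8 and Enum16 arguments take the same code path as Int8 and Int16, which is what lets if(cond, enum_a, enum_b) work on Enum columns. As a rough standalone sketch of the dispatch idea (simplified names, not the actual ClickHouse header):

    #include <cstdint>
    #include <iostream>

    enum class TypeIndex { Int8, Int16, Enum8, Enum16 };

    // Forward Enum8/Enum16 to the same handler as their underlying integer
    // types, mirroring the added cases in callOnBasicType()/callOnBasicTypes().
    template <typename F>
    bool callOnBasicType(TypeIndex idx, F && f)
    {
        switch (idx)
        {
            case TypeIndex::Int8:   return f(int8_t{});
            case TypeIndex::Int16:  return f(int16_t{});
            case TypeIndex::Enum8:  return f(int8_t{});   // Enum8 behaves as Int8
            case TypeIndex::Enum16: return f(int16_t{});  // Enum16 behaves as Int16
        }
        return false;
    }

    int main()
    {
        auto print_size = [](auto v) { std::cout << sizeof(v) << '\n'; return true; };
        callOnBasicType(TypeIndex::Enum8, print_size);   // prints 1, same as Int8
        callOnBasicType(TypeIndex::Enum16, print_size);  // prints 2, same as Int16
    }

The test added in the following commit exercises this at the SQL level via ALTER TABLE ... UPDATE on an Enum8 column.
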
--- dbms/src/Core/callOnTypeIndex.h | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/dbms/src/Core/callOnTypeIndex.h b/dbms/src/Core/callOnTypeIndex.h index 8f4424ec016..ad2a98d8112 100644 --- a/dbms/src/Core/callOnTypeIndex.h +++ b/dbms/src/Core/callOnTypeIndex.h @@ -27,7 +27,6 @@ bool callOnBasicType(TypeIndex number, F && f) case TypeIndex::UInt16: return f(TypePair()); case TypeIndex::UInt32: return f(TypePair()); case TypeIndex::UInt64: return f(TypePair()); - //case TypeIndex::UInt128>: return f(TypePair()); case TypeIndex::Int8: return f(TypePair()); case TypeIndex::Int16: return f(TypePair()); @@ -35,6 +34,9 @@ bool callOnBasicType(TypeIndex number, F && f) case TypeIndex::Int64: return f(TypePair()); case TypeIndex::Int128: return f(TypePair()); + case TypeIndex::Enum8: return f(TypePair()); + case TypeIndex::Enum16: return f(TypePair()); + default: break; } @@ -89,13 +91,16 @@ inline bool callOnBasicTypes(TypeIndex type_num1, TypeIndex type_num2, F && f) case TypeIndex::UInt16: return callOnBasicType(type_num2, std::forward(f)); case TypeIndex::UInt32: return callOnBasicType(type_num2, std::forward(f)); case TypeIndex::UInt64: return callOnBasicType(type_num2, std::forward(f)); - //case TypeIndex::UInt128: return callOnBasicType(type_num2, std::forward(f)); case TypeIndex::Int8: return callOnBasicType(type_num2, std::forward(f)); case TypeIndex::Int16: return callOnBasicType(type_num2, std::forward(f)); case TypeIndex::Int32: return callOnBasicType(type_num2, std::forward(f)); case TypeIndex::Int64: return callOnBasicType(type_num2, std::forward(f)); case TypeIndex::Int128: return callOnBasicType(type_num2, std::forward(f)); + + case TypeIndex::Enum8: return callOnBasicType(type_num2, std::forward(f)); + case TypeIndex::Enum16: return callOnBasicType(type_num2, std::forward(f)); + default: break; } From 4c3af43ed34c3ec8c730fbe8ebefb00e6d58471d Mon Sep 17 00:00:00 2001 From: Ivan Lezhankin Date: Wed, 19 Dec 2018 17:36:55 +0300 Subject: [PATCH 12/31] Add test. --- .../queries/0_stateless/00806_alter_update.reference | 3 +++ dbms/tests/queries/0_stateless/00806_alter_update.sql | 11 +++++++++++ 2 files changed, 14 insertions(+) create mode 100644 dbms/tests/queries/0_stateless/00806_alter_update.reference create mode 100644 dbms/tests/queries/0_stateless/00806_alter_update.sql diff --git a/dbms/tests/queries/0_stateless/00806_alter_update.reference b/dbms/tests/queries/0_stateless/00806_alter_update.reference new file mode 100644 index 00000000000..fbc804c35ca --- /dev/null +++ b/dbms/tests/queries/0_stateless/00806_alter_update.reference @@ -0,0 +1,3 @@ +0 +foo +foo diff --git a/dbms/tests/queries/0_stateless/00806_alter_update.sql b/dbms/tests/queries/0_stateless/00806_alter_update.sql new file mode 100644 index 00000000000..65e98188c3f --- /dev/null +++ b/dbms/tests/queries/0_stateless/00806_alter_update.sql @@ -0,0 +1,11 @@ +DROP TABLE IF EXISTS test.alter_update; + +CREATE TABLE test.alter_update (d Date, e Enum8('foo'=1, 'bar'=2)) Engine = MergeTree(d, (d), 8192); +INSERT INTO test.alter_update (d, e) VALUES ('2018-01-01', 'foo'); +INSERT INTO test.alter_update (d, e) VALUES ('2018-01-02', 'bar'); + +ALTER TABLE test.alter_update UPDATE e = CAST('foo', 'Enum8(\'foo\' = 1, \'bar\' = 2)') WHERE d='2018-01-02'; + +SELECT sleep(1); -- TODO: there should be setting for sync ALTER UPDATE someday. 
+ +SELECT e FROM test.alter_update ORDER BY d; From 98a798aa52506a6e60d17da30eb7ff4f6550605f Mon Sep 17 00:00:00 2001 From: Michal Lisowski Date: Wed, 19 Dec 2018 16:24:27 +0100 Subject: [PATCH 13/31] Mention about nagios check in 3rd party integrations (#3878) --- docs/en/interfaces/third-party/integrations.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/en/interfaces/third-party/integrations.md b/docs/en/interfaces/third-party/integrations.md index 8d4714a178a..0da0fcf14a8 100644 --- a/docs/en/interfaces/third-party/integrations.md +++ b/docs/en/interfaces/third-party/integrations.md @@ -28,6 +28,8 @@ - [Prometheus](https://prometheus.io/) - [clickhouse_exporter](https://github.com/f1yegor/clickhouse_exporter) - [PromHouse](https://github.com/Percona-Lab/PromHouse) + - [Nagios](https://www.nagios.org/) + - [check_clickhouse](https://github.com/exogroup/check_clickhouse/) - Logging - [fluentd](https://www.fluentd.org) - [loghouse](https://github.com/flant/loghouse) (for [Kubernetes](https://kubernetes.io)) From 92499c725dbf2139cfda6f4ebba27b270f14f53d Mon Sep 17 00:00:00 2001 From: Alexey Milovidov Date: Wed, 19 Dec 2018 18:27:57 +0300 Subject: [PATCH 14/31] Fixed test #3811 --- .../queries/0_stateless/00800_function_java_hash.sql | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/dbms/tests/queries/0_stateless/00800_function_java_hash.sql b/dbms/tests/queries/0_stateless/00800_function_java_hash.sql index 220c1f0ff60..c69cd412f57 100644 --- a/dbms/tests/queries/0_stateless/00800_function_java_hash.sql +++ b/dbms/tests/queries/0_stateless/00800_function_java_hash.sql @@ -1,4 +1,4 @@ -select JavaHash('abc'); -select JavaHash('874293087'); -select HiveHash('abc'); -select HiveHash('874293087'); +select javaHash('abc'); +select javaHash('874293087'); +select hiveHash('abc'); +select hiveHash('874293087'); From 214b500b6e24b04f02d59fd60353a4ff5fe4b1bf Mon Sep 17 00:00:00 2001 From: proller Date: Wed, 19 Dec 2018 18:30:10 +0300 Subject: [PATCH 15/31] Fix build without submodules --- cmake/find_libgsasl.cmake | 3 ++- cmake/find_libxml2.cmake | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/cmake/find_libgsasl.cmake b/cmake/find_libgsasl.cmake index 0c88bd2c24a..f22ead4c765 100644 --- a/cmake/find_libgsasl.cmake +++ b/cmake/find_libgsasl.cmake @@ -5,6 +5,7 @@ endif () if (USE_INTERNAL_LIBGSASL_LIBRARY AND NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libgsasl/src/gsasl.h") message (WARNING "submodule contrib/libgsasl is missing. 
to fix try run: \n git submodule update --init --recursive") set (USE_INTERNAL_LIBGSASL_LIBRARY 0) + set (MISSING_INTERNAL_LIBGSASL_LIBRARY 1) endif () if (NOT USE_INTERNAL_LIBGSASL_LIBRARY) @@ -13,7 +14,7 @@ if (NOT USE_INTERNAL_LIBGSASL_LIBRARY) endif () if (LIBGSASL_LIBRARY AND LIBGSASL_INCLUDE_DIR) -elseif (NOT APPLE AND NOT ARCH_32) +elseif (NOT MISSING_INTERNAL_LIBGSASL_LIBRARY AND NOT APPLE AND NOT ARCH_32) set (LIBGSASL_INCLUDE_DIR ${ClickHouse_SOURCE_DIR}/contrib/libgsasl/src ${ClickHouse_SOURCE_DIR}/contrib/libgsasl/linux_x86_64/include) set (USE_INTERNAL_LIBGSASL_LIBRARY 1) set (LIBGSASL_LIBRARY libgsasl) diff --git a/cmake/find_libxml2.cmake b/cmake/find_libxml2.cmake index cfababfbf63..db34801b238 100644 --- a/cmake/find_libxml2.cmake +++ b/cmake/find_libxml2.cmake @@ -3,6 +3,7 @@ option (USE_INTERNAL_LIBXML2_LIBRARY "Set to FALSE to use system libxml2 library if (USE_INTERNAL_LIBXML2_LIBRARY AND NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libxml2/libxml.h") message (WARNING "submodule contrib/libxml2 is missing. to fix try run: \n git submodule update --init --recursive") set (USE_INTERNAL_LIBXML2_LIBRARY 0) + set (MISSING_INTERNAL_LIBXML2_LIBRARY 1) endif () if (NOT USE_INTERNAL_LIBXML2_LIBRARY) @@ -11,7 +12,7 @@ if (NOT USE_INTERNAL_LIBXML2_LIBRARY) endif () if (LIBXML2_LIBRARY AND LIBXML2_INCLUDE_DIR) -else () +elseif (NOT MISSING_INTERNAL_LIBXML2_LIBRARY) set (LIBXML2_INCLUDE_DIR ${ClickHouse_SOURCE_DIR}/contrib/libxml2/include ${ClickHouse_SOURCE_DIR}/contrib/libxml2-cmake/linux_x86_64/include) set (USE_INTERNAL_LIBXML2_LIBRARY 1) set (LIBXML2_LIBRARY libxml2) From bb3487d77891cc4d3f9da1fde25a26644dbb9407 Mon Sep 17 00:00:00 2001 From: proller Date: Wed, 19 Dec 2018 18:36:31 +0300 Subject: [PATCH 16/31] Fix build without submodules --- cmake/find_hdfs3.cmake | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cmake/find_hdfs3.cmake b/cmake/find_hdfs3.cmake index a30409b50d5..81fe7177304 100644 --- a/cmake/find_hdfs3.cmake +++ b/cmake/find_hdfs3.cmake @@ -16,7 +16,7 @@ endif () if (HDFS3_LIBRARY AND HDFS3_INCLUDE_DIR) set(USE_HDFS 1) -elseif (LIBGSASL_LIBRARY) +elseif (LIBGSASL_LIBRARY AND LIBXML2_LIBRARY) set(HDFS3_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/libhdfs3/include") set(HDFS3_LIBRARY hdfs3) set(USE_HDFS 1) From b56dea9f84dc29c3c0261c25980648d161eaf500 Mon Sep 17 00:00:00 2001 From: proller Date: Wed, 19 Dec 2018 18:37:55 +0300 Subject: [PATCH 17/31] Fix build without submodules --- cmake/find_hdfs3.cmake | 2 +- cmake/find_libgsasl.cmake | 3 ++- cmake/find_libxml2.cmake | 3 ++- 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/cmake/find_hdfs3.cmake b/cmake/find_hdfs3.cmake index a30409b50d5..81fe7177304 100644 --- a/cmake/find_hdfs3.cmake +++ b/cmake/find_hdfs3.cmake @@ -16,7 +16,7 @@ endif () if (HDFS3_LIBRARY AND HDFS3_INCLUDE_DIR) set(USE_HDFS 1) -elseif (LIBGSASL_LIBRARY) +elseif (LIBGSASL_LIBRARY AND LIBXML2_LIBRARY) set(HDFS3_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/libhdfs3/include") set(HDFS3_LIBRARY hdfs3) set(USE_HDFS 1) diff --git a/cmake/find_libgsasl.cmake b/cmake/find_libgsasl.cmake index 0c88bd2c24a..f22ead4c765 100644 --- a/cmake/find_libgsasl.cmake +++ b/cmake/find_libgsasl.cmake @@ -5,6 +5,7 @@ endif () if (USE_INTERNAL_LIBGSASL_LIBRARY AND NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libgsasl/src/gsasl.h") message (WARNING "submodule contrib/libgsasl is missing. 
to fix try run: \n git submodule update --init --recursive") set (USE_INTERNAL_LIBGSASL_LIBRARY 0) + set (MISSING_INTERNAL_LIBGSASL_LIBRARY 1) endif () if (NOT USE_INTERNAL_LIBGSASL_LIBRARY) @@ -13,7 +14,7 @@ if (NOT USE_INTERNAL_LIBGSASL_LIBRARY) endif () if (LIBGSASL_LIBRARY AND LIBGSASL_INCLUDE_DIR) -elseif (NOT APPLE AND NOT ARCH_32) +elseif (NOT MISSING_INTERNAL_LIBGSASL_LIBRARY AND NOT APPLE AND NOT ARCH_32) set (LIBGSASL_INCLUDE_DIR ${ClickHouse_SOURCE_DIR}/contrib/libgsasl/src ${ClickHouse_SOURCE_DIR}/contrib/libgsasl/linux_x86_64/include) set (USE_INTERNAL_LIBGSASL_LIBRARY 1) set (LIBGSASL_LIBRARY libgsasl) diff --git a/cmake/find_libxml2.cmake b/cmake/find_libxml2.cmake index cfababfbf63..db34801b238 100644 --- a/cmake/find_libxml2.cmake +++ b/cmake/find_libxml2.cmake @@ -3,6 +3,7 @@ option (USE_INTERNAL_LIBXML2_LIBRARY "Set to FALSE to use system libxml2 library if (USE_INTERNAL_LIBXML2_LIBRARY AND NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libxml2/libxml.h") message (WARNING "submodule contrib/libxml2 is missing. to fix try run: \n git submodule update --init --recursive") set (USE_INTERNAL_LIBXML2_LIBRARY 0) + set (MISSING_INTERNAL_LIBXML2_LIBRARY 1) endif () if (NOT USE_INTERNAL_LIBXML2_LIBRARY) @@ -11,7 +12,7 @@ if (NOT USE_INTERNAL_LIBXML2_LIBRARY) endif () if (LIBXML2_LIBRARY AND LIBXML2_INCLUDE_DIR) -else () +elseif (NOT MISSING_INTERNAL_LIBXML2_LIBRARY) set (LIBXML2_INCLUDE_DIR ${ClickHouse_SOURCE_DIR}/contrib/libxml2/include ${ClickHouse_SOURCE_DIR}/contrib/libxml2-cmake/linux_x86_64/include) set (USE_INTERNAL_LIBXML2_LIBRARY 1) set (LIBXML2_LIBRARY libxml2) From d5dbfd57418d5235ccfadc7931cdf03f1343a0b0 Mon Sep 17 00:00:00 2001 From: proller Date: Wed, 19 Dec 2018 19:15:29 +0300 Subject: [PATCH 18/31] Fix build without submodules --- cmake/find_libgsasl.cmake | 10 ++++++---- cmake/find_libxml2.cmake | 10 ++++++---- 2 files changed, 12 insertions(+), 8 deletions(-) diff --git a/cmake/find_libgsasl.cmake b/cmake/find_libgsasl.cmake index f22ead4c765..ef1bbefe0df 100644 --- a/cmake/find_libgsasl.cmake +++ b/cmake/find_libgsasl.cmake @@ -2,10 +2,12 @@ if (NOT APPLE AND NOT ARCH_32) option (USE_INTERNAL_LIBGSASL_LIBRARY "Set to FALSE to use system libgsasl library instead of bundled" ${NOT_UNBUNDLED}) endif () -if (USE_INTERNAL_LIBGSASL_LIBRARY AND NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libgsasl/src/gsasl.h") - message (WARNING "submodule contrib/libgsasl is missing. to fix try run: \n git submodule update --init --recursive") - set (USE_INTERNAL_LIBGSASL_LIBRARY 0) - set (MISSING_INTERNAL_LIBGSASL_LIBRARY 1) +if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libgsasl/src/gsasl.h") + if (USE_INTERNAL_LIBGSASL_LIBRARY) + message (WARNING "submodule contrib/libgsasl is missing. to fix try run: \n git submodule update --init --recursive") + set (USE_INTERNAL_LIBGSASL_LIBRARY 0) + endif () + set (MISSING_INTERNAL_LIBGSASL_LIBRARY 1) endif () if (NOT USE_INTERNAL_LIBGSASL_LIBRARY) diff --git a/cmake/find_libxml2.cmake b/cmake/find_libxml2.cmake index db34801b238..b421d113361 100644 --- a/cmake/find_libxml2.cmake +++ b/cmake/find_libxml2.cmake @@ -1,9 +1,11 @@ option (USE_INTERNAL_LIBXML2_LIBRARY "Set to FALSE to use system libxml2 library instead of bundled" ${NOT_UNBUNDLED}) -if (USE_INTERNAL_LIBXML2_LIBRARY AND NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libxml2/libxml.h") - message (WARNING "submodule contrib/libxml2 is missing. 
to fix try run: \n git submodule update --init --recursive") - set (USE_INTERNAL_LIBXML2_LIBRARY 0) - set (MISSING_INTERNAL_LIBXML2_LIBRARY 1) +if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libxml2/libxml.h") + if (USE_INTERNAL_LIBXML2_LIBRARY) + message (WARNING "submodule contrib/libxml2 is missing. to fix try run: \n git submodule update --init --recursive") + set (USE_INTERNAL_LIBXML2_LIBRARY 0) + endif () + set (MISSING_INTERNAL_LIBXML2_LIBRARY 1) endif () if (NOT USE_INTERNAL_LIBXML2_LIBRARY) From 133b7240a37209401942b1e2e970318ac2677ce5 Mon Sep 17 00:00:00 2001 From: proller Date: Wed, 19 Dec 2018 19:15:29 +0300 Subject: [PATCH 19/31] Fix build without submodules --- cmake/find_libgsasl.cmake | 10 ++++++---- cmake/find_libxml2.cmake | 10 ++++++---- 2 files changed, 12 insertions(+), 8 deletions(-) diff --git a/cmake/find_libgsasl.cmake b/cmake/find_libgsasl.cmake index f22ead4c765..ef1bbefe0df 100644 --- a/cmake/find_libgsasl.cmake +++ b/cmake/find_libgsasl.cmake @@ -2,10 +2,12 @@ if (NOT APPLE AND NOT ARCH_32) option (USE_INTERNAL_LIBGSASL_LIBRARY "Set to FALSE to use system libgsasl library instead of bundled" ${NOT_UNBUNDLED}) endif () -if (USE_INTERNAL_LIBGSASL_LIBRARY AND NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libgsasl/src/gsasl.h") - message (WARNING "submodule contrib/libgsasl is missing. to fix try run: \n git submodule update --init --recursive") - set (USE_INTERNAL_LIBGSASL_LIBRARY 0) - set (MISSING_INTERNAL_LIBGSASL_LIBRARY 1) +if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libgsasl/src/gsasl.h") + if (USE_INTERNAL_LIBGSASL_LIBRARY) + message (WARNING "submodule contrib/libgsasl is missing. to fix try run: \n git submodule update --init --recursive") + set (USE_INTERNAL_LIBGSASL_LIBRARY 0) + endif () + set (MISSING_INTERNAL_LIBGSASL_LIBRARY 1) endif () if (NOT USE_INTERNAL_LIBGSASL_LIBRARY) diff --git a/cmake/find_libxml2.cmake b/cmake/find_libxml2.cmake index db34801b238..b421d113361 100644 --- a/cmake/find_libxml2.cmake +++ b/cmake/find_libxml2.cmake @@ -1,9 +1,11 @@ option (USE_INTERNAL_LIBXML2_LIBRARY "Set to FALSE to use system libxml2 library instead of bundled" ${NOT_UNBUNDLED}) -if (USE_INTERNAL_LIBXML2_LIBRARY AND NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libxml2/libxml.h") - message (WARNING "submodule contrib/libxml2 is missing. to fix try run: \n git submodule update --init --recursive") - set (USE_INTERNAL_LIBXML2_LIBRARY 0) - set (MISSING_INTERNAL_LIBXML2_LIBRARY 1) +if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libxml2/libxml.h") + if (USE_INTERNAL_LIBXML2_LIBRARY) + message (WARNING "submodule contrib/libxml2 is missing. to fix try run: \n git submodule update --init --recursive") + set (USE_INTERNAL_LIBXML2_LIBRARY 0) + endif () + set (MISSING_INTERNAL_LIBXML2_LIBRARY 1) endif () if (NOT USE_INTERNAL_LIBXML2_LIBRARY) From 6959dc3b335790904ba28f19694449ebb0fcd7d6 Mon Sep 17 00:00:00 2001 From: proller Date: Wed, 19 Dec 2018 19:25:38 +0300 Subject: [PATCH 20/31] Fix build without submodules --- cmake/find_base64.cmake | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/cmake/find_base64.cmake b/cmake/find_base64.cmake index ad71d11c1b4..9b6e28a8ccf 100644 --- a/cmake/find_base64.cmake +++ b/cmake/find_base64.cmake @@ -1,4 +1,11 @@ -option (ENABLE_BASE64 "Enable base64" ON) +if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/base64/lib/lib.c") + set (MISSING_INTERNAL_BASE64_LIBRARY 1) + message (WARNING "submodule contrib/base64 is missing. 
to fix try run: \n git submodule update --init --recursive") +endif () + +if (NOT MISSING_INTERNAL_BASE64_LIBRARY) + option (ENABLE_BASE64 "Enable base64" ON) +endif () if (ENABLE_BASE64) if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/base64") @@ -9,4 +16,3 @@ if (ENABLE_BASE64) set (USE_BASE64 1) endif() endif () - From bf625b25c3f0cde70bdbb88451620d0a720f4a68 Mon Sep 17 00:00:00 2001 From: proller Date: Wed, 19 Dec 2018 19:25:38 +0300 Subject: [PATCH 21/31] Fix build without submodules --- cmake/find_base64.cmake | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/cmake/find_base64.cmake b/cmake/find_base64.cmake index ad71d11c1b4..9b6e28a8ccf 100644 --- a/cmake/find_base64.cmake +++ b/cmake/find_base64.cmake @@ -1,4 +1,11 @@ -option (ENABLE_BASE64 "Enable base64" ON) +if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/base64/lib/lib.c") + set (MISSING_INTERNAL_BASE64_LIBRARY 1) + message (WARNING "submodule contrib/base64 is missing. to fix try run: \n git submodule update --init --recursive") +endif () + +if (NOT MISSING_INTERNAL_BASE64_LIBRARY) + option (ENABLE_BASE64 "Enable base64" ON) +endif () if (ENABLE_BASE64) if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/base64") @@ -9,4 +16,3 @@ if (ENABLE_BASE64) set (USE_BASE64 1) endif() endif () - From fdd22ad187340925858407d32fb3aa8c68012070 Mon Sep 17 00:00:00 2001 From: proller Date: Wed, 19 Dec 2018 20:56:58 +0300 Subject: [PATCH 22/31] Fix includes --- dbms/programs/server/TCPHandler.h | 1 + dbms/src/Interpreters/RequiredSourceColumnsVisitor.h | 1 + dbms/src/Parsers/ASTEnumElement.h | 4 ++-- utils/check-style/check-include | 3 +++ 4 files changed, 7 insertions(+), 2 deletions(-) diff --git a/dbms/programs/server/TCPHandler.h b/dbms/programs/server/TCPHandler.h index 43c544cc401..98b76268047 100644 --- a/dbms/programs/server/TCPHandler.h +++ b/dbms/programs/server/TCPHandler.h @@ -25,6 +25,7 @@ namespace Poco { class Logger; } namespace DB { +class ColumnsDescription; /// State of query processing. struct QueryState diff --git a/dbms/src/Interpreters/RequiredSourceColumnsVisitor.h b/dbms/src/Interpreters/RequiredSourceColumnsVisitor.h index ebf948ffeb8..859326acbe8 100644 --- a/dbms/src/Interpreters/RequiredSourceColumnsVisitor.h +++ b/dbms/src/Interpreters/RequiredSourceColumnsVisitor.h @@ -8,6 +8,7 @@ #include #include #include +#include "InDepthNodeVisitor.h" namespace DB { diff --git a/dbms/src/Parsers/ASTEnumElement.h b/dbms/src/Parsers/ASTEnumElement.h index 8e0a2ec1648..c603f5086de 100644 --- a/dbms/src/Parsers/ASTEnumElement.h +++ b/dbms/src/Parsers/ASTEnumElement.h @@ -1,6 +1,6 @@ #pragma once -#include +#include "IAST.h" #include #include @@ -18,7 +18,7 @@ public: ASTEnumElement(const String & name, const Field & value) : name{name}, value {value} {} - String getID() const override { return "EnumElement"; } + String getID(char) const override { return "EnumElement"; } ASTPtr clone() const override { diff --git a/utils/check-style/check-include b/utils/check-style/check-include index cdc5a82d7ed..eede9940dcd 100755 --- a/utils/check-style/check-include +++ b/utils/check-style/check-include @@ -34,6 +34,9 @@ inc="-I. 
\ -I./contrib/poco/Foundation/include \ -I./contrib/boost/libs/*/include \ -I./contrib/boost \ +-I/usr/include/llvm-7 \ +-I/usr/include/llvm-6.0 \ +-I/usr/include/llvm-5.0 \ -I./contrib/llvm/llvm/include \ -I${BUILD_DIR}/contrib/llvm/llvm/include \ -I./contrib/libbtrie/include \ From 6869771ae4438d547ada9250ae5ef42a834af4bc Mon Sep 17 00:00:00 2001 From: proller Date: Wed, 19 Dec 2018 20:57:51 +0300 Subject: [PATCH 23/31] Fix includes --- dbms/programs/server/TCPHandler.h | 1 + dbms/src/Interpreters/RequiredSourceColumnsVisitor.h | 1 + dbms/src/Parsers/ASTEnumElement.h | 4 ++-- utils/check-style/check-include | 3 +++ 4 files changed, 7 insertions(+), 2 deletions(-) diff --git a/dbms/programs/server/TCPHandler.h b/dbms/programs/server/TCPHandler.h index 43c544cc401..98b76268047 100644 --- a/dbms/programs/server/TCPHandler.h +++ b/dbms/programs/server/TCPHandler.h @@ -25,6 +25,7 @@ namespace Poco { class Logger; } namespace DB { +class ColumnsDescription; /// State of query processing. struct QueryState diff --git a/dbms/src/Interpreters/RequiredSourceColumnsVisitor.h b/dbms/src/Interpreters/RequiredSourceColumnsVisitor.h index ebf948ffeb8..859326acbe8 100644 --- a/dbms/src/Interpreters/RequiredSourceColumnsVisitor.h +++ b/dbms/src/Interpreters/RequiredSourceColumnsVisitor.h @@ -8,6 +8,7 @@ #include #include #include +#include "InDepthNodeVisitor.h" namespace DB { diff --git a/dbms/src/Parsers/ASTEnumElement.h b/dbms/src/Parsers/ASTEnumElement.h index 8e0a2ec1648..c603f5086de 100644 --- a/dbms/src/Parsers/ASTEnumElement.h +++ b/dbms/src/Parsers/ASTEnumElement.h @@ -1,6 +1,6 @@ #pragma once -#include +#include "IAST.h" #include #include @@ -18,7 +18,7 @@ public: ASTEnumElement(const String & name, const Field & value) : name{name}, value {value} {} - String getID() const override { return "EnumElement"; } + String getID(char) const override { return "EnumElement"; } ASTPtr clone() const override { diff --git a/utils/check-style/check-include b/utils/check-style/check-include index cdc5a82d7ed..eede9940dcd 100755 --- a/utils/check-style/check-include +++ b/utils/check-style/check-include @@ -34,6 +34,9 @@ inc="-I. \ -I./contrib/poco/Foundation/include \ -I./contrib/boost/libs/*/include \ -I./contrib/boost \ +-I/usr/include/llvm-7 \ +-I/usr/include/llvm-6.0 \ +-I/usr/include/llvm-5.0 \ -I./contrib/llvm/llvm/include \ -I${BUILD_DIR}/contrib/llvm/llvm/include \ -I./contrib/libbtrie/include \ From bd179822248094a38576d18a2b9cf6e7988281b0 Mon Sep 17 00:00:00 2001 From: proller Date: Thu, 20 Dec 2018 12:54:28 +0300 Subject: [PATCH 24/31] Fix clang build --- dbms/programs/server/TCPHandler.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dbms/programs/server/TCPHandler.h b/dbms/programs/server/TCPHandler.h index 98b76268047..19641e88d25 100644 --- a/dbms/programs/server/TCPHandler.h +++ b/dbms/programs/server/TCPHandler.h @@ -25,7 +25,7 @@ namespace Poco { class Logger; } namespace DB { -class ColumnsDescription; +struct ColumnsDescription; /// State of query processing. 
struct QueryState From b63ddfa183d16a15e2a0b7901d1118a2571ffadc Mon Sep 17 00:00:00 2001 From: ogorbacheva Date: Thu, 20 Dec 2018 15:04:28 +0300 Subject: [PATCH 25/31] changed the zookeper settings example (#3883) --- docs/en/operations/server_settings/settings.md | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/docs/en/operations/server_settings/settings.md b/docs/en/operations/server_settings/settings.md index 4275b5514c0..cc65063c70b 100644 --- a/docs/en/operations/server_settings/settings.md +++ b/docs/en/operations/server_settings/settings.md @@ -681,8 +681,20 @@ For more information, see the section "[Replication](../../operations/table_engi **Example** ```xml - + + + example1 + 2181 + + + example2 + 2181 + + + example3 + 2181 + + ``` - [Original article](https://clickhouse.yandex/docs/en/operations/server_settings/settings/) From 7c5cff2dd4b779e912cf36744ae5f753a49b3489 Mon Sep 17 00:00:00 2001 From: Alexey Milovidov Date: Thu, 20 Dec 2018 16:23:49 +0300 Subject: [PATCH 26/31] Renamed test #3350 --- ...ase64_functions.reference => 00732_base64_functions.reference} | 0 .../{000732_base64_functions.sql => 00732_base64_functions.sql} | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename dbms/tests/queries/0_stateless/{000732_base64_functions.reference => 00732_base64_functions.reference} (100%) rename dbms/tests/queries/0_stateless/{000732_base64_functions.sql => 00732_base64_functions.sql} (100%) diff --git a/dbms/tests/queries/0_stateless/000732_base64_functions.reference b/dbms/tests/queries/0_stateless/00732_base64_functions.reference similarity index 100% rename from dbms/tests/queries/0_stateless/000732_base64_functions.reference rename to dbms/tests/queries/0_stateless/00732_base64_functions.reference diff --git a/dbms/tests/queries/0_stateless/000732_base64_functions.sql b/dbms/tests/queries/0_stateless/00732_base64_functions.sql similarity index 100% rename from dbms/tests/queries/0_stateless/000732_base64_functions.sql rename to dbms/tests/queries/0_stateless/00732_base64_functions.sql From 6730f540a1892b13cca1b20baf70c3ef337e154f Mon Sep 17 00:00:00 2001 From: alesapin Date: Thu, 20 Dec 2018 16:40:20 +0300 Subject: [PATCH 27/31] Fix path to search base64 library --- cmake/find_base64.cmake | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cmake/find_base64.cmake b/cmake/find_base64.cmake index 9b6e28a8ccf..8e52c8463c8 100644 --- a/cmake/find_base64.cmake +++ b/cmake/find_base64.cmake @@ -1,4 +1,4 @@ -if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/base64/lib/lib.c") +if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/base64/lib/lib.c") set (MISSING_INTERNAL_BASE64_LIBRARY 1) message (WARNING "submodule contrib/base64 is missing. 
to fix try run: \n git submodule update --init --recursive") endif () From 2ae6b6414c7131829f49adad610322693755ddd7 Mon Sep 17 00:00:00 2001 From: Alexey Milovidov Date: Thu, 20 Dec 2018 17:16:07 +0300 Subject: [PATCH 28/31] Fixed error in 'compressor' tool: the 'level' parameter wasn't working #1345 --- dbms/programs/compressor/Compressor.cpp | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/dbms/programs/compressor/Compressor.cpp b/dbms/programs/compressor/Compressor.cpp index 4a412d987b4..544238bf581 100644 --- a/dbms/programs/compressor/Compressor.cpp +++ b/dbms/programs/compressor/Compressor.cpp @@ -61,7 +61,7 @@ int mainEntryClickHouseCompressor(int argc, char ** argv) ("block-size,b", boost::program_options::value()->default_value(DBMS_DEFAULT_BUFFER_SIZE), "compress in blocks of specified size") ("hc", "use LZ4HC instead of LZ4") ("zstd", "use ZSTD instead of LZ4") - ("level", "compression level") + ("level", boost::program_options::value(), "compression level") ("none", "use no compression instead of LZ4") ("stat", "print block statistics of compressed data") ; @@ -94,7 +94,9 @@ int mainEntryClickHouseCompressor(int argc, char ** argv) else if (use_none) method = DB::CompressionMethod::NONE; - DB::CompressionSettings settings(method, options.count("level") > 0 ? options["level"].as() : DB::CompressionSettings::getDefaultLevel(method)); + DB::CompressionSettings settings(method, options.count("level") + ? options["level"].as() + : DB::CompressionSettings::getDefaultLevel(method)); DB::ReadBufferFromFileDescriptor rb(STDIN_FILENO); DB::WriteBufferFromFileDescriptor wb(STDOUT_FILENO); From 731d76821d357683311cf84df1c080ece898a5d6 Mon Sep 17 00:00:00 2001 From: Alexey Milovidov Date: Thu, 20 Dec 2018 18:14:32 +0300 Subject: [PATCH 29/31] Merging abandoned pull request with "boundingRatio" function #3139 --- .../AggregateFunctionBoundingRatio.h | 133 +++++++++--------- .../00715_bounding_ratio.reference | 19 +++ .../0_stateless/00715_bounding_ratio.sql | 18 ++- 3 files changed, 99 insertions(+), 71 deletions(-) diff --git a/dbms/src/AggregateFunctions/AggregateFunctionBoundingRatio.h b/dbms/src/AggregateFunctions/AggregateFunctionBoundingRatio.h index 3cc6d92547b..f89943f1fc6 100644 --- a/dbms/src/AggregateFunctions/AggregateFunctionBoundingRatio.h +++ b/dbms/src/AggregateFunctions/AggregateFunctionBoundingRatio.h @@ -1,18 +1,10 @@ #pragma once -#include -#include -#include -#include -#include -#include #include +#include +#include #include #include -#include -#include -#include - #include #include @@ -20,61 +12,80 @@ namespace DB { +namespace ErrorCodes +{ + extern const int BAD_ARGUMENTS; +} + +/** Tracks the leftmost and rightmost (x, y) data points. 
+ */ struct AggregateFunctionBoundingRatioData { - using TimestampEvent = std::pair; - - bool is_first = false; - - TimestampEvent first_event; - TimestampEvent last_event; - - void add(UInt32 timestamp, Float64 f) + struct Point { - if (is_first) + Float64 x; + Float64 y; + }; + + bool empty = true; + Point left; + Point right; + + void add(Float64 x, Float64 y) + { + Point point{x, y}; + + if (empty) { - first_event = TimestampEvent{timestamp, f}; - is_first = true; + left = point; + right = point; + empty = false; } - else + else if (point.x < left.x) { - last_event = TimestampEvent{timestamp, f}; + left = point; + } + else if (point.x > right.x) + { + right = point; } } void merge(const AggregateFunctionBoundingRatioData & other) { - // if the arg is earlier than us, replace us with them - if (other.first_event.first < first_event.first) + if (empty) { - first_event = other.first_event; + *this = other; } - // if the arg is _later_ than us, replace us with them - if (other.last_event.first > last_event.second) + else { - last_event = other.last_event; + if (other.left.x < left.x) + left = other.left; + if (other.right.x > right.x) + right = other.right; } } void serialize(WriteBuffer & buf) const { - writeBinary(is_first, buf); - writeBinary(first_event.first, buf); - writeBinary(first_event.second, buf); + writeBinary(empty, buf); - writeBinary(last_event.first, buf); - writeBinary(last_event.second, buf); + if (!empty) + { + writePODBinary(left, buf); + writePODBinary(right, buf); + } } void deserialize(ReadBuffer & buf) { - readBinary(is_first, buf); + readBinary(empty, buf); - readBinary(first_event.first, buf); - readBinary(first_event.second, buf); - - readBinary(last_event.first, buf); - readBinary(last_event.second, buf); + if (!empty) + { + readPODBinary(left, buf); + readPODBinary(right, buf); + } } }; @@ -82,21 +93,15 @@ struct AggregateFunctionBoundingRatioData class AggregateFunctionBoundingRatio final : public IAggregateFunctionDataHelper { private: - /* implements a basic derivative function - * - * (y2 - y1) / (x2 - x1) - */ + /** Calculates the slope of a line between leftmost and rightmost data points. 
+ * (y2 - y1) / (x2 - x1) + */ Float64 getBoundingRatio(const AggregateFunctionBoundingRatioData & data) const { - if (data.first_event.first == 0) - return 0; - if (data.last_event.first == 0) - return 0; - // void divide by zero in denominator - if (data.last_event.first == data.first_event.first) - return 0; + if (data.empty) + return std::numeric_limits::quiet_NaN(); - return (data.last_event.second - data.first_event.second) / (data.last_event.first - data.first_event.first); + return (data.right.y - data.left.y) / (data.right.x - data.left.x); } public: @@ -107,21 +112,14 @@ public: AggregateFunctionBoundingRatio(const DataTypes & arguments) { + const auto x_arg = arguments.at(0).get(); + const auto y_arg = arguments.at(0).get(); - - const auto time_arg = arguments.at(0).get(); - if (!typeid_cast(time_arg) && !typeid_cast(time_arg)) - throw Exception {"Illegal type " + time_arg->getName() + " of first argument of aggregate function " + getName() - + ", must be DateTime or UInt32"}; - - const auto number_arg = arguments.at(1).get(); - if (!number_arg->isNumber()) - throw Exception {"Illegal type " + number_arg->getName() + " of argument " + toString(1) + " of aggregate function " + getName() - + ", must be a Number", - ErrorCodes::ILLEGAL_TYPE_OF_ARGUMENT}; + if (!x_arg->isValueRepresentedByNumber() || !y_arg->isValueRepresentedByNumber()) + throw Exception("Illegal types of arguments of aggregate function " + getName() + ", must have number representation.", + ErrorCodes::BAD_ARGUMENTS); } - DataTypePtr getReturnType() const override { return std::make_shared(); @@ -129,9 +127,10 @@ public: void add(AggregateDataPtr place, const IColumn ** columns, const size_t row_num, Arena *) const override { - const auto timestamp = static_cast *>(columns[0])->getData()[row_num]; - const auto value = static_cast *>(columns[1])->getData()[row_num]; - data(place).add(timestamp, value); + /// TODO Inefficient. 
+ const auto x = applyVisitor(FieldVisitorConvertToNumber(), (*columns[0])[row_num]); + const auto y = applyVisitor(FieldVisitorConvertToNumber(), (*columns[1])[row_num]); + data(place).add(x, y); } void merge(AggregateDataPtr place, ConstAggregateDataPtr rhs, Arena *) const override diff --git a/dbms/tests/queries/0_stateless/00715_bounding_ratio.reference b/dbms/tests/queries/0_stateless/00715_bounding_ratio.reference index 6ed281c757a..f1e96af83a9 100644 --- a/dbms/tests/queries/0_stateless/00715_bounding_ratio.reference +++ b/dbms/tests/queries/0_stateless/00715_bounding_ratio.reference @@ -1,2 +1,21 @@ 1 1 +1.5 +1.5 +1.5 +0 1.5 +1 1.5 +2 1.5 +3 1.5 +4 1.5 +5 1.5 +6 1.5 +7 1.5 +8 1.5 +9 1.5 + +0 1.5 +1.5 +nan +nan +1 diff --git a/dbms/tests/queries/0_stateless/00715_bounding_ratio.sql b/dbms/tests/queries/0_stateless/00715_bounding_ratio.sql index 8cea70a09b1..ff3cd4c606b 100644 --- a/dbms/tests/queries/0_stateless/00715_bounding_ratio.sql +++ b/dbms/tests/queries/0_stateless/00715_bounding_ratio.sql @@ -3,14 +3,24 @@ drop table if exists rate_test; create table rate_test (timestamp UInt32, event UInt32) engine=Memory; insert into rate_test values (0,1000),(1,1001),(2,1002),(3,1003),(4,1004),(5,1005),(6,1006),(7,1007),(8,1008); -select 1.0 = rate(timestamp, event) from rate_test; +select 1.0 = boundingRatio(timestamp, event) from rate_test; drop table if exists rate_test2; create table rate_test2 (uid UInt32 default 1,timestamp DateTime, event UInt32) engine=Memory; -insert into rate_test2(timestamp, event) values ('2018-01-01 01:01:01',1001),('2018-01-01 01:01:02',1002),('2018-01-01 01:01:03',1003),('2018-01-01 01:01:04',1004),('2018-01-01 01:01:05',1005),('2018-01-01 01:01:06',1006),('2018-01-01 01:01:07',1007),('2018-01-01 01:01:08',1008); +insert into rate_test2(timestamp, event) values ('2018-01-01 01:01:01',1001),('2018-01-01 01:01:02',1002),('2018-01-01 01:01:03',1003),('2018-01-01 01:01:04',1004),('2018-01-01 01:01:05',1005),('2018-01-01 01:01:06',1006),('2018-01-01 01:01:07',1007),('2018-01-01 01:01:08',1008); - -select 1.0 = rate(timestamp, event ) from rate_test2; +select 1.0 = boundingRatio(timestamp, event) from rate_test2; drop table rate_test; drop table rate_test2; + + +SELECT boundingRatio(number, number * 1.5) FROM numbers(10); +SELECT boundingRatio(1000 + number, number * 1.5) FROM numbers(10); +SELECT boundingRatio(1000 + number, number * 1.5 - 111) FROM numbers(10); +SELECT number % 10 AS k, boundingRatio(1000 + number, number * 1.5 - 111) FROM numbers(100) GROUP BY k WITH TOTALS ORDER BY k; + +SELECT boundingRatio(1000 + number, number * 1.5 - 111) FROM numbers(2); +SELECT boundingRatio(1000 + number, number * 1.5 - 111) FROM numbers(1); +SELECT boundingRatio(1000 + number, number * 1.5 - 111) FROM numbers(1) WHERE 0; +SELECT boundingRatio(number, exp(number)) = e() - 1 FROM numbers(2); From 7fc7a4cb7872faa1a539c8ddf757dfa76dc6ba25 Mon Sep 17 00:00:00 2001 From: Alexey Milovidov Date: Thu, 20 Dec 2018 18:22:27 +0300 Subject: [PATCH 30/31] Suppress warning in protobuf build [#CLICKHOUSE-2] --- contrib/CMakeLists.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/contrib/CMakeLists.txt b/contrib/CMakeLists.txt index 66173322659..989761bfb67 100644 --- a/contrib/CMakeLists.txt +++ b/contrib/CMakeLists.txt @@ -2,7 +2,7 @@ if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU") set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wno-unused-function -Wno-unused-variable -Wno-unused-but-set-variable -Wno-unused-result -Wno-deprecated-declarations -Wno-maybe-uninitialized 
-Wno-format -Wno-misleading-indentation -Wno-stringop-overflow") - set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-old-style-cast -Wno-unused-function -Wno-unused-variable -Wno-unused-but-set-variable -Wno-unused-result -Wno-deprecated-declarations -Wno-non-virtual-dtor -Wno-maybe-uninitialized -Wno-format -Wno-misleading-indentation -Wno-implicit-fallthrough -Wno-class-memaccess -std=c++1z") + set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-old-style-cast -Wno-unused-function -Wno-unused-variable -Wno-unused-but-set-variable -Wno-unused-result -Wno-deprecated-declarations -Wno-non-virtual-dtor -Wno-maybe-uninitialized -Wno-format -Wno-misleading-indentation -Wno-implicit-fallthrough -Wno-class-memaccess -Wno-sign-compare -std=c++1z") elseif (CMAKE_CXX_COMPILER_ID STREQUAL "Clang") set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wno-unused-function -Wno-unused-variable -Wno-unused-result -Wno-deprecated-declarations -Wno-format -Wno-parentheses-equality") set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-old-style-cast -Wno-unused-function -Wno-unused-variable -Wno-unused-result -Wno-deprecated-declarations -Wno-non-virtual-dtor -Wno-format -std=c++1z") From ea018a0a36cafb0c010420adf50169fec832d6e7 Mon Sep 17 00:00:00 2001 From: ogorbacheva Date: Thu, 20 Dec 2018 18:33:47 +0300 Subject: [PATCH 31/31] Add the description of the CHECK TABLE query (#3881) * Added the description of the CHECK table query * some fixes * some fixes * fixes in CHECK TABLE doc --- docs/en/interfaces/formats.md | 2 +- docs/en/operations/settings/settings.md | 2 +- docs/en/query_language/misc.md | 45 ++++++++++++++++++++++--- 3 files changed, 42 insertions(+), 7 deletions(-) diff --git a/docs/en/interfaces/formats.md b/docs/en/interfaces/formats.md index 8ede8c5507d..a21e8b53d2a 100644 --- a/docs/en/interfaces/formats.md +++ b/docs/en/interfaces/formats.md @@ -169,7 +169,7 @@ When formatting, rows are enclosed in double quotes. A double quote inside a str clickhouse-client --format_csv_delimiter="|" --query="INSERT INTO test.csv FORMAT CSV" < data.csv ``` -*By default, the delimiter is `,`. See the [format_csv_delimiter](/operations/settings/settings/#format_csv_delimiter) setting for more information. +*By default, the delimiter is `,`. See the [format_csv_delimiter](../operations/settings/settings.md#format_csv_delimiter) setting for more information. When parsing, all values can be parsed either with or without quotes. Both double and single quotes are supported. Rows can also be arranged without quotes. In this case, they are parsed up to the delimiter character or line feed (CR or LF). In violation of the RFC, when parsing rows without quotes, the leading and trailing spaces and tabs are ignored. For the line feed, Unix (LF), Windows (CR LF) and Mac OS Classic (CR LF) types are all supported. diff --git a/docs/en/operations/settings/settings.md b/docs/en/operations/settings/settings.md index 2d63c3e5e9a..3b4cf268579 100644 --- a/docs/en/operations/settings/settings.md +++ b/docs/en/operations/settings/settings.md @@ -149,7 +149,7 @@ Default value: 0 (off). Used when performing `SELECT` from a distributed table that points to replicated tables. 
-## max_threads +## max_threads {#max_threads} The maximum number of query processing threads diff --git a/docs/en/query_language/misc.md b/docs/en/query_language/misc.md index 148f4fe69f9..159a7611206 100644 --- a/docs/en/query_language/misc.md +++ b/docs/en/query_language/misc.md @@ -4,8 +4,8 @@ This query is exactly the same as `CREATE`, but -- instead of the word `CREATE` it uses the word `ATTACH`. -- The query doesn't create data on the disk, but assumes that data is already in the appropriate places, and just adds information about the table to the server. +- Instead of the word `CREATE` it uses the word `ATTACH`. +- The query does not create data on the disk, but assumes that data is already in the appropriate places, and just adds information about the table to the server. After executing an ATTACH query, the server will know about the existence of the table. If the table was previously detached (``DETACH``), meaning that its structure is known, you can use shorthand without defining the structure. @@ -16,6 +16,41 @@ ATTACH TABLE [IF NOT EXISTS] [db.]name [ON CLUSTER cluster] This query is used when starting the server. The server stores table metadata as files with `ATTACH` queries, which it simply runs at launch (with the exception of system tables, which are explicitly created on the server). +## CHECK TABLE + +Checks if the data in the table is corrupted. + +``` sql +CHECK TABLE [db.]name +``` + +The `CHECK TABLE` query compares actual file sizes with the expected values which are stored on the server. If the file sizes do not match the stored values, it means the data is corrupted. This can be caused, for example, by a system crash during query execution. + +The query response contains the `result` column with a single row. The row has a value of + [Boolean](../data_types/boolean.md) type: + +- 0 - The data in the table is corrupted. +- 1 - The data maintains integrity. + +The `CHECK TABLE` query is only supported for the following table engines: + +- [Log](../operations/table_engines/log.md) +- [TinyLog](../operations/table_engines/tinylog.md) +- StripeLog + +These engines do not provide automatic data recovery on failure. Use the `CHECK TABLE` query to track data loss in a timely manner. + +To avoid data loss use the [MergeTree](../operations/table_engines/mergetree.md) family tables. + +**If the data is corrupted** + +If the table is corrupted, you can copy the non-corrupted data to another table. To do this: + +1. Create a new table with the same structure as damaged table. To do this execute the query `CREATE TABLE AS `. +2. Set the [max_threads](../operations/settings/settings.md#max_threads) value to 1 to process the next query in a single thread. To do this run the query `SET max_threads = 1`. +3. Execute the query `INSERT INTO SELECT * FROM `. This request copies the non-corrupted data from the damaged table to another table. Only the data before the corrupted part will be copied. +4. Restart the `clickhouse-client` to reset the `max_threads` value. + ## DESCRIBE TABLE ``` sql @@ -198,8 +233,8 @@ SHOW [TEMPORARY] TABLES [FROM db] [LIKE 'pattern'] [INTO OUTFILE filename] [FORM Displays a list of tables -- tables from the current database, or from the 'db' database if "FROM db" is specified. -- all tables, or tables whose name matches the pattern, if "LIKE 'pattern'" is specified. +- Tables from the current database, or from the 'db' database if "FROM db" is specified. +- All tables, or tables whose name matches the pattern, if "LIKE 'pattern'" is specified. 
This query is identical to: `SELECT name FROM system.tables WHERE database = 'db' [AND name LIKE 'pattern'] [INTO OUTFILE filename] [FORMAT format]`. @@ -207,7 +242,7 @@ See also the section "LIKE operator". ## TRUNCATE -```sql +``` sql TRUNCATE TABLE [IF EXISTS] [db.]name [ON CLUSTER cluster] ```
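
To make the behaviour of the `boundingRatio` aggregate introduced in patch 29 concrete, here is a minimal usage sketch. It is illustrative only and not part of the patch series: it assumes a server built with that patch applied and uses the built-in `numbers()` table function instead of the `rate_test` tables from the stateless test. As the diff shows, the function keeps only the leftmost and rightmost `(x, y)` points and returns the slope `(y_right - y_left) / (x_right - x_left)`, yielding `nan` when fewer than two distinct x values were aggregated.

```sql
-- Points are (0,0), (1,3), ..., (9,27); leftmost = (0,0), rightmost = (9,27),
-- so the result is 27 / 9 = 3.
SELECT boundingRatio(number, number * 3) FROM numbers(10);

-- A single point gives a zero denominator, so the result is nan.
SELECT boundingRatio(number, number * 3) FROM numbers(1);
```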