mirror of https://github.com/ClickHouse/ClickHouse.git, synced 2024-11-24 16:42:05 +00:00

commit 680da3d7ed
Merge remote-tracking branch 'origin/master' into oandrew-avro
AUTHORS (45 lines changed)
@@ -1,43 +1,2 @@
-The following authors have created the source code of "ClickHouse"
-published and distributed by YANDEX LLC as the owner:
-
-Alexander Makarov <asealback@yandex-team.ru>
-Alexander Prudaev <aprudaev@yandex-team.ru>
-Alexey Arno <af-arno@yandex-team.ru>
-Alexey Milovidov <milovidov@yandex-team.ru>
-Alexey Tronov <vkusny@yandex-team.ru>
-Alexey Vasiliev <loudhorr@yandex-team.ru>
-Alexey Zatelepin <ztlpn@yandex-team.ru>
-Amy Krishnevsky <krishnevsky@yandex-team.ru>
-Andrey M <hertz@yandex-team.ru>
-Andrey Mironov <hertz@yandex-team.ru>
-Andrey Urusov <drobus@yandex-team.ru>
-Anton Tikhonov <rokerjoker@yandex-team.ru>
-Dmitry Bilunov <kmeaw@yandex-team.ru>
-Dmitry Galuza <galuza@yandex-team.ru>
-Eugene Konkov <konkov@yandex-team.ru>
-Evgeniy Gatov <egatov@yandex-team.ru>
-Ilya Khomutov <robert@yandex-team.ru>
-Ilya Korolev <breeze@yandex-team.ru>
-Ivan Blinkov <blinkov@yandex-team.ru>
-Maxim Nikulin <mnikulin@yandex-team.ru>
-Michael Kolupaev <mkolupaev@yandex-team.ru>
-Michael Razuvaev <razuvaev@yandex-team.ru>
-Nikolai Kochetov <nik-kochetov@yandex-team.ru>
-Nikolay Vasiliev <lonlylocly@yandex-team.ru>
-Nikolay Volosatov <bamx23@yandex-team.ru>
-Pavel Artemkin <stanly@yandex-team.ru>
-Pavel Kartaviy <kartavyy@yandex-team.ru>
-Roman Nozdrin <drrtuy@yandex-team.ru>
-Roman Peshkurov <peshkurov@yandex-team.ru>
-Sergey Fedorov <fets@yandex-team.ru>
-Sergey Lazarev <hamilkar@yandex-team.ru>
-Sergey Magidovich <mgsergio@yandex-team.ru>
-Sergey Serebryanik <serebrserg@yandex-team.ru>
-Sergey Veletskiy <velom@yandex-team.ru>
-Vasily Okunev <okunev@yandex-team.ru>
-Vitaliy Lyudvichenko <vludv@yandex-team.ru>
-Vladimir Chebotarev <chebotarev@yandex-team.ru>
-Vsevolod Orlov <vorloff@yandex-team.ru>
-Vyacheslav Alipov <alipov@yandex-team.ru>
-Yuriy Galitskiy <orantius@yandex-team.ru>
+To see the list of authors who created the source code of ClickHouse, published and distributed by YANDEX LLC as the owner,
+run "SELECT * FROM system.contributors;" query on any ClickHouse server.
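For context, the query that the new AUTHORS text points to can be run from any client session. A minimal sketch (the ORDER BY and LIMIT are illustrative additions, not part of the commit):

    -- system.contributors exposes a single String column, name.
    SELECT name
    FROM system.contributors
    ORDER BY name
    LIMIT 10;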
@@ -353,7 +353,6 @@ include (cmake/find/rapidjson.cmake)
 include (cmake/find/fastops.cmake)
 include (cmake/find/orc.cmake)
 include (cmake/find/avro.cmake)
-include (cmake/find/replxx.cmake)
 
 find_contrib_lib(cityhash)
 find_contrib_lib(farmhash)
LICENSE (4 lines changed)
@@ -1,4 +1,4 @@
-Copyright 2016-2019 Yandex LLC
+Copyright 2016-2020 Yandex LLC
 
                                  Apache License
                            Version 2.0, January 2004
@@ -188,7 +188,7 @@ Copyright 2016-2019 Yandex LLC
       same "printed page" as the copyright notice for easier
       identification within third-party archives.
 
-   Copyright 2016-2019 Yandex LLC
+   Copyright 2016-2020 Yandex LLC
 
    Licensed under the Apache License, Version 2.0 (the "License");
    you may not use this file except in compliance with the License.
@@ -1,40 +0,0 @@
-option (ENABLE_REPLXX "Enable replxx support" ${NOT_UNBUNDLED})
-
-if (ENABLE_REPLXX)
-    option (USE_INTERNAL_REPLXX "Use internal replxx library" ${NOT_UNBUNDLED})
-
-    if (USE_INTERNAL_REPLXX AND NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/replxx/README.md")
-        message (WARNING "submodule contrib/replxx is missing. to fix try run: \n git submodule update --init --recursive")
-        set (USE_INTERNAL_REPLXX 0)
-    endif ()
-
-    if (NOT USE_INTERNAL_REPLXX)
-        find_library(LIBRARY_REPLXX NAMES replxx replxx-static)
-        find_path(INCLUDE_REPLXX replxx.hxx)
-
-        add_library(replxx UNKNOWN IMPORTED)
-        set_property(TARGET replxx PROPERTY IMPORTED_LOCATION ${LIBRARY_REPLXX})
-        target_include_directories(replxx PUBLIC ${INCLUDE_REPLXX})
-
-        set(CMAKE_REQUIRED_LIBRARIES replxx)
-        check_cxx_source_compiles(
-            "
-            #include <replxx.hxx>
-            int main() {
-                replxx::Replxx rx;
-            }
-            "
-            EXTERNAL_REPLXX_WORKS
-        )
-
-        if (NOT EXTERNAL_REPLXX_WORKS)
-            message (FATAL_ERROR "replxx is unusable: ${LIBRARY_REPLXX} ${INCLUDE_REPLXX}")
-        endif ()
-    endif ()
-
-    set(USE_REPLXX 1)
-
-    message (STATUS "Using replxx")
-else ()
-    set(USE_REPLXX 0)
-endif ()
contrib/CMakeLists.txt (vendored, 4 lines changed)
@@ -336,6 +336,4 @@ if (USE_FASTOPS)
     add_subdirectory (fastops-cmake)
 endif()
 
-if (USE_INTERNAL_REPLXX)
-   add_subdirectory (replxx-cmake)
-endif()
+add_subdirectory(replxx-cmake)
@@ -23,6 +23,10 @@ typedef unsigned __int64 uint64_t;
 
 #endif // !defined(_MSC_VER)
 
+#ifdef __cplusplus
+extern "C" {
+#endif
+
 //-----------------------------------------------------------------------------
 
 void MurmurHash3_x86_32 ( const void * key, int len, uint32_t seed, void * out );
@@ -32,3 +36,7 @@ void MurmurHash3_x86_128 ( const void * key, int len, uint32_t seed, void * out
 void MurmurHash3_x64_128 ( const void * key, int len, uint32_t seed, void * out );
 
 //-----------------------------------------------------------------------------
+
+#ifdef __cplusplus
+}
+#endif
@@ -1,18 +1,57 @@
-set (LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/replxx")
-
-set(SRCS
-    ${LIBRARY_DIR}/src/conversion.cxx
-    ${LIBRARY_DIR}/src/escape.cxx
-    ${LIBRARY_DIR}/src/history.cxx
-    ${LIBRARY_DIR}/src/io.cxx
-    ${LIBRARY_DIR}/src/prompt.cxx
-    ${LIBRARY_DIR}/src/replxx.cxx
-    ${LIBRARY_DIR}/src/replxx_impl.cxx
-    ${LIBRARY_DIR}/src/util.cxx
-    ${LIBRARY_DIR}/src/wcwidth.cpp
-    ${LIBRARY_DIR}/src/ConvertUTF.cpp
-)
-
-add_library(replxx ${SRCS})
-target_include_directories(replxx PUBLIC ${LIBRARY_DIR}/include)
-target_compile_options(replxx PUBLIC -Wno-documentation)
+option (ENABLE_REPLXX "Enable replxx support" ${ENABLE_LIBRARIES})
+
+if (ENABLE_REPLXX)
+    option (USE_INTERNAL_REPLXX "Use internal replxx library" ${NOT_UNBUNDLED})
+
+    if (USE_INTERNAL_REPLXX)
+        set (LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/replxx")
+
+        set(SRCS
+            ${LIBRARY_DIR}/src/conversion.cxx
+            ${LIBRARY_DIR}/src/ConvertUTF.cpp
+            ${LIBRARY_DIR}/src/escape.cxx
+            ${LIBRARY_DIR}/src/history.cxx
+            ${LIBRARY_DIR}/src/io.cxx
+            ${LIBRARY_DIR}/src/prompt.cxx
+            ${LIBRARY_DIR}/src/replxx_impl.cxx
+            ${LIBRARY_DIR}/src/replxx.cxx
+            ${LIBRARY_DIR}/src/util.cxx
+            ${LIBRARY_DIR}/src/wcwidth.cpp
+        )
+
+        add_library (replxx ${SRCS})
+        target_include_directories(replxx PUBLIC ${LIBRARY_DIR}/include)
+    else ()
+        find_library(LIBRARY_REPLXX NAMES replxx replxx-static)
+        find_path(INCLUDE_REPLXX replxx.hxx)
+
+        add_library(replxx UNKNOWN IMPORTED)
+        set_property(TARGET replxx PROPERTY IMPORTED_LOCATION ${LIBRARY_REPLXX})
+        target_include_directories(replxx PUBLIC ${INCLUDE_REPLXX})
+
+        set(CMAKE_REQUIRED_LIBRARIES replxx)
+        check_cxx_source_compiles(
+            "
+            #include <replxx.hxx>
+            int main() {
+                replxx::Replxx rx;
+            }
+            "
+            EXTERNAL_REPLXX_WORKS
+        )
+
+        if (NOT EXTERNAL_REPLXX_WORKS)
+            message (FATAL_ERROR "replxx is unusable: ${LIBRARY_REPLXX} ${INCLUDE_REPLXX}")
+        endif ()
+    endif ()
+
+    target_compile_options(replxx PUBLIC -Wno-documentation)
+    target_compile_definitions(replxx PUBLIC USE_REPLXX=1)
+
+    message (STATUS "Using replxx")
+else ()
+    add_library(replxx INTERFACE)
+    target_compile_definitions(replxx INTERFACE USE_REPLXX=0)
+
+    message (STATUS "Not using replxx (Beware! Runtime fallback to readline is possible!)")
+endif ()
@@ -4,7 +4,7 @@ set(CLICKHOUSE_CLIENT_SOURCES
     ${CMAKE_CURRENT_SOURCE_DIR}/Suggest.cpp
 )
 
-set(CLICKHOUSE_CLIENT_LINK PRIVATE clickhouse_common_config clickhouse_functions clickhouse_aggregate_functions clickhouse_common_io clickhouse_parsers string_utils ${LINE_EDITING_LIBS} ${Boost_PROGRAM_OPTIONS_LIBRARY})
+set(CLICKHOUSE_CLIENT_LINK PRIVATE clickhouse_common_config clickhouse_functions clickhouse_aggregate_functions clickhouse_common_io clickhouse_parsers string_utils ${Boost_PROGRAM_OPTIONS_LIBRARY})
 
 include(CheckSymbolExists)
 check_symbol_exists(readpassphrase readpassphrase.h HAVE_READPASSPHRASE)
@@ -2,6 +2,12 @@
 #include "ConnectionParameters.h"
 #include "Suggest.h"
 
+#if USE_REPLXX
+# include <common/ReplxxLineReader.h>
+#else
+# include <common/LineReader.h>
+#endif
+
 #include <stdlib.h>
 #include <fcntl.h>
 #include <signal.h>
@@ -19,7 +25,6 @@
 #include <Poco/File.h>
 #include <Poco/Util/Application.h>
 #include <common/find_symbols.h>
-#include <common/config_common.h>
 #include <common/LineReader.h>
 #include <Common/ClickHouseRevision.h>
 #include <Common/Stopwatch.h>
@@ -496,7 +501,11 @@ private:
         if (!history_file.empty() && !Poco::File(history_file).exists())
             Poco::File(history_file).createFile();
 
-        LineReader lr(&Suggest::instance(), history_file, '\\', config().has("multiline") ? ';' : 0);
+#if USE_REPLXX
+        ReplxxLineReader lr(Suggest::instance(), history_file, '\\', config().has("multiline") ? ';' : 0);
+#else
+        LineReader lr(history_file, '\\', config().has("multiline") ? ';' : 0);
+#endif
 
         do
         {
@@ -504,6 +513,12 @@ private:
             if (input.empty())
                 break;
 
+            if (input.ends_with("\\G"))
+            {
+                input.resize(input.size() - 2);
+                has_vertical_output_suffix = true;
+            }
+
             try
             {
                 if (!process(input))
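The \G handling added above strips the suffix and switches on vertical output for that one statement only. A hypothetical interactive session (illustrative, not taken from the commit):

    -- Typed into clickhouse-client; the trailing \G acts like FORMAT Vertical.
    SELECT 1 AS x, 'hello' AS y\G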
@@ -16,11 +16,11 @@ namespace ErrorCodes
 
 ParsedTemplateFormatString::ParsedTemplateFormatString(const FormatSchemaInfo & schema, const ColumnIdxGetter & idx_by_name)
 {
+    ReadBufferFromFile schema_file(schema.absoluteSchemaPath(), 4096);
+    String format_string;
+    readStringUntilEOF(format_string, schema_file);
     try
     {
-        ReadBufferFromFile schema_file(schema.absoluteSchemaPath(), 4096);
-        String format_string;
-        readStringUntilEOF(format_string, schema_file);
         parse(format_string, idx_by_name);
     }
     catch (DB::Exception & e)
@@ -193,7 +193,7 @@ const char * ParsedTemplateFormatString::readMayBeQuotedColumnNameInto(const cha
 String ParsedTemplateFormatString::dump() const
 {
     WriteBufferFromOwnString res;
-    res << "Delimiter " << 0 << ": ";
+    res << "\nDelimiter " << 0 << ": ";
     verbosePrintString(delimiters.front().c_str(), delimiters.front().c_str() + delimiters.front().size(), res);
 
     size_t num_columns = std::max(formats.size(), format_idx_to_column_idx.size());
@@ -540,6 +540,7 @@ public:
 
     Strings getAllTriedToLoadNames() const
     {
+        std::lock_guard lock{mutex};
         Strings names;
         for (auto & [name, info] : infos)
             if (info.triedToLoad())
@@ -49,6 +49,7 @@ Block QueryLogElement::createBlock()
         {std::make_shared<DataTypeUInt64>(), "memory_usage"},
 
         {std::make_shared<DataTypeString>(), "query"},
+        {std::make_shared<DataTypeInt32>(), "exception_code"},
         {std::make_shared<DataTypeString>(), "exception"},
         {std::make_shared<DataTypeString>(), "stack_trace"},
 
@@ -107,6 +108,7 @@ void QueryLogElement::appendToBlock(Block & block) const
     columns[i++]->insert(memory_usage);
 
     columns[i++]->insertData(query.data(), query.size());
+    columns[i++]->insert(exception_code);
     columns[i++]->insertData(exception.data(), exception.size());
     columns[i++]->insertData(stack_trace.data(), stack_trace.size());
 
@@ -54,6 +54,7 @@ struct QueryLogElement
 
     String query;
 
+    Int32 exception_code{}; // because ErrorCodes are int
     String exception;
     String stack_trace;
 
@@ -163,6 +163,7 @@ static void onExceptionBeforeStart(const String & query_for_logging, Context & c
     elem.query_start_time = current_time;
 
     elem.query = query_for_logging;
+    elem.exception_code = getCurrentExceptionCode();
     elem.exception = getCurrentExceptionMessage(false);
 
     elem.client_info = context.getClientInfo();
@@ -496,6 +497,7 @@ static std::tuple<ASTPtr, BlockIO> executeQueryImpl(
 
             elem.event_time = time(nullptr);
             elem.query_duration_ms = 1000 * (elem.event_time - elem.query_start_time);
+            elem.exception_code = getCurrentExceptionCode();
             elem.exception = getCurrentExceptionMessage(false);
 
             QueryStatus * process_list_elem = context.getProcessListElement();
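With exception_code now recorded both when a query fails before it starts and when it fails mid-execution, the error code of a failed query can be read back from the log. A minimal sketch, assuming query_log is enabled on the server (the column list is illustrative):

    SYSTEM FLUSH LOGS;
    -- exception_code is 0 for successful queries and the ErrorCodes value otherwise.
    SELECT event_time, exception_code, query
    FROM system.query_log
    WHERE exception_code != 0
    ORDER BY event_time DESC
    LIMIT 10;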
@@ -61,7 +61,6 @@ const char * auto_config_build[]
     "USE_HYPERSCAN", "@USE_HYPERSCAN@",
     "USE_SIMDJSON", "@USE_SIMDJSON@",
     "USE_POCO_REDIS", "@USE_POCO_REDIS@",
-    "USE_REPLXX", "@USE_REPLXX@",
 
     nullptr, nullptr
 };
@@ -360,6 +360,7 @@ def test_max_data_part_size(start_cluster, name, engine):
     finally:
         node1.query("DROP TABLE IF EXISTS {}".format(name))
 
+@pytest.mark.skip(reason="Flappy test")
 @pytest.mark.parametrize("name,engine", [
     ("mt_with_overflow","MergeTree()"),
     ("replicated_mt_with_overflow","ReplicatedMergeTree('/clickhouse/replicated_mt_with_overflow', '1')",),
@@ -454,6 +455,7 @@ def test_background_move(start_cluster, name, engine):
     finally:
         node1.query("DROP TABLE IF EXISTS {name}".format(name=name))
 
+@pytest.mark.skip(reason="Flappy test")
 @pytest.mark.parametrize("name,engine", [
     ("stopped_moving_mt","MergeTree()"),
     ("stopped_moving_replicated_mt","ReplicatedMergeTree('/clickhouse/stopped_moving_replicated_mt', '1')",),
@@ -720,6 +722,7 @@ def produce_alter_move(node, name):
         pass
 
 
+@pytest.mark.skip(reason="Flappy test")
 @pytest.mark.parametrize("name,engine", [
     ("concurrently_altering_mt","MergeTree()"),
     ("concurrently_altering_replicated_mt","ReplicatedMergeTree('/clickhouse/concurrently_altering_replicated_mt', '1')",),
@@ -773,6 +776,7 @@ def test_concurrent_alter_move(start_cluster, name, engine):
     finally:
         node1.query("DROP TABLE IF EXISTS {name}".format(name=name))
 
+@pytest.mark.skip(reason="Flappy test")
 @pytest.mark.parametrize("name,engine", [
     ("concurrently_dropping_mt","MergeTree()"),
     ("concurrently_dropping_replicated_mt","ReplicatedMergeTree('/clickhouse/concurrently_dropping_replicated_mt', '1')",),
@@ -901,6 +905,8 @@ def test_mutate_to_another_disk(start_cluster, name, engine):
     finally:
         node1.query("DROP TABLE IF EXISTS {name}".format(name=name))
 
+
+@pytest.mark.skip(reason="Flappy test")
 @pytest.mark.parametrize("name,engine", [
     ("alter_modifying_mt","MergeTree()"),
     ("replicated_alter_modifying_mt","ReplicatedMergeTree('/clickhouse/replicated_alter_modifying_mt', '1')",),
@@ -50,6 +50,7 @@ def get_used_disks_for_table(node, table_name):
     return node.query("select disk_name from system.parts where table == '{}' and active=1 order by modification_time".format(table_name)).strip().split('\n')
 
 
+@pytest.mark.skip(reason="Flappy test")
 @pytest.mark.parametrize("name,engine,alter", [
     ("mt_test_rule_with_invalid_destination","MergeTree()",0),
     ("replicated_mt_test_rule_with_invalid_destination","ReplicatedMergeTree('/clickhouse/replicated_test_rule_with_invalid_destination', '1')",0),
@@ -109,6 +110,7 @@ def test_rule_with_invalid_destination(started_cluster, name, engine, alter):
         node1.query("DROP TABLE IF EXISTS {}".format(name))
 
 
+@pytest.mark.skip(reason="Flappy test")
 @pytest.mark.parametrize("name,engine,positive", [
     ("mt_test_inserts_to_disk_do_not_work","MergeTree()",0),
     ("replicated_mt_test_inserts_to_disk_do_not_work","ReplicatedMergeTree('/clickhouse/replicated_test_inserts_to_disk_do_not_work', '1')",0),
@@ -141,6 +143,7 @@ def test_inserts_to_disk_work(started_cluster, name, engine, positive):
         node1.query("DROP TABLE IF EXISTS {}".format(name))
 
 
+@pytest.mark.skip(reason="Flappy test")
 @pytest.mark.parametrize("name,engine,positive", [
     ("mt_test_moves_to_disk_do_not_work","MergeTree()",0),
     ("replicated_mt_test_moves_to_disk_do_not_work","ReplicatedMergeTree('/clickhouse/replicated_test_moves_to_disk_do_not_work', '1')",0),
@@ -187,6 +190,7 @@ def test_moves_to_disk_work(started_cluster, name, engine, positive):
         node1.query("DROP TABLE IF EXISTS {}".format(name))
 
 
+@pytest.mark.skip(reason="Flappy test")
 @pytest.mark.parametrize("name,engine", [
     ("mt_test_moves_to_volume_work","MergeTree()"),
     ("replicated_mt_test_moves_to_volume_work","ReplicatedMergeTree('/clickhouse/replicated_test_moves_to_volume_work', '1')"),
@@ -233,6 +237,7 @@ def test_moves_to_volume_work(started_cluster, name, engine):
         node1.query("DROP TABLE IF EXISTS {}".format(name))
 
 
+@pytest.mark.skip(reason="Flappy test")
 @pytest.mark.parametrize("name,engine,positive", [
     ("mt_test_inserts_to_volume_do_not_work","MergeTree()",0),
     ("replicated_mt_test_inserts_to_volume_do_not_work","ReplicatedMergeTree('/clickhouse/replicated_test_inserts_to_volume_do_not_work', '1')",0),
@@ -271,6 +276,7 @@ def test_inserts_to_volume_work(started_cluster, name, engine, positive):
         node1.query("DROP TABLE IF EXISTS {}".format(name))
 
 
+@pytest.mark.skip(reason="Flappy test")
 @pytest.mark.parametrize("name,engine", [
     ("mt_test_moves_to_disk_eventually_work","MergeTree()"),
     ("replicated_mt_test_moves_to_disk_eventually_work","ReplicatedMergeTree('/clickhouse/replicated_test_moves_to_disk_eventually_work', '1')"),
@@ -326,6 +332,7 @@ def test_moves_to_disk_eventually_work(started_cluster, name, engine):
         node1.query("DROP TABLE IF EXISTS {}".format(name))
 
 
+@pytest.mark.skip(reason="Flappy test")
 @pytest.mark.parametrize("name,engine,positive", [
     ("mt_test_merges_to_disk_do_not_work","MergeTree()",0),
     ("replicated_mt_test_merges_to_disk_do_not_work","ReplicatedMergeTree('/clickhouse/replicated_test_merges_to_disk_do_not_work', '1')",0),
@@ -383,6 +390,7 @@ def test_merges_to_disk_work(started_cluster, name, engine, positive):
         node1.query("DROP TABLE IF EXISTS {}".format(name))
 
 
+@pytest.mark.skip(reason="Flappy test")
 @pytest.mark.parametrize("name,engine", [
     ("mt_test_merges_with_full_disk_work","MergeTree()"),
     ("replicated_mt_test_merges_with_full_disk_work","ReplicatedMergeTree('/clickhouse/replicated_test_merges_with_full_disk_work', '1')"),
@@ -449,6 +457,7 @@ def test_merges_with_full_disk_work(started_cluster, name, engine):
         node1.query("DROP TABLE IF EXISTS {}".format(name))
 
 
+@pytest.mark.skip(reason="Flappy test")
 @pytest.mark.parametrize("name,engine,positive", [
     ("mt_test_moves_after_merges_do_not_work","MergeTree()",0),
     ("replicated_mt_test_moves_after_merges_do_not_work","ReplicatedMergeTree('/clickhouse/replicated_test_moves_after_merges_do_not_work', '1')",0),
@@ -501,6 +510,7 @@ def test_moves_after_merges_work(started_cluster, name, engine, positive):
         node1.query("DROP TABLE IF EXISTS {}".format(name))
 
 
+@pytest.mark.skip(reason="Flappy test")
 @pytest.mark.parametrize("name,engine,positive,bar", [
     ("mt_test_moves_after_alter_do_not_work","MergeTree()",0,"DELETE"),
     ("replicated_mt_test_moves_after_alter_do_not_work","ReplicatedMergeTree('/clickhouse/replicated_test_moves_after_alter_do_not_work', '1')",0,"DELETE"),
@@ -544,6 +554,7 @@ def test_ttls_do_not_work_after_alter(started_cluster, name, engine, positive, b
         node1.query("DROP TABLE IF EXISTS {}".format(name))
 
 
+@pytest.mark.skip(reason="Flappy test")
 @pytest.mark.parametrize("name,engine,positive", [
     ("mt_test_alter_multiple_ttls_positive", "MergeTree()", True),
     ("mt_replicated_test_alter_multiple_ttls_positive", "ReplicatedMergeTree('/clickhouse/replicated_test_alter_multiple_ttls_positive', '1')", True),
@@ -626,6 +637,7 @@ limitations under the License."""
         node1.query("DROP TABLE IF EXISTS {name}".format(name=name))
 
 
+@pytest.mark.skip(reason="Flappy test")
 @pytest.mark.parametrize("name,engine", [
     ("concurrently_altering_ttl_mt","MergeTree()"),
     ("concurrently_altering_ttl_replicated_mt","ReplicatedMergeTree('/clickhouse/concurrently_altering_ttl_replicated_mt', '1')",),
@@ -716,6 +728,7 @@ def test_concurrent_alter_with_ttl_move(started_cluster, name, engine):
     finally:
         node1.query("DROP TABLE IF EXISTS {name}".format(name=name))
 
+@pytest.mark.skip(reason="Flappy test")
 @pytest.mark.parametrize("name,positive", [
     ("test_double_move_while_select_negative", 0),
     ("test_double_move_while_select_positive", 1),
@@ -1,14 +1,18 @@
 <test>
-    <type>once</type>
+    <type>loop</type>
 
     <stop_conditions>
+        <all_of>
+            <iterations>5</iterations>
+            <min_time_not_changing_for_ms>10000</min_time_not_changing_for_ms>
+        </all_of>
         <any_of>
-            <average_speed_not_changing_for_ms>1000</average_speed_not_changing_for_ms>
-            <total_time_ms>10000</total_time_ms>
+            <iterations>50</iterations>
+            <total_time_ms>60000</total_time_ms>
         </any_of>
     </stop_conditions>
 
 
     <substitutions>
         <substitution>
             <name>func</name>
@@ -37,7 +41,7 @@
         </substitution>
     </substitutions>
 
-    <query>SELECT count() FROM system.numbers WHERE NOT ignore({func}(toFloat64(number)))</query>
-    <query>SELECT count() FROM system.numbers WHERE NOT ignore({func}(toFloat32(number)))</query>
-    <query>SELECT count() FROM system.numbers WHERE NOT ignore({func}(number))</query>
+    <query>SELECT count() FROM numbers(100000000) WHERE NOT ignore({func}(toFloat64(number)))</query>
+    <query>SELECT count() FROM numbers(100000000) WHERE NOT ignore({func}(toFloat32(number)))</query>
+    <query>SELECT count() FROM numbers(100000000) WHERE NOT ignore({func}(number))</query>
 </test>
@@ -0,0 +1,3 @@
+60
+0
+0
@@ -0,0 +1,7 @@
+DROP TABLE IF EXISTS test_table_for_01070_exception_code_in_query_log_table;
+SELECT * FROM test_table_for_01070_exception_code_in_query_log_table; -- { serverError 60 }
+CREATE TABLE test_table_for_01070_exception_code_in_query_log_table (value UInt64) ENGINE=Memory();
+SELECT * FROM test_table_for_01070_exception_code_in_query_log_table;
+SYSTEM FLUSH LOGS;
+SELECT exception_code FROM system.query_log WHERE query='SELECT * FROM test_table_for_01070_exception_code_in_query_log_table';
+DROP TABLE IF EXISTS test_table_for_01070_exception_code_in_query_log_table;
@@ -0,0 +1,2 @@
+select 1 format Template settings format_template_row='01070_nonexistent_file.txt'; -- { clientError 107 }
+select 1 format Template settings format_template_row='/dev/null'; -- { clientError 474 }
@@ -177,7 +177,7 @@ def parse_env_variables(build_type, compiler, sanitizer, package_type, image_typ
 
     if unbundled:
         # TODO: fix build with ENABLE_RDKAFKA
-        cmake_flags.append('-DUNBUNDLED=1 -DENABLE_MYSQL=0 -DENABLE_POCO_ODBC=0 -DENABLE_ODBC=0 -DENABLE_READLINE=0 -DENABLE_RDKAFKA=0')
+        cmake_flags.append('-DUNBUNDLED=1 -DENABLE_MYSQL=0 -DENABLE_POCO_ODBC=0 -DENABLE_ODBC=0 -DENABLE_REPLXX=0 -DENABLE_RDKAFKA=0')
 
     if split_binary:
         cmake_flags.append('-DUSE_STATIC_LIBRARIES=0 -DSPLIT_SHARED_LIBRARIES=1 -DCLICKHOUSE_SPLIT_BINARY=1')
docker/test/codebrowser/Dockerfile (new file, 45 lines)
@@ -0,0 +1,45 @@
+# docker build --network=host -t yandex/clickhouse-codebrowser .
+# docker run --volume=path_to_repo:/repo_folder --volume=path_to_result:/test_output yandex/clickhouse-codebrowser
+FROM ubuntu:18.04
+
+RUN apt-get --allow-unauthenticated update -y \
+    && env DEBIAN_FRONTEND=noninteractive \
+        apt-get --allow-unauthenticated install --yes --no-install-recommends \
+            bash \
+            sudo \
+            wget \
+            software-properties-common \
+            ca-certificates \
+            apt-transport-https \
+            build-essential \
+            gpg-agent \
+            git
+
+RUN wget -O - https://apt.kitware.com/keys/kitware-archive-latest.asc 2>/dev/null | sudo apt-key add -
+RUN sudo apt-add-repository 'deb https://apt.kitware.com/ubuntu/ bionic main'
+RUN sudo echo "deb [trusted=yes] http://apt.llvm.org/bionic/ llvm-toolchain-bionic-8 main" >> /etc/apt/sources.list
+
+RUN sudo apt-get --yes --allow-unauthenticated update
+# To build woboq
+RUN sudo apt-get --yes --allow-unauthenticated install cmake clang-8 libllvm8 libclang-8-dev
+
+# repo versions doesn't work correctly with C++17
+RUN git clone https://github.com/woboq/woboq_codebrowser.git
+RUN cd woboq_codebrowser && cmake . -DCMAKE_BUILD_TYPE=Release && make -j
+
+ENV CODEGEN=/woboq_codebrowser/generator/codebrowser_generator
+ENV CODEINDEX=/woboq_codebrowser/indexgenerator/codebrowser_indexgenerator
+ENV STATIC_DATA=/woboq_codebrowser/data
+
+ENV SOURCE_DIRECTORY=/repo_folder
+ENV BUILD_DIRECTORY=/build
+ENV HTML_RESULT_DIRECTORY=$BUILD_DIRECTORY/html_report
+ENV SHA=nosha
+
+CMD mkdir -p $BUILD_DIRECTORY && cd $BUILD_DIRECTORY && \
+    cmake $SOURCE_DIRECTORY -DCMAKE_CXX_COMPILER=/usr/bin/clang\+\+-8 -DCMAKE_C_COMPILER=/usr/bin/clang-8 -DCMAKE_EXPORT_COMPILE_COMMANDS=ON && \
+    mkdir -p $HTML_RESULT_DIRECTORY && \
+    $CODEGEN -b $BUILD_DIRECTORY -a -o $HTML_RESULT_DIRECTORY -p ClickHouse:$SOURCE_DIRECTORY:$SHA && \
+    $CODEINDEX $HTML_RESULT_DIRECTORY && \
+    cp -r $STATIC_DATA $HTML_RESULT_DIRECTORY/ &&\
+    mv $HTML_RESULT_DIRECTORY /test_output
@@ -146,7 +146,9 @@ run_tests
 
 # Analyze results
 result_structure="left float, right float, diff float, rd Array(float), query text"
-right/clickhouse local --file '*-report.tsv' -S "$result_structure" --query "select * from table where diff < 0.05 and rd[3] > 0.05 order by rd[3] desc" > flap-prone.tsv
-right/clickhouse local --file '*-report.tsv' -S "$result_structure" --query "select * from table where diff > 0.05 and diff > rd[3] order by diff desc" > bad-perf.tsv
-right/clickhouse local --file '*-client-time.tsv' -S "query text, client float, server float" -q "select *, floor(client/server, 3) p from table order by p desc" > client-time.tsv
+right/clickhouse local --file '*-report.tsv' -S "$result_structure" --query "select * from table where abs(diff) < 0.05 and rd[3] > 0.05 order by rd[3] desc" > unstable.tsv
+right/clickhouse local --file '*-report.tsv' -S "$result_structure" --query "select * from table where abs(diff) > 0.05 and abs(diff) > rd[3] order by diff desc" > changed-perf.tsv
+right/clickhouse local --file '*-client-time.tsv' -S "query text, client float, server float" -q "select client, server, floor(client/server, 3) p, query from table where p > 1.01 order by p desc" > slow-on-client.tsv
 grep Exception:[^:] *-err.log > run-errors.log
+
+./report.py > report.html
@@ -29,5 +29,5 @@ set -m
 time ../compare.sh 0 $ref_sha $PR_TO_TEST $SHA_TO_TEST 2>&1 | ts | tee compare.log
 set +m
 
-7z a /output/output.7z *.log *.tsv
+7z a /output/output.7z *.log *.tsv *.html
 cp compare.log /output
@@ -1,10 +1,10 @@
 -- input is table(query text, run UInt32, version int, time float)
 select
     -- abs(diff_percent) > rd_quantiles_percent[3] fail,
-    floor(original_medians_array.time_by_version[1], 4) m1,
-    floor(original_medians_array.time_by_version[2], 4) m2,
-    floor((m1 - m2) / m1, 3) diff_percent,
-    arrayMap(x -> floor(x / m1, 3), rd.rd_quantiles) rd_quantiles_percent,
+    floor(original_medians_array.time_by_version[1], 4) left,
+    floor(original_medians_array.time_by_version[2], 4) right,
+    floor((right - left) / left, 3) diff_percent,
+    arrayMap(x -> floor(x / left, 3), rd.rd_quantiles) rd_quantiles_percent,
     query
 from
 (
docker/test/performance-comparison/report.py (new executable file, 105 lines)
@@ -0,0 +1,105 @@
+#!/usr/bin/python3
+
+import collections
+import csv
+import os
+import sys
+
+doc_template = """
+<!DOCTYPE html>
+<html>
+<style>
+@font-face {{
+    font-family:'Yandex Sans Display Web';
+    src:url(https://yastatic.net/adv-www/_/H63jN0veW07XQUIA2317lr9UIm8.eot);
+    src:url(https://yastatic.net/adv-www/_/H63jN0veW07XQUIA2317lr9UIm8.eot?#iefix) format('embedded-opentype'),
+            url(https://yastatic.net/adv-www/_/sUYVCPUAQE7ExrvMS7FoISoO83s.woff2) format('woff2'),
+            url(https://yastatic.net/adv-www/_/v2Sve_obH3rKm6rKrtSQpf-eB7U.woff) format('woff'),
+            url(https://yastatic.net/adv-www/_/PzD8hWLMunow5i3RfJ6WQJAL7aI.ttf) format('truetype'),
+            url(https://yastatic.net/adv-www/_/lF_KG5g4tpQNlYIgA0e77fBSZ5s.svg#YandexSansDisplayWeb-Regular) format('svg');
+    font-weight:400;
+    font-style:normal;
+    font-stretch:normal
+}}
+
+body {{ font-family: "Yandex Sans Display Web", Arial, sans-serif; background: #EEE; }}
+h1 {{ margin-left: 10px; }}
+th, td {{ border: 0; padding: 5px 10px 5px 10px; text-align: left; vertical-align: top; line-height: 1.5; background-color: #FFF;
+td {{ white-space: pre; font-family: Monospace, Courier New; }}
+border: 0; box-shadow: 0 0 0 1px rgba(0, 0, 0, 0.05), 0 8px 25px -5px rgba(0, 0, 0, 0.1); }}
+a {{ color: #06F; text-decoration: none; }}
+a:hover, a:active {{ color: #F40; text-decoration: underline; }}
+table {{ border: 0; }}
+.main {{ margin-left: 10%; }}
+p.links a {{ padding: 5px; margin: 3px; background: #FFF; line-height: 2; white-space: nowrap; box-shadow: 0 0 0 1px rgba(0, 0, 0, 0.05), 0 8px 25px -5px rgba(0, 0, 0, 0.1); }}
+</style>
+<title>{header}</title>
+</head>
+<body>
+<div class="main">
+
+<h1>{header}</h1>
+{test_part}
+<p class="links">
+<a href="{raw_log_url}">{raw_log_name}</a>
+<a href="{branch_url}">{branch_name}</a>
+<a href="{commit_url}">Commit</a>
+{additional_urls}
+<a href="output.7z">Test output</a>
+<a href="{task_url}">Task (private network)</a>
+</p>
+</body>
+</html>
+"""
+
+table_template = """
+<h2>{caption}</h2>
+<table>
+{header}
+{rows}
+</table>
+"""
+
+def tr(x):
+    return '<tr>' + str(x) + '</tr>'
+
+def td(x):
+    return '<td>' + str(x) + '</td>'
+
+def th(x):
+    return '<th>' + str(x) + '</th>'
+
+def table_row(r):
+    return tr(''.join([td(f) for f in r]))
+
+def table_header(r):
+    return tr(''.join([th(f) for f in r]))
+
+def tsv_rows(n):
+    result = ''
+    with open(n) as fd:
+        for row in csv.reader(fd, delimiter="\t", quotechar='"'):
+            result += table_row(row)
+    return result
+
+params = collections.defaultdict(str)
+params['header'] = "ClickHouse Performance Comparison"
+params['test_part'] = (table_template.format_map(
+    collections.defaultdict(str,
+        caption = 'Changes in performance',
+        header = table_header(['Left', 'Right', 'Diff', 'RD', 'Query']),
+        rows = tsv_rows('changed-perf.tsv'))) +
+    table_template.format(
+        caption = 'Slow on client',
+        header = table_header(['Client', 'Server', 'Ratio', 'Query']),
+        rows = tsv_rows('slow-on-client.tsv')) +
+    table_template.format(
+        caption = 'Unstable',
+        header = table_header(['Left', 'Right', 'Diff', 'RD', 'Query']),
+        rows = tsv_rows('unstable.tsv')) +
+    table_template.format(
+        caption = 'Run errors',
+        header = table_header(['A', 'B']),
+        rows = tsv_rows('run-errors.log'))
+)
+print(doc_template.format_map(params))
@@ -10,7 +10,7 @@ if (DEFINED APPLE_HAVE_CLOCK_GETTIME)
     target_compile_definitions(apple_rt PUBLIC -DAPPLE_HAVE_CLOCK_GETTIME=${APPLE_HAVE_CLOCK_GETTIME})
 endif ()
 
-add_library (common
+set (COMMON_SRCS
     src/argsToConfig.cpp
     src/coverage.cpp
     src/DateLUT.cpp
@@ -65,7 +65,19 @@ add_library (common
     include/ext/scope_guard.h
     include/ext/size.h
     include/ext/unlock_guard.h
+)
 
+if (ENABLE_REPLXX)
+    set (COMMON_SRCS
+        src/ReplxxLineReader.cpp
+        include/common/ReplxxLineReader.h
+
+        ${COMMON_SRCS}
+    )
+endif ()
+
+add_library (common
+    ${COMMON_SRCS}
     ${CONFIG_COMMON})
 
 if (USE_INTERNAL_MEMCPY)
@@ -92,8 +104,8 @@ if(CCTZ_LIBRARY)
     target_link_libraries(common PRIVATE ${CCTZ_LIBRARY})
 endif()
 
-if (USE_REPLXX)
-    target_link_libraries(common PRIVATE replxx)
+if (ENABLE_REPLXX)
+    target_link_libraries(common PUBLIC replxx)
 endif ()
 
 target_link_libraries (common
@@ -22,8 +22,8 @@ public:
         WordsRange getCompletions(const String & prefix, size_t prefix_length) const;
     };
 
-    LineReader(const Suggest * suggest, const String & history_file_path, char extender, char delimiter = 0); /// if delimiter != 0, then it's multiline mode
-    ~LineReader();
+    LineReader(const String & history_file_path, char extender, char delimiter = 0); /// if delimiter != 0, then it's multiline mode
+    virtual ~LineReader() {}
 
     /// Reads the whole line until delimiter (in multiline mode) or until the last line without extender.
     /// If resulting line is empty, it means the user interrupted the input.
@@ -31,7 +31,7 @@ public:
     /// Typical delimiter is ';' (semicolon) and typical extender is '\' (backslash).
     String readLine(const String & first_prompt, const String & second_prompt);
 
-private:
+protected:
     enum InputStatus
     {
         ABORT = 0,
@@ -39,19 +39,17 @@ private:
         INPUT_LINE,
     };
 
-    String input;
-    String prev_line;
     const String history_file_path;
+    static constexpr char word_break_characters[] = " \t\n\r\"\\'`@$><=;|&{(.";
+
+    String input;
+
+private:
     const char extender;
     const char delimiter;
 
-    InputStatus readOneLine(const String & prompt);
-    void addToHistory(const String & line);
-
-    /// Since CMake doesn't impose restrictions on includes between unrelated targets
-    /// it's possible that we include this file without USE_REPLXX defined.
-#ifdef __clang__
-    [[maybe_unused]]
-#endif
-    void * impl;
+    String prev_line;
+
+    virtual InputStatus readOneLine(const String & prompt);
+    virtual void addToHistory(const String &) {}
 };
libs/libcommon/include/common/ReplxxLineReader.h (new file, 18 lines)
@@ -0,0 +1,18 @@
+#pragma once
+
+#include "LineReader.h"
+
+#include <replxx.hxx>
+
+class ReplxxLineReader : public LineReader
+{
+public:
+    ReplxxLineReader(const Suggest & suggest, const String & history_file_path, char extender, char delimiter = 0);
+    ~ReplxxLineReader() override;
+
+private:
+    InputStatus readOneLine(const String & prompt) override;
+    void addToHistory(const String & line) override;
+
+    replxx::Replxx rx;
+};
|
|||||||
// .h autogenerated by cmake !
|
// .h autogenerated by cmake !
|
||||||
|
|
||||||
#cmakedefine01 USE_JEMALLOC
|
#cmakedefine01 USE_JEMALLOC
|
||||||
#cmakedefine01 USE_REPLXX
|
|
||||||
#cmakedefine01 UNBUNDLED
|
#cmakedefine01 UNBUNDLED
|
||||||
#cmakedefine01 WITH_COVERAGE
|
#cmakedefine01 WITH_COVERAGE
|
||||||
|
@@ -1,26 +1,20 @@
-#include <common/config_common.h>
 #include <common/LineReader.h>
 
-#if USE_REPLXX
-#include <replxx.hxx>
-#else
-
-/// We can detect if code is linked with one or another readline variants or open the library dynamically.
-#include <dlfcn.h>
-extern "C"
-{
-    char * readline(const char *) __attribute__((__weak__));
-    char * (*readline_ptr)(const char *) = readline;
-}
-
-#endif
-
 #include <iostream>
 #include <string_view>
 
 #include <port/unistd.h>
 #include <string.h>
 
+#ifdef OS_LINUX
+/// We can detect if code is linked with one or another readline variants or open the library dynamically.
+# include <dlfcn.h>
+extern "C"
+{
+    char * readline(const char *) __attribute__((__weak__));
+    char * (*readline_ptr)(const char *) = readline;
+}
+#endif
+
 namespace
 {
@@ -42,8 +36,6 @@ bool hasInputData()
     return select(1, &fds, nullptr, nullptr, &timeout) == 1;
 }
 
-constexpr char word_break_characters[] = " \t\n\r\"\\'`@$><=;|&{(.";
-
 }
 
 LineReader::Suggest::WordsRange LineReader::Suggest::getCompletions(const String & prefix, size_t prefix_length) const
@@ -68,39 +60,12 @@ LineReader::Suggest::WordsRange LineReader::Suggest::getCompletions(const String
     });
 }
 
-LineReader::LineReader(const Suggest * suggest, const String & history_file_path_, char extender_, char delimiter_)
+LineReader::LineReader(const String & history_file_path_, char extender_, char delimiter_)
     : history_file_path(history_file_path_), extender(extender_), delimiter(delimiter_)
 {
-#if USE_REPLXX
-    impl = new replxx::Replxx;
-    auto & rx = *(replxx::Replxx*)(impl);
-
-    if (!history_file_path.empty())
-        rx.history_load(history_file_path);
-
-    auto callback = [suggest] (const String & context, size_t context_size)
-    {
-        auto range = suggest->getCompletions(context, context_size);
-        return replxx::Replxx::completions_t(range.first, range.second);
-    };
-
-    rx.set_completion_callback(callback);
-    rx.set_complete_on_empty(false);
-    rx.set_word_break_characters(word_break_characters);
-#endif
     /// FIXME: check extender != delimiter
 }
 
-LineReader::~LineReader()
-{
-#if USE_REPLXX
-    auto & rx = *(replxx::Replxx*)(impl);
-    if (!history_file_path.empty())
-        rx.history_save(history_file_path);
-    delete (replxx::Replxx *)impl;
-#endif
-}
-
 String LineReader::readLine(const String & first_prompt, const String & second_prompt)
 {
     String line;
@@ -149,14 +114,7 @@ LineReader::InputStatus LineReader::readOneLine(const String & prompt)
 {
     input.clear();
 
-#if USE_REPLXX
-    auto & rx = *(replxx::Replxx*)(impl);
-    const char* cinput = rx.input(prompt);
-    if (cinput == nullptr)
-        return (errno != EAGAIN) ? ABORT : RESET_LINE;
-    input = cinput;
-#else
-
+#ifdef OS_LINUX
     if (!readline_ptr)
     {
         for (auto name : {"libreadline.so", "libreadline.so.0", "libeditline.so", "libeditline.so.0"})
@@ -182,22 +140,14 @@ LineReader::InputStatus LineReader::readOneLine(const String & prompt)
         input = line_read;
     }
     else
+#endif
     {
         std::cout << prompt;
        std::getline(std::cin, input);
         if (!std::cin.good())
             return ABORT;
     }
-#endif
 
     trim(input);
     return INPUT_LINE;
 }
-
-void LineReader::addToHistory(const String & line)
-{
-#if USE_REPLXX
-    auto & rx = *(replxx::Replxx*)(impl);
-    rx.history_add(line);
-#endif
-}
libs/libcommon/src/ReplxxLineReader.cpp (new file, 57 lines)
@@ -0,0 +1,57 @@
+#include <common/ReplxxLineReader.h>
+
+#include <errno.h>
+#include <port/unistd.h>
+#include <string.h>
+
+namespace
+{
+
+/// Trim ending whitespace inplace
+void trim(String & s)
+{
+    s.erase(std::find_if(s.rbegin(), s.rend(), [](int ch) { return !std::isspace(ch); }).base(), s.end());
+}
+
+}
+
+ReplxxLineReader::ReplxxLineReader(const Suggest & suggest, const String & history_file_path_, char extender_, char delimiter_)
+    : LineReader(history_file_path_, extender_, delimiter_)
+{
+    if (!history_file_path.empty())
+        rx.history_load(history_file_path);
+
+    auto callback = [&suggest] (const String & context, size_t context_size)
+    {
+        auto range = suggest.getCompletions(context, context_size);
+        return replxx::Replxx::completions_t(range.first, range.second);
+    };
+
+    rx.set_completion_callback(callback);
+    rx.set_complete_on_empty(false);
+    rx.set_word_break_characters(word_break_characters);
+}
+
+ReplxxLineReader::~ReplxxLineReader()
+{
+    if (!history_file_path.empty())
+        rx.history_save(history_file_path);
+}
+
+LineReader::InputStatus ReplxxLineReader::readOneLine(const String & prompt)
+{
+    input.clear();
+
+    const char* cinput = rx.input(prompt);
+    if (cinput == nullptr)
+        return (errno != EAGAIN) ? ABORT : RESET_LINE;
+    input = cinput;
+
+    trim(input);
+    return INPUT_LINE;
+}
+
+void ReplxxLineReader::addToHistory(const String & line)
+{
+    rx.history_add(line);
+}
@@ -1,3 +1,3 @@
 add_executable(clickhouse-zookeeper-cli zookeeper-cli.cpp)
-target_link_libraries(clickhouse-zookeeper-cli PRIVATE clickhouse_common_zookeeper ${Poco_Foundation_LIBRARY} ${LINE_EDITING_LIBS})
+target_link_libraries(clickhouse-zookeeper-cli PRIVATE clickhouse_common_zookeeper ${Poco_Foundation_LIBRARY})
 INSTALL(TARGETS clickhouse-zookeeper-cli RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT clickhouse-utils)
@@ -1,12 +1,13 @@
-#include <Common/ZooKeeper/ZooKeeper.h>
+#include <IO/ReadBufferFromString.h>
+#include <IO/ReadHelpers.h>
+#include <Poco/ConsoleChannel.h>
 #include <Common/ZooKeeper/KeeperException.h>
+#include <Common/ZooKeeper/ZooKeeper.h>
+#include <common/LineReader.h>
+#include <common/logger_useful.h>
+
 #include <iostream>
 #include <sstream>
-#include <Poco/ConsoleChannel.h>
-#include <common/logger_useful.h>
-#include <common/LineReader.h>
-#include <IO/ReadHelpers.h>
-#include <IO/ReadBufferFromString.h>
 
 
 void printStat(const Coordination::Stat & s)
@@ -69,7 +70,7 @@ int main(int argc, char ** argv)
     Logger::root().setLevel("trace");
 
     zkutil::ZooKeeper zk(argv[1]);
-    LineReader lr(nullptr, {}, '\\');
+    LineReader lr({}, '\\');
 
     do
     {
@@ -501,7 +501,7 @@ sudo clickhouse-client-$LATEST_VERSION/install/doinst.sh
 ClickHouse source code is published under Apache 2.0 License.</a> Software is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 KIND, either express or implied.</p>
 
-<p id="footer">© 2016–2019 <a href="https://yandex.com/company/" rel="external nofollow">YANDEX</a> LLC</p>
+<p id="footer">© 2016–2020 <a href="https://yandex.com/company/" rel="external nofollow">YANDEX</a> LLC</p>
 
 </div>
 