Mirror of https://github.com/ClickHouse/ClickHouse.git (synced 2024-11-24 08:32:02 +00:00)
Merge branch 'master' into stateless_test_flaky_check

This commit is contained in commit a7f989193a.

.github/codecov.yml (vendored, new file, +17)
@@ -0,0 +1,17 @@
+codecov:
+  max_report_age: off
+  strict_yaml_branch: "master"
+
+ignore:
+  - "contrib"
+  - "docs"
+  - "benchmark"
+  - "tests"
+  - "docker"
+  - "debian"
+  - "cmake"
+
+comment: false
+
+github_checks:
+  annotations: false
.gitignore (vendored, +1)
@@ -118,6 +118,7 @@ website/package-lock.json
 
 # clangd cache
 /.clangd
+/.cache
 
 /compile_commands.json
 
.gitmodules (vendored, +2 −2)
@@ -107,7 +107,6 @@
 [submodule "contrib/grpc"]
     path = contrib/grpc
     url = https://github.com/ClickHouse-Extras/grpc.git
-    branch = v1.25.0
 [submodule "contrib/aws"]
     path = contrib/aws
     url = https://github.com/ClickHouse-Extras/aws-sdk-cpp.git
@@ -159,7 +158,7 @@
     url = https://github.com/openldap/openldap.git
 [submodule "contrib/AMQP-CPP"]
     path = contrib/AMQP-CPP
-    url = https://github.com/CopernicaMarketingSoftware/AMQP-CPP.git
+    url = https://github.com/ClickHouse-Extras/AMQP-CPP.git
 [submodule "contrib/cassandra"]
     path = contrib/cassandra
     url = https://github.com/ClickHouse-Extras/cpp-driver.git
@@ -186,3 +185,4 @@
 [submodule "contrib/cyrus-sasl"]
     path = contrib/cyrus-sasl
     url = https://github.com/cyrusimap/cyrus-sasl
+    branch = cyrus-sasl-2.1
[likely the top-level CMakeLists.txt — file header lost in the mirror]
@@ -300,6 +300,11 @@ if (COMPILER_CLANG)
     option(ENABLE_THINLTO "Clang-specific link time optimization" ON)
 endif()
 
+# Set new experimental pass manager, it's a performance, build time and binary size win.
+# Can be removed after https://reviews.llvm.org/D66490 merged and released to at least two versions of clang.
+set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fexperimental-new-pass-manager")
+set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fexperimental-new-pass-manager")
+
 # We cannot afford to use LTO when compiling unit tests, and it's not enough
 # to only supply -fno-lto at the final linking stage. So we disable it
 # completely.
@@ -513,7 +518,13 @@ endif ()
 macro (add_executable target)
     # invoke built-in add_executable
     # explicitly acquire and interpose malloc symbols by clickhouse_malloc
-    _add_executable (${ARGV} $<TARGET_OBJECTS:clickhouse_malloc>)
+    # if GLIBC_COMPATIBILITY is ON and ENABLE_THINLTO is on than provide memcpy symbol explicitly to neutrialize thinlto's libcall generation.
+    if (GLIBC_COMPATIBILITY AND ENABLE_THINLTO)
+        _add_executable (${ARGV} $<TARGET_OBJECTS:clickhouse_malloc> $<TARGET_OBJECTS:clickhouse_memcpy>)
+    else ()
+        _add_executable (${ARGV} $<TARGET_OBJECTS:clickhouse_malloc>)
+    endif ()
+
     get_target_property (type ${target} TYPE)
     if (${type} STREQUAL EXECUTABLE)
         # operator::new/delete for executables (MemoryTracker stuff)
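The second hunk above links a clickhouse_memcpy object into every executable when GLIBC_COMPATIBILITY and ENABLE_THINLTO are on, so that memcpy libcalls generated by ThinLTO resolve against the bundled implementation instead of glibc. A minimal sketch of how such an interposing object looks; the byte loop here is a placeholder — the real object is built from contrib/FastMemcpy/memcpy_wrapper.c, whose diff appears near the end of this commit:

    // Sketch: an object file that interposes the libc memcpy symbol.
    // Linking this object into an executable makes the linker resolve memcpy
    // here (and satisfies ThinLTO-generated libcalls) instead of in libc.
    #include <cstddef>

    extern "C" void * memcpy(void * __restrict dst, const void * __restrict src, size_t size)
    {
        auto * d = static_cast<unsigned char *>(dst);
        auto * s = static_cast<const unsigned char *>(src);
        for (size_t i = 0; i < size; ++i)   // placeholder; a real implementation is vectorized
            d[i] = s[i];
        return dst;
    }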
[README.md — file header lost in the mirror]
@@ -17,5 +17,4 @@ ClickHouse is an open-source column-oriented database management system that all
 
 ## Upcoming Events
 
-* [ClickHouse for Edge Analytics](https://ones2020.sched.com/event/bWPs) on September 29, 2020.
-* [ClickHouse online meetup (in Russian)](https://clck.ru/R2zB9) on October 1, 2020.
+* [ClickHouse virtual office hours](https://www.eventbrite.com/e/clickhouse-october-virtual-meetup-office-hours-tickets-123129500651) on October 22, 2020.
[SECURITY.md — file header lost in the mirror]
@@ -10,11 +10,14 @@ currently being supported with security updates:
 | 1.x | :x: |
 | 18.x | :x: |
 | 19.x | :x: |
-| 19.14 | :white_check_mark: |
 | 20.1 | :x: |
 | 20.3 | :white_check_mark: |
-| 20.4 | :white_check_mark: |
-| 20.5 | :white_check_mark: |
+| 20.4 | :x: |
+| 20.5 | :x: |
+| 20.6 | :x: |
+| 20.7 | :white_check_mark: |
+| 20.8 | :white_check_mark: |
+| 20.9 | :white_check_mark: |
 
 ## Reporting a Vulnerability
 
[likely ReplxxLineReader.cpp — file header lost in the mirror]
@@ -16,19 +16,6 @@ void trim(String & s)
     s.erase(std::find_if(s.rbegin(), s.rend(), [](int ch) { return !std::isspace(ch); }).base(), s.end());
 }
 
-// Uses separate replxx::Replxx instance to avoid loading them again in the
-// current context (replxx::Replxx::history_load() will re-load the history
-// from the file), since then they will overlaps with history from the current
-// session (this will make behavior compatible with other interpreters, i.e.
-// bash).
-void history_save(const String & history_file_path, const String & line)
-{
-    replxx::Replxx rx_no_overlap;
-    rx_no_overlap.history_load(history_file_path);
-    rx_no_overlap.history_add(line);
-    rx_no_overlap.history_save(history_file_path);
-}
-
 }
 
 ReplxxLineReader::ReplxxLineReader(
@@ -58,7 +45,10 @@ ReplxxLineReader::ReplxxLineReader(
     }
     else
     {
-        rx.history_load(history_file_path);
+        if (!rx.history_load(history_file_path))
+        {
+            rx.print("Loading history failed: %s\n", strerror(errno));
+        }
 
         if (flock(history_file_fd, LOCK_UN))
         {
@@ -128,7 +118,8 @@ void ReplxxLineReader::addToHistory(const String & line)
     rx.history_add(line);
 
     // flush changes to the disk
-    history_save(history_file_path, line);
+    if (!rx.history_save(history_file_path))
+        rx.print("Saving history failed: %s\n", strerror(errno));
 
     if (locked && 0 != flock(history_file_fd, LOCK_UN))
         rx.print("Unlock of history file failed: %s\n", strerror(errno));
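The hunks above drop the extra-Replxx-instance workaround and instead check the boolean results of history_load()/history_save(), reporting failures via strerror(errno), with flock() guarding the history file against concurrent sessions. A standalone sketch of the same lock, write, report-on-failure pattern using only POSIX and stdio (appendHistory is an illustrative helper, not part of the patch):

    #include <cerrno>
    #include <cstdio>
    #include <cstring>
    #include <string>
    #include <sys/file.h>   // flock
    #include <unistd.h>

    // Append one history line under an advisory lock, reporting failures the
    // way the patched ReplxxLineReader does instead of silently ignoring them.
    bool appendHistory(const std::string & path, const std::string & line)
    {
        FILE * f = std::fopen(path.c_str(), "a");
        if (!f)
        {
            std::fprintf(stderr, "Opening history failed: %s\n", strerror(errno));
            return false;
        }

        bool ok = true;
        if (0 != flock(fileno(f), LOCK_EX))
        {
            std::fprintf(stderr, "Lock of history file failed: %s\n", strerror(errno));
            ok = false;
        }
        else
        {
            if (std::fputs((line + "\n").c_str(), f) == EOF)
            {
                std::fprintf(stderr, "Saving history failed: %s\n", strerror(errno));
                ok = false;
            }
            if (0 != flock(fileno(f), LOCK_UN))
                std::fprintf(stderr, "Unlock of history file failed: %s\n", strerror(errno));
        }

        std::fclose(f);
        return ok;
    }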
[likely StringRef.h — file header lost in the mirror]
@@ -313,13 +313,4 @@ namespace ZeroTraits
 }
 
-
-inline bool operator==(StringRef lhs, const char * rhs)
-{
-    for (size_t pos = 0; pos < lhs.size; ++pos)
-        if (!rhs[pos] || lhs.data[pos] != rhs[pos])
-            return false;
-
-    return true;
-}
-
 
 std::ostream & operator<<(std::ostream & os, const StringRef & str);
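The operator== removed above compared only the first lhs.size characters of rhs, so a ref over "abc" would compare equal to the C string "abcd". A corrected sketch; StringRefLike is a stand-in for the real StringRef, and it assumes lhs contains no embedded NUL bytes:

    #include <cstddef>
    #include <cstring>

    struct StringRefLike   // stand-in for the real StringRef {data, size}
    {
        const char * data;
        size_t size;
    };

    inline bool operator==(StringRefLike lhs, const char * rhs)
    {
        // Match every byte of lhs, then require rhs to end exactly there.
        return std::strncmp(lhs.data, rhs, lhs.size) == 0 && rhs[lhs.size] == '\0';
    }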
[header file — name lost in the mirror]
@@ -1,3 +1,5 @@
+#pragma once
+
 #include <string>
 #include <common/types.h>
 
[likely base/common/wide_integer_impl.h — file header lost in the mirror]
@@ -1,6 +1,9 @@
-/// Original is here https://github.com/cerevra/int
 #pragma once
 
+/// Original is here https://github.com/cerevra/int
+/// Distributed under the Boost Software License, Version 1.0.
+/// (See at http://www.boost.org/LICENSE_1_0.txt)
+
 #include "throwError.h"
 
 namespace wide
@@ -436,46 +439,94 @@ private:
     }
 
     template <typename T>
-    constexpr static auto multiply(const integer<Bits, Signed> & lhs, const T & rhs)
+    constexpr static integer<Bits, Signed>
+    multiply(const integer<Bits, Signed> & lhs, const T & rhs)
     {
-        integer<Bits, Signed> res{};
-#if 1
-        integer<Bits, Signed> lhs2 = plus(lhs, shift_left(lhs, 1));
-        integer<Bits, Signed> lhs3 = plus(lhs2, shift_left(lhs, 2));
-#endif
-        for (unsigned i = 0; i < item_count; ++i)
+        if constexpr (Bits == 256 && sizeof(base_type) == 8)
         {
-            base_type rhs_item = get_item(rhs, i);
-            unsigned pos = i * base_bits;
-
-            while (rhs_item)
-            {
-#if 1 /// optimization
-                if ((rhs_item & 0x7) == 0x7)
-                {
-                    res = plus(res, shift_left(lhs3, pos));
-                    rhs_item >>= 3;
-                    pos += 3;
-                    continue;
-                }
-
-                if ((rhs_item & 0x3) == 0x3)
-                {
-                    res = plus(res, shift_left(lhs2, pos));
-                    rhs_item >>= 2;
-                    pos += 2;
-                    continue;
-                }
-#endif
-                if (rhs_item & 1)
-                    res = plus(res, shift_left(lhs, pos));
-
-                rhs_item >>= 1;
-                ++pos;
-            }
-        }
-
-        return res;
+            /// @sa https://github.com/abseil/abseil-cpp/blob/master/absl/numeric/int128.h
+            using HalfType = unsigned __int128;
+
+            HalfType a01 = (HalfType(lhs.items[little(1)]) << 64) + lhs.items[little(0)];
+            HalfType a23 = (HalfType(lhs.items[little(3)]) << 64) + lhs.items[little(2)];
+            HalfType a0 = lhs.items[little(0)];
+            HalfType a1 = lhs.items[little(1)];
+
+            HalfType b01 = rhs;
+            uint64_t b0 = b01;
+            uint64_t b1 = 0;
+            HalfType b23 = 0;
+            if constexpr (sizeof(T) > 8)
+                b1 = b01 >> 64;
+            if constexpr (sizeof(T) > 16)
+                b23 = (HalfType(rhs.items[little(3)]) << 64) + rhs.items[little(2)];
+
+            HalfType r23 = a23 * b01 + a01 * b23 + a1 * b1;
+            HalfType r01 = a0 * b0;
+            HalfType r12 = (r01 >> 64) + (r23 << 64);
+            HalfType r12_x = a1 * b0;
+
+            integer<Bits, Signed> res;
+            res.items[little(0)] = r01;
+            res.items[little(3)] = r23 >> 64;
+
+            if constexpr (sizeof(T) > 8)
+            {
+                HalfType r12_y = a0 * b1;
+                r12_x += r12_y;
+                if (r12_x < r12_y)
+                    ++res.items[little(3)];
+            }
+
+            r12 += r12_x;
+            if (r12 < r12_x)
+                ++res.items[little(3)];
+
+            res.items[little(1)] = r12;
+            res.items[little(2)] = r12 >> 64;
+            return res;
+        }
+        else
+        {
+            integer<Bits, Signed> res{};
+#if 1
+            integer<Bits, Signed> lhs2 = plus(lhs, shift_left(lhs, 1));
+            integer<Bits, Signed> lhs3 = plus(lhs2, shift_left(lhs, 2));
+#endif
+            for (unsigned i = 0; i < item_count; ++i)
+            {
+                base_type rhs_item = get_item(rhs, i);
+                unsigned pos = i * base_bits;
+
+                while (rhs_item)
+                {
+#if 1 /// optimization
+                    if ((rhs_item & 0x7) == 0x7)
+                    {
+                        res = plus(res, shift_left(lhs3, pos));
+                        rhs_item >>= 3;
+                        pos += 3;
+                        continue;
+                    }
+
+                    if ((rhs_item & 0x3) == 0x3)
+                    {
+                        res = plus(res, shift_left(lhs2, pos));
+                        rhs_item >>= 2;
+                        pos += 2;
+                        continue;
+                    }
+#endif
+                    if (rhs_item & 1)
+                        res = plus(res, shift_left(lhs, pos));
+
+                    rhs_item >>= 1;
+                    ++pos;
+                }
+            }
+
+            return res;
+        }
     }
 
 public:
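The fast path above splits each 256-bit operand into 128-bit halves built from 64-bit items and combines partial products with unsigned __int128 arithmetic, detecting lost carries with the unsigned-wraparound test (after x += y, x < y means a carry). A self-contained sketch of the same technique one level down — multiplying two 128-bit values held as 64-bit limbs into a 256-bit result; here every intermediate sum provably fits in 128 bits, so no explicit carry test is needed:

    #include <cstdint>

    using u128 = unsigned __int128;

    struct U256 { uint64_t w[4]; };   // w[0] is the least significant limb

    // Schoolbook 128x128 -> 256 multiply on 64-bit limbs, in the spirit of the
    // wide_integer fast path: a = a1*2^64 + a0, b = b1*2^64 + b0.
    U256 mul128(uint64_t a0, uint64_t a1, uint64_t b0, uint64_t b1)
    {
        u128 p00 = u128(a0) * b0;   // contributes to w0, w1
        u128 p01 = u128(a0) * b1;   // contributes to w1, w2
        u128 p10 = u128(a1) * b0;   // contributes to w1, w2
        u128 p11 = u128(a1) * b1;   // contributes to w2, w3

        U256 r{};
        r.w[0] = uint64_t(p00);

        u128 mid = (p00 >> 64) + uint64_t(p01) + uint64_t(p10);   // < 3 * 2^64, fits
        r.w[1] = uint64_t(mid);

        u128 hi = p11 + (p01 >> 64) + (p10 >> 64) + (mid >> 64);  // <= 2^128 - 1, fits
        r.w[2] = uint64_t(hi);
        r.w[3] = uint64_t(hi >> 64);
        return r;
    }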
[likely base/glibc-compatibility/CMakeLists.txt — file header lost in the mirror]
@@ -27,6 +27,10 @@ if (GLIBC_COMPATIBILITY)
         list(APPEND glibc_compatibility_sources musl/getentropy.c)
     endif()
 
+    add_library (clickhouse_memcpy OBJECT
+        ${ClickHouse_SOURCE_DIR}/contrib/FastMemcpy/memcpy_wrapper.c
+    )
+
     # Need to omit frame pointers to match the performance of glibc
     set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fomit-frame-pointer")
 
[header file — name lost in the mirror]
@@ -1,3 +1,5 @@
+#pragma once
+
 #include <optional>
 #include <string>
 #include <Poco/AutoPtr.h>
[likely mysqlxx/Pool.cpp — file header lost in the mirror]
@@ -21,8 +21,8 @@ void Pool::Entry::incrementRefCount()
 {
     if (!data)
         return;
-    ++data->ref_count;
-    if (data->ref_count == 1)
+    /// First reference, initialize thread
+    if (data->ref_count.fetch_add(1) == 0)
         mysql_thread_init();
 }
 
@@ -30,12 +30,10 @@ void Pool::Entry::decrementRefCount()
 {
     if (!data)
         return;
-    if (data->ref_count > 0)
-    {
-        --data->ref_count;
-        if (data->ref_count == 0)
-            mysql_thread_end();
-    }
+    /// We were the last user of this thread, deinitialize it
+    if (data->ref_count.fetch_sub(1) == 1)
+        mysql_thread_end();
 }
 
 
[likely mysqlxx/Pool.h — file header lost in the mirror]
@@ -3,6 +3,7 @@
 #include <list>
 #include <memory>
 #include <mutex>
+#include <atomic>
 
 #include <Poco/Exception.h>
 #include <mysqlxx/Connection.h>
@@ -35,7 +36,9 @@ protected:
     struct Connection
     {
         mysqlxx::Connection conn;
-        int ref_count = 0;
+        /// Ref count modified in constructor/descructor of Entry
+        /// but also read in pool code.
+        std::atomic<int> ref_count = 0;
     };
 
 public:
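The two Pool hunks above turn ref_count into std::atomic<int> and collapse the read-then-modify sequences into single fetch_add/fetch_sub calls, so the 0 -> 1 and 1 -> 0 transitions are each observed by exactly one thread. A minimal sketch of the pattern, with stand-in functions in place of mysql_thread_init()/mysql_thread_end():

    #include <atomic>
    #include <cstdio>

    std::atomic<int> ref_count{0};

    void thread_resource_init() { std::puts("init"); }   // stand-in for mysql_thread_init()
    void thread_resource_end()  { std::puts("end");  }   // stand-in for mysql_thread_end()

    void incrementRefCount()
    {
        // fetch_add returns the previous value: only the 0 -> 1 transition initializes.
        if (ref_count.fetch_add(1) == 0)
            thread_resource_init();
    }

    void decrementRefCount()
    {
        // fetch_sub returns the previous value: only the 1 -> 0 transition deinitializes.
        if (ref_count.fetch_sub(1) == 1)
            thread_resource_end();
    }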
[benchmark results script — file header lost in the mirror; converted from Python 2 to Python 3]
@@ -1,6 +1,6 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
+
 
-from __future__ import print_function
 import sys
 import json
 
@@ -99,7 +99,7 @@ def gen_html_json(options, arguments):
     tuples = read_stats_file(options, arguments[1])
     print('{')
    print('"system: GreenPlum(x2),')
-    print('"version": "%s",' % '4.3.9.1')
+    print(('"version": "%s",' % '4.3.9.1'))
     print('"data_size": 10000000,')
     print('"time": "",')
     print('"comments": "",')
cmake/Modules/FindgRPC.cmake (new file, +330)
@@ -0,0 +1,330 @@
#[[
Defines the following variables:
``gRPC_FOUND``
  Whether the gRPC framework is found
``gRPC_INCLUDE_DIRS``
  The include directories of the gRPC framework, including the include directories of the C++ wrapper.
``gRPC_LIBRARIES``
  The libraries of the gRPC framework.
``gRPC_UNSECURE_LIBRARIES``
  The libraries of the gRPC framework without SSL.
``_gRPC_CPP_PLUGIN``
  The plugin for generating gRPC client and server C++ stubs from `.proto` files
``_gRPC_PYTHON_PLUGIN``
  The plugin for generating gRPC client and server Python stubs from `.proto` files

The following :prop_tgt:`IMPORTED` targets are also defined:
``grpc++``
``grpc++_unsecure``
``grpc_cpp_plugin``
``grpc_python_plugin``

Add custom commands to process ``.proto`` files to C++::
protobuf_generate_grpc_cpp(<SRCS> <HDRS>
    [DESCRIPTORS <DESC>] [EXPORT_MACRO <MACRO>] [<ARGN>...])

``SRCS``
  Variable to define with autogenerated source files
``HDRS``
  Variable to define with autogenerated header files
``DESCRIPTORS``
  Variable to define with autogenerated descriptor files, if requested.
``EXPORT_MACRO``
  is a macro which should expand to ``__declspec(dllexport)`` or
  ``__declspec(dllimport)`` depending on what is being compiled.
``ARGN``
  ``.proto`` files
#]]

# Function to generate C++ files from .proto files.
# This function is a modified version of the function PROTOBUF_GENERATE_CPP() copied from https://github.com/Kitware/CMake/blob/master/Modules/FindProtobuf.cmake.
function(PROTOBUF_GENERATE_GRPC_CPP SRCS HDRS)
    cmake_parse_arguments(protobuf_generate_grpc_cpp "" "EXPORT_MACRO;DESCRIPTORS" "" ${ARGN})

    set(_proto_files "${protobuf_generate_grpc_cpp_UNPARSED_ARGUMENTS}")
    if(NOT _proto_files)
        message(SEND_ERROR "Error: PROTOBUF_GENERATE_GRPC_CPP() called without any proto files")
        return()
    endif()

    if(PROTOBUF_GENERATE_GRPC_CPP_APPEND_PATH)
        set(_append_arg APPEND_PATH)
    endif()

    if(protobuf_generate_grpc_cpp_DESCRIPTORS)
        set(_descriptors DESCRIPTORS)
    endif()

    if(DEFINED PROTOBUF_IMPORT_DIRS AND NOT DEFINED Protobuf_IMPORT_DIRS)
        set(Protobuf_IMPORT_DIRS "${PROTOBUF_IMPORT_DIRS}")
    endif()

    if(DEFINED Protobuf_IMPORT_DIRS)
        set(_import_arg IMPORT_DIRS ${Protobuf_IMPORT_DIRS})
    endif()

    set(_outvar)
    protobuf_generate_grpc(${_append_arg} ${_descriptors} LANGUAGE cpp EXPORT_MACRO ${protobuf_generate_cpp_EXPORT_MACRO} OUT_VAR _outvar ${_import_arg} PROTOS ${_proto_files})

    set(${SRCS})
    set(${HDRS})
    if(protobuf_generate_grpc_cpp_DESCRIPTORS)
        set(${protobuf_generate_grpc_cpp_DESCRIPTORS})
    endif()

    foreach(_file ${_outvar})
        if(_file MATCHES "cc$")
            list(APPEND ${SRCS} ${_file})
        elseif(_file MATCHES "desc$")
            list(APPEND ${protobuf_generate_grpc_cpp_DESCRIPTORS} ${_file})
        else()
            list(APPEND ${HDRS} ${_file})
        endif()
    endforeach()
    set(${SRCS} ${${SRCS}} PARENT_SCOPE)
    set(${HDRS} ${${HDRS}} PARENT_SCOPE)
    if(protobuf_generate_grpc_cpp_DESCRIPTORS)
        set(${protobuf_generate_grpc_cpp_DESCRIPTORS} "${${protobuf_generate_grpc_cpp_DESCRIPTORS}}" PARENT_SCOPE)
    endif()
endfunction()

# Helper function.
# This function is a modified version of the function protobuf_generate() copied from https://github.com/Kitware/CMake/blob/master/Modules/FindProtobuf.cmake.
function(protobuf_generate_grpc)
    set(_options APPEND_PATH DESCRIPTORS)
    set(_singleargs LANGUAGE OUT_VAR EXPORT_MACRO PROTOC_OUT_DIR)
    if(COMMAND target_sources)
        list(APPEND _singleargs TARGET)
    endif()
    set(_multiargs PROTOS IMPORT_DIRS GENERATE_EXTENSIONS)

    cmake_parse_arguments(protobuf_generate_grpc "${_options}" "${_singleargs}" "${_multiargs}" "${ARGN}")

    if(NOT protobuf_generate_grpc_PROTOS AND NOT protobuf_generate_grpc_TARGET)
        message(SEND_ERROR "Error: protobuf_generate_grpc called without any targets or source files")
        return()
    endif()

    if(NOT protobuf_generate_grpc_OUT_VAR AND NOT protobuf_generate_grpc_TARGET)
        message(SEND_ERROR "Error: protobuf_generate_grpc called without a target or output variable")
        return()
    endif()

    if(NOT protobuf_generate_grpc_LANGUAGE)
        set(protobuf_generate_grpc_LANGUAGE cpp)
    endif()
    string(TOLOWER ${protobuf_generate_grpc_LANGUAGE} protobuf_generate_grpc_LANGUAGE)

    if(NOT protobuf_generate_grpc_PROTOC_OUT_DIR)
        set(protobuf_generate_grpc_PROTOC_OUT_DIR ${CMAKE_CURRENT_BINARY_DIR})
    endif()

    if(protobuf_generate_grpc_EXPORT_MACRO AND protobuf_generate_grpc_LANGUAGE STREQUAL cpp)
        set(_dll_export_decl "dllexport_decl=${protobuf_generate_grpc_EXPORT_MACRO}:")
    endif()

    if(NOT protobuf_generate_grpc_GENERATE_EXTENSIONS)
        if(protobuf_generate_grpc_LANGUAGE STREQUAL cpp)
            set(protobuf_generate_grpc_GENERATE_EXTENSIONS .pb.h .pb.cc .grpc.pb.h .grpc.pb.cc)
        elseif(protobuf_generate_grpc_LANGUAGE STREQUAL python)
            set(protobuf_generate_grpc_GENERATE_EXTENSIONS _pb2.py)
        else()
            message(SEND_ERROR "Error: protobuf_generate_grpc given unknown Language ${LANGUAGE}, please provide a value for GENERATE_EXTENSIONS")
            return()
        endif()
    endif()

    if(NOT protobuf_generate_grpc_PLUGIN)
        if(protobuf_generate_grpc_LANGUAGE STREQUAL cpp)
            set(protobuf_generate_grpc_PLUGIN "grpc_cpp_plugin")
        elseif(protobuf_generate_grpc_LANGUAGE STREQUAL python)
            set(protobuf_generate_grpc_PLUGIN "grpc_python_plugin")
        else()
            message(SEND_ERROR "Error: protobuf_generate_grpc given unknown Language ${LANGUAGE}, please provide a value for PLUGIN")
            return()
        endif()
    endif()

    if(protobuf_generate_grpc_TARGET)
        get_target_property(_source_list ${protobuf_generate_grpc_TARGET} SOURCES)
        foreach(_file ${_source_list})
            if(_file MATCHES "proto$")
                list(APPEND protobuf_generate_grpc_PROTOS ${_file})
            endif()
        endforeach()
    endif()

    if(NOT protobuf_generate_grpc_PROTOS)
        message(SEND_ERROR "Error: protobuf_generate_grpc could not find any .proto files")
        return()
    endif()

    if(protobuf_generate_grpc_APPEND_PATH)
        # Create an include path for each file specified
        foreach(_file ${protobuf_generate_grpc_PROTOS})
            get_filename_component(_abs_file ${_file} ABSOLUTE)
            get_filename_component(_abs_path ${_abs_file} PATH)
            list(FIND _protobuf_include_path ${_abs_path} _contains_already)
            if(${_contains_already} EQUAL -1)
                list(APPEND _protobuf_include_path -I ${_abs_path})
            endif()
        endforeach()
    else()
        set(_protobuf_include_path -I ${CMAKE_CURRENT_SOURCE_DIR})
    endif()

    foreach(DIR ${protobuf_generate_grpc_IMPORT_DIRS})
        get_filename_component(ABS_PATH ${DIR} ABSOLUTE)
        list(FIND _protobuf_include_path ${ABS_PATH} _contains_already)
        if(${_contains_already} EQUAL -1)
            list(APPEND _protobuf_include_path -I ${ABS_PATH})
        endif()
    endforeach()

    set(_generated_srcs_all)
    foreach(_proto ${protobuf_generate_grpc_PROTOS})
        get_filename_component(_abs_file ${_proto} ABSOLUTE)
        get_filename_component(_abs_dir ${_abs_file} DIRECTORY)
        get_filename_component(_basename ${_proto} NAME_WE)
        file(RELATIVE_PATH _rel_dir ${CMAKE_CURRENT_SOURCE_DIR} ${_abs_dir})

        set(_possible_rel_dir)
        if(NOT protobuf_generate_grpc_APPEND_PATH)
            set(_possible_rel_dir ${_rel_dir}/)
        endif()

        set(_generated_srcs)
        foreach(_ext ${protobuf_generate_grpc_GENERATE_EXTENSIONS})
            list(APPEND _generated_srcs "${protobuf_generate_grpc_PROTOC_OUT_DIR}/${_possible_rel_dir}${_basename}${_ext}")
        endforeach()

        if(protobuf_generate_grpc_DESCRIPTORS AND protobuf_generate_grpc_LANGUAGE STREQUAL cpp)
            set(_descriptor_file "${CMAKE_CURRENT_BINARY_DIR}/${_basename}.desc")
            set(_dll_desc_out "--descriptor_set_out=${_descriptor_file}")
            list(APPEND _generated_srcs ${_descriptor_file})
        endif()
        list(APPEND _generated_srcs_all ${_generated_srcs})

        add_custom_command(
            OUTPUT ${_generated_srcs}
            COMMAND protobuf::protoc
            ARGS --${protobuf_generate_grpc_LANGUAGE}_out ${_dll_export_decl}${protobuf_generate_grpc_PROTOC_OUT_DIR}
                 --grpc_out ${_dll_export_decl}${protobuf_generate_grpc_PROTOC_OUT_DIR}
                 --plugin=protoc-gen-grpc=$<TARGET_FILE:${protobuf_generate_grpc_PLUGIN}>
                 ${_dll_desc_out} ${_protobuf_include_path} ${_abs_file}
            DEPENDS ${_abs_file} protobuf::protoc ${protobuf_generate_grpc_PLUGIN}
            COMMENT "Running ${protobuf_generate_grpc_LANGUAGE} protocol buffer compiler on ${_proto}"
            VERBATIM)
    endforeach()

    set_source_files_properties(${_generated_srcs_all} PROPERTIES GENERATED TRUE)
    if(protobuf_generate_grpc_OUT_VAR)
        set(${protobuf_generate_grpc_OUT_VAR} ${_generated_srcs_all} PARENT_SCOPE)
    endif()
    if(protobuf_generate_grpc_TARGET)
        target_sources(${protobuf_generate_grpc_TARGET} PRIVATE ${_generated_srcs_all})
    endif()
endfunction()


# Find the libraries.
if(gRPC_USE_STATIC_LIBS)
    # Support preference of static libs by adjusting CMAKE_FIND_LIBRARY_SUFFIXES
    set(_gRPC_ORIG_CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES})
    if(WIN32)
        set(CMAKE_FIND_LIBRARY_SUFFIXES .lib .a ${CMAKE_FIND_LIBRARY_SUFFIXES})
    else()
        set(CMAKE_FIND_LIBRARY_SUFFIXES .a)
    endif()
endif()

find_library(gRPC_LIBRARY NAMES grpc)
find_library(gRPC_CPP_LIBRARY NAMES grpc++)
find_library(gRPC_UNSECURE_LIBRARY NAMES grpc_unsecure)
find_library(gRPC_CPP_UNSECURE_LIBRARY NAMES grpc++_unsecure)

set(gRPC_LIBRARIES)
if(gRPC_USE_UNSECURE_LIBRARIES)
    if(gRPC_UNSECURE_LIBRARY)
        set(gRPC_LIBRARIES ${gRPC_LIBRARIES} ${gRPC_UNSECURE_LIBRARY})
    endif()
    if(gRPC_CPP_UNSECURE_LIBRARY)
        set(gRPC_LIBRARIES ${gRPC_LIBRARIES} ${gRPC_CPP_UNSECURE_LIBRARY})
    endif()
else()
    if(gRPC_LIBRARY)
        set(gRPC_LIBRARIES ${gRPC_LIBRARIES} ${gRPC_LIBRARY})
    endif()
    if(gRPC_CPP_UNSECURE_LIBRARY)
        set(gRPC_LIBRARIES ${gRPC_LIBRARIES} ${gRPC_CPP_LIBRARY})
    endif()
endif()

# Restore the original find library ordering.
if(gRPC_USE_STATIC_LIBS)
    set(CMAKE_FIND_LIBRARY_SUFFIXES ${_gRPC_ORIG_CMAKE_FIND_LIBRARY_SUFFIXES})
endif()

# Find the include directories.
find_path(gRPC_INCLUDE_DIR grpc/grpc.h)
find_path(gRPC_CPP_INCLUDE_DIR grpc++/grpc++.h)

if(gRPC_INCLUDE_DIR AND gRPC_CPP_INCLUDE_DIR AND NOT(gRPC_INCLUDE_DIR STREQUAL gRPC_CPP_INCLUDE_DIR))
    set(gRPC_INCLUDE_DIRS ${gRPC_INCLUDE_DIR} ${gRPC_CPP_INCLUDE_DIR})
elseif(gRPC_INCLUDE_DIR)
    set(gRPC_INCLUDE_DIRS ${gRPC_INCLUDE_DIR})
else()
    set(gRPC_INCLUDE_DIRS ${gRPC_CPP_INCLUDE_DIR})
endif()

# Get full path to plugin.
find_program(_gRPC_CPP_PLUGIN
    NAMES grpc_cpp_plugin
    DOC "The plugin for generating gRPC client and server C++ stubs from `.proto` files")

find_program(_gRPC_PYTHON_PLUGIN
    NAMES grpc_python_plugin
    DOC "The plugin for generating gRPC client and server Python stubs from `.proto` files")

# Add imported targets.
if(gRPC_CPP_LIBRARY AND NOT TARGET grpc++)
    add_library(grpc++ UNKNOWN IMPORTED)
    set_target_properties(grpc++ PROPERTIES
        IMPORTED_LOCATION "${gRPC_CPP_LIBRARY}")
    set_target_properties(grpc++ PROPERTIES
        INTERFACE_INCLUDE_DIRECTORIES ${gRPC_INCLUDE_DIRS})
endif()

if(gRPC_CPP_UNSECURE_LIBRARY AND NOT TARGET grpc++_unsecure)
    add_library(grpc++_unsecure UNKNOWN IMPORTED)
    set_target_properties(grpc++_unsecure PROPERTIES
        IMPORTED_LOCATION "${gRPC_CPP_UNSECURE_LIBRARY}")
    set_target_properties(grpc++_unsecure PROPERTIES
        INTERFACE_INCLUDE_DIRECTORIES ${gRPC_INCLUDE_DIRS})
endif()

if(gRPC_CPP_PLUGIN AND NOT TARGET grpc_cpp_plugin)
    add_executable(grpc_cpp_plugin IMPORTED)
    set_target_properties(grpc_cpp_plugin PROPERTIES
        IMPORTED_LOCATION "${gRPC_CPP_PLUGIN}")
endif()

if(gRPC_PYTHON_PLUGIN AND NOT TARGET grpc_python_plugin)
    add_executable(grpc_python_plugin IMPORTED)
    set_target_properties(grpc_python_plugin PROPERTIES
        IMPORTED_LOCATION "${gRPC_PYTHON_PLUGIN}")
endif()

#include(FindPackageHandleStandardArgs.cmake)
FIND_PACKAGE_HANDLE_STANDARD_ARGS(gRPC
    REQUIRED_VARS gRPC_LIBRARY gRPC_CPP_LIBRARY gRPC_UNSECURE_LIBRARY gRPC_CPP_UNSECURE_LIBRARY
                  gRPC_INCLUDE_DIR gRPC_CPP_INCLUDE_DIR _gRPC_CPP_PLUGIN _gRPC_PYTHON_PLUGIN)

if(gRPC_FOUND)
    if(gRPC_DEBUG)
        message(STATUS "gRPC: INCLUDE_DIRS=${gRPC_INCLUDE_DIRS}")
        message(STATUS "gRPC: LIBRARIES=${gRPC_LIBRARIES}")
        message(STATUS "gRPC: CPP_PLUGIN=${_gRPC_CPP_PLUGIN}")
        message(STATUS "gRPC: PYTHON_PLUGIN=${_gRPC_PYTHON_PLUGIN}")
    endif()
endif()
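FindgRPC.cmake locates grpc_cpp_plugin, which protobuf_generate_grpc_cpp() passes to protoc to emit client/server stubs. For orientation, a hedged sketch of what calling one of those generated stubs looks like; Greeter, HelloRequest, HelloReply, SayHello and greeter.grpc.pb.h are hypothetical names from a made-up greeter.proto, while the grpc:: calls are real library API:

    #include <grpcpp/grpcpp.h>
    #include "greeter.grpc.pb.h"   // hypothetical, produced by protobuf_generate_grpc_cpp()

    int main()
    {
        // Real gRPC API: open an unauthenticated channel to a local server.
        auto channel = grpc::CreateChannel("localhost:50051", grpc::InsecureChannelCredentials());
        auto stub = Greeter::NewStub(channel);   // Greeter is a placeholder service name

        HelloRequest request;
        request.set_name("ClickHouse");

        HelloReply reply;
        grpc::ClientContext context;
        grpc::Status status = stub->SayHello(&context, request, &reply);
        return status.ok() ? 0 : 1;
    }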
[likely cmake/analysis.cmake — file header lost in the mirror]
@@ -6,7 +6,7 @@ if (ENABLE_CLANG_TIDY)
         message(FATAL_ERROR "clang-tidy requires CMake version at least 3.6.")
     endif()
 
-    find_program (CLANG_TIDY_PATH NAMES "clang-tidy" "clang-tidy-10" "clang-tidy-9" "clang-tidy-8")
+    find_program (CLANG_TIDY_PATH NAMES "clang-tidy" "clang-tidy-11" "clang-tidy-10" "clang-tidy-9" "clang-tidy-8")
 
     if (CLANG_TIDY_PATH)
         message(STATUS
[version settings file — header lost in the mirror]
@@ -1,9 +1,9 @@
 # This strings autochanged from release_lib.sh:
-SET(VERSION_REVISION 54441)
+SET(VERSION_REVISION 54442)
 SET(VERSION_MAJOR 20)
-SET(VERSION_MINOR 10)
+SET(VERSION_MINOR 11)
 SET(VERSION_PATCH 1)
-SET(VERSION_GITHASH 11a247d2f42010c1a17bf678c3e00a4bc89b23f8)
-SET(VERSION_DESCRIBE v20.10.1.1-prestable)
-SET(VERSION_STRING 20.10.1.1)
+SET(VERSION_GITHASH 76a04fb4b4f6cd27ad999baf6dc9a25e88851c42)
+SET(VERSION_DESCRIBE v20.11.1.1-prestable)
+SET(VERSION_STRING 20.11.1.1)
 # end of autochange
[likely cmake/find/grpc.cmake — file header lost in the mirror]
@@ -1,45 +1,65 @@
-option (ENABLE_GRPC "Use gRPC" ${ENABLE_LIBRARIES})
-
-if (NOT ENABLE_GRPC)
-    if (USE_INTERNAL_GRPC_LIBRARY)
-        message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal gRPC library with ENABLE_GRPC=OFF")
-    endif()
-    return()
-endif()
-
-option (USE_INTERNAL_GRPC_LIBRARY
-    "Set to FALSE to use system gRPC library instead of bundled. (Experimental. Set to OFF on your own risk)"
-    ${NOT_UNBUNDLED})
-
-if (NOT USE_INTERNAL_GRPC_LIBRARY)
-    find_package(grpc)
-    if (NOT GRPC_FOUND)
-        find_path(GRPC_INCLUDE_DIR grpcpp/grpcpp.h)
-        find_library(GRPC_LIBRARY grpc++)
-    endif ()
-
-    if (GRPC_INCLUDE_DIR AND GRPC_LIBRARY)
-        set (USE_GRPC ON)
-    else()
-        message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system gRPC")
-    endif()
-endif()
-
-if (NOT USE_GRPC)
-    if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/grpc/include/grpc++/grpc++.h")
-        message (WARNING "submodule contrib/grpc is missing. To fix try run: \n git submodule update --init --recursive")
-        message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal gRPC")
-        set (USE_INTERNAL_GRPC_LIBRARY OFF)
-    elseif (NOT USE_PROTOBUF)
-        message (WARNING "gRPC requires protobuf which is disabled")
-        message (${RECONFIGURE_MESSAGE_LEVEL} "Will not use internal gRPC without protobuf")
-        set (USE_INTERNAL_GRPC_LIBRARY OFF)
-    else()
-        set (GRPC_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/grpc/include")
-        set (GRPC_LIBRARY "libgrpc++")
-        set (USE_GRPC ON)
-        set (USE_INTERNAL_GRPC_LIBRARY ON)
-    endif()
-endif()
-
-message (STATUS "Using gRPC=${USE_GRPC}: ${GRPC_INCLUDE_DIR} : ${GRPC_LIBRARY}")
+option(ENABLE_GRPC "Use gRPC" ${ENABLE_LIBRARIES})
+
+if(NOT ENABLE_GRPC)
+    if(USE_INTERNAL_GRPC_LIBRARY)
+        message(${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal gRPC library with ENABLE_GRPC=OFF")
+    endif()
+    return()
+endif()
+
+if(NOT USE_PROTOBUF)
+    message(WARNING "Cannot use gRPC library without protobuf")
+endif()
+
+# Normally we use the internal gRPC framework.
+# You can set USE_INTERNAL_GRPC_LIBRARY to OFF to force using the external gRPC framework, which should be installed in the system in this case.
+# The external gRPC framework can be installed in the system by running
+# sudo apt-get install libgrpc++-dev protobuf-compiler-grpc
+option(USE_INTERNAL_GRPC_LIBRARY "Set to FALSE to use system gRPC library instead of bundled. (Experimental. Set to OFF on your own risk)" ${NOT_UNBUNDLED})
+
+if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/grpc/CMakeLists.txt")
+    if(USE_INTERNAL_GRPC_LIBRARY)
+        message(WARNING "submodule contrib/grpc is missing. to fix try run: \n git submodule update --init --recursive")
+        message(${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal grpc")
+        set(USE_INTERNAL_GRPC_LIBRARY 0)
+    endif()
+    set(MISSING_INTERNAL_GRPC_LIBRARY 1)
+endif()
+
+if(USE_SSL)
+    set(gRPC_USE_UNSECURE_LIBRARIES FALSE)
+else()
+    set(gRPC_USE_UNSECURE_LIBRARIES TRUE)
+endif()
+
+if(NOT USE_INTERNAL_GRPC_LIBRARY)
+    find_package(gRPC)
+    if(NOT gRPC_INCLUDE_DIRS OR NOT gRPC_LIBRARIES)
+        message(${RECONFIGURE_MESSAGE_LEVEL} "Can't find system gRPC library")
+        set(EXTERNAL_GRPC_LIBRARY_FOUND 0)
+    elseif(NOT _gRPC_CPP_PLUGIN)
+        message(${RECONFIGURE_MESSAGE_LEVEL} "Can't find system grcp_cpp_plugin")
+        set(EXTERNAL_GRPC_LIBRARY_FOUND 0)
+    else()
+        set(EXTERNAL_GRPC_LIBRARY_FOUND 1)
+        set(USE_GRPC 1)
+    endif()
+endif()
+
+if(NOT EXTERNAL_GRPC_LIBRARY_FOUND AND NOT MISSING_INTERNAL_GRPC_LIBRARY)
+    set(gRPC_INCLUDE_DIRS "${ClickHouse_SOURCE_DIR}/contrib/grpc/include")
+    if(gRPC_USE_UNSECURE_LIBRARIES)
+        set(gRPC_LIBRARIES grpc_unsecure grpc++_unsecure)
+    else()
+        set(gRPC_LIBRARIES grpc grpc++)
+    endif()
+    set(_gRPC_CPP_PLUGIN $<TARGET_FILE:grpc_cpp_plugin>)
+    set(_gRPC_PROTOC_EXECUTABLE $<TARGET_FILE:protobuf::protoc>)
+
+    include("${ClickHouse_SOURCE_DIR}/contrib/grpc-cmake/protobuf_generate_grpc.cmake")
+
+    set(USE_INTERNAL_GRPC_LIBRARY 1)
+    set(USE_GRPC 1)
+endif()
+
+message(STATUS "Using gRPC=${USE_GRPC}: ${gRPC_INCLUDE_DIRS} : ${gRPC_LIBRARIES} : ${_gRPC_CPP_PLUGIN}")
[likely cmake/find/llvm.cmake — file header lost in the mirror]
@@ -26,7 +26,7 @@ endif ()
 if (NOT USE_INTERNAL_LLVM_LIBRARY)
     set (LLVM_PATHS "/usr/local/lib/llvm")
 
-    foreach(llvm_v 9 8)
+    foreach(llvm_v 10 9 8)
         if (NOT LLVM_FOUND)
             find_package (LLVM ${llvm_v} CONFIG PATHS ${LLVM_PATHS})
         endif ()
[likely cmake/find/odbc.cmake — file header lost in the mirror]
@@ -26,8 +26,8 @@ if (NOT USE_INTERNAL_ODBC_LIBRARY)
     find_path (INCLUDE_ODBC sql.h)
 
     if(LIBRARY_ODBC AND INCLUDE_ODBC)
-        add_library (unixodbc UNKNOWN IMPORTED)
-        set_target_properties (unixodbc PROPERTIES IMPORTED_LOCATION ${LIBRARY_ODBC})
+        add_library (unixodbc INTERFACE)
+        set_target_properties (unixodbc PROPERTIES INTERFACE_LINK_LIBRARIES ${LIBRARY_ODBC})
         set_target_properties (unixodbc PROPERTIES INTERFACE_INCLUDE_DIRECTORIES ${INCLUDE_ODBC})
         set_target_properties (unixodbc PROPERTIES INTERFACE_COMPILE_DEFINITIONS USE_ODBC=1)
 
[likely cmake/find/protobuf.cmake — file header lost in the mirror]
@@ -1,57 +1,62 @@
 option(ENABLE_PROTOBUF "Enable protobuf" ${ENABLE_LIBRARIES})
 
 if(NOT ENABLE_PROTOBUF)
     if(USE_INTERNAL_PROTOBUF_LIBRARY)
-        message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal protobuf with ENABLE_PROTOBUF=OFF")
+        message(${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal protobuf with ENABLE_PROTOBUF=OFF")
     endif()
     return()
 endif()
 
-option(USE_INTERNAL_PROTOBUF_LIBRARY "Set to FALSE to use system protobuf instead of bundled" ${NOT_UNBUNDLED})
+# Normally we use the internal protobuf library.
+# You can set USE_INTERNAL_PROTOBUF_LIBRARY to OFF to force using the external protobuf library, which should be installed in the system in this case.
+# The external protobuf library can be installed in the system by running
+# sudo apt-get install libprotobuf-dev protobuf-compiler libprotoc-dev
+option(USE_INTERNAL_PROTOBUF_LIBRARY "Set to FALSE to use system protobuf instead of bundled. (Experimental. Set to OFF on your own risk)" ${NOT_UNBUNDLED})
 
 if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/protobuf/cmake/CMakeLists.txt")
     if(USE_INTERNAL_PROTOBUF_LIBRARY)
         message(WARNING "submodule contrib/protobuf is missing. to fix try run: \n git submodule update --init --recursive")
-        message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal protobuf")
+        message(${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal protobuf")
         set(USE_INTERNAL_PROTOBUF_LIBRARY 0)
     endif()
     set(MISSING_INTERNAL_PROTOBUF_LIBRARY 1)
 endif()
 
 if(NOT USE_INTERNAL_PROTOBUF_LIBRARY)
     find_package(Protobuf)
-    if (Protobuf_LIBRARY AND Protobuf_INCLUDE_DIR AND Protobuf_PROTOC_EXECUTABLE)
-        set(EXTERNAL_PROTOBUF_LIBRARY_FOUND 1)
-        set(USE_PROTOBUF 1)
-    else()
-        message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system protobuf")
+    if(NOT Protobuf_INCLUDE_DIR OR NOT Protobuf_LIBRARY)
+        message(${RECONFIGURE_MESSAGE_LEVEL} "Can't find system protobuf library")
+        set(EXTERNAL_PROTOBUF_LIBRARY_FOUND 0)
+    elseif(NOT Protobuf_PROTOC_EXECUTABLE)
+        message(${RECONFIGURE_MESSAGE_LEVEL} "Can't find system protobuf compiler")
         set(EXTERNAL_PROTOBUF_LIBRARY_FOUND 0)
+    else()
+        set(EXTERNAL_PROTOBUF_LIBRARY_FOUND 1)
+        set(USE_PROTOBUF 1)
     endif()
 endif()
 
-if (NOT EXTERNAL_PROTOBUF_LIBRARY_FOUND AND NOT MISSING_INTERNAL_PROTOBUF_LIBRARY)
+if(NOT EXTERNAL_PROTOBUF_LIBRARY_FOUND AND NOT MISSING_INTERNAL_PROTOBUF_LIBRARY)
     set(Protobuf_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/protobuf/src")
+    set(Protobuf_LIBRARY libprotobuf)
+    set(Protobuf_PROTOC_EXECUTABLE "$<TARGET_FILE:protoc>")
+    set(Protobuf_PROTOC_LIBRARY libprotoc)
 
-    set(USE_PROTOBUF 1)
-    set(USE_INTERNAL_PROTOBUF_LIBRARY 1)
-    set(Protobuf_LIBRARY libprotobuf)
-    set(Protobuf_PROTOC_LIBRARY libprotoc)
-    set(Protobuf_LITE_LIBRARY libprotobuf-lite)
+    include("${ClickHouse_SOURCE_DIR}/contrib/protobuf-cmake/protobuf_generate.cmake")
 
-    set(Protobuf_PROTOC_EXECUTABLE "$<TARGET_FILE:protoc>")
+    set(USE_INTERNAL_PROTOBUF_LIBRARY 1)
+    set(USE_PROTOBUF 1)
 endif()
 
 if(OS_FREEBSD AND SANITIZE STREQUAL "address")
     # ../contrib/protobuf/src/google/protobuf/arena_impl.h:45:10: fatal error: 'sanitizer/asan_interface.h' file not found
     # #include <sanitizer/asan_interface.h>
     if(LLVM_INCLUDE_DIRS)
         set(Protobuf_INCLUDE_DIR "${Protobuf_INCLUDE_DIR}" ${LLVM_INCLUDE_DIRS})
     else()
-        message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use protobuf on FreeBSD with address sanitizer without LLVM")
+        message(${RECONFIGURE_MESSAGE_LEVEL} "Can't use protobuf on FreeBSD with address sanitizer without LLVM")
         set(USE_PROTOBUF 0)
     endif()
 endif()
 
-include ("${ClickHouse_SOURCE_DIR}/cmake/protobuf_generate_cpp.cmake")
-
-message(STATUS "Using protobuf=${USE_PROTOBUF}: ${Protobuf_INCLUDE_DIR} : ${Protobuf_LIBRARY} : ${Protobuf_PROTOC_EXECUTABLE}")
+message(STATUS "Using protobuf=${USE_PROTOBUF}: ${Protobuf_INCLUDE_DIR} : ${Protobuf_LIBRARY} : ${Protobuf_PROTOC_EXECUTABLE} : ${Protobuf_PROTOC_LIBRARY}")
[likely cmake/find/rdkafka.cmake — file header lost in the mirror]
@@ -14,10 +14,10 @@ if (NOT ENABLE_RDKAFKA)
     return()
 endif()
 
-if (NOT ARCH_ARM AND USE_LIBGSASL)
+if (NOT ARCH_ARM)
     option (USE_INTERNAL_RDKAFKA_LIBRARY "Set to FALSE to use system librdkafka instead of the bundled" ${NOT_UNBUNDLED})
 elseif(USE_INTERNAL_RDKAFKA_LIBRARY)
-    message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal librdkafka with ARCH_ARM=${ARCH_ARM} AND USE_LIBGSASL=${USE_LIBGSASL}")
+    message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal librdkafka with ARCH_ARM=${ARCH_ARM}")
 endif ()
 
 if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/cppkafka/CMakeLists.txt")
[cmake/protobuf_generate_cpp.cmake — deleted file, −172; path from the script's self-reference]
@@ -1,172 +0,0 @@
# This file declares functions adding custom commands for generating C++ files from *.proto files:
# function (protobuf_generate_cpp SRCS HDRS)
# function (protobuf_generate_grpc_cpp SRCS HDRS)

if (NOT USE_PROTOBUF)
    message (WARNING "Could not use protobuf_generate_cpp() without the protobuf library")
    return()
endif()

if (NOT DEFINED PROTOBUF_PROTOC_EXECUTABLE)
    set (PROTOBUF_PROTOC_EXECUTABLE "$<TARGET_FILE:protoc>")
endif()

if (NOT DEFINED GRPC_CPP_PLUGIN_EXECUTABLE)
    set (GRPC_CPP_PLUGIN_EXECUTABLE $<TARGET_FILE:grpc_cpp_plugin>)
endif()

if (NOT DEFINED PROTOBUF_GENERATE_CPP_APPEND_PATH)
    set (PROTOBUF_GENERATE_CPP_APPEND_PATH TRUE)
endif()


function(protobuf_generate_cpp_impl SRCS HDRS MODES OUTPUT_FILE_EXTS PLUGIN)
    if(NOT ARGN)
        message(SEND_ERROR "Error: protobuf_generate_cpp() called without any proto files")
        return()
    endif()

    if(PROTOBUF_GENERATE_CPP_APPEND_PATH)
        # Create an include path for each file specified
        foreach(FIL ${ARGN})
            get_filename_component(ABS_FIL ${FIL} ABSOLUTE)
            get_filename_component(ABS_PATH ${ABS_FIL} PATH)
            list(FIND protobuf_include_path ${ABS_PATH} _contains_already)
            if(${_contains_already} EQUAL -1)
                list(APPEND protobuf_include_path -I ${ABS_PATH})
            endif()
        endforeach()
    else()
        set(protobuf_include_path -I ${CMAKE_CURRENT_SOURCE_DIR})
    endif()

    if(DEFINED PROTOBUF_IMPORT_DIRS AND NOT DEFINED Protobuf_IMPORT_DIRS)
        set(Protobuf_IMPORT_DIRS "${PROTOBUF_IMPORT_DIRS}")
    endif()

    if(DEFINED Protobuf_IMPORT_DIRS)
        foreach(DIR ${Protobuf_IMPORT_DIRS})
            get_filename_component(ABS_PATH ${DIR} ABSOLUTE)
            list(FIND protobuf_include_path ${ABS_PATH} _contains_already)
            if(${_contains_already} EQUAL -1)
                list(APPEND protobuf_include_path -I ${ABS_PATH})
            endif()
        endforeach()
    endif()

    set (intermediate_dir ${CMAKE_CURRENT_BINARY_DIR}/intermediate)
    file (MAKE_DIRECTORY ${intermediate_dir})

    set (protoc_args)
    foreach (mode ${MODES})
        list (APPEND protoc_args "--${mode}_out" ${intermediate_dir})
    endforeach()
    if (PLUGIN)
        list (APPEND protoc_args "--plugin=${PLUGIN}")
    endif()

    set(srcs)
    set(hdrs)
    set(all_intermediate_outputs)

    foreach(input_name ${ARGN})
        get_filename_component(abs_name ${input_name} ABSOLUTE)
        get_filename_component(name ${input_name} NAME_WE)

        set (intermediate_outputs)
        foreach (ext ${OUTPUT_FILE_EXTS})
            set (filename "${name}${ext}")
            set (output "${CMAKE_CURRENT_BINARY_DIR}/${filename}")
            set (intermediate_output "${intermediate_dir}/${filename}")
            list (APPEND intermediate_outputs "${intermediate_output}")
            list (APPEND all_intermediate_outputs "${intermediate_output}")

            if (${ext} MATCHES ".*\\.h")
                list(APPEND hdrs "${output}")
            else()
                list(APPEND srcs "${output}")
            endif()

            add_custom_command(
                OUTPUT ${output}
                COMMAND ${CMAKE_COMMAND} -DPROTOBUF_GENERATE_CPP_SCRIPT_MODE=1 -DUSE_PROTOBUF=1 -DDIR=${CMAKE_CURRENT_BINARY_DIR} -DFILENAME=${filename} -DCOMPILER_ID=${CMAKE_CXX_COMPILER_ID} -P ${ClickHouse_SOURCE_DIR}/cmake/protobuf_generate_cpp.cmake
                DEPENDS ${intermediate_output})
        endforeach()

        add_custom_command(
            OUTPUT ${intermediate_outputs}
            COMMAND ${Protobuf_PROTOC_EXECUTABLE}
            ARGS ${protobuf_include_path} ${protoc_args} ${abs_name}
            DEPENDS ${abs_name} ${Protobuf_PROTOC_EXECUTABLE} ${PLUGIN}
            COMMENT "Running C++ protocol buffer compiler on ${name}"
            VERBATIM )
    endforeach()

    set_source_files_properties(${srcs} ${hdrs} ${all_intermediate_outputs} PROPERTIES GENERATED TRUE)
    set(${SRCS} ${srcs} PARENT_SCOPE)
    set(${HDRS} ${hdrs} PARENT_SCOPE)
endfunction()


if (PROTOBUF_GENERATE_CPP_SCRIPT_MODE)
    set (output "${DIR}/${FILENAME}")
    set (intermediate_dir ${DIR}/intermediate)
    set (intermediate_output "${intermediate_dir}/${FILENAME}")

    if (COMPILER_ID MATCHES "Clang")
        set (pragma_push "#pragma clang diagnostic push\n")
        set (pragma_pop "#pragma clang diagnostic pop\n")
        set (pragma_disable_warnings "#pragma clang diagnostic ignored \"-Weverything\"\n")
    elseif (COMPILER_ID MATCHES "GNU")
        set (pragma_push "#pragma GCC diagnostic push\n")
        set (pragma_pop "#pragma GCC diagnostic pop\n")
        set (pragma_disable_warnings "#pragma GCC diagnostic ignored \"-Wall\"\n"
                                     "#pragma GCC diagnostic ignored \"-Wextra\"\n"
                                     "#pragma GCC diagnostic ignored \"-Warray-bounds\"\n"
                                     "#pragma GCC diagnostic ignored \"-Wold-style-cast\"\n"
                                     "#pragma GCC diagnostic ignored \"-Wshadow\"\n"
                                     "#pragma GCC diagnostic ignored \"-Wsuggest-override\"\n"
                                     "#pragma GCC diagnostic ignored \"-Wcast-qual\"\n"
                                     "#pragma GCC diagnostic ignored \"-Wunused-parameter\"\n")
    endif()

    if (${FILENAME} MATCHES ".*\\.h")
        file(WRITE "${output}"
            "#pragma once\n"
            ${pragma_push}
            ${pragma_disable_warnings}
            "#include \"${intermediate_output}\"\n"
            ${pragma_pop}
        )
    else()
        file(WRITE "${output}"
            ${pragma_disable_warnings}
            "#include \"${intermediate_output}\"\n"
        )
    endif()
    return()
endif()


function(protobuf_generate_cpp SRCS HDRS)
    set (modes cpp)
    set (output_file_exts ".pb.cc" ".pb.h")
    set (plugin)

    protobuf_generate_cpp_impl(srcs hdrs "${modes}" "${output_file_exts}" "${plugin}" ${ARGN})

    set(${SRCS} ${srcs} PARENT_SCOPE)
    set(${HDRS} ${hdrs} PARENT_SCOPE)
endfunction()


function(protobuf_generate_grpc_cpp SRCS HDRS)
    set (modes cpp grpc)
    set (output_file_exts ".pb.cc" ".pb.h" ".grpc.pb.cc" ".grpc.pb.h")
    set (plugin "protoc-gen-grpc=${GRPC_CPP_PLUGIN_EXECUTABLE}")

    protobuf_generate_cpp_impl(srcs hdrs "${modes}" "${output_file_exts}" "${plugin}" ${ARGN})

    set(${SRCS} ${srcs} PARENT_SCOPE)
    set(${HDRS} ${hdrs} PARENT_SCOPE)
endfunction()
[likely cmake/sanitize.cmake — file header lost in the mirror]
@@ -57,8 +57,8 @@ if (SANITIZE)
        endif ()
 
    elseif (SANITIZE STREQUAL "undefined")
-        set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${SAN_FLAGS} -fsanitize=undefined -fno-sanitize-recover=all -fno-sanitize=float-divide-by-zero")
-        set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${SAN_FLAGS} -fsanitize=undefined -fno-sanitize-recover=all -fno-sanitize=float-divide-by-zero")
+        set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${SAN_FLAGS} -fsanitize=undefined -fno-sanitize-recover=all -fno-sanitize=float-divide-by-zero -fsanitize-blacklist=${CMAKE_SOURCE_DIR}/tests/ubsan_suppressions.txt")
+        set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${SAN_FLAGS} -fsanitize=undefined -fno-sanitize-recover=all -fno-sanitize=float-divide-by-zero -fsanitize-blacklist=${CMAKE_SOURCE_DIR}/tests/ubsan_suppressions.txt")
        if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
            set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fsanitize=undefined")
        endif()
[likely cmake/tools.cmake — file header lost in the mirror]
@@ -15,6 +15,10 @@ if (COMPILER_GCC)
 elseif (COMPILER_CLANG)
     # Require minimum version of clang/apple-clang
     if (CMAKE_CXX_COMPILER_ID MATCHES "AppleClang")
+        # If you are developer you can figure out what exact versions of AppleClang are Ok,
+        # remove the following line and commit changes below.
+        message (FATAL_ERROR "AppleClang is not supported, you should install clang from brew.")
+
         # AppleClang 10.0.1 (Xcode 10.2) corresponds to LLVM/Clang upstream version 7.0.0
         # AppleClang 11.0.0 (Xcode 11.0) corresponds to LLVM/Clang upstream version 8.0.0
         set (XCODE_MINIMUM_VERSION 10.2)
@ -31,6 +31,7 @@ if (COMPILER_CLANG)
|
|||||||
add_warning(pedantic)
|
add_warning(pedantic)
|
||||||
no_warning(vla-extension)
|
no_warning(vla-extension)
|
||||||
no_warning(zero-length-array)
|
no_warning(zero-length-array)
|
||||||
|
no_warning(c11-extensions)
|
||||||
|
|
||||||
add_warning(comma)
|
add_warning(comma)
|
||||||
add_warning(conditional-uninitialized)
|
add_warning(conditional-uninitialized)
|
||||||
@ -57,7 +58,10 @@ if (COMPILER_CLANG)
|
|||||||
add_warning(unused-exception-parameter)
|
add_warning(unused-exception-parameter)
|
||||||
add_warning(unused-macros)
|
add_warning(unused-macros)
|
||||||
add_warning(unused-member-function)
|
add_warning(unused-member-function)
|
||||||
add_warning(zero-as-null-pointer-constant)
|
# XXX: libstdc++ has some of these for 3way compare
|
||||||
|
if (USE_LIBCXX)
|
||||||
|
add_warning(zero-as-null-pointer-constant)
|
||||||
|
endif()
|
||||||
|
|
||||||
if (WEVERYTHING)
|
if (WEVERYTHING)
|
||||||
add_warning(everything)
|
add_warning(everything)
|
||||||
@ -88,6 +92,11 @@ if (COMPILER_CLANG)
|
|||||||
no_warning(weak-template-vtables)
|
no_warning(weak-template-vtables)
|
||||||
no_warning(weak-vtables)
|
no_warning(weak-vtables)
|
||||||
|
|
||||||
|
# XXX: libstdc++ has some of these for 3way compare
|
||||||
|
if (NOT USE_LIBCXX)
|
||||||
|
no_warning(zero-as-null-pointer-constant)
|
||||||
|
endif()
|
||||||
|
|
||||||
# TODO Enable conversion, sign-conversion, double-promotion warnings.
|
# TODO Enable conversion, sign-conversion, double-promotion warnings.
|
||||||
endif ()
|
endif ()
|
||||||
elseif (COMPILER_GCC)
|
elseif (COMPILER_GCC)
|
||||||
@@ -169,6 +178,11 @@ elseif (COMPILER_GCC)
     add_cxx_compile_options(-Wunused)
     # Warn if vector operation is not implemented via SIMD capabilities of the architecture
     add_cxx_compile_options(-Wvector-operation-performance)
+    # XXX: libstdc++ has some of these for 3way compare
+    if (USE_LIBCXX)
+        # Warn when a literal 0 is used as null pointer constant.
+        add_cxx_compile_options(-Wzero-as-null-pointer-constant)
+    endif()
 
     if (CMAKE_CXX_COMPILER_VERSION VERSION_GREATER_EQUAL 10)
         # XXX: gcc10 stuck with this option while compiling GatherUtils code
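Note: add_warning()/no_warning() in the hunks above are ClickHouse's own CMake helper macros, defined elsewhere in the repository. As a rough, illustrative sketch of the pattern such a helper typically follows (not the repo's exact definition):

# Illustrative sketch: enable -W<flag> only if the compiler accepts it.
include (CheckCXXCompilerFlag)

macro (add_warning flag)
    string (REPLACE "-" "_" underscored ${flag})
    check_cxx_compiler_flag ("-W${flag}" SUPPORTS_CXXFLAG_${underscored})
    if (SUPPORTS_CXXFLAG_${underscored})
        set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -W${flag}")
    endif ()
endmacro ()

# no_warning(x) is then just add_warning(no-x).
macro (no_warning flag)
    add_warning (no-${flag})
endmacro ()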
2
contrib/AMQP-CPP
vendored
@@ -1 +1 @@
-Subproject commit 1c08399ab0ab9e4042ef8e2bbe9e208e5dcbc13b
+Subproject commit d63e1f016582e9faaaf279aa24513087a07bc6e7
@@ -1,4 +1,4 @@
-#include <FastMemcpy.h>
+#include "FastMemcpy.h"
 
 void * memcpy(void * __restrict destination, const void * __restrict source, size_t size)
 {
@@ -16,6 +16,7 @@ set (SRCS
     ${LIBRARY_DIR}/src/flags.cpp
     ${LIBRARY_DIR}/src/linux_tcp/openssl.cpp
     ${LIBRARY_DIR}/src/linux_tcp/tcpconnection.cpp
+    ${LIBRARY_DIR}/src/inbuffer.cpp
     ${LIBRARY_DIR}/src/receivedframe.cpp
     ${LIBRARY_DIR}/src/table.cpp
     ${LIBRARY_DIR}/src/watchable.cpp
2
contrib/cyrus-sasl
vendored
@@ -1 +1 @@
-Subproject commit 6054630889fd1cd8d0659573d69badcee1e23a00
+Subproject commit 9995bf9d8e14f58934d9313ac64f13780d6dd3c9
2
contrib/grpc
vendored
@@ -1 +1 @@
-Subproject commit 8aea4e168e78f3eb9828080740fc8cb73d53bf79
+Subproject commit a6570b863cf76c9699580ba51c7827d5bffaac43
File diff suppressed because it is too large
207
contrib/grpc-cmake/protobuf_generate_grpc.cmake
Normal file
@@ -0,0 +1,207 @@
+#[[
+Add custom commands to process ``.proto`` files to C++::
+protobuf_generate_grpc_cpp(<SRCS> <HDRS>
+    [DESCRIPTORS <DESC>] [EXPORT_MACRO <MACRO>] [<ARGN>...])
+
+``SRCS``
+  Variable to define with autogenerated source files
+``HDRS``
+  Variable to define with autogenerated header files
+``DESCRIPTORS``
+  Variable to define with autogenerated descriptor files, if requested.
+``EXPORT_MACRO``
+  is a macro which should expand to ``__declspec(dllexport)`` or
+  ``__declspec(dllimport)`` depending on what is being compiled.
+``ARGN``
+  ``.proto`` files
+#]]
+
+# Function to generate C++ files from .proto files.
+# This function is a modified version of the function PROTOBUF_GENERATE_CPP() copied from https://github.com/Kitware/CMake/blob/master/Modules/FindProtobuf.cmake.
+function(PROTOBUF_GENERATE_GRPC_CPP SRCS HDRS)
+    cmake_parse_arguments(protobuf_generate_grpc_cpp "" "EXPORT_MACRO;DESCRIPTORS" "" ${ARGN})
+
+    set(_proto_files "${protobuf_generate_grpc_cpp_UNPARSED_ARGUMENTS}")
+    if(NOT _proto_files)
+        message(SEND_ERROR "Error: PROTOBUF_GENERATE_GRPC_CPP() called without any proto files")
+        return()
+    endif()
+
+    if(PROTOBUF_GENERATE_GRPC_CPP_APPEND_PATH)
+        set(_append_arg APPEND_PATH)
+    endif()
+
+    if(protobuf_generate_grpc_cpp_DESCRIPTORS)
+        set(_descriptors DESCRIPTORS)
+    endif()
+
+    if(DEFINED PROTOBUF_IMPORT_DIRS AND NOT DEFINED Protobuf_IMPORT_DIRS)
+        set(Protobuf_IMPORT_DIRS "${PROTOBUF_IMPORT_DIRS}")
+    endif()
+
+    if(DEFINED Protobuf_IMPORT_DIRS)
+        set(_import_arg IMPORT_DIRS ${Protobuf_IMPORT_DIRS})
+    endif()
+
+    set(_outvar)
+    protobuf_generate_grpc(${_append_arg} ${_descriptors} LANGUAGE cpp EXPORT_MACRO ${protobuf_generate_cpp_EXPORT_MACRO} OUT_VAR _outvar ${_import_arg} PROTOS ${_proto_files})
+
+    set(${SRCS})
+    set(${HDRS})
+    if(protobuf_generate_grpc_cpp_DESCRIPTORS)
+        set(${protobuf_generate_grpc_cpp_DESCRIPTORS})
+    endif()
+
+    foreach(_file ${_outvar})
+        if(_file MATCHES "cc$")
+            list(APPEND ${SRCS} ${_file})
+        elseif(_file MATCHES "desc$")
+            list(APPEND ${protobuf_generate_grpc_cpp_DESCRIPTORS} ${_file})
+        else()
+            list(APPEND ${HDRS} ${_file})
+        endif()
+    endforeach()
+    set(${SRCS} ${${SRCS}} PARENT_SCOPE)
+    set(${HDRS} ${${HDRS}} PARENT_SCOPE)
+    if(protobuf_generate_grpc_cpp_DESCRIPTORS)
+        set(${protobuf_generate_grpc_cpp_DESCRIPTORS} "${${protobuf_generate_grpc_cpp_DESCRIPTORS}}" PARENT_SCOPE)
+    endif()
+endfunction()
+
+# Helper function.
+# This function is a modified version of the function protobuf_generate() copied from https://github.com/Kitware/CMake/blob/master/Modules/FindProtobuf.cmake.
+function(protobuf_generate_grpc)
+    set(_options APPEND_PATH DESCRIPTORS)
+    set(_singleargs LANGUAGE OUT_VAR EXPORT_MACRO PROTOC_OUT_DIR)
+    if(COMMAND target_sources)
+        list(APPEND _singleargs TARGET)
+    endif()
+    set(_multiargs PROTOS IMPORT_DIRS GENERATE_EXTENSIONS)
+
+    cmake_parse_arguments(protobuf_generate_grpc "${_options}" "${_singleargs}" "${_multiargs}" "${ARGN}")
+
+    if(NOT protobuf_generate_grpc_PROTOS AND NOT protobuf_generate_grpc_TARGET)
+        message(SEND_ERROR "Error: protobuf_generate_grpc called without any targets or source files")
+        return()
+    endif()
+
+    if(NOT protobuf_generate_grpc_OUT_VAR AND NOT protobuf_generate_grpc_TARGET)
+        message(SEND_ERROR "Error: protobuf_generate_grpc called without a target or output variable")
+        return()
+    endif()
+
+    if(NOT protobuf_generate_grpc_LANGUAGE)
+        set(protobuf_generate_grpc_LANGUAGE cpp)
+    endif()
+    string(TOLOWER ${protobuf_generate_grpc_LANGUAGE} protobuf_generate_grpc_LANGUAGE)
+
+    if(NOT protobuf_generate_grpc_PROTOC_OUT_DIR)
+        set(protobuf_generate_grpc_PROTOC_OUT_DIR ${CMAKE_CURRENT_BINARY_DIR})
+    endif()
+
+    if(protobuf_generate_grpc_EXPORT_MACRO AND protobuf_generate_grpc_LANGUAGE STREQUAL cpp)
+        set(_dll_export_decl "dllexport_decl=${protobuf_generate_grpc_EXPORT_MACRO}:")
+    endif()
+
+    if(NOT protobuf_generate_grpc_GENERATE_EXTENSIONS)
+        if(protobuf_generate_grpc_LANGUAGE STREQUAL cpp)
+            set(protobuf_generate_grpc_GENERATE_EXTENSIONS .pb.h .pb.cc .grpc.pb.h .grpc.pb.cc)
+        elseif(protobuf_generate_grpc_LANGUAGE STREQUAL python)
+            set(protobuf_generate_grpc_GENERATE_EXTENSIONS _pb2.py)
+        else()
+            message(SEND_ERROR "Error: protobuf_generate_grpc given unknown Language ${LANGUAGE}, please provide a value for GENERATE_EXTENSIONS")
+            return()
+        endif()
+    endif()
+
+    if(NOT protobuf_generate_grpc_PLUGIN)
+        if(protobuf_generate_grpc_LANGUAGE STREQUAL cpp)
+            set(protobuf_generate_grpc_PLUGIN "grpc_cpp_plugin")
+        elseif(protobuf_generate_grpc_LANGUAGE STREQUAL python)
+            set(protobuf_generate_grpc_PLUGIN "grpc_python_plugin")
+        else()
+            message(SEND_ERROR "Error: protobuf_generate_grpc given unknown Language ${LANGUAGE}, please provide a value for PLUGIN")
+            return()
+        endif()
+    endif()
+
+    if(protobuf_generate_grpc_TARGET)
+        get_target_property(_source_list ${protobuf_generate_grpc_TARGET} SOURCES)
+        foreach(_file ${_source_list})
+            if(_file MATCHES "proto$")
+                list(APPEND protobuf_generate_grpc_PROTOS ${_file})
+            endif()
+        endforeach()
+    endif()
+
+    if(NOT protobuf_generate_grpc_PROTOS)
+        message(SEND_ERROR "Error: protobuf_generate_grpc could not find any .proto files")
+        return()
+    endif()
+
+    if(protobuf_generate_grpc_APPEND_PATH)
+        # Create an include path for each file specified
+        foreach(_file ${protobuf_generate_grpc_PROTOS})
+            get_filename_component(_abs_file ${_file} ABSOLUTE)
+            get_filename_component(_abs_path ${_abs_file} PATH)
+            list(FIND _protobuf_include_path ${_abs_path} _contains_already)
+            if(${_contains_already} EQUAL -1)
+                list(APPEND _protobuf_include_path -I ${_abs_path})
+            endif()
+        endforeach()
+    else()
+        set(_protobuf_include_path -I ${CMAKE_CURRENT_SOURCE_DIR})
+    endif()
+
+    foreach(DIR ${protobuf_generate_grpc_IMPORT_DIRS})
+        get_filename_component(ABS_PATH ${DIR} ABSOLUTE)
+        list(FIND _protobuf_include_path ${ABS_PATH} _contains_already)
+        if(${_contains_already} EQUAL -1)
+            list(APPEND _protobuf_include_path -I ${ABS_PATH})
+        endif()
+    endforeach()
+
+    set(_generated_srcs_all)
+    foreach(_proto ${protobuf_generate_grpc_PROTOS})
+        get_filename_component(_abs_file ${_proto} ABSOLUTE)
+        get_filename_component(_abs_dir ${_abs_file} DIRECTORY)
+        get_filename_component(_basename ${_proto} NAME_WE)
+        file(RELATIVE_PATH _rel_dir ${CMAKE_CURRENT_SOURCE_DIR} ${_abs_dir})
+
+        set(_possible_rel_dir)
+        if(NOT protobuf_generate_grpc_APPEND_PATH)
+            set(_possible_rel_dir ${_rel_dir}/)
+        endif()
+
+        set(_generated_srcs)
+        foreach(_ext ${protobuf_generate_grpc_GENERATE_EXTENSIONS})
+            list(APPEND _generated_srcs "${protobuf_generate_grpc_PROTOC_OUT_DIR}/${_possible_rel_dir}${_basename}${_ext}")
+        endforeach()
+
+        if(protobuf_generate_grpc_DESCRIPTORS AND protobuf_generate_grpc_LANGUAGE STREQUAL cpp)
+            set(_descriptor_file "${CMAKE_CURRENT_BINARY_DIR}/${_basename}.desc")
+            set(_dll_desc_out "--descriptor_set_out=${_descriptor_file}")
+            list(APPEND _generated_srcs ${_descriptor_file})
+        endif()
+        list(APPEND _generated_srcs_all ${_generated_srcs})
+
+        add_custom_command(
+            OUTPUT ${_generated_srcs}
+            COMMAND protobuf::protoc
+            ARGS --${protobuf_generate_grpc_LANGUAGE}_out ${_dll_export_decl}${protobuf_generate_grpc_PROTOC_OUT_DIR}
+                 --grpc_out ${_dll_export_decl}${protobuf_generate_grpc_PROTOC_OUT_DIR}
+                 --plugin=protoc-gen-grpc=$<TARGET_FILE:${protobuf_generate_grpc_PLUGIN}>
+                 ${_dll_desc_out} ${_protobuf_include_path} ${_abs_file}
+            DEPENDS ${_abs_file} protobuf::protoc ${protobuf_generate_grpc_PLUGIN}
+            COMMENT "Running ${protobuf_generate_grpc_LANGUAGE} protocol buffer compiler on ${_proto}"
+            VERBATIM)
+    endforeach()
+
+    set_source_files_properties(${_generated_srcs_all} PROPERTIES GENERATED TRUE)
+    if(protobuf_generate_grpc_OUT_VAR)
+        set(${protobuf_generate_grpc_OUT_VAR} ${_generated_srcs_all} PARENT_SCOPE)
+    endif()
+    if(protobuf_generate_grpc_TARGET)
+        target_sources(${protobuf_generate_grpc_TARGET} PRIVATE ${_generated_srcs_all})
+    endif()
+endfunction()
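For orientation, a minimal usage sketch of the helper defined above; the proto file and target names here are hypothetical, not part of this commit:

# Hypothetical usage of PROTOBUF_GENERATE_GRPC_CPP from a consumer CMakeLists.txt.
PROTOBUF_GENERATE_GRPC_CPP(GREETER_SRCS GREETER_HDRS helloworld.proto)

# The generated .pb.cc / .grpc.pb.cc files need the protobuf and gRPC runtimes.
add_library(greeter_proto ${GREETER_SRCS} ${GREETER_HDRS})
target_link_libraries(greeter_proto PUBLIC grpc++ libprotobuf)
target_include_directories(greeter_proto PUBLIC ${CMAKE_CURRENT_BINARY_DIR})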
@@ -26,8 +26,8 @@ if (NOT USE_INTERNAL_HYPERSCAN_LIBRARY)
     if (LIBRARY_HYPERSCAN AND INCLUDE_HYPERSCAN)
         set (EXTERNAL_HYPERSCAN_LIBRARY_FOUND 1)
 
-        add_library (hyperscan UNKNOWN IMPORTED GLOBAL)
-        set_target_properties (hyperscan PROPERTIES IMPORTED_LOCATION ${LIBRARY_HYPERSCAN})
+        add_library (hyperscan INTERFACE)
+        set_target_properties (hyperscan PROPERTIES INTERFACE_LINK_LIBRARIES ${LIBRARY_HYPERSCAN})
         set_target_properties (hyperscan PROPERTIES INTERFACE_INCLUDE_DIRECTORIES ${INCLUDE_HYPERSCAN})
         set_property(TARGET hyperscan APPEND PROPERTY INTERFACE_COMPILE_DEFINITIONS USE_HYPERSCAN=1)
     else ()
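The UNKNOWN IMPORTED to INTERFACE switch above is a general CMake pattern for wrapping a prebuilt system library; a generic sketch with illustrative names:

# Generic sketch of the pattern: an INTERFACE target only carries usage
# requirements (link line, headers, defines) instead of claiming to "be"
# a library file, so no IMPORTED_LOCATION bookkeeping is needed.
add_library (somelib INTERFACE)
set_target_properties (somelib PROPERTIES INTERFACE_LINK_LIBRARIES "${LIBRARY_SOMELIB}")
set_target_properties (somelib PROPERTIES INTERFACE_INCLUDE_DIRECTORIES "${INCLUDE_SOMELIB}")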
2
contrib/jemalloc
vendored
@@ -1 +1 @@
-Subproject commit 026764f19995c53583ab25a3b9c06a2fd74e4689
+Subproject commit 93e27e435cac846028da20cd9b0841fbc9110bd2
@@ -9,10 +9,6 @@ else()
 endif ()
 
 if (NOT ENABLE_JEMALLOC)
-    if(USE_INTERNAL_JEMALLOC_LIBRARY)
-        message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal jemalloc with ENABLE_JEMALLOC=OFF")
-    endif()
-
     add_library(jemalloc INTERFACE)
     target_compile_definitions(jemalloc INTERFACE USE_JEMALLOC=0)
 
@@ -24,162 +20,116 @@ if (NOT OS_LINUX)
     message (WARNING "jemalloc support on non-linux is EXPERIMENTAL")
 endif()
 
-option (USE_INTERNAL_JEMALLOC_LIBRARY "Use internal jemalloc library" ${NOT_UNBUNDLED})
-
-if (NOT USE_INTERNAL_JEMALLOC_LIBRARY)
-    find_library(LIBRARY_JEMALLOC jemalloc)
-    find_path(INCLUDE_JEMALLOC jemalloc/jemalloc.h)
-
-    if (LIBRARY_JEMALLOC AND INCLUDE_JEMALLOC)
-        set(EXTERNAL_JEMALLOC_LIBRARY_FOUND 1)
-
-        set(THREADS_PREFER_PTHREAD_FLAG ON)
-        find_package(Threads)
-
-        set (CMAKE_REQUIRED_LIBRARIES ${LIBRARY_JEMALLOC} Threads::Threads "dl")
-        set (CMAKE_REQUIRED_INCLUDES ${INCLUDE_JEMALLOC})
-        check_cxx_source_compiles (
-            "
-            #include <jemalloc/jemalloc.h>
-
-            int main() {
-                free(mallocx(1, 0));
-            }
-            "
-            EXTERNAL_JEMALLOC_LIBRARY_WORKS
-        )
-
-        if (EXTERNAL_JEMALLOC_LIBRARY_WORKS)
-            add_library (jemalloc STATIC IMPORTED)
-            set_property (TARGET jemalloc PROPERTY IMPORTED_LOCATION ${LIBRARY_JEMALLOC})
-            set_property (TARGET jemalloc PROPERTY INTERFACE_INCLUDE_DIRECTORIES ${INCLUDE_JEMALLOC})
-            set_property (TARGET jemalloc PROPERTY INTERFACE_LINK_LIBRARIES Threads::Threads dl)
-        else()
-            message (${RECONFIGURE_MESSAGE_LEVEL} "External jemalloc is unusable: ${LIBRARY_JEMALLOC} ${INCLUDE_JEMALLOC}")
-        endif ()
-    else()
-        set(EXTERNAL_JEMALLOC_LIBRARY_FOUND 0)
-        message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system jemalloc")
-    endif()
+if (OS_LINUX)
+    # ThreadPool select job randomly, and there can be some threads that had been
+    # performed some memory heavy task before and will be inactive for some time,
+    # but until it will became active again, the memory will not be freed since by
+    # default each thread has it's own arena, but there should be not more then
+    # 4*CPU arenas (see opt.nareans description).
+    #
+    # By enabling percpu_arena number of arenas limited to number of CPUs and hence
+    # this problem should go away.
+    #
+    # muzzy_decay_ms -- use MADV_FREE when available on newer Linuxes, to
+    # avoid spurious latencies and additional work associated with
+    # MADV_DONTNEED. See
+    # https://github.com/ClickHouse/ClickHouse/issues/11121 for motivation.
+    set (JEMALLOC_CONFIG_MALLOC_CONF "percpu_arena:percpu,oversize_threshold:0,muzzy_decay_ms:10000")
+else()
+    set (JEMALLOC_CONFIG_MALLOC_CONF "oversize_threshold:0,muzzy_decay_ms:10000")
+endif()
+# CACHE variable is empty, to allow changing defaults without necessity
+# to purge cache
+set (JEMALLOC_CONFIG_MALLOC_CONF_OVERRIDE "" CACHE STRING "Change default configuration string of JEMalloc" )
+if (JEMALLOC_CONFIG_MALLOC_CONF_OVERRIDE)
+    set (JEMALLOC_CONFIG_MALLOC_CONF "${JEMALLOC_CONFIG_MALLOC_CONF_OVERRIDE}")
+endif()
+message (STATUS "jemalloc malloc_conf: ${JEMALLOC_CONFIG_MALLOC_CONF}")
+
+set (LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/jemalloc")
+
+set (SRCS
+    ${LIBRARY_DIR}/src/arena.c
+    ${LIBRARY_DIR}/src/background_thread.c
+    ${LIBRARY_DIR}/src/base.c
+    ${LIBRARY_DIR}/src/bin.c
+    ${LIBRARY_DIR}/src/bitmap.c
+    ${LIBRARY_DIR}/src/ckh.c
+    ${LIBRARY_DIR}/src/ctl.c
+    ${LIBRARY_DIR}/src/div.c
+    ${LIBRARY_DIR}/src/extent.c
+    ${LIBRARY_DIR}/src/extent_dss.c
+    ${LIBRARY_DIR}/src/extent_mmap.c
+    ${LIBRARY_DIR}/src/hash.c
+    ${LIBRARY_DIR}/src/hook.c
+    ${LIBRARY_DIR}/src/jemalloc.c
+    ${LIBRARY_DIR}/src/large.c
+    ${LIBRARY_DIR}/src/log.c
+    ${LIBRARY_DIR}/src/malloc_io.c
+    ${LIBRARY_DIR}/src/mutex.c
+    ${LIBRARY_DIR}/src/mutex_pool.c
+    ${LIBRARY_DIR}/src/nstime.c
+    ${LIBRARY_DIR}/src/pages.c
+    ${LIBRARY_DIR}/src/prng.c
+    ${LIBRARY_DIR}/src/prof.c
+    ${LIBRARY_DIR}/src/rtree.c
+    ${LIBRARY_DIR}/src/sc.c
+    ${LIBRARY_DIR}/src/stats.c
+    ${LIBRARY_DIR}/src/sz.c
+    ${LIBRARY_DIR}/src/tcache.c
+    ${LIBRARY_DIR}/src/test_hooks.c
+    ${LIBRARY_DIR}/src/ticker.c
+    ${LIBRARY_DIR}/src/tsd.c
+    ${LIBRARY_DIR}/src/witness.c
+    ${LIBRARY_DIR}/src/safety_check.c
+)
+if (OS_DARWIN)
+    list(APPEND SRCS ${LIBRARY_DIR}/src/zone.c)
 endif ()
 
-if (NOT EXTERNAL_JEMALLOC_LIBRARY_FOUND OR NOT EXTERNAL_JEMALLOC_LIBRARY_WORKS)
-    set(USE_INTERNAL_JEMALLOC_LIBRARY 1)
-
-    if (OS_LINUX)
-        # ThreadPool select job randomly, and there can be some threads that had been
-        # performed some memory heavy task before and will be inactive for some time,
-        # but until it will became active again, the memory will not be freed since by
-        # default each thread has it's own arena, but there should be not more then
-        # 4*CPU arenas (see opt.nareans description).
-        #
-        # By enabling percpu_arena number of arenas limited to number of CPUs and hence
-        # this problem should go away.
-        #
-        # muzzy_decay_ms -- use MADV_FREE when available on newer Linuxes, to
-        # avoid spurious latencies and additional work associated with
-        # MADV_DONTNEED. See
-        # https://github.com/ClickHouse/ClickHouse/issues/11121 for motivation.
-        set (JEMALLOC_CONFIG_MALLOC_CONF "percpu_arena:percpu,oversize_threshold:0,muzzy_decay_ms:10000")
-    else()
-        set (JEMALLOC_CONFIG_MALLOC_CONF "oversize_threshold:0,muzzy_decay_ms:10000")
-    endif()
-    # CACHE variable is empty, to allow changing defaults without necessity
-    # to purge cache
-    set (JEMALLOC_CONFIG_MALLOC_CONF_OVERRIDE "" CACHE STRING "Change default configuration string of JEMalloc" )
-    if (JEMALLOC_CONFIG_MALLOC_CONF_OVERRIDE)
-        set (JEMALLOC_CONFIG_MALLOC_CONF "${JEMALLOC_CONFIG_MALLOC_CONF_OVERRIDE}")
-    endif()
-    message (STATUS "jemalloc malloc_conf: ${JEMALLOC_CONFIG_MALLOC_CONF}")
-
-    set (LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/jemalloc")
-
-    set (SRCS
-        ${LIBRARY_DIR}/src/arena.c
-        ${LIBRARY_DIR}/src/background_thread.c
-        ${LIBRARY_DIR}/src/base.c
-        ${LIBRARY_DIR}/src/bin.c
-        ${LIBRARY_DIR}/src/bitmap.c
-        ${LIBRARY_DIR}/src/ckh.c
-        ${LIBRARY_DIR}/src/ctl.c
-        ${LIBRARY_DIR}/src/div.c
-        ${LIBRARY_DIR}/src/extent.c
-        ${LIBRARY_DIR}/src/extent_dss.c
-        ${LIBRARY_DIR}/src/extent_mmap.c
-        ${LIBRARY_DIR}/src/hash.c
-        ${LIBRARY_DIR}/src/hook.c
-        ${LIBRARY_DIR}/src/jemalloc.c
-        ${LIBRARY_DIR}/src/large.c
-        ${LIBRARY_DIR}/src/log.c
-        ${LIBRARY_DIR}/src/malloc_io.c
-        ${LIBRARY_DIR}/src/mutex.c
-        ${LIBRARY_DIR}/src/mutex_pool.c
-        ${LIBRARY_DIR}/src/nstime.c
-        ${LIBRARY_DIR}/src/pages.c
-        ${LIBRARY_DIR}/src/prng.c
-        ${LIBRARY_DIR}/src/prof.c
-        ${LIBRARY_DIR}/src/rtree.c
-        ${LIBRARY_DIR}/src/sc.c
-        ${LIBRARY_DIR}/src/stats.c
-        ${LIBRARY_DIR}/src/sz.c
-        ${LIBRARY_DIR}/src/tcache.c
-        ${LIBRARY_DIR}/src/test_hooks.c
-        ${LIBRARY_DIR}/src/ticker.c
-        ${LIBRARY_DIR}/src/tsd.c
-        ${LIBRARY_DIR}/src/witness.c
-        ${LIBRARY_DIR}/src/safety_check.c
-    )
-    if (OS_DARWIN)
-        list(APPEND SRCS ${LIBRARY_DIR}/src/zone.c)
-    endif ()
-
-    add_library(jemalloc ${SRCS})
-    target_include_directories(jemalloc PRIVATE ${LIBRARY_DIR}/include)
-    target_include_directories(jemalloc SYSTEM PUBLIC include)
-
-    set (JEMALLOC_INCLUDE_PREFIX)
-    # OS_
-    if (OS_LINUX)
-        set (JEMALLOC_INCLUDE_PREFIX "include_linux")
-    elseif (OS_FREEBSD)
-        set (JEMALLOC_INCLUDE_PREFIX "include_freebsd")
-    elseif (OS_DARWIN)
-        set (JEMALLOC_INCLUDE_PREFIX "include_darwin")
-    else ()
-        message (FATAL_ERROR "internal jemalloc: This OS is not supported")
-    endif ()
-    # ARCH_
-    if (ARCH_AMD64)
-        set(JEMALLOC_INCLUDE_PREFIX "${JEMALLOC_INCLUDE_PREFIX}_x86_64")
-    elseif (ARCH_ARM)
-        set(JEMALLOC_INCLUDE_PREFIX "${JEMALLOC_INCLUDE_PREFIX}_aarch64")
-    else ()
-        message (FATAL_ERROR "internal jemalloc: This arch is not supported")
-    endif ()
-
-    configure_file(${JEMALLOC_INCLUDE_PREFIX}/jemalloc/internal/jemalloc_internal_defs.h.in
-                   ${JEMALLOC_INCLUDE_PREFIX}/jemalloc/internal/jemalloc_internal_defs.h)
-    target_include_directories(jemalloc SYSTEM PRIVATE
-                               ${CMAKE_CURRENT_BINARY_DIR}/${JEMALLOC_INCLUDE_PREFIX}/jemalloc/internal)
-
-    target_compile_definitions(jemalloc PRIVATE -DJEMALLOC_NO_PRIVATE_NAMESPACE)
-
-    if (CMAKE_BUILD_TYPE_UC STREQUAL "DEBUG")
-        target_compile_definitions(jemalloc PRIVATE -DJEMALLOC_DEBUG=1 -DJEMALLOC_PROF=1)
-
-        if (USE_UNWIND)
-            target_compile_definitions (jemalloc PRIVATE -DJEMALLOC_PROF_LIBUNWIND=1)
-            target_link_libraries (jemalloc PRIVATE unwind)
-        endif ()
-    endif ()
-
-    target_compile_options(jemalloc PRIVATE -Wno-redundant-decls)
-    # for RTLD_NEXT
-    target_compile_options(jemalloc PRIVATE -D_GNU_SOURCE)
-
-    set (USE_INTERNAL_JEMALLOC_LIBRARY 1)
-endif ()
+add_library(jemalloc ${SRCS})
+target_include_directories(jemalloc PRIVATE ${LIBRARY_DIR}/include)
+target_include_directories(jemalloc SYSTEM PUBLIC include)
+
+set (JEMALLOC_INCLUDE_PREFIX)
+# OS_
+if (OS_LINUX)
+    set (JEMALLOC_INCLUDE_PREFIX "include_linux")
+elseif (OS_FREEBSD)
+    set (JEMALLOC_INCLUDE_PREFIX "include_freebsd")
+elseif (OS_DARWIN)
+    set (JEMALLOC_INCLUDE_PREFIX "include_darwin")
+else ()
+    message (FATAL_ERROR "internal jemalloc: This OS is not supported")
+endif ()
+# ARCH_
+if (ARCH_AMD64)
+    set(JEMALLOC_INCLUDE_PREFIX "${JEMALLOC_INCLUDE_PREFIX}_x86_64")
+elseif (ARCH_ARM)
+    set(JEMALLOC_INCLUDE_PREFIX "${JEMALLOC_INCLUDE_PREFIX}_aarch64")
+else ()
+    message (FATAL_ERROR "internal jemalloc: This arch is not supported")
+endif ()
+
+configure_file(${JEMALLOC_INCLUDE_PREFIX}/jemalloc/internal/jemalloc_internal_defs.h.in
+               ${JEMALLOC_INCLUDE_PREFIX}/jemalloc/internal/jemalloc_internal_defs.h)
+target_include_directories(jemalloc SYSTEM PRIVATE
+                           ${CMAKE_CURRENT_BINARY_DIR}/${JEMALLOC_INCLUDE_PREFIX}/jemalloc/internal)
+
+target_compile_definitions(jemalloc PRIVATE -DJEMALLOC_NO_PRIVATE_NAMESPACE)
+
+if (CMAKE_BUILD_TYPE_UC STREQUAL "DEBUG")
+    target_compile_definitions(jemalloc PRIVATE -DJEMALLOC_DEBUG=1 -DJEMALLOC_PROF=1)
+
+    if (USE_UNWIND)
+        target_compile_definitions (jemalloc PRIVATE -DJEMALLOC_PROF_LIBUNWIND=1)
+        target_link_libraries (jemalloc PRIVATE unwind)
+    endif ()
+endif ()
+
+target_compile_options(jemalloc PRIVATE -Wno-redundant-decls)
+# for RTLD_NEXT
+target_compile_options(jemalloc PRIVATE -D_GNU_SOURCE)
 
 set_property(TARGET jemalloc APPEND PROPERTY INTERFACE_COMPILE_DEFINITIONS USE_JEMALLOC=1)
 if (MAKE_STATIC_LIBRARIES)
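The JEMALLOC_CONFIG_MALLOC_CONF_OVERRIDE cache variable introduced above can be set without purging the build cache; one hypothetical way is a cache-preload script passed to cmake with -C (file name and value are illustrative):

# jemalloc_conf.cmake -- hypothetical preload script: cmake -C jemalloc_conf.cmake <source-dir>
# Replaces the default malloc_conf string that gets baked into the binary.
set (JEMALLOC_CONFIG_MALLOC_CONF_OVERRIDE
     "percpu_arena:percpu,oversize_threshold:0,muzzy_decay_ms:5000"
     CACHE STRING "Change default configuration string of JEMalloc")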
2
contrib/libhdfs3
vendored
@@ -1 +1 @@
-Subproject commit 1b666578c85094306b061352078022f6350bfab8
+Subproject commit 30552ac527f2c14070d834e171493b2e7f662375
@@ -6,8 +6,8 @@ if (NOT USE_INTERNAL_LZ4_LIBRARY)
 
     if (LIBRARY_LZ4 AND INCLUDE_LZ4)
        set(EXTERNAL_LZ4_LIBRARY_FOUND 1)
-        add_library (lz4 UNKNOWN IMPORTED)
-        set_property (TARGET lz4 PROPERTY IMPORTED_LOCATION ${LIBRARY_LZ4})
+        add_library (lz4 INTERFACE)
+        set_property (TARGET lz4 PROPERTY INTERFACE_LINK_LIBRARIES ${LIBRARY_LZ4})
         set_property (TARGET lz4 PROPERTY INTERFACE_INCLUDE_DIRECTORIES ${INCLUDE_LZ4})
         set_property (TARGET lz4 APPEND PROPERTY INTERFACE_COMPILE_DEFINITIONS USE_XXHASH=0)
     else()
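Consumer side of the same INTERFACE-target change (the executable name is hypothetical): linking the target pulls in the library, its headers, and the USE_XXHASH=0 define in one step.

# Hypothetical consumer: usage requirements propagate automatically.
add_executable (my_compressor main.cpp)
target_link_libraries (my_compressor PRIVATE lz4)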
2
contrib/mariadb-connector-c
vendored
@@ -1 +1 @@
-Subproject commit 3f512fedf0ba0f769a1b4852b4bac542d92c5b20
+Subproject commit f5638e954a79f50bac7c7a5deaa5a241e0ce8b5f
2
contrib/openssl
vendored
@@ -1 +1 @@
-Subproject commit 07e9623064508d15dd61367f960ebe7fc9aecd77
+Subproject commit 237260dd6a4bca5cb5a321d366a8a9c807957455
2
contrib/poco
vendored
@@ -1 +1 @@
-Subproject commit 297fc905e166392156f83b96aaa5f44e8a6a35c4
+Subproject commit 757d947235b307675cff964f29b19d388140a9eb
198
contrib/protobuf-cmake/protobuf_generate.cmake
Normal file
@@ -0,0 +1,198 @@
+# The code in this file was copied from https://github.com/Kitware/CMake/blob/master/Modules/FindProtobuf.cmake
+
+#[[
+Add custom commands to process ``.proto`` files to C++::
+
+protobuf_generate_cpp (<SRCS> <HDRS>
+    [DESCRIPTORS <DESC>] [EXPORT_MACRO <MACRO>] [<ARGN>...])
+
+``SRCS``
+  Variable to define with autogenerated source files
+``HDRS``
+  Variable to define with autogenerated header files
+``DESCRIPTORS``
+  Variable to define with autogenerated descriptor files, if requested.
+``EXPORT_MACRO``
+  is a macro which should expand to ``__declspec(dllexport)`` or
+  ``__declspec(dllimport)`` depending on what is being compiled.
+``ARGN``
+  ``.proto`` files
+#]]
+
+function(PROTOBUF_GENERATE_CPP SRCS HDRS)
+    cmake_parse_arguments(protobuf_generate_cpp "" "EXPORT_MACRO;DESCRIPTORS" "" ${ARGN})
+
+    set(_proto_files "${protobuf_generate_cpp_UNPARSED_ARGUMENTS}")
+    if(NOT _proto_files)
+        message(SEND_ERROR "Error: PROTOBUF_GENERATE_CPP() called without any proto files")
+        return()
+    endif()
+
+    if(PROTOBUF_GENERATE_CPP_APPEND_PATH)
+        set(_append_arg APPEND_PATH)
+    endif()
+
+    if(protobuf_generate_cpp_DESCRIPTORS)
+        set(_descriptors DESCRIPTORS)
+    endif()
+
+    if(DEFINED PROTOBUF_IMPORT_DIRS AND NOT DEFINED Protobuf_IMPORT_DIRS)
+        set(Protobuf_IMPORT_DIRS "${PROTOBUF_IMPORT_DIRS}")
+    endif()
+
+    if(DEFINED Protobuf_IMPORT_DIRS)
+        set(_import_arg IMPORT_DIRS ${Protobuf_IMPORT_DIRS})
+    endif()
+
+    set(_outvar)
+    protobuf_generate(${_append_arg} ${_descriptors} LANGUAGE cpp EXPORT_MACRO ${protobuf_generate_cpp_EXPORT_MACRO} OUT_VAR _outvar ${_import_arg} PROTOS ${_proto_files})
+
+    set(${SRCS})
+    set(${HDRS})
+    if(protobuf_generate_cpp_DESCRIPTORS)
+        set(${protobuf_generate_cpp_DESCRIPTORS})
+    endif()
+
+    foreach(_file ${_outvar})
+        if(_file MATCHES "cc$")
+            list(APPEND ${SRCS} ${_file})
+        elseif(_file MATCHES "desc$")
+            list(APPEND ${protobuf_generate_cpp_DESCRIPTORS} ${_file})
+        else()
+            list(APPEND ${HDRS} ${_file})
+        endif()
+    endforeach()
+    set(${SRCS} ${${SRCS}} PARENT_SCOPE)
+    set(${HDRS} ${${HDRS}} PARENT_SCOPE)
+    if(protobuf_generate_cpp_DESCRIPTORS)
+        set(${protobuf_generate_cpp_DESCRIPTORS} "${${protobuf_generate_cpp_DESCRIPTORS}}" PARENT_SCOPE)
+    endif()
+endfunction()
+
+# By default have PROTOBUF_GENERATE_CPP macro pass -I to protoc
+# for each directory where a proto file is referenced.
+if(NOT DEFINED PROTOBUF_GENERATE_CPP_APPEND_PATH)
+    set(PROTOBUF_GENERATE_CPP_APPEND_PATH TRUE)
+endif()
+
+function(protobuf_generate)
+    set(_options APPEND_PATH DESCRIPTORS)
+    set(_singleargs LANGUAGE OUT_VAR EXPORT_MACRO PROTOC_OUT_DIR)
+    if(COMMAND target_sources)
+        list(APPEND _singleargs TARGET)
+    endif()
+    set(_multiargs PROTOS IMPORT_DIRS GENERATE_EXTENSIONS)
+
+    cmake_parse_arguments(protobuf_generate "${_options}" "${_singleargs}" "${_multiargs}" "${ARGN}")
+
+    if(NOT protobuf_generate_PROTOS AND NOT protobuf_generate_TARGET)
+        message(SEND_ERROR "Error: protobuf_generate called without any targets or source files")
+        return()
+    endif()
+
+    if(NOT protobuf_generate_OUT_VAR AND NOT protobuf_generate_TARGET)
+        message(SEND_ERROR "Error: protobuf_generate called without a target or output variable")
+        return()
+    endif()
+
+    if(NOT protobuf_generate_LANGUAGE)
+        set(protobuf_generate_LANGUAGE cpp)
+    endif()
+    string(TOLOWER ${protobuf_generate_LANGUAGE} protobuf_generate_LANGUAGE)
+
+    if(NOT protobuf_generate_PROTOC_OUT_DIR)
+        set(protobuf_generate_PROTOC_OUT_DIR ${CMAKE_CURRENT_BINARY_DIR})
+    endif()
+
+    if(protobuf_generate_EXPORT_MACRO AND protobuf_generate_LANGUAGE STREQUAL cpp)
+        set(_dll_export_decl "dllexport_decl=${protobuf_generate_EXPORT_MACRO}:")
+    endif()
+
+    if(NOT protobuf_generate_GENERATE_EXTENSIONS)
+        if(protobuf_generate_LANGUAGE STREQUAL cpp)
+            set(protobuf_generate_GENERATE_EXTENSIONS .pb.h .pb.cc)
+        elseif(protobuf_generate_LANGUAGE STREQUAL python)
+            set(protobuf_generate_GENERATE_EXTENSIONS _pb2.py)
+        else()
+            message(SEND_ERROR "Error: protobuf_generate given unknown Language ${LANGUAGE}, please provide a value for GENERATE_EXTENSIONS")
+            return()
+        endif()
+    endif()
+
+    if(protobuf_generate_TARGET)
+        get_target_property(_source_list ${protobuf_generate_TARGET} SOURCES)
+        foreach(_file ${_source_list})
+            if(_file MATCHES "proto$")
+                list(APPEND protobuf_generate_PROTOS ${_file})
+            endif()
+        endforeach()
+    endif()
+
+    if(NOT protobuf_generate_PROTOS)
+        message(SEND_ERROR "Error: protobuf_generate could not find any .proto files")
+        return()
+    endif()
+
+    if(protobuf_generate_APPEND_PATH)
+        # Create an include path for each file specified
+        foreach(_file ${protobuf_generate_PROTOS})
+            get_filename_component(_abs_file ${_file} ABSOLUTE)
+            get_filename_component(_abs_path ${_abs_file} PATH)
+            list(FIND _protobuf_include_path ${_abs_path} _contains_already)
+            if(${_contains_already} EQUAL -1)
+                list(APPEND _protobuf_include_path -I ${_abs_path})
+            endif()
+        endforeach()
+    else()
+        set(_protobuf_include_path -I ${CMAKE_CURRENT_SOURCE_DIR})
+    endif()
+
+    foreach(DIR ${protobuf_generate_IMPORT_DIRS})
+        get_filename_component(ABS_PATH ${DIR} ABSOLUTE)
+        list(FIND _protobuf_include_path ${ABS_PATH} _contains_already)
+        if(${_contains_already} EQUAL -1)
+            list(APPEND _protobuf_include_path -I ${ABS_PATH})
+        endif()
+    endforeach()
+
+    set(_generated_srcs_all)
+    foreach(_proto ${protobuf_generate_PROTOS})
+        get_filename_component(_abs_file ${_proto} ABSOLUTE)
+        get_filename_component(_abs_dir ${_abs_file} DIRECTORY)
+        get_filename_component(_basename ${_proto} NAME_WE)
+        file(RELATIVE_PATH _rel_dir ${CMAKE_CURRENT_SOURCE_DIR} ${_abs_dir})
+
+        set(_possible_rel_dir)
+        if (NOT protobuf_generate_APPEND_PATH)
+            set(_possible_rel_dir ${_rel_dir}/)
+        endif()
+
+        set(_generated_srcs)
+        foreach(_ext ${protobuf_generate_GENERATE_EXTENSIONS})
+            list(APPEND _generated_srcs "${protobuf_generate_PROTOC_OUT_DIR}/${_possible_rel_dir}${_basename}${_ext}")
+        endforeach()
+
+        if(protobuf_generate_DESCRIPTORS AND protobuf_generate_LANGUAGE STREQUAL cpp)
+            set(_descriptor_file "${CMAKE_CURRENT_BINARY_DIR}/${_basename}.desc")
+            set(_dll_desc_out "--descriptor_set_out=${_descriptor_file}")
+            list(APPEND _generated_srcs ${_descriptor_file})
+        endif()
+        list(APPEND _generated_srcs_all ${_generated_srcs})
+
+        add_custom_command(
+            OUTPUT ${_generated_srcs}
+            COMMAND protobuf::protoc
+            ARGS --${protobuf_generate_LANGUAGE}_out ${_dll_export_decl}${protobuf_generate_PROTOC_OUT_DIR} ${_dll_desc_out} ${_protobuf_include_path} ${_abs_file}
+            DEPENDS ${_abs_file} protobuf::protoc
+            COMMENT "Running ${protobuf_generate_LANGUAGE} protocol buffer compiler on ${_proto}"
+            VERBATIM )
+    endforeach()
+
+    set_source_files_properties(${_generated_srcs_all} PROPERTIES GENERATED TRUE)
+    if(protobuf_generate_OUT_VAR)
+        set(${protobuf_generate_OUT_VAR} ${_generated_srcs_all} PARENT_SCOPE)
+    endif()
+    if(protobuf_generate_TARGET)
+        target_sources(${protobuf_generate_TARGET} PRIVATE ${_generated_srcs_all})
+    endif()
+endfunction()
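A hypothetical TARGET-mode sketch for the protobuf_generate() helper above (target and file names are illustrative): listing a .proto as a target source lets the helper find it and attach the generated files via target_sources().

# Hypothetical usage: protobuf_generate(TARGET ...) scans the target's sources
# for *.proto, runs protoc on them, and appends the generated .pb.cc/.pb.h.
add_library(messages_proto STATIC messages.proto)
protobuf_generate(TARGET messages_proto)
target_include_directories(messages_proto PUBLIC ${CMAKE_CURRENT_BINARY_DIR})
target_link_libraries(messages_proto PUBLIC libprotobuf)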
2
contrib/replxx
vendored
@@ -1 +1 @@
-Subproject commit 94b1f568d16183214d26c7c0e9ce69a4ce407f65
+Subproject commit 8cf626c04e9a74313fb0b474cdbe2297c0f3cdc8
4
debian/changelog
vendored
@@ -1,5 +1,5 @@
-clickhouse (20.10.1.1) unstable; urgency=low
+clickhouse (20.11.1.1) unstable; urgency=low
 
   * Modified source code
 
- -- clickhouse-release <clickhouse-release@yandex-team.ru> Tue, 08 Sep 2020 17:04:39 +0300
+ -- clickhouse-release <clickhouse-release@yandex-team.ru> Sat, 10 Oct 2020 18:39:55 +0300
69
debian/clickhouse-server.init
vendored
@@ -153,82 +153,19 @@ initdb()
 
 start()
 {
-    [ -x $CLICKHOUSE_BINDIR/$PROGRAM ] || exit 0
-    local EXIT_STATUS
-    EXIT_STATUS=0
-
-    echo -n "Start $PROGRAM service: "
-
-    if is_running; then
-        echo -n "already running "
-        EXIT_STATUS=1
-    else
-        ulimit -n 262144
-        mkdir -p $CLICKHOUSE_PIDDIR
-        chown -R $CLICKHOUSE_USER:$CLICKHOUSE_GROUP $CLICKHOUSE_PIDDIR
-        initdb
-        if ! is_running; then
-            # Lock should not be held while running child process, so we release the lock. Note: obviously, there is race condition.
-            # But clickhouse-server has protection from simultaneous runs with same data directory.
-            su -s $SHELL ${CLICKHOUSE_USER} -c "$FLOCK -u 9; $CLICKHOUSE_PROGRAM_ENV exec -a \"$PROGRAM\" \"$CLICKHOUSE_BINDIR/$PROGRAM\" --daemon --pid-file=\"$CLICKHOUSE_PIDFILE\" --config-file=\"$CLICKHOUSE_CONFIG\""
-            EXIT_STATUS=$?
-            if [ $EXIT_STATUS -ne 0 ]; then
-                return $EXIT_STATUS
-            fi
-        fi
-    fi
-
-    if [ $EXIT_STATUS -eq 0 ]; then
-        attempts=0
-        while ! is_running && [ $attempts -le ${CLICKHOUSE_START_TIMEOUT:=10} ]; do
-            attempts=$(($attempts + 1))
-            sleep 1
-        done
-        if is_running; then
-            echo "DONE"
-        else
-            echo "UNKNOWN"
-        fi
-    else
-        echo "FAILED"
-    fi
-
-    return $EXIT_STATUS
+    ${CLICKHOUSE_GENERIC_PROGRAM} start --user "${CLICKHOUSE_USER}" --pid-path "${CLICKHOUSE_PIDDIR}" --config-path "${CLICKHOUSE_CONFDIR}" --binary-path "${CLICKHOUSE_BINDIR}"
 }
 
 
 stop()
 {
-    #local EXIT_STATUS
-    EXIT_STATUS=0
-
-    if [ -f $CLICKHOUSE_PIDFILE ]; then
-
-        echo -n "Stop $PROGRAM service: "
-
-        kill -TERM $(cat "$CLICKHOUSE_PIDFILE")
-
-        if ! wait_for_done ${CLICKHOUSE_STOP_TIMEOUT}; then
-            EXIT_STATUS=2
-            echo "TIMEOUT"
-        else
-            echo "DONE"
-        fi
-
-    fi
-    return $EXIT_STATUS
+    ${CLICKHOUSE_GENERIC_PROGRAM} stop --pid-path "${CLICKHOUSE_PIDDIR}"
 }
 
 
 restart()
 {
-    check_config
-    if stop; then
-        if start; then
-            return 0
-        fi
-    fi
-    return 1
+    ${CLICKHOUSE_GENERIC_PROGRAM} restart --user "${CLICKHOUSE_USER}" --pid-path "${CLICKHOUSE_PIDDIR}" --config-path "${CLICKHOUSE_CONFDIR}" --binary-path "${CLICKHOUSE_BINDIR}"
 }
 
 
104
debian/clickhouse-server.postinst
vendored
@@ -2,6 +2,7 @@
 set -e
 # set -x
 
+PROGRAM=clickhouse-server
 CLICKHOUSE_USER=${CLICKHOUSE_USER:=clickhouse}
 CLICKHOUSE_GROUP=${CLICKHOUSE_GROUP:=${CLICKHOUSE_USER}}
 # Please note that we don't support paths with whitespaces. This is rather ignorant.
@@ -12,6 +13,7 @@ CLICKHOUSE_BINDIR=${CLICKHOUSE_BINDIR:=/usr/bin}
 CLICKHOUSE_GENERIC_PROGRAM=${CLICKHOUSE_GENERIC_PROGRAM:=clickhouse}
 EXTRACT_FROM_CONFIG=${CLICKHOUSE_GENERIC_PROGRAM}-extract-from-config
 CLICKHOUSE_CONFIG=$CLICKHOUSE_CONFDIR/config.xml
+CLICKHOUSE_PIDDIR=/var/run/$PROGRAM
 
 [ -f /usr/share/debconf/confmodule ] && . /usr/share/debconf/confmodule
 [ -f /etc/default/clickhouse ] && . /etc/default/clickhouse
@@ -41,105 +43,5 @@ if [ "$1" = configure ] || [ -n "$not_deb_os" ]; then
         fi
     fi
 
-    # Make sure the administrative user exists
-    if ! getent passwd ${CLICKHOUSE_USER} > /dev/null; then
-        if [ -n "$not_deb_os" ]; then
-            useradd -r -s /bin/false --home-dir /nonexistent ${CLICKHOUSE_USER} > /dev/null
-        else
-            adduser --system --disabled-login --no-create-home --home /nonexistent \
-                --shell /bin/false --group --gecos "ClickHouse server" ${CLICKHOUSE_USER} > /dev/null
-        fi
-    fi
-
-    # if the user was created manually, make sure the group is there as well
-    if ! getent group ${CLICKHOUSE_GROUP} > /dev/null; then
-        groupadd -r ${CLICKHOUSE_GROUP} > /dev/null
-    fi
-
-    # make sure user is in the correct group
-    if ! id -Gn ${CLICKHOUSE_USER} | grep -qw ${CLICKHOUSE_USER}; then
-        usermod -a -G ${CLICKHOUSE_GROUP} ${CLICKHOUSE_USER} > /dev/null
-    fi
-
-    # check validity of user and group
-    if [ "$(id -u ${CLICKHOUSE_USER})" -eq 0 ]; then
-        echo "The ${CLICKHOUSE_USER} system user must not have uid 0 (root).
-Please fix this and reinstall this package." >&2
-        exit 1
-    fi
-
-    if [ "$(id -g ${CLICKHOUSE_GROUP})" -eq 0 ]; then
-        echo "The ${CLICKHOUSE_USER} system user must not have root as primary group.
-Please fix this and reinstall this package." >&2
-        exit 1
-    fi
-
-    if [ -x "$CLICKHOUSE_BINDIR/$EXTRACT_FROM_CONFIG" ] && [ -f "$CLICKHOUSE_CONFIG" ]; then
-        if [ -z "$SHELL" ]; then
-            SHELL="/bin/sh"
-        fi
-        CLICKHOUSE_DATADIR_FROM_CONFIG=$(su -s $SHELL ${CLICKHOUSE_USER} -c "$CLICKHOUSE_BINDIR/$EXTRACT_FROM_CONFIG --config-file=\"$CLICKHOUSE_CONFIG\" --key=path") ||:
-        echo "Path to data directory in ${CLICKHOUSE_CONFIG}: ${CLICKHOUSE_DATADIR_FROM_CONFIG}"
-    fi
-    CLICKHOUSE_DATADIR_FROM_CONFIG=${CLICKHOUSE_DATADIR_FROM_CONFIG:=$CLICKHOUSE_DATADIR}
-
-    if [ ! -d ${CLICKHOUSE_DATADIR_FROM_CONFIG} ]; then
-        mkdir -p ${CLICKHOUSE_DATADIR_FROM_CONFIG}
-        chown ${CLICKHOUSE_USER}:${CLICKHOUSE_GROUP} ${CLICKHOUSE_DATADIR_FROM_CONFIG}
-        chmod 700 ${CLICKHOUSE_DATADIR_FROM_CONFIG}
-    fi
-
-    if [ -d ${CLICKHOUSE_CONFDIR} ]; then
-        mkdir -p ${CLICKHOUSE_CONFDIR}/users.d
-        mkdir -p ${CLICKHOUSE_CONFDIR}/config.d
-        rm -fv ${CLICKHOUSE_CONFDIR}/*-preprocessed.xml ||:
-    fi
-
-    [ -e ${CLICKHOUSE_CONFDIR}/preprocessed ] || ln -s ${CLICKHOUSE_DATADIR_FROM_CONFIG}/preprocessed_configs ${CLICKHOUSE_CONFDIR}/preprocessed ||:
-
-    if [ ! -d ${CLICKHOUSE_LOGDIR} ]; then
-        mkdir -p ${CLICKHOUSE_LOGDIR}
-        chown root:${CLICKHOUSE_GROUP} ${CLICKHOUSE_LOGDIR}
-        # Allow everyone to read logs, root and clickhouse to read-write
-        chmod 775 ${CLICKHOUSE_LOGDIR}
-    fi
-
-    # Set net_admin capabilities to support introspection of "taskstats" performance metrics from the kernel
-    # and ipc_lock capabilities to allow mlock of clickhouse binary.
-
-    # 1. Check that "setcap" tool exists.
-    # 2. Check that an arbitrary program with installed capabilities can run.
-    # 3. Set the capabilities.
-
-    # The second is important for Docker and systemd-nspawn.
-    # When the container has no capabilities,
-    # but the executable file inside the container has capabilities,
-    # then attempt to run this file will end up with a cryptic "Operation not permitted" message.
-
-    TMPFILE=/tmp/test_setcap.sh
-
-    command -v setcap >/dev/null \
-        && echo > $TMPFILE && chmod a+x $TMPFILE && $TMPFILE && setcap "cap_net_admin,cap_ipc_lock,cap_sys_nice+ep" $TMPFILE && $TMPFILE && rm $TMPFILE \
-        && setcap "cap_net_admin,cap_ipc_lock,cap_sys_nice+ep" "${CLICKHOUSE_BINDIR}/${CLICKHOUSE_GENERIC_PROGRAM}" \
-        || echo "Cannot set 'net_admin' or 'ipc_lock' or 'sys_nice' capability for clickhouse binary. This is optional. Taskstats accounting will be disabled. To enable taskstats accounting you may add the required capability later manually."
-
-    # Clean old dynamic compilation results
-    if [ -d "${CLICKHOUSE_DATADIR_FROM_CONFIG}/build" ]; then
-        rm -f ${CLICKHOUSE_DATADIR_FROM_CONFIG}/build/*.cpp ${CLICKHOUSE_DATADIR_FROM_CONFIG}/build/*.so ||:
-    fi
-
-    if [ -f /usr/share/debconf/confmodule ]; then
-        db_get clickhouse-server/default-password
-        defaultpassword="$RET"
-        if [ -n "$defaultpassword" ]; then
-            echo "<yandex><users><default><password>$defaultpassword</password></default></users></yandex>" > ${CLICKHOUSE_CONFDIR}/users.d/default-password.xml
-            chown ${CLICKHOUSE_USER}:${CLICKHOUSE_GROUP} ${CLICKHOUSE_CONFDIR}/users.d/default-password.xml
-            chmod 600 ${CLICKHOUSE_CONFDIR}/users.d/default-password.xml
-        fi
-
-        # everything went well, so now let's reset the password
-        db_set clickhouse-server/default-password ""
-        # ... done with debconf here
-        db_stop
-    fi
+    ${CLICKHOUSE_GENERIC_PROGRAM} install --user "${CLICKHOUSE_USER}" --group "${CLICKHOUSE_GROUP}" --pid-path "${CLICKHOUSE_PIDDIR}" --config-path "${CLICKHOUSE_CONFDIR}" --binary-path "${CLICKHOUSE_BINDIR}" --log-path "${CLICKHOUSE_LOGDIR}" --data-path "${CLICKHOUSE_DATADIR}"
 fi
2
debian/control
vendored
@@ -62,5 +62,5 @@ Description: debugging symbols for clickhouse-common-static
 Package: clickhouse-test
 Priority: optional
 Architecture: all
-Depends: ${shlibs:Depends}, ${misc:Depends}, clickhouse-client, bash, expect, python, python-lxml, python-termcolor, python-requests, curl, perl, sudo, openssl, netcat-openbsd, telnet, brotli, bsdutils
+Depends: ${shlibs:Depends}, ${misc:Depends}, clickhouse-client, bash, expect, python3, python3-lxml, python3-termcolor, python3-requests, curl, perl, sudo, openssl, netcat-openbsd, telnet, brotli, bsdutils
 Description: ClickHouse tests
@@ -25,10 +25,10 @@ RUN apt-get update \
     ninja-build \
     perl \
     pkg-config \
-    python \
-    python-lxml \
-    python-requests \
-    python-termcolor \
+    python3 \
+    python3-lxml \
+    python3-requests \
+    python3-termcolor \
     tzdata \
     llvm-${LLVM_VERSION} \
     clang-${LLVM_VERSION} \
@@ -1,9 +1,10 @@
 #!/usr/bin/env bash
+set -e
 
 #ccache -s # uncomment to display CCache statistics
 mkdir -p /server/build_docker
 cd /server/build_docker
-cmake -G Ninja /server -DCMAKE_C_COMPILER=`which gcc-9` -DCMAKE_CXX_COMPILER=`which g++-9`
+cmake -G Ninja /server "-DCMAKE_C_COMPILER=$(command -v gcc-9)" "-DCMAKE_CXX_COMPILER=$(command -v g++-9)"
 
 # Set the number of build jobs to the half of number of virtual CPU cores (rounded up).
 # By default, ninja use all virtual CPU cores, that leads to very high memory consumption without much improvement in build time.
@@ -1,7 +1,7 @@
 FROM ubuntu:18.04
 
 ARG repository="deb https://repo.clickhouse.tech/deb/stable/ main/"
-ARG version=20.10.1.*
+ARG version=20.11.1.*
 
 RUN apt-get update \
     && apt-get install --yes --no-install-recommends \
@@ -9,7 +9,8 @@
         "name": "yandex/clickhouse-binary-builder",
         "dependent": [
             "docker/test/split_build_smoke_test",
-            "docker/test/pvs"
+            "docker/test/pvs",
+            "docker/test/codebrowser"
         ]
     },
     "docker/packager/unbundled": {
@ -133,6 +134,10 @@
|
|||||||
"name": "yandex/clickhouse-postgresql-java-client",
|
"name": "yandex/clickhouse-postgresql-java-client",
|
||||||
"dependent": []
|
"dependent": []
|
||||||
},
|
},
|
||||||
|
"docker/test/integration/kerberos_kdc": {
|
||||||
|
"name": "yandex/clickhouse-kerberos-kdc",
|
||||||
|
"dependent": []
|
||||||
|
},
|
||||||
"docker/test/base": {
|
"docker/test/base": {
|
||||||
"name": "yandex/clickhouse-test-base",
|
"name": "yandex/clickhouse-test-base",
|
||||||
"dependent": [
|
"dependent": [
|
||||||
@@ -1,7 +1,7 @@
 # docker build -t yandex/clickhouse-binary-builder .
 FROM ubuntu:20.04
 
-ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=10
+ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=11
 
 RUN apt-get update \
     && apt-get install ca-certificates lsb-release wget gnupg apt-transport-https \
@@ -11,7 +11,7 @@ RUN apt-get update \
     && echo "${LLVM_PUBKEY_HASH} /tmp/llvm-snapshot.gpg.key" | sha384sum -c \
     && apt-key add /tmp/llvm-snapshot.gpg.key \
    && export CODENAME="$(lsb_release --codename --short | tr 'A-Z' 'a-z')" \
-    && echo "deb [trusted=yes] http://apt.llvm.org/${CODENAME}/ llvm-toolchain-${CODENAME}-11 main" >> \
+    && echo "deb [trusted=yes] http://apt.llvm.org/${CODENAME}/ llvm-toolchain-${CODENAME}-${LLVM_VERSION} main" >> \
         /etc/apt/sources.list
 
 # initial packages
@@ -32,10 +32,11 @@ RUN apt-get update \
     curl \
     gcc-9 \
     g++-9 \
-    llvm-${LLVM_VERSION} \
-    clang-${LLVM_VERSION} \
-    lld-${LLVM_VERSION} \
-    clang-tidy-${LLVM_VERSION} \
+    clang-10 \
+    clang-tidy-10 \
+    lld-10 \
+    llvm-10 \
+    llvm-10-dev \
     clang-11 \
     clang-tidy-11 \
     lld-11 \
@@ -17,7 +17,10 @@ ccache --show-stats ||:
 ccache --zero-stats ||:
 ln -s /usr/lib/x86_64-linux-gnu/libOpenCL.so.1.0.0 /usr/lib/libOpenCL.so ||:
 rm -f CMakeCache.txt
-cmake --debug-trycompile --verbose=1 -DCMAKE_VERBOSE_MAKEFILE=1 -LA -DCMAKE_BUILD_TYPE=$BUILD_TYPE -DSANITIZE=$SANITIZER -DENABLE_CHECK_HEAVY_BUILDS=1 $CMAKE_FLAGS ..
+# Read cmake arguments into array (possibly empty)
+read -ra CMAKE_FLAGS <<< "${CMAKE_FLAGS:-}"
+cmake --debug-trycompile --verbose=1 -DCMAKE_VERBOSE_MAKEFILE=1 -LA "-DCMAKE_BUILD_TYPE=$BUILD_TYPE" "-DSANITIZE=$SANITIZER" -DENABLE_CHECK_HEAVY_BUILDS=1 "${CMAKE_FLAGS[@]}" ..
+# shellcheck disable=SC2086 # No quotes because I want it to expand to nothing if empty.
 ninja $NINJA_FLAGS clickhouse-bundle
 mv ./programs/clickhouse* /output
 mv ./src/unit_tests_dbms /output ||: # may not exist for some binary builds
@@ -1,7 +1,7 @@
 # docker build -t yandex/clickhouse-deb-builder .
 FROM ubuntu:20.04
 
-ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=10
+ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=11
 
 RUN apt-get update \
     && apt-get install ca-certificates lsb-release wget gnupg apt-transport-https \
@@ -11,7 +11,7 @@ RUN apt-get update \
     && echo "${LLVM_PUBKEY_HASH} /tmp/llvm-snapshot.gpg.key" | sha384sum -c \
     && apt-key add /tmp/llvm-snapshot.gpg.key \
     && export CODENAME="$(lsb_release --codename --short | tr 'A-Z' 'a-z')" \
-    && echo "deb [trusted=yes] http://apt.llvm.org/${CODENAME}/ llvm-toolchain-${CODENAME}-11 main" >> \
+    && echo "deb [trusted=yes] http://apt.llvm.org/${CODENAME}/ llvm-toolchain-${CODENAME}-${LLVM_VERSION} main" >> \
         /etc/apt/sources.list
 
 # initial packages
@ -49,15 +49,11 @@ RUN apt-get update \
|
|||||||
lld-11 \
|
lld-11 \
|
||||||
llvm-11 \
|
llvm-11 \
|
||||||
llvm-11-dev \
|
llvm-11-dev \
|
||||||
clang-${LLVM_VERSION} \
|
clang-10 \
|
||||||
clang-tidy-${LLVM_VERSION} \
|
clang-tidy-10 \
|
||||||
lld-${LLVM_VERSION} \
|
lld-10 \
|
||||||
llvm-${LLVM_VERSION} \
|
llvm-10 \
|
||||||
llvm-${LLVM_VERSION}-dev \
|
llvm-10-dev \
|
||||||
llvm-9-dev \
|
|
||||||
lld-9 \
|
|
||||||
clang-9 \
|
|
||||||
clang-tidy-9 \
|
|
||||||
ninja-build \
|
ninja-build \
|
||||||
perl \
|
perl \
|
||||||
pkg-config \
|
pkg-config \
|
||||||
|
@@ -4,16 +4,17 @@ set -x -e
 
 ccache --show-stats ||:
 ccache --zero-stats ||:
-build/release --no-pbuilder $ALIEN_PKGS | ts '%Y-%m-%d %H:%M:%S'
+read -ra ALIEN_PKGS <<< "${ALIEN_PKGS:-}"
+build/release --no-pbuilder "${ALIEN_PKGS[@]}" | ts '%Y-%m-%d %H:%M:%S'
 mv /*.deb /output
-mv *.changes /output
-mv *.buildinfo /output
+mv -- *.changes /output
+mv -- *.buildinfo /output
 mv /*.rpm /output ||: # if exists
 mv /*.tgz /output ||: # if exists
 
 if [ -n "$BINARY_OUTPUT" ] && { [ "$BINARY_OUTPUT" = "programs" ] || [ "$BINARY_OUTPUT" = "tests" ] ;}
 then
-    echo Place $BINARY_OUTPUT to output
+    echo "Place $BINARY_OUTPUT to output"
     mkdir /output/binary ||: # if exists
     mv /build/obj-*/programs/clickhouse* /output/binary
     if [ "$BINARY_OUTPUT" = "tests" ]
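The quoting changes in this hunk are worth spelling out. A minimal standalone bash sketch of why `read -ra` plus `"${arr[@]}"` beats an unquoted scalar, and what `--` buys for globs (not part of the commit; the file name `--dry-run.txt` is invented for illustration):

    #!/usr/bin/env bash
    set -euo pipefail

    # An unquoted scalar undergoes word splitting and globbing;
    # a value with spaces silently becomes several arguments.
    PKGS="foo bar"
    printf 'scalar word: <%s>\n' $PKGS            # prints two words

    # read -ra builds an array; "${arr[@]}" expands to exactly the
    # stored words, and to nothing at all when the input was empty.
    read -ra PKG_ARR <<< "${PKGS:-}"
    printf 'array word: <%s>\n' "${PKG_ARR[@]}"

    # '--' ends option parsing, so a glob match that happens to look
    # like a flag cannot be misread by mv.
    touch -- '--dry-run.txt'
    mv -- *.txt /tmp/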
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 #-*- coding: utf-8 -*-
 import subprocess
 import os
@@ -51,6 +51,7 @@ RUN apt-get update \
         protobuf-compiler \
         libprotoc-dev \
         libgrpc++-dev \
+        protobuf-compiler-grpc \
         rapidjson-dev \
         libsnappy-dev \
         libparquet-dev \
@@ -4,10 +4,11 @@ set -x -e
 
 ccache --show-stats ||:
 ccache --zero-stats ||:
-build/release --no-pbuilder $ALIEN_PKGS | ts '%Y-%m-%d %H:%M:%S'
+read -ra ALIEN_PKGS <<< "${ALIEN_PKGS:-}"
+build/release --no-pbuilder "${ALIEN_PKGS[@]}" | ts '%Y-%m-%d %H:%M:%S'
 mv /*.deb /output
-mv *.changes /output
-mv *.buildinfo /output
+mv -- *.changes /output
+mv -- *.buildinfo /output
 mv /*.rpm /output ||: # if exists
 mv /*.tgz /output ||: # if exists
 
@@ -1,7 +1,7 @@
 FROM ubuntu:20.04
 
 ARG repository="deb https://repo.clickhouse.tech/deb/stable/ main/"
-ARG version=20.10.1.*
+ARG version=20.11.1.*
 ARG gosu_ver=1.10
 
 RUN apt-get update \
@@ -1,7 +1,7 @@
 FROM ubuntu:18.04
 
 ARG repository="deb https://repo.clickhouse.tech/deb/stable/ main/"
-ARG version=20.10.1.*
+ARG version=20.11.1.*
 
 RUN apt-get update && \
     apt-get install -y apt-transport-https dirmngr && \
@@ -52,6 +52,7 @@ RUN apt-get update \
 RUN echo "TSAN_OPTIONS='verbosity=1000 halt_on_error=1 history_size=7'" >> /etc/environment; \
     echo "UBSAN_OPTIONS='print_stacktrace=1'" >> /etc/environment; \
     echo "MSAN_OPTIONS='abort_on_error=1'" >> /etc/environment; \
+    echo "LSAN_OPTIONS='suppressions=/usr/share/clickhouse-test/config/lsan_suppressions.txt'" >> /etc/environment; \
     ln -s /usr/lib/llvm-${LLVM_VERSION}/bin/llvm-symbolizer /usr/bin/llvm-symbolizer;
 # Sanitizer options for current shell (not current, but the one that will be spawned on "docker run")
 # (but w/o verbosity for TSAN, otherwise test.reference will not match)
@@ -1,33 +1,15 @@
 # docker build --network=host -t yandex/clickhouse-codebrowser .
 # docker run --volume=path_to_repo:/repo_folder --volume=path_to_result:/test_output yandex/clickhouse-codebrowser
-FROM ubuntu:18.04
+FROM yandex/clickhouse-binary-builder
 
-RUN apt-get --allow-unauthenticated update -y \
-    && env DEBIAN_FRONTEND=noninteractive \
-        apt-get --allow-unauthenticated install --yes --no-install-recommends \
-            bash \
-            sudo \
-            wget \
-            software-properties-common \
-            ca-certificates \
-            apt-transport-https \
-            build-essential \
-            gpg-agent \
-            git
-
-RUN wget -nv -O - https://apt.kitware.com/keys/kitware-archive-latest.asc | sudo apt-key add -
-RUN sudo apt-add-repository 'deb https://apt.kitware.com/ubuntu/ bionic main'
-RUN sudo echo "deb [trusted=yes] http://apt.llvm.org/bionic/ llvm-toolchain-bionic-8 main" >> /etc/apt/sources.list
-
-RUN sudo apt-get --yes --allow-unauthenticated update
-# To build woboq
-RUN sudo apt-get --yes --allow-unauthenticated install cmake clang-8 libllvm8 libclang-8-dev
+RUN apt-get update && apt-get --yes --allow-unauthenticated install clang-9 libllvm9 libclang-9-dev
 
 # repo versions doesn't work correctly with C++17
 # also we push reports to s3, so we add index.html to subfolder urls
 # https://github.com/ClickHouse-Extras/woboq_codebrowser/commit/37e15eaf377b920acb0b48dbe82471be9203f76b
 RUN git clone https://github.com/ClickHouse-Extras/woboq_codebrowser
-RUN cd woboq_codebrowser && cmake . -DCMAKE_BUILD_TYPE=Release && make -j
+RUN cd woboq_codebrowser && cmake . -DCMAKE_BUILD_TYPE=Release -DCMAKE_CXX_COMPILER=clang\+\+-9 -DCMAKE_C_COMPILER=clang-9 && make -j
 
 ENV CODEGEN=/woboq_codebrowser/generator/codebrowser_generator
 ENV CODEINDEX=/woboq_codebrowser/indexgenerator/codebrowser_indexgenerator
@@ -40,7 +22,7 @@ ENV SHA=nosha
 ENV DATA="data"
 
 CMD mkdir -p $BUILD_DIRECTORY && cd $BUILD_DIRECTORY && \
-    cmake $SOURCE_DIRECTORY -DCMAKE_CXX_COMPILER=/usr/bin/clang\+\+-8 -DCMAKE_C_COMPILER=/usr/bin/clang-8 -DCMAKE_EXPORT_COMPILE_COMMANDS=ON && \
+    cmake $SOURCE_DIRECTORY -DCMAKE_CXX_COMPILER=/usr/bin/clang\+\+-11 -DCMAKE_C_COMPILER=/usr/bin/clang-11 -DCMAKE_EXPORT_COMPILE_COMMANDS=ON && \
     mkdir -p $HTML_RESULT_DIRECTORY && \
     $CODEGEN -b $BUILD_DIRECTORY -a -o $HTML_RESULT_DIRECTORY -p ClickHouse:$SOURCE_DIRECTORY:$SHA -d $DATA && \
     cp -r $STATIC_DATA $HTML_RESULT_DIRECTORY/ &&\
@@ -11,7 +11,7 @@ RUN apt-get update \
     && echo "${LLVM_PUBKEY_HASH} /tmp/llvm-snapshot.gpg.key" | sha384sum -c \
     && apt-key add /tmp/llvm-snapshot.gpg.key \
     && export CODENAME="$(lsb_release --codename --short | tr 'A-Z' 'a-z')" \
-    && echo "deb [trusted=yes] http://apt.llvm.org/${CODENAME}/ llvm-toolchain-${CODENAME}-${LLVM_VERSION} main" >> \
+    && echo "deb [trusted=yes] https://apt.llvm.org/${CODENAME}/ llvm-toolchain-${CODENAME}-${LLVM_VERSION} main" >> \
         /etc/apt/sources.list
 
 # initial packages
@@ -52,11 +52,10 @@ RUN apt-get update \
         moreutils \
         ninja-build \
         psmisc \
-        python \
-        python-lxml \
-        python-requests \
-        python-termcolor \
-        qemu-user-static \
+        python3 \
+        python3-lxml \
+        python3-requests \
+        python3-termcolor \
         rename \
         software-properties-common \
         tzdata \
@@ -15,29 +15,65 @@ stage=${stage:-}
 # empty parameter.
 read -ra FASTTEST_CMAKE_FLAGS <<< "${FASTTEST_CMAKE_FLAGS:-}"
 
+FASTTEST_WORKSPACE=$(readlink -f "${FASTTEST_WORKSPACE:-.}")
+FASTTEST_SOURCE=$(readlink -f "${FASTTEST_SOURCE:-$FASTTEST_WORKSPACE/ch}")
+FASTTEST_BUILD=$(readlink -f "${FASTTEST_BUILD:-${BUILD:-$FASTTEST_WORKSPACE/build}}")
+FASTTEST_DATA=$(readlink -f "${FASTTEST_DATA:-$FASTTEST_WORKSPACE/db-fasttest}")
+FASTTEST_OUTPUT=$(readlink -f "${FASTTEST_OUTPUT:-$FASTTEST_WORKSPACE}")
+
-function kill_clickhouse
+# Export these variables, so that all subsequent invocations of the script
+# use them, and not try to guess them anew, which leads to weird effects.
+export FASTTEST_WORKSPACE
+export FASTTEST_SOURCE
+export FASTTEST_BUILD
+export FASTTEST_DATA
+export FASTTEST_OUT
+
+server_pid=none
+
+function stop_server
 {
+    if ! kill -0 -- "$server_pid"
+    then
+        echo "ClickHouse server pid '$server_pid' is not running"
+        return 0
+    fi
+
     for _ in {1..60}
     do
-        if ! pkill -f clickhouse-server ; then break ; fi
+        if ! pkill -f "clickhouse-server" && ! kill -- "$server_pid" ; then break ; fi
         sleep 1
     done
 
-    if pgrep -f clickhouse-server
+    if kill -0 -- "$server_pid"
     then
         pstree -apgT
         jobs
-        echo "Failed to kill the ClickHouse server $(pgrep -f clickhouse-server)"
+        echo "Failed to kill the ClickHouse server pid '$server_pid'"
         return 1
     fi
+
+    server_pid=none
 }
 
-function wait_for_server_start
+function start_server
 {
+    set -m # Spawn server in its own process groups
+    clickhouse-server --config-file="$FASTTEST_DATA/config.xml" -- --path "$FASTTEST_DATA" --user_files_path "$FASTTEST_DATA/user_files" &>> "$FASTTEST_OUTPUT/server.log" &
+    server_pid=$!
+    set +m
+
+    if [ "$server_pid" == "0" ]
+    then
+        echo "Failed to start ClickHouse server"
+        # Avoid zero PID because `kill` treats it as our process group PID.
+        server_pid="none"
+        return 1
+    fi
+
     for _ in {1..60}
     do
-        if clickhouse-client --query "select 1" || ! pgrep -f clickhouse-server
+        if clickhouse-client --query "select 1" || ! kill -0 -- "$server_pid"
         then
             break
         fi
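The refactoring above moves from pgrep/pkill by process name to tracking the exact PID the script started. A minimal standalone sketch of the same lifecycle pattern, with a `sleep` standing in for the server process:

    #!/usr/bin/env bash
    set -uo pipefail

    set -m           # give the child its own process group
    sleep 300 &      # stand-in for the real server
    server_pid=$!
    set +m

    # kill -0 sends no signal; it only checks that the PID exists
    # and that we are allowed to signal it.
    if kill -0 -- "$server_pid"; then
        echo "pid $server_pid is alive"
    fi

    kill -- "$server_pid"                # ask it to terminate
    wait "$server_pid" 2>/dev/null ||:   # reap; ignore nonzero status
    if ! kill -0 -- "$server_pid" 2>/dev/null; then
        echo "pid $server_pid is gone"
    fi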
@@ -47,20 +83,26 @@ function wait_for_server_start
     if ! clickhouse-client --query "select 1"
     then
         echo "Failed to wait until ClickHouse server starts."
+        server_pid="none"
         return 1
     fi
 
-    echo "ClickHouse server pid '$(pgrep -f clickhouse-server)' started and responded"
+    if ! kill -0 -- "$server_pid"
+    then
+        echo "Wrong clickhouse server started: PID '$server_pid' we started is not running, but '$(pgrep -f clickhouse-server)' is running"
+        server_pid="none"
+        return 1
+    fi
+
+    echo "ClickHouse server pid '$server_pid' started and responded"
 }
 
 function clone_root
 {
-    git clone https://github.com/ClickHouse/ClickHouse.git | ts '%Y-%m-%d %H:%M:%S' | tee /test_output/clone_log.txt
-    cd ClickHouse
-    CLICKHOUSE_DIR=$(pwd)
-    export CLICKHOUSE_DIR
+    git clone https://github.com/ClickHouse/ClickHouse.git -- "$FASTTEST_SOURCE" | ts '%Y-%m-%d %H:%M:%S' | tee "$FASTTEST_OUTPUT/clone_log.txt"
 
+    (
+    cd "$FASTTEST_SOURCE"
     if [ "$PULL_REQUEST_NUMBER" != "0" ]; then
         if git fetch origin "+refs/pull/$PULL_REQUEST_NUMBER/merge"; then
             git checkout FETCH_HEAD
@@ -71,22 +113,36 @@ if [ "$PULL_REQUEST_NUMBER" != "0" ]; then
             echo 'Checked out to commit'
         fi
     else
-        if [ "$COMMIT_SHA" != "" ]; then
+        if [ -v COMMIT_SHA ]; then
             git checkout "$COMMIT_SHA"
         fi
     fi
+    )
 }
 
-function run
+function clone_submodules
 {
+    (
+    cd "$FASTTEST_SOURCE"
+
     SUBMODULES_TO_UPDATE=(contrib/boost contrib/zlib-ng contrib/libxml2 contrib/poco contrib/libunwind contrib/ryu contrib/fmtlib contrib/base64 contrib/cctz contrib/libcpuid contrib/double-conversion contrib/libcxx contrib/libcxxabi contrib/libc-headers contrib/lz4 contrib/zstd contrib/fastops contrib/rapidjson contrib/re2 contrib/sparsehash-c11)
 
-    git submodule update --init --recursive "${SUBMODULES_TO_UPDATE[@]}" | ts '%Y-%m-%d %H:%M:%S' | tee /test_output/submodule_log.txt
+    git submodule sync
+    git submodule update --init --recursive "${SUBMODULES_TO_UPDATE[@]}"
+    git submodule foreach git reset --hard
+    git submodule foreach git checkout @ -f
+    git submodule foreach git clean -xfd
+    )
+}
 
-CMAKE_LIBS_CONFIG=(-DENABLE_LIBRARIES=0 -DENABLE_TESTS=0 -DENABLE_UTILS=0 -DENABLE_EMBEDDED_COMPILER=0 -DENABLE_THINLTO=0 -DUSE_UNWIND=1)
+function run_cmake
+{
+    CMAKE_LIBS_CONFIG=("-DENABLE_LIBRARIES=0" "-DENABLE_TESTS=0" "-DENABLE_UTILS=0" "-DENABLE_EMBEDDED_COMPILER=0" "-DENABLE_THINLTO=0" "-DUSE_UNWIND=1")
 
-export CCACHE_DIR=/ccache
-export CCACHE_BASEDIR=/ClickHouse
+    # TODO remove this? we don't use ccache anyway. An option would be to download it
+    # from S3 simultaneously with cloning.
+    export CCACHE_DIR="$FASTTEST_WORKSPACE/ccache"
+    export CCACHE_BASEDIR="$FASTTEST_SOURCE"
     export CCACHE_NOHASHDIR=true
     export CCACHE_COMPILERCHECK=content
     export CCACHE_MAXSIZE=15G
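Several of the new functions above wrap their bodies in `( ... )`. A small standalone bash sketch of why that matters (the directory name is made up):

    #!/usr/bin/env bash
    set -euo pipefail

    workdir=$(mktemp -d)

    do_build() {
        # The parentheses run the body in a subshell: the cd (and any
        # variable changes made inside) die with the subshell, so the
        # caller's working directory and environment stay untouched.
        (
            cd "$workdir"
            pwd     # prints the temp dir
        )
    }

    do_build
    pwd             # still the original directory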
@@ -94,93 +150,108 @@ export CCACHE_MAXSIZE=15G
     ccache --show-stats ||:
     ccache --zero-stats ||:
 
-mkdir build
-cd build
-cmake .. -DCMAKE_INSTALL_PREFIX=/usr -DCMAKE_CXX_COMPILER=clang++-10 -DCMAKE_C_COMPILER=clang-10 "${CMAKE_LIBS_CONFIG[@]}" "${FASTTEST_CMAKE_FLAGS[@]}" | ts '%Y-%m-%d %H:%M:%S' | tee /test_output/cmake_log.txt
-time ninja clickhouse-bundle | ts '%Y-%m-%d %H:%M:%S' | tee /test_output/build_log.txt
-ninja install | ts '%Y-%m-%d %H:%M:%S' | tee /test_output/install_log.txt
+    mkdir "$FASTTEST_BUILD" ||:
+
+    (
+    cd "$FASTTEST_BUILD"
+    cmake "$FASTTEST_SOURCE" -DCMAKE_CXX_COMPILER=clang++-10 -DCMAKE_C_COMPILER=clang-10 "${CMAKE_LIBS_CONFIG[@]}" "${FASTTEST_CMAKE_FLAGS[@]}" | ts '%Y-%m-%d %H:%M:%S' | tee "$FASTTEST_OUTPUT/cmake_log.txt"
+    )
+}
+
+function build
+{
+    (
+    cd "$FASTTEST_BUILD"
+    time ninja clickhouse-bundle | ts '%Y-%m-%d %H:%M:%S' | tee "$FASTTEST_OUTPUT/build_log.txt"
     ccache --show-stats ||:
+    )
+}
 
-mkdir -p /etc/clickhouse-server
-mkdir -p /etc/clickhouse-client
-mkdir -p /etc/clickhouse-server/config.d
-mkdir -p /etc/clickhouse-server/users.d
-ln -s /test_output /var/log/clickhouse-server
-cp "$CLICKHOUSE_DIR/programs/server/config.xml" /etc/clickhouse-server/
-cp "$CLICKHOUSE_DIR/programs/server/users.xml" /etc/clickhouse-server/
+function configure
+{
+    clickhouse-client --version
+    clickhouse-test --help
 
-# install tests config
-$CLICKHOUSE_DIR/tests/config/install.sh
+    mkdir -p "$FASTTEST_DATA"{,/client-config}
+    cp -a "$FASTTEST_SOURCE/programs/server/"{config,users}.xml "$FASTTEST_DATA"
+    "$FASTTEST_SOURCE/tests/config/install.sh" "$FASTTEST_DATA" "$FASTTEST_DATA/client-config"
+    cp -a "$FASTTEST_SOURCE/programs/server/config.d/log_to_console.xml" "$FASTTEST_DATA/config.d"
     # doesn't support SSL
-rm -f /etc/clickhouse-server/config.d/secure_ports.xml
+    rm -f "$FASTTEST_DATA/config.d/secure_ports.xml"
+}
+
+function run_tests
+{
+    clickhouse-server --version
+    clickhouse-test --help
 
     # Kill the server in case we are running locally and not in docker
-    kill_clickhouse
+    stop_server ||:
 
-clickhouse-server --config /etc/clickhouse-server/config.xml --daemon
-
-wait_for_server_start
+    start_server
 
     TESTS_TO_SKIP=(
-    parquet
-    avro
-    h3
-    odbc
-    mysql
-    sha256
-    _orc_
-    arrow
-    01098_temporary_and_external_tables
-    01083_expressions_in_engine_arguments
-    hdfs
-    00911_tautological_compare
-    protobuf
-    capnproto
-    java_hash
-    hashing
-    secure
-    00490_special_line_separators_and_characters_outside_of_bmp
-    00436_convert_charset
         00105_shard_collations
-    01354_order_by_tuple_collate_const
-    01292_create_user
-    01098_msgpack_format
-    00929_multi_match_edit_distance
-    00926_multimatch
-    00834_cancel_http_readonly_queries_on_client_close
-    brotli
-    parallel_alter
+        00109_shard_totals_after_having
+        00110_external_sort
         00302_http_compression
         00417_kill_query
-    01294_lazy_database_concurrent
-    01193_metadata_loading
-    base64
-    01031_mutations_interpreter_and_context
-    json
-    client
-    01305_replica_create_drop_zookeeper
-    01092_memory_profiler
-    01355_ilike
-    01281_unsucceeded_insert_select_queries_counter
-    live_view
-    limit_memory
-    memory_limit
-    memory_leak
-    00110_external_sort
+        00436_convert_charset
+        00490_special_line_separators_and_characters_outside_of_bmp
+        00652_replicated_mutations_zookeeper
         00682_empty_parts_merge
         00701_rollup
-    00109_shard_totals_after_having
-    ddl_dictionaries
+        00834_cancel_http_readonly_queries_on_client_close
+        00911_tautological_compare
+        00926_multimatch
+        00929_multi_match_edit_distance
+        01031_mutations_interpreter_and_context
+        01053_ssd_dictionary # this test mistakenly requires access to /var/lib/clickhouse -- can't run this locally, disabled
+        01083_expressions_in_engine_arguments
+        01092_memory_profiler
+        01098_msgpack_format
+        01098_temporary_and_external_tables
+        01103_check_cpu_instructions_at_startup # avoid dependency on qemu -- inconvenient when running locally
+        01193_metadata_loading
+        01238_http_memory_tracking # max_memory_usage_for_user can interfere another queries running concurrently
         01251_dict_is_in_infinite_loop
         01259_dictionary_custom_settings_ddl
         01268_dictionary_direct_layout
         01280_ssd_complex_key_dictionary
-    00652_replicated_mutations_zookeeper
-    01411_bayesian_ab_testing
-    01238_http_memory_tracking # max_memory_usage_for_user can interfere another queries running concurrently
         01281_group_by_limit_memory_tracking # max_memory_usage_for_user can interfere another queries running concurrently
+        01318_encrypt # Depends on OpenSSL
+        01318_decrypt # Depends on OpenSSL
+        01281_unsucceeded_insert_select_queries_counter
+        01292_create_user
+        01294_lazy_database_concurrent
+        01305_replica_create_drop_zookeeper
+        01354_order_by_tuple_collate_const
+        01355_ilike
+        01411_bayesian_ab_testing
+        _orc_
+        arrow
+        avro
+        base64
+        brotli
+        capnproto
+        client
+        ddl_dictionaries
+        h3
+        hashing
+        hdfs
+        java_hash
+        json
+        limit_memory
+        live_view
+        memory_leak
+        memory_limit
+        mysql
+        odbc
+        parallel_alter
+        parquet
+        protobuf
+        secure
+        sha256
 
     # Not sure why these two fail even in sequential mode. Disabled for now
     # to make some progress.
@@ -191,11 +262,10 @@ TESTS_TO_SKIP=(
     01460_DistributedFilesToInsert
 )
 
-time clickhouse-test -j 8 --no-long --testname --shard --zookeeper --skip "${TESTS_TO_SKIP[@]}" 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee /test_output/test_log.txt
+time clickhouse-test -j 8 --order=random --no-long --testname --shard --zookeeper --skip "${TESTS_TO_SKIP[@]}" 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee "$FASTTEST_OUTPUT/test_log.txt"
 
 # substr is to remove semicolon after test name
-readarray -t FAILED_TESTS < <(awk '/FAIL|TIMEOUT|ERROR/ { print substr($3, 1, length($3)-1) }' /test_output/test_log.txt | tee /test_output/failed-parallel-tests.txt)
+readarray -t FAILED_TESTS < <(awk '/FAIL|TIMEOUT|ERROR/ { print substr($3, 1, length($3)-1) }' "$FASTTEST_OUTPUT/test_log.txt" | tee "$FASTTEST_OUTPUT/failed-parallel-tests.txt")
 
 # We will rerun sequentially any tests that have failed during parallel run.
 # They might have failed because there was some interference from other tests
@@ -206,19 +276,16 @@ readarray -t FAILED_TESTS < <(awk '/FAIL|TIMEOUT|ERROR/ { print substr($3, 1, length($3)-1) }'
 # explicit instead of guessing.
 if [[ -n "${FAILED_TESTS[*]}" ]]
 then
-    kill_clickhouse
+    stop_server ||:
 
     # Clean the data so that there is no interference from the previous test run.
-    rm -rf /var/lib/clickhouse ||:
-    mkdir /var/lib/clickhouse
+    rm -rf "$FASTTEST_DATA"/{{meta,}data,user_files} ||:
 
-    clickhouse-server --config /etc/clickhouse-server/config.xml --daemon
-
-    wait_for_server_start
+    start_server
 
     echo "Going to run again: ${FAILED_TESTS[*]}"
 
-    clickhouse-test --no-long --testname --shard --zookeeper "${FAILED_TESTS[@]}" 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee -a /test_output/test_log.txt
+    clickhouse-test --order=random --no-long --testname --shard --zookeeper "${FAILED_TESTS[@]}" 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee -a "$FASTTEST_OUTPUT/test_log.txt"
 else
     echo "No failed tests"
 fi
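The harvest-and-rerun logic above is a generally useful pattern: pull failing test names out of a log with awk, collect them into a bash array, and re-execute only those. A minimal standalone sketch (the log format and `run_one` are invented for illustration):

    #!/usr/bin/env bash
    set -euo pipefail

    run_one() { echo "rerunning $1"; }

    printf '00123_example FAIL\n00456_other OK\n' > results.log

    # readarray -t builds an array, one element per line, newline stripped.
    readarray -t FAILED < <(awk '/FAIL/ { print $1 }' results.log)

    if [[ -n "${FAILED[*]:-}" ]]
    then
        echo "Going to run again: ${FAILED[*]}"
        for t in "${FAILED[@]}"; do run_one "$t"; done
    else
        echo "No failed tests"
    fi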
@@ -228,21 +295,54 @@ case "$stage" in
 "")
     ls -la
     ;&
 
 "clone_root")
     clone_root
 
     # Pass control to the script from cloned sources, unless asked otherwise.
     if ! [ -v FASTTEST_LOCAL_SCRIPT ]
     then
-        stage=run "$CLICKHOUSE_DIR/docker/test/fasttest/run.sh"
+        # 'run' stage is deprecated, used for compatibility with old scripts.
+        # Replace with 'clone_submodules' after Nov 1, 2020.
+        # cd and CLICKHOUSE_DIR are also a setup for old scripts, remove as well.
+        # In modern script we undo it by changing back into workspace dir right
+        # away, see below. Remove that as well.
+        cd "$FASTTEST_SOURCE"
+        CLICKHOUSE_DIR=$(pwd)
+        export CLICKHOUSE_DIR
+        stage=run "$FASTTEST_SOURCE/docker/test/fasttest/run.sh"
         exit $?
     fi
     ;&
 
 "run")
-    run
+    # A deprecated stage that is called by old script and equivalent to everything
+    # after cloning root, starting with cloning submodules.
     ;&
+"clone_submodules")
+    # Recover after being called from the old script that changes into source directory.
+    # See the compatibility hacks in `clone_root` stage above. Remove at the same time,
+    # after Nov 1, 2020.
+    cd "$FASTTEST_WORKSPACE"
+    clone_submodules | ts '%Y-%m-%d %H:%M:%S' | tee "$FASTTEST_OUTPUT/submodule_log.txt"
+    ;&
+"run_cmake")
+    run_cmake
+    ;&
+"build")
+    build
+    PATH="$FASTTEST_BUILD/programs:$FASTTEST_SOURCE/tests:$PATH"
+    export PATH
+    ;&
+"configure")
+    # The `install_log.txt` is also needed for compatibility with old CI task --
+    # if there is no log, it will decide that build failed.
+    configure | ts '%Y-%m-%d %H:%M:%S' | tee "$FASTTEST_OUTPUT/install_log.txt"
+    ;&
+"run_tests")
+    run_tests
+    ;;
+*)
+    echo "Unknown test stage '$stage'"
+    exit 1
 esac
 
 pstree -apgT
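The stage dispatch above hinges on bash's `;&` terminator, which falls through into the next pattern's body instead of leaving the `case`, so naming a stage runs it and everything after it. A compact standalone sketch of the idiom (stage names invented):

    #!/usr/bin/env bash
    set -euo pipefail

    stage="${1:-fetch}"

    case "$stage" in
    "fetch")
        echo "fetching"
        ;&        # fall through: continue with the next block
    "build")
        echo "building"
        ;&
    "test")
        echo "testing"
        ;;        # stop here
    *)
        echo "unknown stage '$stage'" >&2
        exit 1
    esac

Invoking this as `./stages.sh build` prints "building" and then "testing", which is exactly how the fasttest script resumes from an arbitrary stage.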
@@ -58,7 +58,7 @@ function watchdog
 
     echo "Fuzzing run has timed out"
     killall clickhouse-client ||:
-    for x in {1..10}
+    for _ in {1..10}
     do
         if ! pgrep -f clickhouse-client
         then
@@ -81,6 +81,9 @@ function fuzz
     echo Server started
 
     fuzzer_exit_code=0
+    # SC2012: Use find instead of ls to better handle non-alphanumeric filenames.
+    # They are all alphanumeric.
+    # shellcheck disable=SC2012
     ./clickhouse-client --query-fuzzer-runs=1000 \
         < <(for f in $(ls ch/tests/queries/0_stateless/*.sql | sort -R); do cat "$f"; echo ';'; done) \
         > >(tail -10000 > fuzzer.log) \
@@ -4,7 +4,7 @@ FROM yandex/clickhouse-test-base
 RUN apt-get update \
     && env DEBIAN_FRONTEND=noninteractive apt-get -y install \
         tzdata \
-        python \
+        python3 \
         libreadline-dev \
        libicu-dev \
        bsdutils \
@@ -16,7 +16,8 @@ RUN apt-get update \
         odbc-postgresql \
         sqlite3 \
         curl \
-        tar
+        tar \
+        krb5-user
 RUN rm -rf \
     /var/lib/apt/lists/* \
     /var/cache/debconf \
15
docker/test/integration/kerberos_kdc/Dockerfile
Normal file
@@ -0,0 +1,15 @@
+# docker build -t yandex/clickhouse-kerberos-kdc .
+
+FROM centos:6.6
+# old OS to make it faster and smaller
+
+RUN yum install -y krb5-server krb5-libs krb5-auth-dialog krb5-workstation
+
+EXPOSE 88 749
+
+RUN touch /config.sh
+# should be overwritten e.g. via docker_compose volumes
+# volumes: /some_path/my_kerberos_config.sh:/config.sh:ro
+
+
+ENTRYPOINT ["/bin/bash", "/config.sh"]
@@ -16,13 +16,13 @@ RUN apt-get update \
     iproute2 \
     module-init-tools \
     cgroupfs-mount \
-    python-pip \
+    python3-pip \
     tzdata \
     libreadline-dev \
     libicu-dev \
     bsdutils \
     curl \
-    python-pika \
+    python3-pika \
     liblua5.1-dev \
     luajit \
     libssl-dev \
@@ -37,7 +37,28 @@ RUN apt-get update \
 ENV TZ=Europe/Moscow
 RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
 
-RUN pip install urllib3==1.23 pytest docker-compose==1.22.0 docker dicttoxml kazoo PyMySQL psycopg2==2.7.5 pymongo tzlocal kafka-python protobuf redis aerospike pytest-timeout minio rpm-confluent-schemaregistry grpcio grpcio-tools cassandra-driver
+RUN python3 -m pip install \
+    PyMySQL \
+    aerospike \
+    avro \
+    cassandra-driver \
+    confluent-kafka \
+    dicttoxml \
+    docker \
+    docker-compose==1.22.0 \
+    grpcio \
+    grpcio-tools \
+    kafka-python \
+    kazoo \
+    minio \
+    protobuf \
+    psycopg2-binary==2.7.5 \
+    pymongo \
+    pytest \
+    pytest-timeout \
+    redis \
+    tzlocal \
+    urllib3
 
 ENV DOCKER_CHANNEL stable
 ENV DOCKER_VERSION 17.09.1-ce
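A note on the shape of the rewritten install command: `python3 -m pip` guarantees the packages land in the interpreter that will import them rather than whatever `pip` is first in PATH, and a sorted one-per-line list keeps future diffs one line long. A minimal sketch of the same shape (package set and version pin invented):

    #!/usr/bin/env bash
    set -euo pipefail

    # '-m pip' runs the pip module bound to this exact python3 binary.
    python3 -m pip install \
        requests \
        urllib3==1.26.0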
@@ -0,0 +1,59 @@
+version: '2.3'
+
+services:
+    kafka_kerberized_zookeeper:
+        image: confluentinc/cp-zookeeper:5.2.0
+        # restart: always
+        hostname: kafka_kerberized_zookeeper
+        environment:
+            ZOOKEEPER_SERVER_ID: 1
+            ZOOKEEPER_CLIENT_PORT: 2181
+            ZOOKEEPER_SERVERS: "kafka_kerberized_zookeeper:2888:3888"
+            KAFKA_OPTS: "-Djava.security.auth.login.config=/etc/kafka/secrets/zookeeper_jaas.conf -Djava.security.krb5.conf=/etc/kafka/secrets/krb.conf -Dzookeeper.authProvider.1=org.apache.zookeeper.server.auth.SASLAuthenticationProvider -Dsun.security.krb5.debug=true"
+        volumes:
+            - ${KERBERIZED_KAFKA_DIR}/secrets:/etc/kafka/secrets
+            - /dev/urandom:/dev/random
+        depends_on:
+            - kafka_kerberos
+        security_opt:
+            - label:disable
+
+    kerberized_kafka1:
+        image: confluentinc/cp-kafka:5.2.0
+        # restart: always
+        hostname: kerberized_kafka1
+        ports:
+            - "9092:9092"
+            - "9093:9093"
+        environment:
+            KAFKA_LISTENERS: OUTSIDE://:19092,UNSECURED_OUTSIDE://:19093,UNSECURED_INSIDE://:9093
+            KAFKA_ADVERTISED_LISTENERS: OUTSIDE://kerberized_kafka1:19092,UNSECURED_OUTSIDE://kerberized_kafka1:19093,UNSECURED_INSIDE://localhost:9093
+            # KAFKA_LISTENERS: INSIDE://kerberized_kafka1:9092,OUTSIDE://kerberized_kafka1:19092
+            # KAFKA_ADVERTISED_LISTENERS: INSIDE://localhost:9092,OUTSIDE://kerberized_kafka1:19092
+            KAFKA_SASL_MECHANISM_INTER_BROKER_PROTOCOL: GSSAPI
+            KAFKA_SASL_ENABLED_MECHANISMS: GSSAPI
+            KAFKA_SASL_KERBEROS_SERVICE_NAME: kafka
+            KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: OUTSIDE:SASL_PLAINTEXT,UNSECURED_OUTSIDE:PLAINTEXT,UNSECURED_INSIDE:PLAINTEXT,
+            KAFKA_INTER_BROKER_LISTENER_NAME: OUTSIDE
+            KAFKA_BROKER_ID: 1
+            KAFKA_ZOOKEEPER_CONNECT: "kafka_kerberized_zookeeper:2181"
+            KAFKA_LOG4J_LOGGERS: "kafka.controller=INFO,kafka.producer.async.DefaultEventHandler=INFO,state.change.logger=INFO"
+            KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
+            KAFKA_OPTS: "-Djava.security.auth.login.config=/etc/kafka/secrets/broker_jaas.conf -Djava.security.krb5.conf=/etc/kafka/secrets/krb.conf -Dsun.security.krb5.debug=true"
+        volumes:
+            - ${KERBERIZED_KAFKA_DIR}/secrets:/etc/kafka/secrets
+            - /dev/urandom:/dev/random
+        depends_on:
+            - kafka_kerberized_zookeeper
+            - kafka_kerberos
+        security_opt:
+            - label:disable
+
+    kafka_kerberos:
+        image: yandex/clickhouse-kerberos-kdc:${DOCKER_KERBEROS_KDC_TAG}
+        hostname: kafka_kerberos
+        volumes:
+            - ${KERBERIZED_KAFKA_DIR}/secrets:/tmp/keytab
+            - ${KERBERIZED_KAFKA_DIR}/../../kerberos_image_config.sh:/config.sh
+            - /dev/urandom:/dev/random
+        ports: [88, 749]
@@ -7,7 +7,7 @@ set +e
 reties=0
 while true; do
     docker info &>/dev/null && break
-    reties=$[$reties+1]
+    reties=$((reties+1))
     if [[ $reties -ge 100 ]]; then # 10 sec max
         echo "Can't start docker daemon, timeout exceeded." >&2
         exit 1;
@@ -27,6 +27,7 @@ export DOCKER_MYSQL_JAVA_CLIENT_TAG=${DOCKER_MYSQL_JAVA_CLIENT_TAG:=latest}
 export DOCKER_MYSQL_JS_CLIENT_TAG=${DOCKER_MYSQL_JS_CLIENT_TAG:=latest}
 export DOCKER_MYSQL_PHP_CLIENT_TAG=${DOCKER_MYSQL_PHP_CLIENT_TAG:=latest}
 export DOCKER_POSTGRESQL_JAVA_CLIENT_TAG=${DOCKER_POSTGRESQL_JAVA_CLIENT_TAG:=latest}
+export DOCKER_KERBEROS_KDC_TAG=${DOCKER_KERBEROS_KDC_TAG:=latest}
 
 cd /ClickHouse/tests/integration
 exec "$@"
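The `$[...]` form replaced above is an obsolete bash-ism; `$((...))` is the POSIX arithmetic expansion. A tiny standalone sketch of the same retry-counter idiom (the ClickHouse HTTP ping endpoint on port 8123 is used purely as an example target):

    #!/usr/bin/env bash
    set -euo pipefail

    retries=0
    until curl -s http://localhost:8123/ping >/dev/null; do
        retries=$((retries + 1))          # POSIX arithmetic expansion
        if [[ $retries -ge 100 ]]; then   # 10 s max at 0.1 s per try
            echo "timeout exceeded" >&2
            exit 1
        fi
        sleep 0.1
    done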
@@ -48,12 +48,13 @@ This table shows queries that take significantly longer to process on the client
 #### Unexpected Query Duration
 Action required for every item -- these are errors that must be fixed.
 
-Queries that have "short" duration (on the order of 0.1 s) can't be reliably tested in a normal way, where we perform a small (about ten) measurements for each server, because the signal-to-noise ratio is much smaller. There is a special mode for such queries that instead runs them for a fixed amount of time, normally with much higher number of measurements (up to thousands). This mode must be explicitly enabled by the test author to avoid accidental errors. It must be used only for queries that are meant to complete "immediately", such as `select count(*)`. If your query is not supposed to be "immediate", try to make it run longer, by e.g. processing more data.
+A query is supposed to run longer than 0.1 second. If your query runs faster, increase the amount of processed data to bring the run time above this threshold. You can use a bigger table (e.g. `hits_100m` instead of `hits_10m`), increase a `LIMIT`, make a query single-threaded, and so on. Queries that are too fast suffer from poor stability and precision.
 
-This table shows queries for which the "short" marking is not consistent with the actual query run time -- i.e., a query runs for a long time but is marked as short, or it runs very fast but is not marked as short.
+Sometimes you want to test a query that is supposed to complete "instantaneously", i.e. in sublinear time. This might be `count(*)`, or parsing a complicated tuple. It might not be practical or even possible to increase the run time of such queries by adding more data. For such queries there is a special comparison mode which runs them for a fixed amount of time, instead of a fixed number of iterations like we do normally. This mode is inferior to the normal mode, because the influence of noise and overhead is higher, which leads to less precise and stable results.
 
-If your query is really supposed to complete "immediately" and can't be made to run longer, you have to mark it as "short". To do so, write `<query short="1">...` in the test file. The value of "short" attribute is evaluated as a python expression, and substitutions are performed, so you can write something like `<query short="{column1} = {column2}">select count(*) from table where {column1} > {column2}</query>`, to mark only a particular combination of variables as short.
+If it is impossible to increase the run time of a query and it is supposed to complete "immediately", you have to explicitly mark this in the test. To do so, add a `short` attribute to the query tag in the test file: `<query short="1">...`. The value of the `short` attribute is evaluated as a python expression, and substitutions are performed, so you can write something like `<query short="{column1} = {column2}">select count(*) from table where {column1} > {column2}</query>`, to mark only a particular combination of variables as short.
+
+This table shows queries for which the `short` marking is not consistent with the actual query run time -- i.e., a query runs for a normal time but is marked as `short`, or it runs faster than normal but is not marked as `short`.
 
 #### Partial Queries
 Action required for the cells marked in red.
@@ -9,7 +9,7 @@ right_version=${2}
 
 if [ "$left_version" == "" ] || [ "$right_version" == "" ]
 then
-    >&2 echo Usage: $(basename "$0") left_version right_version
+    >&2 echo "Usage: $(basename "$0") left_version right_version"
     exit 1
 fi
 
@@ -181,6 +181,9 @@ function run_tests
     # Randomize test order.
     test_files=$(for f in $test_files; do echo "$f"; done | sort -R)
 
+    # Limit profiling time to 10 minutes, not to run for too long.
+    profile_seconds_left=600
+
     # Run the tests.
     test_name="<none>"
     for test in $test_files
@@ -194,15 +197,24 @@ function run_tests
         test_name=$(basename "$test" ".xml")
         echo test "$test_name"
 
+        # Don't profile if we're past the time limit.
+        # Use awk because bash doesn't support floating point arithmetics.
+        profile_seconds=$(awk "BEGIN { print ($profile_seconds_left > 0 ? 10 : 0) }")
+
         TIMEFORMAT=$(printf "$test_name\t%%3R\t%%3U\t%%3S\n")
         # The grep is to filter out set -x output and keep only time output.
         # The '2>&1 >/dev/null' redirects stderr to stdout, and discards stdout.
         { \
             time "$script_dir/perf.py" --host localhost localhost --port 9001 9002 \
                 --runs "$CHPC_RUNS" --max-queries "$CHPC_MAX_QUERIES" \
+                --profile-seconds "$profile_seconds" \
                 -- "$test" > "$test_name-raw.tsv" 2> "$test_name-err.log" ; \
         } 2>&1 >/dev/null | tee >(grep -v ^+ >> "wall-clock-times.tsv") \
             || echo "Test $test_name failed with error code $?" >> "$test_name-err.log"
+
+        profile_seconds_left=$(awk -F' ' \
+            'BEGIN { s = '$profile_seconds_left'; } /^profile-total/ { s -= $2 } END { print s }' \
+            "$test_name-raw.tsv")
     done
 
     unset TIMEFORMAT
@@ -294,6 +306,7 @@ for test_file in *-raw.tsv
 do
     test_name=$(basename "$test_file" "-raw.tsv")
     sed -n "s/^query\t/$test_name\t/p" < "$test_file" >> "analyze/query-runs.tsv"
+    sed -n "s/^profile\t/$test_name\t/p" < "$test_file" >> "analyze/query-profiles.tsv"
     sed -n "s/^client-time\t/$test_name\t/p" < "$test_file" >> "analyze/client-times.tsv"
     sed -n "s/^report-threshold\t/$test_name\t/p" < "$test_file" >> "analyze/report-thresholds.tsv"
     sed -n "s/^skipped\t/$test_name\t/p" < "$test_file" >> "analyze/skipped-tests.tsv"
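The budget bookkeeping above leans on awk because bash arithmetic is integer-only. A standalone sketch of both directions of that trick (the `profile-total` line format is taken from the surrounding script; the file name is invented):

    #!/usr/bin/env bash
    set -euo pipefail

    budget=600.0

    # Decide in awk, since "$budget > 0" is a float comparison.
    slice=$(awk "BEGIN { print ($budget > 0 ? 10 : 0) }")
    echo "next profiling slice: $slice s"

    # Subtract whatever a report line says was spent, e.g.
    #   profile-total<TAB>12.345
    printf 'profile-total\t12.345\n' > report.tsv
    budget=$(awk -v s="$budget" '/^profile-total/ { s -= $2 } END { print s }' report.tsv)
    echo "budget left: $budget s"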
@@ -658,13 +671,15 @@ create view test_runs as
     group by test
     ;
 
-create table test_times_report engine File(TSV, 'report/test-times.tsv') as
-    select wall_clock_time_per_test.test, real,
-        toDecimal64(total_client_time, 3),
+create view test_times_view as
+    select
+        wall_clock_time_per_test.test test,
+        real,
+        total_client_time,
         queries,
-        toDecimal64(query_max, 3),
-        toDecimal64(real / queries, 3) avg_real_per_query,
-        toDecimal64(query_min, 3),
+        query_max,
+        real / queries avg_real_per_query,
+        query_min,
         runs
     from test_time
     -- wall clock times are also measured for skipped tests, so don't
@@ -673,7 +688,43 @@ create table test_times_report engine File(TSV, 'report/test-times.tsv') as
     on wall_clock_time_per_test.test = test_time.test
     full join test_runs
     on test_runs.test = test_time.test
-    order by avg_real_per_query desc;
+    ;
+
+-- WITH TOTALS doesn't work with INSERT SELECT, so we have to jump through these
+-- hoops: https://github.com/ClickHouse/ClickHouse/issues/15227
+create view test_times_view_total as
+    select
+        'Total' test,
+        sum(real),
+        sum(total_client_time),
+        sum(queries),
+        max(query_max),
+        sum(real) / sum(queries) avg_real_per_query,
+        min(query_min),
+        -- Totaling the number of runs doesn't make sense, but use the max so
+        -- that the reporting script doesn't complain about queries being too
+        -- long.
+        max(runs)
+    from test_times_view
+    ;
+
+create table test_times_report engine File(TSV, 'report/test-times.tsv') as
+    select
+        test,
+        toDecimal64(real, 3),
+        toDecimal64(total_client_time, 3),
+        queries,
+        toDecimal64(query_max, 3),
+        toDecimal64(avg_real_per_query, 3),
+        toDecimal64(query_min, 3),
+        runs
+    from (
+        select * from test_times_view
+        union all
+        select * from test_times_view_total
+    )
+    order by test = 'Total' desc, avg_real_per_query desc
+    ;
 
 -- report for all queries page, only main metric
 create table all_tests_report engine File(TSV, 'report/all-queries.tsv') as
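The `union all` plus `order by test = 'Total' desc` combination above is how the script pins a synthetic totals row to the top of the report, since `WITH TOTALS` cannot feed an `INSERT SELECT`. A minimal sketch of the trick, assuming `clickhouse-local` is installed (data is inlined constants):

    #!/usr/bin/env bash
    set -euo pipefail

    clickhouse-local --query "
        select test, t from (
            select 'a' test, 1 t
            union all
            select 'b' test, 2 t
            union all
            select 'Total' test, 3 t
        )
        -- booleans sort as 0/1, so the 'Total' row (1) comes first with desc
        order by test = 'Total' desc, t
    "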
@@ -694,13 +745,12 @@ create table all_tests_report engine File(TSV, 'report/all-queries.tsv') as
         test, query_index, query_display_name
     from queries order by test, query_index;
 
--- queries for which we will build flamegraphs (see below)
-create table queries_for_flamegraph engine File(TSVWithNamesAndTypes,
-    'report/queries-for-flamegraph.tsv') as
-    select test, query_index from queries where unstable_show or changed_show
-    ;
-
+-- Report of queries that have inconsistent 'short' markings:
+-- 1) have short duration, but are not marked as 'short'
+-- 2) the reverse -- marked 'short' but take too long.
+-- The threshold for 2) is significantly larger than the threshold for 1), to
+-- avoid jitter.
 create view shortness
     as select
         (test, query_index) in
@@ -718,11 +768,6 @@ create view shortness
     and times.query_index = query_display_names.query_index
     ;
 
--- Report of queries that have inconsistent 'short' markings:
--- 1) have short duration, but are not marked as 'short'
--- 2) the reverse -- marked 'short' but take too long.
--- The threshold for 2) is significantly larger than the threshold for 1), to
--- avoid jitter.
 create table inconsistent_short_marking_report
     engine File(TSV, 'report/unexpected-query-duration.tsv')
     as select
@@ -759,18 +804,15 @@ create table all_query_metrics_tsv engine File(TSV, 'report/all-query-metrics.ts
 " 2> >(tee -a report/errors.log 1>&2)
 
 
-# Prepare source data for metrics and flamegraphs for unstable queries.
+# Prepare source data for metrics and flamegraphs for queries that were profiled
+# by perf.py.
 for version in {right,left}
 do
     rm -rf data
     clickhouse-local --query "
-create view queries_for_flamegraph as
-    select * from file('report/queries-for-flamegraph.tsv', TSVWithNamesAndTypes,
-        'test text, query_index int');
-
-create view query_runs as
+create view query_profiles as
     with 0 as left, 1 as right
-    select * from file('analyze/query-runs.tsv', TSV,
+    select * from file('analyze/query-profiles.tsv', TSV,
         'test text, query_index int, query_id text, version UInt8, time float')
     where version = $version
     ;
@@ -782,15 +824,12 @@ create view query_display_names as select * from
 
 create table unstable_query_runs engine File(TSVWithNamesAndTypes,
     'unstable-query-runs.$version.rep') as
-    select query_runs.test test, query_runs.query_index query_index,
+    select query_profiles.test test, query_profiles.query_index query_index,
         query_display_name, query_id
-    from query_runs
-    join queries_for_flamegraph on
-        query_runs.test = queries_for_flamegraph.test
-        and query_runs.query_index = queries_for_flamegraph.query_index
+    from query_profiles
     left join query_display_names on
-        query_runs.test = query_display_names.test
-        and query_runs.query_index = query_display_names.query_index
+        query_profiles.test = query_display_names.test
+        and query_profiles.query_index = query_display_names.query_index
     ;
 
 create view query_log as select *
@@ -10,7 +10,7 @@ mkdir left ||:
 left_pr=$1
 left_sha=$2
 
-right_pr=$3
+# right_pr=$3 not used for now
 right_sha=$4
 
 datasets=${CHPC_DATASETS:-"hits1 hits10 hits100 values"}
@@ -18,9 +18,22 @@ import xml.etree.ElementTree as et
 from threading import Thread
 from scipy import stats
 
+
+total_start_seconds = time.perf_counter()
+stage_start_seconds = total_start_seconds
+
+def reportStageEnd(stage):
+    global stage_start_seconds, total_start_seconds
+
+    current = time.perf_counter()
+    print(f'stage\t{stage}\t{current - stage_start_seconds:.3f}\t{current - total_start_seconds:.3f}')
+    stage_start_seconds = current
+
+
 def tsv_escape(s):
     return s.replace('\\', '\\\\').replace('\t', '\\t').replace('\n', '\\n').replace('\r','')
 
+
 parser = argparse.ArgumentParser(description='Run performance test.')
 # Explicitly decode files as UTF-8 because sometimes we have Russian characters in queries, and LANG=C is set.
 parser.add_argument('file', metavar='FILE', type=argparse.FileType('r', encoding='utf-8'), nargs=1, help='test description file')
@@ -29,16 +42,21 @@ parser.add_argument('--port', nargs='*', default=[9000], help="Space-separated l
 parser.add_argument('--runs', type=int, default=1, help='Number of query runs per server.')
 parser.add_argument('--max-queries', type=int, default=None, help='Test no more than this number of queries, chosen at random.')
 parser.add_argument('--queries-to-run', nargs='*', type=int, default=None, help='Space-separated list of indexes of queries to test.')
+parser.add_argument('--profile-seconds', type=int, default=0, help='For how many seconds to profile a query for which the performance has changed.')
 parser.add_argument('--long', action='store_true', help='Do not skip the tests tagged as long.')
 parser.add_argument('--print-queries', action='store_true', help='Print test queries and exit.')
 parser.add_argument('--print-settings', action='store_true', help='Print test settings and exit.')
 args = parser.parse_args()
 
+reportStageEnd('start')
+
 test_name = os.path.splitext(os.path.basename(args.file[0].name))[0]
 
 tree = et.parse(args.file[0])
 root = tree.getroot()
 
+reportStageEnd('parse')
+
 # Process query parameters
 subst_elems = root.findall('substitutions/substitution')
 available_parameters = {} # { 'table': ['hits_10m', 'hits_100m'], ... }
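perf.py's new `reportStageEnd` prints per-stage and cumulative elapsed seconds as TSV. The same instrumentation pattern in shell, for comparison (a standalone sketch; the stage names and sleeps are stand-ins for real work):

    #!/usr/bin/env bash
    set -euo pipefail

    total_start=$(date +%s.%N)
    stage_start=$total_start

    report_stage_end() {
        local now
        now=$(date +%s.%N)
        # stage name, seconds in this stage, seconds since script start
        awk -v n="$1" -v s="$stage_start" -v t="$total_start" -v c="$now" \
            'BEGIN { printf "stage\t%s\t%.3f\t%.3f\n", n, c - s, c - t }'
        stage_start=$now
    }

    sleep 0.2; report_stage_end 'parse'
    sleep 0.1; report_stage_end 'connect'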
@ -112,15 +130,21 @@ if not args.long:
     sys.exit(0)

 # Print report threshold for the test if it is set.
+ignored_relative_change = 0.05
 if 'max_ignored_relative_change' in root.attrib:
-    print(f'report-threshold\t{root.attrib["max_ignored_relative_change"]}')
+    ignored_relative_change = float(root.attrib["max_ignored_relative_change"])
+    print(f'report-threshold\t{ignored_relative_change}')

+reportStageEnd('before-connect')

 # Open connections
-servers = [{'host': host, 'port': port} for (host, port) in zip(args.host, args.port)]
+servers = [{'host': host or args.host[0], 'port': port or args.port[0]} for (host, port) in itertools.zip_longest(args.host, args.port)]
 all_connections = [clickhouse_driver.Client(**server) for server in servers]

-for s in servers:
-    print('server\t{}\t{}'.format(s['host'], s['port']))
+for i, s in enumerate(servers):
+    print(f'server\t{i}\t{s["host"]}\t{s["port"]}')

+reportStageEnd('connect')

 # Run drop queries, ignoring errors. Do this before all other activity, because
 # clickhouse_driver disconnects on error (this is not configurable), and the new
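The switch from `zip` to `itertools.zip_longest` matters when different numbers of `--host` and `--port` arguments are given: `zip` silently truncates to the shorter list, while `zip_longest` pads the missing values with `None`, which the `or` fallback then replaces with the first host or port. A small illustration (the host and port values are made up):

```python
import itertools

hosts = ['localhost', 'localhost']
ports = [9001]  # one port fewer than hosts

# zip(hosts, ports) would yield a single pair and drop the second host;
# zip_longest pads the missing port with None, and `or` substitutes
# the first port in the list.
servers = [{'host': host or hosts[0], 'port': port or ports[0]}
           for (host, port) in itertools.zip_longest(hosts, ports)]

print(servers)
# [{'host': 'localhost', 'port': 9001}, {'host': 'localhost', 'port': 9001}]
```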
@ -135,6 +159,8 @@ for conn_index, c in enumerate(all_connections):
     except:
         pass

+reportStageEnd('drop-1')

 # Apply settings.
 # If there are errors, report them and continue -- maybe a new test uses a setting
 # that is not in master, but the queries can still run. If we have multiple

@ -152,6 +178,8 @@ for conn_index, c in enumerate(all_connections):
     except:
         print(traceback.format_exc(), file=sys.stderr)

+reportStageEnd('settings')

 # Check tables that should exist. If they don't exist, just skip this test.
 tables = [e.text for e in root.findall('preconditions/table_exists')]
 for t in tables:

@ -164,6 +192,8 @@ for t in tables:
         print(f'skipped\t{tsv_escape(skipped_message)}')
         sys.exit(0)

+reportStageEnd('preconditions')

 # Run create and fill queries. We will run them simultaneously for both servers,
 # to save time.
 # The weird search is to keep the relative order of elements, which matters, and

@ -194,6 +224,9 @@ for t in threads:
 for t in threads:
     t.join()

+reportStageEnd('create')

+# By default, test all queries.
 queries_to_run = range(0, len(test_queries))

 if args.max_queries:

@ -205,6 +238,7 @@ if args.queries_to_run:
     queries_to_run = args.queries_to_run

 # Run test queries.
+profile_total_seconds = 0
 for query_index in queries_to_run:
     q = test_queries[query_index]
     query_prefix = f'{test_name}.query{query_index}'
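The body of the `if args.max_queries:` branch is elided by the hunk; given the option's help text ("chosen at random"), a plausible sketch of the selection, using `random.sample` and names matching the surrounding code, could be:

```python
import random

test_queries = ['SELECT 1', 'SELECT 2', 'SELECT 3', 'SELECT 4']  # stand-in data
max_queries = 2

# Pick a random subset of query indexes, capped at --max-queries.
queries_to_run = random.sample(range(len(test_queries)),
                               min(len(test_queries), max_queries))
print(sorted(queries_to_run))
```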
@ -324,34 +358,49 @@ for query_index in queries_to_run:
     client_seconds = time.perf_counter() - start_seconds
     print(f'client-time\t{query_index}\t{client_seconds}\t{server_seconds}')

-    #print(all_server_times)
-    #print(stats.ttest_ind(all_server_times[0], all_server_times[1], equal_var = False).pvalue)
-
     # Run additional profiling queries to collect profile data, but only if test times appeared to be different.
     # We have to do it after normal runs because otherwise it will affect test statistics too much
-    if len(all_server_times) == 2 and stats.ttest_ind(all_server_times[0], all_server_times[1], equal_var = False).pvalue < 0.1:
-        run = 0
-        while True:
-            run_id = f'{query_prefix}.profile{run}'
-
-            for conn_index, c in enumerate(this_query_connections):
-                try:
-                    res = c.execute(q, query_id = run_id, settings = {'query_profiler_real_time_period_ns': 10000000})
-                    print(f'profile\t{query_index}\t{run_id}\t{conn_index}\t{c.last_query.elapsed}')
-                except Exception as e:
-                    # Add query id to the exception to make debugging easier.
-                    e.args = (run_id, *e.args)
-                    e.message = run_id + ': ' + e.message
-                    raise
-
-                elapsed = c.last_query.elapsed
-                profile_seconds += elapsed
-
-            run += 1
-            # Don't spend too much time for profile runs
-            if run > args.runs or profile_seconds > 10:
-                break
-            # And don't bother with short queries
+    if len(all_server_times) != 2:
+        continue
+
+    if len(all_server_times[0]) < 3:
+        # Don't fail if for some reason there are not enough measurements.
+        continue
+
+    pvalue = stats.ttest_ind(all_server_times[0], all_server_times[1], equal_var = False).pvalue
+    median = [statistics.median(t) for t in all_server_times]
+    # Keep this consistent with the value used in report. Should eventually move
+    # to (median[1] - median[0]) / min(median), which is compatible with "times"
+    # difference we use in report (max(median) / min(median)).
+    relative_diff = (median[1] - median[0]) / median[0]
+    print(f'diff\t{query_index}\t{median[0]}\t{median[1]}\t{relative_diff}\t{pvalue}')
+    if abs(relative_diff) < ignored_relative_change or pvalue > 0.05:
+        continue
+
+    # Perform profile runs for fixed amount of time. Don't limit the number
+    # of runs, because we also have short queries.
+    profile_start_seconds = time.perf_counter()
+    run = 0
+    while time.perf_counter() - profile_start_seconds < args.profile_seconds:
+        run_id = f'{query_prefix}.profile{run}'
+
+        for conn_index, c in enumerate(this_query_connections):
+            try:
+                res = c.execute(q, query_id = run_id, settings = {'query_profiler_real_time_period_ns': 10000000})
+                print(f'profile\t{query_index}\t{run_id}\t{conn_index}\t{c.last_query.elapsed}')
+            except Exception as e:
+                # Add query id to the exception to make debugging easier.
+                e.args = (run_id, *e.args)
+                e.message = run_id + ': ' + e.message
+                raise
+
+        run += 1
+
+    profile_total_seconds += time.perf_counter() - profile_start_seconds
+
+print(f'profile-total\t{profile_total_seconds}')

+reportStageEnd('run')

 # Run drop queries
 drop_queries = substitute_parameters(drop_query_templates)

@ -359,3 +408,5 @@ for conn_index, c in enumerate(all_connections):
     for q in drop_queries:
         c.execute(q)
         print(f'drop\t{conn_index}\t{c.last_query.elapsed}\t{tsv_escape(q)}')

+reportStageEnd('drop-2')
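The rewritten profiling gate combines two tests before spending `--profile-seconds` on a query: Welch's t-test (`equal_var=False`) must report a p-value at or below 0.05, and the relative difference of the per-server median run times must reach the report threshold (0.05 by default, overridable via `max_ignored_relative_change`). A standalone sketch of that decision, with invented timings:

```python
import statistics
from scipy import stats

old_times = [0.102, 0.098, 0.101, 0.100]  # server 0, invented
new_times = [0.121, 0.119, 0.125, 0.122]  # server 1, invented
ignored_relative_change = 0.05

pvalue = stats.ttest_ind(old_times, new_times, equal_var=False).pvalue
median = [statistics.median(old_times), statistics.median(new_times)]
relative_diff = (median[1] - median[0]) / median[0]

# Profile only changes that are both large enough and statistically significant.
if abs(relative_diff) >= ignored_relative_change and pvalue <= 0.05:
    print(f'performance changed: {relative_diff:+.1%} (p = {pvalue:.3g})')
```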
@ -312,7 +312,7 @@ def add_errors_explained():


 if args.report == 'main':
-    print(header_template.format())
+    print((header_template.format()))

     add_tested_commits()

@ -468,14 +468,14 @@ if args.report == 'main':
         return

     columns = [
         'Test', #0
-        'Wall clock time, s', #1
-        'Total client time, s', #2
-        'Total queries', #3
-        'Longest query<br>(sum for all runs), s', #4
-        'Avg wall clock time<br>(sum for all runs), s', #5
-        'Shortest query<br>(sum for all runs), s', #6
+        'Wall clock time, entire test, s', #1
+        'Total client time for measured query runs, s', #2
+        'Queries', #3
+        'Longest query, total for measured runs, s', #4
+        'Wall clock time per query, s', #5
+        'Shortest query, total for measured runs, s', #6
         '', # Runs #7
     ]
     attrs = ['' for c in columns]
     attrs[7] = None

@ -487,7 +487,7 @@ if args.report == 'main':
     for r in rows:
         anchor = f'{currentTableAnchor()}.{r[0]}'
         total_runs = (int(r[7]) + 1) * 2 # one prewarm run, two servers
-        if float(r[5]) > allowed_average_run_time * total_runs:
+        if r[0] != 'Total' and float(r[5]) > allowed_average_run_time * total_runs:
             # FIXME should be 15s max -- investigate parallel_insert
             slow_average_tests += 1
             attrs[5] = f'style="background: {color_bad}"'

@ -495,7 +495,7 @@ if args.report == 'main':
         else:
             attrs[5] = ''

-        if float(r[4]) > allowed_single_run_time * total_runs:
+        if r[0] != 'Total' and float(r[4]) > allowed_single_run_time * total_runs:
             slow_average_tests += 1
             attrs[4] = f'style="background: {color_bad}"'
             errors_explained.append([f'<a href="./all-queries.html#all-query-times.{r[0]}.0">Some query of the test \'{r[0]}\' is too slow to run. See the all queries report'])

@ -571,14 +571,14 @@ if args.report == 'main':
         status = 'failure'
         message = 'Errors while building the report.'

-    print("""
+    print(("""
     <!--status: {status}-->
     <!--message: {message}-->
-    """.format(status=status, message=message))
+    """.format(status=status, message=message)))

 elif args.report == 'all-queries':

-    print(header_template.format())
+    print((header_template.format()))

     add_tested_commits()
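The new `r[0] != 'Total'` guards keep the summary row out of the slow-test checks: 'Total' aggregates every test, so comparing its time against a per-test budget would flag it unconditionally. A toy illustration (the row layout below just mirrors the column comments above and is not the real report data):

```python
# r[0] is the test name, r[5] the wall clock time per query, r[7] the run count.
rows = [
    ['hits_simple', '-', '-', '-', '0.9', '1.1', '0.2', '7'],
    ['Total',       '-', '-', '-', '40',  '55',  '3',   '7'],
]
allowed_average_run_time = 1.6  # invented budget, seconds

for r in rows:
    total_runs = (int(r[7]) + 1) * 2  # one prewarm run, two servers
    # Without the name check, the aggregate 'Total' row would always trip this.
    if r[0] != 'Total' and float(r[5]) > allowed_average_run_time * total_runs:
        print(f'slow test: {r[0]}')
```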
@ -4,7 +4,7 @@ FROM yandex/clickhouse-stateless-test
 RUN apt-get update -y \
     && env DEBIAN_FRONTEND=noninteractive \
         apt-get install --yes --no-install-recommends \
-            python-requests \
+            python3-requests \
             llvm-9

 COPY s3downloader /s3downloader
@ -26,11 +26,12 @@ function start()
     fi
     timeout 120 service clickhouse-server start
     sleep 0.5
-    counter=$(($counter + 1))
+    counter=$((counter + 1))
 done
 }

 start
+# shellcheck disable=SC2086 # No quotes because I want to split it into words.
 /s3downloader --dataset-names $DATASETS
 chmod 777 -R /var/lib/clickhouse
 clickhouse-client --query "SHOW DATABASES"

@ -43,8 +44,12 @@ clickhouse-client --query "RENAME TABLE datasets.hits_v1 TO test.hits"
 clickhouse-client --query "RENAME TABLE datasets.visits_v1 TO test.visits"
 clickhouse-client --query "SHOW TABLES FROM test"

-if cat /usr/bin/clickhouse-test | grep -q -- "--use-skip-list"; then
+if grep -q -- "--use-skip-list" /usr/bin/clickhouse-test ; then
     SKIP_LIST_OPT="--use-skip-list"
 fi

-clickhouse-test --testname --shard --zookeeper --no-stateless "$SKIP_LIST_OPT" $ADDITIONAL_OPTIONS $SKIP_TESTS_OPTION 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee test_output/test_result.txt
+# We can have several additional options, so we pass them as an array because it's
+# more ideologically correct.
+read -ra ADDITIONAL_OPTIONS <<< "${ADDITIONAL_OPTIONS:-}"
+
+clickhouse-test --testname --shard --zookeeper --no-stateless --hung-check --print-time "$SKIP_LIST_OPT" "${ADDITIONAL_OPTIONS[@]}" "$SKIP_TESTS_OPTION" 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee test_output/test_result.txt
@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 # -*- coding: utf-8 -*-
 import os
 import sys

@ -29,7 +29,7 @@ def dowload_with_progress(url, path):
     logging.info("Downloading from %s to temp path %s", url, path)
     for i in range(RETRIES_COUNT):
         try:
-            with open(path, 'w') as f:
+            with open(path, 'wb') as f:
                 response = requests.get(url, stream=True)
                 response.raise_for_status()
                 total_length = response.headers.get('content-length')

@ -74,7 +74,7 @@ if __name__ == "__main__":
     parser = argparse.ArgumentParser(
         description="Simple tool for downloading datasets for clickhouse from S3")

-    parser.add_argument('--dataset-names', required=True, nargs='+', choices=AVAILABLE_DATASETS.keys())
+    parser.add_argument('--dataset-names', required=True, nargs='+', choices=list(AVAILABLE_DATASETS.keys()))
     parser.add_argument('--url-prefix', default=DEFAULT_URL)
     parser.add_argument('--clickhouse-data-path', default='/var/lib/clickhouse/')
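The `'w'` to `'wb'` change is the substantive Python 3 fix in this file: `requests` yields response chunks as `bytes`, and a file opened in text mode would raise `TypeError` on the first write. A condensed sketch of the streaming download around that line (the helper name and chunk size are illustrative):

```python
import requests

def download(url, path):
    # Open the target in binary mode; iter_content() yields bytes chunks.
    with open(path, 'wb') as f:
        response = requests.get(url, stream=True)
        response.raise_for_status()
        for chunk in response.iter_content(chunk_size=1 << 20):
            if chunk:
                f.write(chunk)
```

The `choices=list(AVAILABLE_DATASETS.keys())` change appears to be a mechanical futurize-style rewrite: `dict.keys()` returns a view in Python 3, and `list()` simply materializes it.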
@ -6,7 +6,7 @@ RUN echo "deb [trusted=yes] http://apt.llvm.org/bionic/ llvm-toolchain-bionic-9
 RUN apt-get update -y \
     && env DEBIAN_FRONTEND=noninteractive \
         apt-get install --yes --no-install-recommends \
-            python-requests
+            python3-requests

 COPY s3downloader /s3downloader
 COPY run.sh /run.sh
@ -1,15 +1,15 @@
 #!/bin/bash

 kill_clickhouse () {
-    kill `pgrep -u clickhouse` 2>/dev/null
+    kill "$(pgrep -u clickhouse)" 2>/dev/null

-    for i in {1..10}
+    for _ in {1..10}
     do
-        if ! kill -0 `pgrep -u clickhouse`; then
+        if ! kill -0 "$(pgrep -u clickhouse)"; then
             echo "No clickhouse process"
             break
         else
-            echo "Process" `pgrep -u clickhouse` "still alive"
+            echo "Process $(pgrep -u clickhouse) still alive"
             sleep 10
         fi
     done

@ -20,19 +20,19 @@ start_clickhouse () {
 }

 wait_llvm_profdata () {
-    while kill -0 `pgrep llvm-profdata-10`;
+    while kill -0 "$(pgrep llvm-profdata-10)"
     do
-        echo "Waiting for profdata" `pgrep llvm-profdata-10` "still alive"
+        echo "Waiting for profdata $(pgrep llvm-profdata-10) still alive"
         sleep 3
     done
 }

 merge_client_files_in_background () {
-    client_files=`ls /client_*profraw 2>/dev/null`
-    if [ ! -z "$client_files" ]
+    client_files=$(ls /client_*profraw 2>/dev/null)
+    if [ -n "$client_files" ]
     then
-        llvm-profdata-10 merge -sparse $client_files -o merged_client_`date +%s`.profraw
-        rm $client_files
+        llvm-profdata-10 merge -sparse "$client_files" -o "merged_client_$(date +%s).profraw"
+        rm "$client_files"
     fi
 }
@ -66,12 +66,13 @@ function start()
|
|||||||
fi
|
fi
|
||||||
timeout 120 service clickhouse-server start
|
timeout 120 service clickhouse-server start
|
||||||
sleep 0.5
|
sleep 0.5
|
||||||
counter=$(($counter + 1))
|
counter=$((counter + 1))
|
||||||
done
|
done
|
||||||
}
|
}
|
||||||
|
|
||||||
start
|
start
|
||||||
|
|
||||||
|
# shellcheck disable=SC2086 # No quotes because I want to split it into words.
|
||||||
if ! /s3downloader --dataset-names $DATASETS; then
|
if ! /s3downloader --dataset-names $DATASETS; then
|
||||||
echo "Cannot download datatsets"
|
echo "Cannot download datatsets"
|
||||||
exit 1
|
exit 1
|
||||||
@ -100,11 +101,15 @@ LLVM_PROFILE_FILE='client_%h_%p_%m.profraw' clickhouse-client --query "RENAME TA
|
|||||||
LLVM_PROFILE_FILE='client_%h_%p_%m.profraw' clickhouse-client --query "RENAME TABLE datasets.visits_v1 TO test.visits"
|
LLVM_PROFILE_FILE='client_%h_%p_%m.profraw' clickhouse-client --query "RENAME TABLE datasets.visits_v1 TO test.visits"
|
||||||
LLVM_PROFILE_FILE='client_%h_%p_%m.profraw' clickhouse-client --query "SHOW TABLES FROM test"
|
LLVM_PROFILE_FILE='client_%h_%p_%m.profraw' clickhouse-client --query "SHOW TABLES FROM test"
|
||||||
|
|
||||||
if cat /usr/bin/clickhouse-test | grep -q -- "--use-skip-list"; then
|
if grep -q -- "--use-skip-list" /usr/bin/clickhouse-test; then
|
||||||
SKIP_LIST_OPT="--use-skip-list"
|
SKIP_LIST_OPT="--use-skip-list"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
LLVM_PROFILE_FILE='client_%h_%p_%m.profraw' clickhouse-test --testname --shard --zookeeper --no-stateless "$SKIP_LIST_OPT" $ADDITIONAL_OPTIONS $SKIP_TESTS_OPTION 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee test_output/test_result.txt
|
# We can have several additional options so we path them as array because it's
|
||||||
|
# more idiologically correct.
|
||||||
|
read -ra ADDITIONAL_OPTIONS <<< "${ADDITIONAL_OPTIONS:-}"
|
||||||
|
|
||||||
|
LLVM_PROFILE_FILE='client_%h_%p_%m.profraw' clickhouse-test --testname --shard --zookeeper --no-stateless --hung-check --print-time "$SKIP_LIST_OPT" "${ADDITIONAL_OPTIONS[@]}" "$SKIP_TESTS_OPTION" 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee test_output/test_result.txt
|
||||||
|
|
||||||
kill_clickhouse
|
kill_clickhouse
|
||||||
|
|
||||||
@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 # -*- coding: utf-8 -*-
 import os
 import sys

@ -74,7 +74,7 @@ if __name__ == "__main__":
     parser = argparse.ArgumentParser(
         description="Simple tool for downloading datasets for clickhouse from S3")

-    parser.add_argument('--dataset-names', required=True, nargs='+', choices=AVAILABLE_DATASETS.keys())
+    parser.add_argument('--dataset-names', required=True, nargs='+', choices=list(AVAILABLE_DATASETS.keys()))
     parser.add_argument('--url-prefix', default=DEFAULT_URL)
     parser.add_argument('--clickhouse-data-path', default='/var/lib/clickhouse/')
@ -12,10 +12,10 @@ RUN apt-get update -y \
             ncdu \
             netcat-openbsd \
             openssl \
-            python \
-            python-lxml \
-            python-requests \
-            python-termcolor \
+            python3 \
+            python3-lxml \
+            python3-requests \
+            python3-termcolor \
             qemu-user-static \
             sudo \
             telnet \
@ -13,14 +13,17 @@ dpkg -i package_folder/clickhouse-test_*.deb

 service clickhouse-server start && sleep 5

-if cat /usr/bin/clickhouse-test | grep -q -- "--use-skip-list"; then
+if grep -q -- "--use-skip-list" /usr/bin/clickhouse-test; then
     SKIP_LIST_OPT="--use-skip-list"
 fi

+# We can have several additional options, so we pass them as an array because it's
+# more ideologically correct.
+read -ra ADDITIONAL_OPTIONS <<< "${ADDITIONAL_OPTIONS:-}"

 function run_tests()
 {
     for i in $(seq 1 $NUM_TRIES); do
-        clickhouse-test --testname --shard --zookeeper "$SKIP_LIST_OPT" $ADDITIONAL_OPTIONS $SKIP_TESTS_OPTION 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee -a test_output/test_result.txt
+        clickhouse-test --testname --shard --zookeeper --hung-check --print-time "$SKIP_LIST_OPT" "${ADDITIONAL_OPTIONS[@]}" "$SKIP_TESTS_OPTION" 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee test_output/test_result.txt
     done
 }
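The `run_tests` loop executes the whole suite `$NUM_TRIES` times, which is the core of a flaky-test check: a deterministic test produces the same verdict on every try. The idea, sketched in Python with a placeholder command and trivial verdict handling:

```python
import subprocess

NUM_TRIES = 3
verdicts = []

for i in range(NUM_TRIES):
    # The command and the success criterion are placeholders for illustration.
    proc = subprocess.run(['clickhouse-test', '--testname'],
                          capture_output=True, text=True)
    verdicts.append(proc.returncode == 0)

if len(set(verdicts)) > 1:
    print('flaky: identical runs disagreed:', verdicts)
```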
@ -3,10 +3,10 @@ FROM yandex/clickhouse-test-base

 RUN apt-get update -y && \
     apt-get install -y --no-install-recommends \
-        python-pip \
-        python-setuptools
+        python3-pip \
+        python3-setuptools

-RUN pip install \
+RUN python3 -m pip install \
     pytest \
     pytest-html \
     pytest-timeout \

@ -17,4 +17,4 @@ CMD dpkg -i package_folder/clickhouse-common-static_*.deb; \
     dpkg -i package_folder/clickhouse-server_*.deb; \
     dpkg -i package_folder/clickhouse-client_*.deb; \
     dpkg -i package_folder/clickhouse-test_*.deb; \
-    python -m pytest /usr/share/clickhouse-test/queries -n $(nproc) --html=test_output/report.html --self-contained-html
+    python3 -m pytest /usr/share/clickhouse-test/queries -n $(nproc) --html=test_output/report.html --self-contained-html
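`-n $(nproc)` is the worker-count option of the pytest-xdist plugin (presumably among the packages installed above; the install list is truncated in this hunk), which shards the collected tests across one worker per CPU. A self-contained file to try it with, under that assumption:

```python
# test_example.py -- run as: python3 -m pytest -n 4 test_example.py
import pytest

@pytest.mark.parametrize('x', range(8))
def test_square_is_non_negative(x):
    # Eight trivial cases that pytest-xdist can spread across workers.
    assert x * x >= 0
```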
@ -54,10 +54,10 @@ RUN apt-get --allow-unauthenticated update -y \
             perl \
             pigz \
             pkg-config \
-            python \
-            python-lxml \
-            python-requests \
-            python-termcolor \
+            python3 \
+            python3-lxml \
+            python3-requests \
+            python3-termcolor \
             qemu-user-static \
             sudo \
             telnet \
@ -13,8 +13,8 @@ dpkg -i package_folder/clickhouse-test_*.deb

 service clickhouse-server start && sleep 5

-if cat /usr/bin/clickhouse-test | grep -q -- "--use-skip-list"; then
+if grep -q -- "--use-skip-list" /usr/bin/clickhouse-test; then
     SKIP_LIST_OPT="--use-skip-list"
 fi

-clickhouse-test --testname --shard --zookeeper "$SKIP_LIST_OPT" $ADDITIONAL_OPTIONS $SKIP_TESTS_OPTION 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee test_output/test_result.txt
+clickhouse-test --testname --shard --zookeeper "$SKIP_LIST_OPT" "$ADDITIONAL_OPTIONS" "$SKIP_TESTS_OPTION" 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee test_output/test_result.txt
@ -12,10 +12,10 @@ RUN apt-get update -y \
             fakeroot \
             debhelper \
             expect \
-            python \
-            python-lxml \
-            python-termcolor \
-            python-requests \
+            python3 \
+            python3-lxml \
+            python3-termcolor \
+            python3-requests \
             sudo \
             openssl \
             ncdu \
@ -1,24 +1,24 @@
 #!/bin/bash

 kill_clickhouse () {
-    echo "clickhouse pids" `ps aux | grep clickhouse` | ts '%Y-%m-%d %H:%M:%S'
-    kill `pgrep -u clickhouse` 2>/dev/null
+    echo "clickhouse pids $(pgrep -u clickhouse)" | ts '%Y-%m-%d %H:%M:%S'
+    kill "$(pgrep -u clickhouse)" 2>/dev/null

-    for i in {1..10}
+    for _ in {1..10}
     do
-        if ! kill -0 `pgrep -u clickhouse`; then
+        if ! kill -0 "$(pgrep -u clickhouse)"; then
             echo "No clickhouse process" | ts '%Y-%m-%d %H:%M:%S'
             break
         else
-            echo "Process" `pgrep -u clickhouse` "still alive" | ts '%Y-%m-%d %H:%M:%S'
+            echo "Process $(pgrep -u clickhouse) still alive" | ts '%Y-%m-%d %H:%M:%S'
             sleep 10
         fi
     done

     echo "Will try to send second kill signal for sure"
-    kill `pgrep -u clickhouse` 2>/dev/null
+    kill "$(pgrep -u clickhouse)" 2>/dev/null
     sleep 5
-    echo "clickhouse pids" `ps aux | grep clickhouse` | ts '%Y-%m-%d %H:%M:%S'
+    echo "clickhouse pids $(pgrep -u clickhouse)" | ts '%Y-%m-%d %H:%M:%S'
 }

 start_clickhouse () {

@ -47,11 +47,15 @@ start_clickhouse
 sleep 10

-if cat /usr/bin/clickhouse-test | grep -q -- "--use-skip-list"; then
+if grep -q -- "--use-skip-list" /usr/bin/clickhouse-test; then
     SKIP_LIST_OPT="--use-skip-list"
 fi

-LLVM_PROFILE_FILE='client_coverage.profraw' clickhouse-test --testname --shard --zookeeper "$SKIP_LIST_OPT" $ADDITIONAL_OPTIONS $SKIP_TESTS_OPTION 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee test_output/test_result.txt
+# We can have several additional options, so we pass them as an array because it's
+# more ideologically correct.
+read -ra ADDITIONAL_OPTIONS <<< "${ADDITIONAL_OPTIONS:-}"
+
+LLVM_PROFILE_FILE='client_coverage.profraw' clickhouse-test --testname --shard --zookeeper --hung-check --print-time "$SKIP_LIST_OPT" "${ADDITIONAL_OPTIONS[@]}" "$SKIP_TESTS_OPTION" 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee test_output/test_result.txt

 kill_clickhouse
@ -10,10 +10,10 @@ RUN apt-get update -y \
             debhelper \
             parallel \
             expect \
-            python \
-            python-lxml \
-            python-termcolor \
-            python-requests \
+            python3 \
+            python3-lxml \
+            python3-termcolor \
+            python3-requests \
             curl \
             sudo \
             openssl \
@ -13,7 +13,7 @@ function stop()
     timeout 120 service clickhouse-server stop

     # Wait for process to disappear from processlist and also try to kill zombies.
-    while kill -9 $(pidof clickhouse-server)
+    while kill -9 "$(pidof clickhouse-server)"
     do
         echo "Killed clickhouse-server"
         sleep 0.5

@ -35,7 +35,7 @@ function start()
     fi
     timeout 120 service clickhouse-server start
     sleep 0.5
-    counter=$(($counter + 1))
+    counter=$((counter + 1))
 done
 }

@ -45,10 +45,11 @@ function start()
 # for clickhouse-server (via service)
 echo "ASAN_OPTIONS='malloc_context_size=10 verbosity=1 allocator_release_to_os_interval_ms=10000'" >> /etc/environment
 # for clickhouse-client
-export ASAN_OPTIONS='malloc_context_size=10 verbosity=1 allocator_release_to_os_interval_ms=10000'
+export ASAN_OPTIONS='malloc_context_size=10 allocator_release_to_os_interval_ms=10000'

 start

+# shellcheck disable=SC2086 # No quotes because I want to split it into words.
 /s3downloader --dataset-names $DATASETS
 chmod 777 -R /var/lib/clickhouse
 clickhouse-client --query "ATTACH DATABASE IF NOT EXISTS datasets ENGINE = Ordinary"
@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 # -*- coding: utf-8 -*-
 from multiprocessing import cpu_count
 from subprocess import Popen, check_call

@ -28,8 +28,18 @@ def get_options(i):
     options = ""
     if 0 < i:
         options += " --order=random"

     if i % 2 == 1:
         options += " --db-engine=Ordinary"

+    # If database name is not specified, new database is created for each functional test.
+    # Run some threads with one database for all tests.
+    if i % 3 == 1:
+        options += " --database=test_{}".format(i)
+
+    if i == 13:
+        options += " --client-option='memory_tracker_fault_probability=0.00001'"

     return options
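With `get_options` extended as above, runners are rotated through the stress modes deterministically: every runner past the first randomizes test order, odd runners use the Ordinary database engine, every `i % 3 == 1` runner shares one database across its tests, and runner 13 additionally injects rare memory-tracker faults. Sampling a few indexes shows the effect (the function below is reconstructed verbatim from the hunk for a self-contained run):

```python
def get_options(i):
    # Reconstructed from the diff above, for illustration only.
    options = ""
    if 0 < i:
        options += " --order=random"
    if i % 2 == 1:
        options += " --db-engine=Ordinary"
    if i % 3 == 1:
        options += " --database=test_{}".format(i)
    if i == 13:
        options += " --client-option='memory_tracker_fault_probability=0.00001'"
    return options

for i in (0, 1, 2, 4, 13):
    print(i, repr(get_options(i)))
# 0 ''
# 1 ' --order=random --db-engine=Ordinary --database=test_1'
# 2 ' --order=random'
# 4 ' --order=random --database=test_4'
# 13 ' --order=random --db-engine=Ordinary --database=test_13 --client-option=...'
```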
@ -2,17 +2,19 @@

 set -e -x

+# Not sure why shellcheck complains that rc is not assigned before it is referenced.
+# shellcheck disable=SC2154
 trap 'rc=$?; echo EXITED WITH: $rc; exit $rc' EXIT

 # CLI option to prevent rebuilding images, just re-run tests with images leftover from previous time
 readonly NO_REBUILD_FLAG="--no-rebuild"

-readonly CLICKHOUSE_DOCKER_DIR="$(realpath ${1})"
+readonly CLICKHOUSE_DOCKER_DIR="$(realpath "${1}")"
 readonly CLICKHOUSE_PACKAGES_ARG="${2}"
 CLICKHOUSE_SERVER_IMAGE="${3}"

-if [ ${CLICKHOUSE_PACKAGES_ARG} != ${NO_REBUILD_FLAG} ]; then
-    readonly CLICKHOUSE_PACKAGES_DIR="$(realpath ${2})" # or --no-rebuild
+if [ "${CLICKHOUSE_PACKAGES_ARG}" != "${NO_REBUILD_FLAG}" ]; then
+    readonly CLICKHOUSE_PACKAGES_DIR="$(realpath "${2}")" # or --no-rebuild
 fi

@ -25,7 +27,7 @@ fi

 # TODO: optionally mount most recent clickhouse-test and queries directory from local machine

-if [ ${CLICKHOUSE_PACKAGES_ARG} != ${NO_REBUILD_FLAG} ]; then
+if [ "${CLICKHOUSE_PACKAGES_ARG}" != "${NO_REBUILD_FLAG}" ]; then
     docker build --network=host \
         -f "${CLICKHOUSE_DOCKER_DIR}/test/stateless/clickhouse-statelest-test-runner.Dockerfile" \
         --target clickhouse-test-runner-base \

@ -49,7 +51,7 @@ fi
 if [ -z "${CLICKHOUSE_SERVER_IMAGE}" ]; then
     CLICKHOUSE_SERVER_IMAGE="yandex/clickhouse-server:local"

-    if [ ${CLICKHOUSE_PACKAGES_ARG} != ${NO_REBUILD_FLAG} ]; then
+    if [ "${CLICKHOUSE_PACKAGES_ARG}" != "${NO_REBUILD_FLAG}" ]; then
         docker build --network=host \
             -f "${CLICKHOUSE_DOCKER_DIR}/server/local.Dockerfile" \
             --target clickhouse-server-base \

@ -7,7 +7,7 @@ set +e
 reties=0
 while true; do
     docker info &>/dev/null && break
-    reties=$[$reties+1]
+    reties=$((reties+1))
     if [[ $reties -ge 100 ]]; then # 10 sec max
         echo "Can't start docker daemon, timeout exceeded." >&2
         exit 1;
@ -45,7 +45,7 @@ A `Block` is a container that represents a subset (chunk) of a table in memory.

 When we calculate some function over columns in a block, we add another column with its result to the block, and we don’t touch columns for arguments of the function because operations are immutable. Later, unneeded columns can be removed from the block, but not modified. It is convenient for the elimination of common subexpressions.

-Blocks are created for every processed chunk of data. Note that for the same type of calculation, the column names and types remain the same for different blocks, and only column data changes. It is better to split block data from the block header because small block sizes have a high overhead of temporary strings for copying shared\_ptrs and column names.
+Blocks are created for every processed chunk of data. Note that for the same type of calculation, the column names and types remain the same for different blocks, and only column data changes. It is better to split block data from the block header because small block sizes have a high overhead of temporary strings for copying shared_ptrs and column names.

 ## Block Streams {#block-streams}
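A toy Python analogy of the immutability described in that passage (the real interface is C++ and is not shown here): applying a function appends a result column instead of mutating the argument columns, so earlier columns can be shared and later dropped.

```python
# A block modeled as a list of (name, type, data) triples.
block = [('x', 'UInt64', [1, 2, 3])]

def apply_function(block, result_name, func, arg_name):
    # Find the argument column, compute a new column, and return a new
    # block that shares the old columns untouched.
    arg_data = next(data for (name, _, data) in block if name == arg_name)
    return block + [(result_name, 'UInt64', [func(v) for v in arg_data])]

block2 = apply_function(block, 'x_squared', lambda v: v * v, 'x')
# `block` is unchanged; `block2` adds 'x_squared' alongside the shared 'x'.
```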
@ -7,7 +7,7 @@ toc_title: Build on Linux

 Supported platforms:

-- x86\_64
+- x86_64
 - AArch64
 - Power9 (experimental)

@ -116,7 +116,7 @@ ninja
 Example for Fedora Rawhide:
 ``` bash
 sudo yum update
-yum --nogpg install git cmake make gcc-c++ python2
+yum --nogpg install git cmake make gcc-c++ python3
 git clone --recursive https://github.com/ClickHouse/ClickHouse.git
 mkdir build && cd build
 cmake ../ClickHouse
@ -26,7 +26,7 @@ toc_title: Third-Party Libraries Used
 | libpcg-random | [Apache License 2.0](https://github.com/ClickHouse/ClickHouse/blob/master/contrib/libpcg-random/LICENSE-APACHE.txt) |
 | libressl | [OpenSSL License](https://github.com/ClickHouse-Extras/ssl/blob/master/COPYING) |
 | librdkafka | [BSD 2-Clause License](https://github.com/edenhill/librdkafka/blob/363dcad5a23dc29381cc626620e68ae418b3af19/LICENSE) |
-| libwidechar\_width | [CC0 1.0 Universal](https://github.com/ClickHouse/ClickHouse/blob/master/libs/libwidechar_width/LICENSE) |
+| libwidechar_width | [CC0 1.0 Universal](https://github.com/ClickHouse/ClickHouse/blob/master/libs/libwidechar_width/LICENSE) |
 | llvm | [BSD 3-Clause License](https://github.com/ClickHouse-Extras/llvm/blob/163def217817c90fb982a6daf384744d8472b92b/llvm/LICENSE.TXT) |
 | lz4 | [BSD 2-Clause License](https://github.com/lz4/lz4/blob/c10863b98e1503af90616ae99725ecd120265dfb/LICENSE) |
 | mariadb-connector-c | [LGPL v2.1](https://github.com/ClickHouse-Extras/mariadb-connector-c/blob/3.1/COPYING.LIB) |
@ -40,7 +40,7 @@ In the command line terminal run:
     git clone --recursive git@github.com:your_github_username/ClickHouse.git
     cd ClickHouse

-Note: please, substitute *your\_github\_username* with what is appropriate!
+Note: please, substitute *your_github_username* with what is appropriate!

 This command will create a directory `ClickHouse` containing the working copy of the project.

@ -150,7 +150,7 @@ Now that you are ready to build ClickHouse we recommend you to create a separate
     mkdir build
     cd build

-You can have several different directories (build\_release, build\_debug, etc.) for different types of build.
+You can have several different directories (build_release, build_debug, etc.) for different types of build.

 While inside the `build` directory, configure your build by running CMake. Before the first run, you need to define environment variables that specify compiler (version 9 gcc compiler in this example).
@ -354,7 +354,7 @@ In all other cases, use a name that describes the meaning.
 bool info_successfully_loaded = false;
 ```

-**9.** Names of `define`s and global constants use ALL\_CAPS with underscores.
+**9.** Names of `define`s and global constants use ALL_CAPS with underscores.

 ``` cpp
 #define MAX_SRC_TABLE_NAMES_TO_STORE 1000

@ -394,7 +394,7 @@ The underscore suffix can be omitted if the argument is not used in the construc
 timer (not m_timer)
 ```

-**14.** For the constants in an `enum`, use CamelCase with a capital letter. ALL\_CAPS is also acceptable. If the `enum` is non-local, use an `enum class`.
+**14.** For the constants in an `enum`, use CamelCase with a capital letter. ALL_CAPS is also acceptable. If the `enum` is non-local, use an `enum class`.

 ``` cpp
 enum class CompressionMethod

@ -707,7 +707,7 @@ The standard library is used (`libc++`).

 **4.** OS: Linux Ubuntu, not older than Precise.

-**5.** Code is written for x86\_64 CPU architecture.
+**5.** Code is written for x86_64 CPU architecture.

 The CPU instruction set is the minimum supported set among our servers. Currently, it is SSE 4.2.