Merge branch 'master' into manual-write-duplicate-parts-to-replicas

Alexey Milovidov 2020-08-26 03:24:35 +03:00
commit 0e021c6151
500 changed files with 13982 additions and 4888 deletions


@@ -24,3 +24,6 @@ Detailed description / Documentation draft:
By adding documentation, you'll allow users to try your new feature immediately, not when someone else will have time to document it later. Documentation is necessary for all features that affect user experience in any way. You can add brief documentation draft above, or add documentation right into your patch as Markdown files in [docs](https://github.com/ClickHouse/ClickHouse/tree/master/docs) folder.
If you are doing this for the first time, it's recommended to read the lightweight [Contributing to ClickHouse Documentation](https://github.com/ClickHouse/ClickHouse/tree/master/docs/README.md) guide first.
Information about CI checks: https://clickhouse.tech/docs/en/development/continuous-integration/

.gitmodules

@@ -180,3 +180,9 @@
[submodule "contrib/stats"]
path = contrib/stats
url = https://github.com/kthohr/stats.git
[submodule "contrib/krb5"]
path = contrib/krb5
url = https://github.com/krb5/krb5
[submodule "contrib/cyrus-sasl"]
path = contrib/cyrus-sasl
url = https://github.com/cyrusimap/cyrus-sasl


@@ -202,9 +202,16 @@ if (ARCH_NATIVE)
set (COMPILER_FLAGS "${COMPILER_FLAGS} -march=native")
endif ()
if (UNBUNDLED AND (COMPILER_GCC OR COMPILER_CLANG))
# to make numeric_limits<__int128> work for unbundled build
set (_CXX_STANDARD "-std=gnu++2a")
else()
set (_CXX_STANDARD "-std=c++2a")
endif()
# cmake < 3.12 doesn't support 20. We'll set CMAKE_CXX_FLAGS for now
# set (CMAKE_CXX_STANDARD 20)
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${_CXX_STANDARD}")
set (CMAKE_CXX_EXTENSIONS 0) # https://cmake.org/cmake/help/latest/prop_tgt/CXX_EXTENSIONS.html#prop_tgt:CXX_EXTENSIONS
set (CMAKE_CXX_STANDARD_REQUIRED ON)
@@ -371,7 +378,9 @@ include (cmake/find/ltdl.cmake) # for odbc
# openssl, zlib before poco
include (cmake/find/sparsehash.cmake)
include (cmake/find/re2.cmake)
include (cmake/find/krb5.cmake)
include (cmake/find/libgsasl.cmake)
include (cmake/find/cyrus-sasl.cmake)
include (cmake/find/rdkafka.cmake)
include (cmake/find/amqpcpp.cmake)
include (cmake/find/capnp.cmake)
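
The `_CXX_STANDARD` switch above exists because of a libstdc++ detail: `std::numeric_limits<__int128>` is only specialized when GNU language extensions are active, and strict `-std=c++2a` defines `__STRICT_ANSI__`, which hides the specialization. A minimal probe of this behavior, assuming GCC or Clang with libstdc++ (it compiles under `-std=gnu++2a` and fails under `-std=c++2a`):

```
#include <limits>

// Passes with -std=gnu++2a; fails to compile with strict -std=c++2a,
// where libstdc++ hides the __int128 specialization behind __STRICT_ANSI__.
static_assert(std::numeric_limits<__int128>::is_specialized,
              "numeric_limits<__int128> requires GNU language extensions");

int main()
{
    // 127 value bits plus a sign bit.
    return std::numeric_limits<__int128>::digits == 127 ? 0 : 1;
}
```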


@@ -17,4 +17,4 @@ ClickHouse is an open-source column-oriented database management system that all
## Upcoming Events
* [ClickHouse at ByteDance (in Chinese)](https://mp.weixin.qq.com/s/Em-HjPylO8D7WPui4RREAQ) on August 28, 2020.


@@ -6,6 +6,8 @@
#include <string.h>
#include <unistd.h>
#include <iostream>
namespace
{
@@ -107,6 +109,8 @@ ReadlineLineReader::ReadlineLineReader(
throw std::runtime_error(std::string("Cannot set signal handler for readline: ") + strerror(errno));
rl_variable_bind("completion-ignore-case", "on");
// TODO: it doesn't work
// history_write_timestamps = 1;
}
ReadlineLineReader::~ReadlineLineReader()
@@ -129,6 +133,11 @@ LineReader::InputStatus ReadlineLineReader::readOneLine(const String & prompt)
void ReadlineLineReader::addToHistory(const String & line)
{
add_history(line.c_str());
// Flush changes to the disk
// NOTE readline builds a buffer of all the lines to write, and writes them in one syscall.
// Thus there is no need to lock the history file here.
write_history(history_file_path.c_str());
}
#if RL_VERSION_MAJOR >= 7
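
For context, the same append-then-flush pattern as a standalone sketch against the GNU readline API (link with -lreadline; the history path is a made-up example):

```
#include <cstdlib>
#include <readline/readline.h>
#include <readline/history.h>

int main()
{
    using_history();
    if (char * line = readline("> "))
    {
        add_history(line);                      /// Append to the in-memory history list.
        write_history("/tmp/example_history");  /// Persist the whole list; readline writes it in one go.
        std::free(line);
    }
    return 0;
}
```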


@@ -21,21 +21,26 @@
#endif
/**
 * The std::string_view-like container to avoid creating strings to find substrings in the hash table.
 */
struct StringRef
{
const char * data = nullptr;
size_t size = 0;
/// Non-constexpr due to reinterpret_cast.
template <typename CharT, typename = std::enable_if_t<sizeof(CharT) == 1>>
StringRef(const CharT * data_, size_t size_) : data(reinterpret_cast<const char *>(data_)), size(size_)
{
/// Sanity check for overflowed values.
assert(size < 0x8000000000000000ULL);
}
constexpr StringRef(const char * data_, size_t size_) : data(data_), size(size_) {}
StringRef(const std::string & s) : data(s.data()), size(s.size()) {}
constexpr explicit StringRef(std::string_view s) : data(s.data()), size(s.size()) {}
constexpr StringRef(const char * data_) : StringRef(std::string_view{data_}) {}
constexpr StringRef() = default;
@@ -45,6 +50,12 @@ struct StringRef
constexpr explicit operator std::string_view() const { return {data, size}; }
};
/// Here constexpr doesn't imply inline, see https://www.viva64.com/en/w/v1043/
/// nullptr can't be used because the StringRef values are used in SipHash's pointer arithmetic
/// and UBSan thinks that something like nullptr + 8 is UB.
constexpr const inline char empty_string_ref_addr{};
constexpr const inline StringRef EMPTY_STRING_REF{&empty_string_ref_addr, 0};
using StringRefs = std::vector<StringRef>;
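
A short sketch of how the new constants and constructors fit together (the include path is illustrative). Because `EMPTY_STRING_REF` is explicitly `inline`, all translation units share a single object, and its non-null `data` keeps expressions like `data + 8` in SipHash well-defined:

```
#include <cassert>
#include <common/StringRef.h>  /// illustrative include path for the header above

int main()
{
    constexpr StringRef ref("abc");  /// constexpr construction from a C string literal
    static_assert(ref.size == 3);

    assert(EMPTY_STRING_REF.size == 0);
    assert(EMPTY_STRING_REF.data != nullptr);  /// safe for pointer arithmetic, unlike nullptr
    return 0;
}
```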


@@ -1,5 +1,7 @@
#pragma once
#include <common/types.h>
namespace common
{
template <typename T>
@@ -35,6 +37,21 @@ namespace common
return (y > 0 && x > max_int128 - y) || (y < 0 && x < min_int128 - y);
}
template <>
inline bool addOverflow(bInt256 x, bInt256 y, bInt256 & res)
{
res = x + y;
return (y > 0 && x > std::numeric_limits<bInt256>::max() - y) ||
(y < 0 && x < std::numeric_limits<bInt256>::min() - y);
}
template <>
inline bool addOverflow(bUInt256 x, bUInt256 y, bUInt256 & res)
{
res = x + y;
return x > std::numeric_limits<bUInt256>::max() - y;
}
template <typename T>
inline bool subOverflow(T x, T y, T & res)
{
@@ -68,6 +85,21 @@ namespace common
return (y < 0 && x > max_int128 + y) || (y > 0 && x < min_int128 + y);
}
template <>
inline bool subOverflow(bInt256 x, bInt256 y, bInt256 & res)
{
res = x - y;
return (y < 0 && x > std::numeric_limits<bInt256>::max() + y) ||
(y > 0 && x < std::numeric_limits<bInt256>::min() + y);
}
template <>
inline bool subOverflow(bUInt256 x, bUInt256 y, bUInt256 & res)
{
res = x - y;
return x < y;
}
template <typename T>
inline bool mulOverflow(T x, T y, T & res)
{
@@ -103,4 +135,25 @@ namespace common
unsigned __int128 b = (y > 0) ? y : -y;
return (a * b) / b != a;
}
template <>
inline bool mulOverflow(bInt256 x, bInt256 y, bInt256 & res)
{
res = x * y;
if (!x || !y)
return false;
bInt256 a = (x > 0) ? x : -x;
bInt256 b = (y > 0) ? y : -y;
return (a * b) / b != a;
}
template <>
inline bool mulOverflow(bUInt256 x, bUInt256 y, bUInt256 & res)
{
res = x * y;
if (!x || !y)
return false;
return (x * y) / y != x;
}
}
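
A compact sketch of exercising the new 256-bit overloads; the `bInt256` alias comes from `common/types.h` in this same commit, and the include paths are illustrative:

```
#include <iostream>
#include <limits>
#include <common/types.h>
#include <common/arithmeticOverflow.h>

int main()
{
    bInt256 res;
    const bInt256 big = std::numeric_limits<bInt256>::max();

    /// Adding 1 to the maximum 255-bit value must report overflow.
    std::cout << common::addOverflow(big, bInt256(1), res) << '\n';  /// 1

    /// A small product fits, so no overflow is reported and res holds the product.
    std::cout << common::mulOverflow(bInt256(6), bInt256(7), res) << '\n';  /// 0
    std::cout << res << '\n';  /// 42
    return 0;
}
```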


@@ -2,6 +2,7 @@
#include "unaligned.h"
#include <dlfcn.h>
#include <string>
#include <boost/algorithm/string/replace.hpp>
std::string_view getResource(std::string_view name) std::string_view getResource(std::string_view name)
@@ -10,6 +11,7 @@ std::string_view getResource(std::string_view name)
std::replace(name_replaced.begin(), name_replaced.end(), '/', '_');
std::replace(name_replaced.begin(), name_replaced.end(), '-', '_');
std::replace(name_replaced.begin(), name_replaced.end(), '.', '_');
boost::replace_all(name_replaced, "+", "_PLUS_");
/// These are the names that are generated by "ld -r -b binary"
std::string symbol_name_data = "_binary_" + name_replaced + "_start";
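
A condensed sketch of the lookup this file performs: the resource name is mangled the same way the embedded file name was, then the generated symbols are resolved at runtime. The resource name below is hypothetical, and the `_size` symbol (whose address encodes the length) follows the same naming convention:

```
#include <cstddef>
#include <dlfcn.h>
#include <string_view>

std::string_view lookupEmbedded()
{
    /// Symbols "ld -r -b binary" would generate for a file "config.xml"
    /// (dots and slashes become underscores; '+' is pre-replaced with "_PLUS_").
    void * data = dlsym(RTLD_DEFAULT, "_binary_config_xml_start");
    void * size = dlsym(RTLD_DEFAULT, "_binary_config_xml_size");
    if (!data || !size)
        return {};
    return {static_cast<const char *>(data), reinterpret_cast<std::size_t>(size)};
}
```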


@@ -1,19 +1,15 @@
include (${ClickHouse_SOURCE_DIR}/cmake/add_check.cmake)
add_executable (date_lut_init date_lut_init.cpp)
add_executable (date_lut2 date_lut2.cpp)
add_executable (date_lut3 date_lut3.cpp)
add_executable (date_lut4 date_lut4.cpp)
add_executable (date_lut_default_timezone date_lut_default_timezone.cpp)
add_executable (local_date_time_comparison local_date_time_comparison.cpp)
add_executable (realloc-perf allocator.cpp)
set(PLATFORM_LIBS ${CMAKE_DL_LIBS})
target_link_libraries (date_lut_init PRIVATE common ${PLATFORM_LIBS})
target_link_libraries (date_lut2 PRIVATE common ${PLATFORM_LIBS})
target_link_libraries (date_lut3 PRIVATE common ${PLATFORM_LIBS})
target_link_libraries (date_lut4 PRIVATE common ${PLATFORM_LIBS})
target_link_libraries (date_lut_default_timezone PRIVATE common ${PLATFORM_LIBS})
target_link_libraries (local_date_time_comparison PRIVATE common)
target_link_libraries (realloc-perf PRIVATE common)


@@ -1,20 +0,0 @@
#include <iostream>
#include <common/DateLUT.h>
int main(int, char **)
{
/** DateLUT had a bug: for times within the day 1970-01-01, it returned an hour number greater than 23. */
static const time_t time = 66130;
const auto & date_lut = DateLUT::instance();
std::cerr << date_lut.toHour(time) << std::endl;
std::cerr << date_lut.toDayNum(time) << std::endl;
const auto * values = reinterpret_cast<const DateLUTImpl::Values *>(&date_lut);
std::cerr << values[0].date << ", " << time_t(values[1].date - values[0].date) << std::endl;
return 0;
}


@@ -1,8 +0,0 @@
#include <common/DateLUT.h>
/// Allows checking the DateLUT initialization time.
int main(int, char **)
{
DateLUT::instance();
return 0;
}


@@ -6,6 +6,8 @@
#include <string>
#include <type_traits>
#include <boost/multiprecision/cpp_int.hpp>
using Int8 = int8_t;
using Int16 = int16_t;
using Int32 = int32_t;
@@ -15,11 +17,21 @@ using Int64 = int64_t;
using char8_t = unsigned char;
#endif
/// This is needed for more strict aliasing. https://godbolt.org/z/xpJBSb https://stackoverflow.com/a/57453713
using UInt8 = char8_t;
using UInt16 = uint16_t;
using UInt32 = uint32_t;
using UInt64 = uint64_t;
using Int128 = __int128;
/// We have to use 127- and 255-bit integers to save a bit for sign serialization
//using bInt256 = boost::multiprecision::int256_t;
using bInt256 = boost::multiprecision::number<boost::multiprecision::cpp_int_backend<
255, 255, boost::multiprecision::signed_magnitude, boost::multiprecision::unchecked, void> >;
using bUInt256 = boost::multiprecision::uint256_t;
using String = std::string;
/// The standard library type traits, such as std::is_arithmetic, with one exception
@@ -31,6 +43,9 @@ struct is_signed
static constexpr bool value = std::is_signed_v<T>;
};
template <> struct is_signed<Int128> { static constexpr bool value = true; };
template <> struct is_signed<bInt256> { static constexpr bool value = true; };
template <typename T>
inline constexpr bool is_signed_v = is_signed<T>::value;
@@ -40,17 +55,26 @@ struct is_unsigned
static constexpr bool value = std::is_unsigned_v<T>;
};
template <> struct is_unsigned<bUInt256> { static constexpr bool value = true; };
template <typename T>
inline constexpr bool is_unsigned_v = is_unsigned<T>::value;
/// TODO: is_integral includes char, char8_t and wchar_t.
template <typename T>
struct is_integer
{
static constexpr bool value = std::is_integral_v<T>;
};
template <> struct is_integer<Int128> { static constexpr bool value = true; };
template <> struct is_integer<bInt256> { static constexpr bool value = true; };
template <> struct is_integer<bUInt256> { static constexpr bool value = true; };
template <typename T>
inline constexpr bool is_integer_v = is_integer<T>::value;
template <typename T>
struct is_arithmetic
@@ -58,5 +82,42 @@ struct is_arithmetic
static constexpr bool value = std::is_arithmetic_v<T>;
};
template <> struct is_arithmetic<__int128> { static constexpr bool value = true; };
template <typename T>
inline constexpr bool is_arithmetic_v = is_arithmetic<T>::value;
template <typename T>
struct make_unsigned
{
typedef std::make_unsigned_t<T> type;
};
template <> struct make_unsigned<__int128> { using type = unsigned __int128; };
template <> struct make_unsigned<bInt256> { using type = bUInt256; };
template <> struct make_unsigned<bUInt256> { using type = bUInt256; };
template <typename T> using make_unsigned_t = typename make_unsigned<T>::type;
template <typename T>
struct make_signed
{
typedef std::make_signed_t<T> type;
};
template <> struct make_signed<bInt256> { typedef bInt256 type; };
template <> struct make_signed<bUInt256> { typedef bInt256 type; };
template <typename T> using make_signed_t = typename make_signed<T>::type;
template <typename T>
struct is_big_int
{
static constexpr bool value = false;
};
template <> struct is_big_int<bUInt256> { static constexpr bool value = true; };
template <> struct is_big_int<bInt256> { static constexpr bool value = true; };
template <typename T>
inline constexpr bool is_big_int_v = is_big_int<T>::value;
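
A small sketch of how the extended traits compose in generic code; `absUnsigned` is a hypothetical helper, not part of the commit:

```
#include <type_traits>
#include <common/types.h>  /// illustrative include path

/// Generic absolute value returning the unsigned counterpart of T;
/// works uniformly for native integers, __int128 and the boost-backed 256-bit types.
template <typename T>
make_unsigned_t<T> absUnsigned(T value)
{
    static_assert(is_integer_v<T>, "integer type required");
    if constexpr (is_signed_v<T>)
        return value < 0 ? make_unsigned_t<T>(-value) : make_unsigned_t<T>(value);
    else
        return value;
}

static_assert(std::is_same_v<make_unsigned_t<Int128>, unsigned __int128>);
static_assert(is_big_int_v<bInt256> && !is_big_int_v<Int128>);
```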


@@ -25,8 +25,8 @@ PEERDIR(
contrib/libs/cctz/src
contrib/libs/cxxsupp/libcxx-filesystem
contrib/libs/poco/Net
contrib/libs/poco/NetSSL_OpenSSL
contrib/libs/poco/Util
contrib/libs/poco/NetSSL_OpenSSL
contrib/libs/fmt
contrib/restricted/boost
contrib/restricted/cityhash-1.0.2
@@ -52,6 +52,7 @@ SRCS(
shift10.cpp
sleep.cpp
terminalColors.cpp
)
END()


@@ -10,6 +10,7 @@ CFLAGS (GLOBAL -DARCADIA_BUILD)
CFLAGS (GLOBAL -DUSE_CPUID=1)
CFLAGS (GLOBAL -DUSE_JEMALLOC=0)
CFLAGS (GLOBAL -DUSE_RAPIDJSON=1)
CFLAGS (GLOBAL -DUSE_SSL=1)
IF (OS_DARWIN)
CFLAGS (GLOBAL -DOS_DARWIN)
@@ -24,6 +25,7 @@ PEERDIR(
contrib/libs/cxxsupp/libcxx-filesystem
contrib/libs/poco/Net
contrib/libs/poco/Util
contrib/libs/poco/NetSSL_OpenSSL
contrib/libs/fmt
contrib/restricted/boost
contrib/restricted/cityhash-1.0.2


@@ -2,7 +2,7 @@ Go to https://www.monetdb.org/
The graphical design of the website is a bit old-fashioned but I am not afraid.
Download now.
Latest binary releases.
Ubuntu & Debian.
@@ -1103,7 +1103,7 @@ Ok, it's doing something at least for twenty minutes...
clk: 28:02 min
```
Finally it has loaded data successfully in 28 minutes. It's not fast - just below 60 000 rows per second.
But the second query from the test does not work:


@@ -0,0 +1,23 @@
if (${ENABLE_LIBRARIES} AND ${ENABLE_KRB5})
set (DEFAULT_ENABLE_CYRUS_SASL 1)
else()
set (DEFAULT_ENABLE_CYRUS_SASL 0)
endif()
OPTION(ENABLE_CYRUS_SASL "Enable cyrus-sasl" ${DEFAULT_ENABLE_CYRUS_SASL})
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/cyrus-sasl/README")
message (WARNING "submodule contrib/cyrus-sasl is missing. to fix try run: \n git submodule update --init --recursive")
set (ENABLE_CYRUS_SASL 0)
endif ()
if (ENABLE_CYRUS_SASL)
set (USE_CYRUS_SASL 1)
set (CYRUS_SASL_LIBRARY sasl2)
set (CYRUS_SASL_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/cyrus-sasl/include")
endif ()
message (STATUS "Using cyrus-sasl: krb5=${USE_KRB5}: ${CYRUS_SASL_INCLUDE_DIR} : ${CYRUS_SASL_LIBRARY}")

cmake/find/krb5.cmake

@@ -0,0 +1,25 @@
OPTION(ENABLE_KRB5 "Enable krb5" ${ENABLE_LIBRARIES})
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/krb5/README")
message (WARNING "submodule contrib/krb5 is missing. to fix try run: \n git submodule update --init --recursive")
set (ENABLE_KRB5 0)
endif ()
if (NOT CMAKE_SYSTEM_NAME MATCHES "Linux")
message (WARNING "krb5 disabled in non-Linux environments")
set (ENABLE_KRB5 0)
endif ()
if (ENABLE_KRB5)
set (USE_KRB5 1)
set (KRB5_LIBRARY krb5)
set (KRB5_INCLUDE_DIR
"${ClickHouse_SOURCE_DIR}/contrib/krb5/src/include"
"${ClickHouse_BINARY_DIR}/contrib/krb5-cmake/include"
)
endif ()
message (STATUS "Using krb5=${USE_KRB5}: ${KRB5_INCLUDE_DIR} : ${KRB5_LIBRARY}")


@@ -311,3 +311,10 @@ if (USE_STATS)
add_subdirectory (stats-cmake)
add_subdirectory (gcem)
endif()
if (USE_KRB5)
add_subdirectory (krb5-cmake)
if (USE_CYRUS_SASL)
add_subdirectory (cyrus-sasl-cmake)
endif()
endif()


@@ -27,584 +27,29 @@ if (USE_INTERNAL_CCTZ)
# Build a library with embedded tzdata
if (OS_LINUX)
# get the list of timezones from tzdata shipped with cctz
set (TIMEZONES
Africa/Abidjan
Africa/Accra
Africa/Addis_Ababa
Africa/Algiers
Africa/Asmara
Africa/Asmera
Africa/Bamako
Africa/Bangui
Africa/Banjul
Africa/Bissau
Africa/Blantyre
Africa/Brazzaville
Africa/Bujumbura
Africa/Cairo
Africa/Casablanca
Africa/Ceuta
Africa/Conakry
Africa/Dakar
Africa/Dar_es_Salaam
Africa/Djibouti
Africa/Douala
Africa/El_Aaiun
Africa/Freetown
Africa/Gaborone
Africa/Harare
Africa/Johannesburg
Africa/Juba
Africa/Kampala
Africa/Khartoum
Africa/Kigali
Africa/Kinshasa
Africa/Lagos
Africa/Libreville
Africa/Lome
Africa/Luanda
Africa/Lubumbashi
Africa/Lusaka
Africa/Malabo
Africa/Maputo
Africa/Maseru
Africa/Mbabane
Africa/Mogadishu
Africa/Monrovia
Africa/Nairobi
Africa/Ndjamena
Africa/Niamey
Africa/Nouakchott
Africa/Ouagadougou
Africa/Porto-Novo
Africa/Sao_Tome
Africa/Timbuktu
Africa/Tripoli
Africa/Tunis
Africa/Windhoek
America/Adak
America/Anchorage
America/Anguilla
America/Antigua
America/Araguaina
America/Argentina/Buenos_Aires
America/Argentina/Catamarca
America/Argentina/ComodRivadavia
America/Argentina/Cordoba
America/Argentina/Jujuy
America/Argentina/La_Rioja
America/Argentina/Mendoza
America/Argentina/Rio_Gallegos
America/Argentina/Salta
America/Argentina/San_Juan
America/Argentina/San_Luis
America/Argentina/Tucuman
America/Argentina/Ushuaia
America/Aruba
America/Asuncion
America/Atikokan
America/Atka
America/Bahia
America/Bahia_Banderas
America/Barbados
America/Belem
America/Belize
America/Blanc-Sablon
America/Boa_Vista
America/Bogota
America/Boise
America/Buenos_Aires
America/Cambridge_Bay
America/Campo_Grande
America/Cancun
America/Caracas
America/Catamarca
America/Cayenne
America/Cayman
America/Chicago
America/Chihuahua
America/Coral_Harbour
America/Cordoba
America/Costa_Rica
America/Creston
America/Cuiaba
America/Curacao
America/Danmarkshavn
America/Dawson
America/Dawson_Creek
America/Denver
America/Detroit
America/Dominica
America/Edmonton
America/Eirunepe
America/El_Salvador
America/Ensenada
America/Fortaleza
America/Fort_Nelson
America/Fort_Wayne
America/Glace_Bay
America/Godthab
America/Goose_Bay
America/Grand_Turk
America/Grenada
America/Guadeloupe
America/Guatemala
America/Guayaquil
America/Guyana
America/Halifax
America/Havana
America/Hermosillo
America/Indiana/Indianapolis
America/Indiana/Knox
America/Indiana/Marengo
America/Indiana/Petersburg
America/Indianapolis
America/Indiana/Tell_City
America/Indiana/Vevay
America/Indiana/Vincennes
America/Indiana/Winamac
America/Inuvik
America/Iqaluit
America/Jamaica
America/Jujuy
America/Juneau
America/Kentucky/Louisville
America/Kentucky/Monticello
America/Knox_IN
America/Kralendijk
America/La_Paz
America/Lima
America/Los_Angeles
America/Louisville
America/Lower_Princes
America/Maceio
America/Managua
America/Manaus
America/Marigot
America/Martinique
America/Matamoros
America/Mazatlan
America/Mendoza
America/Menominee
America/Merida
America/Metlakatla
America/Mexico_City
America/Miquelon
America/Moncton
America/Monterrey
America/Montevideo
America/Montreal
America/Montserrat
America/Nassau
America/New_York
America/Nipigon
America/Nome
America/Noronha
America/North_Dakota/Beulah
America/North_Dakota/Center
America/North_Dakota/New_Salem
America/Ojinaga
America/Panama
America/Pangnirtung
America/Paramaribo
America/Phoenix
America/Port-au-Prince
America/Porto_Acre
America/Port_of_Spain
America/Porto_Velho
America/Puerto_Rico
America/Punta_Arenas
America/Rainy_River
America/Rankin_Inlet
America/Recife
America/Regina
America/Resolute
America/Rio_Branco
America/Rosario
America/Santa_Isabel
America/Santarem
America/Santiago
America/Santo_Domingo
America/Sao_Paulo
America/Scoresbysund
America/Shiprock
America/Sitka
America/St_Barthelemy
America/St_Johns
America/St_Kitts
America/St_Lucia
America/St_Thomas
America/St_Vincent
America/Swift_Current
America/Tegucigalpa
America/Thule
America/Thunder_Bay
America/Tijuana
America/Toronto
America/Tortola
America/Vancouver
America/Virgin
America/Whitehorse
America/Winnipeg
America/Yakutat
America/Yellowknife
Antarctica/Casey
Antarctica/Davis
Antarctica/DumontDUrville
Antarctica/Macquarie
Antarctica/Mawson
Antarctica/McMurdo
Antarctica/Palmer
Antarctica/Rothera
Antarctica/South_Pole
Antarctica/Syowa
Antarctica/Troll
Antarctica/Vostok
Arctic/Longyearbyen
Asia/Aden
Asia/Almaty
Asia/Amman
Asia/Anadyr
Asia/Aqtau
Asia/Aqtobe
Asia/Ashgabat
Asia/Ashkhabad
Asia/Atyrau
Asia/Baghdad
Asia/Bahrain
Asia/Baku
Asia/Bangkok
Asia/Barnaul
Asia/Beirut
Asia/Bishkek
Asia/Brunei
Asia/Calcutta
Asia/Chita
Asia/Choibalsan
Asia/Chongqing
Asia/Chungking
Asia/Colombo
Asia/Dacca
Asia/Damascus
Asia/Dhaka
Asia/Dili
Asia/Dubai
Asia/Dushanbe
Asia/Famagusta
Asia/Gaza
Asia/Harbin
Asia/Hebron
Asia/Ho_Chi_Minh
Asia/Hong_Kong
Asia/Hovd
Asia/Irkutsk
Asia/Istanbul
Asia/Jakarta
Asia/Jayapura
Asia/Jerusalem
Asia/Kabul
Asia/Kamchatka
Asia/Karachi
Asia/Kashgar
Asia/Kathmandu
Asia/Katmandu
Asia/Khandyga
Asia/Kolkata
Asia/Krasnoyarsk
Asia/Kuala_Lumpur
Asia/Kuching
Asia/Kuwait
Asia/Macao
Asia/Macau
Asia/Magadan
Asia/Makassar
Asia/Manila
Asia/Muscat
Asia/Nicosia
Asia/Novokuznetsk
Asia/Novosibirsk
Asia/Omsk
Asia/Oral
Asia/Phnom_Penh
Asia/Pontianak
Asia/Pyongyang
Asia/Qatar
Asia/Qostanay
Asia/Qyzylorda
Asia/Rangoon
Asia/Riyadh
Asia/Saigon
Asia/Sakhalin
Asia/Samarkand
Asia/Seoul
Asia/Shanghai
Asia/Singapore
Asia/Srednekolymsk
Asia/Taipei
Asia/Tashkent
Asia/Tbilisi
Asia/Tehran
Asia/Tel_Aviv
Asia/Thimbu
Asia/Thimphu
Asia/Tokyo
Asia/Tomsk
Asia/Ujung_Pandang
Asia/Ulaanbaatar
Asia/Ulan_Bator
Asia/Urumqi
Asia/Ust-Nera
Asia/Vientiane
Asia/Vladivostok
Asia/Yakutsk
Asia/Yangon
Asia/Yekaterinburg
Asia/Yerevan
Atlantic/Azores
Atlantic/Bermuda
Atlantic/Canary
Atlantic/Cape_Verde
Atlantic/Faeroe
Atlantic/Faroe
Atlantic/Jan_Mayen
Atlantic/Madeira
Atlantic/Reykjavik
Atlantic/South_Georgia
Atlantic/Stanley
Atlantic/St_Helena
Australia/ACT
Australia/Adelaide
Australia/Brisbane
Australia/Broken_Hill
Australia/Canberra
Australia/Currie
Australia/Darwin
Australia/Eucla
Australia/Hobart
Australia/LHI
Australia/Lindeman
Australia/Lord_Howe
Australia/Melbourne
Australia/North
Australia/NSW
Australia/Perth
Australia/Queensland
Australia/South
Australia/Sydney
Australia/Tasmania
Australia/Victoria
Australia/West
Australia/Yancowinna
Brazil/Acre
Brazil/DeNoronha
Brazil/East
Brazil/West
Canada/Atlantic
Canada/Central
Canada/Eastern
Canada/Mountain
Canada/Newfoundland
Canada/Pacific
Canada/Saskatchewan
Canada/Yukon
CET
Chile/Continental
Chile/EasterIsland
CST6CDT
Cuba
EET
Egypt
Eire
EST
EST5EDT
Etc/GMT
Etc/Greenwich
Etc/UCT
Etc/Universal
Etc/UTC
Etc/Zulu
Europe/Amsterdam
Europe/Andorra
Europe/Astrakhan
Europe/Athens
Europe/Belfast
Europe/Belgrade
Europe/Berlin
Europe/Bratislava
Europe/Brussels
Europe/Bucharest
Europe/Budapest
Europe/Busingen
Europe/Chisinau
Europe/Copenhagen
Europe/Dublin
Europe/Gibraltar
Europe/Guernsey
Europe/Helsinki
Europe/Isle_of_Man
Europe/Istanbul
Europe/Jersey
Europe/Kaliningrad
Europe/Kiev
Europe/Kirov
Europe/Lisbon
Europe/Ljubljana
Europe/London
Europe/Luxembourg
Europe/Madrid
Europe/Malta
Europe/Mariehamn
Europe/Minsk
Europe/Monaco
Europe/Moscow
Europe/Nicosia
Europe/Oslo
Europe/Paris
Europe/Podgorica
Europe/Prague
Europe/Riga
Europe/Rome
Europe/Samara
Europe/San_Marino
Europe/Sarajevo
Europe/Saratov
Europe/Simferopol
Europe/Skopje
Europe/Sofia
Europe/Stockholm
Europe/Tallinn
Europe/Tirane
Europe/Tiraspol
Europe/Ulyanovsk
Europe/Uzhgorod
Europe/Vaduz
Europe/Vatican
Europe/Vienna
Europe/Vilnius
Europe/Volgograd
Europe/Warsaw
Europe/Zagreb
Europe/Zaporozhye
Europe/Zurich
Factory
GB
GB-Eire
GMT
GMT0
Greenwich
Hongkong
HST
Iceland
Indian/Antananarivo
Indian/Chagos
Indian/Christmas
Indian/Cocos
Indian/Comoro
Indian/Kerguelen
Indian/Mahe
Indian/Maldives
Indian/Mauritius
Indian/Mayotte
Indian/Reunion
Iran
Israel
Jamaica
Japan
Kwajalein
Libya
MET
Mexico/BajaNorte
Mexico/BajaSur
Mexico/General
MST
MST7MDT
Navajo
NZ
NZ-CHAT
Pacific/Apia
Pacific/Auckland
Pacific/Bougainville
Pacific/Chatham
Pacific/Chuuk
Pacific/Easter
Pacific/Efate
Pacific/Enderbury
Pacific/Fakaofo
Pacific/Fiji
Pacific/Funafuti
Pacific/Galapagos
Pacific/Gambier
Pacific/Guadalcanal
Pacific/Guam
Pacific/Honolulu
Pacific/Johnston
Pacific/Kiritimati
Pacific/Kosrae
Pacific/Kwajalein
Pacific/Majuro
Pacific/Marquesas
Pacific/Midway
Pacific/Nauru
Pacific/Niue
Pacific/Norfolk
Pacific/Noumea
Pacific/Pago_Pago
Pacific/Palau
Pacific/Pitcairn
Pacific/Pohnpei
Pacific/Ponape
Pacific/Port_Moresby
Pacific/Rarotonga
Pacific/Saipan
Pacific/Samoa
Pacific/Tahiti
Pacific/Tarawa
Pacific/Tongatapu
Pacific/Truk
Pacific/Wake
Pacific/Wallis
Pacific/Yap
Poland
Portugal
PRC
PST8PDT
ROC
ROK
Singapore
Turkey
UCT
Universal
US/Alaska
US/Aleutian
US/Arizona
US/Central
US/Eastern
US/East-Indiana
US/Hawaii
US/Indiana-Starke
US/Michigan
US/Mountain
US/Pacific
US/Samoa
UTC
WET
W-SU
Zulu)
set(TZDIR ${LIBRARY_DIR}/testdata/zoneinfo)
file(STRINGS ${LIBRARY_DIR}/testdata/version TZDATA_VERSION)
set_property(GLOBAL PROPERTY TZDATA_VERSION_PROP "${TZDATA_VERSION}")
message(STATUS "Packaging with tzdata version: ${TZDATA_VERSION}")
set(TZ_OBJS)
# each file/symlink in that dir (except tab and localtime) stores the info about a timezone
execute_process(COMMAND bash -c "cd ${TZDIR} && find * -type f,l -and ! -name '*.tab' -and ! -name 'localtime' | sort | paste -sd ';'" OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE TIMEZONES )
foreach(TIMEZONE ${TIMEZONES})
string(REPLACE "/" "_" TIMEZONE_ID ${TIMEZONE})
string(REPLACE "+" "_PLUS_" TIMEZONE_ID ${TIMEZONE_ID})
set(TZ_OBJ ${TIMEZONE_ID}.o)
set(TZ_OBJS ${TZ_OBJS} ${TZ_OBJ})
# https://stackoverflow.com/questions/14776463/compile-and-add-an-object-file-from-a-binary-with-cmake
add_custom_command(OUTPUT ${TZ_OBJ}
COMMAND cp ${TZDIR}/${TIMEZONE} ${CMAKE_CURRENT_BINARY_DIR}/${TIMEZONE_ID}
COMMAND cd ${CMAKE_CURRENT_BINARY_DIR} && ${OBJCOPY_PATH} -I binary ${OBJCOPY_ARCH_OPTIONS}
--rename-section .data=.rodata,alloc,load,readonly,data,contents ${TIMEZONE_ID} ${TZ_OBJ}
COMMAND rm ${CMAKE_CURRENT_BINARY_DIR}/${TIMEZONE_ID})
set_source_files_properties(${TZ_OBJ} PROPERTIES EXTERNAL_OBJECT true GENERATED true)
endforeach(TIMEZONE)
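
To make the objcopy step concrete: each timezone file becomes a read-only blob whose start/end symbols follow the binary-embedding convention (compare getResource above). A hypothetical consumer of the object built from Europe/Moscow, with illustrative symbol names:

```
#include <cstddef>
#include <string_view>

/// Symbols from the object built out of "Europe/Moscow"
/// (slashes become underscores; a '+' would have become "_PLUS_").
extern "C" const char _binary_Europe_Moscow_start[];
extern "C" const char _binary_Europe_Moscow_end[];

std::string_view embeddedMoscowTzdata()
{
    return {_binary_Europe_Moscow_start,
            static_cast<std::size_t>(_binary_Europe_Moscow_end - _binary_Europe_Moscow_start)};
}
```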

contrib/cyrus-sasl vendored Submodule

@@ -0,0 +1 @@
Subproject commit 6054630889fd1cd8d0659573d69badcee1e23a00


@@ -0,0 +1,69 @@
set(CYRUS_SASL_SOURCE_DIR ${ClickHouse_SOURCE_DIR}/contrib/cyrus-sasl)
add_library(${CYRUS_SASL_LIBRARY})
target_sources(${CYRUS_SASL_LIBRARY} PRIVATE
${CYRUS_SASL_SOURCE_DIR}/plugins/gssapi.c
# ${CYRUS_SASL_SOURCE_DIR}/plugins/gssapiv2_init.c
${CYRUS_SASL_SOURCE_DIR}/common/plugin_common.c
${CYRUS_SASL_SOURCE_DIR}/lib/common.c
${CYRUS_SASL_SOURCE_DIR}/lib/canonusr.c
${CYRUS_SASL_SOURCE_DIR}/lib/server.c
${CYRUS_SASL_SOURCE_DIR}/lib/config.c
${CYRUS_SASL_SOURCE_DIR}/lib/auxprop.c
${CYRUS_SASL_SOURCE_DIR}/lib/saslutil.c
${CYRUS_SASL_SOURCE_DIR}/lib/external.c
${CYRUS_SASL_SOURCE_DIR}/lib/seterror.c
${CYRUS_SASL_SOURCE_DIR}/lib/md5.c
${CYRUS_SASL_SOURCE_DIR}/lib/dlopen.c
${CYRUS_SASL_SOURCE_DIR}/lib/client.c
${CYRUS_SASL_SOURCE_DIR}/lib/checkpw.c
)
target_include_directories(${CYRUS_SASL_LIBRARY} PUBLIC
${CMAKE_CURRENT_BINARY_DIR}
)
target_include_directories(${CYRUS_SASL_LIBRARY} PRIVATE
${CMAKE_CURRENT_SOURCE_DIR} # for config.h
${CYRUS_SASL_SOURCE_DIR}/plugins
${CYRUS_SASL_SOURCE_DIR}
${CYRUS_SASL_SOURCE_DIR}/include
${CYRUS_SASL_SOURCE_DIR}/lib
${CYRUS_SASL_SOURCE_DIR}/sasldb
${CYRUS_SASL_SOURCE_DIR}/common
${CYRUS_SASL_SOURCE_DIR}/saslauthd
${CYRUS_SASL_SOURCE_DIR}/sample
${CYRUS_SASL_SOURCE_DIR}/utils
${CYRUS_SASL_SOURCE_DIR}/tests
)
target_compile_definitions(${CYRUS_SASL_LIBRARY} PUBLIC
HAVE_CONFIG_H
# PLUGINDIR="/usr/local/lib/sasl2"
PLUGINDIR=""
# PIC
OBSOLETE_CRAM_ATTR=1
# SASLAUTHD_CONF_FILE_DEFAULT="/usr/local/etc/saslauthd.conf"
SASLAUTHD_CONF_FILE_DEFAULT=""
# CONFIGDIR="/usr/local/lib/sasl2:/usr/local/etc/sasl2"
CONFIGDIR=""
OBSOLETE_DIGEST_ATTR=1
LIBSASL_EXPORTS=1
)
file(MAKE_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/sasl)
file(COPY
${CYRUS_SASL_SOURCE_DIR}/include/sasl.h
DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/sasl
)
file(COPY
${CYRUS_SASL_SOURCE_DIR}/include/prop.h
DESTINATION ${CMAKE_CURRENT_BINARY_DIR}
)
target_link_libraries(${CYRUS_SASL_LIBRARY}
PUBLIC ${KRB5_LIBRARY}
)


@@ -0,0 +1,722 @@
/* config.h. Generated from config.h.in by configure. */
/* config.h.in. Generated from configure.ac by autoheader. */
/* acconfig.h - autoheader configuration input */
/*
* Copyright (c) 1998-2003 Carnegie Mellon University. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* 3. The name "Carnegie Mellon University" must not be used to
* endorse or promote products derived from this software without
* prior written permission. For permission or any other legal
* details, please contact
* Office of Technology Transfer
* Carnegie Mellon University
* 5000 Forbes Avenue
* Pittsburgh, PA 15213-3890
* (412) 268-4387, fax: (412) 268-7395
* tech-transfer@andrew.cmu.edu
*
* 4. Redistributions of any form whatsoever must retain the following
* acknowledgment:
* "This product includes software developed by Computing Services
* at Carnegie Mellon University (http://www.cmu.edu/computing/)."
*
* CARNEGIE MELLON UNIVERSITY DISCLAIMS ALL WARRANTIES WITH REGARD TO
* THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
* AND FITNESS, IN NO EVENT SHALL CARNEGIE MELLON UNIVERSITY BE LIABLE
* FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN
* AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING
* OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
#ifndef CONFIG_H
#define CONFIG_H
/* Include SASLdb Support */
/* #undef AUTH_SASLDB */
/* Do we need a leading _ for dlsym? */
/* #undef DLSYM_NEEDS_UNDERSCORE */
/* Should we build a shared plugin (via dlopen) library? */
#define DO_DLOPEN /**/
/* should we support sasl_checkapop? */
#define DO_SASL_CHECKAPOP /**/
/* should we support setpass() for SRP? */
/* #undef DO_SRP_SETPASS */
/* Define if your getpwnam_r()/getspnam_r() functions take 5 arguments */
#define GETXXNAM_R_5ARG 1
/* should we mutex-wrap calls into the GSS library? */
#define GSS_USE_MUTEXES /**/
/* Enable 'alwaystrue' password verifier? */
/* #undef HAVE_ALWAYSTRUE */
/* Define to 1 if you have the `asprintf' function. */
#define HAVE_ASPRINTF 1
/* Include support for Courier's authdaemond? */
#define HAVE_AUTHDAEMON /**/
/* Define to 1 if you have the <crypt.h> header file. */
#define HAVE_CRYPT_H 1
/* Define to 1 if you have the <des.h> header file. */
/* #undef HAVE_DES_H */
/* Define to 1 if you have the <dirent.h> header file, and it defines `DIR'.
*/
#define HAVE_DIRENT_H 1
/* Define to 1 if you have the <dlfcn.h> header file. */
#define HAVE_DLFCN_H 1
/* Define to 1 if you have the `dns_lookup' function. */
/* #undef HAVE_DNS_LOOKUP */
/* Define to 1 if you have the `dn_expand' function. */
/* #undef HAVE_DN_EXPAND */
/* Define to 1 if you have the <fcntl.h> header file. */
#define HAVE_FCNTL_H 1
/* Do we have a getaddrinfo? */
#define HAVE_GETADDRINFO /**/
/* Define to 1 if you have the `getdomainname' function. */
#define HAVE_GETDOMAINNAME 1
/* Define to 1 if you have the `gethostname' function. */
#define HAVE_GETHOSTNAME 1
/* Do we have a getnameinfo() function? */
#define HAVE_GETNAMEINFO /**/
/* Define to 1 if you have the `getpassphrase' function. */
/* #undef HAVE_GETPASSPHRASE */
/* Define to 1 if you have the `getpwnam' function. */
#define HAVE_GETPWNAM 1
/* Define to 1 if you have the `getspnam' function. */
#define HAVE_GETSPNAM 1
/* do we have getsubopt()? */
#define HAVE_GETSUBOPT /**/
/* Define to 1 if you have the `gettimeofday' function. */
#define HAVE_GETTIMEOFDAY 1
/* Include GSSAPI/Kerberos 5 Support */
#define HAVE_GSSAPI /**/
/* Define to 1 if you have the <gssapi/gssapi_ext.h> header file. */
#define HAVE_GSSAPI_GSSAPI_EXT_H 1
/* Define if you have the gssapi/gssapi.h header file */
/* #undef HAVE_GSSAPI_GSSAPI_H */
/* Define to 1 if you have the <gssapi/gssapi_krb5.h> header file. */
#define HAVE_GSSAPI_GSSAPI_KRB5_H 1
/* Define if you have the gssapi.h header file */
#define HAVE_GSSAPI_H /**/
/* Define if your GSSAPI implementation defines
gsskrb5_register_acceptor_identity */
#define HAVE_GSSKRB5_REGISTER_ACCEPTOR_IDENTITY 1
/* Define if your GSSAPI implementation defines GSS_C_NT_HOSTBASED_SERVICE */
#define HAVE_GSS_C_NT_HOSTBASED_SERVICE /**/
/* Define if your GSSAPI implementation defines GSS_C_NT_USER_NAME */
#define HAVE_GSS_C_NT_USER_NAME /**/
/* Define if your GSSAPI implementation defines GSS_C_SEC_CONTEXT_SASL_SSF */
#define HAVE_GSS_C_SEC_CONTEXT_SASL_SSF /**/
/* Define to 1 if you have the `gss_decapsulate_token' function. */
#define HAVE_GSS_DECAPSULATE_TOKEN 1
/* Define to 1 if you have the `gss_encapsulate_token' function. */
#define HAVE_GSS_ENCAPSULATE_TOKEN 1
/* Define to 1 if you have the `gss_get_name_attribute' function. */
#define HAVE_GSS_GET_NAME_ATTRIBUTE 1
/* Define if your GSSAPI implementation defines gss_inquire_sec_context_by_oid
*/
#define HAVE_GSS_INQUIRE_SEC_CONTEXT_BY_OID 1
/* Define to 1 if you have the `gss_oid_equal' function. */
#define HAVE_GSS_OID_EQUAL 1
/* Define if your GSSAPI implementation supports SPNEGO */
#define HAVE_GSS_SPNEGO /**/
/* Include HTTP form Support */
/* #undef HAVE_HTTPFORM */
/* Define to 1 if you have the `inet_aton' function. */
#define HAVE_INET_ATON 1
/* Define to 1 if you have the <inttypes.h> header file. */
#define HAVE_INTTYPES_H 1
/* Define to 1 if you have the `jrand48' function. */
#define HAVE_JRAND48 1
/* Do we have Kerberos 4 Support? */
/* #undef HAVE_KRB */
/* Define to 1 if you have the <krb5.h> header file. */
#define HAVE_KRB5_H 1
/* Define to 1 if you have the `krb_get_err_text' function. */
/* #undef HAVE_KRB_GET_ERR_TEXT */
/* Define to 1 if you have the <lber.h> header file. */
/* #undef HAVE_LBER_H */
/* Support for LDAP? */
/* #undef HAVE_LDAP */
/* Define to 1 if you have the <ldap.h> header file. */
/* #undef HAVE_LDAP_H */
/* Define to 1 if you have the `resolv' library (-lresolv). */
#define HAVE_LIBRESOLV 1
/* Define to 1 if you have the <limits.h> header file. */
#define HAVE_LIMITS_H 1
/* Define to 1 if you have the <malloc.h> header file. */
#define HAVE_MALLOC_H 1
/* Define to 1 if you have the `memcpy' function. */
#define HAVE_MEMCPY 1
/* Define to 1 if you have the `memmem' function. */
#define HAVE_MEMMEM 1
/* Define to 1 if you have the <memory.h> header file. */
#define HAVE_MEMORY_H 1
/* Define to 1 if you have the `mkdir' function. */
#define HAVE_MKDIR 1
/* Do we have mysql support? */
/* #undef HAVE_MYSQL */
/* Define to 1 if you have the <ndir.h> header file, and it defines `DIR'. */
/* #undef HAVE_NDIR_H */
/* Do we have OpenSSL? */
#define HAVE_OPENSSL /**/
/* Use OPIE for server-side OTP? */
/* #undef HAVE_OPIE */
/* Support for PAM? */
/* #undef HAVE_PAM */
/* Define to 1 if you have the <paths.h> header file. */
#define HAVE_PATHS_H 1
/* Do we have Postgres support? */
/* #undef HAVE_PGSQL */
/* Include Support for pwcheck daemon? */
/* #undef HAVE_PWCHECK */
/* Include support for saslauthd? */
#define HAVE_SASLAUTHD /**/
/* Define to 1 if you have the `select' function. */
#define HAVE_SELECT 1
/* Do we have SHA512? */
#define HAVE_SHA512 /**/
/* Include SIA Support */
/* #undef HAVE_SIA */
/* Does the system have snprintf()? */
#define HAVE_SNPRINTF /**/
/* Does sockaddr have an sa_len? */
/* #undef HAVE_SOCKADDR_SA_LEN */
/* Define to 1 if you have the `socket' function. */
#define HAVE_SOCKET 1
/* Do we have a socklen_t? */
#define HAVE_SOCKLEN_T /**/
/* Do we have SQLite support? */
/* #undef HAVE_SQLITE */
/* Do we have SQLite3 support? */
/* #undef HAVE_SQLITE3 */
/* Is there an ss_family in sockaddr_storage? */
#define HAVE_SS_FAMILY /**/
/* Define to 1 if you have the <stdarg.h> header file. */
#define HAVE_STDARG_H 1
/* Define to 1 if you have the <stdint.h> header file. */
#define HAVE_STDINT_H 1
/* Define to 1 if you have the <stdlib.h> header file. */
#define HAVE_STDLIB_H 1
/* Define to 1 if you have the `strchr' function. */
#define HAVE_STRCHR 1
/* Define to 1 if you have the `strdup' function. */
#define HAVE_STRDUP 1
/* Define to 1 if you have the `strerror' function. */
#define HAVE_STRERROR 1
/* Define to 1 if you have the <strings.h> header file. */
#define HAVE_STRINGS_H 1
/* Define to 1 if you have the <string.h> header file. */
#define HAVE_STRING_H 1
/* Define to 1 if you have the `strlcat' function. */
/* #undef HAVE_STRLCAT */
/* Define to 1 if you have the `strlcpy' function. */
/* #undef HAVE_STRLCPY */
/* Define to 1 if you have the `strspn' function. */
#define HAVE_STRSPN 1
/* Define to 1 if you have the `strstr' function. */
#define HAVE_STRSTR 1
/* Define to 1 if you have the `strtol' function. */
#define HAVE_STRTOL 1
/* Do we have struct sockaddr_storage? */
#define HAVE_STRUCT_SOCKADDR_STORAGE /**/
/* Define to 1 if you have the <sysexits.h> header file. */
#define HAVE_SYSEXITS_H 1
/* Define to 1 if you have the `syslog' function. */
#define HAVE_SYSLOG 1
/* Define to 1 if you have the <syslog.h> header file. */
#define HAVE_SYSLOG_H 1
/* Define to 1 if you have the <sys/dir.h> header file, and it defines `DIR'.
*/
/* #undef HAVE_SYS_DIR_H */
/* Define to 1 if you have the <sys/file.h> header file. */
#define HAVE_SYS_FILE_H 1
/* Define to 1 if you have the <sys/ndir.h> header file, and it defines `DIR'.
*/
/* #undef HAVE_SYS_NDIR_H */
/* Define to 1 if you have the <sys/param.h> header file. */
#define HAVE_SYS_PARAM_H 1
/* Define to 1 if you have the <sys/socket.h> header file. */
#define HAVE_SYS_SOCKET_H 1
/* Define to 1 if you have the <sys/stat.h> header file. */
#define HAVE_SYS_STAT_H 1
/* Define to 1 if you have the <sys/time.h> header file. */
#define HAVE_SYS_TIME_H 1
/* Define to 1 if you have the <sys/types.h> header file. */
#define HAVE_SYS_TYPES_H 1
/* Define to 1 if you have the <sys/uio.h> header file. */
#define HAVE_SYS_UIO_H 1
/* Define to 1 if you have <sys/wait.h> that is POSIX.1 compatible. */
#define HAVE_SYS_WAIT_H 1
/* Define to 1 if you have the <unistd.h> header file. */
#define HAVE_UNISTD_H 1
/* Define to 1 if you have the <varargs.h> header file. */
/* #undef HAVE_VARARGS_H */
/* Does the system have vsnprintf()? */
#define HAVE_VSNPRINTF /**/
/* Define to 1 if you have the <ws2tcpip.h> header file. */
/* #undef HAVE_WS2TCPIP_H */
/* Should we keep handle to DB open in SASLDB plugin? */
/* #undef KEEP_DB_OPEN */
/* Ignore IP Address in Kerberos 4 tickets? */
/* #undef KRB4_IGNORE_IP_ADDRESS */
/* Using Heimdal */
/* #undef KRB5_HEIMDAL */
/* Define to the sub-directory where libtool stores uninstalled libraries. */
#define LT_OBJDIR ".libs/"
/* Name of package */
#define PACKAGE "cyrus-sasl"
/* Define to the address where bug reports for this package should be sent. */
#define PACKAGE_BUGREPORT "https://github.com/cyrusimap/cyrus-sasl/issues"
/* Define to the full name of this package. */
#define PACKAGE_NAME "cyrus-sasl"
/* Define to the full name and version of this package. */
#define PACKAGE_STRING "cyrus-sasl 2.1.27"
/* Define to the one symbol short name of this package. */
#define PACKAGE_TARNAME "cyrus-sasl"
/* Define to the home page for this package. */
#define PACKAGE_URL "http://www.cyrusimap.org"
/* Define to the version of this package. */
#define PACKAGE_VERSION "2.1.27"
/* Where do we look for Courier authdaemond's socket? */
#define PATH_AUTHDAEMON_SOCKET "/dev/null"
/* Where do we look for saslauthd's socket? */
#define PATH_SASLAUTHD_RUNDIR "/var/state/saslauthd"
/* Force a preferred mechanism */
/* #undef PREFER_MECH */
/* Location of pwcheck socket */
/* #undef PWCHECKDIR */
/* Define as the return type of signal handlers (`int' or `void'). */
#define RETSIGTYPE void
/* Use BerkeleyDB for SASLdb */
/* #undef SASL_BERKELEYDB */
/* Path to default SASLdb database */
#define SASL_DB_PATH "/etc/sasldb2"
/* File to use for source of randomness */
#define SASL_DEV_RANDOM "/dev/urandom"
/* Use GDBM for SASLdb */
/* #undef SASL_GDBM */
/* Use LMDB for SASLdb */
/* #undef SASL_LMDB */
/* Use NDBM for SASLdb */
/* #undef SASL_NDBM */
/* The size of `long', as computed by sizeof. */
#define SIZEOF_LONG 8
/* Link ANONYMOUS Statically */
// #define STATIC_ANONYMOUS /**/
/* Link CRAM-MD5 Statically */
// #define STATIC_CRAMMD5 /**/
/* Link DIGEST-MD5 Statically */
// #define STATIC_DIGESTMD5 /**/
/* Link GSSAPI Statically */
#define STATIC_GSSAPIV2 /**/
/* Use KERBEROS_V4 Statically */
/* #undef STATIC_KERBEROS4 */
/* Link ldapdb plugin Statically */
/* #undef STATIC_LDAPDB */
/* Link LOGIN Statically */
/* #undef STATIC_LOGIN */
/* Link NTLM Statically */
/* #undef STATIC_NTLM */
/* Link OTP Statically */
// #define STATIC_OTP /**/
/* Link PASSDSS Statically */
/* #undef STATIC_PASSDSS */
/* Link PLAIN Statically */
// #define STATIC_PLAIN /**/
/* Link SASLdb Statically */
// #define STATIC_SASLDB /**/
/* Link SCRAM Statically */
// #define STATIC_SCRAM /**/
/* Link SQL plugin statically */
/* #undef STATIC_SQL */
/* Link SRP Statically */
/* #undef STATIC_SRP */
/* Define to 1 if you have the ANSI C header files. */
#define STDC_HEADERS 1
/* Define to 1 if you can safely include both <sys/time.h> and <time.h>. */
#define TIME_WITH_SYS_TIME 1
/* Should we try to dlopen() plugins while statically compiled? */
/* #undef TRY_DLOPEN_WHEN_STATIC */
/* use the doors IPC API for saslauthd? */
/* #undef USE_DOORS */
/* Enable extensions on AIX 3, Interix. */
#ifndef _ALL_SOURCE
# define _ALL_SOURCE 1
#endif
/* Enable GNU extensions on systems that have them. */
#ifndef _GNU_SOURCE
# define _GNU_SOURCE 1
#endif
/* Enable threading extensions on Solaris. */
#ifndef _POSIX_PTHREAD_SEMANTICS
# define _POSIX_PTHREAD_SEMANTICS 1
#endif
/* Enable extensions on HP NonStop. */
#ifndef _TANDEM_SOURCE
# define _TANDEM_SOURCE 1
#endif
/* Enable general extensions on Solaris. */
#ifndef __EXTENSIONS__
# define __EXTENSIONS__ 1
#endif
/* Version number of package */
#define VERSION "2.1.27"
/* Use DES */
#define WITH_DES /**/
/* Linking against dmalloc? */
/* #undef WITH_DMALLOC */
/* Use RC4 */
#define WITH_RC4 /**/
/* Use OpenSSL DES Implementation */
#define WITH_SSL_DES /**/
/* Define to 1 if on MINIX. */
/* #undef _MINIX */
/* Define to 2 if the system does not provide POSIX.1 features except with
this defined. */
/* #undef _POSIX_1_SOURCE */
/* Define to 1 if you need to in order for `stat' and other things to work. */
/* #undef _POSIX_SOURCE */
/* Define to empty if `const' does not conform to ANSI C. */
/* #undef const */
/* Define to `__inline__' or `__inline' if that's what the C compiler
calls it, or to nothing if 'inline' is not supported under any name. */
#ifndef __cplusplus
/* #undef inline */
#endif
/* Define to `int' if <sys/types.h> does not define. */
/* #undef mode_t */
/* Define to `int' if <sys/types.h> does not define. */
/* #undef pid_t */
/* Create a struct iovec if we need one */
#if !defined(_WIN32)
#if !defined(HAVE_SYS_UIO_H)
/* (win32 is handled in sasl.h) */
struct iovec {
char *iov_base;
long iov_len;
};
#else
#include <sys/types.h>
#include <sys/uio.h>
#endif
#endif
/* location of the random number generator */
#ifdef DEV_RANDOM
/* #undef DEV_RANDOM */
#endif
#define DEV_RANDOM SASL_DEV_RANDOM
/* if we've got krb_get_err_txt, we might as well use it;
especially since krb_err_txt isn't in some newer distributions
(MIT Kerb for Mac 4 being a notable example). If we don't have
it, we fall back to the krb_err_txt array */
#ifdef HAVE_KRB_GET_ERR_TEXT
#define get_krb_err_txt krb_get_err_text
#else
#define get_krb_err_txt(X) (krb_err_txt[(X)])
#endif
/* Make Solaris happy... */
#ifndef __EXTENSIONS__
#define __EXTENSIONS__ 1
#endif
/* Make Linux happy... */
#ifndef _GNU_SOURCE
#define _GNU_SOURCE 1
#endif
#define SASL_PATH_ENV_VAR "SASL_PATH"
#define SASL_CONF_PATH_ENV_VAR "SASL_CONF_PATH"
#include <stdlib.h>
#include <sys/types.h>
#ifndef WIN32
# include <sys/socket.h>
# include <netdb.h>
# include <netinet/in.h>
# ifdef HAVE_SYS_PARAM_H
# include <sys/param.h>
# endif
#else /* WIN32 */
# include <winsock2.h>
#endif /* WIN32 */
#include <string.h>
#ifndef HAVE_SOCKLEN_T
typedef unsigned int socklen_t;
#endif /* HAVE_SOCKLEN_T */
#if !defined(HAVE_STRUCT_SOCKADDR_STORAGE) && !defined(WIN32)
#define _SS_MAXSIZE 128 /* Implementation specific max size */
#define _SS_PADSIZE (_SS_MAXSIZE - sizeof (struct sockaddr))
struct sockaddr_storage {
struct sockaddr ss_sa;
char __ss_pad2[_SS_PADSIZE];
};
# define ss_family ss_sa.sa_family
#endif /* !HAVE_STRUCT_SOCKADDR_STORAGE */
#ifndef AF_INET6
/* Define it to something that should never appear */
#define AF_INET6 AF_MAX
#endif
#ifndef HAVE_GETADDRINFO
#define getaddrinfo sasl_getaddrinfo
#define freeaddrinfo sasl_freeaddrinfo
#define gai_strerror sasl_gai_strerror
#endif
#ifndef HAVE_GETNAMEINFO
#define getnameinfo sasl_getnameinfo
#endif
#if !defined(HAVE_GETNAMEINFO) || !defined(HAVE_GETADDRINFO)
#include "gai.h"
#endif
#ifndef AI_NUMERICHOST /* support glibc 2.0.x */
#define AI_NUMERICHOST 4
#define NI_NUMERICHOST 2
#define NI_NAMEREQD 4
#define NI_NUMERICSERV 8
#endif
#ifndef HAVE_SYSEXITS_H
#include "exits.h"
#else
#include "sysexits.h"
#endif
/* Get the correct time.h */
#if TIME_WITH_SYS_TIME
# include <sys/time.h>
# include <time.h>
#else
# if HAVE_SYS_TIME_H
# include <sys/time.h>
# else
# include <time.h>
# endif
#endif
#ifndef HIER_DELIMITER
#define HIER_DELIMITER '/'
#endif
#ifdef WIN32
#define SASL_ROOT_KEY "SOFTWARE\\Carnegie Mellon\\Project Cyrus\\SASL Library"
#define SASL_PLUGIN_PATH_ATTR "SearchPath"
#define SASL_CONF_PATH_ATTR "ConfFile"
#include <windows.h>
inline static unsigned int sleep(unsigned int seconds) {
Sleep(seconds * 1000);
return 0;
}
#endif
/* handy string manipulation functions */
#ifndef HAVE_STRLCPY
extern size_t saslauthd_strlcpy(char *dst, const char *src, size_t len);
#define strlcpy(x,y,z) saslauthd_strlcpy((x),(y),(z))
#endif
#ifndef HAVE_STRLCAT
extern size_t saslauthd_strlcat(char *dst, const char *src, size_t len);
#define strlcat(x,y,z) saslauthd_strlcat((x),(y),(z))
#endif
#ifndef HAVE_ASPRINTF
extern int asprintf(char **str, const char *fmt, ...);
#endif
#endif /* CONFIG_H */
#if defined __GNUC__ && __GNUC__ > 6
#define GCC_FALLTHROUGH __attribute__((fallthrough));
#else
#define GCC_FALLTHROUGH /* fall through */
#endif

contrib/krb5 vendored Submodule

@@ -0,0 +1 @@
Subproject commit 99f7ad2831a01f264c07eed42a0a3a9336b86184


@@ -0,0 +1,670 @@
find_program(AWK_PROGRAM awk)
if(NOT AWK_PROGRAM)
message(FATAL_ERROR "You need the awk program to build ClickHouse with krb5 enabled.")
endif()
set(KRB5_SOURCE_DIR ${ClickHouse_SOURCE_DIR}/contrib/krb5/src)
set(ALL_SRCS
${KRB5_SOURCE_DIR}/util/et/et_name.c
${KRB5_SOURCE_DIR}/util/et/com_err.c
${KRB5_SOURCE_DIR}/util/et/error_message.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_inq_names.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_rel_name.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_unwrap_aead.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_set_name_attr.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_glue.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_imp_cred.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/gssd_pname_to_uid.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_authorize_localname.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_prf.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_acquire_cred_with_pw.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_set_cred_option.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_map_name_to_any.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_inq_cred.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_rel_cred.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_seal.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_delete_sec_context.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_context_time.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_get_name_attr.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_mech_invoke.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_unwrap_iov.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_exp_sec_context.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_init_sec_context.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_accept_sec_context.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_verify.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_sign.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_mechname.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_mechattr.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_complete_auth_token.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_wrap_aead.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_inq_cred_oid.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_rel_buffer.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_initialize.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_export_name_comp.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_set_context_option.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_acquire_cred.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_acquire_cred_imp_name.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_imp_name.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_inq_name.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_set_neg_mechs.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_inq_context.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_export_cred.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_oid_ops.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_inq_context_oid.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_del_name_attr.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_decapsulate_token.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_compare_name.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_rel_name_mapping.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_imp_sec_context.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_dup_name.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_export_name.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_wrap_iov.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_rel_oid_set.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_unseal.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_store_cred.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_buffer_set.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_canon_name.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_dsp_status.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_dsp_name.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_dsp_name_ext.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_saslname.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_process_context.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_encapsulate_token.c
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue/g_negoex.c
${KRB5_SOURCE_DIR}/lib/gssapi/krb5/delete_sec_context.c
${KRB5_SOURCE_DIR}/lib/gssapi/krb5/lucid_context.c
${KRB5_SOURCE_DIR}/lib/gssapi/krb5/duplicate_name.c
${KRB5_SOURCE_DIR}/lib/gssapi/krb5/get_tkt_flags.c
${KRB5_SOURCE_DIR}/lib/gssapi/krb5/set_allowable_enctypes.c
${KRB5_SOURCE_DIR}/lib/gssapi/krb5/k5sealiov.c
${KRB5_SOURCE_DIR}/lib/gssapi/krb5/gssapi_err_krb5.c
${KRB5_SOURCE_DIR}/lib/gssapi/krb5/canon_name.c
${KRB5_SOURCE_DIR}/lib/gssapi/krb5/inq_cred.c
${KRB5_SOURCE_DIR}/lib/gssapi/krb5/export_sec_context.c
${KRB5_SOURCE_DIR}/lib/gssapi/krb5/inq_names.c
${KRB5_SOURCE_DIR}/lib/gssapi/krb5/prf.c
${KRB5_SOURCE_DIR}/lib/gssapi/krb5/k5sealv3iov.c
${KRB5_SOURCE_DIR}/lib/gssapi/krb5/store_cred.c
${KRB5_SOURCE_DIR}/lib/gssapi/krb5/import_name.c
${KRB5_SOURCE_DIR}/lib/gssapi/krb5/export_name.c
${KRB5_SOURCE_DIR}/lib/gssapi/krb5/naming_exts.c
${KRB5_SOURCE_DIR}/lib/gssapi/krb5/s4u_gss_glue.c
${KRB5_SOURCE_DIR}/lib/gssapi/krb5/rel_name.c
${KRB5_SOURCE_DIR}/lib/gssapi/krb5/k5unsealiov.c
${KRB5_SOURCE_DIR}/lib/gssapi/krb5/gssapi_krb5.c
${KRB5_SOURCE_DIR}/lib/gssapi/krb5/disp_status.c
${KRB5_SOURCE_DIR}/lib/gssapi/krb5/import_cred.c
${KRB5_SOURCE_DIR}/lib/gssapi/krb5/k5seal.c
${KRB5_SOURCE_DIR}/lib/gssapi/krb5/accept_sec_context.c
${KRB5_SOURCE_DIR}/lib/gssapi/krb5/import_sec_context.c
${KRB5_SOURCE_DIR}/lib/gssapi/krb5/process_context_token.c
${KRB5_SOURCE_DIR}/lib/gssapi/krb5/disp_name.c
${KRB5_SOURCE_DIR}/lib/gssapi/krb5/wrap_size_limit.c
${KRB5_SOURCE_DIR}/lib/gssapi/krb5/krb5_gss_glue.c
${KRB5_SOURCE_DIR}/lib/gssapi/krb5/util_crypt.c
${KRB5_SOURCE_DIR}/lib/gssapi/krb5/set_ccache.c
${KRB5_SOURCE_DIR}/lib/gssapi/krb5/export_cred.c
${KRB5_SOURCE_DIR}/lib/gssapi/krb5/rel_oid.c
${KRB5_SOURCE_DIR}/lib/gssapi/krb5/val_cred.c
${KRB5_SOURCE_DIR}/lib/gssapi/krb5/context_time.c
${KRB5_SOURCE_DIR}/lib/gssapi/krb5/cred_store.c
${KRB5_SOURCE_DIR}/lib/gssapi/krb5/iakerb.c
${KRB5_SOURCE_DIR}/lib/gssapi/krb5/copy_ccache.c
${KRB5_SOURCE_DIR}/lib/gssapi/krb5/init_sec_context.c
${KRB5_SOURCE_DIR}/lib/gssapi/krb5/indicate_mechs.c
${KRB5_SOURCE_DIR}/lib/gssapi/krb5/inq_context.c
${KRB5_SOURCE_DIR}/lib/gssapi/krb5/util_seed.c
${KRB5_SOURCE_DIR}/lib/gssapi/krb5/util_seqnum.c
${KRB5_SOURCE_DIR}/lib/gssapi/krb5/compare_name.c
${KRB5_SOURCE_DIR}/lib/gssapi/krb5/ser_sctx.c
${KRB5_SOURCE_DIR}/lib/gssapi/krb5/k5sealv3.c
${KRB5_SOURCE_DIR}/lib/gssapi/krb5/acquire_cred.c
${KRB5_SOURCE_DIR}/lib/gssapi/krb5/k5unseal.c
${KRB5_SOURCE_DIR}/lib/gssapi/krb5/rel_cred.c
${KRB5_SOURCE_DIR}/lib/gssapi/krb5/util_cksum.c
${KRB5_SOURCE_DIR}/lib/gssapi/generic/disp_com_err_status.c
${KRB5_SOURCE_DIR}/lib/gssapi/generic/gssapi_generic.c
${KRB5_SOURCE_DIR}/lib/gssapi/generic/rel_oid_set.c
${KRB5_SOURCE_DIR}/lib/gssapi/generic/oid_ops.c
${KRB5_SOURCE_DIR}/lib/gssapi/generic/util_buffer.c
${KRB5_SOURCE_DIR}/lib/gssapi/generic/util_buffer_set.c
${KRB5_SOURCE_DIR}/lib/gssapi/generic/util_set.c
${KRB5_SOURCE_DIR}/lib/gssapi/generic/util_token.c
${KRB5_SOURCE_DIR}/lib/gssapi/generic/gssapi_err_generic.c
${KRB5_SOURCE_DIR}/lib/gssapi/generic/disp_major_status.c
${KRB5_SOURCE_DIR}/lib/gssapi/generic/util_seqstate.c
${KRB5_SOURCE_DIR}/lib/gssapi/generic/util_errmap.c
${KRB5_SOURCE_DIR}/lib/gssapi/generic/rel_buffer.c
${KRB5_SOURCE_DIR}/lib/gssapi/spnego/spnego_mech.c
${KRB5_SOURCE_DIR}/lib/gssapi/spnego/negoex_util.c
${KRB5_SOURCE_DIR}/lib/gssapi/spnego/negoex_ctx.c
# ${KRB5_SOURCE_DIR}/lib/gssapi/spnego/negoex_trace.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/prng.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/enc_dk_cmac.c
# ${KRB5_SOURCE_DIR}/lib/crypto/krb/crc32.c
# ${KRB5_SOURCE_DIR}/lib/crypto/krb/checksum_cbc.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/enctype_util.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/enc_etm.c
# ${KRB5_SOURCE_DIR}/lib/crypto/krb/combine_keys.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/default_state.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/decrypt_iov.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/checksum_dk_cmac.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/etypes.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/old_api_glue.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/cksumtypes.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/prf_cmac.c
# ${KRB5_SOURCE_DIR}/lib/crypto/krb/enc_old.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/decrypt.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/prf_dk.c
# ${KRB5_SOURCE_DIR}/lib/crypto/krb/s2k_des.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/checksum_unkeyed.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/crypto_length.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/block_size.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/string_to_key.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/verify_checksum.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/crypto_libinit.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/derive.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/random_to_key.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/verify_checksum_iov.c
# ${KRB5_SOURCE_DIR}/lib/crypto/krb/checksum_confounder.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/checksum_length.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/enc_dk_hmac.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/make_checksum.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/prf_des.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/prf.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/coll_proof_cksum.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/enc_rc4.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/cf2.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/aead.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/encrypt_iov.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/cksumtype_to_string.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/key.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/enc_raw.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/keylengths.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/checksum_hmac_md5.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/keyed_cksum.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/keyed_checksum_types.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/prf_aes2.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/state.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/checksum_dk_hmac.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/encrypt.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/checksum_etm.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/make_random_key.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/string_to_cksumtype.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/mandatory_sumtype.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/make_checksum_iov.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/s2k_rc4.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/valid_cksumtype.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/nfold.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/prng_fortuna.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/encrypt_length.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/cmac.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/keyblocks.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/prf_rc4.c
${KRB5_SOURCE_DIR}/lib/crypto/krb/s2k_pbkdf2.c
${KRB5_SOURCE_DIR}/lib/crypto/openssl/enc_provider/aes.c
# ${KRB5_SOURCE_DIR}/lib/crypto/openssl/enc_provider/des.c
${KRB5_SOURCE_DIR}/lib/crypto/openssl/enc_provider/rc4.c
${KRB5_SOURCE_DIR}/lib/crypto/openssl/enc_provider/des3.c
${KRB5_SOURCE_DIR}/lib/crypto/openssl/enc_provider/camellia.c
${KRB5_SOURCE_DIR}/lib/crypto/openssl/sha256.c
${KRB5_SOURCE_DIR}/lib/crypto/openssl/hmac.c
${KRB5_SOURCE_DIR}/lib/crypto/openssl/pbkdf2.c
${KRB5_SOURCE_DIR}/lib/crypto/openssl/init.c
${KRB5_SOURCE_DIR}/lib/crypto/openssl/stubs.c
# ${KRB5_SOURCE_DIR}/lib/crypto/openssl/hash_provider/hash_crc32.c
${KRB5_SOURCE_DIR}/lib/crypto/openssl/hash_provider/hash_evp.c
${KRB5_SOURCE_DIR}/lib/crypto/openssl/des/des_keys.c
${KRB5_SOURCE_DIR}/util/support/fake-addrinfo.c
${KRB5_SOURCE_DIR}/util/support/k5buf.c
${KRB5_SOURCE_DIR}/util/support/hex.c
${KRB5_SOURCE_DIR}/util/support/threads.c
${KRB5_SOURCE_DIR}/util/support/utf8.c
${KRB5_SOURCE_DIR}/util/support/hashtab.c
${KRB5_SOURCE_DIR}/util/support/dir_filenames.c
${KRB5_SOURCE_DIR}/util/support/base64.c
${KRB5_SOURCE_DIR}/util/support/strerror_r.c
${KRB5_SOURCE_DIR}/util/support/plugins.c
${KRB5_SOURCE_DIR}/util/support/path.c
${KRB5_SOURCE_DIR}/util/support/init-addrinfo.c
${KRB5_SOURCE_DIR}/util/support/json.c
${KRB5_SOURCE_DIR}/util/support/errors.c
${KRB5_SOURCE_DIR}/util/support/utf8_conv.c
${KRB5_SOURCE_DIR}/util/support/strlcpy.c
${KRB5_SOURCE_DIR}/util/support/gmt_mktime.c
${KRB5_SOURCE_DIR}/util/support/zap.c
${KRB5_SOURCE_DIR}/util/support/bcmp.c
${KRB5_SOURCE_DIR}/util/support/secure_getenv.c
${KRB5_SOURCE_DIR}/util/profile/prof_tree.c
${KRB5_SOURCE_DIR}/util/profile/prof_file.c
${KRB5_SOURCE_DIR}/util/profile/prof_parse.c
${KRB5_SOURCE_DIR}/util/profile/prof_get.c
${KRB5_SOURCE_DIR}/util/profile/prof_set.c
${KRB5_SOURCE_DIR}/util/profile/prof_err.c
${KRB5_SOURCE_DIR}/util/profile/prof_init.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/fwd_tgt.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/conv_creds.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/fast.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/ser_adata.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/copy_tick.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/enc_keyhelper.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/ser_actx.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/init_ctx.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/preauth2.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/copy_princ.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/parse_host_string.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/pr_to_salt.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/rd_req.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/pac_sign.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/copy_addrs.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/conv_princ.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/rd_rep.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/str_conv.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/gic_opt.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/recvauth.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/copy_cksum.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/ai_authdata.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/ser_ctx.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/appdefault.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/bld_princ.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/in_tkt_sky.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/copy_creds.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/auth_con.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/copy_key.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/kdc_rep_dc.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/mk_cred.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/gic_keytab.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/rd_req_dec.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/set_realm.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/preauth_sam2.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/libdef_parse.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/privsafe.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/ser_auth.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/val_renew.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/addr_order.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/authdata_dec.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/walk_rtree.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/gen_subkey.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/copy_auth.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/chpw.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/mk_req.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/allow_weak.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/mk_rep.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/mk_priv.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/s4u_authdata.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/preauth_otp.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/init_keyblock.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/ser_addr.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/encrypt_tk.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/s4u_creds.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/srv_dec_tkt.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/rd_priv.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/authdata_enc.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/authdata_exp.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/decode_kdc.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/decrypt_tk.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/enc_helper.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/mk_req_ext.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/ser_key.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/preauth_encts.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/send_tgs.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/ser_cksum.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/tgtname.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/encode_kdc.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/rd_cred.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/rd_safe.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/preauth_pkinit.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/srv_rcache.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/chk_trans.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/etype_list.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/get_creds.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/ser_princ.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/gic_pwd.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/authdata.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/gen_save_subkey.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/vfy_increds.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/addr_comp.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/kfree.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/response_items.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/serialize.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/cammac_util.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/gc_via_tkt.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/copy_ctx.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/sendauth.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/addr_srch.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/mk_safe.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/preauth_ec.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/bld_pr_ext.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/random_str.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/sname_match.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/princ_comp.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/get_in_tkt.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/gen_seqnum.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/cp_key_cnt.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/mk_error.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/copy_athctr.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/deltat.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/get_etype_info.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/plugin.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/kerrs.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/vic_opt.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/unparse.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/parse.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/rd_error.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/pac.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/valid_times.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/copy_data.c
${KRB5_SOURCE_DIR}/lib/krb5/krb/padata.c
${KRB5_SOURCE_DIR}/lib/krb5/os/hostrealm.c
${KRB5_SOURCE_DIR}/lib/krb5/os/thread_safe.c
${KRB5_SOURCE_DIR}/lib/krb5/os/krbfileio.c
${KRB5_SOURCE_DIR}/lib/krb5/os/toffset.c
${KRB5_SOURCE_DIR}/lib/krb5/os/hostaddr.c
${KRB5_SOURCE_DIR}/lib/krb5/os/ustime.c
${KRB5_SOURCE_DIR}/lib/krb5/os/timeofday.c
${KRB5_SOURCE_DIR}/lib/krb5/os/ccdefname.c
${KRB5_SOURCE_DIR}/lib/krb5/os/full_ipadr.c
${KRB5_SOURCE_DIR}/lib/krb5/os/read_pwd.c
${KRB5_SOURCE_DIR}/lib/krb5/os/trace.c
${KRB5_SOURCE_DIR}/lib/krb5/os/localauth_k5login.c
${KRB5_SOURCE_DIR}/lib/krb5/os/localauth_rule.c
${KRB5_SOURCE_DIR}/lib/krb5/os/localaddr.c
${KRB5_SOURCE_DIR}/lib/krb5/os/hostrealm_dns.c
${KRB5_SOURCE_DIR}/lib/krb5/os/hostrealm_domain.c
${KRB5_SOURCE_DIR}/lib/krb5/os/sn2princ.c
${KRB5_SOURCE_DIR}/lib/krb5/os/net_write.c
${KRB5_SOURCE_DIR}/lib/krb5/os/gen_rname.c
${KRB5_SOURCE_DIR}/lib/krb5/os/net_read.c
${KRB5_SOURCE_DIR}/lib/krb5/os/accessor.c
${KRB5_SOURCE_DIR}/lib/krb5/os/hostrealm_profile.c
${KRB5_SOURCE_DIR}/lib/krb5/os/c_ustime.c
${KRB5_SOURCE_DIR}/lib/krb5/os/expand_path.c
${KRB5_SOURCE_DIR}/lib/krb5/os/port2ip.c
${KRB5_SOURCE_DIR}/lib/krb5/os/changepw.c
${KRB5_SOURCE_DIR}/lib/krb5/os/unlck_file.c
${KRB5_SOURCE_DIR}/lib/krb5/os/gen_port.c
${KRB5_SOURCE_DIR}/lib/krb5/os/localauth_an2ln.c
${KRB5_SOURCE_DIR}/lib/krb5/os/genaddrs.c
${KRB5_SOURCE_DIR}/lib/krb5/os/init_os_ctx.c
${KRB5_SOURCE_DIR}/lib/krb5/os/localauth.c
${KRB5_SOURCE_DIR}/lib/krb5/os/locate_kdc.c
${KRB5_SOURCE_DIR}/lib/krb5/os/prompter.c
${KRB5_SOURCE_DIR}/lib/krb5/os/ktdefname.c
${KRB5_SOURCE_DIR}/lib/krb5/os/realm_dom.c
${KRB5_SOURCE_DIR}/lib/krb5/os/dnssrv.c
${KRB5_SOURCE_DIR}/lib/krb5/os/mk_faddr.c
# ${KRB5_SOURCE_DIR}/lib/krb5/os/dnsglue.c
${KRB5_SOURCE_DIR}/lib/krb5/os/sendto_kdc.c
${KRB5_SOURCE_DIR}/lib/krb5/os/hostrealm_registry.c
${KRB5_SOURCE_DIR}/lib/krb5/os/write_msg.c
${KRB5_SOURCE_DIR}/lib/krb5/os/localauth_names.c
${KRB5_SOURCE_DIR}/lib/krb5/os/read_msg.c
${KRB5_SOURCE_DIR}/lib/krb5/os/lock_file.c
${KRB5_SOURCE_DIR}/lib/krb5/ccache/ccselect.c
${KRB5_SOURCE_DIR}/lib/krb5/ccache/ccselect_realm.c
# ${KRB5_SOURCE_DIR}/lib/krb5/ccache/ser_cc.c
${KRB5_SOURCE_DIR}/lib/krb5/ccache/ccdefops.c
${KRB5_SOURCE_DIR}/lib/krb5/ccache/cc_retr.c
${KRB5_SOURCE_DIR}/lib/krb5/ccache/ccselect_k5identity.c
${KRB5_SOURCE_DIR}/lib/krb5/ccache/cccopy.c
${KRB5_SOURCE_DIR}/lib/krb5/ccache/ccfns.c
${KRB5_SOURCE_DIR}/lib/krb5/ccache/cc_file.c
${KRB5_SOURCE_DIR}/lib/krb5/ccache/ccbase.c
${KRB5_SOURCE_DIR}/lib/krb5/ccache/cccursor.c
${KRB5_SOURCE_DIR}/lib/krb5/ccache/ccdefault.c
${KRB5_SOURCE_DIR}/lib/krb5/ccache/cc_memory.c
${KRB5_SOURCE_DIR}/lib/krb5/ccache/ccmarshal.c
${KRB5_SOURCE_DIR}/lib/krb5/ccache/ccselect_hostname.c
${KRB5_SOURCE_DIR}/lib/krb5/ccache/cc_dir.c
${KRB5_SOURCE_DIR}/lib/krb5/ccache/cc_keyring.c
${KRB5_SOURCE_DIR}/lib/krb5/ccache/cc_kcm.c
${KRB5_SOURCE_DIR}/lib/krb5/keytab/ktadd.c
${KRB5_SOURCE_DIR}/lib/krb5/keytab/ktbase.c
${KRB5_SOURCE_DIR}/lib/krb5/keytab/ktdefault.c
${KRB5_SOURCE_DIR}/lib/krb5/keytab/kt_memory.c
${KRB5_SOURCE_DIR}/lib/krb5/keytab/ktfns.c
${KRB5_SOURCE_DIR}/lib/krb5/keytab/ktremove.c
${KRB5_SOURCE_DIR}/lib/krb5/keytab/read_servi.c
${KRB5_SOURCE_DIR}/lib/krb5/keytab/kt_file.c
${KRB5_SOURCE_DIR}/lib/krb5/keytab/ktfr_entry.c
${KRB5_SOURCE_DIR}/lib/krb5/error_tables/k5e1_err.c
${KRB5_SOURCE_DIR}/lib/krb5/error_tables/kdb5_err.c
${KRB5_SOURCE_DIR}/lib/krb5/error_tables/asn1_err.c
${KRB5_SOURCE_DIR}/lib/krb5/error_tables/krb5_err.c
${KRB5_SOURCE_DIR}/lib/krb5/error_tables/krb524_err.c
${KRB5_SOURCE_DIR}/lib/krb5/error_tables/kv5m_err.c
${KRB5_SOURCE_DIR}/lib/krb5/rcache/rc_base.c
${KRB5_SOURCE_DIR}/lib/krb5/rcache/rc_dfl.c
${KRB5_SOURCE_DIR}/lib/krb5/rcache/rc_file2.c
${KRB5_SOURCE_DIR}/lib/krb5/rcache/rc_none.c
${KRB5_SOURCE_DIR}/lib/krb5/rcache/memrcache.c
${KRB5_SOURCE_DIR}/lib/krb5/unicode/ucdata/ucdata.c
${KRB5_SOURCE_DIR}/lib/krb5/unicode/ucstr.c
${KRB5_SOURCE_DIR}/lib/krb5/asn.1/asn1_encode.c
${KRB5_SOURCE_DIR}/lib/krb5/asn.1/asn1_k_encode.c
${KRB5_SOURCE_DIR}/lib/krb5/asn.1/ldap_key_seq.c
${KRB5_SOURCE_DIR}/lib/krb5/krb5_libinit.c
)
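# Build krb5's error-table compiler, compile_et, from its shell template:
# config_script specializes compile_et.sh for this tree, and the resulting
# script is invoked through perl below to turn .et files into C sources.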
add_custom_command(
OUTPUT ${KRB5_SOURCE_DIR}/util/et/compile_et
COMMAND /bin/sh
./config_script
./compile_et.sh
"/usr/local/share/et"
${AWK_PROGRAM}
sed
>
compile_et
DEPENDS ${KRB5_SOURCE_DIR}/util/et/compile_et.sh ${KRB5_SOURCE_DIR}/util/et/config_script
WORKING_DIRECTORY "${KRB5_SOURCE_DIR}/util/et"
)
add_custom_target(
CREATE_COMPILE_ET ALL
DEPENDS ${KRB5_SOURCE_DIR}/util/et/compile_et
COMMENT "creating compile_et"
VERBATIM
)
file(GLOB_RECURSE ET_FILES
"${KRB5_SOURCE_DIR}/*.et"
)
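# preprocess_et() maps every error-table (.et) file to the .c/.h pair that
# compile_et generates for it and returns the list of generated .c files in
# the variable named by 'out_var'.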
function(preprocess_et out_var)
set(result)
foreach(in_f ${ARGN})
string(REPLACE
.et
.c
F_C
${in_f}
)
string(REPLACE
.et
.h
F_H
${in_f}
)
get_filename_component(ET_PATH ${in_f} DIRECTORY)
add_custom_command(OUTPUT ${F_C} ${F_H}
COMMAND perl ${KRB5_SOURCE_DIR}/util/et/compile_et -d "${KRB5_SOURCE_DIR}/util/et" ${in_f}
DEPENDS ${in_f} ${KRB5_SOURCE_DIR}/util/et/compile_et
WORKING_DIRECTORY ${ET_PATH}
COMMENT "Creating preprocessed file ${F_C}"
VERBATIM
)
list(APPEND result ${F_C})
endforeach()
set(${out_var} "${result}" PARENT_SCOPE)
endfunction()
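# gen-map.pl emits a typed map implementation as a header; error_map.h holds
# the OM_uint32 -> error-string map used by the GSSAPI krb5 mechanism.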
add_custom_command(
OUTPUT ${KRB5_SOURCE_DIR}/lib/gssapi/krb5/error_map.h
COMMAND perl
-I../../../util
../../../util/gen-map.pl
-oerror_map.h
NAME=gsserrmap
KEY=OM_uint32
VALUE=char*
COMPARE=compare_OM_uint32
FREEVALUE=free_string
WORKING_DIRECTORY "${KRB5_SOURCE_DIR}/lib/gssapi/krb5"
)
add_custom_target(
ERROR_MAP_H ALL
DEPENDS ${KRB5_SOURCE_DIR}/lib/gssapi/krb5/error_map.h
COMMENT "generating error_map.h"
VERBATIM
)
add_custom_command(
OUTPUT ${KRB5_SOURCE_DIR}/lib/gssapi/generic/errmap.h
COMMAND perl -w -I../../../util ../../../util/gen.pl bimap errmap.h NAME=mecherrmap LEFT=OM_uint32 RIGHT=struct\ mecherror LEFTPRINT=print_OM_uint32 RIGHTPRINT=mecherror_print LEFTCMP=cmp_OM_uint32 RIGHTCMP=mecherror_cmp
WORKING_DIRECTORY "${KRB5_SOURCE_DIR}/lib/gssapi/generic"
)
add_custom_target(
ERRMAP_H ALL
DEPENDS ${KRB5_SOURCE_DIR}/lib/gssapi/generic/errmap.h
COMMENT "generating errmap.h"
VERBATIM
)
add_custom_target(
KRB_5_H ALL
DEPENDS ${CMAKE_CURRENT_BINARY_DIR}/include/krb5/krb5.h
COMMENT "generating krb5.h"
VERBATIM
)
add_library(${KRB5_LIBRARY})
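# add_library() without an initial source list needs CMake >= 3.11; the
# sources are attached below with target_sources().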
add_dependencies(
${KRB5_LIBRARY}
ERRMAP_H
ERROR_MAP_H
KRB_5_H
)
preprocess_et(processed_et_files ${ET_FILES})
target_sources(${KRB5_LIBRARY} PRIVATE
${ALL_SRCS}
)
file(MAKE_DIRECTORY
${CMAKE_CURRENT_BINARY_DIR}/include/gssapi
)
file(GLOB GSSAPI_GENERIC_HEADERS
${KRB5_SOURCE_DIR}/lib/gssapi/generic/*.h
${KRB5_SOURCE_DIR}/lib/gssapi/generic/gssapi.hin
)
file(COPY ${GSSAPI_GENERIC_HEADERS}
DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/include/gssapi/
)
file(RENAME
${CMAKE_CURRENT_BINARY_DIR}/include/gssapi/gssapi.hin
${CMAKE_CURRENT_BINARY_DIR}/include/gssapi/gssapi.h
)
file(COPY ${KRB5_SOURCE_DIR}/lib/gssapi/krb5/gssapi_krb5.h
DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/include/gssapi/
)
file(COPY ${KRB5_SOURCE_DIR}/util/et/com_err.h
DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/include/
)
file(MAKE_DIRECTORY
${CMAKE_CURRENT_BINARY_DIR}/include/krb5
)
SET(KRBHDEP
${KRB5_SOURCE_DIR}/include/krb5/krb5.hin
${KRB5_SOURCE_DIR}/lib/krb5/error_tables/krb5_err.h
${KRB5_SOURCE_DIR}/lib/krb5/error_tables/k5e1_err.h
${KRB5_SOURCE_DIR}/lib/krb5/error_tables/kdb5_err.h
${KRB5_SOURCE_DIR}/lib/krb5/error_tables/kv5m_err.h
${KRB5_SOURCE_DIR}/lib/krb5/error_tables/krb524_err.h
${KRB5_SOURCE_DIR}/lib/krb5/error_tables/asn1_err.h
)
# cmake < 3.18 does not have the 'cmake -E cat' command, so the shell's cat is used
add_custom_command(
OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/include/krb5/krb5.h
COMMAND cat ${KRBHDEP} > ${CMAKE_CURRENT_BINARY_DIR}/include/krb5/krb5.h
DEPENDS ${KRBHDEP}
)
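# A possible alternative for CMake >= 3.18 (an untested sketch): 'cmake -E cat'
# would drop the dependency on an external cat, though the redirect below would
# still rely on the generator running commands through a shell:
# COMMAND ${CMAKE_COMMAND} -E cat ${KRBHDEP} > ${CMAKE_CURRENT_BINARY_DIR}/include/krb5/krb5.h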
target_include_directories(${KRB5_LIBRARY} PUBLIC
${KRB5_SOURCE_DIR}/include
${CMAKE_CURRENT_BINARY_DIR}/include
)
target_include_directories(${KRB5_LIBRARY} PRIVATE
${CMAKE_CURRENT_SOURCE_DIR} #for autoconf.h
${KRB5_SOURCE_DIR}
${KRB5_SOURCE_DIR}/include
${KRB5_SOURCE_DIR}/lib/gssapi/mechglue
${KRB5_SOURCE_DIR}/lib/
${KRB5_SOURCE_DIR}/lib/gssapi
${KRB5_SOURCE_DIR}/lib/gssapi/generic
${KRB5_SOURCE_DIR}/lib/gssapi/krb5
${KRB5_SOURCE_DIR}/lib/gssapi/spnego
${KRB5_SOURCE_DIR}/util/et
${KRB5_SOURCE_DIR}/lib/crypto/openssl
${KRB5_SOURCE_DIR}/lib/crypto/krb
${KRB5_SOURCE_DIR}/util/profile
${KRB5_SOURCE_DIR}/lib/krb5/ccache/ccapi
${KRB5_SOURCE_DIR}/lib/krb5/ccache
${KRB5_SOURCE_DIR}/lib/krb5/keytab
${KRB5_SOURCE_DIR}/lib/krb5/rcache
${KRB5_SOURCE_DIR}/lib/krb5/unicode
${KRB5_SOURCE_DIR}/lib/krb5/os
# ${OPENSSL_INCLUDE_DIR}
)
target_compile_definitions(${KRB5_LIBRARY} PRIVATE
KRB5_PRIVATE
_GSS_STATIC_LINK=1
KRB5_DEPRECATED=1
LOCALEDIR="/usr/local/share/locale"
BINDIR="/usr/local/bin"
SBINDIR="/usr/local/sbin"
LIBDIR="/usr/local/lib"
)
target_link_libraries(${KRB5_LIBRARY}
PRIVATE ${OPENSSL_CRYPTO_LIBRARY}
)
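# Only libcrypto is linked here, matching the lib/crypto/openssl backend
# selected in the source list above.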

View File

@ -0,0 +1,764 @@
/* include/autoconf.h. Generated from autoconf.h.in by configure. */
/* include/autoconf.h.in. Generated from configure.in by autoheader. */
#ifndef KRB5_AUTOCONF_H
#define KRB5_AUTOCONF_H
/* Define if AES-NI support is enabled */
/* #undef AESNI */
/* Define if socket can't be bound to 0.0.0.0 */
/* #undef BROKEN_STREAMS_SOCKETS */
/* Define if va_list objects can be simply copied by assignment. */
/* #undef CAN_COPY_VA_LIST */
/* Define to reduce code size even if it means more cpu usage */
/* #undef CONFIG_SMALL */
/* Define if __attribute__((constructor)) works */
#define CONSTRUCTOR_ATTR_WORKS 1
/* Define to default ccache name */
#define DEFCCNAME "FILE:/tmp/krb5cc_%{uid}"
/* Define to default client keytab name */
#define DEFCKTNAME "FILE:/etc/krb5/user/%{euid}/client.keytab"
/* Define to default keytab name */
#define DEFKTNAME "FILE:/etc/krb5.keytab"
/* Define if library initialization should be delayed until first use */
#define DELAY_INITIALIZER 1
/* Define if __attribute__((destructor)) works */
#define DESTRUCTOR_ATTR_WORKS 1
/* Define to disable PKINIT plugin support */
/* #undef DISABLE_PKINIT */
/* Define if LDAP KDB support within the Kerberos library (mainly ASN.1 code)
should be enabled. */
/* #undef ENABLE_LDAP */
/* Define if translation functions should be used. */
#define ENABLE_NLS 1
/* Define if thread support enabled */
#define ENABLE_THREADS 1
/* Define as return type of endrpcent */
#define ENDRPCENT_TYPE void
/* Define if Fortuna PRNG is selected */
#define FORTUNA 1
/* Define to the type of elements in the array set by `getgroups'. Usually
this is either `int' or `gid_t'. */
#define GETGROUPS_T gid_t
/* Define if gethostbyname_r returns int rather than struct hostent * */
#define GETHOSTBYNAME_R_RETURNS_INT 1
/* Type of getpeername second argument. */
#define GETPEERNAME_ARG3_TYPE GETSOCKNAME_ARG3_TYPE
/* Define if getpwnam_r exists but takes only 4 arguments (e.g., POSIX draft 6
implementations like some Solaris releases). */
/* #undef GETPWNAM_R_4_ARGS */
/* Define if getpwnam_r returns an int */
#define GETPWNAM_R_RETURNS_INT 1
/* Define if getpwuid_r exists but takes only 4 arguments (e.g., POSIX draft 6
implementations like some Solaris releases). */
/* #undef GETPWUID_R_4_ARGS */
/* Define if getservbyname_r returns int rather than struct servent * */
#define GETSERVBYNAME_R_RETURNS_INT 1
/* Type of pointer target for argument 3 to getsockname */
#define GETSOCKNAME_ARG3_TYPE socklen_t
/* Define if gmtime_r returns int instead of struct tm pointer, as on old
HP-UX systems. */
/* #undef GMTIME_R_RETURNS_INT */
/* Define if va_copy macro or function is available. */
#define HAS_VA_COPY 1
/* Define to 1 if you have the `access' function. */
#define HAVE_ACCESS 1
/* Define to 1 if you have the <alloca.h> header file. */
#define HAVE_ALLOCA_H 1
/* Define to 1 if you have the <arpa/inet.h> header file. */
#define HAVE_ARPA_INET_H 1
/* Define to 1 if you have the `bswap16' function. */
/* #undef HAVE_BSWAP16 */
/* Define to 1 if you have the `bswap64' function. */
/* #undef HAVE_BSWAP64 */
/* Define to 1 if bswap_16 is available via byteswap.h */
#define HAVE_BSWAP_16 1
/* Define to 1 if bswap_64 is available via byteswap.h */
#define HAVE_BSWAP_64 1
/* Define if bt_rseq is available, for recursive btree traversal. */
#define HAVE_BT_RSEQ 1
/* Define to 1 if you have the <byteswap.h> header file. */
#define HAVE_BYTESWAP_H 1
/* Define to 1 if you have the `chmod' function. */
#define HAVE_CHMOD 1
/* Define if cmocka library is available. */
/* #undef HAVE_CMOCKA */
/* Define to 1 if you have the `compile' function. */
/* #undef HAVE_COMPILE */
/* Define if com_err has compatible gettext support */
#define HAVE_COM_ERR_INTL 1
/* Define to 1 if you have the <cpuid.h> header file. */
/* #undef HAVE_CPUID_H */
/* Define to 1 if you have the `daemon' function. */
#define HAVE_DAEMON 1
/* Define to 1 if you have the declaration of `strerror_r', and to 0 if you
don't. */
#define HAVE_DECL_STRERROR_R 1
/* Define to 1 if you have the <dirent.h> header file, and it defines `DIR'.
*/
#define HAVE_DIRENT_H 1
/* Define to 1 if you have the <dlfcn.h> header file. */
#define HAVE_DLFCN_H 1
/* Define to 1 if you have the `dn_skipname' function. */
#define HAVE_DN_SKIPNAME 0
/* Define to 1 if you have the <endian.h> header file. */
#define HAVE_ENDIAN_H 1
/* Define to 1 if you have the <errno.h> header file. */
#define HAVE_ERRNO_H 1
/* Define to 1 if you have the `fchmod' function. */
#define HAVE_FCHMOD 1
/* Define to 1 if you have the <fcntl.h> header file. */
#define HAVE_FCNTL_H 1
/* Define to 1 if you have the `flock' function. */
#define HAVE_FLOCK 1
/* Define to 1 if you have the `fnmatch' function. */
#define HAVE_FNMATCH 1
/* Define to 1 if you have the <fnmatch.h> header file. */
#define HAVE_FNMATCH_H 1
/* Define if you have the getaddrinfo function */
#define HAVE_GETADDRINFO 1
/* Define to 1 if you have the `getcwd' function. */
#define HAVE_GETCWD 1
/* Define to 1 if you have the `getenv' function. */
#define HAVE_GETENV 1
/* Define to 1 if you have the `geteuid' function. */
#define HAVE_GETEUID 1
/* Define if gethostbyname_r exists and its return type is known */
#define HAVE_GETHOSTBYNAME_R 1
/* Define to 1 if you have the `getnameinfo' function. */
#define HAVE_GETNAMEINFO 1
/* Define if system getopt should be used. */
#define HAVE_GETOPT 1
/* Define if system getopt_long should be used. */
#define HAVE_GETOPT_LONG 1
/* Define if getpwnam_r is available and useful. */
#define HAVE_GETPWNAM_R 1
/* Define if getpwuid_r is available and useful. */
#define HAVE_GETPWUID_R 1
/* Define if getservbyname_r exists and its return type is known */
#define HAVE_GETSERVBYNAME_R 1
/* Have the gettimeofday function */
#define HAVE_GETTIMEOFDAY 1
/* Define to 1 if you have the `getusershell' function. */
#define HAVE_GETUSERSHELL 1
/* Define to 1 if you have the `gmtime_r' function. */
#define HAVE_GMTIME_R 1
/* Define to 1 if you have the <ifaddrs.h> header file. */
#define HAVE_IFADDRS_H 1
/* Define to 1 if you have the `inet_ntop' function. */
#define HAVE_INET_NTOP 1
/* Define to 1 if you have the `inet_pton' function. */
#define HAVE_INET_PTON 1
/* Define to 1 if the system has the type `int16_t'. */
#define HAVE_INT16_T 1
/* Define to 1 if the system has the type `int32_t'. */
#define HAVE_INT32_T 1
/* Define to 1 if the system has the type `int8_t'. */
#define HAVE_INT8_T 1
/* Define to 1 if you have the <inttypes.h> header file. */
#define HAVE_INTTYPES_H 1
/* Define to 1 if you have the <keyutils.h> header file. */
/* #undef HAVE_KEYUTILS_H */
/* Define to 1 if you have the <lber.h> header file. */
/* #undef HAVE_LBER_H */
/* Define to 1 if you have the <ldap.h> header file. */
/* #undef HAVE_LDAP_H */
/* Define to 1 if you have the `crypto' library (-lcrypto). */
#define HAVE_LIBCRYPTO 1
/* Define if building with libedit. */
/* #undef HAVE_LIBEDIT */
/* Define to 1 if you have the `nsl' library (-lnsl). */
/* #undef HAVE_LIBNSL */
/* Define to 1 if you have the `resolv' library (-lresolv). */
#define HAVE_LIBRESOLV 1
/* Define to 1 if you have the `socket' library (-lsocket). */
/* #undef HAVE_LIBSOCKET */
/* Define if the util library is available */
#define HAVE_LIBUTIL 1
/* Define to 1 if you have the <limits.h> header file. */
#define HAVE_LIMITS_H 1
/* Define to 1 if you have the `localtime_r' function. */
#define HAVE_LOCALTIME_R 1
/* Define to 1 if you have the <machine/byte_order.h> header file. */
/* #undef HAVE_MACHINE_BYTE_ORDER_H */
/* Define to 1 if you have the <machine/endian.h> header file. */
/* #undef HAVE_MACHINE_ENDIAN_H */
/* Define to 1 if you have the <memory.h> header file. */
#define HAVE_MEMORY_H 1
/* Define to 1 if you have the `mkstemp' function. */
#define HAVE_MKSTEMP 1
/* Define to 1 if you have the <ndir.h> header file, and it defines `DIR'. */
/* #undef HAVE_NDIR_H */
/* Define to 1 if you have the <netdb.h> header file. */
#define HAVE_NETDB_H 1
/* Define if netdb.h declares h_errno */
#define HAVE_NETDB_H_H_ERRNO 1
/* Define to 1 if you have the <netinet/in.h> header file. */
#define HAVE_NETINET_IN_H 1
/* Define to 1 if you have the `ns_initparse' function. */
#define HAVE_NS_INITPARSE 0
/* Define to 1 if you have the `ns_name_uncompress' function. */
#define HAVE_NS_NAME_UNCOMPRESS 0
/* Define if OpenSSL supports cms. */
#define HAVE_OPENSSL_CMS 1
/* Define to 1 if you have the <paths.h> header file. */
#define HAVE_PATHS_H 1
/* Define if persistent keyrings are supported */
/* #undef HAVE_PERSISTENT_KEYRING */
/* Define to 1 if you have the <poll.h> header file. */
#define HAVE_POLL_H 1
/* Define if #pragma weak references work */
#define HAVE_PRAGMA_WEAK_REF 1
/* Define if you have POSIX threads libraries and header files. */
#define HAVE_PTHREAD 1
/* Define to 1 if you have the `pthread_once' function. */
/* #undef HAVE_PTHREAD_ONCE */
/* Have PTHREAD_PRIO_INHERIT. */
#define HAVE_PTHREAD_PRIO_INHERIT 1
/* Define to 1 if you have the `pthread_rwlock_init' function. */
/* #undef HAVE_PTHREAD_RWLOCK_INIT */
/* Define if pthread_rwlock_init is provided in the thread library. */
#define HAVE_PTHREAD_RWLOCK_INIT_IN_THREAD_LIB 1
/* Define to 1 if you have the <pwd.h> header file. */
#define HAVE_PWD_H 1
/* Define if building with GNU Readline. */
/* #undef HAVE_READLINE */
/* Define if regcomp exists and functions */
#define HAVE_REGCOMP 1
/* Define to 1 if you have the `regexec' function. */
#define HAVE_REGEXEC 1
/* Define to 1 if you have the <regexpr.h> header file. */
/* #undef HAVE_REGEXPR_H */
/* Define to 1 if you have the <regex.h> header file. */
#define HAVE_REGEX_H 1
/* Define to 1 if you have the `res_nclose' function. */
#define HAVE_RES_NCLOSE 1
/* Define to 1 if you have the `res_ndestroy' function. */
/* #undef HAVE_RES_NDESTROY */
/* Define to 1 if you have the `res_ninit' function. */
#define HAVE_RES_NINIT 1
/* Define to 1 if you have the `res_nsearch' function. */
#define HAVE_RES_NSEARCH 0
/* Define to 1 if you have the `res_search' function */
#define HAVE_RES_SEARCH 1
/* Define to 1 if you have the `re_comp' function. */
#define HAVE_RE_COMP 1
/* Define to 1 if you have the `re_exec' function. */
#define HAVE_RE_EXEC 1
/* Define to 1 if you have the <sasl/sasl.h> header file. */
/* #undef HAVE_SASL_SASL_H */
/* Define if struct sockaddr contains sa_len */
/* #undef HAVE_SA_LEN */
/* Define to 1 if you have the `setegid' function. */
#define HAVE_SETEGID 1
/* Define to 1 if you have the `setenv' function. */
#define HAVE_SETENV 1
/* Define to 1 if you have the `seteuid' function. */
#define HAVE_SETEUID 1
/* Define if setluid provided in OSF/1 security library */
/* #undef HAVE_SETLUID */
/* Define to 1 if you have the `setregid' function. */
#define HAVE_SETREGID 1
/* Define to 1 if you have the `setresgid' function. */
#define HAVE_SETRESGID 1
/* Define to 1 if you have the `setresuid' function. */
#define HAVE_SETRESUID 1
/* Define to 1 if you have the `setreuid' function. */
#define HAVE_SETREUID 1
/* Define to 1 if you have the `setsid' function. */
#define HAVE_SETSID 1
/* Define to 1 if you have the `setvbuf' function. */
#define HAVE_SETVBUF 1
/* Define if there is a socklen_t type. If not, probably use size_t */
#define HAVE_SOCKLEN_T 1
/* Define to 1 if you have the `srand' function. */
#define HAVE_SRAND 1
/* Define to 1 if you have the `srand48' function. */
#define HAVE_SRAND48 1
/* Define to 1 if you have the `srandom' function. */
#define HAVE_SRANDOM 1
/* Define to 1 if the system has the type `ssize_t'. */
#define HAVE_SSIZE_T 1
/* Define to 1 if you have the `stat' function. */
#define HAVE_STAT 1
/* Define to 1 if you have the <stddef.h> header file. */
#define HAVE_STDDEF_H 1
/* Define to 1 if you have the <stdint.h> header file. */
#define HAVE_STDINT_H 1
/* Define to 1 if you have the <stdlib.h> header file. */
#define HAVE_STDLIB_H 1
/* Define to 1 if you have the `step' function. */
/* #undef HAVE_STEP */
/* Define to 1 if you have the `strchr' function. */
#define HAVE_STRCHR 1
/* Define to 1 if you have the `strdup' function. */
#define HAVE_STRDUP 1
/* Define to 1 if you have the `strerror' function. */
#define HAVE_STRERROR 1
/* Define to 1 if you have the `strerror_r' function. */
#define HAVE_STRERROR_R 1
/* Define to 1 if you have the <strings.h> header file. */
#define HAVE_STRINGS_H 1
/* Define to 1 if you have the <string.h> header file. */
#define HAVE_STRING_H 1
/* Define to 1 if you have the `strlcpy' function. */
/* #undef HAVE_STRLCPY */
/* Define to 1 if you have the `strptime' function. */
#define HAVE_STRPTIME 1
/* Define to 1 if the system has the type `struct cmsghdr'. */
#define HAVE_STRUCT_CMSGHDR 1
/* Define if there is a struct if_laddrconf. */
/* #undef HAVE_STRUCT_IF_LADDRCONF */
/* Define to 1 if the system has the type `struct in6_pktinfo'. */
#define HAVE_STRUCT_IN6_PKTINFO 1
/* Define to 1 if the system has the type `struct in_pktinfo'. */
#define HAVE_STRUCT_IN_PKTINFO 1
/* Define if there is a struct lifconf. */
/* #undef HAVE_STRUCT_LIFCONF */
/* Define to 1 if the system has the type `struct rt_msghdr'. */
/* #undef HAVE_STRUCT_RT_MSGHDR */
/* Define to 1 if the system has the type `struct sockaddr_storage'. */
#define HAVE_STRUCT_SOCKADDR_STORAGE 1
/* Define to 1 if `st_mtimensec' is a member of `struct stat'. */
/* #undef HAVE_STRUCT_STAT_ST_MTIMENSEC */
/* Define to 1 if `st_mtimespec.tv_nsec' is a member of `struct stat'. */
/* #undef HAVE_STRUCT_STAT_ST_MTIMESPEC_TV_NSEC */
/* Define to 1 if `st_mtim.tv_nsec' is a member of `struct stat'. */
#define HAVE_STRUCT_STAT_ST_MTIM_TV_NSEC 1
/* Define to 1 if you have the <sys/bswap.h> header file. */
/* #undef HAVE_SYS_BSWAP_H */
/* Define to 1 if you have the <sys/dir.h> header file, and it defines `DIR'.
*/
/* #undef HAVE_SYS_DIR_H */
/* Define if sys_errlist in libc */
#define HAVE_SYS_ERRLIST 1
/* Define to 1 if you have the <sys/file.h> header file. */
#define HAVE_SYS_FILE_H 1
/* Define to 1 if you have the <sys/filio.h> header file. */
/* #undef HAVE_SYS_FILIO_H */
/* Define to 1 if you have the <sys/ndir.h> header file, and it defines `DIR'.
*/
/* #undef HAVE_SYS_NDIR_H */
/* Define to 1 if you have the <sys/param.h> header file. */
#define HAVE_SYS_PARAM_H 1
/* Define to 1 if you have the <sys/select.h> header file. */
#define HAVE_SYS_SELECT_H 1
/* Define to 1 if you have the <sys/socket.h> header file. */
#define HAVE_SYS_SOCKET_H 1
/* Define to 1 if you have the <sys/sockio.h> header file. */
/* #undef HAVE_SYS_SOCKIO_H */
/* Define to 1 if you have the <sys/stat.h> header file. */
#define HAVE_SYS_STAT_H 1
/* Define to 1 if you have the <sys/time.h> header file. */
#define HAVE_SYS_TIME_H 1
/* Define to 1 if you have the <sys/types.h> header file. */
#define HAVE_SYS_TYPES_H 1
/* Define to 1 if you have the <sys/uio.h> header file. */
#define HAVE_SYS_UIO_H 1
/* Define if tcl.h found */
/* #undef HAVE_TCL_H */
/* Define if tcl/tcl.h found */
/* #undef HAVE_TCL_TCL_H */
/* Define to 1 if you have the `timegm' function. */
#define HAVE_TIMEGM 1
/* Define to 1 if you have the <time.h> header file. */
#define HAVE_TIME_H 1
/* Define to 1 if you have the <unistd.h> header file. */
#define HAVE_UNISTD_H 1
/* Define to 1 if you have the `unsetenv' function. */
#define HAVE_UNSETENV 1
/* Define to 1 if the system has the type `u_char'. */
#define HAVE_U_CHAR 1
/* Define to 1 if the system has the type `u_int'. */
#define HAVE_U_INT 1
/* Define to 1 if the system has the type `u_int16_t'. */
#define HAVE_U_INT16_T 1
/* Define to 1 if the system has the type `u_int32_t'. */
#define HAVE_U_INT32_T 1
/* Define to 1 if the system has the type `u_int8_t'. */
#define HAVE_U_INT8_T 1
/* Define to 1 if the system has the type `u_long'. */
#define HAVE_U_LONG 1
/* Define to 1 if you have the `vasprintf' function. */
#define HAVE_VASPRINTF 1
/* Define to 1 if you have the `vsnprintf' function. */
#define HAVE_VSNPRINTF 1
/* Define to 1 if you have the `vsprintf' function. */
#define HAVE_VSPRINTF 1
/* Define to 1 if the system has the type `__int128_t'. */
#define HAVE___INT128_T 1
/* Define to 1 if the system has the type `__uint128_t'. */
#define HAVE___UINT128_T 1
/* Define if errno.h declares perror */
/* #undef HDR_HAS_PERROR */
/* May need to be defined to enable IPv6 support, for example on IRIX */
/* #undef INET6 */
/* Define if MIT Project Athena default configuration should be used */
/* #undef KRB5_ATHENA_COMPAT */
/* Define for DNS support of locating realms and KDCs */
#undef KRB5_DNS_LOOKUP
/* Define to enable DNS lookups of Kerberos realm names */
/* #undef KRB5_DNS_LOOKUP_REALM */
/* Define if the KDC should return only vague error codes to clients */
/* #undef KRBCONF_VAGUE_ERRORS */
/* define if the system header files are missing prototype for daemon() */
/* #undef NEED_DAEMON_PROTO */
/* Define if in6addr_any is not defined in libc */
/* #undef NEED_INSIXADDR_ANY */
/* define if the system header files are missing prototype for
ss_execute_command() */
/* #undef NEED_SS_EXECUTE_COMMAND_PROTO */
/* define if the system header files are missing prototype for strptime() */
/* #undef NEED_STRPTIME_PROTO */
/* define if the system header files are missing prototype for swab() */
/* #undef NEED_SWAB_PROTO */
/* Define if need to declare sys_errlist */
/* #undef NEED_SYS_ERRLIST */
/* define if the system header files are missing prototype for vasprintf() */
/* #undef NEED_VASPRINTF_PROTO */
/* Define if the KDC should use no lookaside cache */
/* #undef NOCACHE */
/* Define if references to pthread routines should be non-weak. */
/* #undef NO_WEAK_PTHREADS */
/* Define if lex produces code with yylineno */
/* #undef NO_YYLINENO */
/* Define to the address where bug reports for this package should be sent. */
#define PACKAGE_BUGREPORT "krb5-bugs@mit.edu"
/* Define to the full name of this package. */
#define PACKAGE_NAME "Kerberos 5"
/* Define to the full name and version of this package. */
#define PACKAGE_STRING "Kerberos 5 1.17.1"
/* Define to the one symbol short name of this package. */
#define PACKAGE_TARNAME "krb5"
/* Define to the home page for this package. */
#define PACKAGE_URL ""
/* Define to the version of this package. */
#define PACKAGE_VERSION "1.17.1"
/* Define if setjmp indicates POSIX interface */
/* #undef POSIX_SETJMP */
/* Define if POSIX signal handling is used */
#define POSIX_SIGNALS 1
/* Define if POSIX signal handlers are used */
#define POSIX_SIGTYPE 1
/* Define if termios.h exists and tcsetattr exists */
#define POSIX_TERMIOS 1
/* Define to necessary symbol if this constant uses a non-standard name on
your system. */
/* #undef PTHREAD_CREATE_JOINABLE */
/* Define as the return type of signal handlers (`int' or `void'). */
#define RETSIGTYPE void
/* Define as return type of setrpcent */
#define SETRPCENT_TYPE void
/* The size of `size_t', as computed by sizeof. */
#define SIZEOF_SIZE_T 8
/* The size of `time_t', as computed by sizeof. */
#define SIZEOF_TIME_T 8
/* Define to use OpenSSL for SPAKE preauth */
#define SPAKE_OPENSSL 1
/* Define for static plugin linkage */
/* #undef STATIC_PLUGINS */
/* Define to 1 if you have the ANSI C header files. */
#define STDC_HEADERS 1
/* Define to 1 if strerror_r returns char *. */
#define STRERROR_R_CHAR_P 1
/* Define if sys_errlist is defined in errno.h */
#define SYS_ERRLIST_DECLARED 1
/* Define to 1 if you can safely include both <sys/time.h> and <time.h>. */
#define TIME_WITH_SYS_TIME 1
/* Define if no TLS implementation is selected */
/* #undef TLS_IMPL_NONE */
/* Define if TLS implementation is OpenSSL */
#define TLS_IMPL_OPENSSL 1
/* Define if you have dirent.h functionality */
#define USE_DIRENT_H 1
/* Define if dlopen should be used */
#define USE_DLOPEN 1
/* Define if the keyring ccache should be enabled */
/* #undef USE_KEYRING_CCACHE */
/* Define if link-time options for library finalization will be used */
/* #undef USE_LINKER_FINI_OPTION */
/* Define if link-time options for library initialization will be used */
/* #undef USE_LINKER_INIT_OPTION */
/* Define if sigprocmask should be used */
#define USE_SIGPROCMASK 1
/* Define if wait takes int as an argument */
#define WAIT_USES_INT 1
/* Define to 1 if `lex' declares `yytext' as a `char *' by default, not a
`char[]'. */
#define YYTEXT_POINTER 1
/* Define to enable extensions in glibc */
#define _GNU_SOURCE 1
/* Define to enable C11 extensions */
#define __STDC_WANT_LIB_EXT1__ 1
/* Define to empty if `const' does not conform to ANSI C. */
/* #undef const */
/* Define to `int' if <sys/types.h> doesn't define. */
/* #undef gid_t */
/* Define to `__inline__' or `__inline' if that's what the C compiler
calls it, or to nothing if 'inline' is not supported under any name. */
#ifndef __cplusplus
/* #undef inline */
#endif
/* Define krb5_sigtype to type of signal handler */
#define krb5_sigtype void
/* Define to `int' if <sys/types.h> does not define. */
/* #undef mode_t */
/* Define to `long int' if <sys/types.h> does not define. */
/* #undef off_t */
/* Define to `long' if <sys/types.h> does not define. */
/* #undef time_t */
/* Define to `int' if <sys/types.h> doesn't define. */
/* #undef uid_t */
#if defined(__GNUC__) && !defined(inline)
/* Silence gcc pedantic warnings about ANSI C. */
# define inline __inline__
#endif
#endif /* KRB5_AUTOCONF_H */

contrib/krb5-cmake/osconf.h Normal file
View File

@ -0,0 +1,141 @@
/* -*- mode: c; c-basic-offset: 4; indent-tabs-mode: nil -*- */
/*
* Copyright 1990,1991,2008 by the Massachusetts Institute of Technology.
* All Rights Reserved.
*
* Export of this software from the United States of America may
* require a specific license from the United States Government.
* It is the responsibility of any person or organization contemplating
* export to obtain such a license before exporting.
*
* WITHIN THAT CONSTRAINT, permission to use, copy, modify, and
* distribute this software and its documentation for any purpose and
* without fee is hereby granted, provided that the above copyright
* notice appear in all copies and that both that copyright notice and
* this permission notice appear in supporting documentation, and that
* the name of M.I.T. not be used in advertising or publicity pertaining
* to distribution of the software without specific, written prior
* permission. Furthermore if you modify this software you must label
* your software as modified software and not distribute it in such a
* fashion that it might be confused with the original M.I.T. software.
* M.I.T. makes no representations about the suitability of
* this software for any purpose. It is provided "as is" without express
* or implied warranty.
*/
/* Site- and OS- dependent configuration */
#ifndef KRB5_OSCONF__
#define KRB5_OSCONF__
#if !defined(_WIN32)
/* Don't try to pull in autoconf.h for Windows, since it's not used */
#ifndef KRB5_AUTOCONF__
#define KRB5_AUTOCONF__
#include "autoconf.h"
#endif
#endif
#if defined(__MACH__) && defined(__APPLE__)
# include <TargetConditionals.h>
#endif
#if defined(_WIN32)
#define DEFAULT_PROFILE_FILENAME "krb5.ini"
#else /* !_WINDOWS */
#if TARGET_OS_MAC
#define DEFAULT_SECURE_PROFILE_PATH "/Library/Preferences/edu.mit.Kerberos:/etc/krb5.conf:/usr/local/etc/krb5.conf"
#define DEFAULT_PROFILE_PATH ("~/Library/Preferences/edu.mit.Kerberos" ":" DEFAULT_SECURE_PROFILE_PATH)
#define KRB5_PLUGIN_BUNDLE_DIR "/System/Library/KerberosPlugins/KerberosFrameworkPlugins"
#define KDB5_PLUGIN_BUNDLE_DIR "/System/Library/KerberosPlugins/KerberosDatabasePlugins"
#define KRB5_AUTHDATA_PLUGIN_BUNDLE_DIR "/System/Library/KerberosPlugins/KerberosAuthDataPlugins"
#else
#define DEFAULT_SECURE_PROFILE_PATH "/etc/krb5.conf:/usr/local/etc/krb5.conf"
#define DEFAULT_PROFILE_PATH DEFAULT_SECURE_PROFILE_PATH
#endif
#endif /* _WINDOWS */
#ifdef _WIN32
#define DEFAULT_PLUGIN_BASE_DIR "%{LIBDIR}\\plugins"
#else
#define DEFAULT_PLUGIN_BASE_DIR "/usr/local/lib/krb5/plugins"
#endif
#if defined(_WIN64)
#define PLUGIN_EXT "64.dll"
#elif defined(_WIN32)
#define PLUGIN_EXT "32.dll"
#else
#define PLUGIN_EXT ".so"
#endif
#define KDC_DIR "/usr/local/var/krb5kdc"
#define KDC_RUN_DIR "/run/krb5kdc"
#define DEFAULT_KDB_FILE KDC_DIR "/principal"
#define DEFAULT_KEYFILE_STUB KDC_DIR "/.k5."
#define KRB5_DEFAULT_ADMIN_ACL KDC_DIR "/krb5_adm.acl"
/* Used by old admin server */
#define DEFAULT_ADMIN_ACL KDC_DIR "/kadm_old.acl"
/* Location of KDC profile */
#define DEFAULT_KDC_PROFILE KDC_DIR "/kdc.conf"
#define KDC_PROFILE_ENV "KRB5_KDC_PROFILE"
#if TARGET_OS_MAC
#define DEFAULT_KDB_LIB_PATH { KDB5_PLUGIN_BUNDLE_DIR, "/usr/local/lib/krb5/plugins/kdb", NULL }
#else
#define DEFAULT_KDB_LIB_PATH { "/usr/local/lib/krb5/plugins/kdb", NULL }
#endif
#define DEFAULT_KDC_ENCTYPE ENCTYPE_AES256_CTS_HMAC_SHA1_96
#define KDCRCACHE "dfl:krb5kdc_rcache"
#define KDC_PORTNAME "kerberos" /* for /etc/services or equiv. */
#define KRB5_DEFAULT_PORT 88
#define DEFAULT_KPASSWD_PORT 464
#define DEFAULT_KDC_UDP_PORTLIST "88"
#define DEFAULT_KDC_TCP_PORTLIST "88"
#define DEFAULT_TCP_LISTEN_BACKLOG 5
/*
* Defaults for the KADM5 admin system.
*/
#define DEFAULT_KADM5_KEYTAB KDC_DIR "/kadm5.keytab"
#define DEFAULT_KADM5_ACL_FILE KDC_DIR "/kadm5.acl"
#define DEFAULT_KADM5_PORT 749 /* assigned by IANA */
#define KRB5_DEFAULT_SUPPORTED_ENCTYPES \
"aes256-cts-hmac-sha1-96:normal " \
"aes128-cts-hmac-sha1-96:normal"
#define MAX_DGRAM_SIZE 65536
#define RCTMPDIR "/var/tmp" /* directory to store replay caches */
#define KRB5_PATH_TTY "/dev/tty"
#define KRB5_PATH_LOGIN "/usr/local/sbin/login.krb5"
#define KRB5_PATH_RLOGIN "/usr/local/bin/rlogin"
#define KRB5_ENV_CCNAME "KRB5CCNAME"
/*
* krb5 replica support follows
*/
#define KPROP_DEFAULT_FILE KDC_DIR "/replica_datatrans"
#define KPROPD_DEFAULT_FILE KDC_DIR "/from_master"
#define KPROPD_DEFAULT_KDB5_UTIL "/usr/local/sbin/kdb5_util"
#define KPROPD_DEFAULT_KPROP "/usr/local/sbin/kprop"
#define KPROPD_DEFAULT_KRB_DB DEFAULT_KDB_FILE
#define KPROPD_ACL_FILE KDC_DIR "/kpropd.acl"
/*
* GSS mechglue
*/
#define MECH_CONF "/usr/local/etc/gss/mech"
#define MECH_LIB_PREFIX "/usr/local/lib/gss/"
#endif /* KRB5_OSCONF__ */

View File

@ -0,0 +1,2 @@
#include "util/profile/profile.hin"
#include "util/profile/prof_err.h"

View File

@ -49,7 +49,6 @@ set(SRCS
${RDKAFKA_SOURCE_DIR}/rdkafka_request.c ${RDKAFKA_SOURCE_DIR}/rdkafka_request.c
${RDKAFKA_SOURCE_DIR}/rdkafka_roundrobin_assignor.c ${RDKAFKA_SOURCE_DIR}/rdkafka_roundrobin_assignor.c
${RDKAFKA_SOURCE_DIR}/rdkafka_sasl.c ${RDKAFKA_SOURCE_DIR}/rdkafka_sasl.c
# ${RDKAFKA_SOURCE_DIR}/rdkafka_sasl_cyrus.c # needed to support Kerberos, requires cyrus-sasl
${RDKAFKA_SOURCE_DIR}/rdkafka_sasl_oauthbearer.c ${RDKAFKA_SOURCE_DIR}/rdkafka_sasl_oauthbearer.c
${RDKAFKA_SOURCE_DIR}/rdkafka_sasl_plain.c ${RDKAFKA_SOURCE_DIR}/rdkafka_sasl_plain.c
${RDKAFKA_SOURCE_DIR}/rdkafka_sasl_scram.c ${RDKAFKA_SOURCE_DIR}/rdkafka_sasl_scram.c
@ -77,12 +76,34 @@ set(SRCS
${RDKAFKA_SOURCE_DIR}/rdgz.c ${RDKAFKA_SOURCE_DIR}/rdgz.c
) )
if(${ENABLE_CYRUS_SASL})
message (STATUS "librdkafka with SASL support")
set(SRCS
${SRCS}
${RDKAFKA_SOURCE_DIR}/rdkafka_sasl_cyrus.c # needed to support Kerberos, requires cyrus-sasl
)
endif()
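# ENABLE_CYRUS_SASL is expected to be set by the top-level build, e.g.
# (illustrative invocation): cmake -DENABLE_CYRUS_SASL=1 <source-dir>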
add_library(rdkafka ${SRCS}) add_library(rdkafka ${SRCS})
target_compile_options(rdkafka PRIVATE -fno-sanitize=undefined) target_compile_options(rdkafka PRIVATE -fno-sanitize=undefined)
target_include_directories(rdkafka SYSTEM PUBLIC include) # target_include_directories(rdkafka SYSTEM PUBLIC include)
target_include_directories(rdkafka SYSTEM PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/include) # for "librdkafka/rdkafka.h"
target_include_directories(rdkafka SYSTEM PUBLIC ${RDKAFKA_SOURCE_DIR}) # Because weird logic with "include_next" is used. target_include_directories(rdkafka SYSTEM PUBLIC ${RDKAFKA_SOURCE_DIR}) # Because weird logic with "include_next" is used.
target_include_directories(rdkafka SYSTEM PUBLIC ${CMAKE_CURRENT_BINARY_DIR}/auxdir) # for "../config.h"
target_include_directories(rdkafka SYSTEM PRIVATE ${ZSTD_INCLUDE_DIR}/common) # Because wrong path to "zstd_errors.h" is used. target_include_directories(rdkafka SYSTEM PRIVATE ${ZSTD_INCLUDE_DIR}/common) # Because wrong path to "zstd_errors.h" is used.
target_link_libraries(rdkafka PRIVATE lz4 ${ZLIB_LIBRARIES} ${ZSTD_LIBRARY} ${LIBGSASL_LIBRARY}) target_link_libraries(rdkafka PRIVATE lz4 ${ZLIB_LIBRARIES} ${ZSTD_LIBRARY})
if(OPENSSL_SSL_LIBRARY AND OPENSSL_CRYPTO_LIBRARY) if(OPENSSL_SSL_LIBRARY AND OPENSSL_CRYPTO_LIBRARY)
target_link_libraries(rdkafka PRIVATE ${OPENSSL_SSL_LIBRARY} ${OPENSSL_CRYPTO_LIBRARY}) target_link_libraries(rdkafka PRIVATE ${OPENSSL_SSL_LIBRARY} ${OPENSSL_CRYPTO_LIBRARY})
endif() endif()
if(${ENABLE_CYRUS_SASL})
target_link_libraries(rdkafka PRIVATE ${CYRUS_SASL_LIBRARY})
set(WITH_SASL_CYRUS 1)
endif()
file(MAKE_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/auxdir)
configure_file(
"${CMAKE_CURRENT_SOURCE_DIR}/config.h.in"
"${CMAKE_CURRENT_BINARY_DIR}/config.h"
IMMEDIATE @ONLY
)
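# configure_file() rewrites '#cmakedefine WITH_SASL_CYRUS 1' in config.h.in to
# '#define WITH_SASL_CYRUS 1' when the option is enabled, or to a commented-out
# '#undef' otherwise, so the Cyrus SASL code paths are compiled only when
# ENABLE_CYRUS_SASL is set.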

View File

@ -1,4 +1,4 @@
// Automatically generated by ./configure // Originally generated by ./configure
#ifndef _CONFIG_H_ #ifndef _CONFIG_H_
#define _CONFIG_H_ #define _CONFIG_H_
#define ARCH "x86_64" #define ARCH "x86_64"
@ -65,6 +65,7 @@
#define WITH_SASL_SCRAM 1 #define WITH_SASL_SCRAM 1
// WITH_SASL_OAUTHBEARER // WITH_SASL_OAUTHBEARER
#define WITH_SASL_OAUTHBEARER 1 #define WITH_SASL_OAUTHBEARER 1
#cmakedefine WITH_SASL_CYRUS 1
// crc32chw // crc32chw
#if !defined(__PPC__) #if !defined(__PPC__)
#define WITH_CRC32C_HW 1 #define WITH_CRC32C_HW 1

View File

@ -76,7 +76,7 @@ is_supported_command()
is_running() is_running()
{ {
[ -r "$CLICKHOUSE_PIDFILE" ] && pgrep -s $(cat "$CLICKHOUSE_PIDFILE") 1> /dev/null 2> /dev/null pgrep --pidfile "$CLICKHOUSE_PIDFILE" $(echo "${PROGRAM}" | cut -c1-15) 1> /dev/null 2> /dev/null
} }
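# A note on the change (our reading): pgrep --pidfile matches the process name
# (truncated to 15 characters, the kernel comm limit, hence the cut) against
# the PID recorded in the pidfile, so a recycled PID now owned by another
# program is no longer reported as running.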

View File

@ -127,5 +127,21 @@
"docker/test/integration/postgresql_java_client": { "docker/test/integration/postgresql_java_client": {
"name": "yandex/clickhouse-postgresql-java-client", "name": "yandex/clickhouse-postgresql-java-client",
"dependent": [] "dependent": []
},
"docker/test/base": {
"name": "yandex/clickhouse-test-base",
"dependent": [
]
},
"docker/packager/unbundled": {
"name": "yandex/clickhouse-unbundled-builder",
"dependent": [
"docker/test/stateless_unbundled"
]
},
"docker/test/stateless_unbundled": {
"name": "yandex/clickhouse-stateless-unbundled-test",
"dependent": [
]
} }
} }

View File

@ -2,9 +2,6 @@
set -x -e set -x -e
# Update tzdata to the latest version. It is embedded into clickhouse binary.
sudo apt-get update && sudo apt-get install tzdata
mkdir -p build/cmake/toolchain/darwin-x86_64 mkdir -p build/cmake/toolchain/darwin-x86_64
tar xJf MacOSX10.14.sdk.tar.xz -C build/cmake/toolchain/darwin-x86_64 --strip-components=1 tar xJf MacOSX10.14.sdk.tar.xz -C build/cmake/toolchain/darwin-x86_64 --strip-components=1

View File

@ -2,9 +2,6 @@
set -x -e set -x -e
# Update tzdata to the latest version. It is embedded into clickhouse binary.
sudo apt-get update && sudo apt-get install tzdata
ccache --show-stats ||: ccache --show-stats ||:
ccache --zero-stats ||: ccache --zero-stats ||:
build/release --no-pbuilder $ALIEN_PKGS | ts '%Y-%m-%d %H:%M:%S' build/release --no-pbuilder $ALIEN_PKGS | ts '%Y-%m-%d %H:%M:%S'

View File

@ -0,0 +1,56 @@
# docker build -t yandex/clickhouse-unbundled-builder .
FROM yandex/clickhouse-deb-builder
# Libraries from the OS are only needed to test the "unbundled" build (which is not used in production).
RUN apt-get --allow-unauthenticated update -y \
&& env DEBIAN_FRONTEND=noninteractive \
apt-get --allow-unauthenticated install --yes --no-install-recommends \
libicu-dev \
libreadline-dev \
gperf \
perl \
pkg-config \
devscripts \
libc++-dev \
libc++abi-dev \
libboost-program-options-dev \
libboost-system-dev \
libboost-filesystem-dev \
libboost-thread-dev \
libboost-iostreams-dev \
libboost-regex-dev \
zlib1g-dev \
liblz4-dev \
libdouble-conversion-dev \
librdkafka-dev \
libpoconetssl62 \
libpoco-dev \
libgoogle-perftools-dev \
libzstd-dev \
libltdl-dev \
libre2-dev \
libjemalloc-dev \
libmsgpack-dev \
libcurl4-openssl-dev \
opencl-headers \
ocl-icd-libopencl1 \
intel-opencl-icd \
unixodbc-dev \
odbcinst \
tzdata \
alien \
libcapnp-dev \
cmake \
gdb \
pigz \
moreutils \
libcctz-dev \
libldap2-dev \
libsasl2-dev \
heimdal-multidev \
libhyperscan-dev
COPY build.sh /
CMD ["/bin/bash", "/build.sh"]

View File

@ -0,0 +1,28 @@
#!/usr/bin/env bash
set -x -e
# Update tzdata to the latest version. It is embedded into clickhouse binary.
sudo apt-get update && sudo apt-get install tzdata
ccache --show-stats ||:
ccache --zero-stats ||:
build/release --no-pbuilder $ALIEN_PKGS | ts '%Y-%m-%d %H:%M:%S'
mv /*.deb /output
mv *.changes /output
mv *.buildinfo /output
mv /*.rpm /output ||: # if exists
mv /*.tgz /output ||: # if exists
if [ -n "$BINARY_OUTPUT" ] && { [ "$BINARY_OUTPUT" = "programs" ] || [ "$BINARY_OUTPUT" = "tests" ] ;}
then
echo "Placing $BINARY_OUTPUT in /output"
mkdir /output/binary ||: # if exists
mv /build/obj-*/programs/clickhouse* /output/binary
if [ "$BINARY_OUTPUT" = "tests" ]
then
mv /build/obj-*/src/unit_tests_dbms /output/binary
fi
fi
ccache --show-stats ||:
ln -s /usr/lib/x86_64-linux-gnu/libOpenCL.so.1.0.0 /usr/lib/libOpenCL.so ||:

View File

@ -0,0 +1,51 @@
# docker build -t yandex/clickhouse-test-base .
FROM ubuntu:19.10
RUN apt-get --allow-unauthenticated update -y && apt-get install --yes wget gnupg
RUN wget -O - https://apt.llvm.org/llvm-snapshot.gpg.key | apt-key add -
RUN echo "deb [trusted=yes] http://apt.llvm.org/eoan/ llvm-toolchain-eoan-10 main" >> /etc/apt/sources.list
# initial packages
RUN apt-get --allow-unauthenticated update -y \
&& env DEBIAN_FRONTEND=noninteractive \
apt-get --allow-unauthenticated install --yes --no-install-recommends \
apt-transport-https \
bash \
ca-certificates \
curl \
fakeroot \
gnupg \
software-properties-common
# Special dpkg-deb (https://github.com/ClickHouse-Extras/dpkg) version which is able
# to compress files using pigz (https://zlib.net/pigz/) instead of gzip.
# Significantly increases deb packaging speed and is compatible with old systems
RUN curl -O https://clickhouse-builds.s3.yandex.net/utils/1/dpkg-deb
RUN chmod +x dpkg-deb
RUN cp dpkg-deb /usr/bin
RUN apt-get --allow-unauthenticated update -y \
&& env DEBIAN_FRONTEND=noninteractive \
apt-get --allow-unauthenticated install --yes --no-install-recommends \
clang-10 \
debhelper \
devscripts \
gdb \
git \
gperf \
lcov \
llvm-10 \
moreutils \
perl \
pigz \
pkg-config \
tzdata
# Sanitizer options
RUN echo "TSAN_OPTIONS='verbosity=1000 halt_on_error=1 history_size=7'" >> /etc/environment; \
echo "UBSAN_OPTIONS='print_stacktrace=1'" >> /etc/environment; \
echo "MSAN_OPTIONS='abort_on_error=1'" >> /etc/environment; \
ln -s /usr/lib/llvm-10/bin/llvm-symbolizer /usr/bin/llvm-symbolizer;
CMD sleep 1
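
The sanitizer variables written to /etc/environment above are read by instrumented binaries at startup. A hedged example of overriding them for a single run (the binary name and config path are placeholders, not part of this image):

```bash
TSAN_OPTIONS='verbosity=1000 halt_on_error=1 history_size=7' \
    ./clickhouse-server --config-file=/etc/clickhouse-server/config.xml
```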

View File

@ -13,39 +13,40 @@ RUN echo "deb [trusted=yes] http://apt.llvm.org/eoan/ llvm-toolchain-eoan-10 mai
RUN apt-get --allow-unauthenticated update -y \ RUN apt-get --allow-unauthenticated update -y \
&& env DEBIAN_FRONTEND=noninteractive \ && env DEBIAN_FRONTEND=noninteractive \
apt-get --allow-unauthenticated install --yes --no-install-recommends \ apt-get --allow-unauthenticated install --yes --no-install-recommends \
bash \
fakeroot \
ccache \
software-properties-common \
apt-transport-https \ apt-transport-https \
ca-certificates \
wget \
bash \ bash \
fakeroot \ bash \
cmake \ brotli \
ccache \
llvm-10 \
clang-10 \
lld-10 \
clang-tidy-10 \
ninja-build \
gperf \
git \
tzdata \
gperf \
rename \
build-essential \ build-essential \
ca-certificates \
ccache \
ccache \
clang-10 \
clang-tidy-10 \
cmake \
curl \
expect \ expect \
fakeroot \
fakeroot \
git \
gperf \
gperf \
lld-10 \
llvm-10 \
moreutils \
ninja-build \
psmisc \
python \ python \
python-lxml \ python-lxml \
python-termcolor \
python-requests \ python-requests \
unixodbc \ python-termcolor \
qemu-user-static \ qemu-user-static \
rename \
software-properties-common \
sudo \ sudo \
moreutils \ tzdata \
curl \ unixodbc \
brotli wget
RUN mkdir -p /tmp/clickhouse-odbc-tmp \ RUN mkdir -p /tmp/clickhouse-odbc-tmp \
&& wget --quiet -O - ${odbc_driver_url} | tar --strip-components=1 -xz -C /tmp/clickhouse-odbc-tmp \ && wget --quiet -O - ${odbc_driver_url} | tar --strip-components=1 -xz -C /tmp/clickhouse-odbc-tmp \

View File

@ -1,12 +1,65 @@
#!/bin/bash #!/bin/bash
set -xeu
set -o pipefail
trap "exit" INT TERM
trap 'kill $(jobs -pr) ||:' EXIT
set -x -e # This script is separated into two stages, cloning and everything else, so
# that we can run the "everything else" stage from the cloned source (we don't
# do this yet).
stage=${stage:-}
# A variable to pass additional flags to CMake.
# Here we explicitly default it to nothing so that bash doesn't complain about
# it being undefined. Also read it as an array so that we can pass an empty list
# of additional variables to cmake properly, and it doesn't generate an extra
# empty parameter.
read -ra FASTTEST_CMAKE_FLAGS <<< "${FASTTEST_CMAKE_FLAGS:-}"
ls -la ls -la
function kill_clickhouse
{
for _ in {1..60}
do
if ! pkill -f clickhouse-server ; then break ; fi
sleep 1
done
if pgrep -f clickhouse-server
then
pstree -apgT
jobs
echo "Failed to kill the ClickHouse server $(pgrep -f clickhouse-server)"
return 1
fi
}
function wait_for_server_start
{
for _ in {1..60}
do
if clickhouse-client --query "select 1" || ! pgrep -f clickhouse-server
then
break
fi
sleep 1
done
if ! clickhouse-client --query "select 1"
then
echo "Failed to wait until ClickHouse server starts."
return 1
fi
echo "ClickHouse server pid '$(pgrep -f clickhouse-server)' started and responded"
}
function clone_root
{
git clone https://github.com/ClickHouse/ClickHouse.git | ts '%Y-%m-%d %H:%M:%S' | tee /test_output/clone_log.txt git clone https://github.com/ClickHouse/ClickHouse.git | ts '%Y-%m-%d %H:%M:%S' | tee /test_output/clone_log.txt
cd ClickHouse cd ClickHouse
CLICKHOUSE_DIR=`pwd` CLICKHOUSE_DIR=$(pwd)
if [ "$PULL_REQUEST_NUMBER" != "0" ]; then if [ "$PULL_REQUEST_NUMBER" != "0" ]; then
@ -15,18 +68,21 @@ if [ "$PULL_REQUEST_NUMBER" != "0" ]; then
echo 'Cloned merge head' echo 'Cloned merge head'
else else
git fetch git fetch
git checkout $COMMIT_SHA git checkout "$COMMIT_SHA"
echo 'Checked out to commit' echo 'Checked out to commit'
fi fi
else else
if [ "$COMMIT_SHA" != "" ]; then if [ "$COMMIT_SHA" != "" ]; then
git checkout $COMMIT_SHA git checkout "$COMMIT_SHA"
fi fi
fi fi
}
SUBMODULES_TO_UPDATE="contrib/boost contrib/zlib-ng contrib/libxml2 contrib/poco contrib/libunwind contrib/ryu contrib/fmtlib contrib/base64 contrib/cctz contrib/libcpuid contrib/double-conversion contrib/libcxx contrib/libcxxabi contrib/libc-headers contrib/lz4 contrib/zstd contrib/fastops contrib/rapidjson contrib/re2 contrib/sparsehash-c11" function run
{
SUBMODULES_TO_UPDATE=(contrib/boost contrib/zlib-ng contrib/libxml2 contrib/poco contrib/libunwind contrib/ryu contrib/fmtlib contrib/base64 contrib/cctz contrib/libcpuid contrib/double-conversion contrib/libcxx contrib/libcxxabi contrib/libc-headers contrib/lz4 contrib/zstd contrib/fastops contrib/rapidjson contrib/re2 contrib/sparsehash-c11)
git submodule update --init --recursive $SUBMODULES_TO_UPDATE | ts '%Y-%m-%d %H:%M:%S' | tee /test_output/submodule_log.txt git submodule update --init --recursive "${SUBMODULES_TO_UPDATE[@]}" | ts '%Y-%m-%d %H:%M:%S' | tee /test_output/submodule_log.txt
export CMAKE_LIBS_CONFIG="-DENABLE_LIBRARIES=0 -DENABLE_TESTS=0 -DENABLE_UTILS=0 -DENABLE_EMBEDDED_COMPILER=0 -DENABLE_THINLTO=0 -DUSE_UNWIND=1" export CMAKE_LIBS_CONFIG="-DENABLE_LIBRARIES=0 -DENABLE_TESTS=0 -DENABLE_UTILS=0 -DENABLE_EMBEDDED_COMPILER=0 -DENABLE_THINLTO=0 -DUSE_UNWIND=1"
@ -41,8 +97,7 @@ ccache --zero-stats ||:
mkdir build mkdir build
cd build cd build
CLICKHOUSE_BUILD_DIR=`pwd` cmake .. -DCMAKE_INSTALL_PREFIX=/usr -DCMAKE_CXX_COMPILER=clang++-10 -DCMAKE_C_COMPILER=clang-10 "$CMAKE_LIBS_CONFIG" "${FASTTEST_CMAKE_FLAGS[@]}" | ts '%Y-%m-%d %H:%M:%S' | tee /test_output/cmake_log.txt
cmake .. -DCMAKE_INSTALL_PREFIX=/usr -DCMAKE_CXX_COMPILER=clang++-10 -DCMAKE_C_COMPILER=clang-10 $CMAKE_LIBS_CONFIG | ts '%Y-%m-%d %H:%M:%S' | tee /test_output/cmake_log.txt
ninja clickhouse-bundle | ts '%Y-%m-%d %H:%M:%S' | tee /test_output/build_log.txt ninja clickhouse-bundle | ts '%Y-%m-%d %H:%M:%S' | tee /test_output/build_log.txt
ninja install | ts '%Y-%m-%d %H:%M:%S' | tee /test_output/install_log.txt ninja install | ts '%Y-%m-%d %H:%M:%S' | tee /test_output/install_log.txt
@ -54,8 +109,8 @@ mkdir -p /etc/clickhouse-client
mkdir -p /etc/clickhouse-server/config.d mkdir -p /etc/clickhouse-server/config.d
mkdir -p /etc/clickhouse-server/users.d mkdir -p /etc/clickhouse-server/users.d
ln -s /test_output /var/log/clickhouse-server ln -s /test_output /var/log/clickhouse-server
cp $CLICKHOUSE_DIR/programs/server/config.xml /etc/clickhouse-server/ cp "$CLICKHOUSE_DIR/programs/server/config.xml" /etc/clickhouse-server/
cp $CLICKHOUSE_DIR/programs/server/users.xml /etc/clickhouse-server/ cp "$CLICKHOUSE_DIR/programs/server/users.xml" /etc/clickhouse-server/
mkdir -p /etc/clickhouse-server/dict_examples mkdir -p /etc/clickhouse-server/dict_examples
ln -s /usr/share/clickhouse-test/config/ints_dictionary.xml /etc/clickhouse-server/dict_examples/ ln -s /usr/share/clickhouse-test/config/ints_dictionary.xml /etc/clickhouse-server/dict_examples/
@ -86,21 +141,12 @@ ln -sf /usr/share/clickhouse-test/config/client_config.xml /etc/clickhouse-clien
# Keep original query_masking_rules.xml # Keep original query_masking_rules.xml
ln -s --backup=simple --suffix=_original.xml /usr/share/clickhouse-test/config/query_masking_rules.xml /etc/clickhouse-server/config.d/ ln -s --backup=simple --suffix=_original.xml /usr/share/clickhouse-test/config/query_masking_rules.xml /etc/clickhouse-server/config.d/
# Kill the server in case we are running locally and not in docker
kill_clickhouse
clickhouse-server --config /etc/clickhouse-server/config.xml --daemon clickhouse-server --config /etc/clickhouse-server/config.xml --daemon
counter=0 wait_for_server_start
until clickhouse-client --query "SELECT 1"
do
sleep 0.1
if [ "$counter" -gt 1200 ]
then
break
fi
counter=$(($counter + 1))
done
TESTS_TO_SKIP=( TESTS_TO_SKIP=(
parquet parquet
@ -160,50 +206,58 @@ TESTS_TO_SKIP=(
01411_bayesian_ab_testing 01411_bayesian_ab_testing
01238_http_memory_tracking # max_memory_usage_for_user can interfere another queries running concurrently 01238_http_memory_tracking # max_memory_usage_for_user can interfere another queries running concurrently
01281_group_by_limit_memory_tracking # max_memory_usage_for_user can interfere another queries running concurrently 01281_group_by_limit_memory_tracking # max_memory_usage_for_user can interfere another queries running concurrently
# Not sure why these two fail even in sequential mode. Disabled for now
# to make some progress.
00646_url_engine
00974_query_profiler
) )
clickhouse-test -j 4 --no-long --testname --shard --zookeeper --skip ${TESTS_TO_SKIP[*]} 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee /test_output/test_log.txt clickhouse-test -j 4 --no-long --testname --shard --zookeeper --skip "${TESTS_TO_SKIP[@]}" 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee /test_output/test_log.txt
kill_clickhouse () { # substr is to remove semicolon after test name
killall clickhouse-server ||: readarray -t FAILED_TESTS < <(awk '/FAIL|TIMEOUT|ERROR/ { print substr($3, 1, length($3)-1) }' /test_output/test_log.txt | tee /test_output/failed-parallel-tests.txt)
for i in {1..10} # We will rerun sequentially any tests that have failed during parallel run.
do # They might have failed because there was some interference from other tests
if ! killall -0 clickhouse-server; then # running concurrently. If they fail even in sequential mode, we will report them.
echo "No clickhouse process" # FIXME All tests that require exclusive access to the server must be
break # explicitly marked as `sequential`, and `clickhouse-test` must detect them and
else # run them in a separate group after all other tests. This is faster and also
echo "Clickhouse server process" $(pgrep -f clickhouse-server) "still alive" # explicit instead of guessing.
sleep 10 if [[ -n "${FAILED_TESTS[*]}" ]]
fi then
done
}
FAILED_TESTS=`grep 'FAIL\|TIMEOUT\|ERROR' /test_output/test_log.txt | awk 'BEGIN { ORS=" " }; { print substr($3, 1, length($3)-1) }'`
if [[ ! -z "$FAILED_TESTS" ]]; then
kill_clickhouse kill_clickhouse
# Clean the data so that there is no interference from the previous test run.
rm -rvf /var/lib/clickhouse ||:
mkdir /var/lib/clickhouse
clickhouse-server --config /etc/clickhouse-server/config.xml --daemon clickhouse-server --config /etc/clickhouse-server/config.xml --daemon
counter=0 wait_for_server_start
until clickhouse-client --query "SELECT 1"
do
sleep 0.1
if [ "$counter" -gt 1200 ]
then
break
fi
counter=$(($counter + 1)) echo "Going to run again: ${FAILED_TESTS[*]}"
done
echo "Going to run again: $FAILED_TESTS" clickhouse-test --no-long --testname --shard --zookeeper "${FAILED_TESTS[@]}" 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee -a /test_output/test_log.txt
clickhouse-test --no-long --testname --shard --zookeeper $FAILED_TESTS 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee -a /test_output/test_log.txt
else else
echo "No failed tests" echo "No failed tests"
fi fi
}
case "$stage" in
"")
;&
"clone_root")
clone_root
# TODO bootstrap into the cloned script here. Add this on Sep 1 2020 or
# later, so that most of the old branches are updated with this code.
;&
"run")
run
;&
esac
pstree -apgT
jobs
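
The `;&` terminators in the `case` statement above make each stage fall through to the next, so setting `stage` resumes the script from a midpoint. A minimal standalone sketch of the same pattern (requires bash 4+; the stage names here are hypothetical):

```bash
#!/usr/bin/env bash
stage=${stage:-}
case "$stage" in
"") ;&                        # empty stage runs everything from the top
"one") echo "stage one" ;&    # ;& falls through into the next clause
"two") echo "stage two" ;&
esac
```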

View File

@ -1,5 +1,5 @@
#!/bin/bash #!/bin/bash
set -ex set -exu
set -o pipefail set -o pipefail
trap "exit" INT TERM trap "exit" INT TERM
trap 'kill $(jobs -pr) ||:' EXIT trap 'kill $(jobs -pr) ||:' EXIT
@ -7,6 +7,29 @@ trap 'kill $(jobs -pr) ||:' EXIT
stage=${stage:-} stage=${stage:-}
script_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" script_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
function wait_for_server # port, pid
{
for _ in {1..60}
do
if clickhouse-client --port "$1" --query "select 1" || ! kill -0 "$2"
then
break
fi
sleep 1
done
if ! clickhouse-client --port "$1" --query "select 1"
then
echo "Cannot connect to ClickHouse server at $1"
return 1
fi
if ! kill -0 "$2"
then
echo "Server pid '$2' is not running"
return 1
fi
}
function configure function configure
{ {
@ -27,8 +50,9 @@ function configure
kill -0 $left_pid kill -0 $left_pid
disown $left_pid disown $left_pid
set +m set +m
while ! clickhouse-client --port 9001 --query "select 1" && kill -0 $left_pid ; do echo . ; sleep 1 ; done
echo server for setup started wait_for_server 9001 $left_pid
echo Server for setup started
clickhouse-client --port 9001 --query "create database test" ||: clickhouse-client --port 9001 --query "create database test" ||:
clickhouse-client --port 9001 --query "rename table datasets.hits_v1 to test.hits" ||: clickhouse-client --port 9001 --query "rename table datasets.hits_v1 to test.hits" ||:
@ -67,9 +91,10 @@ function restart
set +m set +m
while ! clickhouse-client --port 9001 --query "select 1" && kill -0 $left_pid ; do echo . ; sleep 1 ; done wait_for_server 9001 $left_pid
echo left ok echo left ok
while ! clickhouse-client --port 9002 --query "select 1" && kill -0 $right_pid ; do echo . ; sleep 1 ; done
wait_for_server 9002 $right_pid
echo right ok echo right ok
clickhouse-client --port 9001 --query "select * from system.tables where database != 'system'" clickhouse-client --port 9001 --query "select * from system.tables where database != 'system'"
@ -89,6 +114,8 @@ function run_tests
# Just check that the script runs at all # Just check that the script runs at all
"$script_dir/perf.py" --help > /dev/null "$script_dir/perf.py" --help > /dev/null
changed_test_files=""
# Find the directory with test files. # Find the directory with test files.
if [ -v CHPC_TEST_PATH ] if [ -v CHPC_TEST_PATH ]
then then
@ -117,6 +144,7 @@ function run_tests
if [ -v CHPC_TEST_GREP ] if [ -v CHPC_TEST_GREP ]
then then
# Run only explicitly specified tests, if any. # Run only explicitly specified tests, if any.
# shellcheck disable=SC2010
test_files=$(ls "$test_prefix" | grep "$CHPC_TEST_GREP" | xargs -I{} -n1 readlink -f "$test_prefix/{}") test_files=$(ls "$test_prefix" | grep "$CHPC_TEST_GREP" | xargs -I{} -n1 readlink -f "$test_prefix/{}")
elif [ "$changed_test_files" != "" ] elif [ "$changed_test_files" != "" ]
then then
@ -130,7 +158,7 @@ function run_tests
# Determine which concurrent benchmarks to run. For now, the only test # Determine which concurrent benchmarks to run. For now, the only test
# we run as a concurrent benchmark is 'website'. Run it as benchmark if we # we run as a concurrent benchmark is 'website'. Run it as benchmark if we
# are also going to run it as a normal test. # are also going to run it as a normal test.
for test in $test_files; do echo $test; done | sed -n '/website/p' > benchmarks-to-run.txt for test in $test_files; do echo "$test"; done | sed -n '/website/p' > benchmarks-to-run.txt
# Delete old report files. # Delete old report files.
for x in {test-times,wall-clock-times}.tsv for x in {test-times,wall-clock-times}.tsv
@ -178,7 +206,7 @@ function run_benchmark
mkdir benchmark ||: mkdir benchmark ||:
# The list is built by run_tests. # The list is built by run_tests.
for file in $(cat benchmarks-to-run.txt) while IFS= read -r file
do do
name=$(basename "$file" ".xml") name=$(basename "$file" ".xml")
@ -190,7 +218,7 @@ function run_benchmark
"${command[@]}" --port 9001 --json "benchmark/$name-left.json" < "benchmark/$name-queries.txt" "${command[@]}" --port 9001 --json "benchmark/$name-left.json" < "benchmark/$name-queries.txt"
"${command[@]}" --port 9002 --json "benchmark/$name-right.json" < "benchmark/$name-queries.txt" "${command[@]}" --port 9002 --json "benchmark/$name-right.json" < "benchmark/$name-queries.txt"
done done < benchmarks-to-run.txt
} }
function get_profiles_watchdog function get_profiles_watchdog
@ -273,8 +301,7 @@ mkdir analyze analyze/tmp ||:
build_log_column_definitions build_log_column_definitions
# Split the raw test output into files suitable for analysis. # Split the raw test output into files suitable for analysis.
IFS=$'\n' for test_file in *-raw.tsv
for test_file in $(find . -maxdepth 1 -name "*-raw.tsv" -print)
do do
test_name=$(basename "$test_file" "-raw.tsv") test_name=$(basename "$test_file" "-raw.tsv")
sed -n "s/^query\t/$test_name\t/p" < "$test_file" >> "analyze/query-runs.tsv" sed -n "s/^query\t/$test_name\t/p" < "$test_file" >> "analyze/query-runs.tsv"
@ -285,7 +312,6 @@ do
sed -n "s/^short\t/$test_name\t/p" < "$test_file" >> "analyze/marked-short-queries.tsv" sed -n "s/^short\t/$test_name\t/p" < "$test_file" >> "analyze/marked-short-queries.tsv"
sed -n "s/^partial\t/$test_name\t/p" < "$test_file" >> "analyze/partial-queries.tsv" sed -n "s/^partial\t/$test_name\t/p" < "$test_file" >> "analyze/partial-queries.tsv"
done done
unset IFS
# for each query run, prepare array of metrics from query log # for each query run, prepare array of metrics from query log
clickhouse-local --query " clickhouse-local --query "
@ -394,7 +420,7 @@ create table query_run_metric_names engine File(TSV, 'analyze/query-run-metric-n
IFS=$'\n' IFS=$'\n'
for prefix in $(cut -f1,2 "analyze/query-run-metrics-for-stats.tsv" | sort | uniq) for prefix in $(cut -f1,2 "analyze/query-run-metrics-for-stats.tsv" | sort | uniq)
do do
file="analyze/tmp/$(echo "$prefix" | sed 's/\t/_/g').tsv" file="analyze/tmp/${prefix// /_}.tsv"
grep "^$prefix " "analyze/query-run-metrics-for-stats.tsv" > "$file" & grep "^$prefix " "analyze/query-run-metrics-for-stats.tsv" > "$file" &
printf "%s\0\n" \ printf "%s\0\n" \
"clickhouse-local \ "clickhouse-local \
@ -831,15 +857,13 @@ wait
unset IFS unset IFS
# Create differential flamegraphs. # Create differential flamegraphs.
IFS=$'\n' while IFS= read -r query_file
for query_file in $(cat report/query-files.txt)
do do
~/fg/difffolded.pl "report/tmp/$query_file.stacks.left.tsv" \ ~/fg/difffolded.pl "report/tmp/$query_file.stacks.left.tsv" \
"report/tmp/$query_file.stacks.right.tsv" \ "report/tmp/$query_file.stacks.right.tsv" \
| tee "report/tmp/$query_file.stacks.diff.tsv" \ | tee "report/tmp/$query_file.stacks.diff.tsv" \
| ~/fg/flamegraph.pl > "$query_file.diff.svg" & | ~/fg/flamegraph.pl > "$query_file.diff.svg" &
done done < report/query-files.txt
unset IFS
wait wait
# Create per-query files with metrics. Note that the key is different from flamegraphs. # Create per-query files with metrics. Note that the key is different from flamegraphs.
@ -906,8 +930,7 @@ create table changes engine File(TSV, 'metrics/changes.tsv') as
) )
order by diff desc order by diff desc
; ;
" " 2> >(tee -a metrics/errors.log 1>&2)
2> >(tee -a metrics/errors.log 1>&2)
IFS=$'\n' IFS=$'\n'
for prefix in $(cut -f1 "metrics/metrics.tsv" | sort | uniq) for prefix in $(cut -f1 "metrics/metrics.tsv" | sort | uniq)
@ -981,7 +1004,7 @@ case "$stage" in
# to collect the logs. Prefer not to restart, because addresses might change # to collect the logs. Prefer not to restart, because addresses might change
# and we won't be able to process trace_log data. Start in a subshell, so that # and we won't be able to process trace_log data. Start in a subshell, so that
# it doesn't interfere with the watchdog through `wait`. # it doesn't interfere with the watchdog through `wait`.
( get_profiles || restart && get_profiles ||: ) ( get_profiles || restart && get_profiles ) ||:
# Kill the whole process group, because somehow when the subshell is killed, # Kill the whole process group, because somehow when the subshell is killed,
# the sleep inside remains alive and orphaned. # the sleep inside remains alive and orphaned.
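
Several hunks above replace `for x in $(cat file)` loops with `while IFS= read -r` redirections. A minimal demonstration of why, using plain bash: the `for` form word-splits every line, while `read -r` preserves each line intact:

```bash
printf 'a b\nc\n' > /tmp/lines.txt
for x in $(cat /tmp/lines.txt); do echo "for:  [$x]"; done        # [a] [b] [c]
while IFS= read -r x; do echo "read: [$x]"; done < /tmp/lines.txt # [a b] [c]
```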

View File

@ -6,6 +6,16 @@
<allow_introspection_functions>1</allow_introspection_functions> <allow_introspection_functions>1</allow_introspection_functions>
<log_queries>1</log_queries> <log_queries>1</log_queries>
<metrics_perf_events_enabled>1</metrics_perf_events_enabled> <metrics_perf_events_enabled>1</metrics_perf_events_enabled>
<!--
If a test takes too long by mistake, the entire test task can
time out and the author won't get a proper message. Put some cap
on query execution time to prevent this. Test query run time is
limited to about 2 seconds, but this limit applies to all queries,
including fill/create and maintenance such as downloading trace
logs, so it must be generous enough. As a second line of defense,
we might also add a time check to the perf.py script.
-->
<max_execution_time>300</max_execution_time>
</default> </default>
</profiles> </profiles>
</yandex> </yandex>

View File

@ -0,0 +1,84 @@
# docker build -t yandex/clickhouse-stateless-unbundled-test .
FROM yandex/clickhouse-test-base
ARG odbc_driver_url="https://github.com/ClickHouse/clickhouse-odbc/releases/download/v1.1.4.20200302/clickhouse-odbc-1.1.4-Linux.tar.gz"
RUN apt-get --allow-unauthenticated update -y \
&& env DEBIAN_FRONTEND=noninteractive \
apt-get --allow-unauthenticated install --yes --no-install-recommends \
alien \
brotli \
cmake \
devscripts \
expect \
gdb \
gperf \
heimdal-multidev \
intel-opencl-icd \
libboost-filesystem-dev \
libboost-iostreams-dev \
libboost-program-options-dev \
libboost-regex-dev \
libboost-system-dev \
libboost-thread-dev \
libc++-dev \
libc++abi-dev \
libcapnp-dev \
libcctz-dev \
libcurl4-openssl-dev \
libdouble-conversion-dev \
libgoogle-perftools-dev \
libhyperscan-dev \
libicu-dev \
libjemalloc-dev \
libldap2-dev \
libltdl-dev \
liblz4-dev \
libmsgpack-dev \
libpoco-dev \
libpoconetssl62 \
librdkafka-dev \
libre2-dev \
libreadline-dev \
libsasl2-dev \
libzstd-dev \
lsof \
moreutils \
ncdu \
netcat-openbsd \
ocl-icd-libopencl1 \
odbcinst \
opencl-headers \
openssl \
perl \
pigz \
pkg-config \
python \
python-lxml \
python-requests \
python-termcolor \
qemu-user-static \
sudo \
telnet \
tree \
tzdata \
unixodbc \
unixodbc-dev \
wget \
zlib1g-dev \
zookeeper \
zookeeperd
RUN mkdir -p /tmp/clickhouse-odbc-tmp \
&& wget --quiet -O - ${odbc_driver_url} | tar --strip-components=1 -xz -C /tmp/clickhouse-odbc-tmp \
&& cp /tmp/clickhouse-odbc-tmp/lib64/*.so /usr/local/lib/ \
&& odbcinst -i -d -f /tmp/clickhouse-odbc-tmp/share/doc/clickhouse-odbc/config/odbcinst.ini.sample \
&& odbcinst -i -s -l -f /tmp/clickhouse-odbc-tmp/share/doc/clickhouse-odbc/config/odbc.ini.sample \
&& rm -rf /tmp/clickhouse-odbc-tmp
ENV TZ=Europe/Moscow
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
COPY run.sh /
CMD ["/bin/bash", "/run.sh"]

View File

@ -0,0 +1,15 @@
# Since we currently can't mount volumes into docker during build, we split building the container into stages:
# 1. build the base container
# 2. run the base container with mounted volumes
# 3. commit the container as an image
FROM ubuntu:18.10 as clickhouse-test-runner-base
# A volume where the directory with clickhouse packages is to be mounted,
# for later installation.
VOLUME /packages
CMD apt-get update ;\
DEBIAN_FRONTEND=noninteractive \
apt install -y /packages/clickhouse-common-static_*.deb \
/packages/clickhouse-client_*.deb \
/packages/clickhouse-test_*.deb

View File

@ -0,0 +1,65 @@
#!/bin/bash
set -e -x
dpkg -i package_folder/clickhouse-common-static_*.deb
dpkg -i package_folder/clickhouse-common-static-dbg_*.deb
dpkg -i package_folder/clickhouse-server_*.deb
dpkg -i package_folder/clickhouse-client_*.deb
dpkg -i package_folder/clickhouse-test_*.deb
mkdir -p /etc/clickhouse-server/dict_examples
ln -s /usr/share/clickhouse-test/config/ints_dictionary.xml /etc/clickhouse-server/dict_examples/
ln -s /usr/share/clickhouse-test/config/strings_dictionary.xml /etc/clickhouse-server/dict_examples/
ln -s /usr/share/clickhouse-test/config/decimals_dictionary.xml /etc/clickhouse-server/dict_examples/
ln -s /usr/share/clickhouse-test/config/zookeeper.xml /etc/clickhouse-server/config.d/
ln -s /usr/share/clickhouse-test/config/listen.xml /etc/clickhouse-server/config.d/
ln -s /usr/share/clickhouse-test/config/part_log.xml /etc/clickhouse-server/config.d/
ln -s /usr/share/clickhouse-test/config/text_log.xml /etc/clickhouse-server/config.d/
ln -s /usr/share/clickhouse-test/config/metric_log.xml /etc/clickhouse-server/config.d/
ln -s /usr/share/clickhouse-test/config/custom_settings_prefixes.xml /etc/clickhouse-server/config.d/
ln -s /usr/share/clickhouse-test/config/log_queries.xml /etc/clickhouse-server/users.d/
ln -s /usr/share/clickhouse-test/config/readonly.xml /etc/clickhouse-server/users.d/
ln -s /usr/share/clickhouse-test/config/access_management.xml /etc/clickhouse-server/users.d/
ln -s /usr/share/clickhouse-test/config/ints_dictionary.xml /etc/clickhouse-server/
ln -s /usr/share/clickhouse-test/config/strings_dictionary.xml /etc/clickhouse-server/
ln -s /usr/share/clickhouse-test/config/decimals_dictionary.xml /etc/clickhouse-server/
ln -s /usr/share/clickhouse-test/config/macros.xml /etc/clickhouse-server/config.d/
ln -s /usr/share/clickhouse-test/config/disks.xml /etc/clickhouse-server/config.d/
ln -s /usr/share/clickhouse-test/config/secure_ports.xml /etc/clickhouse-server/config.d/
ln -s /usr/share/clickhouse-test/config/clusters.xml /etc/clickhouse-server/config.d/
ln -s /usr/share/clickhouse-test/config/graphite.xml /etc/clickhouse-server/config.d/
ln -s /usr/share/clickhouse-test/config/server.key /etc/clickhouse-server/
ln -s /usr/share/clickhouse-test/config/server.crt /etc/clickhouse-server/
ln -s /usr/share/clickhouse-test/config/dhparam.pem /etc/clickhouse-server/
# Retain any pre-existing config and allow ClickHouse to load it if required
ln -s --backup=simple --suffix=_original.xml \
/usr/share/clickhouse-test/config/query_masking_rules.xml /etc/clickhouse-server/config.d/
if [[ -n "$USE_POLYMORPHIC_PARTS" ]] && [[ "$USE_POLYMORPHIC_PARTS" -eq 1 ]]; then
ln -s /usr/share/clickhouse-test/config/polymorphic_parts.xml /etc/clickhouse-server/config.d/
fi
if [[ -n "$USE_DATABASE_ATOMIC" ]] && [[ "$USE_DATABASE_ATOMIC" -eq 1 ]]; then
ln -s /usr/share/clickhouse-test/config/database_atomic_configd.xml /etc/clickhouse-server/config.d/
ln -s /usr/share/clickhouse-test/config/database_atomic_usersd.xml /etc/clickhouse-server/users.d/
fi
ln -sf /usr/share/clickhouse-test/config/client_config.xml /etc/clickhouse-client/config.xml
echo "TSAN_OPTIONS='verbosity=1000 halt_on_error=1 history_size=7'" >> /etc/environment
echo "TSAN_SYMBOLIZER_PATH=/usr/lib/llvm-10/bin/llvm-symbolizer" >> /etc/environment
echo "UBSAN_OPTIONS='print_stacktrace=1'" >> /etc/environment
echo "ASAN_SYMBOLIZER_PATH=/usr/lib/llvm-10/bin/llvm-symbolizer" >> /etc/environment
echo "UBSAN_SYMBOLIZER_PATH=/usr/lib/llvm-10/bin/llvm-symbolizer" >> /etc/environment
echo "LLVM_SYMBOLIZER_PATH=/usr/lib/llvm-10/bin/llvm-symbolizer" >> /etc/environment
service zookeeper start
sleep 5
service clickhouse-server start && sleep 5
if grep -q -- "--use-skip-list" /usr/bin/clickhouse-test; then
SKIP_LIST_OPT="--use-skip-list"
fi
clickhouse-test --testname --shard --zookeeper "$SKIP_LIST_OPT" $ADDITIONAL_OPTIONS $SKIP_TESTS_OPTION 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee test_output/test_result.txt
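
Note the `--` in the `grep` invocation above: it ends option parsing, so a pattern that itself begins with dashes is not mistaken for a flag. A self-contained check:

```bash
echo "--use-skip-list" | grep -q -- "--use-skip-list" && echo "flag is supported"
```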

View File

@ -8,15 +8,32 @@ dpkg -i package_folder/clickhouse-server_*.deb
dpkg -i package_folder/clickhouse-client_*.deb dpkg -i package_folder/clickhouse-client_*.deb
dpkg -i package_folder/clickhouse-test_*.deb dpkg -i package_folder/clickhouse-test_*.deb
function wait_server() function stop()
{
timeout 120 service clickhouse-server stop
# Wait for process to disappear from processlist and also try to kill zombies.
while kill -9 $(pidof clickhouse-server)
do
echo "Killed clickhouse-server"
sleep 0.5
done
}
function start()
{ {
counter=0 counter=0
until clickhouse-client --query "SELECT 1" until clickhouse-client --query "SELECT 1"
do do
if [ "$counter" -gt 120 ] if [ "$counter" -gt 120 ]
then then
echo "Cannot start clickhouse-server"
cat /var/log/clickhouse-server/stdout.log
tail -n1000 /var/log/clickhouse-server/stderr.log
tail -n1000 /var/log/clickhouse-server/clickhouse-server.log
break break
fi fi
timeout 120 service clickhouse-server start
sleep 0.5 sleep 0.5
counter=$(($counter + 1)) counter=$(($counter + 1))
done done
@ -24,24 +41,21 @@ function wait_server()
ln -s /usr/share/clickhouse-test/config/log_queries.xml /etc/clickhouse-server/users.d/ ln -s /usr/share/clickhouse-test/config/log_queries.xml /etc/clickhouse-server/users.d/
ln -s /usr/share/clickhouse-test/config/part_log.xml /etc/clickhouse-server/config.d/ ln -s /usr/share/clickhouse-test/config/part_log.xml /etc/clickhouse-server/config.d/
ln -s /usr/share/clickhouse-test/config/text_log.xml /etc/clickhouse-server/config.d/
echo "TSAN_OPTIONS='halt_on_error=1 history_size=7 ignore_noninstrumented_modules=1 verbosity=1'" >> /etc/environment echo "TSAN_OPTIONS='halt_on_error=1 history_size=7 ignore_noninstrumented_modules=1 verbosity=1'" >> /etc/environment
echo "UBSAN_OPTIONS='print_stacktrace=1'" >> /etc/environment echo "UBSAN_OPTIONS='print_stacktrace=1'" >> /etc/environment
echo "ASAN_OPTIONS='malloc_context_size=10 verbosity=1 allocator_release_to_os_interval_ms=10000'" >> /etc/environment echo "ASAN_OPTIONS='malloc_context_size=10 verbosity=1 allocator_release_to_os_interval_ms=10000'" >> /etc/environment
timeout 120 service clickhouse-server start start
wait_server
/s3downloader --dataset-names $DATASETS /s3downloader --dataset-names $DATASETS
chmod 777 -R /var/lib/clickhouse chmod 777 -R /var/lib/clickhouse
clickhouse-client --query "ATTACH DATABASE IF NOT EXISTS datasets ENGINE = Ordinary" clickhouse-client --query "ATTACH DATABASE IF NOT EXISTS datasets ENGINE = Ordinary"
clickhouse-client --query "CREATE DATABASE IF NOT EXISTS test" clickhouse-client --query "CREATE DATABASE IF NOT EXISTS test"
timeout 120 service clickhouse-server stop stop
timeout 120 service clickhouse-server start start
wait_server
clickhouse-client --query "SHOW TABLES FROM datasets" clickhouse-client --query "SHOW TABLES FROM datasets"
clickhouse-client --query "SHOW TABLES FROM test" clickhouse-client --query "SHOW TABLES FROM test"
@ -51,9 +65,7 @@ clickhouse-client --query "SHOW TABLES FROM test"
./stress --output-folder test_output --skip-func-tests "$SKIP_TESTS_OPTION" ./stress --output-folder test_output --skip-func-tests "$SKIP_TESTS_OPTION"
timeout 120 service clickhouse-server stop stop
timeout 120 service clickhouse-server start start
wait_server
clickhouse-client --query "SELECT 'Server successfully started'" > /test_output/alive_check.txt || echo 'Server failed to start' > /test_output/alive_check.txt clickhouse-client --query "SELECT 'Server successfully started'" > /test_output/alive_check.txt || echo 'Server failed to start' > /test_output/alive_check.txt

View File

@ -1,7 +1,7 @@
# docker build -t yandex/clickhouse-style-test . # docker build -t yandex/clickhouse-style-test .
FROM ubuntu:20.04 FROM ubuntu:20.04
RUN apt-get update && env DEBIAN_FRONTEND=noninteractive apt-get install --yes shellcheck libxml2-utils git RUN apt-get update && env DEBIAN_FRONTEND=noninteractive apt-get install --yes shellcheck libxml2-utils git python3-pip && pip3 install codespell
CMD cd /ClickHouse/utils/check-style && ./check-style -n | tee /test_output/style_output.txt && \ CMD cd /ClickHouse/utils/check-style && ./check-style -n | tee /test_output/style_output.txt && \

View File

@ -146,6 +146,14 @@ $ cd ClickHouse
$ ./release $ ./release
``` ```
## Faster builds for development
Normally all tools of the ClickHouse bundle, such as `clickhouse-server`, `clickhouse-client` etc., are linked into a single static executable, `clickhouse`. This executable must be re-linked on every change, which might be slow. Two common ways to improve linking time are to use the `lld` linker and the 'split' build configuration, which builds a separate binary for every tool and further splits the code into several shared libraries. To enable these tweaks, pass the following flags to `cmake`:
```
-DCMAKE_C_FLAGS="-fuse-ld=lld" -DCMAKE_CXX_FLAGS="-fuse-ld=lld" -DUSE_STATIC_LIBRARIES=0 -DSPLIT_SHARED_LIBRARIES=1 -DCLICKHOUSE_SPLIT_BINARY=1
```
## You Dont Have to Build ClickHouse {#you-dont-have-to-build-clickhouse} ## You Dont Have to Build ClickHouse {#you-dont-have-to-build-clickhouse}
ClickHouse is available in pre-built binaries and packages. Binaries are portable and can be run on any Linux flavour. ClickHouse is available in pre-built binaries and packages. Binaries are portable and can be run on any Linux flavour.
@ -154,4 +162,13 @@ They are built for stable, prestable and testing releases as long as for every c
To find the freshest build from `master`, go to [commits page](https://github.com/ClickHouse/ClickHouse/commits/master), click on the first green checkmark or red cross near commit, and click to the “Details” link right after “ClickHouse Build Check”. To find the freshest build from `master`, go to [commits page](https://github.com/ClickHouse/ClickHouse/commits/master), click on the first green checkmark or red cross near commit, and click to the “Details” link right after “ClickHouse Build Check”.
## Split build configuration {#split-build}
Normally ClickHouse is statically linked into a single static `clickhouse` binary with minimal dependencies. This is convenient for distribution, but it means that on every change the entire binary is linked again, which is slow and may be inconvenient for development. There is an alternative configuration which creates dynamically loaded shared libraries instead, allowing faster incremental builds. To use it, add the following flags to your `cmake` invocation:
```
-DUSE_STATIC_LIBRARIES=0 -DSPLIT_SHARED_LIBRARIES=1 -DCLICKHOUSE_SPLIT_BINARY=1
```
Note that in this configuration there is no single `clickhouse` binary, and you have to run `clickhouse-server`, `clickhouse-client` etc.
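
For illustration, a hypothetical session after a successful split build (the `build/programs` path is an assumption about your build layout):

```bash
ls build/programs/                    # separate clickhouse-server, clickhouse-client, ... binaries
build/programs/clickhouse-client --version
```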
[Original article](https://clickhouse.tech/docs/en/development/build/) <!--hide--> [Original article](https://clickhouse.tech/docs/en/development/build/) <!--hide-->

View File

@ -0,0 +1,215 @@
---
toc_priority: 62
toc_title: Continuous Integration Checks
---
# Continuous Integration Checks
When you submit a pull request, some automated checks are run for your code by
the ClickHouse [continuous integration (CI) system](tests.md#test-automation).
This happens after a repository maintainer (someone from ClickHouse team) has
screened your code and added the `can be tested` label to your pull request.
The results of the checks are listed on the GitHub pull request page as
described in the [GitHub checks
documentation](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/about-status-checks).
If a check is failing, you might be required to fix it. This page gives an
overview of checks you may encounter, and what you can do to fix them.
If it looks like the check failure is not related to your changes, it may be
some transient failure or an infrastructure problem. Push an empty commit to
the pull request to restart the CI checks:
```
git reset
git commit --allow-empty
git push
```
If you are not sure what to do, ask a maintainer for help.
## Merge With Master
Verifies that the PR can be merged to master. If not, it will fail with the
message 'Cannot fetch mergecommit'. To fix this check, resolve the conflict as
described in the [GitHub
documentation](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/resolving-a-merge-conflict-on-github),
or merge the `master` branch to your pull request branch using git.
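
For example, merging `master` into your branch could look like this (assuming the `origin` remote points at the main ClickHouse repository):

```bash
git fetch origin
git merge origin/master   # resolve any conflicts, then commit
git push
```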
## Docs check
Tries to build the ClickHouse documentation website. It can fail if you changed
something in the documentation. The most probable reason is that some cross-link in
the documentation is wrong. Go to the check report and look for `ERROR` and `WARNING` messages.
### Report Details
- [Status page example](https://clickhouse-test-reports.s3.yandex.net/12550/eabcc293eb02214caa6826b7c15f101643f67a6b/docs_check.html)
- `docs_output.txt` contains the building log. [Successful result example](https://clickhouse-test-reports.s3.yandex.net/12550/eabcc293eb02214caa6826b7c15f101643f67a6b/docs_check/docs_output.txt)
## Description Check
Check that the description of your pull request conforms to the template
[PULL_REQUEST_TEMPLATE.md](https://github.com/ClickHouse/ClickHouse/blob/master/.github/PULL_REQUEST_TEMPLATE.md).
You have to specify a changelog category for your change (e.g., Bug Fix), and
write a user-readable message describing the change for [CHANGELOG.md](../whats-new/changelog/index.md).
## Push To Dockerhub
Builds docker images used for build and tests, then pushes them to DockerHub.
## Marker Check
This check means that the CI system started to process the pull request. When it has 'pending' status, it means that not all checks have been started yet. After all checks have been started, it changes status to 'success'.
## Style Check
Performs some simple regex-based checks of code style, using the [`utils/check-style/check-style`](https://github.com/ClickHouse/ClickHouse/blob/master/utils/check-style/check-style) binary (note that it can be run locally).
If it fails, fix the style errors following the [code style guide](style.md).
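
To run the same check locally before pushing, something like the following should work from a repository checkout (mirroring the command used by the style-test image above):

```bash
cd utils/check-style
./check-style -n | tee style_output.txt
```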
### Report Details
- [Status page example](https://clickhouse-test-reports.s3.yandex.net/12550/659c78c7abb56141723af6a81bfae39335aa8cb2/style_check.html)
- `output.txt` contains the errors found by the check (invalid tabulation etc); a blank page means no errors. [Successful result example](https://clickhouse-test-reports.s3.yandex.net/12550/659c78c7abb56141723af6a81bfae39335aa8cb2/style_check/output.txt).
## PVS Check
Check the code with [PVS-studio](https://www.viva64.com/en/pvs-studio/), a static analysis tool. Look at the report to see the exact errors. Fix them if you can; if not, ask a ClickHouse maintainer for help.
### Report Details
- [Status page example](https://clickhouse-test-reports.s3.yandex.net/12550/67d716b5cc3987801996c31a67b31bf141bc3486/pvs_check.html)
- `test_run.txt.out.log` contains the building and analyzing log file. It includes only parsing or not-found errors.
- `HTML report` contains the analysis results. For its description visit PVS's [official site](https://www.viva64.com/en/m/0036/#ID14E9A2B2CD).
## Fast Test
Normally this is the first check that is run for a PR. It builds ClickHouse and
runs most of [stateless functional tests](tests.md#functional-tests), omitting
some. If it fails, further checks are not started until it is fixed. Look at
the report to see which tests fail, then reproduce the failure locally as
described [here](tests.md#functional-test-locally).
### Report Details
[Status page example](https://clickhouse-test-reports.s3.yandex.net/12550/67d716b5cc3987801996c31a67b31bf141bc3486/fast_test.html)
#### Status Page Files
- `runlog.out.log` is the general log that includes all other logs.
- `test_log.txt`
- `submodule_log.txt` contains the messages about cloning and checking out needed submodules.
- `stderr.log`
- `stdout.log`
- `clickhouse-server.log`
- `clone_log.txt`
- `install_log.txt`
- `clickhouse-server.err.log`
- `build_log.txt`
- `cmake_log.txt` contains messages about the C/C++ and Linux flags check.
#### Status Page Columns
- *Test name* contains the name of the test (without the path; e.g. all types of tests will be stripped to just the name).
- *Test status* -- one of _Skipped_, _Success_, or _Fail_.
- *Test time, sec.* -- empty on this test.
## Build Check {#build-check}
Builds ClickHouse in various configurations for use in further steps. You have to fix the builds that fail. Build logs often have enough information to fix the error, but you might have to reproduce the failure locally. The `cmake` options can be found in the build log by grepping for `cmake`. Use these options and follow the [general build process](build.md).
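
A hedged sketch of reproducing a failed build locally; `build_log.txt` stands for the downloaded log (a hypothetical name), and the actual options must be copied from it:

```bash
grep -m1 'cmake' build_log.txt               # recover the exact cmake invocation
mkdir -p build && cd build
cmake .. -DCMAKE_BUILD_TYPE=RelWithDebInfo   # substitute the options found in the log
ninja
```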
### Report Details
[Status page example](https://clickhouse-builds.s3.yandex.net/12550/67d716b5cc3987801996c31a67b31bf141bc3486/clickhouse_build_check/report.html).
- **Compiler**: `gcc-9` or `clang-10` (or `clang-10-xx` for other architectures, e.g. `clang-10-freebsd`).
- **Build type**: `Debug` or `RelWithDebInfo` (cmake).
- **Sanitizer**: `none` (without sanitizers), `address` (ASan), `memory` (MSan), `undefined` (UBSan), or `thread` (TSan).
- **Bundled**: `bundled` build uses libraries from the `contrib` folder, and `unbundled` build uses system libraries.
- **Splitted**: `splitted` means a [split build](build.md#split-build).
- **Status**: `success` or `fail`.
- **Build log**: link to the building and file-copying log, useful when the build failed.
- **Build time**.
- **Artifacts**: build result files (with `XXX` being the server version e.g. `20.8.1.4344`).
- `clickhouse-client_XXX_all.deb`
- `clickhouse-common-static-dbg_XXX[+asan, +msan, +ubsan, +tsan]_amd64.deb`
- `clickhouse-common-staticXXX_amd64.deb`
- `clickhouse-server_XXX_all.deb`
- `clickhouse-test_XXX_all.deb`
- `clickhouse_XXX_amd64.buildinfo`
- `clickhouse_XXX_amd64.changes`
- `clickhouse`: Main built binary.
- `clickhouse-odbc-bridge`
- `unit_tests_dbms`: GoogleTest binary with ClickHouse unit tests.
- `shared_build.tgz`: build with shared libraries.
- `performance.tgz`: Special package for performance tests.
## Special Build Check
Performs static analysis and code style checks using `clang-tidy`. The report is similar to the [build check](#build-check). Fix the errors found in the build log.
## Functional Stateless Tests
Runs [stateless functional tests](tests.md#functional-tests) for ClickHouse
binaries built in various configurations -- release, debug, with sanitizers,
etc. Look at the report to see which tests fail, then reproduce the failure
locally as described [here](tests.md#functional-test-locally). Note that you
have to use the correct build configuration to reproduce -- a test might fail
under AddressSanitizer but pass in Debug. Download the binary from [CI build
checks page](build.md#you-dont-have-to-build-clickhouse), or build it locally.
## Functional Stateful Tests
Runs [stateful functional tests](tests.md#functional-tests). Treat them in the same way as the functional stateless tests. The difference is that they require `hits` and `visits` tables from the [Yandex.Metrica dataset](../getting-started/example-datasets/metrica.md) to run.
## Integration Tests
Runs [integration tests](tests.md#integration-tests).
## Testflows Check
Runs some tests using the Testflows test system. See [here](https://github.com/ClickHouse/ClickHouse/tree/master/tests/testflows#running-tests-locally) for how to run them locally.
## Stress Test
Runs stateless functional tests concurrently from several clients to detect
concurrency-related errors. If it fails:
* Fix all other test failures first;
* Look at the report to find the server logs and check them for possible causes
of error.
## Split Build Smoke Test
Checks that the server build in [split build](build.md#split-build)
configuration can start and run simple queries. If it fails:
* Fix other test errors first;
* Build the server in [split build](build.md#split-build) configuration
locally and check whether it can start and run `select 1`, as sketched below.
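
A minimal local smoke check, assuming the layout of a split build tree (the `programs` path and the config location are assumptions, not a documented interface):

```bash
cd build
./programs/clickhouse-server --config-file=../programs/server/config.xml --daemon
./programs/clickhouse-client --query "select 1"
```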
## Compatibility Check
Checks that `clickhouse` binary runs on distributions with old libc versions. If it fails, ask a maintainer for help.
## AST Fuzzer
Runs randomly generated queries to catch program errors. If it fails, ask a maintainer for help.
## Performance Tests
Measure changes in query performance. This is the longest check, taking just under 6 hours to run. The performance test report is described in detail [here](https://github.com/ClickHouse/ClickHouse/tree/master/docker/test/performance-comparison#how-to-read-the-report).
# QA
> What is a `Task (private network)` item on status pages?
It's a link to Yandex's internal job system. Yandex employees can see the check's start time and a more verbose status.
> Where are the tests run?
Somewhere on Yandex's internal infrastructure.

View File

@ -703,7 +703,7 @@ But other things being equal, cross-platform or portable code is preferred.
**3.** Compiler: `gcc`. At this time (August 2020), the code is compiled using version 9.3. (It can also be compiled using `clang 8`.) **3.** Compiler: `gcc`. At this time (August 2020), the code is compiled using version 9.3. (It can also be compiled using `clang 8`.)
The standard library is used (`libstdc++` or `libc++`). The standard library is used (`libc++`).
**4.**OS: Linux Ubuntu, not older than Precise. **4.**OS: Linux Ubuntu, not older than Precise.

View File

@ -27,6 +27,18 @@ If you want to use distributed queries in functional tests, you can leverage `re
Some tests are marked with `zookeeper`, `shard` or `long` in their names. `zookeeper` is for tests that are using ZooKeeper. `shard` is for tests that require the server to listen `127.0.0.*`; `distributed` or `global` have the same meaning. `long` is for tests that run slightly longer than one second. You can disable these groups of tests using `--no-zookeeper`, `--no-shard` and `--no-long` options, respectively. Some tests are marked with `zookeeper`, `shard` or `long` in their names. `zookeeper` is for tests that are using ZooKeeper. `shard` is for tests that require the server to listen `127.0.0.*`; `distributed` or `global` have the same meaning. `long` is for tests that run slightly longer than one second. You can disable these groups of tests using `--no-zookeeper`, `--no-shard` and `--no-long` options, respectively.
### Running a particular test locally {#functional-test-locally}
Start the ClickHouse server locally, listening on the default port (9000). To
run, for example, the test `01428_hash_set_nan_key`, change to the repository
folder and run the following command:
```
PATH=$PATH:<path to clickhouse-client> tests/clickhouse-test 01428_hash_set_nan_key
```
For more options, see `tests/clickhouse-test --help`.
## Known Bugs {#known-bugs} ## Known Bugs {#known-bugs}
If we know some bugs that can be easily reproduced by functional tests, we place prepared functional tests in `tests/queries/bugs` directory. These tests will be moved to `tests/queries/0_stateless` when bugs are fixed. If we know some bugs that can be easily reproduced by functional tests, we place prepared functional tests in `tests/queries/bugs` directory. These tests will be moved to `tests/queries/0_stateless` when bugs are fixed.
@ -168,7 +180,7 @@ Main ClickHouse code (that is located in `dbms` directory) is built with `-Wall
Clang has even more useful warnings - you can look for them with `-Weverything` and pick something to default build. Clang has even more useful warnings - you can look for them with `-Weverything` and pick something to default build.
For production builds, gcc is used (it still generates slightly more efficient code than clang). For development, clang is usually more convenient to use. You can build on your own machine with debug mode (to save battery of your laptop), but please note that compiler is able to generate more warnings with `-O3` due to better control flow and inter-procedure analysis. When building with clang, `libc++` is used instead of `libstdc++` and when building with debug mode, debug version of `libc++` is used that allows to catch more errors at runtime. For production builds, gcc is used (it still generates slightly more efficient code than clang). For development, clang is usually more convenient to use. You can build on your own machine with debug mode (to save battery of your laptop), but please note that compiler is able to generate more warnings with `-O3` due to better control flow and inter-procedure analysis. When building with clang in debug mode, debug version of `libc++` is used that allows to catch more errors at runtime.
## Sanitizers {#sanitizers} ## Sanitizers {#sanitizers}

View File

@ -31,7 +31,7 @@ For a description of request parameters, see [statement description](../../../sq
**ReplacingMergeTree Parameters** **ReplacingMergeTree Parameters**
- `ver` — column with version. Type `UInt*`, `Date` or `DateTime`. Optional parameter. - `ver` — column with version. Type `UInt*`, `Date`, `DateTime` or `DateTime64`. Optional parameter.
When merging, `ReplacingMergeTree` from all the rows with the same sorting key leaves only one: When merging, `ReplacingMergeTree` from all the rows with the same sorting key leaves only one:

View File

@ -5,6 +5,6 @@ toc_title: MaterializedView
# MaterializedView Table Engine {#materializedview} # MaterializedView Table Engine {#materializedview}
Used for implementing materialized views (for more information, see [CREATE TABLE](../../../sql-reference/statements/create/table.md)). For storing data, it uses a different engine that was specified when creating the view. When reading from a table, it just uses that engine. Used for implementing materialized views (for more information, see [CREATE VIEW](../../../sql-reference/statements/create/view.md#materialized)). For storing data, it uses a different engine that was specified when creating the view. When reading from a table, it just uses that engine.
[Original article](https://clickhouse.tech/docs/en/operations/table_engines/materializedview/) <!--hide--> [Original article](https://clickhouse.tech/docs/en/operations/table_engines/materializedview/) <!--hide-->

View File

@ -19,7 +19,7 @@ $ echo '{"foo":"bar"}' | curl 'http://localhost:8123/?query=INSERT%20INTO%20test
Using [CLI interface](../../interfaces/cli.md): Using [CLI interface](../../interfaces/cli.md):
``` bash ``` bash
$ echo '{"foo":"bar"}' | clickhouse-client ---query="INSERT INTO test FORMAT 20JSONEachRow" $ echo '{"foo":"bar"}' | clickhouse-client ---query="INSERT INTO test FORMAT JSONEachRow"
``` ```
Instead of inserting data manually, you might consider to use one of [client libraries](../../interfaces/index.md) instead. Instead of inserting data manually, you might consider to use one of [client libraries](../../interfaces/index.md) instead.

View File

@ -46,6 +46,7 @@ toc_title: Client Libraries
- Kotlin - Kotlin
- [AORM](https://github.com/TanVD/AORM) - [AORM](https://github.com/TanVD/AORM)
- C\# - C\#
- [Octonica.ClickHouseClient](https://github.com/Octonica/ClickHouseClient)
- [ClickHouse.Ado](https://github.com/killwort/ClickHouse-Net) - [ClickHouse.Ado](https://github.com/killwort/ClickHouse-Net)
- [ClickHouse.Client](https://github.com/DarkWanderer/ClickHouse.Client) - [ClickHouse.Client](https://github.com/DarkWanderer/ClickHouse.Client)
- [ClickHouse.Net](https://github.com/ilyabreev/ClickHouse.Net) - [ClickHouse.Net](https://github.com/ilyabreev/ClickHouse.Net)

View File

@ -15,6 +15,7 @@ toc_title: Adopters
| <a href="https://amadeus.com/" class="favicon">Amadeus</a> | Travel | Analytics | — | — | [Press Release, April 2018](https://www.altinity.com/blog/2018/4/5/amadeus-technologies-launches-investment-and-insights-tool-based-on-machine-learning-and-strategy-algorithms) | | <a href="https://amadeus.com/" class="favicon">Amadeus</a> | Travel | Analytics | — | — | [Press Release, April 2018](https://www.altinity.com/blog/2018/4/5/amadeus-technologies-launches-investment-and-insights-tool-based-on-machine-learning-and-strategy-algorithms) |
| <a href="https://www.appsflyer.com" class="favicon">Appsflyer</a> | Mobile analytics | Main product | — | — | [Talk in Russian, July 2019](https://www.youtube.com/watch?v=M3wbRlcpBbY) | | <a href="https://www.appsflyer.com" class="favicon">Appsflyer</a> | Mobile analytics | Main product | — | — | [Talk in Russian, July 2019](https://www.youtube.com/watch?v=M3wbRlcpBbY) |
| <a href="https://arenadata.tech/" class="favicon">ArenaData</a> | Data Platform | Main product | — | — | [Slides in Russian, December 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup38/indexes.pdf) | | <a href="https://arenadata.tech/" class="favicon">ArenaData</a> | Data Platform | Main product | — | — | [Slides in Russian, December 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup38/indexes.pdf) |
| <a href="https://avito.ru/" class="favicon">Avito</a> | Classifieds | Monitoring | — | — | [Meetup, April 2020](https://www.youtube.com/watch?v=n1tm4j4W8ZQ) |
| <a href="https://badoo.com" class="favicon">Badoo</a> | Dating | Timeseries | — | — | [Slides in Russian, December 2019](https://presentations.clickhouse.tech/meetup38/forecast.pdf) | | <a href="https://badoo.com" class="favicon">Badoo</a> | Dating | Timeseries | — | — | [Slides in Russian, December 2019](https://presentations.clickhouse.tech/meetup38/forecast.pdf) |
| <a href="https://www.benocs.com/" class="favicon">Benocs</a> | Network Telemetry and Analytics | Main Product | — | — | [Slides in English, October 2017](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup9/lpm.pdf) | | <a href="https://www.benocs.com/" class="favicon">Benocs</a> | Network Telemetry and Analytics | Main Product | — | — | [Slides in English, October 2017](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup9/lpm.pdf) |
| <a href="https://www.bloomberg.com/" class="favicon">Bloomberg</a> | Finance, Media | Monitoring | 102 servers | — | [Slides, May 2018](https://www.slideshare.net/Altinity/http-analytics-for-6m-requests-per-second-using-clickhouse-by-alexander-bocharov) | | <a href="https://www.bloomberg.com/" class="favicon">Bloomberg</a> | Finance, Media | Monitoring | 102 servers | — | [Slides, May 2018](https://www.slideshare.net/Altinity/http-analytics-for-6m-requests-per-second-using-clickhouse-by-alexander-bocharov) |
@ -34,7 +35,9 @@ toc_title: Adopters
| <a href="https://www.chinatelecomglobal.com/" class="favicon">Dataliance for China Telecom</a> | Telecom | Analytics | — | — | [Slides in Chinese, January 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup12/telecom.pdf) | | <a href="https://www.chinatelecomglobal.com/" class="favicon">Dataliance for China Telecom</a> | Telecom | Analytics | — | — | [Slides in Chinese, January 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup12/telecom.pdf) |
| <a href="https://db.com" class="favicon">Deutsche Bank</a> | Finance | BI Analytics | — | — | [Slides in English, October 2019](https://bigdatadays.ru/wp-content/uploads/2019/10/D2-H3-3_Yakunin-Goihburg.pdf) | | <a href="https://db.com" class="favicon">Deutsche Bank</a> | Finance | BI Analytics | — | — | [Slides in English, October 2019](https://bigdatadays.ru/wp-content/uploads/2019/10/D2-H3-3_Yakunin-Goihburg.pdf) |
| <a href="https://www.diva-e.com" class="favicon">Diva-e</a> | Digital consulting | Main Product | — | — | [Slides in English, September 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup29/ClickHouse-MeetUp-Unusual-Applications-sd-2019-09-17.pdf) | | <a href="https://www.diva-e.com" class="favicon">Diva-e</a> | Digital consulting | Main Product | — | — | [Slides in English, September 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup29/ClickHouse-MeetUp-Unusual-Applications-sd-2019-09-17.pdf) |
| <a href="https://www.ecwid.com/" class="favicon">Ecwid</a> | E-commerce SaaS | Metrics, Logging | — | — | [Slides in Russian, April 2019](https://nastachku.ru/var/files/1/presentation/backend/2_Backend_6.pdf) |
| <a href="https://www.exness.com" class="favicon">Exness</a> | Trading | Metrics, Logging | — | — | [Talk in Russian, May 2019](https://youtu.be/_rpU-TvSfZ8?t=3215) | | <a href="https://www.exness.com" class="favicon">Exness</a> | Trading | Metrics, Logging | — | — | [Talk in Russian, May 2019](https://youtu.be/_rpU-TvSfZ8?t=3215) |
| <a href="https://fastnetmon.com/" class="favicon">FastNetMon</a> | DDoS Protection | Main Product | | — | [Official website](https://fastnetmon.com/docs-fnm-advanced/fastnetmon-advanced-traffic-persistency/) |
| <a href="https://www.flipkart.com/" class="favicon">Flipkart</a> | e-Commerce | — | — | — | [Talk in English, July 2020](https://youtu.be/GMiXCMFDMow?t=239) | | <a href="https://www.flipkart.com/" class="favicon">Flipkart</a> | e-Commerce | — | — | — | [Talk in English, July 2020](https://youtu.be/GMiXCMFDMow?t=239) |
| <a href="https://fun.co/rp" class="favicon">FunCorp</a> | Games | | — | — | [Article](https://www.altinity.com/blog/migrating-from-redshift-to-clickhouse) | | <a href="https://fun.co/rp" class="favicon">FunCorp</a> | Games | | — | — | [Article](https://www.altinity.com/blog/migrating-from-redshift-to-clickhouse) |
| <a href="https://geniee.co.jp" class="favicon">Geniee</a> | Ad network | Main product | — | — | [Blog post in Japanese, July 2017](https://tech.geniee.co.jp/entry/2017/07/20/160100) | | <a href="https://geniee.co.jp" class="favicon">Geniee</a> | Ad network | Main product | — | — | [Blog post in Japanese, July 2017](https://tech.geniee.co.jp/entry/2017/07/20/160100) |
@ -45,6 +48,7 @@ toc_title: Adopters
| <a href="https://www.instana.com" class="favicon">Instana</a> | APM Platform | Main product | — | — | [Twitter post](https://twitter.com/mieldonkers/status/1248884119158882304) | | <a href="https://www.instana.com" class="favicon">Instana</a> | APM Platform | Main product | — | — | [Twitter post](https://twitter.com/mieldonkers/status/1248884119158882304) |
| <a href="https://integros.com" class="favicon">Integros</a> | Platform for video services | Analytics | — | — | [Slides in Russian, May 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup22/strategies.pdf) | | <a href="https://integros.com" class="favicon">Integros</a> | Platform for video services | Analytics | — | — | [Slides in Russian, May 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup22/strategies.pdf) |
| <a href="https://ippon.tech" class="favicon">Ippon Technologies</a> | Technology Consulting | — | — | — | [Talk in English, July 2020](https://youtu.be/GMiXCMFDMow?t=205) | | <a href="https://ippon.tech" class="favicon">Ippon Technologies</a> | Technology Consulting | — | — | — | [Talk in English, July 2020](https://youtu.be/GMiXCMFDMow?t=205) |
| <a href="https://www.ivi.ru/" class="favicon">Ivi</a> | Online Cinema | Analytics, Monitoring | — | — | [Article in Russian, January 2018](https://habr.com/en/company/ivi/blog/347408/) |
| <a href="https://jinshuju.net" class="favicon">Jinshuju 金数据</a> | BI Analytics | Main product | — | — | [Slides in Chinese, October 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup24/3.%20金数据数据架构调整方案Public.pdf) | | <a href="https://jinshuju.net" class="favicon">Jinshuju 金数据</a> | BI Analytics | Main product | — | — | [Slides in Chinese, October 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup24/3.%20金数据数据架构调整方案Public.pdf) |
| <a href="https://www.kodiakdata.com/" class="favicon">Kodiak Data</a> | Clouds | Main product | — | — | [Slides in English, April 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup13/kodiak_data.pdf) | | <a href="https://www.kodiakdata.com/" class="favicon">Kodiak Data</a> | Clouds | Main product | — | — | [Slides in English, April 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup13/kodiak_data.pdf) |
| <a href="https://kontur.ru" class="favicon">Kontur</a> | Software Development | Metrics | — | — | [Talk in Russian, November 2018](https://www.youtube.com/watch?v=U4u4Bd0FtrY) | | <a href="https://kontur.ru" class="favicon">Kontur</a> | Software Development | Metrics | — | — | [Talk in Russian, November 2018](https://www.youtube.com/watch?v=U4u4Bd0FtrY) |
@ -53,7 +57,9 @@ toc_title: Adopters
| <a href="https://mcs.mail.ru/" class="favicon">Mail.ru Cloud Solutions</a> | Cloud services | Main product | — | — | [Article in Russian](https://mcs.mail.ru/help/db-create/clickhouse#) | | <a href="https://mcs.mail.ru/" class="favicon">Mail.ru Cloud Solutions</a> | Cloud services | Main product | — | — | [Article in Russian](https://mcs.mail.ru/help/db-create/clickhouse#) |
| <a href="https://tech.mymarilyn.ru" class="favicon">Marilyn</a> | Advertising | Statistics | — | — | [Talk in Russian, June 2017](https://www.youtube.com/watch?v=iXlIgx2khwc) | | <a href="https://tech.mymarilyn.ru" class="favicon">Marilyn</a> | Advertising | Statistics | — | — | [Talk in Russian, June 2017](https://www.youtube.com/watch?v=iXlIgx2khwc) |
| <a href="https://www.messagebird.com" class="favicon">MessageBird</a> | Telecommunications | Statistics | — | — | [Slides in English, November 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup20/messagebird.pdf) | | <a href="https://www.messagebird.com" class="favicon">MessageBird</a> | Telecommunications | Statistics | — | — | [Slides in English, November 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup20/messagebird.pdf) |
| <a href="https://www.mindsdb.com/" class="favicon">MindsDB</a> | Machine Learning | Main Product | — | — | [Official Website](https://www.mindsdb.com/blog/machine-learning-models-as-tables-in-ch) |
| <a href="https://www.mgid.com/" class="favicon">MGID</a> | Ad network | Web-analytics | — | — | [Blog post in Russian, April 2020](http://gs-studio.com/news-about-it/32777----clickhouse---c) | | <a href="https://www.mgid.com/" class="favicon">MGID</a> | Ad network | Web-analytics | — | — | [Blog post in Russian, April 2020](http://gs-studio.com/news-about-it/32777----clickhouse---c) |
| <a href="https://getnoc.com/" class="favicon">NOC Project</a> | Network Monitoring | Analytics | — | — | [Official Website](https://getnoc.com/features/big-data/) |
| <a href="https://www.nuna.com/" class="favicon">Nuna Inc.</a> | Health Data Analytics | — | — | — | [Talk in English, July 2020](https://youtu.be/GMiXCMFDMow?t=170) | | <a href="https://www.nuna.com/" class="favicon">Nuna Inc.</a> | Health Data Analytics | — | — | — | [Talk in English, July 2020](https://youtu.be/GMiXCMFDMow?t=170) |
| <a href="https://www.oneapm.com/" class="favicon">OneAPM</a> | Monitoring and Data Analysis | Main product | — | — | [Slides in Chinese, October 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup19/8.%20clickhouse在OneAPM的应用%20杜龙.pdf) | | <a href="https://www.oneapm.com/" class="favicon">OneAPM</a> | Monitoring and Data Analysis | Main product | — | — | [Slides in Chinese, October 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup19/8.%20clickhouse在OneAPM的应用%20杜龙.pdf) |
| <a href="https://www.percent.cn/" class="favicon">Percent 百分点</a> | Analytics | Main Product | — | — | [Slides in Chinese, June 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup24/4.%20ClickHouse万亿数据双中心的设计与实践%20.pdf) | | <a href="https://www.percent.cn/" class="favicon">Percent 百分点</a> | Analytics | Main Product | — | — | [Slides in Chinese, June 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup24/4.%20ClickHouse万亿数据双中心的设计与实践%20.pdf) |
@ -63,6 +69,7 @@ toc_title: Adopters
| <a href="https://www.qingcloud.com/" class="favicon">QINGCLOUD</a> | Cloud services | Main product | — | — | [Slides in Chinese, October 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup19/4.%20Cloud%20%2B%20TSDB%20for%20ClickHouse%20张健%20QingCloud.pdf) | | <a href="https://www.qingcloud.com/" class="favicon">QINGCLOUD</a> | Cloud services | Main product | — | — | [Slides in Chinese, October 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup19/4.%20Cloud%20%2B%20TSDB%20for%20ClickHouse%20张健%20QingCloud.pdf) |
| <a href="https://qrator.net" class="favicon">Qrator</a> | DDoS protection | Main product | — | — | [Blog Post, March 2019](https://blog.qrator.net/en/clickhouse-ddos-mitigation_37/) | | <a href="https://qrator.net" class="favicon">Qrator</a> | DDoS protection | Main product | — | — | [Blog Post, March 2019](https://blog.qrator.net/en/clickhouse-ddos-mitigation_37/) |
| <a href="https://rambler.ru" class="favicon">Rambler</a> | Internet services | Analytics | — | — | [Talk in Russian, April 2018](https://medium.com/@ramblertop/разработка-api-clickhouse-для-рамблер-топ-100-f4c7e56f3141) | | <a href="https://rambler.ru" class="favicon">Rambler</a> | Internet services | Analytics | — | — | [Talk in Russian, April 2018](https://medium.com/@ramblertop/разработка-api-clickhouse-для-рамблер-топ-100-f4c7e56f3141) |
| <a href="https://rspamd.com/" class="favicon">Rspamd</a> | Antispam | Analytics | — | — | [Official Website](https://rspamd.com/doc/modules/clickhouse.html) |
| <a href="https://www.s7.ru" class="favicon">S7 Airlines</a> | Airlines | Metrics, Logging | — | — | [Talk in Russian, March 2019](https://www.youtube.com/watch?v=nwG68klRpPg&t=15s) | | <a href="https://www.s7.ru" class="favicon">S7 Airlines</a> | Airlines | Metrics, Logging | — | — | [Talk in Russian, March 2019](https://www.youtube.com/watch?v=nwG68klRpPg&t=15s) |
| <a href="https://www.scireum.de/" class="favicon">scireum GmbH</a> | e-Commerce | Main product | — | — | [Talk in German, February 2020](https://www.youtube.com/watch?v=7QWAn5RbyR4) | | <a href="https://www.scireum.de/" class="favicon">scireum GmbH</a> | e-Commerce | Main product | — | — | [Talk in German, February 2020](https://www.youtube.com/watch?v=7QWAn5RbyR4) |
| <a href="https://segment.com/" class="favicon">Segment</a> | Data processing | Main product | 9 * i3en.3xlarge nodes, 7.5TB NVMe SSDs, 96GB Memory, 12 vCPUs | — | [Slides, 2019](https://slides.com/abraithwaite/segment-clickhouse) | | <a href="https://segment.com/" class="favicon">Segment</a> | Data processing | Main product | 9 * i3en.3xlarge nodes, 7.5TB NVMe SSDs, 96GB Memory, 12 vCPUs | — | [Slides, 2019](https://slides.com/abraithwaite/segment-clickhouse) |
@ -91,5 +98,6 @@ toc_title: Adopters
| <a href="https://metrica.yandex.com" class="favicon">Yandex Metrica</a> | Web analytics | Main product | 360 servers in one cluster, 1862 servers in one department | 66.41 PiB / 5.68 PiB | [Slides, February 2020](https://presentations.clickhouse.tech/meetup40/introduction/#13) | | <a href="https://metrica.yandex.com" class="favicon">Yandex Metrica</a> | Web analytics | Main product | 360 servers in one cluster, 1862 servers in one department | 66.41 PiB / 5.68 PiB | [Slides, February 2020](https://presentations.clickhouse.tech/meetup40/introduction/#13) |
| <a href="https://htc-cs.ru/" class="favicon">ЦВТ</a> | Software Development | Metrics, Logging | — | — | [Blog Post, March 2019, in Russian](https://vc.ru/dev/62715-kak-my-stroili-monitoring-na-prometheus-clickhouse-i-elk) | | <a href="https://htc-cs.ru/" class="favicon">ЦВТ</a> | Software Development | Metrics, Logging | — | — | [Blog Post, March 2019, in Russian](https://vc.ru/dev/62715-kak-my-stroili-monitoring-na-prometheus-clickhouse-i-elk) |
| <a href="https://mkb.ru/" class="favicon">МКБ</a> | Bank | Web-system monitoring | — | — | [Slides in Russian, September 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup28/mkb.pdf) | | <a href="https://mkb.ru/" class="favicon">МКБ</a> | Bank | Web-system monitoring | — | — | [Slides in Russian, September 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup28/mkb.pdf) |
| <a href="https://cft.ru/" class="favicon">ЦФТ</a> | Banking, Financial products, Payments | — | — | — | [Meetup in Russian, April 2020](https://team.cft.ru/events/162) |
[Original article](https://clickhouse.tech/docs/en/introduction/adopters/) <!--hide--> [Original article](https://clickhouse.tech/docs/en/introduction/adopters/) <!--hide-->

View File

@ -8,7 +8,7 @@ toc_title: Quotas
Quotas allow you to limit resource usage over a period of time or track the use of resources. Quotas allow you to limit resource usage over a period of time or track the use of resources.
Quotas are set up in the user config, which is usually users.xml. Quotas are set up in the user config, which is usually users.xml.
The system also has a feature for limiting the complexity of a single query. See the section “Restrictions on query complexity”). The system also has a feature for limiting the complexity of a single query. See the section [Restrictions on query complexity](../operations/settings/query-complexity.md).
In contrast to query complexity restrictions, quotas: In contrast to query complexity restrictions, quotas:

View File

@ -339,13 +339,13 @@ Writing to the syslog is also supported. Config example:
</logger> </logger>
``` ```
Keys: Keys for syslog:
- use\_syslog — Required setting if you want to write to the syslog. - use\_syslog — Required setting if you want to write to the syslog.
- address — The host\[:port\] of syslogd. If omitted, the local daemon is used. - address — The host\[:port\] of syslogd. If omitted, the local daemon is used.
- hostname — Optional. The name of the host that logs are sent from. - hostname — Optional. The name of the host that logs are sent from.
- facility — [The syslog facility keyword](https://en.wikipedia.org/wiki/Syslog#Facility) in uppercase letters with the “LOG\_” prefix (`LOG_USER`, `LOG_DAEMON`, `LOG_LOCAL3`, and so on). - facility — [The syslog facility keyword](https://en.wikipedia.org/wiki/Syslog#Facility) in uppercase letters with the “LOG\_” prefix (`LOG_USER`, `LOG_DAEMON`, `LOG_LOCAL3`, and so on).
Default value: `LOG_USER` if `address` is specified, `LOG_DAEMON otherwise.` Default value: `LOG_USER` if `address` is specified, `LOG_DAEMON` otherwise.
- format — Message format. Possible values: `bsd` and `syslog`. - format — Message format. Possible values: `bsd` and `syslog`.
## send\_crash\_reports {#server_configuration_parameters-logger} ## send\_crash\_reports {#server_configuration_parameters-logger}
@ -357,8 +357,8 @@ The server will need an access to public Internet via IPv4 (at the time of writi
Keys: Keys:
- `enabled` Boolean flag to enable the feature. Set to `true` to allow sending crash reports. - `enabled` Boolean flag to enable the feature, `false` by default. Set to `true` to allow sending crash reports.
- `endpoint` Overrides the Sentry endpoint. - `endpoint` You can override the Sentry endpoint URL for sending crash reports. It can be either a separate Sentry account or your self-hosted Sentry instance. Use the [Sentry DSN](https://docs.sentry.io/error-reporting/quickstart/?platform=native#configure-the-sdk) syntax.
- `anonymize` - Avoid attaching the server hostname to the crash report. - `anonymize` - Avoid attaching the server hostname to the crash report.
- `http_proxy` - Configure HTTP proxy for sending crash reports. - `http_proxy` - Configure HTTP proxy for sending crash reports.
- `debug` - Sets the Sentry client into debug mode. - `debug` - Sets the Sentry client into debug mode.
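For illustration, a minimal configuration sketch that only opts in (all other keys keep their defaults):

``` xml
<send_crash_reports>
    <!-- Crash reporting is disabled by default; opt in explicitly. -->
    <enabled>true</enabled>
</send_crash_reports>
```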
@ -397,6 +397,7 @@ The cache is shared for the server and memory is allocated as needed. The cache
``` xml ``` xml
<mark_cache_size>5368709120</mark_cache_size> <mark_cache_size>5368709120</mark_cache_size>
``` ```
## max\_server\_memory\_usage {#max_server_memory_usage} ## max\_server\_memory\_usage {#max_server_memory_usage}
Limits total RAM usage by the ClickHouse server. Limits total RAM usage by the ClickHouse server.
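An illustrative sketch (the value is in bytes; this example caps the server at 10 GiB, while the default `0` derives the limit automatically from the amount of available RAM):

``` xml
<!-- Hypothetical cap: limit total server memory usage to 10 GiB. -->
<max_server_memory_usage>10737418240</max_server_memory_usage>
```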
@ -589,7 +590,8 @@ Use the following parameters to configure logging:
- `database` Name of the database. - `database` Name of the database.
- `table` Name of the system table. - `table` Name of the system table.
- `partition_by` Sets a [custom partitioning key](../../engines/table-engines/mergetree-family/custom-partitioning-key.md). - `partition_by` — [Custom partitioning key](../../engines/table-engines/mergetree-family/custom-partitioning-key.md) for a system table. Can't be used if `engine` is defined.
- `engine` — [MergeTree Engine Definition](../../engines/table-engines/mergetree-family/mergetree.md#table_engine-mergetree-creating-a-table) for a system table. Can't be used if `partition_by` is defined.
- `flush_interval_milliseconds` Interval for flushing data from the buffer in memory to the table. - `flush_interval_milliseconds` Interval for flushing data from the buffer in memory to the table.
**Example** **Example**
@ -650,7 +652,8 @@ Use the following parameters to configure logging:
- `database` Name of the database. - `database` Name of the database.
- `table` Name of the system table the queries will be logged in. - `table` Name of the system table the queries will be logged in.
- `partition_by` Sets a [custom partitioning key](../../engines/table-engines/mergetree-family/custom-partitioning-key.md) for a table. - `partition_by` — [Custom partitioning key](../../engines/table-engines/mergetree-family/custom-partitioning-key.md) for a system table. Can't be used if `engine` is defined.
- `engine` — [MergeTree Engine Definition](../../engines/table-engines/mergetree-family/mergetree.md#table_engine-mergetree-creating-a-table) for a system table. Can't be used if `partition_by` is defined.
- `flush_interval_milliseconds` Interval for flushing data from the buffer in memory to the table. - `flush_interval_milliseconds` Interval for flushing data from the buffer in memory to the table.
If the table doesn't exist, ClickHouse will create it. If the structure of the query log changed when the ClickHouse server was updated, the table with the old structure is renamed, and a new table is created automatically. If the table doesn't exist, ClickHouse will create it. If the structure of the query log changed when the ClickHouse server was updated, the table with the old structure is renamed, and a new table is created automatically.
@ -661,7 +664,7 @@ If the table doesnt exist, ClickHouse will create it. If the structure of the
<query_log> <query_log>
<database>system</database> <database>system</database>
<table>query_log</table> <table>query_log</table>
<partition_by>toMonday(event_date)</partition_by> <engine>Engine = MergeTree PARTITION BY event_date ORDER BY event_time TTL event_date + INTERVAL 30 day</engine>
<flush_interval_milliseconds>7500</flush_interval_milliseconds> <flush_interval_milliseconds>7500</flush_interval_milliseconds>
</query_log> </query_log>
``` ```
@ -676,7 +679,8 @@ Use the following parameters to configure logging:
- `database` Name of the database. - `database` Name of the database.
- `table` Name of the system table the queries will be logged in. - `table` Name of the system table the queries will be logged in.
- `partition_by` Sets a [custom partitioning key](../../engines/table-engines/mergetree-family/custom-partitioning-key.md) for a system table. - `partition_by` — [Custom partitioning key](../../engines/table-engines/mergetree-family/custom-partitioning-key.md) for a system table. Can't be used if `engine` is defined.
- `engine` — [MergeTree Engine Definition](../../engines/table-engines/mergetree-family/mergetree.md#table_engine-mergetree-creating-a-table) for a system table. Can't be used if `partition_by` is defined.
- `flush_interval_milliseconds` Interval for flushing data from the buffer in memory to the table. - `flush_interval_milliseconds` Interval for flushing data from the buffer in memory to the table.
If the table doesn't exist, ClickHouse will create it. If the structure of the query thread log changed when the ClickHouse server was updated, the table with the old structure is renamed, and a new table is created automatically. If the table doesn't exist, ClickHouse will create it. If the structure of the query thread log changed when the ClickHouse server was updated, the table with the old structure is renamed, and a new table is created automatically.
@ -692,6 +696,34 @@ If the table doesnt exist, ClickHouse will create it. If the structure of the
</query_thread_log> </query_thread_log>
``` ```
## text\_log {#server_configuration_parameters-text_log}
Settings for the [text\_log](../../operations/system-tables/text_log.md#system_tables-text_log) system table for logging text messages.
Parameters:
- `level` — Maximum message level (by default `Trace`) that will be stored in the table.
- `database` — Database name.
- `table` — Table name.
- `partition_by` — [Custom partitioning key](../../engines/table-engines/mergetree-family/custom-partitioning-key.md) for a system table. Can't be used if `engine` is defined.
- `engine` — [MergeTree Engine Definition](../../engines/table-engines/mergetree-family/mergetree.md#table_engine-mergetree-creating-a-table) for a system table. Can't be used if `partition_by` is defined.
- `flush_interval_milliseconds` — Interval for flushing data from the buffer in memory to the table.
**Example**
```xml
<yandex>
<text_log>
<level>notice</level>
<database>system</database>
<table>text_log</table>
<flush_interval_milliseconds>7500</flush_interval_milliseconds>
<!-- <partition_by>event_date</partition_by> -->
<engine>Engine = MergeTree PARTITION BY event_date ORDER BY event_time TTL event_date + INTERVAL 30 day</engine>
</text_log>
</yandex>
```
## trace\_log {#server_configuration_parameters-trace_log} ## trace\_log {#server_configuration_parameters-trace_log}
Settings for the [trace\_log](../../operations/system-tables/trace_log.md#system_tables-trace_log) system table operation. Settings for the [trace\_log](../../operations/system-tables/trace_log.md#system_tables-trace_log) system table operation.
@ -700,7 +732,8 @@ Parameters:
- `database` — Database for storing a table. - `database` — Database for storing a table.
- `table` — Table name. - `table` — Table name.
- `partition_by` — [Custom partitioning key](../../engines/table-engines/mergetree-family/custom-partitioning-key.md) for a system table. - `partition_by` — [Custom partitioning key](../../engines/table-engines/mergetree-family/custom-partitioning-key.md) for a system table. Can't be used if `engine` is defined.
- `engine` — [MergeTree Engine Definition](../../engines/table-engines/mergetree-family/index.md) for a system table. Can't be used if `partition_by` is defined.
- `flush_interval_milliseconds` — Interval for flushing data from the buffer in memory to the table. - `flush_interval_milliseconds` — Interval for flushing data from the buffer in memory to the table.
The default server configuration file `config.xml` contains the following settings section: The default server configuration file `config.xml` contains the following settings section:
@ -717,7 +750,7 @@ The default server configuration file `config.xml` contains the following settin
## query\_masking\_rules {#query-masking-rules} ## query\_masking\_rules {#query-masking-rules}
Regexp-based rules, which will be applied to queries as well as all log messages before storing them in server logs, Regexp-based rules, which will be applied to queries as well as all log messages before storing them in server logs,
`system.query_log`, `system.text_log`, `system.processes` table, and in logs sent to the client. That allows preventing `system.query_log`, `system.text_log`, `system.processes` tables, and in logs sent to the client. That allows preventing
sensitive data leakage from SQL queries (like names, emails, personal sensitive data leakage from SQL queries (like names, emails, personal
identifiers or credit card numbers) to logs. identifiers or credit card numbers) to logs.
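A minimal sketch of one such rule (the name, pattern and replacement here are illustrative; each rule consists of `name`, `regexp` and `replace` elements):

``` xml
<query_masking_rules>
    <rule>
        <name>hide SSN</name>
        <!-- Fragments matching the regexp are replaced before they reach any log. -->
        <regexp>(^|\D)\d{3}-\d{2}-\d{4}($|\D)</regexp>
        <replace>000-00-0000</replace>
    </rule>
</query_masking_rules>
```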

View File

@ -1546,6 +1546,17 @@ Sets [Confluent Schema Registry](https://docs.confluent.io/current/schema-regist
Default value: `Empty`. Default value: `Empty`.
## input_format_avro_allow_missing_fields {#input_format_avro_allow_missing_fields}
Enables using fields that are not specified in [Avro](../../interfaces/formats.md#data-format-avro) or [AvroConfluent](../../interfaces/formats.md#data-format-avro-confluent) format schema. When a field is not found in the schema, ClickHouse uses the default value instead of throwing an exception.
Possible values:
- 0 — Disabled.
- 1 — Enabled.
Default value: 0.
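For example, a sketch of reading an Avro file whose schema is missing one of the requested columns (the file name and structure are hypothetical):

``` sql
SET input_format_avro_allow_missing_fields = 1;

-- 'events.avro' is a hypothetical file: any requested column absent from
-- its schema is filled with the column's default value instead of failing.
SELECT *
FROM file('events.avro', 'Avro', 'id UInt64, name String');
```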
## background\_pool\_size {#background_pool_size} ## background\_pool\_size {#background_pool_size}
Sets the number of threads performing background operations in table engines (for example, merges in [MergeTree engine](../../engines/table-engines/mergetree-family/index.md) tables). This setting is applied from the `default` profile at ClickHouse server start and can't be changed in a user session. By adjusting this setting, you manage CPU and disk load. Smaller pool size utilizes less CPU and disk resources, but background processes advance more slowly, which might eventually impact query performance. Sets the number of threads performing background operations in table engines (for example, merges in [MergeTree engine](../../engines/table-engines/mergetree-family/index.md) tables). This setting is applied from the `default` profile at ClickHouse server start and can't be changed in a user session. By adjusting this setting, you manage CPU and disk load. Smaller pool size utilizes less CPU and disk resources, but background processes advance more slowly, which might eventually impact query performance.
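Since the value is read from the `default` profile, a sketch of the corresponding `users.xml` fragment (16 is the default):

``` xml
<profiles>
    <default>
        <!-- Threads available for background merges and similar table-engine work. -->
        <background_pool_size>16</background_pool_size>
    </default>
</profiles>
```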

View File

@ -1,4 +1,4 @@
# system.text_log {#system-tables-text-log} # system.text\_log {#system_tables-text_log}
Contains logging entries. The logging level for entries that go to this table can be limited with the `text_log.level` server setting. Contains logging entries. The logging level for entries that go to this table can be limited with the `text_log.level` server setting.
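For example, a quick way to inspect the most recent entries (a minimal sketch):

``` sql
SELECT event_time, level, logger_name, message
FROM system.text_log
ORDER BY event_time DESC
LIMIT 5
```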

View File

@ -0,0 +1,81 @@
---
toc_priority: 114
---
# groupArraySample {#grouparraysample}
Creates an array of sample argument values. The size of the resulting array is limited to `max_size` elements. Argument values are selected and added to the array randomly.
**Syntax**
``` sql
groupArraySample(max_size[, seed])(x)
```
**Parameters**
- `max_size` — Maximum size of the resulting array. [UInt64](../../data-types/int-uint.md).
- `seed` — Seed for the random number generator. Optional. [UInt64](../../data-types/int-uint.md). Default value: `123456`.
- `x` — Argument (column name or expression).
**Returned values**
- Array of randomly selected `x` arguments.
Type: [Array](../../data-types/array.md).
**Examples**
Consider table `colors`:
``` text
┌─id─┬─color──┐
│ 1 │ red │
│ 2 │ blue │
│ 3 │ green │
│ 4 │ white │
│ 5 │ orange │
└────┴────────┘
```
Query with column name as argument:
``` sql
SELECT groupArraySample(3)(color) as newcolors FROM colors;
```
Result:
```text
┌─newcolors──────────────────┐
│ ['white','blue','green'] │
└────────────────────────────┘
```
Query with column name and different seed:
``` sql
SELECT groupArraySample(3, 987654321)(color) as newcolors FROM colors;
```
Result:
```text
┌─newcolors──────────────────┐
│ ['red','orange','green'] │
└────────────────────────────┘
```
Query with expression as argument:
``` sql
SELECT groupArraySample(3)(concat('light-', color)) as newcolors FROM colors;
```
Result:
```text
┌─newcolors───────────────────────────────────┐
│ ['light-blue','light-orange','light-green'] │
└─────────────────────────────────────────────┘
```

View File

@ -60,6 +60,8 @@ ClickHouse-specific aggregate functions:
- [quantile](../../../sql-reference/aggregate-functions/reference/quantile.md) - [quantile](../../../sql-reference/aggregate-functions/reference/quantile.md)
- [quantiles](../../../sql-reference/aggregate-functions/reference/quantiles.md) - [quantiles](../../../sql-reference/aggregate-functions/reference/quantiles.md)
- [quantileExact](../../../sql-reference/aggregate-functions/reference/quantileexact.md) - [quantileExact](../../../sql-reference/aggregate-functions/reference/quantileexact.md)
- [quantileExactLow](../../../sql-reference/aggregate-functions/reference/quantileexact.md#quantileexactlow)
- [quantileExactHigh](../../../sql-reference/aggregate-functions/reference/quantileexact.md#quantileexacthigh)
- [quantileExactWeighted](../../../sql-reference/aggregate-functions/reference/quantileexactweighted.md) - [quantileExactWeighted](../../../sql-reference/aggregate-functions/reference/quantileexactweighted.md)
- [quantileTiming](../../../sql-reference/aggregate-functions/reference/quantiletiming.md) - [quantileTiming](../../../sql-reference/aggregate-functions/reference/quantiletiming.md)
- [quantileTimingWeighted](../../../sql-reference/aggregate-functions/reference/quantiletimingweighted.md) - [quantileTimingWeighted](../../../sql-reference/aggregate-functions/reference/quantiletimingweighted.md)

View File

@ -49,6 +49,114 @@ Result:
└───────────────────────┘ └───────────────────────┘
``` ```
# quantileExactLow {#quantileexactlow}
Similar to `quantileExact`, this computes the exact [quantile](https://en.wikipedia.org/wiki/Quantile) of a numeric data sequence.
To get the exact value, all the passed values are combined into an array, which is then fully sorted. The sorting [algorithm's](https://en.cppreference.com/w/cpp/algorithm/sort) complexity is `O(N·log(N))` comparisons, where `N = std::distance(first, last)`.
At `level=0.5` the function returns the exact lower median if the sequence has an even number of elements, and the middle element if the number of elements is odd. The median is calculated similarly to the [median_low](https://docs.python.org/3/library/statistics.html#statistics.median_low) implementation used in Python.
For all other levels, the element at the index corresponding to the value of `level * size_of_array` is returned. For example:
``` sql
SELECT quantileExactLow(0.1)(number) FROM numbers(10)
```

``` text
┌─quantileExactLow(0.1)(number)─┐
│                             1 │
└───────────────────────────────┘
```
When using multiple `quantile*` functions with different levels in a query, the internal states are not combined (that is, the query works less efficiently than it could). In this case, use the [quantiles](../../../sql-reference/aggregate-functions/reference/quantiles.md#quantiles) function.
**Syntax**
``` sql
quantileExactLow(level)(expr)
```
Alias: `medianExactLow`.
**Parameters**
- `level` — Level of quantile. Optional parameter. Constant floating-point number from 0 to 1. We recommend using a `level` value in the range of `[0.01, 0.99]`. Default value: 0.5. At `level=0.5` the function calculates [median](https://en.wikipedia.org/wiki/Median).
- `expr` — Expression over the column values resulting in numeric [data types](../../../sql-reference/data-types/index.md#data_types), [Date](../../../sql-reference/data-types/date.md) or [DateTime](../../../sql-reference/data-types/datetime.md).
**Returned value**
- Quantile of the specified level.
Type:
- [Float64](../../../sql-reference/data-types/float.md) for numeric data type input.
- [Date](../../../sql-reference/data-types/date.md) if input values have the `Date` type.
- [DateTime](../../../sql-reference/data-types/datetime.md) if input values have the `DateTime` type.
**Example**
Query:
``` sql
SELECT quantileExactLow(number) FROM numbers(10)
```
Result:
``` text
┌─quantileExactLow(number)─┐
│ 4 │
└──────────────────────────┘
```
# quantileExactHigh {#quantileexacthigh}
Similar to `quantileExact`, this computes the exact [quantile](https://en.wikipedia.org/wiki/Quantile) of a numeric data sequence.
To get the exact value, all the passed values are combined into an array, which is then fully sorted. The sorting [algorithm's](https://en.cppreference.com/w/cpp/algorithm/sort) complexity is `O(N·log(N))` comparisons, where `N = std::distance(first, last)`.
At `level=0.5` the function returns the exact higher median if the sequence has an even number of elements, and the middle element if the number of elements is odd. The median is calculated similarly to the [median_high](https://docs.python.org/3/library/statistics.html#statistics.median_high) implementation used in Python. For all other levels, the element at the index corresponding to the value of `level * size_of_array` is returned.
This implementation behaves exactly like the current `quantileExact` implementation.
When using multiple `quantile*` functions with different levels in a query, the internal states are not combined (that is, the query works less efficiently than it could). In this case, use the [quantiles](../../../sql-reference/aggregate-functions/reference/quantiles.md#quantiles) function.
**Syntax**
``` sql
quantileExactHigh(level)(expr)
```
Alias: `medianExactHigh`.
**Parameters**
- `level` — Level of quantile. Optional parameter. Constant floating-point number from 0 to 1. We recommend using a `level` value in the range of `[0.01, 0.99]`. Default value: 0.5. At `level=0.5` the function calculates [median](https://en.wikipedia.org/wiki/Median).
- `expr` — Expression over the column values resulting in numeric [data types](../../../sql-reference/data-types/index.md#data_types), [Date](../../../sql-reference/data-types/date.md) or [DateTime](../../../sql-reference/data-types/datetime.md).
**Returned value**
- Quantile of the specified level.
Type:
- [Float64](../../../sql-reference/data-types/float.md) for numeric data type input.
- [Date](../../../sql-reference/data-types/date.md) if input values have the `Date` type.
- [DateTime](../../../sql-reference/data-types/datetime.md) if input values have the `DateTime` type.
**Example**
Query:
``` sql
SELECT quantileExactHigh(number) FROM numbers(10)
```
Result:
``` text
┌─quantileExactHigh(number)─┐
│ 5 │
└───────────────────────────┘
```
**See Also** **See Also**
- [median](../../../sql-reference/aggregate-functions/reference/median.md#median) - [median](../../../sql-reference/aggregate-functions/reference/median.md#median)

View File

@ -21,15 +21,16 @@ For a case-insensitive search, use the function [positionCaseInsensitive](#posit
**Syntax** **Syntax**
``` sql ``` sql
position(haystack, needle) position(haystack, needle[, start_pos])
``` ```
Alias: `locate(haystack, needle)`. Alias: `locate(haystack, needle[, start_pos])`.
**Parameters** **Parameters**
- `haystack` — string in which the substring will be searched. [String](../../sql-reference/syntax.md#syntax-string-literal). - `haystack` — string in which the substring will be searched. [String](../../sql-reference/syntax.md#syntax-string-literal).
- `needle` — substring to be searched. [String](../../sql-reference/syntax.md#syntax-string-literal). - `needle` — substring to be searched. [String](../../sql-reference/syntax.md#syntax-string-literal).
- `start_pos` — Optional. Position of the first character in the string from which to start the search. [UInt](../../sql-reference/data-types/int-uint.md).
**Returned values** **Returned values**
@ -56,6 +57,18 @@ Result:
└────────────────────────────────┘ └────────────────────────────────┘
``` ```
``` sql
SELECT
position('Hello, world!', 'o', 1),
position('Hello, world!', 'o', 7)
```
``` text
┌─position('Hello, world!', 'o', 1)─┬─position('Hello, world!', 'o', 7)─┐
│ 5 │ 9 │
└───────────────────────────────────┴───────────────────────────────────┘
```
The same phrase in Russian contains characters which can't be represented using a single byte. The function returns an unexpected result (use the [positionUTF8](#positionutf8) function for multi-byte encoded text): The same phrase in Russian contains characters which can't be represented using a single byte. The function returns an unexpected result (use the [positionUTF8](#positionutf8) function for multi-byte encoded text):
Query: Query:
@ -81,13 +94,14 @@ Works under the assumption that the string contains a set of bytes representing
**Syntax** **Syntax**
``` sql ``` sql
positionCaseInsensitive(haystack, needle) positionCaseInsensitive(haystack, needle[, start_pos])
``` ```
**Parameters** **Parameters**
- `haystack` — string in which the substring will be searched. [String](../../sql-reference/syntax.md#syntax-string-literal). - `haystack` — string in which the substring will be searched. [String](../../sql-reference/syntax.md#syntax-string-literal).
- `needle` — substring to be searched. [String](../../sql-reference/syntax.md#syntax-string-literal). - `needle` — substring to be searched. [String](../../sql-reference/syntax.md#syntax-string-literal).
- `start_pos` — Optional. Position of the first character in the string from which to start the search. [UInt](../../sql-reference/data-types/int-uint.md).
**Returned values** **Returned values**
@ -123,13 +137,14 @@ For a case-insensitive search, use the function [positionCaseInsensitiveUTF8](#p
**Syntax** **Syntax**
``` sql ``` sql
positionUTF8(haystack, needle) positionUTF8(haystack, needle[, start_pos])
``` ```
**Parameters** **Parameters**
- `haystack` — string in which the substring will be searched. [String](../../sql-reference/syntax.md#syntax-string-literal). - `haystack` — string in which the substring will be searched. [String](../../sql-reference/syntax.md#syntax-string-literal).
- `needle` — substring to be searched. [String](../../sql-reference/syntax.md#syntax-string-literal). - `needle` — substring to be searched. [String](../../sql-reference/syntax.md#syntax-string-literal).
- `start_pos` — Optional. Position of the first character in the string from which to start the search. [UInt](../../sql-reference/data-types/int-uint.md).
**Returned values** **Returned values**
@ -195,13 +210,14 @@ Works under the assumption that the string contains a set of bytes representing
**Syntax** **Syntax**
``` sql ``` sql
positionCaseInsensitiveUTF8(haystack, needle) positionCaseInsensitiveUTF8(haystack, needle[, start_pos])
``` ```
**Parameters** **Parameters**
- `haystack` — string in which the substring will be searched. [String](../../sql-reference/syntax.md#syntax-string-literal). - `haystack` — string in which the substring will be searched. [String](../../sql-reference/syntax.md#syntax-string-literal).
- `needle` — substring to be searched. [String](../../sql-reference/syntax.md#syntax-string-literal). - `needle` — substring to be searched. [String](../../sql-reference/syntax.md#syntax-string-literal).
- `start_pos` — Optional. Position of the first character in the string from which to start the search. [UInt](../../sql-reference/data-types/int-uint.md).
**Returned value** **Returned value**

View File

@ -14,5 +14,4 @@ The command changes the [sorting key](../../../engines/table-engines/mergetree-f
The command is lightweight in the sense that it only changes metadata. To keep the property that data part rows are ordered by the sorting key expression, you cannot add expressions containing existing columns to the sorting key (only columns added by the `ADD COLUMN` command in the same `ALTER` query). The command is lightweight in the sense that it only changes metadata. To keep the property that data part rows are ordered by the sorting key expression, you cannot add expressions containing existing columns to the sorting key (only columns added by the `ADD COLUMN` command in the same `ALTER` query).
!!! note "Note" !!! note "Note"
It only works for tables in the [`MergeTree`](../../../engines/table-engines/mergetree-family/mergetree.md) family (including It only works for tables in the [`MergeTree`](../../../engines/table-engines/mergetree-family/mergetree.md) family (including [replicated](../../../engines/table-engines/mergetree-family/replication.md) tables).
[replicated](../../../engines/table-engines/mergetree-family/replication.md) tables).
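As a sketch with hypothetical table and column names, a table sorted by `(CounterID, EventDate)` gets a new column that is appended to its sorting key in the same query:

``` sql
-- 'hits' and 'browser' are hypothetical; the column is added and appended
-- in one ALTER, the only kind of sorting-key extension the command permits.
ALTER TABLE hits
    ADD COLUMN browser String,
    MODIFY ORDER BY (CounterID, EventDate, browser)
```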

View File

@ -705,7 +705,7 @@ Pero en igualdad de condiciones, se prefiere el código multiplataforma o portá
**3.** Compilación: `gcc`. En este momento (agosto 2020), el código se compila utilizando la versión 9.3. (También se puede compilar usando `clang 8`.) **3.** Compilación: `gcc`. En este momento (agosto 2020), el código se compila utilizando la versión 9.3. (También se puede compilar usando `clang 8`.)
Se utiliza la biblioteca estándar (`libstdc++` o `libc++`). Se utiliza la biblioteca estándar (`libc++`).
**4.**OS: Linux Ubuntu, no más viejo que Precise. **4.**OS: Linux Ubuntu, no más viejo que Precise.

View File

@ -175,7 +175,7 @@ Código principal de ClickHouse (que se encuentra en `dbms` directorio) se const
Clang tiene advertencias aún más útiles: puedes buscarlas con `-Weverything` y elige algo para la compilación predeterminada. Clang tiene advertencias aún más útiles: puedes buscarlas con `-Weverything` y elige algo para la compilación predeterminada.
Para las compilaciones de producción, se usa gcc (todavía genera un código ligeramente más eficiente que clang). Para el desarrollo, el clang suele ser más conveniente de usar. Puede construir en su propia máquina con el modo de depuración (para ahorrar batería de su computadora portátil), pero tenga en cuenta que el compilador puede generar más advertencias con `-O3` debido a un mejor flujo de control y análisis entre procedimientos. Al construir con clang, `libc++` se utiliza en lugar de `libstdc++` y al construir con el modo de depuración, la versión de depuración de `libc++` se utiliza que permite detectar más errores en tiempo de ejecución. Para las compilaciones de producción, se usa gcc (todavía genera un código ligeramente más eficiente que clang). Para el desarrollo, el clang suele ser más conveniente de usar. Puede construir en su propia máquina con el modo de depuración (para ahorrar batería de su computadora portátil), pero tenga en cuenta que el compilador puede generar más advertencias con `-O3` debido a un mejor flujo de control y análisis entre procedimientos. Al construir con clang con el modo de depuración, la versión de depuración de `libc++` se utiliza que permite detectar más errores en tiempo de ejecución.
## Desinfectantes {#sanitizers} ## Desinfectantes {#sanitizers}

View File

@ -33,7 +33,7 @@ Para obtener una descripción de los parámetros de solicitud, consulte [descrip
**ReplacingMergeTree Parámetros** **ReplacingMergeTree Parámetros**
- `ver` — column with version. Type `UInt*`, `Date` o `DateTime`. Parámetro opcional. - `ver` — column with version. Type `UInt*`, `Date`, `DateTime` o `DateTime64`. Parámetro opcional.
Al fusionar, `ReplacingMergeTree` de todas las filas con la misma clave primaria deja solo una: Al fusionar, `ReplacingMergeTree` de todas las filas con la misma clave primaria deja solo una:

View File

@ -46,6 +46,7 @@ toc_title: Client Libraries
- Kotlin - Kotlin
- [AORM](https://github.com/TanVD/AORM) - [AORM](https://github.com/TanVD/AORM)
- C\# - C\#
- [Octonica.ClickHouseClient](https://github.com/Octonica/ClickHouseClient)
- [ClickHouse.Ado](https://github.com/killwort/ClickHouse-Net) - [ClickHouse.Ado](https://github.com/killwort/ClickHouse-Net)
- [ClickHouse.Client](https://github.com/DarkWanderer/ClickHouse.Client) - [ClickHouse.Client](https://github.com/DarkWanderer/ClickHouse.Client)
- [ClickHouse.Net](https://github.com/ilyabreev/ClickHouse.Net) - [ClickHouse.Net](https://github.com/ilyabreev/ClickHouse.Net)

View File

@ -532,7 +532,7 @@ Columna:
- `query` (String) The query text. For `INSERT`, no incluye los datos para insertar. - `query` (String) The query text. For `INSERT`, no incluye los datos para insertar.
- `query_id` (String) Query ID, if defined. - `query_id` (String) Query ID, if defined.
## sistema.text\_log {#system-tables-text-log} ## sistema.text\_log {#system_tables-text_log}
Contiene entradas de registro. El nivel de registro que va a esta tabla se puede limitar con `text_log.level` configuración del servidor. Contiene entradas de registro. El nivel de registro que va a esta tabla se puede limitar con `text_log.level` configuración del servidor.

View File

@ -20,15 +20,16 @@ Para una búsqueda sin distinción de mayúsculas y minúsculas, utilice la func
**Sintaxis** **Sintaxis**
``` sql ``` sql
position(haystack, needle) position(haystack, needle[, start_pos])
``` ```
Apodo: `locate(haystack, needle)`. Apodo: `locate(haystack, needle[, start_pos])`.
**Parámetros** **Parámetros**
- `haystack` — string in which the substring will be searched. [Cadena](../syntax.md#syntax-string-literal). - `haystack` — string in which the substring will be searched. [Cadena](../syntax.md#syntax-string-literal).
- `needle` — substring to be searched. [Cadena](../syntax.md#syntax-string-literal). - `needle` — substring to be searched. [Cadena](../syntax.md#syntax-string-literal).
- `start_pos` — Optional. Position of the first character in the string from which to start the search. [UInt](../../sql-reference/data-types/int-uint.md).
**Valores devueltos** **Valores devueltos**
@ -80,13 +81,14 @@ Funciona bajo el supuesto de que la cadena contiene un conjunto de bytes que rep
**Sintaxis** **Sintaxis**
``` sql ``` sql
positionCaseInsensitive(haystack, needle) positionCaseInsensitive(haystack, needle[, start_pos])
``` ```
**Parámetros** **Parámetros**
- `haystack` — string in which the substring will be searched. [Cadena](../syntax.md#syntax-string-literal). - `haystack` — string in which the substring will be searched. [Cadena](../syntax.md#syntax-string-literal).
- `needle` — substring to be searched. [Cadena](../syntax.md#syntax-string-literal). - `needle` — substring to be searched. [Cadena](../syntax.md#syntax-string-literal).
- `start_pos` — Optional. Position of the first character in the string from which to start the search. [UInt](../../sql-reference/data-types/int-uint.md).
**Valores devueltos** **Valores devueltos**
@ -122,13 +124,14 @@ Para una búsqueda sin distinción de mayúsculas y minúsculas, utilice la func
**Sintaxis** **Sintaxis**
``` sql ``` sql
positionUTF8(haystack, needle) positionUTF8(haystack, needle[, start_pos])
``` ```
**Parámetros** **Parámetros**
- `haystack` — string in which the substring will be searched. [Cadena](../syntax.md#syntax-string-literal). - `haystack` — string in which the substring will be searched. [Cadena](../syntax.md#syntax-string-literal).
- `needle` — substring to be searched. [Cadena](../syntax.md#syntax-string-literal). - `needle` — substring to be searched. [Cadena](../syntax.md#syntax-string-literal).
- `start_pos` — Optional. Position of the first character in the string from which to start the search. [UInt](../../sql-reference/data-types/int-uint.md).
**Valores devueltos** **Valores devueltos**
@ -194,13 +197,14 @@ Funciona bajo el supuesto de que la cadena contiene un conjunto de bytes que rep
**Sintaxis** **Sintaxis**
``` sql ``` sql
positionCaseInsensitiveUTF8(haystack, needle) positionCaseInsensitiveUTF8(haystack, needle[, start_pos])
``` ```
**Parámetros** **Parámetros**
- `haystack` — string in which the substring will be searched. [Cadena](../syntax.md#syntax-string-literal). - `haystack` — string in which the substring will be searched. [Cadena](../syntax.md#syntax-string-literal).
- `needle` — substring to be searched. [Cadena](../syntax.md#syntax-string-literal). - `needle` — substring to be searched. [Cadena](../syntax.md#syntax-string-literal).
- `start_pos` — Optional. Position of the first character in the string from which to start the search. [UInt](../../sql-reference/data-types/int-uint.md).
**Valor devuelto** **Valor devuelto**

View File

@ -706,7 +706,7 @@ auto s = std::string{"Hello"};
**3.** کامپایلر: `gcc`. در این زمان (اوت 2020), کد با استفاده از نسخه وارد شده 9.3. (همچنین می تواند با استفاده از وارد شود `clang 8`.) **3.** کامپایلر: `gcc`. در این زمان (اوت 2020), کد با استفاده از نسخه وارد شده 9.3. (همچنین می تواند با استفاده از وارد شود `clang 8`.)
کتابخانه استاندارد استفاده شده است (`libstdc++` یا `libc++`). کتابخانه استاندارد استفاده شده است (`libc++`).
**4.**سیستم عامل: لینوکس اوبونتو, مسن تر از دقیق نیست. **4.**سیستم عامل: لینوکس اوبونتو, مسن تر از دقیق نیست.

View File

@ -33,7 +33,7 @@ CREATE TABLE [IF NOT EXISTS] [db.]table_name [ON CLUSTER cluster]
**پارامترهای جایگزین** **پارامترهای جایگزین**
- `ver` — column with version. Type `UInt*`, `Date` یا `DateTime`. پارامتر اختیاری. - `ver` — column with version. Type `UInt*`, `Date`, `DateTime` یا `DateTime64`. پارامتر اختیاری.
هنگام ادغام, `ReplacingMergeTree` از تمام ردیف ها با همان کلید اصلی تنها یک برگ دارد: هنگام ادغام, `ReplacingMergeTree` از تمام ردیف ها با همان کلید اصلی تنها یک برگ دارد:

View File

@ -49,6 +49,7 @@ toc_title: "\u06A9\u062A\u0627\u0628\u062E\u0627\u0646\u0647 \u0647\u0627\u06CC
- کوتلین - کوتلین
- [AORM](https://github.com/TanVD/AORM) - [AORM](https://github.com/TanVD/AORM)
- C\# - C\#
- [Octonica.ClickHouseClient](https://github.com/Octonica/ClickHouseClient)
- [فاحشه خانه.ادو](https://github.com/killwort/ClickHouse-Net) - [فاحشه خانه.ادو](https://github.com/killwort/ClickHouse-Net)
- [فاحشه خانه.کارگیر](https://github.com/DarkWanderer/ClickHouse.Client) - [فاحشه خانه.کارگیر](https://github.com/DarkWanderer/ClickHouse.Client)
- [ClickHouse.Net](https://github.com/ilyabreev/ClickHouse.Net) - [ClickHouse.Net](https://github.com/ilyabreev/ClickHouse.Net)

View File

@ -532,7 +532,7 @@ CurrentMetric_ReplicatedChecks: 0
- `query` (String) The query text. For `INSERT` این شامل داده ها برای وارد کردن نیست. - `query` (String) The query text. For `INSERT` این شامل داده ها برای وارد کردن نیست.
- `query_id` (String) Query ID, if defined. - `query_id` (String) Query ID, if defined.
## سیستم.\_خروج {#system-tables-text-log} ## سیستم.\_خروج {#system_tables-text_log}
شامل ورودی ورود به سیستم. سطح ورود به سیستم که می رود به این جدول را می توان با محدود `text_log.level` تنظیم سرور. شامل ورودی ورود به سیستم. سطح ورود به سیستم که می رود به این جدول را می توان با محدود `text_log.level` تنظیم سرور.

View File

@ -21,15 +21,16 @@ toc_title: "\u0628\u0631\u0627\u06CC \u062C\u0633\u062A\u062C\u0648\u06CC \u0631
**نحو** **نحو**
``` sql ``` sql
position(haystack, needle) position(haystack, needle[, start_pos])
``` ```
نام مستعار: `locate(haystack, needle)`. نام مستعار: `locate(haystack, needle[, start_pos])`.
**پارامترها** **پارامترها**
- `haystack` — string in which the substring will be searched. [رشته](../syntax.md#syntax-string-literal). - `haystack` — string in which the substring will be searched. [رشته](../syntax.md#syntax-string-literal).
- `needle` — substring to be searched. [رشته](../syntax.md#syntax-string-literal). - `needle` — substring to be searched. [رشته](../syntax.md#syntax-string-literal).
- `start_pos` — Optional. Position of the first character in the string from which to start the search. [UInt](../../sql-reference/data-types/int-uint.md).
**مقادیر بازگشتی** **مقادیر بازگشتی**
@ -81,13 +82,14 @@ SELECT position('Привет, мир!', '!')
**نحو** **نحو**
``` sql ``` sql
positionCaseInsensitive(haystack, needle) positionCaseInsensitive(haystack, needle[, start_pos])
``` ```
**پارامترها** **پارامترها**
- `haystack` — string in which the substring will be searched. [رشته](../syntax.md#syntax-string-literal). - `haystack` — string in which the substring will be searched. [رشته](../syntax.md#syntax-string-literal).
- `needle` — substring to be searched. [رشته](../syntax.md#syntax-string-literal). - `needle` — substring to be searched. [رشته](../syntax.md#syntax-string-literal).
- `start_pos` — Optional. Position of the first character in the string from which to start the search. [UInt](../../sql-reference/data-types/int-uint.md).
**مقادیر بازگشتی** **مقادیر بازگشتی**
@ -123,13 +125,14 @@ SELECT positionCaseInsensitive('Hello, world!', 'hello')
**نحو** **نحو**
``` sql ``` sql
positionUTF8(haystack, needle) positionUTF8(haystack, needle[, start_pos])
``` ```
**پارامترها** **پارامترها**
- `haystack` — string in which the substring will be searched. [رشته](../syntax.md#syntax-string-literal). - `haystack` — string in which the substring will be searched. [رشته](../syntax.md#syntax-string-literal).
- `needle` — substring to be searched. [رشته](../syntax.md#syntax-string-literal). - `needle` — substring to be searched. [رشته](../syntax.md#syntax-string-literal).
- `start_pos` — Optional. Position of the first character in the string from which to start the search. [UInt](../../sql-reference/data-types/int-uint.md).
**مقادیر بازگشتی** **مقادیر بازگشتی**
@ -195,13 +198,14 @@ SELECT positionUTF8('Salut, étudiante!', '!')
**نحو** **نحو**
``` sql ``` sql
positionCaseInsensitiveUTF8(haystack, needle) positionCaseInsensitiveUTF8(haystack, needle[, start_pos])
``` ```
**پارامترها** **پارامترها**
- `haystack` — string in which the substring will be searched. [رشته](../syntax.md#syntax-string-literal). - `haystack` — string in which the substring will be searched. [رشته](../syntax.md#syntax-string-literal).
- `needle` — substring to be searched. [رشته](../syntax.md#syntax-string-literal). - `needle` — substring to be searched. [رشته](../syntax.md#syntax-string-literal).
- `start_pos` — Optional. Position of the first character in the string from which to start the search. [UInt](../../sql-reference/data-types/int-uint.md).
**مقدار بازگشتی** **مقدار بازگشتی**

View File

@ -705,7 +705,7 @@ Mais toutes choses étant égales par ailleurs, le code multi-plateforme ou port
**3.** Compilateur: `gcc`. En ce moment (août 2020), le code est compilé en utilisant la version 9.3. (Il peut également être compilé en utilisant `clang 8`.) **3.** Compilateur: `gcc`. En ce moment (août 2020), le code est compilé en utilisant la version 9.3. (Il peut également être compilé en utilisant `clang 8`.)
La bibliothèque standard est utilisée (`libstdc++` ou `libc++`). La bibliothèque standard est utilisée (`libc++`).
**4.**OS: Linux Ubuntu, pas plus vieux que précis. **4.**OS: Linux Ubuntu, pas plus vieux que précis.

View File

@ -175,7 +175,7 @@ Code ClickHouse principal (qui est situé dans `dbms` annuaire) est construit av
Clang a des avertissements encore plus utiles - vous pouvez les chercher avec `-Weverything` et choisissez quelque chose à construire par défaut. Clang a des avertissements encore plus utiles - vous pouvez les chercher avec `-Weverything` et choisissez quelque chose à construire par défaut.
Pour les builds de production, gcc est utilisé (il génère toujours un code légèrement plus efficace que clang). Pour le développement, clang est généralement plus pratique à utiliser. Vous pouvez construire sur votre propre machine avec le mode débogage (pour économiser la batterie de votre ordinateur portable), mais veuillez noter que le compilateur est capable de générer plus d'Avertissements avec `-O3` grâce à une meilleure analyse du flux de contrôle et de l'inter-procédure. Lors de la construction avec clang, `libc++` est utilisé au lieu de `libstdc++` et lors de la construction avec le mode débogage, la version de débogage de `libc++` est utilisé qui permet d'attraper plus d'erreurs à l'exécution. Pour les builds de production, gcc est utilisé (il génère toujours un code légèrement plus efficace que clang). Pour le développement, clang est généralement plus pratique à utiliser. Vous pouvez construire sur votre propre machine avec le mode débogage (pour économiser la batterie de votre ordinateur portable), mais veuillez noter que le compilateur est capable de générer plus d'Avertissements avec `-O3` grâce à une meilleure analyse du flux de contrôle et de l'inter-procédure. Lors de la construction avec clang avec le mode débogage, la version de débogage de `libc++` est utilisé qui permet d'attraper plus d'erreurs à l'exécution.
## Désinfectant {#sanitizers} ## Désinfectant {#sanitizers}

View File

@ -33,7 +33,7 @@ Pour une description des paramètres de requête, voir [demande de description](
**ReplacingMergeTree Paramètres** **ReplacingMergeTree Paramètres**
- `ver` — column with version. Type `UInt*`, `Date` ou `DateTime`. Paramètre facultatif. - `ver` — column with version. Type `UInt*`, `Date`, `DateTime` ou `DateTime64`. Paramètre facultatif.
Lors de la fusion, `ReplacingMergeTree` de toutes les lignes avec la même clé primaire ne laisse qu'un: Lors de la fusion, `ReplacingMergeTree` de toutes les lignes avec la même clé primaire ne laisse qu'un:

View File

@ -48,6 +48,7 @@ toc_title: "Biblioth\xE8ques Clientes"
- Kotlin - Kotlin
- [AORM](https://github.com/TanVD/AORM) - [AORM](https://github.com/TanVD/AORM)
- C\# - C\#
- [Octonica.ClickHouseClient](https://github.com/Octonica/ClickHouseClient)
- [ClickHouse.Ado](https://github.com/killwort/ClickHouse-Net) - [ClickHouse.Ado](https://github.com/killwort/ClickHouse-Net)
- [ClickHouse.Client](https://github.com/DarkWanderer/ClickHouse.Client) - [ClickHouse.Client](https://github.com/DarkWanderer/ClickHouse.Client)
- [ClickHouse.Net](https://github.com/ilyabreev/ClickHouse.Net) - [ClickHouse.Net](https://github.com/ilyabreev/ClickHouse.Net)

View File

@ -532,7 +532,7 @@ Colonne:
- `query` (String) The query text. For `INSERT` il n'inclut pas les données à insérer. - `query` (String) The query text. For `INSERT` il n'inclut pas les données à insérer.
- `query_id` (String) Query ID, if defined. - `query_id` (String) Query ID, if defined.
## système.text\_log {#system-tables-text-log} ## système.text\_log {#system_tables-text_log}
Contient des entrées de journalisation. Niveau de journalisation qui va à cette table peut être limité `text_log.level` paramètre de serveur. Contient des entrées de journalisation. Niveau de journalisation qui va à cette table peut être limité `text_log.level` paramètre de serveur.

View File

@ -320,10 +320,9 @@ ou
LAYOUT(DIRECT()) LAYOUT(DIRECT())
``` ```
### complex\_key\_cache {#complex-key-cache} ### complex\_key\_direct {#complex-key-direct}
Ce type de stockage est pour une utilisation avec composite [touches](external-dicts-dict-structure.md). Semblable à `direct`.
Ce type de stockage est destiné à être utilisé avec des [clés](../../../sql-reference/dictionaries/external-dictionaries/external-dicts-dict-structure.md) composites. Similaire à `direct`.
### ip\_trie {#ip-trie} ### ip\_trie {#ip-trie}

View File

@ -705,7 +705,7 @@ auto s = std::string{"Hello"};
**3.** コンパイラ: `gcc`. 2020年現在、コードはバージョン9.3を使用してコンパイルされている。 (以下を使ってコンパイルできます `clang 8`.) **3.** コンパイラ: `gcc`. 2020年現在、コードはバージョン9.3を使用してコンパイルされている。 (以下を使ってコンパイルできます `clang 8`.)
標準ライブラリが使用されます (`libstdc++` または `libc++`). 標準ライブラリが使用されます (`libc++`).
**4.**OSLinuxのUbuntuの、正確よりも古いではありません。 **4.**OSLinuxのUbuntuの、正確よりも古いではありません。

View File

@ -33,7 +33,7 @@ CREATE TABLE [IF NOT EXISTS] [db.]table_name [ON CLUSTER cluster]
**ReplacingMergeTreeパラメータ** **ReplacingMergeTreeパラメータ**
- `ver` — column with version. Type `UInt*`, `Date` または `DateTime`. 任意パラメータ。 - `ver` — column with version. Type `UInt*`, `Date`, `DateTime` または `DateTime64`. 任意パラメータ。
マージ時, `ReplacingMergeTree` 同じ主キーを持つすべての行から、一つだけを残します: マージ時, `ReplacingMergeTree` 同じ主キーを持つすべての行から、一つだけを残します:

View File

@ -48,6 +48,7 @@ toc_title: "\u30AF\u30E9\u30A4\u30A2\u30F3\u30C8"
- Kotlin
- [AORM](https://github.com/TanVD/AORM)
- C\#
- [Octonica.ClickHouseClient](https://github.com/Octonica/ClickHouseClient)
- [ClickHouse.Ado](https://github.com/killwort/ClickHouse-Net)
- [ClickHouse.Client](https://github.com/DarkWanderer/ClickHouse.Client)
- [ClickHouse.Net](https://github.com/ilyabreev/ClickHouse.Net)

View File

@ -532,7 +532,7 @@ CurrentMetric_ReplicatedChecks: 0
- `query` (String) — The query text. For `INSERT` queries, it does not include the data to insert.
- `query_id` (String) — Query ID, if defined.
## system.text\_log {#system_tables-text_log}
Contains log entries. The logging level for entries going to this table can be limited with the `text_log.level` server setting.

View File

@ -20,15 +20,16 @@ toc_title: "\u6587\u5B57\u5217\u3092\u691C\u7D22\u3059\u308B\u5834\u5408"
**Syntax**
``` sql
position(haystack, needle[, start_pos])
```
Alias: `locate(haystack, needle[, start_pos])`.
**Parameters**
- `haystack` — the string in which the substring is searched. [String](../syntax.md#syntax-string-literal).
- `needle` — the substring to search for. [String](../syntax.md#syntax-string-literal).
- `start_pos` — optional parameter, the position of the first character in the string from which to start the search. [UInt](../../sql-reference/data-types/int-uint.md)
**Returned value**
@ -80,13 +81,14 @@ SELECT position('Привет, мир!', '!')
**Syntax**
``` sql
positionCaseInsensitive(haystack, needle[, start_pos])
```
**Parameters**
- `haystack` — the string in which the substring is searched. [String](../syntax.md#syntax-string-literal).
- `needle` — the substring to search for. [String](../syntax.md#syntax-string-literal).
- `start_pos` — optional parameter, the position of the first character in the string from which to start the search. [UInt](../../sql-reference/data-types/int-uint.md)
**Returned value**
@ -122,13 +124,14 @@ SELECT positionCaseInsensitive('Hello, world!', 'hello')
**Syntax**
``` sql
positionUTF8(haystack, needle[, start_pos])
```
**Parameters**
- `haystack` — the string in which the substring is searched. [String](../syntax.md#syntax-string-literal).
- `needle` — the substring to search for. [String](../syntax.md#syntax-string-literal).
- `start_pos` — optional parameter, the position of the first character in the string from which to start the search. [UInt](../../sql-reference/data-types/int-uint.md)
**Returned value**
@ -194,13 +197,14 @@ SELECT positionUTF8('Salut, étudiante!', '!')
**Syntax**
``` sql
positionCaseInsensitiveUTF8(haystack, needle[, start_pos])
```
**Parameters**
- `haystack` — the string in which the substring is searched. [String](../syntax.md#syntax-string-literal).
- `needle` — the substring to search for. [String](../syntax.md#syntax-string-literal).
- `start_pos` — optional parameter, the position of the first character in the string from which to start the search. [UInt](../../sql-reference/data-types/int-uint.md)
**Returned value**
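As a quick sketch of the optional `start_pos` argument shared by these functions (1-based absolute positions assumed):
``` sql
SELECT
    position('Hello, world!', 'o'),     -- 5: the first match from the start
    position('Hello, world!', 'o', 6);  -- 9: the search begins at position 6
```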

View File

@ -26,7 +26,7 @@ toc_priority: 29
During `INSERT` queries, the table is locked, and other read and write queries wait for the table to be unlocked. If there are no data-writing queries, any number of concurrent read queries can run.
- Do not support [mutation](../../../sql-reference/statements/alter/index.md#mutations) operations.
- Do not support indexes.

View File

@ -113,7 +113,7 @@ drwxr-xr-x 2 clickhouse clickhouse 4096 Feb 1 16:48 detached
201901\_1\_1\_0, 201901\_1\_7\_1, and so on are the directories of partition parts. Each part contains data only for the corresponding month (the table in this example is partitioned by month).
The `detached` directory contains parts detached from the table with the [DETACH](../../../sql-reference/statements/alter/partition.md#alter_detach-partition) query. Corrupted parts are also moved to this directory rather than deleted from the server.
The server does not use parts from the `detached` directory. You can add, delete, or modify data in the detached directory at any time; the server will not know about it until you run an [ATTACH](../../../engines/table-engines/mergetree-family/custom-partitioning-key.md#alter_attach-partition) query.

View File

@ -601,7 +601,7 @@ SETTINGS storage_policy = 'moving_from_ssd_to_hdd'
In `MergeTree` tables, data gets to disk in several ways:
- As a result of an insert (an `INSERT` query).
- During background merges and [mutations](../../../sql-reference/statements/alter/index.md#mutations).
- When downloading data from another replica.
- As a result of partition freezing with [ALTER TABLE … FREEZE PARTITION](../../../engines/table-engines/mergetree-family/mergetree.md#alter_freeze-partition).

View File

@ -25,7 +25,7 @@ CREATE TABLE [IF NOT EXISTS] [db.]table_name [ON CLUSTER cluster]
**ReplacingMergeTree parameters**
- `ver` — column with the version. Type `UInt*`, `Date`, `DateTime`, or `DateTime64`. Optional parameter.
When merging, `ReplacingMergeTree` keeps only one row out of all rows with the same sorting key value:

View File

@ -40,6 +40,7 @@
- Kotlin
- [AORM](https://github.com/TanVD/AORM)
- C\#
- [Octonica.ClickHouseClient](https://github.com/Octonica/ClickHouseClient)
- [ClickHouse.Ado](https://github.com/killwort/ClickHouse-Net)
- [ClickHouse.Client](https://github.com/DarkWanderer/ClickHouse.Client)
- [ClickHouse.Net](https://github.com/ilyabreev/ClickHouse.Net)

View File

@ -56,7 +56,7 @@ ClickHouse поддерживает управление доступом на
Management queries:
- [CREATE USER](../sql-reference/statements/create/user.md#create-user-statement)
- [ALTER USER](../sql-reference/statements/alter/user.md)
- [DROP USER](../sql-reference/statements/misc.md#drop-user-statement)
- [SHOW CREATE USER](../sql-reference/statements/show.md#show-create-user-statement)
@ -83,7 +83,7 @@ ClickHouse поддерживает управление доступом на
Management queries:
- [CREATE ROLE](../sql-reference/statements/create/index.md#create-role-statement)
- [ALTER ROLE](../sql-reference/statements/alter/role.md)
- [DROP ROLE](../sql-reference/statements/misc.md#drop-role-statement)
- [SET ROLE](../sql-reference/statements/misc.md#set-role-statement)
- [SET DEFAULT ROLE](../sql-reference/statements/misc.md#set-default-role-statement)
@ -98,7 +98,7 @@ ClickHouse поддерживает управление доступом на
Management queries:
- [CREATE ROW POLICY](../sql-reference/statements/create/index.md#create-row-policy-statement)
- [ALTER ROW POLICY](../sql-reference/statements/alter/row-policy.md)
- [DROP ROW POLICY](../sql-reference/statements/misc.md#drop-row-policy-statement)
- [SHOW CREATE ROW POLICY](../sql-reference/statements/show.md#show-create-row-policy-statement)
@ -110,7 +110,7 @@ ClickHouse поддерживает управление доступом на
Management queries:
- [CREATE SETTINGS PROFILE](../sql-reference/statements/create/index.md#create-settings-profile-statement)
- [ALTER SETTINGS PROFILE](../sql-reference/statements/alter/settings-profile.md)
- [DROP SETTINGS PROFILE](../sql-reference/statements/misc.md#drop-settings-profile-statement)
- [SHOW CREATE SETTINGS PROFILE](../sql-reference/statements/show.md#show-create-settings-profile-statement)
@ -124,7 +124,7 @@ ClickHouse поддерживает управление доступом на
Management queries:
- [CREATE QUOTA](../sql-reference/statements/create/index.md#create-quota-statement)
- [ALTER QUOTA](../sql-reference/statements/alter/quota.md)
- [DROP QUOTA](../sql-reference/statements/misc.md#drop-quota-statement)
- [SHOW CREATE QUOTA](../sql-reference/statements/show.md#show-create-quota-statement)

View File

@ -27,7 +27,7 @@
ClickHouse lets you use the `ALTER TABLE ... FREEZE PARTITION ...` query to create a local copy of table partitions. This is implemented with hard links to the `/var/lib/clickhouse/shadow/` directory, so such a copy usually does not consume extra disk space for old data. The created file copies are not processed by the ClickHouse server, so you can simply leave them there: you get a simple backup that does not require any additional external system, but with hardware problems you can lose both the current data and the saved copy. For this reason, it is better to copy them remotely to another location and then delete the local copies. Distributed file systems and object stores are still good options for this, but ordinary attached file servers with sufficiently large capacity can also work (in this case the transfer happens over a network file system or, possibly, [rsync](https://en.wikipedia.org/wiki/Rsync)).
For more information about queries related to partition manipulation, see the [ALTER](../sql-reference/statements/alter/partition.md#alter_manipulations-with-partitions) section.
A third-party tool is available to automate this approach: [clickhouse-backup](https://github.com/AlexAkulov/clickhouse-backup).

View File

@ -26,21 +26,28 @@ ClickHouse перезагружает встроенные словари с з
``` xml
<compression>
    <case>
        <min_part_size>...</min_part_size>
        <min_part_size_ratio>...</min_part_size_ratio>
        <method>...</method>
    </case>
    ...
</compression>
```
Fields of the `<case>` block:
- `min_part_size` - The minimum size of a data part of a table.
- `min_part_size_ratio` - The ratio of the minimum part size to the full table size.
- `method` - The compression method. Possible values: `lz4`, `zstd`.
You can configure multiple `<case>` sections.
ClickHouse checks the `min_part_size` and `min_part_size_ratio` conditions and executes the `case` blocks whose conditions match.
- If a data part matches a condition set, ClickHouse uses the specified compression method.
- If a data part matches several `case` blocks, ClickHouse uses the first matching block.
If no `<case>` matches, ClickHouse applies the `lz4` compression algorithm.
**Example**
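A sketch with illustrative thresholds: parts of at least 10 GB that make up at least 1% of the table are compressed with `zstd`; everything else falls back to the default `lz4`:
``` xml
<compression>
    <case>
        <min_part_size>10000000000</min_part_size>
        <min_part_size_ratio>0.01</min_part_size_ratio>
        <method>zstd</method>
    </case>
</compression>
```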
@ -217,7 +224,7 @@ ClickHouse проверит условия `min_part_size` и `min_part_size_rat
The path to the file with substitutions.
For details, see the «[Configuration files](../configuration-files.md#configuration_files)» section.
**Example**
@ -295,11 +302,11 @@ ClickHouse проверит условия `min_part_size` и `min_part_size_rat
Keys:
- `level` - The logging level. Acceptable values: `trace`, `debug`, `information`, `warning`, `error`.
- `log` - The log file. Contains all entries according to `level`.
- `errorlog` - The error log file.
- `size` - The file size. Applies to `log` and `errorlog`. Once the file reaches `size`, ClickHouse archives and renames it, and creates a new log file in its place.
- `count` - The number of archived log files that ClickHouse keeps.
**Example**
@ -327,15 +334,39 @@ ClickHouse проверит условия `min_part_size` и `min_part_size_rat
</logger>
```
Keys for syslog:
- use\_syslog - required setting if you need to write to syslog
- address - host\[:port\] of the syslogd daemon. If omitted, the local daemon is used
- hostname - optional, the name of the host that logs are sent from
- facility - the [syslog facility](https://en.wikipedia.org/wiki/Syslog#Facility), written in uppercase with the «LOG\_» prefix: (`LOG_USER`, `LOG_DAEMON`, `LOG_LOCAL3`, and so on).
Default values: `LOG_USER` if `address` is specified, `LOG_DAEMON` otherwise
- format - the message format. Possible values: `bsd` and `syslog`
## send\_crash\_reports {#server_configuration_parameters-send_crash_reports}
Settings for sending crash reports to the ClickHouse core developers team via [Sentry](https://sentry.io).
Enabling these settings, especially in pre-production environments, can provide very valuable information and help improve ClickHouse.
For this feature to work correctly, the server needs IPv4 internet access (at the time of writing, IPv6 is not supported by the public Sentry cloud).
Keys:
- `enabled` Boolean flag to enable the feature, `false` by default. Set to `true` to allow sending crash reports.
- `endpoint` You can override the URL that crash reports are sent to in order to use your own Sentry installation. Use the [Sentry DSN](https://docs.sentry.io/error-reporting/quickstart/?platform=native#configure-the-sdk) URL syntax.
- `anonymize` - Disable sending the server hostname with the crash report.
- `http_proxy` - HTTP proxy configuration for sending crash reports.
- `debug` - Put the Sentry client library into debug mode.
- `tmp_path` - Filesystem path for temporarily storing crash report state before sending it to the Sentry server.
**Recommended settings**
``` xml
<send_crash_reports>
    <enabled>true</enabled>
</send_crash_reports>
```
## macros {#macros}
Parameter substitutions for replicated tables.
@ -362,19 +393,9 @@ ClickHouse проверит условия `min_part_size` и `min_part_size_rat
<mark_cache_size>5368709120</mark_cache_size>
```
## max\_server\_memory\_usage {#max_server_memory_usage}
Limits the amount of RAM used by the ClickHouse server. The setting can be specified only for the `default` profile.
Possible values:
@ -389,7 +410,8 @@ ClickHouse проверит условия `min_part_size` и `min_part_size_rat
**See also**
- [max\_memory\_usage](../../operations/settings/query-complexity.md#settings_max_memory_usage)
- [max_server_memory_usage_to_ram_ratio](#max_server_memory_usage_to_ram_ratio)
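For illustration, a sketch that caps the whole server at 10 GiB (the value is in bytes; the figure is arbitrary):
``` xml
<!-- 0 would derive the limit from max_server_memory_usage_to_ram_ratio instead -->
<max_server_memory_usage>10737418240</max_server_memory_usage>
```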
## max_server_memory_usage_to_ram_ratio {#max_server_memory_usage_to_ram_ratio}
@ -416,6 +438,16 @@ ClickHouse проверит условия `min_part_size` и `min_part_size_rat
- [max_server_memory_usage](#max_server_memory_usage)
## max\_concurrent\_queries {#max-concurrent-queries}
The maximum number of simultaneously processed queries.
**Example**
``` xml
<max_concurrent_queries>100</max_concurrent_queries>
```
## max\_connections {#max-connections}
The maximum number of inbound connections.
@ -458,6 +490,18 @@ ClickHouse проверит условия `min_part_size` и `min_part_size_rat
<max_table_size_to_drop>0</max_table_size_to_drop>
```
## max\_thread\_pool\_size {#max-thread-pool-size}
The maximum number of threads in the global thread pool.
Default value: 10000.
**Example**
``` xml
<max_thread_pool_size>12000</max_thread_pool_size>
```
## merge\_tree {#server_configuration_parameters-merge_tree}
Fine-tuning of tables in the [MergeTree](../../operations/server-configuration-parameters/settings.md) family.
@ -533,15 +577,16 @@ ClickHouse проверит условия `min_part_size` и `min_part_size_rat
## part\_log {#server_configuration_parameters-part-log}
Logging of events associated with [MergeTree](../../engines/table-engines/mergetree-family/mergetree.md) data, such as adding or merging data parts. You can use the log to simulate merge algorithms and compare their characteristics, or to visualize the merge process.
Queries are logged not to a separate file but to the [system.part\_log](../../operations/system-tables/part_log.md#system_tables-part-log) table. You can change the name of this table in the `table` parameter (see below).
Use the following parameters to configure logging:
- `database` — the database name;
- `table` — the table name;
- `partition_by` — sets a [custom partitioning key](../../operations/server-configuration-parameters/settings.md). Cannot be used together with `engine`;
- `engine` - sets the [MergeTree engine definition](../../engines/table-engines/mergetree-family/mergetree.md#table_engine-mergetree-creating-a-table) for the system table. Cannot be used together with `partition_by`;
- `flush_interval_milliseconds` — the interval for flushing data from the in-memory buffer to the table.
**Example**
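A minimal sketch of a `part_log` section using the parameters above (the partitioning expression is illustrative):
``` xml
<part_log>
    <database>system</database>
    <table>part_log</table>
    <partition_by>toMonday(event_date)</partition_by>
    <flush_interval_milliseconds>7500</flush_interval_milliseconds>
</part_log>
```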
@ -594,15 +639,16 @@ ClickHouse проверит условия `min_part_size` и `min_part_size_rat
## query\_log {#server_configuration_parameters-query-log}
Configuration for logging queries received with the [log\_queries=1](../../operations/settings/settings.md) setting.
Queries are logged not to a separate file but to the [system.query\_log](../../operations/system-tables/query_log.md#system_tables-query_log) system table. You can change the name of this table in the `table` parameter (see below).
Use the following parameters to configure logging:
- `database` — the database name;
- `table` — the name of the table the log will be written to;
- `partition_by` — sets a [custom partitioning key](../../operations/server-configuration-parameters/settings.md). Cannot be used together with `engine`;
- `engine` - sets the [MergeTree engine definition](../../engines/table-engines/mergetree-family/mergetree.md#table_engine-mergetree-creating-a-table) for the system table. Cannot be used together with `partition_by`;
- `flush_interval_milliseconds` — the interval for flushing data from the in-memory buffer to the table.
If the table does not exist, ClickHouse creates it. If the structure of the query log changed after a ClickHouse server update, the table with the old structure is renamed and a new table is created automatically.
@ -613,7 +659,7 @@ ClickHouse проверит условия `min_part_size` и `min_part_size_rat
<query_log>
    <database>system</database>
    <table>query_log</table>
    <engine>Engine = MergeTree PARTITION BY event_date ORDER BY event_time TTL event_date + INTERVAL 30 day</engine>
    <flush_interval_milliseconds>7500</flush_interval_milliseconds>
</query_log>
```
@ -622,13 +668,14 @@ ClickHouse проверит условия `min_part_size` и `min_part_size_rat
Configuration for logging threads of queries received with the [log\_query\_threads=1](../settings/settings.md#settings-log-query-threads) setting.
Queries are logged not to a separate file but to the [system.query\_thread\_log](../../operations/system-tables/query_thread_log.md#system_tables-query_thread_log) system table. You can change the name of this table in the `table` parameter (see below).
Use the following parameters to configure logging:
- `database` — the database name;
- `table` — the name of the table the log will be written to;
- `partition_by` — sets a [custom partitioning key](../../operations/server-configuration-parameters/settings.md). Cannot be used together with `engine`;
- `engine` - sets the [MergeTree engine definition](../../engines/table-engines/mergetree-family/mergetree.md#table_engine-mergetree-creating-a-table) for the system table. Cannot be used together with `partition_by`;
- `flush_interval_milliseconds` — the interval for flushing data from the in-memory buffer to the table.
If the table does not exist, ClickHouse creates it. If the structure of the query thread log changed after a ClickHouse server update, the table with the old structure is renamed and a new table is created automatically.
@ -644,15 +691,44 @@ ClickHouse проверит условия `min_part_size` и `min_part_size_rat
</query_thread_log>
```
## text\_log {#server_configuration_parameters-text_log}
Configuration for logging text messages to the [text\_log](../../operations/system-tables/text_log.md#system_tables-text_log) system table.
Parameters:
- `level` — the maximum message level (`Trace` by default) that is stored in the table.
- `database` — the database for storing the table.
- `table` — the name of the table text messages are written to.
- `partition_by` — sets a [custom partitioning key](../../operations/server-configuration-parameters/settings.md). Cannot be used together with `engine`.
- `engine` - sets the [MergeTree engine definition](../../engines/table-engines/mergetree-family/mergetree.md#table_engine-mergetree-creating-a-table) for the system table. Cannot be used together with `partition_by`.
- `flush_interval_milliseconds` — the interval for flushing data from the in-memory buffer to the table.
**Example**
```xml
<yandex>
    <text_log>
        <level>notice</level>
        <database>system</database>
        <table>text_log</table>
        <flush_interval_milliseconds>7500</flush_interval_milliseconds>
        <!-- <partition_by>event_date</partition_by> -->
        <engine>Engine = MergeTree PARTITION BY event_date ORDER BY event_time TTL event_date + INTERVAL 30 day</engine>
    </text_log>
</yandex>
```
## trace\_log {#server_configuration_parameters-trace_log}
Settings for the [trace\_log](../../operations/system-tables/trace_log.md#system_tables-trace_log) system table operation.
Parameters:
- `database` — Database for storing a table.
- `table` — Table name.
- `partition_by` — Sets a [custom partitioning key](../../operations/server-configuration-parameters/settings.md). Cannot be used together with `engine`.
- `engine` - Sets the [MergeTree engine definition](../../engines/table-engines/mergetree-family/mergetree.md#table_engine-mergetree-creating-a-table) for the system table. Cannot be used together with `partition_by`.
- `flush_interval_milliseconds` — Interval for flushing data from the buffer in memory to the table.
The default server configuration file `config.xml` contains the following settings section:
@ -666,6 +742,36 @@ The default server configuration file `config.xml` contains the following settin
</trace_log>
```
## query\_masking\_rules {#query-masking-rules}
Regular-expression-based rules that are applied to all queries, as well as to all messages, before they are stored in server logs, in the `system.query_log`, `system.text_log`, and `system.processes` tables, and in logs sent to the client. This prevents leaking sensitive data from SQL queries (such as names, emails, personal identifiers, or credit card numbers) into logs.
**Example**
``` xml
<query_masking_rules>
    <rule>
        <name>hide SSN</name>
        <regexp>(^|\D)\d{3}-\d{2}-\d{4}($|\D)</regexp>
        <replace>000-00-0000</replace>
    </rule>
</query_masking_rules>
```
Configuration parameters:
- `name` - the rule name (optional)
- `regexp` - an RE2-compatible regular expression (required)
- `replace` - the replacement string for sensitive data (optional; six asterisks by default)
Masking rules are applied to the whole query (to prevent leaking sensitive data from malformed or non-parsable queries).
The `system.events` table has a `QueryMaskingRulesMatch` counter with the total number of query masking rule matches.
For distributed queries, each server must be configured separately; otherwise subqueries passed to other nodes will be stored without masking.
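For illustration, with the `hide SSN` rule above enabled, an SSN-like literal never reaches the logs (a sketch; the query is hypothetical, and the log table is read after it has been flushed):
``` sql
SELECT 'My SSN is 123-45-6789';

-- In system.query_log the stored query text has the matched substring
-- rewritten to 000-00-0000 by the masking rule.
SELECT query FROM system.query_log ORDER BY event_time DESC LIMIT 1;
```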
## remote\_servers {#server-settings-remote-servers}
Configuration of the clusters used by the [Distributed](../../operations/server-configuration-parameters/settings.md) table engine and by the `cluster` table function.
@ -724,6 +830,10 @@ TCP порт для защищённого обмена данными с кли
The port for communicating with clients over the MySQL protocol.
**Possible values**
A positive integer.
Example
``` xml
@ -742,7 +852,8 @@ TCP порт для защищённого обмена данными с кли
``` xml
<tmp_path>/var/lib/clickhouse/tmp/</tmp_path>
```
## tmp_policy {#tmp-policy}
A policy from [storage_configuration](../../engines/table-engines/mergetree-family/mergetree.md#table_engine-mergetree-multiple-volumes) used to store temporary files.
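A minimal sketch, assuming a volume policy named `tmp_on_ssd` has already been declared in `storage_configuration` (the policy name is hypothetical):
``` xml
<tmp_policy>tmp_on_ssd</tmp_policy>
```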
@ -833,6 +944,7 @@ ClickHouse использует ZooKeeper для хранения метадан
        <port>2181</port>
    </node>
    <session_timeout_ms>30000</session_timeout_ms>
    <operation_timeout_ms>10000</operation_timeout_ms>
    <!-- Optional. Chroot suffix. Should exist. -->
    <root>/path/to/zookeeper/node</root>
    <!-- Optional. Zookeeper digest ACL string. -->
@ -857,7 +969,7 @@ ClickHouse использует ZooKeeper для хранения метадан
- For each individual table.
When creating a table, specify the corresponding [engine setting](../../operations/server-configuration-parameters/settings.md#table_engine-mergetree-creating-a-table). The behavior of an existing table with this setting does not change even if the global setting changes.
**Possible values**

View File

@ -1549,6 +1549,16 @@ SELECT idx, i FROM null_in WHERE i IN (1, NULL) SETTINGS transform_null_in = 1;
Default value: empty string.
## input_format_avro_allow_missing_fields {#input_format_avro_allow_missing_fields}
Allows using data that is missing from the [Avro](../../interfaces/formats.md#data-format-avro) or [AvroConfluent](../../interfaces/formats.md#data-format-avro-confluent) format schema. If a field is not found in the schema, ClickHouse uses the default value instead of throwing an exception.
Possible values:
- 0 — Disabled.
- 1 — Enabled.
Default value: `0`.
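A usage sketch (the file name and column set are hypothetical):
``` sql
SET input_format_avro_allow_missing_fields = 1;

-- A column absent from the Avro schema (here `comment`) is filled
-- with its default value instead of raising an exception.
SELECT * FROM file('events.avro', 'Avro', 'id UInt64, comment String');
```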
## min_insert_block_size_rows_for_materialized_views {#min-insert-block-size-rows-for-materialized-views}
Sets the minimum number of rows in a block that can be inserted into a table with an `INSERT` query. Smaller blocks are squashed into bigger ones. The setting applies only to blocks inserted into a [materialized view](../../sql-reference/statements/create/view.md#create-view) and helps avoid excessive memory consumption.
@ -1596,7 +1606,7 @@ SELECT idx, i FROM null_in WHERE i IN (1, NULL) SETTINGS transform_null_in = 1;
## mutations_sync {#mutations_sync}
Allows executing `ALTER TABLE ... UPDATE|DELETE` queries ([mutations](../../sql-reference/statements/alter/index.md#mutations)) synchronously.
Possible values:
@ -1608,8 +1618,8 @@ SELECT idx, i FROM null_in WHERE i IN (1, NULL) SETTINGS transform_null_in = 1;
**See also**
- [Synchronicity of ALTER queries](../../sql-reference/statements/alter/index.md#synchronicity-of-alter-queries)
- [Mutations](../../sql-reference/statements/alter/index.md#mutations)
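A usage sketch (the table and predicate are hypothetical):
``` sql
-- 2 waits for the mutation to complete on all replicas;
-- 1 waits only for the current server.
SET mutations_sync = 2;
ALTER TABLE events DELETE WHERE event_date < '2020-01-01';
```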
[Original article](https://clickhouse.tech/docs/ru/operations/settings/settings/) <!--hide-->

View File

@ -1,6 +1,6 @@
# system.mutations {#system_tables-mutations}
The table contains information about the progress of [mutations](../../sql-reference/statements/alter/index.md#mutations) of MergeTree-family tables. Each mutation command is represented by a single table row.
Columns:
@ -41,7 +41,7 @@
**See also**
- [Mutations](../../sql-reference/statements/alter/index.md#mutations)
- [The MergeTree engine](../../engines/table-engines/mergetree-family/mergetree.md)
- [Data replication](../../engines/table-engines/mergetree-family/replication.md) (the ReplicatedMergeTree family)

View File

@ -10,7 +10,7 @@
- `NEW_PART` — inserting a new part.
- `MERGE_PARTS` — merging parts.
- `DOWNLOAD_PART` — downloading from a replica.
- `REMOVE_PART` — removing or detaching a part from the table using [DETACH PARTITION](../../sql-reference/statements/alter/partition.md#alter_detach-partition).
- `MUTATE_PART` — mutating a part.
- `MOVE_PART` — moving a part between disks.
- `event_date` (Date) — the event date.

View File

@ -6,7 +6,7 @@
Columns:
- `partition` ([String](../../sql-reference/data-types/string.md)) — the partition name. To learn what a partition is, see the description of the [ALTER](../../sql-reference/statements/alter/index.md#query_language_queries_alter) query.
Formats:
@ -66,7 +66,7 @@
- `primary_key_bytes_in_memory_allocated` ([UInt64](../../sql-reference/data-types/int-uint.md)) — the amount of memory (in bytes) reserved for primary key values.
- `is_frozen` ([UInt8](../../sql-reference/data-types/int-uint.md)) — a flag that shows whether a partition backup exists: 1 if a backup exists, 0 if it does not. For details, see [FREEZE PARTITION](../../sql-reference/statements/alter/partition.md#alter_freeze-partition).
- `database` ([String](../../sql-reference/data-types/string.md)) — the database name.

View File

@ -1,4 +1,4 @@
# system.text_log {#system_tables-text_log}
Contains log entries. The logging level for this table can be limited with the `text_log.level` server setting.

View File

@ -1,33 +1,33 @@
# system.trace_log {#system_tables-trace_log}
Contains stack traces collected by the sampling query profiler.
ClickHouse creates this table when the [trace\_log](../server-configuration-parameters/settings.md#server_configuration_parameters-trace_log) server configuration section is set. The [query_profiler_real_time_period_ns](../settings/settings.md#query_profiler_real_time_period_ns) and [query_profiler_cpu_time_period_ns](../settings/settings.md#query_profiler_cpu_time_period_ns) settings should also be set.
To analyze stack traces, use the `addressToLine`, `addressToSymbol`, and `demangle` introspection functions.
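For illustration, a sketch that turns the raw addresses of one sample into symbol names (assumes introspection functions are enabled for the session):
``` sql
SET allow_introspection_functions = 1;

SELECT arrayStringConcat(arrayMap(x -> demangle(addressToSymbol(x)), trace), '\n') AS symbols
FROM system.trace_log
LIMIT 1;
```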
Columns:
- `event_date` ([Date](../../sql-reference/data-types/date.md)) — the date of the sampling moment.
- `event_time` ([DateTime](../../sql-reference/data-types/datetime.md)) — the timestamp of the sampling moment.
- `revision` ([UInt32](../../sql-reference/data-types/int-uint.md)) — the ClickHouse server build revision.
When connecting to the server with `clickhouse-client`, you see a string similar to `Connected to ClickHouse server version 19.18.1 revision 54429.`. This field contains the `revision`, not the `version`, of the server.
- `timer_type` ([Enum8](../../sql-reference/data-types/enum.md)) — the timer type:
    - `Real` represents wall-clock time.
    - `CPU` represents CPU time.
- `thread_number` ([UInt32](../../sql-reference/data-types/int-uint.md)) — the thread identifier.
- `query_id` ([String](../../sql-reference/data-types/string.md)) — the query identifier that can be used to get details about a query from the [query_log](query_log.md#system_tables-query_log) system table.
- `trace` ([Array(UInt64)](../../sql-reference/data-types/array.md)) — the stack trace at the moment of sampling. Each element is a virtual memory address inside the ClickHouse server process.
**Example**
``` sql
SELECT * FROM system.trace_log LIMIT 1 \G

View File

@ -0,0 +1,80 @@
---
toc_priority: 114
---
# groupArraySample {#grouparraysample}
Creates an array of randomly selected argument values. The number of elements in the array is limited by the `max_size` parameter. Elements are added to the resulting array in random order.
**Syntax**
``` sql
groupArraySample(max_size[, seed])(x)
```
**Parameters**
- `max_size` — the maximum number of elements in the returned array. [UInt64](../../data-types/int-uint.md).
- `seed` — the seed of the random number generator. Optional. [UInt64](../../data-types/int-uint.md). Default value: `123456`.
- `x` — the argument (a column name or an expression).
**Returned values**
- An array of randomly selected values of the `x` argument.
Type: [Array](../../data-types/array.md).
**Examples**
Consider the `colors` table:
``` text
┌─id─┬─color──┐
│ 1 │ red │
│ 2 │ blue │
│ 3 │ green │
│ 4 │ white │
│ 5 │ orange │
└────┴────────┘
```
Query with a column name as the argument:
``` sql
SELECT groupArraySample(3)(color) as newcolors FROM colors;
```
Result:
```text
┌─newcolors──────────────────┐
│ ['white','blue','green'] │
└────────────────────────────┘
```
Query with a column name and a different random number generator seed:
``` sql
SELECT groupArraySample(3, 987654321)(color) as newcolors FROM colors;
```
Result:
```text
┌─newcolors─────────────────────────────┐
│ ['red','orange','green'] │
└───────────────────────────────────────┘
```
Query with an expression as the argument:
``` sql
SELECT groupArraySample(3)(concat('light-', color)) as newcolors FROM colors;
```
Result:
```text
┌─newcolors───────────────────────────────────┐
│ ['light-blue','light-orange','light-green'] │
└─────────────────────────────────────────────┘
```

Some files were not shown because too many files have changed in this diff.