mirror of
https://github.com/ClickHouse/ClickHouse.git
synced 2024-09-20 00:30:49 +00:00
Merge branch 'master' into do-not-run-clear-old-parts-at-shutdown
This commit is contained in:
commit
9a1152339c
27
.clang-tidy
27
.clang-tidy
@ -16,6 +16,7 @@ Checks: '*,
|
|||||||
|
|
||||||
-android-*,
|
-android-*,
|
||||||
|
|
||||||
|
-bugprone-assignment-in-if-condition,
|
||||||
-bugprone-branch-clone,
|
-bugprone-branch-clone,
|
||||||
-bugprone-easily-swappable-parameters,
|
-bugprone-easily-swappable-parameters,
|
||||||
-bugprone-exception-escape,
|
-bugprone-exception-escape,
|
||||||
@ -23,7 +24,6 @@ Checks: '*,
|
|||||||
-bugprone-narrowing-conversions,
|
-bugprone-narrowing-conversions,
|
||||||
-bugprone-not-null-terminated-result,
|
-bugprone-not-null-terminated-result,
|
||||||
-bugprone-unchecked-optional-access,
|
-bugprone-unchecked-optional-access,
|
||||||
-bugprone-assignment-in-if-condition,
|
|
||||||
|
|
||||||
-cert-dcl16-c,
|
-cert-dcl16-c,
|
||||||
-cert-err58-cpp,
|
-cert-err58-cpp,
|
||||||
@ -34,7 +34,6 @@ Checks: '*,
|
|||||||
|
|
||||||
-clang-analyzer-optin.performance.Padding,
|
-clang-analyzer-optin.performance.Padding,
|
||||||
-clang-analyzer-optin.portability.UnixAPI,
|
-clang-analyzer-optin.portability.UnixAPI,
|
||||||
|
|
||||||
-clang-analyzer-security.insecureAPI.bzero,
|
-clang-analyzer-security.insecureAPI.bzero,
|
||||||
-clang-analyzer-security.insecureAPI.strcpy,
|
-clang-analyzer-security.insecureAPI.strcpy,
|
||||||
|
|
||||||
@ -103,12 +102,13 @@ Checks: '*,
|
|||||||
|
|
||||||
-openmp-*,
|
-openmp-*,
|
||||||
|
|
||||||
|
-misc-const-correctness,
|
||||||
-misc-no-recursion,
|
-misc-no-recursion,
|
||||||
-misc-non-private-member-variables-in-classes,
|
-misc-non-private-member-variables-in-classes,
|
||||||
-misc-const-correctness,
|
|
||||||
|
|
||||||
-modernize-avoid-c-arrays,
|
-modernize-avoid-c-arrays,
|
||||||
-modernize-concat-nested-namespaces,
|
-modernize-concat-nested-namespaces,
|
||||||
|
-modernize-macro-to-enum,
|
||||||
-modernize-pass-by-value,
|
-modernize-pass-by-value,
|
||||||
-modernize-return-braced-init-list,
|
-modernize-return-braced-init-list,
|
||||||
-modernize-use-auto,
|
-modernize-use-auto,
|
||||||
@ -117,7 +117,6 @@ Checks: '*,
|
|||||||
-modernize-use-nodiscard,
|
-modernize-use-nodiscard,
|
||||||
-modernize-use-override,
|
-modernize-use-override,
|
||||||
-modernize-use-trailing-return-type,
|
-modernize-use-trailing-return-type,
|
||||||
-modernize-macro-to-enum,
|
|
||||||
|
|
||||||
-performance-inefficient-string-concatenation,
|
-performance-inefficient-string-concatenation,
|
||||||
-performance-no-int-to-ptr,
|
-performance-no-int-to-ptr,
|
||||||
@ -135,17 +134,35 @@ Checks: '*,
|
|||||||
-readability-magic-numbers,
|
-readability-magic-numbers,
|
||||||
-readability-named-parameter,
|
-readability-named-parameter,
|
||||||
-readability-redundant-declaration,
|
-readability-redundant-declaration,
|
||||||
|
-readability-simplify-boolean-expr,
|
||||||
-readability-static-accessed-through-instance,
|
-readability-static-accessed-through-instance,
|
||||||
-readability-suspicious-call-argument,
|
-readability-suspicious-call-argument,
|
||||||
-readability-uppercase-literal-suffix,
|
-readability-uppercase-literal-suffix,
|
||||||
-readability-use-anyofallof,
|
-readability-use-anyofallof,
|
||||||
-readability-simplify-boolean-expr,
|
|
||||||
|
|
||||||
-zirkon-*,
|
-zirkon-*,
|
||||||
|
|
||||||
|
-misc-*, # temporarily disabled due to being too slow
|
||||||
|
# also disable checks in other categories which are aliases of checks in misc-*:
|
||||||
|
# https://releases.llvm.org/15.0.0/tools/clang/tools/extra/docs/clang-tidy/checks/list.html
|
||||||
|
-cert-dcl54-cpp, # alias of misc-new-delete-overloads
|
||||||
|
-hicpp-new-delete-operators, # alias of misc-new-delete-overloads
|
||||||
|
-cert-fio38-c, # alias of misc-non-copyable-objects
|
||||||
|
-cert-dcl03-c, # alias of misc-static-assert
|
||||||
|
-hicpp-static-assert, # alias of misc-static-assert
|
||||||
|
-cert-err09-cpp, # alias of misc-throw-by-value-catch-by-reference
|
||||||
|
-cert-err61-cpp, # alias of misc-throw-by-value-catch-by-reference
|
||||||
|
-cppcoreguidelines-c-copy-assignment-signature, # alias of misc-unconventional-assign-operator
|
||||||
|
-cppcoreguidelines-non-private-member-variables-in-classes, # alias of misc-non-private-member-variables-in-classes
|
||||||
'
|
'
|
||||||
|
|
||||||
WarningsAsErrors: '*'
|
WarningsAsErrors: '*'
|
||||||
|
|
||||||
|
# TODO: use dictionary syntax for CheckOptions when minimum clang-tidy level rose to 15
|
||||||
|
# some-check.SomeOption: 'some value'
|
||||||
|
# instead of
|
||||||
|
# - key: some-check.SomeOption
|
||||||
|
# value: 'some value'
|
||||||
CheckOptions:
|
CheckOptions:
|
||||||
- key: readability-identifier-naming.ClassCase
|
- key: readability-identifier-naming.ClassCase
|
||||||
value: CamelCase
|
value: CamelCase
|
||||||
|
3
.github/workflows/tags_stable.yml
vendored
3
.github/workflows/tags_stable.yml
vendored
@ -51,7 +51,7 @@ jobs:
|
|||||||
--gh-user-or-token="$GITHUB_TOKEN" --jobs=5 \
|
--gh-user-or-token="$GITHUB_TOKEN" --jobs=5 \
|
||||||
--output="/ClickHouse/docs/changelogs/${GITHUB_TAG}.md" "${GITHUB_TAG}"
|
--output="/ClickHouse/docs/changelogs/${GITHUB_TAG}.md" "${GITHUB_TAG}"
|
||||||
git add "./docs/changelogs/${GITHUB_TAG}.md"
|
git add "./docs/changelogs/${GITHUB_TAG}.md"
|
||||||
python ./utils/security-generator/generate_security.py > SECURITY.md
|
python3 ./utils/security-generator/generate_security.py > SECURITY.md
|
||||||
git diff HEAD
|
git diff HEAD
|
||||||
- name: Create Pull Request
|
- name: Create Pull Request
|
||||||
uses: peter-evans/create-pull-request@v3
|
uses: peter-evans/create-pull-request@v3
|
||||||
@ -61,6 +61,7 @@ jobs:
|
|||||||
committer: "robot-clickhouse <robot-clickhouse@users.noreply.github.com>"
|
committer: "robot-clickhouse <robot-clickhouse@users.noreply.github.com>"
|
||||||
commit-message: Update version_date.tsv and changelogs after ${{ env.GITHUB_TAG }}
|
commit-message: Update version_date.tsv and changelogs after ${{ env.GITHUB_TAG }}
|
||||||
branch: auto/${{ env.GITHUB_TAG }}
|
branch: auto/${{ env.GITHUB_TAG }}
|
||||||
|
assignees: ${{ github.event.sender.login }} # assign the PR to the tag pusher
|
||||||
delete-branch: true
|
delete-branch: true
|
||||||
title: Update version_date.tsv and changelogs after ${{ env.GITHUB_TAG }}
|
title: Update version_date.tsv and changelogs after ${{ env.GITHUB_TAG }}
|
||||||
labels: do not test
|
labels: do not test
|
||||||
|
1
.gitignore
vendored
1
.gitignore
vendored
@ -17,6 +17,7 @@
|
|||||||
|
|
||||||
# logs
|
# logs
|
||||||
*.log
|
*.log
|
||||||
|
*.debuglog
|
||||||
*.stderr
|
*.stderr
|
||||||
*.stdout
|
*.stdout
|
||||||
|
|
||||||
|
3
.gitmodules
vendored
3
.gitmodules
vendored
@ -287,3 +287,6 @@
|
|||||||
[submodule "contrib/xxHash"]
|
[submodule "contrib/xxHash"]
|
||||||
path = contrib/xxHash
|
path = contrib/xxHash
|
||||||
url = https://github.com/Cyan4973/xxHash.git
|
url = https://github.com/Cyan4973/xxHash.git
|
||||||
|
[submodule "contrib/google-benchmark"]
|
||||||
|
path = contrib/google-benchmark
|
||||||
|
url = https://github.com/google/benchmark.git
|
||||||
|
@ -111,6 +111,7 @@ if (ENABLE_FUZZING)
|
|||||||
set (ENABLE_JEMALLOC 0)
|
set (ENABLE_JEMALLOC 0)
|
||||||
set (ENABLE_CHECK_HEAVY_BUILDS 1)
|
set (ENABLE_CHECK_HEAVY_BUILDS 1)
|
||||||
set (GLIBC_COMPATIBILITY OFF)
|
set (GLIBC_COMPATIBILITY OFF)
|
||||||
|
set (ENABLE_BENCHMARKS 0)
|
||||||
|
|
||||||
# For codegen_select_fuzzer
|
# For codegen_select_fuzzer
|
||||||
set (ENABLE_PROTOBUF 1)
|
set (ENABLE_PROTOBUF 1)
|
||||||
@ -168,6 +169,7 @@ endif ()
|
|||||||
|
|
||||||
option(ENABLE_TESTS "Provide unit_test_dbms target with Google.Test unit tests" ON)
|
option(ENABLE_TESTS "Provide unit_test_dbms target with Google.Test unit tests" ON)
|
||||||
option(ENABLE_EXAMPLES "Build all example programs in 'examples' subdirectories" OFF)
|
option(ENABLE_EXAMPLES "Build all example programs in 'examples' subdirectories" OFF)
|
||||||
|
option(ENABLE_BENCHMARKS "Build all benchmark programs in 'benchmarks' subdirectories" OFF)
|
||||||
|
|
||||||
if (OS_LINUX AND (ARCH_AMD64 OR ARCH_AARCH64) AND USE_STATIC_LIBRARIES AND NOT SPLIT_SHARED_LIBRARIES AND NOT USE_MUSL)
|
if (OS_LINUX AND (ARCH_AMD64 OR ARCH_AARCH64) AND USE_STATIC_LIBRARIES AND NOT SPLIT_SHARED_LIBRARIES AND NOT USE_MUSL)
|
||||||
# Only for Linux, x86_64 or aarch64.
|
# Only for Linux, x86_64 or aarch64.
|
||||||
|
@ -16,8 +16,6 @@ ClickHouse® is an open-source column-oriented database management system that a
|
|||||||
* [Contacts](https://clickhouse.com/company/contact) can help to get your questions answered if there are any.
|
* [Contacts](https://clickhouse.com/company/contact) can help to get your questions answered if there are any.
|
||||||
|
|
||||||
## Upcoming events
|
## Upcoming events
|
||||||
* [**v22.11 Release Webinar**](https://clickhouse.com/company/events/v22-11-release-webinar) Original creator, co-founder, and CTO of ClickHouse Alexey Milovidov will walk us through the highlights of the release, provide live demos, and share vision into what is coming in the roadmap.
|
* [**v22.12 Release Webinar**](https://clickhouse.com/company/events/v22-12-release-webinar) Original creator, co-founder, and CTO of ClickHouse Alexey Milovidov will walk us through the highlights of the release, provide live demos, and share vision into what is coming in the roadmap.
|
||||||
* [**ClickHosue Meetup at the RELEX Solutions office in Stockholm**](https://www.meetup.com/clickhouse-stockholm-user-group/events/289492084/) - Dec 1 - Formulate by RELEX is a Swedish promotion planning and analytics company. They will share why they chose ClickHouse for their real time analytics and forecasting solution. The ClickHouse team will then present how ClickHouse is used for real time financial data analytics, including tick data, trade analytics and risk management.
|
* [**ClickHouse Meetup at the CHEQ office in Tel Aviv**](https://www.meetup.com/clickhouse-tel-aviv-user-group/events/289599423/) - Jan 16 - We are very excited to be holding our next in-person ClickHouse meetup at the CHEQ office in Tel Aviv! Hear from CHEQ, ServiceNow and Contentsquare, as well as a deep dive presentation from ClickHouse CTO Alexey Milovidov. Join us for a fun evening of talks, food and discussion!
|
||||||
* [**ClickHouse Meetup at the Deutsche Bank office in Berlin**](https://www.meetup.com/clickhouse-berlin-user-group/events/289311596/) - Dec 5 - Hear from Deutsche Bank on why they chose ClickHouse for big sensitive data in a regulated environment. The ClickHouse team will then present how ClickHouse is used for real time financial data analytics, including tick data, trade analytics and risk management.
|
* **ClickHouse Meetup in Seattle* - Keep an eye on this space as we will be announcing a January meetup in Seattle soon!
|
||||||
* [**ClickHouse Meetup at the Rokt offices in Manhattan**](https://www.meetup.com/clickhouse-new-york-user-group/events/289403909/) - Dec 6 - We are very excited to be holding our next in-person ClickHouse meetup at the Rokt offices in Manhattan. Featuring talks from Bloomberg, Disney Streaming, Prequel, Rokt, and ClickHouse
|
|
||||||
|
|
||||||
|
@ -17,6 +17,7 @@
|
|||||||
#include <filesystem>
|
#include <filesystem>
|
||||||
#include <fmt/format.h>
|
#include <fmt/format.h>
|
||||||
#include <boost/algorithm/string/split.hpp>
|
#include <boost/algorithm/string/split.hpp>
|
||||||
|
#include <boost/algorithm/string/replace.hpp>
|
||||||
#include <boost/algorithm/string/classification.hpp> /// is_any_of
|
#include <boost/algorithm/string/classification.hpp> /// is_any_of
|
||||||
|
|
||||||
namespace
|
namespace
|
||||||
@ -38,7 +39,7 @@ std::string getEditor()
|
|||||||
return editor;
|
return editor;
|
||||||
}
|
}
|
||||||
|
|
||||||
std::string getFuzzyFinder()
|
std::pair<std::string, FuzzyFinderType> getFuzzyFinder()
|
||||||
{
|
{
|
||||||
const char * env_path = std::getenv("PATH"); // NOLINT(concurrency-mt-unsafe)
|
const char * env_path = std::getenv("PATH"); // NOLINT(concurrency-mt-unsafe)
|
||||||
|
|
||||||
@ -52,14 +53,20 @@ std::string getFuzzyFinder()
|
|||||||
std::filesystem::path path(path_str);
|
std::filesystem::path path(path_str);
|
||||||
std::filesystem::path sk_bin_path = path / "sk";
|
std::filesystem::path sk_bin_path = path / "sk";
|
||||||
if (!access(sk_bin_path.c_str(), X_OK))
|
if (!access(sk_bin_path.c_str(), X_OK))
|
||||||
return sk_bin_path;
|
return {sk_bin_path, FUZZY_FINDER_SKIM};
|
||||||
|
|
||||||
std::filesystem::path fzf_bin_path = path / "fzf";
|
std::filesystem::path fzf_bin_path = path / "fzf";
|
||||||
if (!access(fzf_bin_path.c_str(), X_OK))
|
if (!access(fzf_bin_path.c_str(), X_OK))
|
||||||
return fzf_bin_path;
|
return {fzf_bin_path, FUZZY_FINDER_FZF};
|
||||||
}
|
}
|
||||||
|
|
||||||
return {};
|
return {"", FUZZY_FINDER_NONE};
|
||||||
|
}
|
||||||
|
|
||||||
|
String escapeShellArgument(std::string arg)
|
||||||
|
{
|
||||||
|
boost::replace_all(arg, "'", "'\\''");
|
||||||
|
return fmt::format("'{}'", arg);
|
||||||
}
|
}
|
||||||
|
|
||||||
/// See comments in ShellCommand::executeImpl()
|
/// See comments in ShellCommand::executeImpl()
|
||||||
@ -305,11 +312,12 @@ ReplxxLineReader::ReplxxLineReader(
|
|||||||
replxx::Replxx::highlighter_callback_t highlighter_)
|
replxx::Replxx::highlighter_callback_t highlighter_)
|
||||||
: LineReader(history_file_path_, multiline_, std::move(extenders_), std::move(delimiters_)), highlighter(std::move(highlighter_))
|
: LineReader(history_file_path_, multiline_, std::move(extenders_), std::move(delimiters_)), highlighter(std::move(highlighter_))
|
||||||
, editor(getEditor())
|
, editor(getEditor())
|
||||||
, fuzzy_finder(getFuzzyFinder())
|
|
||||||
{
|
{
|
||||||
using namespace std::placeholders;
|
using namespace std::placeholders;
|
||||||
using Replxx = replxx::Replxx;
|
using Replxx = replxx::Replxx;
|
||||||
|
|
||||||
|
std::tie(fuzzy_finder, fuzzy_finder_type) = getFuzzyFinder();
|
||||||
|
|
||||||
if (!history_file_path.empty())
|
if (!history_file_path.empty())
|
||||||
{
|
{
|
||||||
history_file_fd = open(history_file_path.c_str(), O_RDWR);
|
history_file_fd = open(history_file_path.c_str(), O_RDWR);
|
||||||
@ -415,11 +423,12 @@ ReplxxLineReader::ReplxxLineReader(
|
|||||||
rx.bind_key(Replxx::KEY::meta('#'), insert_comment_action);
|
rx.bind_key(Replxx::KEY::meta('#'), insert_comment_action);
|
||||||
|
|
||||||
/// interactive search in history (requires fzf/sk)
|
/// interactive search in history (requires fzf/sk)
|
||||||
if (!fuzzy_finder.empty())
|
if (fuzzy_finder_type != FUZZY_FINDER_NONE)
|
||||||
{
|
{
|
||||||
auto interactive_history_search = [this](char32_t code)
|
auto interactive_history_search = [this](char32_t code)
|
||||||
{
|
{
|
||||||
openInteractiveHistorySearch();
|
openInteractiveHistorySearch();
|
||||||
|
rx.invoke(Replxx::ACTION::CLEAR_SELF, code);
|
||||||
return rx.invoke(Replxx::ACTION::REPAINT, code);
|
return rx.invoke(Replxx::ACTION::REPAINT, code);
|
||||||
};
|
};
|
||||||
rx.bind_key(Replxx::KEY::control('R'), interactive_history_search);
|
rx.bind_key(Replxx::KEY::control('R'), interactive_history_search);
|
||||||
@ -515,9 +524,22 @@ void ReplxxLineReader::openInteractiveHistorySearch()
|
|||||||
///
|
///
|
||||||
/// And also note, that fzf and skim is 95% compatible (at least option
|
/// And also note, that fzf and skim is 95% compatible (at least option
|
||||||
/// that is used here)
|
/// that is used here)
|
||||||
std::string fuzzy_finder_command = fmt::format(
|
std::string fuzzy_finder_command = fmt::format("{} --read0 --height=30%", fuzzy_finder);
|
||||||
"{} --read0 --tac --no-sort --tiebreak=index --bind=ctrl-r:toggle-sort --height=30% < {} > {}",
|
switch (fuzzy_finder_type)
|
||||||
fuzzy_finder, history_file.getPath(), output_file.getPath());
|
{
|
||||||
|
case FUZZY_FINDER_SKIM:
|
||||||
|
fuzzy_finder_command += " --tac --tiebreak=-score";
|
||||||
|
break;
|
||||||
|
case FUZZY_FINDER_FZF:
|
||||||
|
fuzzy_finder_command += " --tac --tiebreak=index";
|
||||||
|
break;
|
||||||
|
case FUZZY_FINDER_NONE:
|
||||||
|
/// assertion for !fuzzy_finder.empty() is enough
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
fuzzy_finder_command += fmt::format(" < {} > {}",
|
||||||
|
escapeShellArgument(history_file.getPath()),
|
||||||
|
escapeShellArgument(output_file.getPath()));
|
||||||
char * const argv[] = {sh, sh_c, fuzzy_finder_command.data(), nullptr};
|
char * const argv[] = {sh, sh_c, fuzzy_finder_command.data(), nullptr};
|
||||||
|
|
||||||
try
|
try
|
||||||
|
@ -4,6 +4,14 @@
|
|||||||
|
|
||||||
#include <replxx.hxx>
|
#include <replxx.hxx>
|
||||||
|
|
||||||
|
enum FuzzyFinderType
|
||||||
|
{
|
||||||
|
FUZZY_FINDER_NONE,
|
||||||
|
/// Use https://github.com/junegunn/fzf
|
||||||
|
FUZZY_FINDER_FZF,
|
||||||
|
/// Use https://github.com/lotabout/skim
|
||||||
|
FUZZY_FINDER_SKIM,
|
||||||
|
};
|
||||||
|
|
||||||
class ReplxxLineReader : public LineReader
|
class ReplxxLineReader : public LineReader
|
||||||
{
|
{
|
||||||
@ -38,4 +46,5 @@ private:
|
|||||||
|
|
||||||
std::string editor;
|
std::string editor;
|
||||||
std::string fuzzy_finder;
|
std::string fuzzy_finder;
|
||||||
|
FuzzyFinderType fuzzy_finder_type = FUZZY_FINDER_NONE;
|
||||||
};
|
};
|
||||||
|
@ -187,8 +187,20 @@ struct integer<Bits, Signed>::_impl
|
|||||||
static_assert(Bits % base_bits == 0);
|
static_assert(Bits % base_bits == 0);
|
||||||
|
|
||||||
/// Simple iteration in both directions
|
/// Simple iteration in both directions
|
||||||
static constexpr unsigned little(unsigned idx) { return idx; }
|
static constexpr unsigned little(unsigned idx)
|
||||||
static constexpr unsigned big(unsigned idx) { return item_count - 1 - idx; }
|
{
|
||||||
|
if constexpr (std::endian::native == std::endian::little)
|
||||||
|
return idx;
|
||||||
|
else
|
||||||
|
return item_count - 1 - idx;
|
||||||
|
}
|
||||||
|
static constexpr unsigned big(unsigned idx)
|
||||||
|
{
|
||||||
|
if constexpr (std::endian::native == std::endian::little)
|
||||||
|
return item_count - 1 - idx;
|
||||||
|
else
|
||||||
|
return idx;
|
||||||
|
}
|
||||||
static constexpr unsigned any(unsigned idx) { return idx; }
|
static constexpr unsigned any(unsigned idx) { return idx; }
|
||||||
|
|
||||||
template <class T>
|
template <class T>
|
||||||
@ -240,20 +252,20 @@ struct integer<Bits, Signed>::_impl
|
|||||||
{
|
{
|
||||||
static_assert(sizeof(Integral) <= sizeof(base_type));
|
static_assert(sizeof(Integral) <= sizeof(base_type));
|
||||||
|
|
||||||
self.items[0] = _impl::to_Integral(rhs);
|
self.items[little(0)] = _impl::to_Integral(rhs);
|
||||||
|
|
||||||
if constexpr (std::is_signed_v<Integral>)
|
if constexpr (std::is_signed_v<Integral>)
|
||||||
{
|
{
|
||||||
if (rhs < 0)
|
if (rhs < 0)
|
||||||
{
|
{
|
||||||
for (size_t i = 1; i < item_count; ++i)
|
for (unsigned i = 1; i < item_count; ++i)
|
||||||
self.items[i] = -1;
|
self.items[little(i)] = -1;
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
for (size_t i = 1; i < item_count; ++i)
|
for (unsigned i = 1; i < item_count; ++i)
|
||||||
self.items[i] = 0;
|
self.items[little(i)] = 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
template <typename TupleLike, size_t i = 0>
|
template <typename TupleLike, size_t i = 0>
|
||||||
@ -348,7 +360,7 @@ struct integer<Bits, Signed>::_impl
|
|||||||
constexpr const unsigned to_copy = min_bits / base_bits;
|
constexpr const unsigned to_copy = min_bits / base_bits;
|
||||||
|
|
||||||
for (unsigned i = 0; i < to_copy; ++i)
|
for (unsigned i = 0; i < to_copy; ++i)
|
||||||
self.items[i] = rhs.items[i];
|
self.items[little(i)] = rhs.items[little(i)];
|
||||||
|
|
||||||
if constexpr (Bits > Bits2)
|
if constexpr (Bits > Bits2)
|
||||||
{
|
{
|
||||||
@ -357,13 +369,13 @@ struct integer<Bits, Signed>::_impl
|
|||||||
if (rhs < 0)
|
if (rhs < 0)
|
||||||
{
|
{
|
||||||
for (unsigned i = to_copy; i < item_count; ++i)
|
for (unsigned i = to_copy; i < item_count; ++i)
|
||||||
self.items[i] = -1;
|
self.items[little(i)] = -1;
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
for (unsigned i = to_copy; i < item_count; ++i)
|
for (unsigned i = to_copy; i < item_count; ++i)
|
||||||
self.items[i] = 0;
|
self.items[little(i)] = 0;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -454,7 +466,7 @@ private:
|
|||||||
{
|
{
|
||||||
if constexpr (sizeof(T) <= sizeof(base_type))
|
if constexpr (sizeof(T) <= sizeof(base_type))
|
||||||
{
|
{
|
||||||
if (0 == idx)
|
if (little(0) == idx)
|
||||||
return static_cast<base_type>(x);
|
return static_cast<base_type>(x);
|
||||||
}
|
}
|
||||||
else if (idx * sizeof(base_type) < sizeof(T))
|
else if (idx * sizeof(base_type) < sizeof(T))
|
||||||
@ -475,7 +487,7 @@ private:
|
|||||||
|
|
||||||
for (unsigned i = 0; i < op_items; ++i)
|
for (unsigned i = 0; i < op_items; ++i)
|
||||||
{
|
{
|
||||||
base_type rhs_item = get_item(rhs, i);
|
base_type rhs_item = get_item(rhs, little(i));
|
||||||
base_type & res_item = res.items[little(i)];
|
base_type & res_item = res.items[little(i)];
|
||||||
|
|
||||||
underflows[i] = res_item < rhs_item;
|
underflows[i] = res_item < rhs_item;
|
||||||
@ -508,7 +520,7 @@ private:
|
|||||||
|
|
||||||
for (unsigned i = 0; i < op_items; ++i)
|
for (unsigned i = 0; i < op_items; ++i)
|
||||||
{
|
{
|
||||||
base_type rhs_item = get_item(rhs, i);
|
base_type rhs_item = get_item(rhs, little(i));
|
||||||
base_type & res_item = res.items[little(i)];
|
base_type & res_item = res.items[little(i)];
|
||||||
|
|
||||||
res_item += rhs_item;
|
res_item += rhs_item;
|
||||||
@ -580,12 +592,12 @@ private:
|
|||||||
else if constexpr (Bits == 128 && sizeof(base_type) == 8)
|
else if constexpr (Bits == 128 && sizeof(base_type) == 8)
|
||||||
{
|
{
|
||||||
using CompilerUInt128 = unsigned __int128;
|
using CompilerUInt128 = unsigned __int128;
|
||||||
CompilerUInt128 a = (CompilerUInt128(lhs.items[1]) << 64) + lhs.items[0]; // NOLINT(clang-analyzer-core.UndefinedBinaryOperatorResult)
|
CompilerUInt128 a = (CompilerUInt128(lhs.items[little(1)]) << 64) + lhs.items[little(0)]; // NOLINT(clang-analyzer-core.UndefinedBinaryOperatorResult)
|
||||||
CompilerUInt128 b = (CompilerUInt128(rhs.items[1]) << 64) + rhs.items[0]; // NOLINT(clang-analyzer-core.UndefinedBinaryOperatorResult)
|
CompilerUInt128 b = (CompilerUInt128(rhs.items[little(1)]) << 64) + rhs.items[little(0)]; // NOLINT(clang-analyzer-core.UndefinedBinaryOperatorResult)
|
||||||
CompilerUInt128 c = a * b;
|
CompilerUInt128 c = a * b;
|
||||||
integer<Bits, Signed> res;
|
integer<Bits, Signed> res;
|
||||||
res.items[0] = c;
|
res.items[little(0)] = c;
|
||||||
res.items[1] = c >> 64;
|
res.items[little(1)] = c >> 64;
|
||||||
return res;
|
return res;
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
@ -597,7 +609,7 @@ private:
|
|||||||
#endif
|
#endif
|
||||||
for (unsigned i = 0; i < item_count; ++i)
|
for (unsigned i = 0; i < item_count; ++i)
|
||||||
{
|
{
|
||||||
base_type rhs_item = get_item(rhs, i);
|
base_type rhs_item = get_item(rhs, little(i));
|
||||||
unsigned pos = i * base_bits;
|
unsigned pos = i * base_bits;
|
||||||
|
|
||||||
while (rhs_item)
|
while (rhs_item)
|
||||||
@ -792,7 +804,7 @@ public:
|
|||||||
integer<Bits, Signed> res;
|
integer<Bits, Signed> res;
|
||||||
|
|
||||||
for (unsigned i = 0; i < item_count; ++i)
|
for (unsigned i = 0; i < item_count; ++i)
|
||||||
res.items[little(i)] = lhs.items[little(i)] | get_item(rhs, i);
|
res.items[little(i)] = lhs.items[little(i)] | get_item(rhs, little(i));
|
||||||
return res;
|
return res;
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
@ -810,7 +822,7 @@ public:
|
|||||||
integer<Bits, Signed> res;
|
integer<Bits, Signed> res;
|
||||||
|
|
||||||
for (unsigned i = 0; i < item_count; ++i)
|
for (unsigned i = 0; i < item_count; ++i)
|
||||||
res.items[little(i)] = lhs.items[little(i)] & get_item(rhs, i);
|
res.items[little(i)] = lhs.items[little(i)] & get_item(rhs, little(i));
|
||||||
return res;
|
return res;
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
@ -845,17 +857,17 @@ public:
|
|||||||
{
|
{
|
||||||
using CompilerUInt128 = unsigned __int128;
|
using CompilerUInt128 = unsigned __int128;
|
||||||
|
|
||||||
CompilerUInt128 a = (CompilerUInt128(numerator.items[1]) << 64) + numerator.items[0]; // NOLINT(clang-analyzer-core.UndefinedBinaryOperatorResult)
|
CompilerUInt128 a = (CompilerUInt128(numerator.items[little(1)]) << 64) + numerator.items[little(0)]; // NOLINT(clang-analyzer-core.UndefinedBinaryOperatorResult)
|
||||||
CompilerUInt128 b = (CompilerUInt128(denominator.items[1]) << 64) + denominator.items[0]; // NOLINT(clang-analyzer-core.UndefinedBinaryOperatorResult)
|
CompilerUInt128 b = (CompilerUInt128(denominator.items[little(1)]) << 64) + denominator.items[little(0)]; // NOLINT(clang-analyzer-core.UndefinedBinaryOperatorResult)
|
||||||
CompilerUInt128 c = a / b; // NOLINT
|
CompilerUInt128 c = a / b; // NOLINT
|
||||||
|
|
||||||
integer<Bits, Signed> res;
|
integer<Bits, Signed> res;
|
||||||
res.items[0] = c;
|
res.items[little(0)] = c;
|
||||||
res.items[1] = c >> 64;
|
res.items[little(1)] = c >> 64;
|
||||||
|
|
||||||
CompilerUInt128 remainder = a - b * c;
|
CompilerUInt128 remainder = a - b * c;
|
||||||
numerator.items[0] = remainder;
|
numerator.items[little(0)] = remainder;
|
||||||
numerator.items[1] = remainder >> 64;
|
numerator.items[little(1)] = remainder >> 64;
|
||||||
|
|
||||||
return res;
|
return res;
|
||||||
}
|
}
|
||||||
@ -1039,15 +1051,15 @@ constexpr integer<Bits, Signed>::integer(std::initializer_list<T> il) noexcept
|
|||||||
else
|
else
|
||||||
{
|
{
|
||||||
auto it = il.begin();
|
auto it = il.begin();
|
||||||
for (size_t i = 0; i < _impl::item_count; ++i)
|
for (unsigned i = 0; i < _impl::item_count; ++i)
|
||||||
{
|
{
|
||||||
if (it < il.end())
|
if (it < il.end())
|
||||||
{
|
{
|
||||||
items[i] = *it;
|
items[_impl::little(i)] = *it;
|
||||||
++it;
|
++it;
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
items[i] = 0;
|
items[_impl::little(i)] = 0;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -1208,7 +1220,7 @@ constexpr integer<Bits, Signed>::operator T() const noexcept
|
|||||||
|
|
||||||
UnsignedT res{};
|
UnsignedT res{};
|
||||||
for (unsigned i = 0; i < _impl::item_count && i < (sizeof(T) + sizeof(base_type) - 1) / sizeof(base_type); ++i)
|
for (unsigned i = 0; i < _impl::item_count && i < (sizeof(T) + sizeof(base_type) - 1) / sizeof(base_type); ++i)
|
||||||
res += UnsignedT(items[i]) << (sizeof(base_type) * 8 * i); // NOLINT(clang-analyzer-core.UndefinedBinaryOperatorResult)
|
res += UnsignedT(items[_impl::little(i)]) << (sizeof(base_type) * 8 * i); // NOLINT(clang-analyzer-core.UndefinedBinaryOperatorResult)
|
||||||
|
|
||||||
return res;
|
return res;
|
||||||
}
|
}
|
||||||
|
@ -5,21 +5,21 @@ if (ENABLE_CLANG_TIDY)
|
|||||||
|
|
||||||
find_program (CLANG_TIDY_CACHE_PATH NAMES "clang-tidy-cache")
|
find_program (CLANG_TIDY_CACHE_PATH NAMES "clang-tidy-cache")
|
||||||
if (CLANG_TIDY_CACHE_PATH)
|
if (CLANG_TIDY_CACHE_PATH)
|
||||||
find_program (_CLANG_TIDY_PATH NAMES "clang-tidy" "clang-tidy-15" "clang-tidy-14" "clang-tidy-13" "clang-tidy-12")
|
find_program (_CLANG_TIDY_PATH NAMES "clang-tidy-15" "clang-tidy-14" "clang-tidy-13" "clang-tidy-12" "clang-tidy")
|
||||||
|
|
||||||
# Why do we use ';' here?
|
# Why do we use ';' here?
|
||||||
# It's a cmake black magic: https://cmake.org/cmake/help/latest/prop_tgt/LANG_CLANG_TIDY.html#prop_tgt:%3CLANG%3E_CLANG_TIDY
|
# It's a cmake black magic: https://cmake.org/cmake/help/latest/prop_tgt/LANG_CLANG_TIDY.html#prop_tgt:%3CLANG%3E_CLANG_TIDY
|
||||||
# The CLANG_TIDY_PATH is passed to CMAKE_CXX_CLANG_TIDY, which follows CXX_CLANG_TIDY syntax.
|
# The CLANG_TIDY_PATH is passed to CMAKE_CXX_CLANG_TIDY, which follows CXX_CLANG_TIDY syntax.
|
||||||
set (CLANG_TIDY_PATH "${CLANG_TIDY_CACHE_PATH};${_CLANG_TIDY_PATH}" CACHE STRING "A combined command to run clang-tidy with caching wrapper")
|
set (CLANG_TIDY_PATH "${CLANG_TIDY_CACHE_PATH};${_CLANG_TIDY_PATH}" CACHE STRING "A combined command to run clang-tidy with caching wrapper")
|
||||||
else ()
|
else ()
|
||||||
find_program (CLANG_TIDY_PATH NAMES "clang-tidy" "clang-tidy-15" "clang-tidy-14" "clang-tidy-13" "clang-tidy-12")
|
find_program (CLANG_TIDY_PATH NAMES "clang-tidy-15" "clang-tidy-14" "clang-tidy-13" "clang-tidy-12" "clang-tidy")
|
||||||
endif ()
|
endif ()
|
||||||
|
|
||||||
if (CLANG_TIDY_PATH)
|
if (CLANG_TIDY_PATH)
|
||||||
message (STATUS
|
message (STATUS
|
||||||
"Using clang-tidy: ${CLANG_TIDY_PATH}.
|
"Using clang-tidy: ${CLANG_TIDY_PATH}.
|
||||||
The checks will be run during build process.
|
The checks will be run during the build process.
|
||||||
See the .clang-tidy file at the root directory to configure the checks.")
|
See the .clang-tidy file in the root directory to configure the checks.")
|
||||||
|
|
||||||
set (USE_CLANG_TIDY ON)
|
set (USE_CLANG_TIDY ON)
|
||||||
|
|
||||||
|
@ -21,12 +21,12 @@ set (APPLE_CLANG_MINIMUM_VERSION 12.0.0)
|
|||||||
set (GCC_MINIMUM_VERSION 11)
|
set (GCC_MINIMUM_VERSION 11)
|
||||||
|
|
||||||
if (COMPILER_GCC)
|
if (COMPILER_GCC)
|
||||||
|
message (FATAL_ERROR "Compilation with GCC is unsupported. Please use Clang instead.")
|
||||||
|
|
||||||
if (CMAKE_CXX_COMPILER_VERSION VERSION_LESS ${GCC_MINIMUM_VERSION})
|
if (CMAKE_CXX_COMPILER_VERSION VERSION_LESS ${GCC_MINIMUM_VERSION})
|
||||||
message (FATAL_ERROR "Compilation with GCC version ${CMAKE_CXX_COMPILER_VERSION} is unsupported, the minimum required version is ${GCC_MINIMUM_VERSION}.")
|
message (FATAL_ERROR "Compilation with GCC version ${CMAKE_CXX_COMPILER_VERSION} is unsupported, the minimum required version is ${GCC_MINIMUM_VERSION}.")
|
||||||
endif ()
|
endif ()
|
||||||
|
|
||||||
message (WARNING "Compilation with GCC is unsupported. Please use Clang instead.")
|
|
||||||
|
|
||||||
elseif (COMPILER_CLANG)
|
elseif (COMPILER_CLANG)
|
||||||
if (CMAKE_CXX_COMPILER_ID MATCHES "AppleClang")
|
if (CMAKE_CXX_COMPILER_ID MATCHES "AppleClang")
|
||||||
# (Experimental!) Specify "-DALLOW_APPLECLANG=ON" when running CMake configuration step, if you want to experiment with using it.
|
# (Experimental!) Specify "-DALLOW_APPLECLANG=ON" when running CMake configuration step, if you want to experiment with using it.
|
||||||
@ -83,7 +83,7 @@ if ((OS_LINUX OR OS_DARWIN) AND NOT LINKER_NAME)
|
|||||||
|
|
||||||
if (NOT LINKER_NAME)
|
if (NOT LINKER_NAME)
|
||||||
if (GOLD_PATH)
|
if (GOLD_PATH)
|
||||||
message (WARNING "Linking with gold is not recommended. Please use lld.")
|
message (FATAL_ERROR "Linking with gold is unsupported. Please use lld.")
|
||||||
if (COMPILER_GCC)
|
if (COMPILER_GCC)
|
||||||
set (LINKER_NAME "gold")
|
set (LINKER_NAME "gold")
|
||||||
else ()
|
else ()
|
||||||
|
2
contrib/CMakeLists.txt
vendored
2
contrib/CMakeLists.txt
vendored
@ -171,6 +171,8 @@ add_contrib (annoy-cmake annoy)
|
|||||||
|
|
||||||
add_contrib (xxHash-cmake xxHash)
|
add_contrib (xxHash-cmake xxHash)
|
||||||
|
|
||||||
|
add_contrib (google-benchmark-cmake google-benchmark)
|
||||||
|
|
||||||
# Put all targets defined here and in subdirectories under "contrib/<immediate-subdir>" folders in GUI-based IDEs.
|
# Put all targets defined here and in subdirectories under "contrib/<immediate-subdir>" folders in GUI-based IDEs.
|
||||||
# Some of third-party projects may override CMAKE_FOLDER or FOLDER property of their targets, so they would not appear
|
# Some of third-party projects may override CMAKE_FOLDER or FOLDER property of their targets, so they would not appear
|
||||||
# in "contrib/..." as originally planned, so we workaround this by fixing FOLDER properties of all targets manually,
|
# in "contrib/..." as originally planned, so we workaround this by fixing FOLDER properties of all targets manually,
|
||||||
|
1
contrib/google-benchmark
vendored
Submodule
1
contrib/google-benchmark
vendored
Submodule
@ -0,0 +1 @@
|
|||||||
|
Subproject commit 2257fa4d6afb8e5a2ccd510a70f38fe7fcdf1edf
|
34
contrib/google-benchmark-cmake/CMakeLists.txt
Normal file
34
contrib/google-benchmark-cmake/CMakeLists.txt
Normal file
@ -0,0 +1,34 @@
|
|||||||
|
set (SRC_DIR "${ClickHouse_SOURCE_DIR}/contrib/google-benchmark/src")
|
||||||
|
|
||||||
|
set (SRCS
|
||||||
|
"${SRC_DIR}/benchmark.cc"
|
||||||
|
"${SRC_DIR}/benchmark_api_internal.cc"
|
||||||
|
"${SRC_DIR}/benchmark_name.cc"
|
||||||
|
"${SRC_DIR}/benchmark_register.cc"
|
||||||
|
"${SRC_DIR}/benchmark_runner.cc"
|
||||||
|
"${SRC_DIR}/check.cc"
|
||||||
|
"${SRC_DIR}/colorprint.cc"
|
||||||
|
"${SRC_DIR}/commandlineflags.cc"
|
||||||
|
"${SRC_DIR}/complexity.cc"
|
||||||
|
"${SRC_DIR}/console_reporter.cc"
|
||||||
|
"${SRC_DIR}/counter.cc"
|
||||||
|
"${SRC_DIR}/csv_reporter.cc"
|
||||||
|
"${SRC_DIR}/json_reporter.cc"
|
||||||
|
"${SRC_DIR}/perf_counters.cc"
|
||||||
|
"${SRC_DIR}/reporter.cc"
|
||||||
|
"${SRC_DIR}/sleep.cc"
|
||||||
|
"${SRC_DIR}/statistics.cc"
|
||||||
|
"${SRC_DIR}/string_util.cc"
|
||||||
|
"${SRC_DIR}/sysinfo.cc"
|
||||||
|
"${SRC_DIR}/timers.cc")
|
||||||
|
|
||||||
|
add_library(google_benchmark "${SRCS}")
|
||||||
|
target_include_directories(google_benchmark SYSTEM PUBLIC "${SRC_DIR}/../include")
|
||||||
|
|
||||||
|
add_library(google_benchmark_main "${SRC_DIR}/benchmark_main.cc")
|
||||||
|
target_link_libraries(google_benchmark_main PUBLIC google_benchmark)
|
||||||
|
|
||||||
|
add_library(google_benchmark_all INTERFACE)
|
||||||
|
target_link_libraries(google_benchmark_all INTERFACE google_benchmark google_benchmark_main)
|
||||||
|
|
||||||
|
add_library(ch_contrib::gbenchmark_all ALIAS google_benchmark_all)
|
2
contrib/qpl
vendored
2
contrib/qpl
vendored
@ -1 +1 @@
|
|||||||
Subproject commit cdc8442f7a5e7a6ff6eea39c69665e0c5034d85d
|
Subproject commit becb7a1b15bdb4845ec3721a550707ffa51d029d
|
@ -15,7 +15,7 @@ set (QPL_SRC_DIR "${ClickHouse_SOURCE_DIR}/contrib/qpl/sources")
|
|||||||
set (QPL_BINARY_DIR "${ClickHouse_BINARY_DIR}/build/contrib/qpl")
|
set (QPL_BINARY_DIR "${ClickHouse_BINARY_DIR}/build/contrib/qpl")
|
||||||
set (UUID_DIR "${ClickHouse_SOURCE_DIR}/contrib/qpl-cmake")
|
set (UUID_DIR "${ClickHouse_SOURCE_DIR}/contrib/qpl-cmake")
|
||||||
|
|
||||||
set (EFFICIENT_WAIT ON)
|
set (EFFICIENT_WAIT OFF)
|
||||||
set (BLOCK_ON_FAULT ON)
|
set (BLOCK_ON_FAULT ON)
|
||||||
set (LOG_HW_INIT OFF)
|
set (LOG_HW_INIT OFF)
|
||||||
set (SANITIZE_MEMORY OFF)
|
set (SANITIZE_MEMORY OFF)
|
||||||
@ -110,18 +110,18 @@ target_compile_options(isal PRIVATE
|
|||||||
"$<$<CONFIG:Debug>:>"
|
"$<$<CONFIG:Debug>:>"
|
||||||
"$<$<CONFIG:Release>:>")
|
"$<$<CONFIG:Release>:>")
|
||||||
|
|
||||||
target_compile_options(isal_asm PUBLIC "-I${QPL_SRC_DIR}/isal/include/"
|
target_compile_options(isal_asm PRIVATE "-I${QPL_SRC_DIR}/isal/include/"
|
||||||
PUBLIC "-I${QPL_SRC_DIR}/isal/igzip/"
|
PRIVATE "-I${QPL_SRC_DIR}/isal/igzip/"
|
||||||
PUBLIC "-I${QPL_SRC_DIR}/isal/crc/"
|
PRIVATE "-I${QPL_SRC_DIR}/isal/crc/"
|
||||||
PUBLIC "-DQPL_LIB")
|
PRIVATE "-DQPL_LIB")
|
||||||
|
|
||||||
# AS_FEATURE_LEVEL=10 means "Check SIMD capabilities of the target system at runtime and use up to AVX512 if available".
|
# AS_FEATURE_LEVEL=10 means "Check SIMD capabilities of the target system at runtime and use up to AVX512 if available".
|
||||||
# AS_FEATURE_LEVEL=5 means "Check SIMD capabilities of the target system at runtime and use up to AVX2 if available".
|
# AS_FEATURE_LEVEL=5 means "Check SIMD capabilities of the target system at runtime and use up to AVX2 if available".
|
||||||
# HAVE_KNOWS_AVX512 means rely on AVX512 being available on the target system.
|
# HAVE_KNOWS_AVX512 means rely on AVX512 being available on the target system.
|
||||||
if (ENABLE_AVX512)
|
if (ENABLE_AVX512)
|
||||||
target_compile_options(isal_asm PUBLIC "-DHAVE_AS_KNOWS_AVX512" "-DAS_FEATURE_LEVEL=10")
|
target_compile_options(isal_asm PRIVATE "-DHAVE_AS_KNOWS_AVX512" "-DAS_FEATURE_LEVEL=10")
|
||||||
else()
|
else()
|
||||||
target_compile_options(isal_asm PUBLIC "-DAS_FEATURE_LEVEL=5")
|
target_compile_options(isal_asm PRIVATE "-DAS_FEATURE_LEVEL=5")
|
||||||
endif()
|
endif()
|
||||||
|
|
||||||
# Here must remove "-fno-sanitize=undefined" from COMPILE_OPTIONS.
|
# Here must remove "-fno-sanitize=undefined" from COMPILE_OPTIONS.
|
||||||
@ -315,7 +315,13 @@ target_compile_definitions(_qpl
|
|||||||
PRIVATE -DQPL_BADARG_CHECK
|
PRIVATE -DQPL_BADARG_CHECK
|
||||||
PUBLIC -DENABLE_QPL_COMPRESSION)
|
PUBLIC -DENABLE_QPL_COMPRESSION)
|
||||||
|
|
||||||
|
find_library(LIBACCEL accel-config)
|
||||||
|
if(NOT LIBACCEL)
|
||||||
|
message(FATAL_ERROR "Please install QPL dependency library:libaccel-config from https://github.com/intel/idxd-config")
|
||||||
|
endif()
|
||||||
|
|
||||||
target_link_libraries(_qpl
|
target_link_libraries(_qpl
|
||||||
|
PRIVATE ${LIBACCEL}
|
||||||
PRIVATE ${CMAKE_DL_LIBS})
|
PRIVATE ${CMAKE_DL_LIBS})
|
||||||
|
|
||||||
add_library (ch_contrib::qpl ALIAS _qpl)
|
add_library (ch_contrib::qpl ALIAS _qpl)
|
||||||
|
@ -33,7 +33,7 @@ RUN arch=${TARGETARCH:-amd64} \
|
|||||||
# lts / testing / prestable / etc
|
# lts / testing / prestable / etc
|
||||||
ARG REPO_CHANNEL="stable"
|
ARG REPO_CHANNEL="stable"
|
||||||
ARG REPOSITORY="https://packages.clickhouse.com/tgz/${REPO_CHANNEL}"
|
ARG REPOSITORY="https://packages.clickhouse.com/tgz/${REPO_CHANNEL}"
|
||||||
ARG VERSION="22.11.1.1360"
|
ARG VERSION="22.11.2.30"
|
||||||
ARG PACKAGES="clickhouse-client clickhouse-server clickhouse-common-static"
|
ARG PACKAGES="clickhouse-client clickhouse-server clickhouse-common-static"
|
||||||
|
|
||||||
# user/group precreated explicitly with fixed uid/gid on purpose.
|
# user/group precreated explicitly with fixed uid/gid on purpose.
|
||||||
|
@ -21,7 +21,7 @@ RUN sed -i "s|http://archive.ubuntu.com|${apt_archive}|g" /etc/apt/sources.list
|
|||||||
|
|
||||||
ARG REPO_CHANNEL="stable"
|
ARG REPO_CHANNEL="stable"
|
||||||
ARG REPOSITORY="deb https://packages.clickhouse.com/deb ${REPO_CHANNEL} main"
|
ARG REPOSITORY="deb https://packages.clickhouse.com/deb ${REPO_CHANNEL} main"
|
||||||
ARG VERSION="22.11.1.1360"
|
ARG VERSION="22.11.2.30"
|
||||||
ARG PACKAGES="clickhouse-client clickhouse-server clickhouse-common-static"
|
ARG PACKAGES="clickhouse-client clickhouse-server clickhouse-common-static"
|
||||||
|
|
||||||
# set non-empty deb_location_url url to create a docker image
|
# set non-empty deb_location_url url to create a docker image
|
||||||
|
32
docs/changelogs/v22.10.3.27-stable.md
Normal file
32
docs/changelogs/v22.10.3.27-stable.md
Normal file
@ -0,0 +1,32 @@
|
|||||||
|
---
|
||||||
|
sidebar_position: 1
|
||||||
|
sidebar_label: 2022
|
||||||
|
---
|
||||||
|
|
||||||
|
# 2022 Changelog
|
||||||
|
|
||||||
|
### ClickHouse release v22.10.3.27-stable (6d3b2985724) FIXME as compared to v22.10.2.11-stable (d2bfcaba002)
|
||||||
|
|
||||||
|
#### Improvement
|
||||||
|
* Backported in [#42842](https://github.com/ClickHouse/ClickHouse/issues/42842): Update tzdata to 2022f. Mexico will no longer observe DST except near the US border: https://www.timeanddate.com/news/time/mexico-abolishes-dst-2022.html. Chihuahua moves to year-round UTC-6 on 2022-10-30. Fiji no longer observes DST. See https://github.com/google/cctz/pull/235 and https://bugs.launchpad.net/ubuntu/+source/tzdata/+bug/1995209. [#42796](https://github.com/ClickHouse/ClickHouse/pull/42796) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
|
|
||||||
|
#### Build/Testing/Packaging Improvement
|
||||||
|
* Backported in [#42959](https://github.com/ClickHouse/ClickHouse/issues/42959): Before the fix, the user-defined config was preserved by RPM in `$file.rpmsave`. The PR fixes it and won't replace the user's files from packages. [#42936](https://github.com/ClickHouse/ClickHouse/pull/42936) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
* Backported in [#43042](https://github.com/ClickHouse/ClickHouse/issues/43042): Add a CI step to mark commits as ready for release; soft-forbid launching a release script from branches but master. [#43017](https://github.com/ClickHouse/ClickHouse/pull/43017) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
|
||||||
|
#### Bug Fix (user-visible misbehavior in official stable or prestable release)
|
||||||
|
|
||||||
|
* Backported in [#42864](https://github.com/ClickHouse/ClickHouse/issues/42864): Fix lowerUTF8()/upperUTF8() in case of symbol was in between 16-byte boundary (very frequent case of you have strings > 16 bytes long). [#42812](https://github.com/ClickHouse/ClickHouse/pull/42812) ([Azat Khuzhin](https://github.com/azat)).
|
||||||
|
* Backported in [#43173](https://github.com/ClickHouse/ClickHouse/issues/43173): Fix rare possible hung on query cancellation. [#42874](https://github.com/ClickHouse/ClickHouse/pull/42874) ([Azat Khuzhin](https://github.com/azat)).
|
||||||
|
* Backported in [#43064](https://github.com/ClickHouse/ClickHouse/issues/43064): Fix rare NOT_FOUND_COLUMN_IN_BLOCK error when projection is possible to use but there is no projection available. This fixes [#42771](https://github.com/ClickHouse/ClickHouse/issues/42771) . The bug was introduced in https://github.com/ClickHouse/ClickHouse/pull/25563. [#42938](https://github.com/ClickHouse/ClickHouse/pull/42938) ([Amos Bird](https://github.com/amosbird)).
|
||||||
|
* Backported in [#43075](https://github.com/ClickHouse/ClickHouse/issues/43075): Fix lambda parsing. Closes [#41848](https://github.com/ClickHouse/ClickHouse/issues/41848). [#42979](https://github.com/ClickHouse/ClickHouse/pull/42979) ([Nikolay Degterinsky](https://github.com/evillique)).
|
||||||
|
* Backported in [#43444](https://github.com/ClickHouse/ClickHouse/issues/43444): - Fix several buffer over-reads. [#43159](https://github.com/ClickHouse/ClickHouse/pull/43159) ([Raúl Marín](https://github.com/Algunenano)).
|
||||||
|
* Backported in [#43430](https://github.com/ClickHouse/ClickHouse/issues/43430): Fixed queries with `SAMPLE BY` with prewhere optimization on tables using `Merge` engine. [#43315](https://github.com/ClickHouse/ClickHouse/pull/43315) ([Antonio Andelic](https://github.com/antonio2368)).
|
||||||
|
|
||||||
|
#### NOT FOR CHANGELOG / INSIGNIFICANT
|
||||||
|
|
||||||
|
* Fix a bug in CAST function parser [#42980](https://github.com/ClickHouse/ClickHouse/pull/42980) ([Nikolay Degterinsky](https://github.com/evillique)).
|
||||||
|
* Fix TSan errors (correctly ignore _exit interception) [#43009](https://github.com/ClickHouse/ClickHouse/pull/43009) ([Azat Khuzhin](https://github.com/azat)).
|
||||||
|
* Update SECURITY.md on new stable tags [#43365](https://github.com/ClickHouse/ClickHouse/pull/43365) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
* Use all parameters with prefixes from ssm [#43467](https://github.com/ClickHouse/ClickHouse/pull/43467) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
|
29
docs/changelogs/v22.10.4.23-stable.md
Normal file
29
docs/changelogs/v22.10.4.23-stable.md
Normal file
@ -0,0 +1,29 @@
|
|||||||
|
---
|
||||||
|
sidebar_position: 1
|
||||||
|
sidebar_label: 2022
|
||||||
|
---
|
||||||
|
|
||||||
|
# 2022 Changelog
|
||||||
|
|
||||||
|
### ClickHouse release v22.10.4.23-stable (352772987f4) FIXME as compared to v22.10.3.27-stable (6d3b2985724)
|
||||||
|
|
||||||
|
#### Backward Incompatible Change
|
||||||
|
* Backported in [#43487](https://github.com/ClickHouse/ClickHouse/issues/43487): Fixed backward incompatibility in (de)serialization of states of `min`, `max`, `any*`, `argMin`, `argMax` aggregate functions with `String` argument. The incompatibility was introduced in https://github.com/ClickHouse/ClickHouse/pull/41431 and affects 22.9, 22.10 and 22.11 branches (fixed since 22.9.6, 22.10.4 and 22.11.2 correspondingly). Some minor releases of 22.3, 22.7 and 22.8 branches are also affected: 22.3.13...22.3.14 (fixed since 22.3.15), 22.8.6...22.8.9 (fixed since 22.8.10), 22.7.6 and newer (will not be fixed in 22.7, we recommend to upgrade from 22.7.* to 22.8.10 or newer). This release note does not concern users that have never used affected versions. Incompatible versions append extra `'\0'` to strings when reading states of the aggregate functions mentioned above. For example, if an older version saved state of `anyState('foobar')` to `state_column` then incompatible version will print `'foobar\0'` on `anyMerge(state_column)`. Also incompatible versions write states of the aggregate functions without trailing `'\0'`. Newer versions (that have the fix) can correctly read data written by all versions including incompatible versions, except one corner case. If an incompatible version saved a state with a string that actually ends with null character, then newer version will trim trailing `'\0'` when reading state of affected aggregate function. For example, if an incompatible version saved state of `anyState('abrac\0dabra\0')` to `state_column` then incompatible versions will print `'abrac\0dabra'` on `anyMerge(state_column)`. The issue also affects distributed queries when an incompatible version works in a cluster together with older or newer versions. [#43038](https://github.com/ClickHouse/ClickHouse/pull/43038) ([Raúl Marín](https://github.com/Algunenano)).
|
||||||
|
|
||||||
|
#### Build/Testing/Packaging Improvement
|
||||||
|
* Backported in [#43053](https://github.com/ClickHouse/ClickHouse/issues/43053): Wait for all files are in sync before archiving them in integration tests. [#42891](https://github.com/ClickHouse/ClickHouse/pull/42891) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
|
||||||
|
#### Bug Fix (user-visible misbehavior in official stable or prestable release)
|
||||||
|
|
||||||
|
* Backported in [#43715](https://github.com/ClickHouse/ClickHouse/issues/43715): An issue with the following exception has been reported while trying to read a Parquet file from S3 into ClickHouse:. [#43297](https://github.com/ClickHouse/ClickHouse/pull/43297) ([Arthur Passos](https://github.com/arthurpassos)).
|
||||||
|
* Backported in [#43576](https://github.com/ClickHouse/ClickHouse/issues/43576): Fix possible `Cannot create non-empty column with type Nothing` in functions if/multiIf. Closes [#43356](https://github.com/ClickHouse/ClickHouse/issues/43356). [#43368](https://github.com/ClickHouse/ClickHouse/pull/43368) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||||
|
* Backported in [#43506](https://github.com/ClickHouse/ClickHouse/issues/43506): Fix a bug when row level filter uses default value of column. [#43387](https://github.com/ClickHouse/ClickHouse/pull/43387) ([Alexander Gololobov](https://github.com/davenger)).
|
||||||
|
* Backported in [#43723](https://github.com/ClickHouse/ClickHouse/issues/43723): Fixed primary key analysis with conditions involving `toString(enum)`. [#43596](https://github.com/ClickHouse/ClickHouse/pull/43596) ([Nikita Taranov](https://github.com/nickitat)).
|
||||||
|
|
||||||
|
#### NOT FOR CHANGELOG / INSIGNIFICANT
|
||||||
|
|
||||||
|
* Temporarily disable `test_hive_query` [#43542](https://github.com/ClickHouse/ClickHouse/pull/43542) ([Alexander Tokmakov](https://github.com/tavplubix)).
|
||||||
|
* Do not checkout submodules recursively [#43637](https://github.com/ClickHouse/ClickHouse/pull/43637) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
* Use docker images cache from merged PRs in master and release branches [#43664](https://github.com/ClickHouse/ClickHouse/pull/43664) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
* Fix pagination issue in GITHUB_JOB_ID() [#43681](https://github.com/ClickHouse/ClickHouse/pull/43681) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
|
33
docs/changelogs/v22.11.2.30-stable.md
Normal file
33
docs/changelogs/v22.11.2.30-stable.md
Normal file
@ -0,0 +1,33 @@
|
|||||||
|
---
|
||||||
|
sidebar_position: 1
|
||||||
|
sidebar_label: 2022
|
||||||
|
---
|
||||||
|
|
||||||
|
# 2022 Changelog
|
||||||
|
|
||||||
|
### ClickHouse release v22.11.2.30-stable (28f72d8ab09) FIXME as compared to v22.11.1.1360-stable (0d211ed1984)
|
||||||
|
|
||||||
|
#### Backward Incompatible Change
|
||||||
|
* Backported in [#43488](https://github.com/ClickHouse/ClickHouse/issues/43488): Fixed backward incompatibility in (de)serialization of states of `min`, `max`, `any*`, `argMin`, `argMax` aggregate functions with `String` argument. The incompatibility was introduced in https://github.com/ClickHouse/ClickHouse/pull/41431 and affects 22.9, 22.10 and 22.11 branches (fixed since 22.9.6, 22.10.4 and 22.11.2 correspondingly). Some minor releases of 22.3, 22.7 and 22.8 branches are also affected: 22.3.13...22.3.14 (fixed since 22.3.15), 22.8.6...22.8.9 (fixed since 22.8.10), 22.7.6 and newer (will not be fixed in 22.7, we recommend to upgrade from 22.7.* to 22.8.10 or newer). This release note does not concern users that have never used affected versions. Incompatible versions append extra `'\0'` to strings when reading states of the aggregate functions mentioned above. For example, if an older version saved state of `anyState('foobar')` to `state_column` then incompatible version will print `'foobar\0'` on `anyMerge(state_column)`. Also incompatible versions write states of the aggregate functions without trailing `'\0'`. Newer versions (that have the fix) can correctly read data written by all versions including incompatible versions, except one corner case. If an incompatible version saved a state with a string that actually ends with null character, then newer version will trim trailing `'\0'` when reading state of affected aggregate function. For example, if an incompatible version saved state of `anyState('abrac\0dabra\0')` to `state_column` then incompatible versions will print `'abrac\0dabra'` on `anyMerge(state_column)`. The issue also affects distributed queries when an incompatible version works in a cluster together with older or newer versions. [#43038](https://github.com/ClickHouse/ClickHouse/pull/43038) ([Raúl Marín](https://github.com/Algunenano)).
|
||||||
|
|
||||||
|
#### Improvement
|
||||||
|
* Backported in [#43511](https://github.com/ClickHouse/ClickHouse/issues/43511): Restrict default access to named collections for user defined in config. It must have explicit `show_named_collections=1` to be able to see them. [#43325](https://github.com/ClickHouse/ClickHouse/pull/43325) ([Kseniia Sumarokova](https://github.com/kssenii)).
|
||||||
|
|
||||||
|
#### Bug Fix (user-visible misbehavior in official stable or prestable release)
|
||||||
|
|
||||||
|
* Backported in [#43716](https://github.com/ClickHouse/ClickHouse/issues/43716): An issue with the following exception has been reported while trying to read a Parquet file from S3 into ClickHouse:. [#43297](https://github.com/ClickHouse/ClickHouse/pull/43297) ([Arthur Passos](https://github.com/arthurpassos)).
|
||||||
|
* Backported in [#43431](https://github.com/ClickHouse/ClickHouse/issues/43431): Fixed queries with `SAMPLE BY` with prewhere optimization on tables using `Merge` engine. [#43315](https://github.com/ClickHouse/ClickHouse/pull/43315) ([Antonio Andelic](https://github.com/antonio2368)).
|
||||||
|
* Backported in [#43577](https://github.com/ClickHouse/ClickHouse/issues/43577): Fix possible `Cannot create non-empty column with type Nothing` in functions if/multiIf. Closes [#43356](https://github.com/ClickHouse/ClickHouse/issues/43356). [#43368](https://github.com/ClickHouse/ClickHouse/pull/43368) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||||
|
* Backported in [#43507](https://github.com/ClickHouse/ClickHouse/issues/43507): Fix a bug when row level filter uses default value of column. [#43387](https://github.com/ClickHouse/ClickHouse/pull/43387) ([Alexander Gololobov](https://github.com/davenger)).
|
||||||
|
* Backported in [#43724](https://github.com/ClickHouse/ClickHouse/issues/43724): Fixed primary key analysis with conditions involving `toString(enum)`. [#43596](https://github.com/ClickHouse/ClickHouse/pull/43596) ([Nikita Taranov](https://github.com/nickitat)).
|
||||||
|
* Backported in [#43807](https://github.com/ClickHouse/ClickHouse/issues/43807): Optimized number of List requests to ZooKeeper when selecting a part to merge. Previously it could produce thousands of requests in some cases. Fixes [#43647](https://github.com/ClickHouse/ClickHouse/issues/43647). [#43675](https://github.com/ClickHouse/ClickHouse/pull/43675) ([Alexander Tokmakov](https://github.com/tavplubix)).
|
||||||
|
|
||||||
|
#### NOT FOR CHANGELOG / INSIGNIFICANT
|
||||||
|
|
||||||
|
* Update SECURITY.md on new stable tags [#43365](https://github.com/ClickHouse/ClickHouse/pull/43365) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
* Use all parameters with prefixes from ssm [#43467](https://github.com/ClickHouse/ClickHouse/pull/43467) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
* Temporarily disable `test_hive_query` [#43542](https://github.com/ClickHouse/ClickHouse/pull/43542) ([Alexander Tokmakov](https://github.com/tavplubix)).
|
||||||
|
* Do not checkout submodules recursively [#43637](https://github.com/ClickHouse/ClickHouse/pull/43637) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
* Use docker images cache from merged PRs in master and release branches [#43664](https://github.com/ClickHouse/ClickHouse/pull/43664) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
* Fix pagination issue in GITHUB_JOB_ID() [#43681](https://github.com/ClickHouse/ClickHouse/pull/43681) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
|
34
docs/changelogs/v22.3.15.33-lts.md
Normal file
34
docs/changelogs/v22.3.15.33-lts.md
Normal file
@ -0,0 +1,34 @@
|
|||||||
|
---
|
||||||
|
sidebar_position: 1
|
||||||
|
sidebar_label: 2022
|
||||||
|
---
|
||||||
|
|
||||||
|
# 2022 Changelog
|
||||||
|
|
||||||
|
### ClickHouse release v22.3.15.33-lts (4ef30f2c4b6) FIXME as compared to v22.3.14.23-lts (74956bfee4d)
|
||||||
|
|
||||||
|
#### Backward Incompatible Change
|
||||||
|
* Backported in [#43484](https://github.com/ClickHouse/ClickHouse/issues/43484): Fixed backward incompatibility in (de)serialization of states of `min`, `max`, `any*`, `argMin`, `argMax` aggregate functions with `String` argument. The incompatibility was introduced in https://github.com/ClickHouse/ClickHouse/pull/41431 and affects 22.9, 22.10 and 22.11 branches (fixed since 22.9.6, 22.10.4 and 22.11.2 correspondingly). Some minor releases of 22.3, 22.7 and 22.8 branches are also affected: 22.3.13...22.3.14 (fixed since 22.3.15), 22.8.6...22.8.9 (fixed since 22.8.10), 22.7.6 and newer (will not be fixed in 22.7, we recommend to upgrade from 22.7.* to 22.8.10 or newer). This release note does not concern users that have never used affected versions. Incompatible versions append extra `'\0'` to strings when reading states of the aggregate functions mentioned above. For example, if an older version saved state of `anyState('foobar')` to `state_column` then incompatible version will print `'foobar\0'` on `anyMerge(state_column)`. Also incompatible versions write states of the aggregate functions without trailing `'\0'`. Newer versions (that have the fix) can correctly read data written by all versions including incompatible versions, except one corner case. If an incompatible version saved a state with a string that actually ends with null character, then newer version will trim trailing `'\0'` when reading state of affected aggregate function. For example, if an incompatible version saved state of `anyState('abrac\0dabra\0')` to `state_column` then incompatible versions will print `'abrac\0dabra'` on `anyMerge(state_column)`. The issue also affects distributed queries when an incompatible version works in a cluster together with older or newer versions. [#43038](https://github.com/ClickHouse/ClickHouse/pull/43038) ([Raúl Marín](https://github.com/Algunenano)).
|
||||||
|
|
||||||
|
#### Improvement
|
||||||
|
* Backported in [#42839](https://github.com/ClickHouse/ClickHouse/issues/42839): Update tzdata to 2022f. Mexico will no longer observe DST except near the US border: https://www.timeanddate.com/news/time/mexico-abolishes-dst-2022.html. Chihuahua moves to year-round UTC-6 on 2022-10-30. Fiji no longer observes DST. See https://github.com/google/cctz/pull/235 and https://bugs.launchpad.net/ubuntu/+source/tzdata/+bug/1995209. [#42796](https://github.com/ClickHouse/ClickHouse/pull/42796) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
|
|
||||||
|
#### Build/Testing/Packaging Improvement
|
||||||
|
* Backported in [#43050](https://github.com/ClickHouse/ClickHouse/issues/43050): Wait for all files are in sync before archiving them in integration tests. [#42891](https://github.com/ClickHouse/ClickHouse/pull/42891) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
* Backported in [#42963](https://github.com/ClickHouse/ClickHouse/issues/42963): Before the fix, the user-defined config was preserved by RPM in `$file.rpmsave`. The PR fixes it and won't replace the user's files from packages. [#42936](https://github.com/ClickHouse/ClickHouse/pull/42936) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
* Backported in [#43039](https://github.com/ClickHouse/ClickHouse/issues/43039): Add a CI step to mark commits as ready for release; soft-forbid launching a release script from branches but master. [#43017](https://github.com/ClickHouse/ClickHouse/pull/43017) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
|
||||||
|
#### Bug Fix (user-visible misbehavior in official stable or prestable release)
|
||||||
|
|
||||||
|
* Backported in [#43427](https://github.com/ClickHouse/ClickHouse/issues/43427): Fixed queries with `SAMPLE BY` with prewhere optimization on tables using `Merge` engine. [#43315](https://github.com/ClickHouse/ClickHouse/pull/43315) ([Antonio Andelic](https://github.com/antonio2368)).
|
||||||
|
* Backported in [#43720](https://github.com/ClickHouse/ClickHouse/issues/43720): Fixed primary key analysis with conditions involving `toString(enum)`. [#43596](https://github.com/ClickHouse/ClickHouse/pull/43596) ([Nikita Taranov](https://github.com/nickitat)).
|
||||||
|
|
||||||
|
#### NOT FOR CHANGELOG / INSIGNIFICANT
|
||||||
|
|
||||||
|
* Always run `BuilderReport` and `BuilderSpecialReport` in all CI types [#42684](https://github.com/ClickHouse/ClickHouse/pull/42684) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
* Update SECURITY.md on new stable tags [#43365](https://github.com/ClickHouse/ClickHouse/pull/43365) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
* Use all parameters with prefixes from ssm [#43467](https://github.com/ClickHouse/ClickHouse/pull/43467) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
* Temporarily disable `test_hive_query` [#43542](https://github.com/ClickHouse/ClickHouse/pull/43542) ([Alexander Tokmakov](https://github.com/tavplubix)).
|
||||||
|
* Do not checkout submodules recursively [#43637](https://github.com/ClickHouse/ClickHouse/pull/43637) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
* Use docker images cache from merged PRs in master and release branches [#43664](https://github.com/ClickHouse/ClickHouse/pull/43664) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
|
32
docs/changelogs/v22.8.10.29-lts.md
Normal file
32
docs/changelogs/v22.8.10.29-lts.md
Normal file
@ -0,0 +1,32 @@
|
|||||||
|
---
|
||||||
|
sidebar_position: 1
|
||||||
|
sidebar_label: 2022
|
||||||
|
---
|
||||||
|
|
||||||
|
# 2022 Changelog
|
||||||
|
|
||||||
|
### ClickHouse release v22.8.10.29-lts (d568a57f7af) FIXME as compared to v22.8.9.24-lts (a1b69551d40)
|
||||||
|
|
||||||
|
#### Backward Incompatible Change
|
||||||
|
* Backported in [#43485](https://github.com/ClickHouse/ClickHouse/issues/43485): Fixed backward incompatibility in (de)serialization of states of `min`, `max`, `any*`, `argMin`, `argMax` aggregate functions with `String` argument. The incompatibility was introduced in https://github.com/ClickHouse/ClickHouse/pull/41431 and affects 22.9, 22.10 and 22.11 branches (fixed since 22.9.6, 22.10.4 and 22.11.2 correspondingly). Some minor releases of 22.3, 22.7 and 22.8 branches are also affected: 22.3.13...22.3.14 (fixed since 22.3.15), 22.8.6...22.8.9 (fixed since 22.8.10), 22.7.6 and newer (will not be fixed in 22.7, we recommend to upgrade from 22.7.* to 22.8.10 or newer). This release note does not concern users that have never used affected versions. Incompatible versions append extra `'\0'` to strings when reading states of the aggregate functions mentioned above. For example, if an older version saved state of `anyState('foobar')` to `state_column` then incompatible version will print `'foobar\0'` on `anyMerge(state_column)`. Also incompatible versions write states of the aggregate functions without trailing `'\0'`. Newer versions (that have the fix) can correctly read data written by all versions including incompatible versions, except one corner case. If an incompatible version saved a state with a string that actually ends with null character, then newer version will trim trailing `'\0'` when reading state of affected aggregate function. For example, if an incompatible version saved state of `anyState('abrac\0dabra\0')` to `state_column` then incompatible versions will print `'abrac\0dabra'` on `anyMerge(state_column)`. The issue also affects distributed queries when an incompatible version works in a cluster together with older or newer versions. [#43038](https://github.com/ClickHouse/ClickHouse/pull/43038) ([Raúl Marín](https://github.com/Algunenano)).
|
||||||
|
|
||||||
|
#### Build/Testing/Packaging Improvement
|
||||||
|
* Backported in [#43051](https://github.com/ClickHouse/ClickHouse/issues/43051): Wait for all files are in sync before archiving them in integration tests. [#42891](https://github.com/ClickHouse/ClickHouse/pull/42891) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
|
||||||
|
#### Bug Fix (user-visible misbehavior in official stable or prestable release)
|
||||||
|
|
||||||
|
* Backported in [#43513](https://github.com/ClickHouse/ClickHouse/issues/43513): - Fix several buffer over-reads. [#43159](https://github.com/ClickHouse/ClickHouse/pull/43159) ([Raúl Marín](https://github.com/Algunenano)).
|
||||||
|
* Backported in [#43428](https://github.com/ClickHouse/ClickHouse/issues/43428): Fixed queries with `SAMPLE BY` with prewhere optimization on tables using `Merge` engine. [#43315](https://github.com/ClickHouse/ClickHouse/pull/43315) ([Antonio Andelic](https://github.com/antonio2368)).
|
||||||
|
* Backported in [#43580](https://github.com/ClickHouse/ClickHouse/issues/43580): Fix a bug when row level filter uses default value of column. [#43387](https://github.com/ClickHouse/ClickHouse/pull/43387) ([Alexander Gololobov](https://github.com/davenger)).
|
||||||
|
* Backported in [#43721](https://github.com/ClickHouse/ClickHouse/issues/43721): Fixed primary key analysis with conditions involving `toString(enum)`. [#43596](https://github.com/ClickHouse/ClickHouse/pull/43596) ([Nikita Taranov](https://github.com/nickitat)).
|
||||||
|
|
||||||
|
#### NOT FOR CHANGELOG / INSIGNIFICANT
|
||||||
|
|
||||||
|
* Fix 02267_file_globs_schema_inference.sql flakiness [#41877](https://github.com/ClickHouse/ClickHouse/pull/41877) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||||
|
* Update SECURITY.md on new stable tags [#43365](https://github.com/ClickHouse/ClickHouse/pull/43365) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
* Use all parameters with prefixes from ssm [#43467](https://github.com/ClickHouse/ClickHouse/pull/43467) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
* Temporarily disable `test_hive_query` [#43542](https://github.com/ClickHouse/ClickHouse/pull/43542) ([Alexander Tokmakov](https://github.com/tavplubix)).
|
||||||
|
* Do not checkout submodules recursively [#43637](https://github.com/ClickHouse/ClickHouse/pull/43637) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
* Use docker images cache from merged PRs in master and release branches [#43664](https://github.com/ClickHouse/ClickHouse/pull/43664) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
* Fix pagination issue in GITHUB_JOB_ID() [#43681](https://github.com/ClickHouse/ClickHouse/pull/43681) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
|
23
docs/changelogs/v22.8.11.15-lts.md
Normal file
23
docs/changelogs/v22.8.11.15-lts.md
Normal file
@ -0,0 +1,23 @@
|
|||||||
|
---
|
||||||
|
sidebar_position: 1
|
||||||
|
sidebar_label: 2022
|
||||||
|
---
|
||||||
|
|
||||||
|
# 2022 Changelog
|
||||||
|
|
||||||
|
### ClickHouse release v22.8.11.15-lts (65c9506d161) FIXME as compared to v22.8.10.29-lts (d568a57f7af)
|
||||||
|
|
||||||
|
#### Bug Fix
|
||||||
|
* Backported in [#43098](https://github.com/ClickHouse/ClickHouse/issues/43098): Updated normaliser to clone the alias ast. resolves [#42452](https://github.com/ClickHouse/ClickHouse/issues/42452) Implementation: * Updated QueryNormalizer to clone alias ast, when its replaced. Previously just assigning the same leads to exception in LogicalExpressinsOptimizer as it would be the same parent being inserted again. * This bug is not seen with new analyser (allow_experimental_analyzer), so no changes for it. I added a test for the same. [#42827](https://github.com/ClickHouse/ClickHouse/pull/42827) ([SmitaRKulkarni](https://github.com/SmitaRKulkarni)).
|
||||||
|
|
||||||
|
#### Bug Fix (user-visible misbehavior in official stable or prestable release)
|
||||||
|
|
||||||
|
* Backported in [#43751](https://github.com/ClickHouse/ClickHouse/issues/43751): An issue with the following exception has been reported while trying to read a Parquet file from S3 into ClickHouse:. [#43297](https://github.com/ClickHouse/ClickHouse/pull/43297) ([Arthur Passos](https://github.com/arthurpassos)).
|
||||||
|
* Backported in [#43617](https://github.com/ClickHouse/ClickHouse/issues/43617): Fix sumMap() for Nullable(Decimal()). [#43414](https://github.com/ClickHouse/ClickHouse/pull/43414) ([Azat Khuzhin](https://github.com/azat)).
|
||||||
|
* Backported in [#43886](https://github.com/ClickHouse/ClickHouse/issues/43886): Fixed `ALTER ... RESET SETTING` with `ON CLUSTER`. It could be applied to one replica only. Fixes [#43843](https://github.com/ClickHouse/ClickHouse/issues/43843). [#43848](https://github.com/ClickHouse/ClickHouse/pull/43848) ([Elena Torró](https://github.com/elenatorro)).
|
||||||
|
|
||||||
|
#### NOT FOR CHANGELOG / INSIGNIFICANT
|
||||||
|
|
||||||
|
* Use only PRs to our repository in pr_info on push [#43895](https://github.com/ClickHouse/ClickHouse/pull/43895) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
* Fix tags workflow [#43942](https://github.com/ClickHouse/ClickHouse/pull/43942) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
|
30
docs/changelogs/v22.9.5.25-stable.md
Normal file
30
docs/changelogs/v22.9.5.25-stable.md
Normal file
@ -0,0 +1,30 @@
|
|||||||
|
---
|
||||||
|
sidebar_position: 1
|
||||||
|
sidebar_label: 2022
|
||||||
|
---
|
||||||
|
|
||||||
|
# 2022 Changelog
|
||||||
|
|
||||||
|
### ClickHouse release v22.9.5.25-stable (68ba857aa82) FIXME as compared to v22.9.4.32-stable (3db8bcf1a70)
|
||||||
|
|
||||||
|
#### Improvement
|
||||||
|
* Backported in [#42841](https://github.com/ClickHouse/ClickHouse/issues/42841): Update tzdata to 2022f. Mexico will no longer observe DST except near the US border: https://www.timeanddate.com/news/time/mexico-abolishes-dst-2022.html. Chihuahua moves to year-round UTC-6 on 2022-10-30. Fiji no longer observes DST. See https://github.com/google/cctz/pull/235 and https://bugs.launchpad.net/ubuntu/+source/tzdata/+bug/1995209. [#42796](https://github.com/ClickHouse/ClickHouse/pull/42796) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
|
||||||
|
|
||||||
|
#### Build/Testing/Packaging Improvement
|
||||||
|
* Backported in [#42965](https://github.com/ClickHouse/ClickHouse/issues/42965): Before the fix, the user-defined config was preserved by RPM in `$file.rpmsave`. The PR fixes it and won't replace the user's files from packages. [#42936](https://github.com/ClickHouse/ClickHouse/pull/42936) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
* Backported in [#43041](https://github.com/ClickHouse/ClickHouse/issues/43041): Add a CI step to mark commits as ready for release; soft-forbid launching a release script from branches but master. [#43017](https://github.com/ClickHouse/ClickHouse/pull/43017) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
|
||||||
|
#### Bug Fix (user-visible misbehavior in official stable or prestable release)
|
||||||
|
|
||||||
|
* Backported in [#42749](https://github.com/ClickHouse/ClickHouse/issues/42749): A segmentation fault related to DNS & c-ares has been reported. The below error ocurred in multiple threads: ``` 2022-09-28 15:41:19.008,2022.09.28 15:41:19.008088 [ 356 ] {} <Fatal> BaseDaemon: ######################################## 2022-09-28 15:41:19.008,"2022.09.28 15:41:19.008147 [ 356 ] {} <Fatal> BaseDaemon: (version 22.8.5.29 (official build), build id: 92504ACA0B8E2267) (from thread 353) (no query) Received signal Segmentation fault (11)" 2022-09-28 15:41:19.008,2022.09.28 15:41:19.008196 [ 356 ] {} <Fatal> BaseDaemon: Address: 0xf Access: write. Address not mapped to object. 2022-09-28 15:41:19.008,2022.09.28 15:41:19.008216 [ 356 ] {} <Fatal> BaseDaemon: Stack trace: 0x188f8212 0x1626851b 0x1626a69e 0x16269b3f 0x16267eab 0x13cf8284 0x13d24afc 0x13c5217e 0x14ec2495 0x15ba440f 0x15b9d13b 0x15bb2699 0x1891ccb3 0x1891e00d 0x18ae0769 0x18ade022 0x7f76aa985609 0x7f76aa8aa133 2022-09-28 15:41:19.008,2022.09.28 15:41:19.008274 [ 356 ] {} <Fatal> BaseDaemon: 2. Poco::Net::IPAddress::family() const @ 0x188f8212 in /usr/bin/clickhouse 2022-09-28 15:41:19.008,2022.09.28 15:41:19.008297 [ 356 ] {} <Fatal> BaseDaemon: 3. ? @ 0x1626851b in /usr/bin/clickhouse 2022-09-28 15:41:19.008,2022.09.28 15:41:19.008309 [ 356 ] {} <Fatal> BaseDaemon: 4. ? @ 0x1626a69e in /usr/bin/clickhouse ```. [#42234](https://github.com/ClickHouse/ClickHouse/pull/42234) ([Arthur Passos](https://github.com/arthurpassos)).
|
||||||
|
* Backported in [#42863](https://github.com/ClickHouse/ClickHouse/issues/42863): Fix lowerUTF8()/upperUTF8() in case of symbol was in between 16-byte boundary (very frequent case of you have strings > 16 bytes long). [#42812](https://github.com/ClickHouse/ClickHouse/pull/42812) ([Azat Khuzhin](https://github.com/azat)).
|
||||||
|
* Backported in [#43063](https://github.com/ClickHouse/ClickHouse/issues/43063): Fix rare NOT_FOUND_COLUMN_IN_BLOCK error when projection is possible to use but there is no projection available. This fixes [#42771](https://github.com/ClickHouse/ClickHouse/issues/42771) . The bug was introduced in https://github.com/ClickHouse/ClickHouse/pull/25563. [#42938](https://github.com/ClickHouse/ClickHouse/pull/42938) ([Amos Bird](https://github.com/amosbird)).
|
||||||
|
* Backported in [#43443](https://github.com/ClickHouse/ClickHouse/issues/43443): - Fix several buffer over-reads. [#43159](https://github.com/ClickHouse/ClickHouse/pull/43159) ([Raúl Marín](https://github.com/Algunenano)).
|
||||||
|
* Backported in [#43429](https://github.com/ClickHouse/ClickHouse/issues/43429): Fixed queries with `SAMPLE BY` with prewhere optimization on tables using `Merge` engine. [#43315](https://github.com/ClickHouse/ClickHouse/pull/43315) ([Antonio Andelic](https://github.com/antonio2368)).
|
||||||
|
|
||||||
|
#### NOT FOR CHANGELOG / INSIGNIFICANT
|
||||||
|
|
||||||
|
* Always run `BuilderReport` and `BuilderSpecialReport` in all CI types [#42684](https://github.com/ClickHouse/ClickHouse/pull/42684) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
* Update SECURITY.md on new stable tags [#43365](https://github.com/ClickHouse/ClickHouse/pull/43365) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
* Use all parameters with prefixes from ssm [#43467](https://github.com/ClickHouse/ClickHouse/pull/43467) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
|
28
docs/changelogs/v22.9.6.20-stable.md
Normal file
28
docs/changelogs/v22.9.6.20-stable.md
Normal file
@ -0,0 +1,28 @@
|
|||||||
|
---
|
||||||
|
sidebar_position: 1
|
||||||
|
sidebar_label: 2022
|
||||||
|
---
|
||||||
|
|
||||||
|
# 2022 Changelog
|
||||||
|
|
||||||
|
### ClickHouse release v22.9.6.20-stable (ef6343f9579) FIXME as compared to v22.9.5.25-stable (68ba857aa82)
|
||||||
|
|
||||||
|
#### Backward Incompatible Change
|
||||||
|
* Backported in [#43486](https://github.com/ClickHouse/ClickHouse/issues/43486): Fixed backward incompatibility in (de)serialization of states of `min`, `max`, `any*`, `argMin`, `argMax` aggregate functions with `String` argument. The incompatibility was introduced in https://github.com/ClickHouse/ClickHouse/pull/41431 and affects 22.9, 22.10 and 22.11 branches (fixed since 22.9.6, 22.10.4 and 22.11.2 correspondingly). Some minor releases of 22.3, 22.7 and 22.8 branches are also affected: 22.3.13...22.3.14 (fixed since 22.3.15), 22.8.6...22.8.9 (fixed since 22.8.10), 22.7.6 and newer (will not be fixed in 22.7, we recommend to upgrade from 22.7.* to 22.8.10 or newer). This release note does not concern users that have never used affected versions. Incompatible versions append extra `'\0'` to strings when reading states of the aggregate functions mentioned above. For example, if an older version saved state of `anyState('foobar')` to `state_column` then incompatible version will print `'foobar\0'` on `anyMerge(state_column)`. Also incompatible versions write states of the aggregate functions without trailing `'\0'`. Newer versions (that have the fix) can correctly read data written by all versions including incompatible versions, except one corner case. If an incompatible version saved a state with a string that actually ends with null character, then newer version will trim trailing `'\0'` when reading state of affected aggregate function. For example, if an incompatible version saved state of `anyState('abrac\0dabra\0')` to `state_column` then incompatible versions will print `'abrac\0dabra'` on `anyMerge(state_column)`. The issue also affects distributed queries when an incompatible version works in a cluster together with older or newer versions. [#43038](https://github.com/ClickHouse/ClickHouse/pull/43038) ([Raúl Marín](https://github.com/Algunenano)).
|
||||||
|
|
||||||
|
#### Build/Testing/Packaging Improvement
|
||||||
|
* Backported in [#43052](https://github.com/ClickHouse/ClickHouse/issues/43052): Wait for all files are in sync before archiving them in integration tests. [#42891](https://github.com/ClickHouse/ClickHouse/pull/42891) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
|
||||||
|
#### Bug Fix (user-visible misbehavior in official stable or prestable release)
|
||||||
|
|
||||||
|
* Backported in [#43505](https://github.com/ClickHouse/ClickHouse/issues/43505): Fix a bug when row level filter uses default value of column. [#43387](https://github.com/ClickHouse/ClickHouse/pull/43387) ([Alexander Gololobov](https://github.com/davenger)).
|
||||||
|
* Backported in [#43722](https://github.com/ClickHouse/ClickHouse/issues/43722): Fixed primary key analysis with conditions involving `toString(enum)`. [#43596](https://github.com/ClickHouse/ClickHouse/pull/43596) ([Nikita Taranov](https://github.com/nickitat)).
|
||||||
|
|
||||||
|
#### NOT FOR CHANGELOG / INSIGNIFICANT
|
||||||
|
|
||||||
|
* Fix 02267_file_globs_schema_inference.sql flakiness [#41877](https://github.com/ClickHouse/ClickHouse/pull/41877) ([Kruglov Pavel](https://github.com/Avogar)).
|
||||||
|
* Temporarily disable `test_hive_query` [#43542](https://github.com/ClickHouse/ClickHouse/pull/43542) ([Alexander Tokmakov](https://github.com/tavplubix)).
|
||||||
|
* Do not checkout submodules recursively [#43637](https://github.com/ClickHouse/ClickHouse/pull/43637) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
* Use docker images cache from merged PRs in master and release branches [#43664](https://github.com/ClickHouse/ClickHouse/pull/43664) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
* Fix pagination issue in GITHUB_JOB_ID() [#43681](https://github.com/ClickHouse/ClickHouse/pull/43681) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
|
||||||
|
|
@ -16,7 +16,7 @@ import SupersetDocker from '@site/docs/en/_snippets/_add_superset_detail.md';
|
|||||||
## Goal
|
## Goal
|
||||||
|
|
||||||
In this guide you will learn how to:
|
In this guide you will learn how to:
|
||||||
- Load the OpenCelliD data in Clickhouse
|
- Load the OpenCelliD data in ClickHouse
|
||||||
- Connect Apache Superset to ClickHouse
|
- Connect Apache Superset to ClickHouse
|
||||||
- Build a dashboard based on data available in the dataset
|
- Build a dashboard based on data available in the dataset
|
||||||
|
|
||||||
|
@ -56,6 +56,7 @@ As of November 8th, 2022, each TSV is approximately the following size and numbe
|
|||||||
- [Line by line commit history of a file](#line-by-line-commit-history-of-a-file)
|
- [Line by line commit history of a file](#line-by-line-commit-history-of-a-file)
|
||||||
- [Unsolved Questions](#unsolved-questions)
|
- [Unsolved Questions](#unsolved-questions)
|
||||||
- [Git blame](#git-blame)
|
- [Git blame](#git-blame)
|
||||||
|
- [Related Content](#related-content)
|
||||||
|
|
||||||
# Generating the data
|
# Generating the data
|
||||||
|
|
||||||
@ -2497,3 +2498,7 @@ LIMIT 20
|
|||||||
We welcome exact and improved solutions here.
|
We welcome exact and improved solutions here.
|
||||||
|
|
||||||
|
|
||||||
|
# Related Content
|
||||||
|
|
||||||
|
- [Git commits and our community](https://clickhouse.com/blog/clickhouse-git-community-commits)
|
||||||
|
- [Window and array functions for Git commit sequences](https://clickhouse.com/blog/clickhouse-window-array-functions-git-commits)
|
||||||
|
@ -22,5 +22,8 @@ functions in ClickHouse. The sample datasets include:
|
|||||||
- The [Cell Towers dataset](../getting-started/example-datasets/cell-towers.md) imports a CSV into ClickHouse
|
- The [Cell Towers dataset](../getting-started/example-datasets/cell-towers.md) imports a CSV into ClickHouse
|
||||||
- The [NYPD Complaint Data](../getting-started/example-datasets/nypd_complaint_data.md) demonstrates how to use data inference to simplify creating tables
|
- The [NYPD Complaint Data](../getting-started/example-datasets/nypd_complaint_data.md) demonstrates how to use data inference to simplify creating tables
|
||||||
- The ["What's on the Menu?" dataset](../getting-started/example-datasets/menus.md) has an example of denormalizing data
|
- The ["What's on the Menu?" dataset](../getting-started/example-datasets/menus.md) has an example of denormalizing data
|
||||||
|
- The [Getting Data Into ClickHouse - Part 1](https://clickhouse.com/blog/getting-data-into-clickhouse-part-1) provides examples of defining a schema and loading a small Hacker News dataset
|
||||||
|
- The [Getting Data Into ClickHouse - Part 2 - A JSON detour](https://clickhouse.com/blog/getting-data-into-clickhouse-part-2-json) shows how JSON data can be loaded
|
||||||
|
- The [Getting Data Into ClickHouse - Part 3 - Using S3](https://clickhouse.com/blog/getting-data-into-clickhouse-part-3-s3) has examples of loading data from s3
|
||||||
|
|
||||||
View the **Tutorials and Datasets** menu for a complete list of sample datasets.
|
View the **Tutorials and Datasets** menu for a complete list of sample datasets.
|
@ -8,8 +8,8 @@ slug: /en/install
|
|||||||
|
|
||||||
You have two options for getting up and running with ClickHouse:
|
You have two options for getting up and running with ClickHouse:
|
||||||
|
|
||||||
- **[ClickHouse Cloud](https://clickhouse.cloud/):** the official ClickHouse as a service, - built by, maintained, and supported by the creators of ClickHouse
|
- **[ClickHouse Cloud](https://clickhouse.com/cloud/):** the official ClickHouse as a service, - built by, maintained, and supported by the creators of ClickHouse
|
||||||
- **Self-managed ClickHouse:** ClickHouse can run on any Linux, FreeBSD, or Mac OS X with x86_64, AArch64, or PowerPC64LE CPU architecture
|
- **[Self-managed ClickHouse](https://github.com/ClickHouse/ClickHouse):** ClickHouse can run on any Linux, FreeBSD, or Mac OS X with x86_64, AArch64, or PowerPC64LE CPU architecture
|
||||||
|
|
||||||
## ClickHouse Cloud
|
## ClickHouse Cloud
|
||||||
|
|
||||||
@ -406,4 +406,3 @@ SELECT 1
|
|||||||
**Congratulations, the system works!**
|
**Congratulations, the system works!**
|
||||||
|
|
||||||
To continue experimenting, you can download one of the test data sets or go through [tutorial](/docs/en/tutorial.md).
|
To continue experimenting, you can download one of the test data sets or go through [tutorial](/docs/en/tutorial.md).
|
||||||
|
|
||||||
|
@ -1202,6 +1202,7 @@ SELECT * FROM json_each_row_nested
|
|||||||
- [input_format_import_nested_json](../operations/settings/settings.md#input_format_import_nested_json) - map nested JSON data to nested tables (it works for JSONEachRow format). Default value - `false`.
|
- [input_format_import_nested_json](../operations/settings/settings.md#input_format_import_nested_json) - map nested JSON data to nested tables (it works for JSONEachRow format). Default value - `false`.
|
||||||
- [input_format_json_read_bools_as_numbers](../operations/settings/settings.md#input_format_json_read_bools_as_numbers) - allow to parse bools as numbers in JSON input formats. Default value - `true`.
|
- [input_format_json_read_bools_as_numbers](../operations/settings/settings.md#input_format_json_read_bools_as_numbers) - allow to parse bools as numbers in JSON input formats. Default value - `true`.
|
||||||
- [input_format_json_read_numbers_as_strings](../operations/settings/settings.md#input_format_json_read_numbers_as_strings) - allow to parse numbers as strings in JSON input formats. Default value - `false`.
|
- [input_format_json_read_numbers_as_strings](../operations/settings/settings.md#input_format_json_read_numbers_as_strings) - allow to parse numbers as strings in JSON input formats. Default value - `false`.
|
||||||
|
- [input_format_json_read_objects_as_strings](../operations/settings/settings.md#input_format_json_read_objects_as_strings) - allow to parse JSON objects as strings in JSON input formats. Default value - `false`.
|
||||||
- [output_format_json_quote_64bit_integers](../operations/settings/settings.md#output_format_json_quote_64bit_integers) - controls quoting of 64-bit integers in JSON output format. Default value - `true`.
|
- [output_format_json_quote_64bit_integers](../operations/settings/settings.md#output_format_json_quote_64bit_integers) - controls quoting of 64-bit integers in JSON output format. Default value - `true`.
|
||||||
- [output_format_json_quote_64bit_floats](../operations/settings/settings.md#output_format_json_quote_64bit_floats) - controls quoting of 64-bit floats in JSON output format. Default value - `false`.
|
- [output_format_json_quote_64bit_floats](../operations/settings/settings.md#output_format_json_quote_64bit_floats) - controls quoting of 64-bit floats in JSON output format. Default value - `false`.
|
||||||
- [output_format_json_quote_denormals](../operations/settings/settings.md#output_format_json_quote_denormals) - enables '+nan', '-nan', '+inf', '-inf' outputs in JSON output format. Default value - `false`.
|
- [output_format_json_quote_denormals](../operations/settings/settings.md#output_format_json_quote_denormals) - enables '+nan', '-nan', '+inf', '-inf' outputs in JSON output format. Default value - `false`.
|
||||||
|
@ -244,7 +244,7 @@ The username and password can be indicated in one of three ways:
|
|||||||
$ echo 'SELECT 1' | curl 'http://user:password@localhost:8123/' -d @-
|
$ echo 'SELECT 1' | curl 'http://user:password@localhost:8123/' -d @-
|
||||||
```
|
```
|
||||||
|
|
||||||
1. In the ‘user’ and ‘password’ URL parameters. Example:
|
2. In the ‘user’ and ‘password’ URL parameters (*We do not recommend using this method as the parameter might be logged by web proxy and cached in the browser*). Example:
|
||||||
|
|
||||||
<!-- -->
|
<!-- -->
|
||||||
|
|
||||||
@ -252,7 +252,7 @@ $ echo 'SELECT 1' | curl 'http://user:password@localhost:8123/' -d @-
|
|||||||
$ echo 'SELECT 1' | curl 'http://localhost:8123/?user=user&password=password' -d @-
|
$ echo 'SELECT 1' | curl 'http://localhost:8123/?user=user&password=password' -d @-
|
||||||
```
|
```
|
||||||
|
|
||||||
1. Using ‘X-ClickHouse-User’ and ‘X-ClickHouse-Key’ headers. Example:
|
3. Using ‘X-ClickHouse-User’ and ‘X-ClickHouse-Key’ headers. Example:
|
||||||
|
|
||||||
<!-- -->
|
<!-- -->
|
||||||
|
|
||||||
|
@ -1,30 +0,0 @@
|
|||||||
|
|
||||||
[//]: # (This file is included in Manage > Updates)
|
|
||||||
|
|
||||||
## Self-managed ClickHouse Upgrade
|
|
||||||
|
|
||||||
If ClickHouse was installed from `deb` packages, execute the following commands on the server:
|
|
||||||
|
|
||||||
``` bash
|
|
||||||
$ sudo apt-get update
|
|
||||||
$ sudo apt-get install clickhouse-client clickhouse-server
|
|
||||||
$ sudo service clickhouse-server restart
|
|
||||||
```
|
|
||||||
|
|
||||||
If you installed ClickHouse using something other than the recommended `deb` packages, use the appropriate update method.
|
|
||||||
|
|
||||||
:::note
|
|
||||||
You can update multiple servers at once as soon as there is no moment when all replicas of one shard are offline.
|
|
||||||
:::
|
|
||||||
|
|
||||||
The upgrade of older version of ClickHouse to specific version:
|
|
||||||
|
|
||||||
As an example:
|
|
||||||
|
|
||||||
`xx.yy.a.b` is a current stable version. The latest stable version could be found [here](https://github.com/ClickHouse/ClickHouse/releases)
|
|
||||||
|
|
||||||
```bash
|
|
||||||
$ sudo apt-get update
|
|
||||||
$ sudo apt-get install clickhouse-server=xx.yy.a.b clickhouse-client=xx.yy.a.b clickhouse-common-static=xx.yy.a.b
|
|
||||||
$ sudo service clickhouse-server restart
|
|
||||||
```
|
|
@ -1,5 +1,8 @@
|
|||||||
|
---
|
||||||
|
slug: /en/operations/backup
|
||||||
|
---
|
||||||
|
|
||||||
[//]: # (This file is included in Manage > Backups)
|
# Backup and Restore
|
||||||
|
|
||||||
- [Backup to a local disk](#backup-to-a-local-disk)
|
- [Backup to a local disk](#backup-to-a-local-disk)
|
||||||
- [Configuring backup/restore to use an S3 endpoint](#configuring-backuprestore-to-use-an-s3-endpoint)
|
- [Configuring backup/restore to use an S3 endpoint](#configuring-backuprestore-to-use-an-s3-endpoint)
|
||||||
@ -356,4 +359,3 @@ Data can be restored from backup using the `ALTER TABLE ... ATTACH PARTITION ...
|
|||||||
For more information about queries related to partition manipulations, see the [ALTER documentation](../sql-reference/statements/alter/partition.md#alter_manipulations-with-partitions).
|
For more information about queries related to partition manipulations, see the [ALTER documentation](../sql-reference/statements/alter/partition.md#alter_manipulations-with-partitions).
|
||||||
|
|
||||||
A third-party tool is available to automate this approach: [clickhouse-backup](https://github.com/AlexAkulov/clickhouse-backup).
|
A third-party tool is available to automate this approach: [clickhouse-backup](https://github.com/AlexAkulov/clickhouse-backup).
|
||||||
|
|
@ -91,4 +91,21 @@ Code: 452, e.displayText() = DB::Exception: Setting force_index_by_date should n
|
|||||||
|
|
||||||
**Note:** the `default` profile has special handling: all the constraints defined for the `default` profile become the default constraints, so they restrict all the users until they’re overridden explicitly for these users.
|
**Note:** the `default` profile has special handling: all the constraints defined for the `default` profile become the default constraints, so they restrict all the users until they’re overridden explicitly for these users.
|
||||||
|
|
||||||
|
## Constraints on Merge Tree Settings
|
||||||
|
It is possible to set constraints for [merge tree settings](merge-tree-settings.md). There constraints are applied when table with merge tree engine is created or its storage settings are altered. Name of merge tree setting must be prepended by `merge_tree_` prefix when referenced in `<constraint>` section.
|
||||||
|
|
||||||
|
**Example:** Forbid to create new tables with explicitly specified `storage_policy`
|
||||||
|
|
||||||
|
``` xml
|
||||||
|
<profiles>
|
||||||
|
<default>
|
||||||
|
<constraints>
|
||||||
|
<merge_tree_storage_policy>
|
||||||
|
<const/>
|
||||||
|
</merge_tree_storage_policy>
|
||||||
|
</constraints>
|
||||||
|
</default>
|
||||||
|
</profiles>
|
||||||
|
```
|
||||||
|
|
||||||
[Original article](https://clickhouse.com/docs/en/operations/settings/constraints_on_settings/) <!--hide-->
|
[Original article](https://clickhouse.com/docs/en/operations/settings/constraints_on_settings/) <!--hide-->
|
||||||
|
@ -3753,6 +3753,29 @@ Allow parsing numbers as strings in JSON input formats.
|
|||||||
|
|
||||||
Disabled by default.
|
Disabled by default.
|
||||||
|
|
||||||
|
### input_format_json_read_objects_as_strings {#input_format_json_read_objects_as_strings}
|
||||||
|
|
||||||
|
Allow parsing JSON objects as strings in JSON input formats.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
|
||||||
|
```sql
|
||||||
|
SET input_format_json_read_objects_as_strings = 1;
|
||||||
|
CREATE TABLE test (id UInt64, obj String, date Date) ENGINE=Memory();
|
||||||
|
INSERT INTO test FORMAT JSONEachRow {"id" : 1, "obj" : {"a" : 1, "b" : "Hello"}, "date" : "2020-01-01"};
|
||||||
|
SELECT * FROM test;
|
||||||
|
```
|
||||||
|
|
||||||
|
Result:
|
||||||
|
|
||||||
|
```
|
||||||
|
┌─id─┬─obj──────────────────────┬───────date─┐
|
||||||
|
│ 1 │ {"a" : 1, "b" : "Hello"} │ 2020-01-01 │
|
||||||
|
└────┴──────────────────────────┴────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
Disabled by default.
|
||||||
|
|
||||||
### input_format_json_validate_types_from_metadata {#input_format_json_validate_types_from_metadata}
|
### input_format_json_validate_types_from_metadata {#input_format_json_validate_types_from_metadata}
|
||||||
|
|
||||||
For JSON/JSONCompact/JSONColumnsWithMetadata input formats, if this setting is set to 1,
|
For JSON/JSONCompact/JSONColumnsWithMetadata input formats, if this setting is set to 1,
|
||||||
|
@ -286,3 +286,7 @@ end script
|
|||||||
If you use antivirus software configure it to skip folders with ClickHouse datafiles (`/var/lib/clickhouse`) otherwise performance may be reduced and you may experience unexpected errors during data ingestion and background merges.
|
If you use antivirus software configure it to skip folders with ClickHouse datafiles (`/var/lib/clickhouse`) otherwise performance may be reduced and you may experience unexpected errors during data ingestion and background merges.
|
||||||
|
|
||||||
[Original article](https://clickhouse.com/docs/en/operations/tips/)
|
[Original article](https://clickhouse.com/docs/en/operations/tips/)
|
||||||
|
|
||||||
|
## Related Content
|
||||||
|
|
||||||
|
- [Getting started with ClickHouse? Here are 13 "Deadly Sins" and how to avoid them](https://clickhouse.com/blog/common-getting-started-issues-with-clickhouse)
|
||||||
|
105
docs/en/operations/update.md
Normal file
105
docs/en/operations/update.md
Normal file
@ -0,0 +1,105 @@
|
|||||||
|
---
|
||||||
|
slug: /en/operations/update
|
||||||
|
sidebar_title: Self-managed Upgrade
|
||||||
|
title: Self-managed Upgrade
|
||||||
|
---
|
||||||
|
|
||||||
|
## ClickHouse upgrade overview
|
||||||
|
|
||||||
|
This document contains:
|
||||||
|
- general guidelines
|
||||||
|
- a recommended plan
|
||||||
|
- specifics for upgrading the binaries on your systems
|
||||||
|
|
||||||
|
## General guidelines
|
||||||
|
|
||||||
|
These notes should help you with planning, and to understand why we make the recommendations that we do later in the document.
|
||||||
|
|
||||||
|
### Upgrade ClickHouse server separately from ClickHouse Keeper or ZooKeeper
|
||||||
|
Unless there is a security fix needed for ClickHouse Keeper or Apache ZooKeeper it is not necessary to upgrade Keeper when you upgrade ClickHouse server. Keeper stability is required during the upgrade process, so complete the ClickHouse server upgrades before considering an upgrade of Keeper.
|
||||||
|
|
||||||
|
### Minor version upgrades should be adopted often
|
||||||
|
It is highly recommended to always upgrade to the newest minor version as soon as it is released. Minor releases do not have breaking changes but do have important bug fixes (and may have security fixes).
|
||||||
|
|
||||||
|
|
||||||
|
### Test experimental features on a separate ClickHouse server running the target version
|
||||||
|
|
||||||
|
The compatibility of experimental features can be broken at any moment in any way. If you are using experimental features, then check the changelogs and consider setting up a separate ClickHouse server with the target version installed and test your use of the experimental features there.
|
||||||
|
|
||||||
|
### Downgrades
|
||||||
|
If you upgrade and then realize that the new version is not compatible with some feature that you depend on you may be able to downgrade to a recent (less than one year old) version if you have not started to use any of the new features. Once the new features are used the downgrade will not work.
|
||||||
|
|
||||||
|
### Multiple ClickHouse server versions in a cluster
|
||||||
|
|
||||||
|
We make an effort to maintain a one-year compatibility window (which includes 2 LTS versions). This means that any two versions should be able to work together in a cluster if the difference between them is less than one year (or if there are less than two LTS versions between them). However, it is recommended to upgrade all members of a cluster to the same version as quickly as possible, as some minor issues are possible (like slowdown of distributed queries, retriable errors in some background operations in ReplicatedMergeTree, etc).
|
||||||
|
|
||||||
|
We never recommend running different versions in the same cluster when the release dates are more than one year. While we do not expect that you will have data loss, the cluster may become unusable. The issues that you should expect if you have more than one year difference in versions include:
|
||||||
|
|
||||||
|
- the cluster may not work
|
||||||
|
- some (or even all) queries may fail with arbitrary errors
|
||||||
|
- arbitrary errors/warnings may appear in the logs
|
||||||
|
- it may be impossible to downgrade
|
||||||
|
|
||||||
|
### Incremental upgrades
|
||||||
|
|
||||||
|
If the difference between the current version and the target version is more than one year, then it is recommended to either:
|
||||||
|
- Upgrade with downtime (stop all servers, upgrade all servers, run all servers).
|
||||||
|
- Or to upgrade through an intermediate version (a version less than one year more recent than the current version).
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
## Recommended plan
|
||||||
|
|
||||||
|
These are the recommended steps for a zero-downtime ClickHouse upgrade:
|
||||||
|
|
||||||
|
1. Make sure that your configuration changes are not in the default `/etc/clickhouse-server/config.xml` file and that they are instead in `/etc/clickhouse-server/config.d/`, as `/etc/clickhouse-server/config.xml` could be overwritten during an upgrade.
|
||||||
|
2. Read through the [changelogs](/docs/en/whats-new/changelog/index.md) for breaking changes (going back from the target release to the release you are currently on).
|
||||||
|
3. Make any updates identified in the breaking changes that can be made before upgrading, and a list of the changes that will need to be made after the upgrade.
|
||||||
|
4. Identify one or more replicas for each shard to keep up while the rest of the replicas for each shard are upgraded.
|
||||||
|
5. On the replicas that will be upgraded, one at a time:
|
||||||
|
- shutdown ClickHouse server
|
||||||
|
- upgrade the server to the target version
|
||||||
|
- bring ClickHouse server up
|
||||||
|
- wait for the Keeper messages to indicate that the system is stable
|
||||||
|
- continue to the next replica
|
||||||
|
6. Check for errors in the Keeper log and the ClickHouse log
|
||||||
|
7. Upgrade the replicas identified in step 4 to the new version
|
||||||
|
8. Refer to the list of changes made in steps 1 through 3 and make the changes that need to be made after the upgrade.
|
||||||
|
|
||||||
|
:::note
|
||||||
|
This error message is expected when there are multiple versions of ClickHouse running in a replicated environment. You will stop seeing these when all replicas are upgraded to the same version.
|
||||||
|
```
|
||||||
|
MergeFromLogEntryTask: Code: 40. DB::Exception: Checksums of parts don't match:
|
||||||
|
hash of uncompressed files doesn't match. (CHECKSUM_DOESNT_MATCH) Data after merge is not
|
||||||
|
byte-identical to data on another replicas.
|
||||||
|
```
|
||||||
|
:::
|
||||||
|
|
||||||
|
|
||||||
|
## ClickHouse server binary upgrade process
|
||||||
|
|
||||||
|
If ClickHouse was installed from `deb` packages, execute the following commands on the server:
|
||||||
|
|
||||||
|
``` bash
|
||||||
|
$ sudo apt-get update
|
||||||
|
$ sudo apt-get install clickhouse-client clickhouse-server
|
||||||
|
$ sudo service clickhouse-server restart
|
||||||
|
```
|
||||||
|
|
||||||
|
If you installed ClickHouse using something other than the recommended `deb` packages, use the appropriate update method.
|
||||||
|
|
||||||
|
:::note
|
||||||
|
You can update multiple servers at once as soon as there is no moment when all replicas of one shard are offline.
|
||||||
|
:::
|
||||||
|
|
||||||
|
The upgrade of older version of ClickHouse to specific version:
|
||||||
|
|
||||||
|
As an example:
|
||||||
|
|
||||||
|
`xx.yy.a.b` is a current stable version. The latest stable version could be found [here](https://github.com/ClickHouse/ClickHouse/releases)
|
||||||
|
|
||||||
|
```bash
|
||||||
|
$ sudo apt-get update
|
||||||
|
$ sudo apt-get install clickhouse-server=xx.yy.a.b clickhouse-client=xx.yy.a.b clickhouse-common-static=xx.yy.a.b
|
||||||
|
$ sudo service clickhouse-server restart
|
||||||
|
```
|
@ -117,3 +117,8 @@ Read 186 rows, 4.15 KiB in 0.035 sec., 5302 rows/sec., 118.34 KiB/sec.
|
|||||||
```
|
```
|
||||||
|
|
||||||
[Original article](https://clickhouse.com/docs/en/operations/utils/clickhouse-local/) <!--hide-->
|
[Original article](https://clickhouse.com/docs/en/operations/utils/clickhouse-local/) <!--hide-->
|
||||||
|
|
||||||
|
## Related Content
|
||||||
|
|
||||||
|
- [Getting Data Into ClickHouse - Part 1](https://clickhouse.com/blog/getting-data-into-clickhouse-part-1)
|
||||||
|
- [Exploring massive, real-world data sets: 100+ Years of Weather Records in ClickHouse](https://clickhouse.com/blog/real-world-data-noaa-climate-data)
|
||||||
|
@ -1,6 +1,7 @@
|
|||||||
---
|
---
|
||||||
slug: /en/sql-reference/aggregate-functions/reference/exponentialmovingaverage
|
slug: /en/sql-reference/aggregate-functions/reference/exponentialmovingaverage
|
||||||
sidebar_position: 108
|
sidebar_position: 108
|
||||||
|
sidebar_title: exponentialMovingAverage
|
||||||
---
|
---
|
||||||
|
|
||||||
## exponentialMovingAverage
|
## exponentialMovingAverage
|
||||||
|
@ -95,3 +95,6 @@ Result:
|
|||||||
└─────────────────────────────────────────────────────────────────────────────────────────────────┴─────────────────┘
|
└─────────────────────────────────────────────────────────────────────────────────────────────────┴─────────────────┘
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## Related Content
|
||||||
|
|
||||||
|
- [Exploring massive, real-world data sets: 100+ Years of Weather Records in ClickHouse](https://clickhouse.com/blog/real-world-data-noaa-climate-data)
|
||||||
|
@ -75,3 +75,7 @@ SELECT * FROM json FORMAT JSONEachRow
|
|||||||
```text
|
```text
|
||||||
{"o":{"a":1,"b":{"c":2,"d":[1,2,3]}}}
|
{"o":{"a":1,"b":{"c":2,"d":[1,2,3]}}}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## Related Content
|
||||||
|
|
||||||
|
- [Getting Data Into ClickHouse - Part 2 - A JSON detour](https://clickhouse.com/blog/getting-data-into-clickhouse-part-2-json)
|
||||||
|
@ -1,4 +1,4 @@
|
|||||||
:::tip
|
:::tip
|
||||||
If you are using a dictionary with ClickHouse Cloud please use the DDL query option to create your dictionaries, and create your dictionary as user `default`.
|
If you are using a dictionary with ClickHouse Cloud please use the DDL query option to create your dictionaries, and create your dictionary as user `default`.
|
||||||
Also, verify the list of supported dictionary sources in the [Cloud Compatibility guide](/docs/en/whats-new/cloud-capabilities.md).
|
Also, verify the list of supported dictionary sources in the [Cloud Compatibility guide](/docs/en/cloud/reference/cloud-compatibility.md).
|
||||||
:::
|
:::
|
||||||
|
@ -134,3 +134,7 @@ Result:
|
|||||||
│ [[[(3,1),(0,1),(0,-1),(3,-1)]]] │ Value │
|
│ [[[(3,1),(0,1),(0,-1),(3,-1)]]] │ Value │
|
||||||
└─────────────────────────────────┴───────┘
|
└─────────────────────────────────┴───────┘
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## Related Content
|
||||||
|
|
||||||
|
- [Exploring massive, real-world data sets: 100+ Years of Weather Records in ClickHouse](https://clickhouse.com/blog/real-world-data-noaa-climate-data)
|
||||||
|
@ -410,35 +410,35 @@ Converts a date with time to a certain fixed date, while preserving the time.
|
|||||||
|
|
||||||
## toRelativeYearNum
|
## toRelativeYearNum
|
||||||
|
|
||||||
Converts a date with time or date to the number of the year, starting from a certain fixed point in the past.
|
Converts a date or date with time to the number of the year, starting from a certain fixed point in the past.
|
||||||
|
|
||||||
## toRelativeQuarterNum
|
## toRelativeQuarterNum
|
||||||
|
|
||||||
Converts a date with time or date to the number of the quarter, starting from a certain fixed point in the past.
|
Converts a date or date with time to the number of the quarter, starting from a certain fixed point in the past.
|
||||||
|
|
||||||
## toRelativeMonthNum
|
## toRelativeMonthNum
|
||||||
|
|
||||||
Converts a date with time or date to the number of the month, starting from a certain fixed point in the past.
|
Converts a date or date with time to the number of the month, starting from a certain fixed point in the past.
|
||||||
|
|
||||||
## toRelativeWeekNum
|
## toRelativeWeekNum
|
||||||
|
|
||||||
Converts a date with time or date to the number of the week, starting from a certain fixed point in the past.
|
Converts a date or date with time to the number of the week, starting from a certain fixed point in the past.
|
||||||
|
|
||||||
## toRelativeDayNum
|
## toRelativeDayNum
|
||||||
|
|
||||||
Converts a date with time or date to the number of the day, starting from a certain fixed point in the past.
|
Converts a date or date with time to the number of the day, starting from a certain fixed point in the past.
|
||||||
|
|
||||||
## toRelativeHourNum
|
## toRelativeHourNum
|
||||||
|
|
||||||
Converts a date with time or date to the number of the hour, starting from a certain fixed point in the past.
|
Converts a date or date with time to the number of the hour, starting from a certain fixed point in the past.
|
||||||
|
|
||||||
## toRelativeMinuteNum
|
## toRelativeMinuteNum
|
||||||
|
|
||||||
Converts a date with time or date to the number of the minute, starting from a certain fixed point in the past.
|
Converts a date or date with time to the number of the minute, starting from a certain fixed point in the past.
|
||||||
|
|
||||||
## toRelativeSecondNum
|
## toRelativeSecondNum
|
||||||
|
|
||||||
Converts a date with time or date to the number of the second, starting from a certain fixed point in the past.
|
Converts a date or date with time to the number of the second, starting from a certain fixed point in the past.
|
||||||
|
|
||||||
## toISOYear
|
## toISOYear
|
||||||
|
|
||||||
@ -517,6 +517,154 @@ SELECT toDate('2016-12-27') AS date, toYearWeek(date) AS yearWeek0, toYearWeek(d
|
|||||||
└────────────┴───────────┴───────────┴───────────┘
|
└────────────┴───────────┴───────────┴───────────┘
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## age
|
||||||
|
|
||||||
|
Returns the `unit` component of the difference between `startdate` and `enddate`. The difference is calculated using a precision of 1 second.
|
||||||
|
E.g. the difference between `2021-12-29` and `2022-01-01` is 3 days for `day` unit, 0 months for `month` unit, 0 years for `year` unit.
|
||||||
|
|
||||||
|
|
||||||
|
**Syntax**
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
age('unit', startdate, enddate, [timezone])
|
||||||
|
```
|
||||||
|
|
||||||
|
**Arguments**
|
||||||
|
|
||||||
|
- `unit` — The type of interval for result. [String](../../sql-reference/data-types/string.md).
|
||||||
|
Possible values:
|
||||||
|
|
||||||
|
- `second` (possible abbreviations: `ss`, `s`)
|
||||||
|
- `minute` (possible abbreviations: `mi`, `n`)
|
||||||
|
- `hour` (possible abbreviations: `hh`, `h`)
|
||||||
|
- `day` (possible abbreviations: `dd`, `d`)
|
||||||
|
- `week` (possible abbreviations: `wk`, `ww`)
|
||||||
|
- `month` (possible abbreviations: `mm`, `m`)
|
||||||
|
- `quarter` (possible abbreviations: `qq`, `q`)
|
||||||
|
- `year` (possible abbreviations: `yyyy`, `yy`)
|
||||||
|
|
||||||
|
- `startdate` — The first time value to subtract (the subtrahend). [Date](../../sql-reference/data-types/date.md), [Date32](../../sql-reference/data-types/date32.md), [DateTime](../../sql-reference/data-types/datetime.md) or [DateTime64](../../sql-reference/data-types/datetime64.md).
|
||||||
|
|
||||||
|
- `enddate` — The second time value to subtract from (the minuend). [Date](../../sql-reference/data-types/date.md), [Date32](../../sql-reference/data-types/date32.md), [DateTime](../../sql-reference/data-types/datetime.md) or [DateTime64](../../sql-reference/data-types/datetime64.md).
|
||||||
|
|
||||||
|
- `timezone` — [Timezone name](../../operations/server-configuration-parameters/settings.md#server_configuration_parameters-timezone) (optional). If specified, it is applied to both `startdate` and `enddate`. If not specified, timezones of `startdate` and `enddate` are used. If they are not the same, the result is unspecified. [String](../../sql-reference/data-types/string.md).
|
||||||
|
|
||||||
|
**Returned value**
|
||||||
|
|
||||||
|
Difference between `enddate` and `startdate` expressed in `unit`.
|
||||||
|
|
||||||
|
Type: [Int](../../sql-reference/data-types/int-uint.md).
|
||||||
|
|
||||||
|
**Example**
|
||||||
|
|
||||||
|
Query:
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
SELECT age('hour', toDateTime('2018-01-01 22:30:00'), toDateTime('2018-01-02 23:00:00'));
|
||||||
|
```
|
||||||
|
|
||||||
|
Result:
|
||||||
|
|
||||||
|
``` text
|
||||||
|
┌─age('hour', toDateTime('2018-01-01 22:30:00'), toDateTime('2018-01-02 23:00:00'))─┐
|
||||||
|
│ 24 │
|
||||||
|
└───────────────────────────────────────────────────────────────────────────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
Query:
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
SELECT
|
||||||
|
toDate('2022-01-01') AS e,
|
||||||
|
toDate('2021-12-29') AS s,
|
||||||
|
age('day', s, e) AS day_age,
|
||||||
|
age('month', s, e) AS month__age,
|
||||||
|
age('year', s, e) AS year_age;
|
||||||
|
```
|
||||||
|
|
||||||
|
Result:
|
||||||
|
|
||||||
|
``` text
|
||||||
|
┌──────────e─┬──────────s─┬─day_age─┬─month__age─┬─year_age─┐
|
||||||
|
│ 2022-01-01 │ 2021-12-29 │ 3 │ 0 │ 0 │
|
||||||
|
└────────────┴────────────┴─────────┴────────────┴──────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## date\_diff
|
||||||
|
|
||||||
|
Returns the count of the specified `unit` boundaries crossed between `startdate` and `enddate`.
|
||||||
|
The difference is calculated using relative units, e.g. the difference between `2021-12-29` and `2022-01-01` is 3 days for day unit (see [toRelativeDayNum](#torelativedaynum)), 1 month for month unit (see [toRelativeMonthNum](#torelativemonthnum)), 1 year for year unit (see [toRelativeYearNum](#torelativeyearnum)).
|
||||||
|
|
||||||
|
**Syntax**
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
date_diff('unit', startdate, enddate, [timezone])
|
||||||
|
```
|
||||||
|
|
||||||
|
Aliases: `dateDiff`, `DATE_DIFF`.
|
||||||
|
|
||||||
|
**Arguments**
|
||||||
|
|
||||||
|
- `unit` — The type of interval for result. [String](../../sql-reference/data-types/string.md).
|
||||||
|
Possible values:
|
||||||
|
|
||||||
|
- `second` (possible abbreviations: `ss`, `s`)
|
||||||
|
- `minute` (possible abbreviations: `mi`, `n`)
|
||||||
|
- `hour` (possible abbreviations: `hh`, `h`)
|
||||||
|
- `day` (possible abbreviations: `dd`, `d`)
|
||||||
|
- `week` (possible abbreviations: `wk`, `ww`)
|
||||||
|
- `month` (possible abbreviations: `mm`, `m`)
|
||||||
|
- `quarter` (possible abbreviations: `qq`, `q`)
|
||||||
|
- `year` (possible abbreviations: `yyyy`, `yy`)
|
||||||
|
|
||||||
|
- `startdate` — The first time value to subtract (the subtrahend). [Date](../../sql-reference/data-types/date.md), [Date32](../../sql-reference/data-types/date32.md), [DateTime](../../sql-reference/data-types/datetime.md) or [DateTime64](../../sql-reference/data-types/datetime64.md).
|
||||||
|
|
||||||
|
- `enddate` — The second time value to subtract from (the minuend). [Date](../../sql-reference/data-types/date.md), [Date32](../../sql-reference/data-types/date32.md), [DateTime](../../sql-reference/data-types/datetime.md) or [DateTime64](../../sql-reference/data-types/datetime64.md).
|
||||||
|
|
||||||
|
- `timezone` — [Timezone name](../../operations/server-configuration-parameters/settings.md#server_configuration_parameters-timezone) (optional). If specified, it is applied to both `startdate` and `enddate`. If not specified, timezones of `startdate` and `enddate` are used. If they are not the same, the result is unspecified. [String](../../sql-reference/data-types/string.md).
|
||||||
|
|
||||||
|
**Returned value**
|
||||||
|
|
||||||
|
Difference between `enddate` and `startdate` expressed in `unit`.
|
||||||
|
|
||||||
|
Type: [Int](../../sql-reference/data-types/int-uint.md).
|
||||||
|
|
||||||
|
**Example**
|
||||||
|
|
||||||
|
Query:
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
SELECT dateDiff('hour', toDateTime('2018-01-01 22:00:00'), toDateTime('2018-01-02 23:00:00'));
|
||||||
|
```
|
||||||
|
|
||||||
|
Result:
|
||||||
|
|
||||||
|
``` text
|
||||||
|
┌─dateDiff('hour', toDateTime('2018-01-01 22:00:00'), toDateTime('2018-01-02 23:00:00'))─┐
|
||||||
|
│ 25 │
|
||||||
|
└────────────────────────────────────────────────────────────────────────────────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
Query:
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
SELECT
|
||||||
|
toDate('2022-01-01') AS e,
|
||||||
|
toDate('2021-12-29') AS s,
|
||||||
|
dateDiff('day', s, e) AS day_diff,
|
||||||
|
dateDiff('month', s, e) AS month__diff,
|
||||||
|
dateDiff('year', s, e) AS year_diff;
|
||||||
|
```
|
||||||
|
|
||||||
|
Result:
|
||||||
|
|
||||||
|
``` text
|
||||||
|
┌──────────e─┬──────────s─┬─day_diff─┬─month__diff─┬─year_diff─┐
|
||||||
|
│ 2022-01-01 │ 2021-12-29 │ 3 │ 1 │ 1 │
|
||||||
|
└────────────┴────────────┴──────────┴─────────────┴───────────┘
|
||||||
|
```
|
||||||
|
|
||||||
## date\_trunc
|
## date\_trunc
|
||||||
|
|
||||||
Truncates date and time data to the specified part of date.
|
Truncates date and time data to the specified part of date.
|
||||||
@ -637,80 +785,6 @@ Result:
|
|||||||
└───────────────────────────────────────────────┘
|
└───────────────────────────────────────────────┘
|
||||||
```
|
```
|
||||||
|
|
||||||
## date\_diff
|
|
||||||
|
|
||||||
Returns the difference between two dates or dates with time values.
|
|
||||||
The difference is calculated using relative units, e.g. the difference between `2022-01-01` and `2021-12-29` is 3 days for day unit (see [toRelativeDayNum](#torelativedaynum)), 1 month for month unit (see [toRelativeMonthNum](#torelativemonthnum)), 1 year for year unit (see [toRelativeYearNum](#torelativeyearnum)).
|
|
||||||
|
|
||||||
**Syntax**
|
|
||||||
|
|
||||||
``` sql
|
|
||||||
date_diff('unit', startdate, enddate, [timezone])
|
|
||||||
```
|
|
||||||
|
|
||||||
Aliases: `dateDiff`, `DATE_DIFF`.
|
|
||||||
|
|
||||||
**Arguments**
|
|
||||||
|
|
||||||
- `unit` — The type of interval for result. [String](../../sql-reference/data-types/string.md).
|
|
||||||
Possible values:
|
|
||||||
|
|
||||||
- `second`
|
|
||||||
- `minute`
|
|
||||||
- `hour`
|
|
||||||
- `day`
|
|
||||||
- `week`
|
|
||||||
- `month`
|
|
||||||
- `quarter`
|
|
||||||
- `year`
|
|
||||||
|
|
||||||
- `startdate` — The first time value to subtract (the subtrahend). [Date](../../sql-reference/data-types/date.md), [Date32](../../sql-reference/data-types/date32.md), [DateTime](../../sql-reference/data-types/datetime.md) or [DateTime64](../../sql-reference/data-types/datetime64.md).
|
|
||||||
|
|
||||||
- `enddate` — The second time value to subtract from (the minuend). [Date](../../sql-reference/data-types/date.md), [Date32](../../sql-reference/data-types/date32.md), [DateTime](../../sql-reference/data-types/datetime.md) or [DateTime64](../../sql-reference/data-types/datetime64.md).
|
|
||||||
|
|
||||||
- `timezone` — [Timezone name](../../operations/server-configuration-parameters/settings.md#server_configuration_parameters-timezone) (optional). If specified, it is applied to both `startdate` and `enddate`. If not specified, timezones of `startdate` and `enddate` are used. If they are not the same, the result is unspecified. [String](../../sql-reference/data-types/string.md).
|
|
||||||
|
|
||||||
**Returned value**
|
|
||||||
|
|
||||||
Difference between `enddate` and `startdate` expressed in `unit`.
|
|
||||||
|
|
||||||
Type: [Int](../../sql-reference/data-types/int-uint.md).
|
|
||||||
|
|
||||||
**Example**
|
|
||||||
|
|
||||||
Query:
|
|
||||||
|
|
||||||
``` sql
|
|
||||||
SELECT dateDiff('hour', toDateTime('2018-01-01 22:00:00'), toDateTime('2018-01-02 23:00:00'));
|
|
||||||
```
|
|
||||||
|
|
||||||
Result:
|
|
||||||
|
|
||||||
``` text
|
|
||||||
┌─dateDiff('hour', toDateTime('2018-01-01 22:00:00'), toDateTime('2018-01-02 23:00:00'))─┐
|
|
||||||
│ 25 │
|
|
||||||
└────────────────────────────────────────────────────────────────────────────────────────┘
|
|
||||||
```
|
|
||||||
|
|
||||||
Query:
|
|
||||||
|
|
||||||
``` sql
|
|
||||||
SELECT
|
|
||||||
toDate('2022-01-01') AS e,
|
|
||||||
toDate('2021-12-29') AS s,
|
|
||||||
dateDiff('day', s, e) AS day_diff,
|
|
||||||
dateDiff('month', s, e) AS month__diff,
|
|
||||||
dateDiff('year', s, e) AS year_diff;
|
|
||||||
```
|
|
||||||
|
|
||||||
Result:
|
|
||||||
|
|
||||||
``` text
|
|
||||||
┌──────────e─┬──────────s─┬─day_diff─┬─month__diff─┬─year_diff─┐
|
|
||||||
│ 2022-01-01 │ 2021-12-29 │ 3 │ 1 │ 1 │
|
|
||||||
└────────────┴────────────┴──────────┴─────────────┴───────────┘
|
|
||||||
```
|
|
||||||
|
|
||||||
## date\_sub
|
## date\_sub
|
||||||
|
|
||||||
Subtracts the time interval or date interval from the provided date or date with time.
|
Subtracts the time interval or date interval from the provided date or date with time.
|
||||||
|
@ -464,5 +464,39 @@ Removes the query string and fragment identifier. The question mark and number s
|
|||||||
|
|
||||||
### cutURLParameter(URL, name)
|
### cutURLParameter(URL, name)
|
||||||
|
|
||||||
Removes the ‘name’ URL parameter, if present. This function works under the assumption that the parameter name is encoded in the URL exactly the same way as in the passed argument.
|
Removes the `name` parameter from URL, if present. This function does not encode or decode characters in parameter names, e.g. `Client ID` and `Client%20ID` are treated as different parameter names.
|
||||||
|
|
||||||
|
**Syntax**
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
cutURLParameter(URL, name)
|
||||||
|
```
|
||||||
|
|
||||||
|
**Arguments**
|
||||||
|
|
||||||
|
- `url` — URL. [String](../../sql-reference/data-types/string.md).
|
||||||
|
- `name` — name of URL parameter. [String](../../sql-reference/data-types/string.md) or [Array](../../sql-reference/data-types/array.md) of Strings.
|
||||||
|
|
||||||
|
**Returned value**
|
||||||
|
|
||||||
|
- URL with `name` URL parameter removed.
|
||||||
|
|
||||||
|
Type: `String`.
|
||||||
|
|
||||||
|
**Example**
|
||||||
|
|
||||||
|
Query:
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
SELECT
|
||||||
|
cutURLParameter('http://bigmir.net/?a=b&c=d&e=f#g', 'a') as url_without_a,
|
||||||
|
cutURLParameter('http://bigmir.net/?a=b&c=d&e=f#g', ['c', 'e']) as url_without_c_and_e;
|
||||||
|
```
|
||||||
|
|
||||||
|
Result:
|
||||||
|
|
||||||
|
``` text
|
||||||
|
┌─url_without_a────────────────┬─url_without_c_and_e──────┐
|
||||||
|
│ http://bigmir.net/?c=d&e=f#g │ http://bigmir.net/?a=b#g │
|
||||||
|
└──────────────────────────────┴──────────────────────────┘
|
||||||
|
```
|
||||||
|
@ -194,7 +194,7 @@ To restore data from a backup, do the following:
|
|||||||
|
|
||||||
Restoring from a backup does not require stopping the server.
|
Restoring from a backup does not require stopping the server.
|
||||||
|
|
||||||
For more information about backups and restoring data, see the [Data Backup](/docs/en/manage/backups.mdx) section.
|
For more information about backups and restoring data, see the [Data Backup](/docs/en/operations/backup.md) section.
|
||||||
|
|
||||||
## UNFREEZE PARTITION
|
## UNFREEZE PARTITION
|
||||||
|
|
||||||
|
@ -7,7 +7,7 @@ sidebar_label: UPDATE
|
|||||||
# ALTER TABLE … UPDATE Statements
|
# ALTER TABLE … UPDATE Statements
|
||||||
|
|
||||||
``` sql
|
``` sql
|
||||||
ALTER TABLE [db.]table [ON CLUSTER cluster] UPDATE column1 = expr1 [, ...] WHERE filter_expr
|
ALTER TABLE [db.]table [ON CLUSTER cluster] UPDATE column1 = expr1 [, ...] [IN PARTITION partition_id] WHERE filter_expr
|
||||||
```
|
```
|
||||||
|
|
||||||
Manipulates data matching the specified filtering expression. Implemented as a [mutation](/docs/en/sql-reference/statements/alter/index.md#mutations).
|
Manipulates data matching the specified filtering expression. Implemented as a [mutation](/docs/en/sql-reference/statements/alter/index.md#mutations).
|
||||||
|
@ -10,7 +10,7 @@ Creates [settings profiles](../../../operations/access-rights.md#settings-profil
|
|||||||
Syntax:
|
Syntax:
|
||||||
|
|
||||||
``` sql
|
``` sql
|
||||||
CREATE SETTINGS PROFILE [IF NOT EXISTS | OR REPLACE] TO name1 [ON CLUSTER cluster_name1]
|
CREATE SETTINGS PROFILE [IF NOT EXISTS | OR REPLACE] name1 [ON CLUSTER cluster_name1]
|
||||||
[, name2 [ON CLUSTER cluster_name2] ...]
|
[, name2 [ON CLUSTER cluster_name2] ...]
|
||||||
[SETTINGS variable [= value] [MIN [=] min_value] [MAX [=] max_value] [CONST|READONLY|WRITABLE|CHANGEABLE_IN_READONLY] | INHERIT 'profile_name'] [,...]
|
[SETTINGS variable [= value] [MIN [=] min_value] [MAX [=] max_value] [CONST|READONLY|WRITABLE|CHANGEABLE_IN_READONLY] | INHERIT 'profile_name'] [,...]
|
||||||
```
|
```
|
||||||
|
@ -587,3 +587,8 @@ ORDER BY
|
|||||||
│ ambient_temp │ 2020-03-01 12:00:00 │ 16 │ 16 │
|
│ ambient_temp │ 2020-03-01 12:00:00 │ 16 │ 16 │
|
||||||
└──────────────┴─────────────────────┴───────┴─────────────────────────┘
|
└──────────────┴─────────────────────┴───────┴─────────────────────────┘
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## Related Content
|
||||||
|
|
||||||
|
- [Window and array functions for Git commit sequences](https://clickhouse.com/blog/clickhouse-window-array-functions-git-commits)
|
||||||
|
- [Getting Data Into ClickHouse - Part 3 - Using S3](https://clickhouse.com/blog/getting-data-into-clickhouse-part-3-s3)
|
||||||
|
@ -424,23 +424,23 @@ WITH toDateTime64('2020-01-01 10:20:30.999', 3) AS dt64 SELECT toStartOfSecond(d
|
|||||||
|
|
||||||
## toRelativeYearNum {#torelativeyearnum}
|
## toRelativeYearNum {#torelativeyearnum}
|
||||||
|
|
||||||
Переводит дату-с-временем или дату в номер года, начиная с некоторого фиксированного момента в прошлом.
|
Переводит дату или дату-с-временем в номер года, начиная с некоторого фиксированного момента в прошлом.
|
||||||
|
|
||||||
## toRelativeQuarterNum {#torelativequarternum}
|
## toRelativeQuarterNum {#torelativequarternum}
|
||||||
|
|
||||||
Переводит дату-с-временем или дату в номер квартала, начиная с некоторого фиксированного момента в прошлом.
|
Переводит дату или дату-с-временем в номер квартала, начиная с некоторого фиксированного момента в прошлом.
|
||||||
|
|
||||||
## toRelativeMonthNum {#torelativemonthnum}
|
## toRelativeMonthNum {#torelativemonthnum}
|
||||||
|
|
||||||
Переводит дату-с-временем или дату в номер месяца, начиная с некоторого фиксированного момента в прошлом.
|
Переводит дату или дату-с-временем в номер месяца, начиная с некоторого фиксированного момента в прошлом.
|
||||||
|
|
||||||
## toRelativeWeekNum {#torelativeweeknum}
|
## toRelativeWeekNum {#torelativeweeknum}
|
||||||
|
|
||||||
Переводит дату-с-временем или дату в номер недели, начиная с некоторого фиксированного момента в прошлом.
|
Переводит дату или дату-с-временем в номер недели, начиная с некоторого фиксированного момента в прошлом.
|
||||||
|
|
||||||
## toRelativeDayNum {#torelativedaynum}
|
## toRelativeDayNum {#torelativedaynum}
|
||||||
|
|
||||||
Переводит дату-с-временем или дату в номер дня, начиная с некоторого фиксированного момента в прошлом.
|
Переводит дату или дату-с-временем в номер дня, начиная с некоторого фиксированного момента в прошлом.
|
||||||
|
|
||||||
## toRelativeHourNum {#torelativehournum}
|
## toRelativeHourNum {#torelativehournum}
|
||||||
|
|
||||||
@ -456,7 +456,7 @@ WITH toDateTime64('2020-01-01 10:20:30.999', 3) AS dt64 SELECT toStartOfSecond(d
|
|||||||
|
|
||||||
## toISOYear {#toisoyear}
|
## toISOYear {#toisoyear}
|
||||||
|
|
||||||
Переводит дату-с-временем или дату в число типа UInt16, содержащее номер ISO года. ISO год отличается от обычного года, потому что в соответствии с [ISO 8601:1988](https://en.wikipedia.org/wiki/ISO_8601) ISO год начинается необязательно первого января.
|
Переводит дату или дату-с-временем в число типа UInt16, содержащее номер ISO года. ISO год отличается от обычного года, потому что в соответствии с [ISO 8601:1988](https://en.wikipedia.org/wiki/ISO_8601) ISO год начинается необязательно первого января.
|
||||||
|
|
||||||
**Пример**
|
**Пример**
|
||||||
|
|
||||||
@ -479,7 +479,7 @@ SELECT
|
|||||||
|
|
||||||
## toISOWeek {#toisoweek}
|
## toISOWeek {#toisoweek}
|
||||||
|
|
||||||
Переводит дату-с-временем или дату в число типа UInt8, содержащее номер ISO недели.
|
Переводит дату или дату-с-временем в число типа UInt8, содержащее номер ISO недели.
|
||||||
Начало ISO года отличается от начала обычного года, потому что в соответствии с [ISO 8601:1988](https://en.wikipedia.org/wiki/ISO_8601) первая неделя года - это неделя с четырьмя или более днями в этом году.
|
Начало ISO года отличается от начала обычного года, потому что в соответствии с [ISO 8601:1988](https://en.wikipedia.org/wiki/ISO_8601) первая неделя года - это неделя с четырьмя или более днями в этом году.
|
||||||
|
|
||||||
1 Января 2017 г. - воскресение, т.е. первая ISO неделя 2017 года началась в понедельник 2 января, поэтому 1 января 2017 это последняя неделя 2016 года.
|
1 Января 2017 г. - воскресение, т.е. первая ISO неделя 2017 года началась в понедельник 2 января, поэтому 1 января 2017 это последняя неделя 2016 года.
|
||||||
@ -503,7 +503,7 @@ SELECT
|
|||||||
```
|
```
|
||||||
|
|
||||||
## toWeek(date\[, mode\]\[, timezone\]) {#toweek}
|
## toWeek(date\[, mode\]\[, timezone\]) {#toweek}
|
||||||
Переводит дату-с-временем или дату в число UInt8, содержащее номер недели. Второй аргументам mode задает режим, начинается ли неделя с воскресенья или с понедельника и должно ли возвращаемое значение находиться в диапазоне от 0 до 53 или от 1 до 53. Если аргумент mode опущен, то используется режим 0.
|
Переводит дату или дату-с-временем в число UInt8, содержащее номер недели. Второй аргументам mode задает режим, начинается ли неделя с воскресенья или с понедельника и должно ли возвращаемое значение находиться в диапазоне от 0 до 53 или от 1 до 53. Если аргумент mode опущен, то используется режим 0.
|
||||||
|
|
||||||
`toISOWeek() ` эквивалентно `toWeek(date,3)`.
|
`toISOWeek() ` эквивалентно `toWeek(date,3)`.
|
||||||
|
|
||||||
@ -569,6 +569,132 @@ SELECT toDate('2016-12-27') AS date, toYearWeek(date) AS yearWeek0, toYearWeek(d
|
|||||||
└────────────┴───────────┴───────────┴───────────┘
|
└────────────┴───────────┴───────────┴───────────┘
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## age
|
||||||
|
|
||||||
|
Вычисляет компонент `unit` разницы между `startdate` и `enddate`. Разница вычисляется с точностью в 1 секунду.
|
||||||
|
Например, разница между `2021-12-29` и `2022-01-01` 3 дня для единицы `day`, 0 месяцев для единицы `month`, 0 лет для единицы `year`.
|
||||||
|
|
||||||
|
**Синтаксис**
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
age('unit', startdate, enddate, [timezone])
|
||||||
|
```
|
||||||
|
|
||||||
|
**Аргументы**
|
||||||
|
|
||||||
|
- `unit` — единица измерения времени, в которой будет выражено возвращаемое значение функции. [String](../../sql-reference/data-types/string.md).
|
||||||
|
Возможные значения:
|
||||||
|
|
||||||
|
- `second` (возможные сокращения: `ss`, `s`)
|
||||||
|
- `minute` (возможные сокращения: `mi`, `n`)
|
||||||
|
- `hour` (возможные сокращения: `hh`, `h`)
|
||||||
|
- `day` (возможные сокращения: `dd`, `d`)
|
||||||
|
- `week` (возможные сокращения: `wk`, `ww`)
|
||||||
|
- `month` (возможные сокращения: `mm`, `m`)
|
||||||
|
- `quarter` (возможные сокращения: `qq`, `q`)
|
||||||
|
- `year` (возможные сокращения: `yyyy`, `yy`)
|
||||||
|
|
||||||
|
- `startdate` — первая дата или дата со временем, которая вычитается из `enddate`. [Date](../../sql-reference/data-types/date.md), [Date32](../../sql-reference/data-types/date32.md), [DateTime](../../sql-reference/data-types/datetime.md) или [DateTime64](../../sql-reference/data-types/datetime64.md).
|
||||||
|
|
||||||
|
- `enddate` — вторая дата или дата со временем, из которой вычитается `startdate`. [Date](../../sql-reference/data-types/date.md), [Date32](../../sql-reference/data-types/date32.md), [DateTime](../../sql-reference/data-types/datetime.md) или [DateTime64](../../sql-reference/data-types/datetime64.md).
|
||||||
|
|
||||||
|
- `timezone` — [часовой пояс](../../operations/server-configuration-parameters/settings.md#server_configuration_parameters-timezone) (необязательно). Если этот аргумент указан, то он применяется как для `startdate`, так и для `enddate`. Если этот аргумент не указан, то используются часовые пояса аргументов `startdate` и `enddate`. Если часовые пояса аргументов `startdate` и `enddate` не совпадают, то результат не определен. [String](../../sql-reference/data-types/string.md).
|
||||||
|
|
||||||
|
**Возвращаемое значение**
|
||||||
|
|
||||||
|
Разница между `enddate` и `startdate`, выраженная в `unit`.
|
||||||
|
|
||||||
|
Тип: [Int](../../sql-reference/data-types/int-uint.md).
|
||||||
|
|
||||||
|
**Пример**
|
||||||
|
|
||||||
|
Запрос:
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
SELECT age('hour', toDateTime('2018-01-01 22:30:00'), toDateTime('2018-01-02 23:00:00'));
|
||||||
|
```
|
||||||
|
|
||||||
|
Результат:
|
||||||
|
|
||||||
|
``` text
|
||||||
|
┌─age('hour', toDateTime('2018-01-01 22:30:00'), toDateTime('2018-01-02 23:00:00'))─┐
|
||||||
|
│ 24 │
|
||||||
|
└───────────────────────────────────────────────────────────────────────────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
Запрос:
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
SELECT
|
||||||
|
toDate('2022-01-01') AS e,
|
||||||
|
toDate('2021-12-29') AS s,
|
||||||
|
age('day', s, e) AS day_age,
|
||||||
|
age('month', s, e) AS month__age,
|
||||||
|
age('year', s, e) AS year_age;
|
||||||
|
```
|
||||||
|
|
||||||
|
Результат:
|
||||||
|
|
||||||
|
``` text
|
||||||
|
┌──────────e─┬──────────s─┬─day_age─┬─month__age─┬─year_age─┐
|
||||||
|
│ 2022-01-01 │ 2021-12-29 │ 3 │ 0 │ 0 │
|
||||||
|
└────────────┴────────────┴─────────┴────────────┴──────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
## date\_diff {#date_diff}
|
||||||
|
|
||||||
|
Вычисляет количество указанных границ `unit`, пересекаемых между `startdate` и `enddate`.
|
||||||
|
|
||||||
|
**Синтаксис**
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
date_diff('unit', startdate, enddate, [timezone])
|
||||||
|
```
|
||||||
|
|
||||||
|
Синонимы: `dateDiff`, `DATE_DIFF`.
|
||||||
|
|
||||||
|
**Аргументы**
|
||||||
|
|
||||||
|
- `unit` — единица измерения времени, в которой будет выражено возвращаемое значение функции. [String](../../sql-reference/data-types/string.md).
|
||||||
|
Возможные значения:
|
||||||
|
|
||||||
|
- `second` (возможные сокращения: `ss`, `s`)
|
||||||
|
- `minute` (возможные сокращения: `mi`, `n`)
|
||||||
|
- `hour` (возможные сокращения: `hh`, `h`)
|
||||||
|
- `day` (возможные сокращения: `dd`, `d`)
|
||||||
|
- `week` (возможные сокращения: `wk`, `ww`)
|
||||||
|
- `month` (возможные сокращения: `mm`, `m`)
|
||||||
|
- `quarter` (возможные сокращения: `qq`, `q`)
|
||||||
|
- `year` (возможные сокращения: `yyyy`, `yy`)
|
||||||
|
|
||||||
|
- `startdate` — первая дата или дата со временем, которая вычитается из `enddate`. [Date](../../sql-reference/data-types/date.md), [Date32](../../sql-reference/data-types/date32.md), [DateTime](../../sql-reference/data-types/datetime.md) или [DateTime64](../../sql-reference/data-types/datetime64.md).
|
||||||
|
|
||||||
|
- `enddate` — вторая дата или дата со временем, из которой вычитается `startdate`. [Date](../../sql-reference/data-types/date.md), [Date32](../../sql-reference/data-types/date32.md), [DateTime](../../sql-reference/data-types/datetime.md) или [DateTime64](../../sql-reference/data-types/datetime64.md).
|
||||||
|
|
||||||
|
- `timezone` — [часовой пояс](../../operations/server-configuration-parameters/settings.md#server_configuration_parameters-timezone) (необязательно). Если этот аргумент указан, то он применяется как для `startdate`, так и для `enddate`. Если этот аргумент не указан, то используются часовые пояса аргументов `startdate` и `enddate`. Если часовые пояса аргументов `startdate` и `enddate` не совпадают, то результат не определен. [String](../../sql-reference/data-types/string.md).
|
||||||
|
|
||||||
|
**Возвращаемое значение**
|
||||||
|
|
||||||
|
Разница между `enddate` и `startdate`, выраженная в `unit`.
|
||||||
|
|
||||||
|
Тип: [Int](../../sql-reference/data-types/int-uint.md).
|
||||||
|
|
||||||
|
**Пример**
|
||||||
|
|
||||||
|
Запрос:
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
SELECT dateDiff('hour', toDateTime('2018-01-01 22:00:00'), toDateTime('2018-01-02 23:00:00'));
|
||||||
|
```
|
||||||
|
|
||||||
|
Результат:
|
||||||
|
|
||||||
|
``` text
|
||||||
|
┌─dateDiff('hour', toDateTime('2018-01-01 22:00:00'), toDateTime('2018-01-02 23:00:00'))─┐
|
||||||
|
│ 25 │
|
||||||
|
└────────────────────────────────────────────────────────────────────────────────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
## date_trunc {#date_trunc}
|
## date_trunc {#date_trunc}
|
||||||
|
|
||||||
Отсекает от даты и времени части, меньшие чем указанная часть.
|
Отсекает от даты и времени части, меньшие чем указанная часть.
|
||||||
@ -689,60 +815,6 @@ SELECT date_add(YEAR, 3, toDate('2018-01-01'));
|
|||||||
└───────────────────────────────────────────────┘
|
└───────────────────────────────────────────────┘
|
||||||
```
|
```
|
||||||
|
|
||||||
## date\_diff {#date_diff}
|
|
||||||
|
|
||||||
Вычисляет разницу между двумя значениями дат или дат со временем.
|
|
||||||
|
|
||||||
**Синтаксис**
|
|
||||||
|
|
||||||
``` sql
|
|
||||||
date_diff('unit', startdate, enddate, [timezone])
|
|
||||||
```
|
|
||||||
|
|
||||||
Синонимы: `dateDiff`, `DATE_DIFF`.
|
|
||||||
|
|
||||||
**Аргументы**
|
|
||||||
|
|
||||||
- `unit` — единица измерения времени, в которой будет выражено возвращаемое значение функции. [String](../../sql-reference/data-types/string.md).
|
|
||||||
Возможные значения:
|
|
||||||
|
|
||||||
- `second`
|
|
||||||
- `minute`
|
|
||||||
- `hour`
|
|
||||||
- `day`
|
|
||||||
- `week`
|
|
||||||
- `month`
|
|
||||||
- `quarter`
|
|
||||||
- `year`
|
|
||||||
|
|
||||||
- `startdate` — первая дата или дата со временем, которая вычитается из `enddate`. [Date](../../sql-reference/data-types/date.md), [Date32](../../sql-reference/data-types/date32.md), [DateTime](../../sql-reference/data-types/datetime.md) или [DateTime64](../../sql-reference/data-types/datetime64.md).
|
|
||||||
|
|
||||||
- `enddate` — вторая дата или дата со временем, из которой вычитается `startdate`. [Date](../../sql-reference/data-types/date.md), [Date32](../../sql-reference/data-types/date32.md), [DateTime](../../sql-reference/data-types/datetime.md) или [DateTime64](../../sql-reference/data-types/datetime64.md).
|
|
||||||
|
|
||||||
- `timezone` — [часовой пояс](../../operations/server-configuration-parameters/settings.md#server_configuration_parameters-timezone) (необязательно). Если этот аргумент указан, то он применяется как для `startdate`, так и для `enddate`. Если этот аргумент не указан, то используются часовые пояса аргументов `startdate` и `enddate`. Если часовые пояса аргументов `startdate` и `enddate` не совпадают, то результат не определен. [String](../../sql-reference/data-types/string.md).
|
|
||||||
|
|
||||||
**Возвращаемое значение**
|
|
||||||
|
|
||||||
Разница между `enddate` и `startdate`, выраженная в `unit`.
|
|
||||||
|
|
||||||
Тип: [Int](../../sql-reference/data-types/int-uint.md).
|
|
||||||
|
|
||||||
**Пример**
|
|
||||||
|
|
||||||
Запрос:
|
|
||||||
|
|
||||||
``` sql
|
|
||||||
SELECT dateDiff('hour', toDateTime('2018-01-01 22:00:00'), toDateTime('2018-01-02 23:00:00'));
|
|
||||||
```
|
|
||||||
|
|
||||||
Результат:
|
|
||||||
|
|
||||||
``` text
|
|
||||||
┌─dateDiff('hour', toDateTime('2018-01-01 22:00:00'), toDateTime('2018-01-02 23:00:00'))─┐
|
|
||||||
│ 25 │
|
|
||||||
└────────────────────────────────────────────────────────────────────────────────────────┘
|
|
||||||
```
|
|
||||||
|
|
||||||
## date\_sub {#date_sub}
|
## date\_sub {#date_sub}
|
||||||
|
|
||||||
Вычитает интервал времени или даты из указанной даты или даты со временем.
|
Вычитает интервал времени или даты из указанной даты или даты со временем.
|
||||||
|
@ -404,5 +404,39 @@ SELECT netloc('http://paul@www.example.com:80/');
|
|||||||
|
|
||||||
### cutURLParameter(URL, name) {#cuturlparameterurl-name}
|
### cutURLParameter(URL, name) {#cuturlparameterurl-name}
|
||||||
|
|
||||||
Удаляет параметр URL с именем name, если такой есть. Функция работает при допущении, что имя параметра закодировано в URL в точности таким же образом, что и в переданном аргументе.
|
Удаляет параметр с именем `name` из URL, если такой есть. Функция не кодирует или декодирует символы в именах параметров. Например `Client ID` и `Client%20ID` обрабатываются как разные имена параметров.
|
||||||
|
|
||||||
|
**Синтаксис**
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
cutURLParameter(URL, name)
|
||||||
|
```
|
||||||
|
|
||||||
|
**Аргументы**
|
||||||
|
|
||||||
|
- `url` — URL. [String](../../sql-reference/data-types/string.md).
|
||||||
|
- `name` — имя параметра URL. [String](../../sql-reference/data-types/string.md) или [Array](../../sql-reference/data-types/array.md) состоящий из строк.
|
||||||
|
|
||||||
|
**Возвращаемое значение**
|
||||||
|
|
||||||
|
- URL с удалённым параметром URL с именем `name`.
|
||||||
|
|
||||||
|
Type: `String`.
|
||||||
|
|
||||||
|
**Пример**
|
||||||
|
|
||||||
|
Запрос:
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
SELECT
|
||||||
|
cutURLParameter('http://bigmir.net/?a=b&c=d&e=f#g', 'a') as url_without_a,
|
||||||
|
cutURLParameter('http://bigmir.net/?a=b&c=d&e=f#g', ['c', 'e']) as url_without_c_and_e;
|
||||||
|
```
|
||||||
|
|
||||||
|
Результат:
|
||||||
|
|
||||||
|
``` text
|
||||||
|
┌─url_without_a────────────────┬─url_without_c_and_e──────┐
|
||||||
|
│ http://bigmir.net/?c=d&e=f#g │ http://bigmir.net/?a=b#g │
|
||||||
|
└──────────────────────────────┴──────────────────────────┘
|
||||||
|
```
|
||||||
|
@ -11,7 +11,7 @@ sidebar_label: "Профиль настроек"
|
|||||||
Синтаксис:
|
Синтаксис:
|
||||||
|
|
||||||
``` sql
|
``` sql
|
||||||
CREATE SETTINGS PROFILE [IF NOT EXISTS | OR REPLACE] TO name1 [ON CLUSTER cluster_name1]
|
CREATE SETTINGS PROFILE [IF NOT EXISTS | OR REPLACE] name1 [ON CLUSTER cluster_name1]
|
||||||
[, name2 [ON CLUSTER cluster_name2] ...]
|
[, name2 [ON CLUSTER cluster_name2] ...]
|
||||||
[SETTINGS variable [= value] [MIN [=] min_value] [MAX [=] max_value] [CONST|READONLY|WRITABLE|CHANGEABLE_IN_READONLY] | INHERIT 'profile_name'] [,...]
|
[SETTINGS variable [= value] [MIN [=] min_value] [MAX [=] max_value] [CONST|READONLY|WRITABLE|CHANGEABLE_IN_READONLY] | INHERIT 'profile_name'] [,...]
|
||||||
```
|
```
|
||||||
|
@ -1 +0,0 @@
|
|||||||
../../../en/sql-reference/table-functions/format.md
|
|
75
docs/ru/sql-reference/table-functions/format.md
Normal file
75
docs/ru/sql-reference/table-functions/format.md
Normal file
@ -0,0 +1,75 @@
|
|||||||
|
---
|
||||||
|
slug: /ru/sql-reference/table-functions/format
|
||||||
|
sidebar_position: 56
|
||||||
|
sidebar_label: format
|
||||||
|
---
|
||||||
|
|
||||||
|
# format
|
||||||
|
|
||||||
|
Extracts table structure from data and parses it according to specified input format.
|
||||||
|
|
||||||
|
**Syntax**
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
format(format_name, data)
|
||||||
|
```
|
||||||
|
|
||||||
|
**Parameters**
|
||||||
|
|
||||||
|
- `format_name` — The [format](../../interfaces/formats.md#formats) of the data.
|
||||||
|
- `data` — String literal or constant expression that returns a string containing data in specified format
|
||||||
|
|
||||||
|
**Returned value**
|
||||||
|
|
||||||
|
A table with data parsed from `data` argument according specified format and extracted schema.
|
||||||
|
|
||||||
|
**Examples**
|
||||||
|
|
||||||
|
**Query:**
|
||||||
|
``` sql
|
||||||
|
:) select * from format(JSONEachRow,
|
||||||
|
$$
|
||||||
|
{"a": "Hello", "b": 111}
|
||||||
|
{"a": "World", "b": 123}
|
||||||
|
{"a": "Hello", "b": 112}
|
||||||
|
{"a": "World", "b": 124}
|
||||||
|
$$)
|
||||||
|
```
|
||||||
|
|
||||||
|
**Result:**
|
||||||
|
|
||||||
|
```text
|
||||||
|
┌───b─┬─a─────┐
|
||||||
|
│ 111 │ Hello │
|
||||||
|
│ 123 │ World │
|
||||||
|
│ 112 │ Hello │
|
||||||
|
│ 124 │ World │
|
||||||
|
└─────┴───────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
**Query:**
|
||||||
|
```sql
|
||||||
|
|
||||||
|
:) desc format(JSONEachRow,
|
||||||
|
$$
|
||||||
|
{"a": "Hello", "b": 111}
|
||||||
|
{"a": "World", "b": 123}
|
||||||
|
{"a": "Hello", "b": 112}
|
||||||
|
{"a": "World", "b": 124}
|
||||||
|
$$)
|
||||||
|
```
|
||||||
|
|
||||||
|
**Result:**
|
||||||
|
|
||||||
|
```text
|
||||||
|
┌─name─┬─type──────────────┬─default_type─┬─default_expression─┬─comment─┬─codec_expression─┬─ttl_expression─┐
|
||||||
|
│ b │ Nullable(Float64) │ │ │ │ │ │
|
||||||
|
│ a │ Nullable(String) │ │ │ │ │ │
|
||||||
|
└──────┴───────────────────┴──────────────┴────────────────────┴─────────┴──────────────────┴────────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
**See Also**
|
||||||
|
|
||||||
|
- [Formats](../../interfaces/formats.md)
|
||||||
|
|
||||||
|
[Original article](https://clickhouse.com/docs/en/sql-reference/table-functions/format) <!--hide-->
|
@ -1 +0,0 @@
|
|||||||
../../../en/sql-reference/table-functions/format.md
|
|
75
docs/zh/sql-reference/table-functions/format.md
Normal file
75
docs/zh/sql-reference/table-functions/format.md
Normal file
@ -0,0 +1,75 @@
|
|||||||
|
---
|
||||||
|
slug: /zh/sql-reference/table-functions/format
|
||||||
|
sidebar_position: 56
|
||||||
|
sidebar_label: format
|
||||||
|
---
|
||||||
|
|
||||||
|
# format
|
||||||
|
|
||||||
|
Extracts table structure from data and parses it according to specified input format.
|
||||||
|
|
||||||
|
**Syntax**
|
||||||
|
|
||||||
|
``` sql
|
||||||
|
format(format_name, data)
|
||||||
|
```
|
||||||
|
|
||||||
|
**Parameters**
|
||||||
|
|
||||||
|
- `format_name` — The [format](../../interfaces/formats.md#formats) of the data.
|
||||||
|
- `data` — String literal or constant expression that returns a string containing data in specified format
|
||||||
|
|
||||||
|
**Returned value**
|
||||||
|
|
||||||
|
A table with data parsed from `data` argument according specified format and extracted schema.
|
||||||
|
|
||||||
|
**Examples**
|
||||||
|
|
||||||
|
**Query:**
|
||||||
|
``` sql
|
||||||
|
:) select * from format(JSONEachRow,
|
||||||
|
$$
|
||||||
|
{"a": "Hello", "b": 111}
|
||||||
|
{"a": "World", "b": 123}
|
||||||
|
{"a": "Hello", "b": 112}
|
||||||
|
{"a": "World", "b": 124}
|
||||||
|
$$)
|
||||||
|
```
|
||||||
|
|
||||||
|
**Result:**
|
||||||
|
|
||||||
|
```text
|
||||||
|
┌───b─┬─a─────┐
|
||||||
|
│ 111 │ Hello │
|
||||||
|
│ 123 │ World │
|
||||||
|
│ 112 │ Hello │
|
||||||
|
│ 124 │ World │
|
||||||
|
└─────┴───────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
**Query:**
|
||||||
|
```sql
|
||||||
|
|
||||||
|
:) desc format(JSONEachRow,
|
||||||
|
$$
|
||||||
|
{"a": "Hello", "b": 111}
|
||||||
|
{"a": "World", "b": 123}
|
||||||
|
{"a": "Hello", "b": 112}
|
||||||
|
{"a": "World", "b": 124}
|
||||||
|
$$)
|
||||||
|
```
|
||||||
|
|
||||||
|
**Result:**
|
||||||
|
|
||||||
|
```text
|
||||||
|
┌─name─┬─type──────────────┬─default_type─┬─default_expression─┬─comment─┬─codec_expression─┬─ttl_expression─┐
|
||||||
|
│ b │ Nullable(Float64) │ │ │ │ │ │
|
||||||
|
│ a │ Nullable(String) │ │ │ │ │ │
|
||||||
|
└──────┴───────────────────┴──────────────┴────────────────────┴─────────┴──────────────────┴────────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
**See Also**
|
||||||
|
|
||||||
|
- [Formats](../../interfaces/formats.md)
|
||||||
|
|
||||||
|
[Original article](https://clickhouse.com/docs/en/sql-reference/table-functions/format) <!--hide-->
|
@ -111,6 +111,8 @@ EOF
|
|||||||
tar -czf "$TARBALL" -C "$OUTPUT_DIR" "$PKG_DIR"
|
tar -czf "$TARBALL" -C "$OUTPUT_DIR" "$PKG_DIR"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
sha512sum "$TARBALL" > "$TARBALL".sha512
|
||||||
|
|
||||||
rm -r "$PKG_PATH"
|
rm -r "$PKG_PATH"
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -58,22 +58,52 @@ namespace ErrorCodes
|
|||||||
class Benchmark : public Poco::Util::Application
|
class Benchmark : public Poco::Util::Application
|
||||||
{
|
{
|
||||||
public:
|
public:
|
||||||
Benchmark(unsigned concurrency_, double delay_,
|
Benchmark(unsigned concurrency_,
|
||||||
Strings && hosts_, Ports && ports_, bool round_robin_,
|
double delay_,
|
||||||
bool cumulative_, bool secure_, const String & default_database_,
|
Strings && hosts_,
|
||||||
const String & user_, const String & password_, const String & quota_key_, const String & stage,
|
Ports && ports_,
|
||||||
bool randomize_, size_t max_iterations_, double max_time_,
|
bool round_robin_,
|
||||||
const String & json_path_, size_t confidence_,
|
bool cumulative_,
|
||||||
const String & query_id_, const String & query_to_execute_, bool continue_on_errors_,
|
bool secure_,
|
||||||
bool reconnect_, bool display_client_side_time_, bool print_stacktrace_, const Settings & settings_)
|
const String & default_database_,
|
||||||
|
const String & user_,
|
||||||
|
const String & password_,
|
||||||
|
const String & quota_key_,
|
||||||
|
const String & stage,
|
||||||
|
bool randomize_,
|
||||||
|
size_t max_iterations_,
|
||||||
|
double max_time_,
|
||||||
|
const String & json_path_,
|
||||||
|
size_t confidence_,
|
||||||
|
const String & query_id_,
|
||||||
|
const String & query_to_execute_,
|
||||||
|
size_t max_consecutive_errors_,
|
||||||
|
bool continue_on_errors_,
|
||||||
|
bool reconnect_,
|
||||||
|
bool display_client_side_time_,
|
||||||
|
bool print_stacktrace_,
|
||||||
|
const Settings & settings_)
|
||||||
:
|
:
|
||||||
round_robin(round_robin_), concurrency(concurrency_), delay(delay_), queue(concurrency), randomize(randomize_),
|
round_robin(round_robin_),
|
||||||
cumulative(cumulative_), max_iterations(max_iterations_), max_time(max_time_),
|
concurrency(concurrency_),
|
||||||
json_path(json_path_), confidence(confidence_), query_id(query_id_),
|
delay(delay_),
|
||||||
query_to_execute(query_to_execute_), continue_on_errors(continue_on_errors_), reconnect(reconnect_),
|
queue(concurrency),
|
||||||
|
randomize(randomize_),
|
||||||
|
cumulative(cumulative_),
|
||||||
|
max_iterations(max_iterations_),
|
||||||
|
max_time(max_time_),
|
||||||
|
json_path(json_path_),
|
||||||
|
confidence(confidence_),
|
||||||
|
query_id(query_id_),
|
||||||
|
query_to_execute(query_to_execute_),
|
||||||
|
continue_on_errors(continue_on_errors_),
|
||||||
|
max_consecutive_errors(max_consecutive_errors_),
|
||||||
|
reconnect(reconnect_),
|
||||||
display_client_side_time(display_client_side_time_),
|
display_client_side_time(display_client_side_time_),
|
||||||
print_stacktrace(print_stacktrace_), settings(settings_),
|
print_stacktrace(print_stacktrace_),
|
||||||
shared_context(Context::createShared()), global_context(Context::createGlobal(shared_context.get())),
|
settings(settings_),
|
||||||
|
shared_context(Context::createShared()),
|
||||||
|
global_context(Context::createGlobal(shared_context.get())),
|
||||||
pool(concurrency)
|
pool(concurrency)
|
||||||
{
|
{
|
||||||
const auto secure = secure_ ? Protocol::Secure::Enable : Protocol::Secure::Disable;
|
const auto secure = secure_ ? Protocol::Secure::Enable : Protocol::Secure::Disable;
|
||||||
@ -166,6 +196,7 @@ private:
|
|||||||
String query_id;
|
String query_id;
|
||||||
String query_to_execute;
|
String query_to_execute;
|
||||||
bool continue_on_errors;
|
bool continue_on_errors;
|
||||||
|
size_t max_consecutive_errors;
|
||||||
bool reconnect;
|
bool reconnect;
|
||||||
bool display_client_side_time;
|
bool display_client_side_time;
|
||||||
bool print_stacktrace;
|
bool print_stacktrace;
|
||||||
@ -174,6 +205,8 @@ private:
|
|||||||
ContextMutablePtr global_context;
|
ContextMutablePtr global_context;
|
||||||
QueryProcessingStage::Enum query_processing_stage;
|
QueryProcessingStage::Enum query_processing_stage;
|
||||||
|
|
||||||
|
std::atomic<size_t> consecutive_errors{0};
|
||||||
|
|
||||||
/// Don't execute new queries after timelimit or SIGINT or exception
|
/// Don't execute new queries after timelimit or SIGINT or exception
|
||||||
std::atomic<bool> shutdown{false};
|
std::atomic<bool> shutdown{false};
|
||||||
|
|
||||||
@ -393,13 +426,14 @@ private:
|
|||||||
try
|
try
|
||||||
{
|
{
|
||||||
execute(connection_entries, query, connection_index);
|
execute(connection_entries, query, connection_index);
|
||||||
|
consecutive_errors = 0;
|
||||||
}
|
}
|
||||||
catch (...)
|
catch (...)
|
||||||
{
|
{
|
||||||
std::lock_guard lock(mutex);
|
std::lock_guard lock(mutex);
|
||||||
std::cerr << "An error occurred while processing the query " << "'" << query << "'"
|
std::cerr << "An error occurred while processing the query " << "'" << query << "'"
|
||||||
<< ": " << getCurrentExceptionMessage(false) << std::endl;
|
<< ": " << getCurrentExceptionMessage(false) << std::endl;
|
||||||
if (!continue_on_errors)
|
if (!(continue_on_errors || max_consecutive_errors > ++consecutive_errors))
|
||||||
{
|
{
|
||||||
shutdown = true;
|
shutdown = true;
|
||||||
throw;
|
throw;
|
||||||
@ -648,6 +682,7 @@ int mainEntryClickHouseBenchmark(int argc, char ** argv)
|
|||||||
("stacktrace", "print stack traces of exceptions")
|
("stacktrace", "print stack traces of exceptions")
|
||||||
("confidence", value<size_t>()->default_value(5), "set the level of confidence for T-test [0=80%, 1=90%, 2=95%, 3=98%, 4=99%, 5=99.5%(default)")
|
("confidence", value<size_t>()->default_value(5), "set the level of confidence for T-test [0=80%, 1=90%, 2=95%, 3=98%, 4=99%, 5=99.5%(default)")
|
||||||
("query_id", value<std::string>()->default_value(""), "")
|
("query_id", value<std::string>()->default_value(""), "")
|
||||||
|
("max-consecutive-errors", value<size_t>()->default_value(0), "set number of allowed consecutive errors")
|
||||||
("continue_on_errors", "continue testing even if a query fails")
|
("continue_on_errors", "continue testing even if a query fails")
|
||||||
("reconnect", "establish new connection for every query")
|
("reconnect", "establish new connection for every query")
|
||||||
("client-side-time", "display the time including network communication instead of server-side time; note that for server versions before 22.8 we always display client-side time")
|
("client-side-time", "display the time including network communication instead of server-side time; note that for server versions before 22.8 we always display client-side time")
|
||||||
@ -702,6 +737,7 @@ int mainEntryClickHouseBenchmark(int argc, char ** argv)
|
|||||||
options["confidence"].as<size_t>(),
|
options["confidence"].as<size_t>(),
|
||||||
options["query_id"].as<std::string>(),
|
options["query_id"].as<std::string>(),
|
||||||
options["query"].as<std::string>(),
|
options["query"].as<std::string>(),
|
||||||
|
options["max-consecutive-errors"].as<size_t>(),
|
||||||
options.count("continue_on_errors"),
|
options.count("continue_on_errors"),
|
||||||
options.count("reconnect"),
|
options.count("reconnect"),
|
||||||
options.count("client-side-time"),
|
options.count("client-side-time"),
|
||||||
|
@ -348,17 +348,9 @@ void Client::connect()
|
|||||||
}
|
}
|
||||||
catch (const Exception & e)
|
catch (const Exception & e)
|
||||||
{
|
{
|
||||||
/// It is typical when users install ClickHouse, type some password and instantly forget it.
|
if (e.code() == DB::ErrorCodes::AUTHENTICATION_FAILED)
|
||||||
/// This problem can't be fixed with reconnection so it is not attempted
|
|
||||||
if ((connection_parameters.user.empty() || connection_parameters.user == "default")
|
|
||||||
&& e.code() == DB::ErrorCodes::AUTHENTICATION_FAILED)
|
|
||||||
{
|
{
|
||||||
std::cerr << std::endl
|
/// This problem can't be fixed with reconnection so it is not attempted
|
||||||
<< "If you have installed ClickHouse and forgot password you can reset it in the configuration file." << std::endl
|
|
||||||
<< "The password for default user is typically located at /etc/clickhouse-server/users.d/default-password.xml" << std::endl
|
|
||||||
<< "and deleting this file will reset the password." << std::endl
|
|
||||||
<< "See also /etc/clickhouse-server/users.xml on the server where ClickHouse is installed." << std::endl
|
|
||||||
<< std::endl;
|
|
||||||
throw;
|
throw;
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
|
@ -670,24 +670,30 @@ TaskStatus ClusterCopier::tryMoveAllPiecesToDestinationTable(const TaskTable & t
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Create node to signal that we finished moving
|
/// Create node to signal that we finished moving
|
||||||
{
|
|
||||||
String state_finished = TaskStateWithOwner::getData(TaskState::Finished, host_id);
|
|
||||||
zookeeper->set(current_partition_attach_is_done, state_finished, 0);
|
|
||||||
/// Also increment a counter of processed partitions
|
/// Also increment a counter of processed partitions
|
||||||
|
{
|
||||||
|
const auto state_finished = TaskStateWithOwner::getData(TaskState::Finished, host_id);
|
||||||
|
const auto task_status = task_zookeeper_path + "/status";
|
||||||
|
|
||||||
|
/// Try until success
|
||||||
while (true)
|
while (true)
|
||||||
{
|
{
|
||||||
Coordination::Stat stat;
|
Coordination::Stat stat;
|
||||||
auto status_json = zookeeper->get(task_zookeeper_path + "/status", &stat);
|
auto status_json = zookeeper->get(task_status, &stat);
|
||||||
auto statuses = StatusAccumulator::fromJSON(status_json);
|
auto statuses = StatusAccumulator::fromJSON(status_json);
|
||||||
|
|
||||||
/// Increment status for table.
|
/// Increment status for table.
|
||||||
auto status_for_table = (*statuses)[task_table.name_in_config];
|
(*statuses)[task_table.name_in_config].processed_partitions_count += 1;
|
||||||
status_for_table.processed_partitions_count += 1;
|
|
||||||
(*statuses)[task_table.name_in_config] = status_for_table;
|
|
||||||
|
|
||||||
auto statuses_to_commit = StatusAccumulator::serializeToJSON(statuses);
|
auto statuses_to_commit = StatusAccumulator::serializeToJSON(statuses);
|
||||||
auto error = zookeeper->trySet(task_zookeeper_path + "/status", statuses_to_commit, stat.version, &stat);
|
|
||||||
if (error == Coordination::Error::ZOK)
|
Coordination::Requests ops;
|
||||||
|
ops.emplace_back(zkutil::makeSetRequest(current_partition_attach_is_done, state_finished, 0));
|
||||||
|
ops.emplace_back(zkutil::makeSetRequest(task_status, statuses_to_commit, stat.version));
|
||||||
|
|
||||||
|
Coordination::Responses responses;
|
||||||
|
Coordination::Error code = zookeeper->tryMulti(ops, responses);
|
||||||
|
|
||||||
|
if (code == Coordination::Error::ZOK)
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1,4 +1,15 @@
|
|||||||
set (CLICKHOUSE_DISKS_SOURCES DisksApp.cpp ICommand.cpp)
|
set (CLICKHOUSE_DISKS_SOURCES
|
||||||
|
DisksApp.cpp
|
||||||
|
ICommand.cpp
|
||||||
|
CommandCopy.cpp
|
||||||
|
CommandLink.cpp
|
||||||
|
CommandList.cpp
|
||||||
|
CommandListDisks.cpp
|
||||||
|
CommandMkDir.cpp
|
||||||
|
CommandMove.cpp
|
||||||
|
CommandRead.cpp
|
||||||
|
CommandRemove.cpp
|
||||||
|
CommandWrite.cpp)
|
||||||
|
|
||||||
set (CLICKHOUSE_DISKS_LINK
|
set (CLICKHOUSE_DISKS_LINK
|
||||||
PRIVATE
|
PRIVATE
|
||||||
|
@ -1,7 +1,6 @@
|
|||||||
#pragma once
|
|
||||||
|
|
||||||
#include "ICommand.h"
|
#include "ICommand.h"
|
||||||
#include <Interpreters/Context.h>
|
#include <Interpreters/Context.h>
|
||||||
|
#include <Common/TerminalSize.h>
|
||||||
|
|
||||||
namespace DB
|
namespace DB
|
||||||
{
|
{
|
||||||
@ -11,7 +10,7 @@ namespace ErrorCodes
|
|||||||
extern const int BAD_ARGUMENTS;
|
extern const int BAD_ARGUMENTS;
|
||||||
}
|
}
|
||||||
|
|
||||||
class CommandCopy : public ICommand
|
class CommandCopy final : public ICommand
|
||||||
{
|
{
|
||||||
public:
|
public:
|
||||||
CommandCopy()
|
CommandCopy()
|
||||||
@ -51,16 +50,16 @@ public:
|
|||||||
String disk_name_from = config.getString("diskFrom", config.getString("disk", "default"));
|
String disk_name_from = config.getString("diskFrom", config.getString("disk", "default"));
|
||||||
String disk_name_to = config.getString("diskTo", config.getString("disk", "default"));
|
String disk_name_to = config.getString("diskTo", config.getString("disk", "default"));
|
||||||
|
|
||||||
String path_from = command_arguments[0];
|
const String & path_from = command_arguments[0];
|
||||||
String path_to = command_arguments[1];
|
const String & path_to = command_arguments[1];
|
||||||
|
|
||||||
DiskPtr disk_from = global_context->getDisk(disk_name_from);
|
DiskPtr disk_from = global_context->getDisk(disk_name_from);
|
||||||
DiskPtr disk_to = global_context->getDisk(disk_name_to);
|
DiskPtr disk_to = global_context->getDisk(disk_name_to);
|
||||||
|
|
||||||
String full_path_from = fullPathWithValidate(disk_from, path_from);
|
String relative_path_from = validatePathAndGetAsRelative(path_from);
|
||||||
String full_path_to = fullPathWithValidate(disk_to, path_to);
|
String relative_path_to = validatePathAndGetAsRelative(path_to);
|
||||||
|
|
||||||
disk_from->copy(full_path_from, disk_to, full_path_to);
|
disk_from->copy(relative_path_from, disk_to, relative_path_to);
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
@ -1,5 +1,3 @@
|
|||||||
#pragma once
|
|
||||||
|
|
||||||
#include "ICommand.h"
|
#include "ICommand.h"
|
||||||
#include <Interpreters/Context.h>
|
#include <Interpreters/Context.h>
|
||||||
|
|
||||||
@ -11,7 +9,7 @@ namespace ErrorCodes
|
|||||||
extern const int BAD_ARGUMENTS;
|
extern const int BAD_ARGUMENTS;
|
||||||
}
|
}
|
||||||
|
|
||||||
class CommandLink : public ICommand
|
class CommandLink final : public ICommand
|
||||||
{
|
{
|
||||||
public:
|
public:
|
||||||
CommandLink()
|
CommandLink()
|
||||||
@ -40,15 +38,15 @@ public:
|
|||||||
|
|
||||||
String disk_name = config.getString("disk", "default");
|
String disk_name = config.getString("disk", "default");
|
||||||
|
|
||||||
String path_from = command_arguments[0];
|
const String & path_from = command_arguments[0];
|
||||||
String path_to = command_arguments[1];
|
const String & path_to = command_arguments[1];
|
||||||
|
|
||||||
DiskPtr disk = global_context->getDisk(disk_name);
|
DiskPtr disk = global_context->getDisk(disk_name);
|
||||||
|
|
||||||
String full_path_from = fullPathWithValidate(disk, path_from);
|
String relative_path_from = validatePathAndGetAsRelative(path_from);
|
||||||
String full_path_to = fullPathWithValidate(disk, path_to);
|
String relative_path_to = validatePathAndGetAsRelative(path_to);
|
||||||
|
|
||||||
disk->createHardLink(full_path_from, full_path_to);
|
disk->createHardLink(relative_path_from, relative_path_to);
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
@ -1,7 +1,6 @@
|
|||||||
#pragma once
|
|
||||||
|
|
||||||
#include "ICommand.h"
|
#include "ICommand.h"
|
||||||
#include <Interpreters/Context.h>
|
#include <Interpreters/Context.h>
|
||||||
|
#include <Common/TerminalSize.h>
|
||||||
|
|
||||||
namespace DB
|
namespace DB
|
||||||
{
|
{
|
||||||
@ -11,7 +10,7 @@ namespace ErrorCodes
|
|||||||
extern const int BAD_ARGUMENTS;
|
extern const int BAD_ARGUMENTS;
|
||||||
}
|
}
|
||||||
|
|
||||||
class CommandList : public ICommand
|
class CommandList final : public ICommand
|
||||||
{
|
{
|
||||||
public:
|
public:
|
||||||
CommandList()
|
CommandList()
|
||||||
@ -46,43 +45,47 @@ public:
|
|||||||
|
|
||||||
String disk_name = config.getString("disk", "default");
|
String disk_name = config.getString("disk", "default");
|
||||||
|
|
||||||
String path = command_arguments[0];
|
const String & path = command_arguments[0];
|
||||||
|
|
||||||
DiskPtr disk = global_context->getDisk(disk_name);
|
DiskPtr disk = global_context->getDisk(disk_name);
|
||||||
|
|
||||||
String full_path = fullPathWithValidate(disk, path);
|
String relative_path = validatePathAndGetAsRelative(path);
|
||||||
|
|
||||||
bool recursive = config.getBool("recursive", false);
|
bool recursive = config.getBool("recursive", false);
|
||||||
|
|
||||||
if (recursive)
|
if (recursive)
|
||||||
listRecursive(disk, full_path);
|
listRecursive(disk, relative_path);
|
||||||
else
|
else
|
||||||
list(disk, full_path);
|
list(disk, relative_path);
|
||||||
}
|
}
|
||||||
|
|
||||||
private:
|
private:
|
||||||
static void list(const DiskPtr & disk, const std::string & full_path)
|
static void list(const DiskPtr & disk, const std::string & relative_path)
|
||||||
{
|
{
|
||||||
std::vector<String> file_names;
|
std::vector<String> file_names;
|
||||||
disk->listFiles(full_path, file_names);
|
disk->listFiles(relative_path, file_names);
|
||||||
|
|
||||||
for (const auto & file_name : file_names)
|
for (const auto & file_name : file_names)
|
||||||
std::cout << file_name << '\n';
|
std::cout << file_name << '\n';
|
||||||
}
|
}
|
||||||
|
|
||||||
static void listRecursive(const DiskPtr & disk, const std::string & full_path)
|
static void listRecursive(const DiskPtr & disk, const std::string & relative_path)
|
||||||
{
|
{
|
||||||
std::vector<String> file_names;
|
std::vector<String> file_names;
|
||||||
disk->listFiles(full_path, file_names);
|
disk->listFiles(relative_path, file_names);
|
||||||
|
|
||||||
std::cout << full_path << ":\n";
|
std::cout << relative_path << ":\n";
|
||||||
|
|
||||||
|
if (!file_names.empty())
|
||||||
|
{
|
||||||
for (const auto & file_name : file_names)
|
for (const auto & file_name : file_names)
|
||||||
std::cout << file_name << '\n';
|
std::cout << file_name << '\n';
|
||||||
std::cout << "\n";
|
std::cout << "\n";
|
||||||
|
}
|
||||||
|
|
||||||
for (const auto & file_name : file_names)
|
for (const auto & file_name : file_names)
|
||||||
{
|
{
|
||||||
auto path = full_path + "/" + file_name;
|
auto path = relative_path + "/" + file_name;
|
||||||
if (disk->isDirectory(path))
|
if (disk->isDirectory(path))
|
||||||
listRecursive(disk, path);
|
listRecursive(disk, path);
|
||||||
}
|
}
|
||||||
|
@ -1,5 +1,3 @@
|
|||||||
#pragma once
|
|
||||||
|
|
||||||
#include "ICommand.h"
|
#include "ICommand.h"
|
||||||
#include <Interpreters/Context.h>
|
#include <Interpreters/Context.h>
|
||||||
|
|
||||||
@ -11,7 +9,7 @@ namespace ErrorCodes
|
|||||||
extern const int BAD_ARGUMENTS;
|
extern const int BAD_ARGUMENTS;
|
||||||
}
|
}
|
||||||
|
|
||||||
class CommandListDisks : public ICommand
|
class CommandListDisks final : public ICommand
|
||||||
{
|
{
|
||||||
public:
|
public:
|
||||||
CommandListDisks()
|
CommandListDisks()
|
||||||
|
@ -1,7 +1,7 @@
|
|||||||
#pragma once
|
|
||||||
|
|
||||||
#include "ICommand.h"
|
#include "ICommand.h"
|
||||||
|
|
||||||
#include <Interpreters/Context.h>
|
#include <Interpreters/Context.h>
|
||||||
|
#include <Common/TerminalSize.h>
|
||||||
|
|
||||||
namespace DB
|
namespace DB
|
||||||
{
|
{
|
||||||
@ -11,7 +11,7 @@ namespace ErrorCodes
|
|||||||
extern const int BAD_ARGUMENTS;
|
extern const int BAD_ARGUMENTS;
|
||||||
}
|
}
|
||||||
|
|
||||||
class CommandMkDir : public ICommand
|
class CommandMkDir final : public ICommand
|
||||||
{
|
{
|
||||||
public:
|
public:
|
||||||
CommandMkDir()
|
CommandMkDir()
|
||||||
@ -46,17 +46,17 @@ public:
|
|||||||
|
|
||||||
String disk_name = config.getString("disk", "default");
|
String disk_name = config.getString("disk", "default");
|
||||||
|
|
||||||
String path = command_arguments[0];
|
const String & path = command_arguments[0];
|
||||||
|
|
||||||
DiskPtr disk = global_context->getDisk(disk_name);
|
DiskPtr disk = global_context->getDisk(disk_name);
|
||||||
|
|
||||||
String full_path = fullPathWithValidate(disk, path);
|
String relative_path = validatePathAndGetAsRelative(path);
|
||||||
bool recursive = config.getBool("recursive", false);
|
bool recursive = config.getBool("recursive", false);
|
||||||
|
|
||||||
if (recursive)
|
if (recursive)
|
||||||
disk->createDirectories(full_path);
|
disk->createDirectories(relative_path);
|
||||||
else
|
else
|
||||||
disk->createDirectory(full_path);
|
disk->createDirectory(relative_path);
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
@ -1,5 +1,3 @@
|
|||||||
#pragma once
|
|
||||||
|
|
||||||
#include "ICommand.h"
|
#include "ICommand.h"
|
||||||
#include <Interpreters/Context.h>
|
#include <Interpreters/Context.h>
|
||||||
|
|
||||||
@ -11,7 +9,7 @@ namespace ErrorCodes
|
|||||||
extern const int BAD_ARGUMENTS;
|
extern const int BAD_ARGUMENTS;
|
||||||
}
|
}
|
||||||
|
|
||||||
class CommandMove : public ICommand
|
class CommandMove final : public ICommand
|
||||||
{
|
{
|
||||||
public:
|
public:
|
||||||
CommandMove()
|
CommandMove()
|
||||||
@ -39,18 +37,18 @@ public:
|
|||||||
|
|
||||||
String disk_name = config.getString("disk", "default");
|
String disk_name = config.getString("disk", "default");
|
||||||
|
|
||||||
String path_from = command_arguments[0];
|
const String & path_from = command_arguments[0];
|
||||||
String path_to = command_arguments[1];
|
const String & path_to = command_arguments[1];
|
||||||
|
|
||||||
DiskPtr disk = global_context->getDisk(disk_name);
|
DiskPtr disk = global_context->getDisk(disk_name);
|
||||||
|
|
||||||
String full_path_from = fullPathWithValidate(disk, path_from);
|
String relative_path_from = validatePathAndGetAsRelative(path_from);
|
||||||
String full_path_to = fullPathWithValidate(disk, path_to);
|
String relative_path_to = validatePathAndGetAsRelative(path_to);
|
||||||
|
|
||||||
if (disk->isFile(full_path_from))
|
if (disk->isFile(relative_path_from))
|
||||||
disk->moveFile(full_path_from, full_path_to);
|
disk->moveFile(relative_path_from, relative_path_to);
|
||||||
else
|
else
|
||||||
disk->moveDirectory(full_path_from, full_path_to);
|
disk->moveDirectory(relative_path_from, relative_path_to);
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
@ -1,7 +1,9 @@
|
|||||||
#pragma once
|
|
||||||
|
|
||||||
#include "ICommand.h"
|
#include "ICommand.h"
|
||||||
#include <Interpreters/Context.h>
|
#include <Interpreters/Context.h>
|
||||||
|
#include <IO/ReadBufferFromFile.h>
|
||||||
|
#include <IO/WriteBufferFromFile.h>
|
||||||
|
#include <IO/copyData.h>
|
||||||
|
#include <Common/TerminalSize.h>
|
||||||
|
|
||||||
namespace DB
|
namespace DB
|
||||||
{
|
{
|
||||||
@ -11,7 +13,7 @@ namespace ErrorCodes
|
|||||||
extern const int BAD_ARGUMENTS;
|
extern const int BAD_ARGUMENTS;
|
||||||
}
|
}
|
||||||
|
|
||||||
class CommandRead : public ICommand
|
class CommandRead final : public ICommand
|
||||||
{
|
{
|
||||||
public:
|
public:
|
||||||
CommandRead()
|
CommandRead()
|
||||||
@ -46,27 +48,25 @@ public:
|
|||||||
|
|
||||||
String disk_name = config.getString("disk", "default");
|
String disk_name = config.getString("disk", "default");
|
||||||
|
|
||||||
String path = command_arguments[0];
|
|
||||||
|
|
||||||
DiskPtr disk = global_context->getDisk(disk_name);
|
DiskPtr disk = global_context->getDisk(disk_name);
|
||||||
|
|
||||||
String full_path = fullPathWithValidate(disk, path);
|
String relative_path = validatePathAndGetAsRelative(command_arguments[0]);
|
||||||
|
|
||||||
String path_output = config.getString("output", "");
|
String path_output = config.getString("output", "");
|
||||||
|
|
||||||
if (!path_output.empty())
|
if (!path_output.empty())
|
||||||
{
|
{
|
||||||
String full_path_output = fullPathWithValidate(disk, path_output);
|
String relative_path_output = validatePathAndGetAsRelative(path_output);
|
||||||
|
|
||||||
auto in = disk->readFile(full_path);
|
auto in = disk->readFile(relative_path);
|
||||||
auto out = disk->writeFile(full_path_output);
|
auto out = disk->writeFile(relative_path_output);
|
||||||
copyData(*in, *out);
|
copyData(*in, *out);
|
||||||
out->finalize();
|
out->finalize();
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
{
|
{
|
||||||
auto in = disk->readFile(full_path);
|
auto in = disk->readFile(relative_path);
|
||||||
std::unique_ptr<WriteBufferFromFileBase> out = std::make_unique<WriteBufferFromFileDescriptor>(STDOUT_FILENO);
|
std::unique_ptr<WriteBufferFromFileBase> out = std::make_unique<WriteBufferFromFileDescriptor>(STDOUT_FILENO);
|
||||||
copyData(*in, *out);
|
copyData(*in, *out);
|
||||||
}
|
}
|
||||||
|
@ -1,5 +1,3 @@
|
|||||||
#pragma once
|
|
||||||
|
|
||||||
#include "ICommand.h"
|
#include "ICommand.h"
|
||||||
#include <Interpreters/Context.h>
|
#include <Interpreters/Context.h>
|
||||||
|
|
||||||
@ -11,7 +9,7 @@ namespace ErrorCodes
|
|||||||
extern const int BAD_ARGUMENTS;
|
extern const int BAD_ARGUMENTS;
|
||||||
}
|
}
|
||||||
|
|
||||||
class CommandRemove : public ICommand
|
class CommandRemove final : public ICommand
|
||||||
{
|
{
|
||||||
public:
|
public:
|
||||||
CommandRemove()
|
CommandRemove()
|
||||||
@ -39,13 +37,13 @@ public:
|
|||||||
|
|
||||||
String disk_name = config.getString("disk", "default");
|
String disk_name = config.getString("disk", "default");
|
||||||
|
|
||||||
String path = command_arguments[0];
|
const String & path = command_arguments[0];
|
||||||
|
|
||||||
DiskPtr disk = global_context->getDisk(disk_name);
|
DiskPtr disk = global_context->getDisk(disk_name);
|
||||||
|
|
||||||
String full_path = fullPathWithValidate(disk, path);
|
String relative_path = validatePathAndGetAsRelative(path);
|
||||||
|
|
||||||
disk->removeRecursive(full_path);
|
disk->removeRecursive(relative_path);
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
@ -1,8 +1,11 @@
|
|||||||
#pragma once
|
|
||||||
|
|
||||||
#include "ICommand.h"
|
#include "ICommand.h"
|
||||||
#include <Interpreters/Context.h>
|
#include <Interpreters/Context.h>
|
||||||
|
|
||||||
|
#include <Common/TerminalSize.h>
|
||||||
|
#include <IO/ReadBufferFromFile.h>
|
||||||
|
#include <IO/WriteBufferFromFile.h>
|
||||||
|
#include <IO/copyData.h>
|
||||||
|
|
||||||
namespace DB
|
namespace DB
|
||||||
{
|
{
|
||||||
|
|
||||||
@ -11,7 +14,7 @@ namespace ErrorCodes
|
|||||||
extern const int BAD_ARGUMENTS;
|
extern const int BAD_ARGUMENTS;
|
||||||
}
|
}
|
||||||
|
|
||||||
class CommandWrite : public ICommand
|
class CommandWrite final : public ICommand
|
||||||
{
|
{
|
||||||
public:
|
public:
|
||||||
CommandWrite()
|
CommandWrite()
|
||||||
@ -46,11 +49,11 @@ public:
|
|||||||
|
|
||||||
String disk_name = config.getString("disk", "default");
|
String disk_name = config.getString("disk", "default");
|
||||||
|
|
||||||
String path = command_arguments[0];
|
const String & path = command_arguments[0];
|
||||||
|
|
||||||
DiskPtr disk = global_context->getDisk(disk_name);
|
DiskPtr disk = global_context->getDisk(disk_name);
|
||||||
|
|
||||||
String full_path = fullPathWithValidate(disk, path);
|
String relative_path = validatePathAndGetAsRelative(path);
|
||||||
|
|
||||||
String path_input = config.getString("input", "");
|
String path_input = config.getString("input", "");
|
||||||
std::unique_ptr<ReadBufferFromFileBase> in;
|
std::unique_ptr<ReadBufferFromFileBase> in;
|
||||||
@ -60,11 +63,11 @@ public:
|
|||||||
}
|
}
|
||||||
else
|
else
|
||||||
{
|
{
|
||||||
String full_path_input = fullPathWithValidate(disk, path_input);
|
String relative_path_input = validatePathAndGetAsRelative(path_input);
|
||||||
in = disk->readFile(full_path_input);
|
in = disk->readFile(relative_path_input);
|
||||||
}
|
}
|
||||||
|
|
||||||
auto out = disk->writeFile(full_path);
|
auto out = disk->writeFile(relative_path);
|
||||||
copyData(*in, *out);
|
copyData(*in, *out);
|
||||||
out->finalize();
|
out->finalize();
|
||||||
}
|
}
|
||||||
|
@ -1,11 +1,12 @@
|
|||||||
#include "DisksApp.h"
|
#include "DisksApp.h"
|
||||||
|
#include "ICommand.h"
|
||||||
|
|
||||||
#include <Disks/registerDisks.h>
|
#include <Disks/registerDisks.h>
|
||||||
|
|
||||||
#include <base/argsToConfig.h>
|
#include <Common/TerminalSize.h>
|
||||||
|
|
||||||
#include <Formats/registerFormats.h>
|
#include <Formats/registerFormats.h>
|
||||||
|
|
||||||
|
|
||||||
namespace DB
|
namespace DB
|
||||||
{
|
{
|
||||||
|
|
||||||
|
@ -1,28 +1,22 @@
|
|||||||
#pragma once
|
#pragma once
|
||||||
|
|
||||||
#include "CommandCopy.cpp"
|
|
||||||
#include "CommandLink.cpp"
|
|
||||||
#include "CommandList.cpp"
|
|
||||||
#include "CommandListDisks.cpp"
|
|
||||||
#include "CommandMkDir.cpp"
|
|
||||||
#include "CommandMove.cpp"
|
|
||||||
#include "CommandRead.cpp"
|
|
||||||
#include "CommandRemove.cpp"
|
|
||||||
#include "CommandWrite.cpp"
|
|
||||||
|
|
||||||
#include <Loggers/Loggers.h>
|
#include <Loggers/Loggers.h>
|
||||||
|
|
||||||
#include <Common/ProgressIndication.h>
|
|
||||||
#include <Common/StatusFile.h>
|
|
||||||
#include <Common/InterruptListener.h>
|
|
||||||
#include <Core/Settings.h>
|
|
||||||
#include <Interpreters/Context.h>
|
#include <Interpreters/Context.h>
|
||||||
|
#include <Poco/Util/Application.h>
|
||||||
|
|
||||||
|
#include <boost/program_options.hpp>
|
||||||
|
|
||||||
namespace DB
|
namespace DB
|
||||||
{
|
{
|
||||||
|
|
||||||
|
class ICommand;
|
||||||
using CommandPtr = std::unique_ptr<ICommand>;
|
using CommandPtr = std::unique_ptr<ICommand>;
|
||||||
|
|
||||||
|
namespace po = boost::program_options;
|
||||||
|
using ProgramOptionsDescription = boost::program_options::options_description;
|
||||||
|
using CommandLineOptions = boost::program_options::variables_map;
|
||||||
|
|
||||||
class DisksApp : public Poco::Util::Application, public Loggers
|
class DisksApp : public Poco::Util::Application, public Loggers
|
||||||
{
|
{
|
||||||
public:
|
public:
|
||||||
|
@ -30,19 +30,21 @@ void ICommand::addOptions(ProgramOptionsDescription & options_description)
|
|||||||
options_description.add(*command_option_description);
|
options_description.add(*command_option_description);
|
||||||
}
|
}
|
||||||
|
|
||||||
String ICommand::fullPathWithValidate(const DiskPtr & disk, const String & path)
|
String ICommand::validatePathAndGetAsRelative(const String & path)
|
||||||
{
|
{
|
||||||
if (fs::path(path).lexically_normal().string() != path)
|
/// If path contain non-normalized symbols like . we will normalized them. If the resulting normalized path
|
||||||
|
/// still contain '..' it can be dangerous, disallow such paths. Also since clickhouse-disks
|
||||||
|
/// is not an interactive program (don't track you current path) it's OK to disallow .. paths.
|
||||||
|
String lexically_normal_path = fs::path(path).lexically_normal();
|
||||||
|
if (lexically_normal_path.find("..") != std::string::npos)
|
||||||
throw DB::Exception(DB::ErrorCodes::BAD_ARGUMENTS, "Path {} is not normalized", path);
|
throw DB::Exception(DB::ErrorCodes::BAD_ARGUMENTS, "Path {} is not normalized", path);
|
||||||
|
|
||||||
String disk_path = fs::canonical(fs::path(disk->getPath())) / "";
|
/// If path is absolute we should keep it as relative inside disk, so disk will look like
|
||||||
String full_path = (fs::absolute(disk_path) / path).lexically_normal();
|
/// an ordinary filesystem with root.
|
||||||
|
if (fs::path(lexically_normal_path).is_absolute())
|
||||||
|
return lexically_normal_path.substr(1);
|
||||||
|
|
||||||
if (!full_path.starts_with(disk_path))
|
return lexically_normal_path;
|
||||||
throw DB::Exception(
|
|
||||||
DB::ErrorCodes::BAD_ARGUMENTS, "Path {} must be inside disk path {}", full_path, disk_path);
|
|
||||||
|
|
||||||
return path;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
@ -2,16 +2,10 @@
|
|||||||
|
|
||||||
#include <Disks/IDisk.h>
|
#include <Disks/IDisk.h>
|
||||||
|
|
||||||
#include <Poco/Util/Application.h>
|
|
||||||
|
|
||||||
#include <IO/WriteBufferFromFileDescriptor.h>
|
|
||||||
#include <IO/ReadBufferFromFileDescriptor.h>
|
|
||||||
#include <IO/copyData.h>
|
|
||||||
|
|
||||||
#include <boost/program_options.hpp>
|
#include <boost/program_options.hpp>
|
||||||
|
|
||||||
#include <Common/TerminalSize.h>
|
|
||||||
#include <Common/Config/ConfigProcessor.h>
|
#include <Common/Config/ConfigProcessor.h>
|
||||||
|
#include <Poco/Util/Application.h>
|
||||||
|
|
||||||
#include <memory>
|
#include <memory>
|
||||||
|
|
||||||
@ -43,7 +37,7 @@ public:
|
|||||||
protected:
|
protected:
|
||||||
void printHelpMessage() const;
|
void printHelpMessage() const;
|
||||||
|
|
||||||
static String fullPathWithValidate(const DiskPtr & disk, const String & path);
|
static String validatePathAndGetAsRelative(const String & path);
|
||||||
|
|
||||||
public:
|
public:
|
||||||
String command_name;
|
String command_name;
|
||||||
@ -55,14 +49,16 @@ protected:
|
|||||||
po::positional_options_description positional_options_description;
|
po::positional_options_description positional_options_description;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
using CommandPtr = std::unique_ptr<ICommand>;
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
std::unique_ptr <DB::ICommand> makeCommandCopy();
|
DB::CommandPtr makeCommandCopy();
|
||||||
std::unique_ptr <DB::ICommand> makeCommandLink();
|
DB::CommandPtr makeCommandLink();
|
||||||
std::unique_ptr <DB::ICommand> makeCommandList();
|
DB::CommandPtr makeCommandList();
|
||||||
std::unique_ptr <DB::ICommand> makeCommandListDisks();
|
DB::CommandPtr makeCommandListDisks();
|
||||||
std::unique_ptr <DB::ICommand> makeCommandMove();
|
DB::CommandPtr makeCommandMove();
|
||||||
std::unique_ptr <DB::ICommand> makeCommandRead();
|
DB::CommandPtr makeCommandRead();
|
||||||
std::unique_ptr <DB::ICommand> makeCommandRemove();
|
DB::CommandPtr makeCommandRemove();
|
||||||
std::unique_ptr <DB::ICommand> makeCommandWrite();
|
DB::CommandPtr makeCommandWrite();
|
||||||
std::unique_ptr <DB::ICommand> makeCommandMkDir();
|
DB::CommandPtr makeCommandMkDir();
|
||||||
|
@ -37,7 +37,7 @@
|
|||||||
#include <AggregateFunctions/registerAggregateFunctions.h>
|
#include <AggregateFunctions/registerAggregateFunctions.h>
|
||||||
#include <TableFunctions/registerTableFunctions.h>
|
#include <TableFunctions/registerTableFunctions.h>
|
||||||
#include <Storages/registerStorages.h>
|
#include <Storages/registerStorages.h>
|
||||||
#include <Storages/NamedCollections.h>
|
#include <Storages/NamedCollectionUtils.h>
|
||||||
#include <Dictionaries/registerDictionaries.h>
|
#include <Dictionaries/registerDictionaries.h>
|
||||||
#include <Disks/registerDisks.h>
|
#include <Disks/registerDisks.h>
|
||||||
#include <Formats/registerFormats.h>
|
#include <Formats/registerFormats.h>
|
||||||
@ -120,7 +120,7 @@ void LocalServer::initialize(Poco::Util::Application & self)
|
|||||||
config().getUInt("max_io_thread_pool_free_size", 0),
|
config().getUInt("max_io_thread_pool_free_size", 0),
|
||||||
config().getUInt("io_thread_pool_queue_size", 10000));
|
config().getUInt("io_thread_pool_queue_size", 10000));
|
||||||
|
|
||||||
NamedCollectionFactory::instance().initialize(config());
|
NamedCollectionUtils::loadFromConfig(config());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@ -212,6 +212,8 @@ void LocalServer::tryInitPath()
|
|||||||
|
|
||||||
global_context->setUserFilesPath(""); // user's files are everywhere
|
global_context->setUserFilesPath(""); // user's files are everywhere
|
||||||
|
|
||||||
|
NamedCollectionUtils::loadFromSQL(global_context);
|
||||||
|
|
||||||
/// top_level_domains_lists
|
/// top_level_domains_lists
|
||||||
const std::string & top_level_domains_path = config().getString("top_level_domains_path", path + "top_level_domains/");
|
const std::string & top_level_domains_path = config().getString("top_level_domains_path", path + "top_level_domains/");
|
||||||
if (!top_level_domains_path.empty())
|
if (!top_level_domains_path.empty())
|
||||||
|
@ -60,7 +60,7 @@
|
|||||||
#include <Storages/System/attachInformationSchemaTables.h>
|
#include <Storages/System/attachInformationSchemaTables.h>
|
||||||
#include <Storages/Cache/ExternalDataSourceCache.h>
|
#include <Storages/Cache/ExternalDataSourceCache.h>
|
||||||
#include <Storages/Cache/registerRemoteFileMetadatas.h>
|
#include <Storages/Cache/registerRemoteFileMetadatas.h>
|
||||||
#include <Storages/NamedCollections.h>
|
#include <Storages/NamedCollectionUtils.h>
|
||||||
#include <AggregateFunctions/registerAggregateFunctions.h>
|
#include <AggregateFunctions/registerAggregateFunctions.h>
|
||||||
#include <Functions/UserDefined/IUserDefinedSQLObjectsLoader.h>
|
#include <Functions/UserDefined/IUserDefinedSQLObjectsLoader.h>
|
||||||
#include <Functions/registerFunctions.h>
|
#include <Functions/registerFunctions.h>
|
||||||
@ -782,7 +782,7 @@ try
|
|||||||
config().getUInt("max_io_thread_pool_free_size", 0),
|
config().getUInt("max_io_thread_pool_free_size", 0),
|
||||||
config().getUInt("io_thread_pool_queue_size", 10000));
|
config().getUInt("io_thread_pool_queue_size", 10000));
|
||||||
|
|
||||||
NamedCollectionFactory::instance().initialize(config());
|
NamedCollectionUtils::loadFromConfig(config());
|
||||||
|
|
||||||
/// Initialize global local cache for remote filesystem.
|
/// Initialize global local cache for remote filesystem.
|
||||||
if (config().has("local_cache_for_remote_fs"))
|
if (config().has("local_cache_for_remote_fs"))
|
||||||
@ -1168,6 +1168,8 @@ try
|
|||||||
SensitiveDataMasker::setInstance(std::make_unique<SensitiveDataMasker>(config(), "query_masking_rules"));
|
SensitiveDataMasker::setInstance(std::make_unique<SensitiveDataMasker>(config(), "query_masking_rules"));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
NamedCollectionUtils::loadFromSQL(global_context);
|
||||||
|
|
||||||
auto main_config_reloader = std::make_unique<ConfigReloader>(
|
auto main_config_reloader = std::make_unique<ConfigReloader>(
|
||||||
config_path,
|
config_path,
|
||||||
include_from_path,
|
include_from_path,
|
||||||
@ -1336,7 +1338,8 @@ try
|
|||||||
#if USE_SSL
|
#if USE_SSL
|
||||||
CertificateReloader::instance().tryLoad(*config);
|
CertificateReloader::instance().tryLoad(*config);
|
||||||
#endif
|
#endif
|
||||||
NamedCollectionFactory::instance().reload(*config);
|
NamedCollectionUtils::reloadFromConfig(*config);
|
||||||
|
|
||||||
ProfileEvents::increment(ProfileEvents::MainConfigLoads);
|
ProfileEvents::increment(ProfileEvents::MainConfigLoads);
|
||||||
|
|
||||||
/// Must be the last.
|
/// Must be the last.
|
||||||
|
@ -16,11 +16,13 @@
|
|||||||
#include <Access/ExternalAuthenticators.h>
|
#include <Access/ExternalAuthenticators.h>
|
||||||
#include <Access/AccessChangesNotifier.h>
|
#include <Access/AccessChangesNotifier.h>
|
||||||
#include <Access/AccessBackup.h>
|
#include <Access/AccessBackup.h>
|
||||||
|
#include <Access/resolveSetting.h>
|
||||||
#include <Backups/BackupEntriesCollector.h>
|
#include <Backups/BackupEntriesCollector.h>
|
||||||
#include <Backups/RestorerFromBackup.h>
|
#include <Backups/RestorerFromBackup.h>
|
||||||
#include <Core/Settings.h>
|
#include <Core/Settings.h>
|
||||||
|
#include <Storages/MergeTree/MergeTreeSettings.h>
|
||||||
#include <base/defines.h>
|
#include <base/defines.h>
|
||||||
#include <base/find_symbols.h>
|
#include <IO/Operators.h>
|
||||||
#include <Poco/AccessExpireCache.h>
|
#include <Poco/AccessExpireCache.h>
|
||||||
#include <boost/algorithm/string/join.hpp>
|
#include <boost/algorithm/string/join.hpp>
|
||||||
#include <boost/algorithm/string/split.hpp>
|
#include <boost/algorithm/string/split.hpp>
|
||||||
@ -38,7 +40,6 @@ namespace ErrorCodes
|
|||||||
extern const int AUTHENTICATION_FAILED;
|
extern const int AUTHENTICATION_FAILED;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
namespace
|
namespace
|
||||||
{
|
{
|
||||||
void checkForUsersNotInMainConfig(
|
void checkForUsersNotInMainConfig(
|
||||||
@ -103,7 +104,7 @@ public:
|
|||||||
|
|
||||||
bool isSettingNameAllowed(std::string_view setting_name) const
|
bool isSettingNameAllowed(std::string_view setting_name) const
|
||||||
{
|
{
|
||||||
if (Settings::hasBuiltin(setting_name))
|
if (settingIsBuiltin(setting_name))
|
||||||
return true;
|
return true;
|
||||||
|
|
||||||
std::lock_guard lock{mutex};
|
std::lock_guard lock{mutex};
|
||||||
@ -454,9 +455,21 @@ UUID AccessControl::authenticate(const Credentials & credentials, const Poco::Ne
|
|||||||
{
|
{
|
||||||
tryLogCurrentException(getLogger(), "from: " + address.toString() + ", user: " + credentials.getUserName() + ": Authentication failed");
|
tryLogCurrentException(getLogger(), "from: " + address.toString() + ", user: " + credentials.getUserName() + ": Authentication failed");
|
||||||
|
|
||||||
|
WriteBufferFromOwnString message;
|
||||||
|
message << credentials.getUserName() << ": Authentication failed: password is incorrect, or there is no user with such name.";
|
||||||
|
|
||||||
|
/// Better exception message for usability.
|
||||||
|
/// It is typical when users install ClickHouse, type some password and instantly forget it.
|
||||||
|
if (credentials.getUserName().empty() || credentials.getUserName() == "default")
|
||||||
|
message << "\n\n"
|
||||||
|
<< "If you have installed ClickHouse and forgot password you can reset it in the configuration file.\n"
|
||||||
|
<< "The password for default user is typically located at /etc/clickhouse-server/users.d/default-password.xml\n"
|
||||||
|
<< "and deleting this file will reset the password.\n"
|
||||||
|
<< "See also /etc/clickhouse-server/users.xml on the server where ClickHouse is installed.\n\n";
|
||||||
|
|
||||||
/// We use the same message for all authentication failures because we don't want to give away any unnecessary information for security reasons,
|
/// We use the same message for all authentication failures because we don't want to give away any unnecessary information for security reasons,
|
||||||
/// only the log will show the exact reason.
|
/// only the log will show the exact reason.
|
||||||
throw Exception(credentials.getUserName() + ": Authentication failed: password is incorrect or there is no user with such name", ErrorCodes::AUTHENTICATION_FAILED);
|
throw Exception(message.str(), ErrorCodes::AUTHENTICATION_FAILED);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -104,7 +104,7 @@ public:
|
|||||||
/// The same as allColumnFlags().
|
/// The same as allColumnFlags().
|
||||||
static AccessFlags allFlagsGrantableOnColumnLevel();
|
static AccessFlags allFlagsGrantableOnColumnLevel();
|
||||||
|
|
||||||
static constexpr size_t SIZE = 128;
|
static constexpr size_t SIZE = 256;
|
||||||
private:
|
private:
|
||||||
using Flags = std::bitset<SIZE>;
|
using Flags = std::bitset<SIZE>;
|
||||||
Flags flags;
|
Flags flags;
|
||||||
|
@ -69,6 +69,7 @@ enum class AccessType
|
|||||||
M(ALTER_FREEZE_PARTITION, "FREEZE PARTITION, UNFREEZE", TABLE, ALTER_TABLE) \
|
M(ALTER_FREEZE_PARTITION, "FREEZE PARTITION, UNFREEZE", TABLE, ALTER_TABLE) \
|
||||||
\
|
\
|
||||||
M(ALTER_DATABASE_SETTINGS, "ALTER DATABASE SETTING, ALTER MODIFY DATABASE SETTING, MODIFY DATABASE SETTING", DATABASE, ALTER_DATABASE) /* allows to execute ALTER MODIFY SETTING */\
|
M(ALTER_DATABASE_SETTINGS, "ALTER DATABASE SETTING, ALTER MODIFY DATABASE SETTING, MODIFY DATABASE SETTING", DATABASE, ALTER_DATABASE) /* allows to execute ALTER MODIFY SETTING */\
|
||||||
|
M(ALTER_NAMED_COLLECTION, "", GROUP, ALTER) /* allows to execute ALTER NAMED COLLECTION */\
|
||||||
\
|
\
|
||||||
M(ALTER_TABLE, "", GROUP, ALTER) \
|
M(ALTER_TABLE, "", GROUP, ALTER) \
|
||||||
M(ALTER_DATABASE, "", GROUP, ALTER) \
|
M(ALTER_DATABASE, "", GROUP, ALTER) \
|
||||||
@ -88,6 +89,7 @@ enum class AccessType
|
|||||||
M(CREATE_TEMPORARY_TABLE, "", GLOBAL, CREATE) /* allows to create and manipulate temporary tables;
|
M(CREATE_TEMPORARY_TABLE, "", GLOBAL, CREATE) /* allows to create and manipulate temporary tables;
|
||||||
implicitly enabled by the grant CREATE_TABLE on any table */ \
|
implicitly enabled by the grant CREATE_TABLE on any table */ \
|
||||||
M(CREATE_FUNCTION, "", GLOBAL, CREATE) /* allows to execute CREATE FUNCTION */ \
|
M(CREATE_FUNCTION, "", GLOBAL, CREATE) /* allows to execute CREATE FUNCTION */ \
|
||||||
|
M(CREATE_NAMED_COLLECTION, "", GLOBAL, CREATE) /* allows to execute CREATE NAMED COLLECTION */ \
|
||||||
M(CREATE, "", GROUP, ALL) /* allows to execute {CREATE|ATTACH} */ \
|
M(CREATE, "", GROUP, ALL) /* allows to execute {CREATE|ATTACH} */ \
|
||||||
\
|
\
|
||||||
M(DROP_DATABASE, "", DATABASE, DROP) /* allows to execute {DROP|DETACH} DATABASE */\
|
M(DROP_DATABASE, "", DATABASE, DROP) /* allows to execute {DROP|DETACH} DATABASE */\
|
||||||
@ -96,6 +98,7 @@ enum class AccessType
|
|||||||
implicitly enabled by the grant DROP_TABLE */\
|
implicitly enabled by the grant DROP_TABLE */\
|
||||||
M(DROP_DICTIONARY, "", DICTIONARY, DROP) /* allows to execute {DROP|DETACH} DICTIONARY */\
|
M(DROP_DICTIONARY, "", DICTIONARY, DROP) /* allows to execute {DROP|DETACH} DICTIONARY */\
|
||||||
M(DROP_FUNCTION, "", GLOBAL, DROP) /* allows to execute DROP FUNCTION */\
|
M(DROP_FUNCTION, "", GLOBAL, DROP) /* allows to execute DROP FUNCTION */\
|
||||||
|
M(DROP_NAMED_COLLECTION, "", GLOBAL, DROP) /* allows to execute DROP NAMED COLLECTION */\
|
||||||
M(DROP, "", GROUP, ALL) /* allows to execute {DROP|DETACH} */\
|
M(DROP, "", GROUP, ALL) /* allows to execute {DROP|DETACH} */\
|
||||||
\
|
\
|
||||||
M(TRUNCATE, "TRUNCATE TABLE", TABLE, ALL) \
|
M(TRUNCATE, "TRUNCATE TABLE", TABLE, ALL) \
|
||||||
|
@ -174,7 +174,6 @@ private:
|
|||||||
void initialize();
|
void initialize();
|
||||||
void setUser(const UserPtr & user_) const;
|
void setUser(const UserPtr & user_) const;
|
||||||
void setRolesInfo(const std::shared_ptr<const EnabledRolesInfo> & roles_info_) const;
|
void setRolesInfo(const std::shared_ptr<const EnabledRolesInfo> & roles_info_) const;
|
||||||
void setSettingsAndConstraints() const;
|
|
||||||
void calculateAccessRights() const;
|
void calculateAccessRights() const;
|
||||||
|
|
||||||
template <bool throw_if_denied, bool grant_option>
|
template <bool throw_if_denied, bool grant_option>
|
||||||
|
@ -1,13 +1,15 @@
|
|||||||
|
#include <string_view>
|
||||||
#include <Access/SettingsConstraints.h>
|
#include <Access/SettingsConstraints.h>
|
||||||
|
#include <Access/resolveSetting.h>
|
||||||
#include <Access/AccessControl.h>
|
#include <Access/AccessControl.h>
|
||||||
#include <Core/Settings.h>
|
#include <Core/Settings.h>
|
||||||
|
#include <Storages/MergeTree/MergeTreeSettings.h>
|
||||||
#include <Common/FieldVisitorToString.h>
|
#include <Common/FieldVisitorToString.h>
|
||||||
#include <Common/FieldVisitorsAccurateComparison.h>
|
#include <Common/FieldVisitorsAccurateComparison.h>
|
||||||
#include <IO/WriteHelpers.h>
|
#include <IO/WriteHelpers.h>
|
||||||
#include <Poco/Util/AbstractConfiguration.h>
|
#include <Poco/Util/AbstractConfiguration.h>
|
||||||
#include <boost/range/algorithm_ext/erase.hpp>
|
#include <boost/range/algorithm_ext/erase.hpp>
|
||||||
|
|
||||||
|
|
||||||
namespace DB
|
namespace DB
|
||||||
{
|
{
|
||||||
namespace ErrorCodes
|
namespace ErrorCodes
|
||||||
@ -18,7 +20,6 @@ namespace ErrorCodes
|
|||||||
extern const int UNKNOWN_SETTING;
|
extern const int UNKNOWN_SETTING;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
SettingsConstraints::SettingsConstraints(const AccessControl & access_control_) : access_control(&access_control_)
|
SettingsConstraints::SettingsConstraints(const AccessControl & access_control_) : access_control(&access_control_)
|
||||||
{
|
{
|
||||||
}
|
}
|
||||||
@ -35,19 +36,28 @@ void SettingsConstraints::clear()
|
|||||||
constraints.clear();
|
constraints.clear();
|
||||||
}
|
}
|
||||||
|
|
||||||
void SettingsConstraints::set(const String & setting_name, const Field & min_value, const Field & max_value, SettingConstraintWritability writability)
|
void SettingsConstraints::set(const String & full_name, const Field & min_value, const Field & max_value, SettingConstraintWritability writability)
|
||||||
{
|
{
|
||||||
auto & constraint = constraints[setting_name];
|
auto & constraint = constraints[full_name];
|
||||||
if (!min_value.isNull())
|
if (!min_value.isNull())
|
||||||
constraint.min_value = Settings::castValueUtil(setting_name, min_value);
|
constraint.min_value = settingCastValueUtil(full_name, min_value);
|
||||||
if (!max_value.isNull())
|
if (!max_value.isNull())
|
||||||
constraint.max_value = Settings::castValueUtil(setting_name, max_value);
|
constraint.max_value = settingCastValueUtil(full_name, max_value);
|
||||||
constraint.writability = writability;
|
constraint.writability = writability;
|
||||||
}
|
}
|
||||||
|
|
||||||
void SettingsConstraints::get(const Settings & current_settings, std::string_view setting_name, Field & min_value, Field & max_value, SettingConstraintWritability & writability) const
|
void SettingsConstraints::get(const Settings & current_settings, std::string_view short_name, Field & min_value, Field & max_value, SettingConstraintWritability & writability) const
|
||||||
{
|
{
|
||||||
auto checker = getChecker(current_settings, setting_name);
|
// NOTE: for `Settings` short name is equal to full name
|
||||||
|
auto checker = getChecker(current_settings, short_name);
|
||||||
|
min_value = checker.constraint.min_value;
|
||||||
|
max_value = checker.constraint.max_value;
|
||||||
|
writability = checker.constraint.writability;
|
||||||
|
}
|
||||||
|
|
||||||
|
void SettingsConstraints::get(const MergeTreeSettings &, std::string_view short_name, Field & min_value, Field & max_value, SettingConstraintWritability & writability) const
|
||||||
|
{
|
||||||
|
auto checker = getMergeTreeChecker(short_name);
|
||||||
min_value = checker.constraint.min_value;
|
min_value = checker.constraint.min_value;
|
||||||
max_value = checker.constraint.max_value;
|
max_value = checker.constraint.max_value;
|
||||||
writability = checker.constraint.writability;
|
writability = checker.constraint.writability;
|
||||||
@ -97,6 +107,17 @@ void SettingsConstraints::check(const Settings & current_settings, SettingsChang
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
void SettingsConstraints::check(const MergeTreeSettings & current_settings, const SettingChange & change) const
|
||||||
|
{
|
||||||
|
checkImpl(current_settings, const_cast<SettingChange &>(change), THROW_ON_VIOLATION);
|
||||||
|
}
|
||||||
|
|
||||||
|
void SettingsConstraints::check(const MergeTreeSettings & current_settings, const SettingsChanges & changes) const
|
||||||
|
{
|
||||||
|
for (const auto & change : changes)
|
||||||
|
check(current_settings, change);
|
||||||
|
}
|
||||||
|
|
||||||
void SettingsConstraints::clamp(const Settings & current_settings, SettingsChanges & changes) const
|
void SettingsConstraints::clamp(const Settings & current_settings, SettingsChanges & changes) const
|
||||||
{
|
{
|
||||||
boost::range::remove_erase_if(
|
boost::range::remove_erase_if(
|
||||||
@ -107,6 +128,36 @@ void SettingsConstraints::clamp(const Settings & current_settings, SettingsChang
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
template <class T>
|
||||||
|
bool getNewValueToCheck(const T & current_settings, SettingChange & change, Field & new_value, bool throw_on_failure)
|
||||||
|
{
|
||||||
|
Field current_value;
|
||||||
|
bool has_current_value = current_settings.tryGet(change.name, current_value);
|
||||||
|
|
||||||
|
/// Setting isn't checked if value has not changed.
|
||||||
|
if (has_current_value && change.value == current_value)
|
||||||
|
return false;
|
||||||
|
|
||||||
|
if (throw_on_failure)
|
||||||
|
new_value = T::castValueUtil(change.name, change.value);
|
||||||
|
else
|
||||||
|
{
|
||||||
|
try
|
||||||
|
{
|
||||||
|
new_value = T::castValueUtil(change.name, change.value);
|
||||||
|
}
|
||||||
|
catch (...)
|
||||||
|
{
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Setting isn't checked if value has not changed.
|
||||||
|
if (has_current_value && new_value == current_value)
|
||||||
|
return false;
|
||||||
|
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
bool SettingsConstraints::checkImpl(const Settings & current_settings, SettingChange & change, ReactionOnViolation reaction) const
|
bool SettingsConstraints::checkImpl(const Settings & current_settings, SettingChange & change, ReactionOnViolation reaction) const
|
||||||
{
|
{
|
||||||
@ -115,26 +166,6 @@ bool SettingsConstraints::checkImpl(const Settings & current_settings, SettingCh
|
|||||||
if (setting_name == "profile")
|
if (setting_name == "profile")
|
||||||
return true;
|
return true;
|
||||||
|
|
||||||
bool cannot_cast;
|
|
||||||
auto cast_value = [&](const Field & x) -> Field
|
|
||||||
{
|
|
||||||
cannot_cast = false;
|
|
||||||
if (reaction == THROW_ON_VIOLATION)
|
|
||||||
return Settings::castValueUtil(setting_name, x);
|
|
||||||
else
|
|
||||||
{
|
|
||||||
try
|
|
||||||
{
|
|
||||||
return Settings::castValueUtil(setting_name, x);
|
|
||||||
}
|
|
||||||
catch (...)
|
|
||||||
{
|
|
||||||
cannot_cast = true;
|
|
||||||
return {};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
if (reaction == THROW_ON_VIOLATION)
|
if (reaction == THROW_ON_VIOLATION)
|
||||||
{
|
{
|
||||||
try
|
try
|
||||||
@ -156,27 +187,21 @@ bool SettingsConstraints::checkImpl(const Settings & current_settings, SettingCh
|
|||||||
else if (!access_control->isSettingNameAllowed(setting_name))
|
else if (!access_control->isSettingNameAllowed(setting_name))
|
||||||
return false;
|
return false;
|
||||||
|
|
||||||
Field current_value, new_value;
|
Field new_value;
|
||||||
if (current_settings.tryGet(setting_name, current_value))
|
if (!getNewValueToCheck(current_settings, change, new_value, reaction == THROW_ON_VIOLATION))
|
||||||
{
|
|
||||||
/// Setting isn't checked if value has not changed.
|
|
||||||
if (change.value == current_value)
|
|
||||||
return false;
|
return false;
|
||||||
|
|
||||||
new_value = cast_value(change.value);
|
|
||||||
if ((new_value == current_value) || cannot_cast)
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
else
|
|
||||||
{
|
|
||||||
new_value = cast_value(change.value);
|
|
||||||
if (cannot_cast)
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
return getChecker(current_settings, setting_name).check(change, new_value, reaction);
|
return getChecker(current_settings, setting_name).check(change, new_value, reaction);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
bool SettingsConstraints::checkImpl(const MergeTreeSettings & current_settings, SettingChange & change, ReactionOnViolation reaction) const
|
||||||
|
{
|
||||||
|
Field new_value;
|
||||||
|
if (!getNewValueToCheck(current_settings, change, new_value, reaction == THROW_ON_VIOLATION))
|
||||||
|
return false;
|
||||||
|
return getMergeTreeChecker(change.name).check(change, new_value, reaction);
|
||||||
|
}
|
||||||
|
|
||||||
bool SettingsConstraints::Checker::check(SettingChange & change, const Field & new_value, ReactionOnViolation reaction) const
|
bool SettingsConstraints::Checker::check(SettingChange & change, const Field & new_value, ReactionOnViolation reaction) const
|
||||||
{
|
{
|
||||||
const String & setting_name = change.name;
|
const String & setting_name = change.name;
|
||||||
@ -185,8 +210,6 @@ bool SettingsConstraints::Checker::check(SettingChange & change, const Field & n
|
|||||||
{
|
{
|
||||||
if (reaction == THROW_ON_VIOLATION)
|
if (reaction == THROW_ON_VIOLATION)
|
||||||
return applyVisitor(FieldVisitorAccurateLess{}, left, right);
|
return applyVisitor(FieldVisitorAccurateLess{}, left, right);
|
||||||
else
|
|
||||||
{
|
|
||||||
try
|
try
|
||||||
{
|
{
|
||||||
return applyVisitor(FieldVisitorAccurateLess{}, left, right);
|
return applyVisitor(FieldVisitorAccurateLess{}, left, right);
|
||||||
@ -195,7 +218,6 @@ bool SettingsConstraints::Checker::check(SettingChange & change, const Field & n
|
|||||||
{
|
{
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
}
|
|
||||||
};
|
};
|
||||||
|
|
||||||
if (!explain.empty())
|
if (!explain.empty())
|
||||||
@ -280,6 +302,14 @@ SettingsConstraints::Checker SettingsConstraints::getChecker(const Settings & cu
|
|||||||
return Checker(it->second);
|
return Checker(it->second);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
SettingsConstraints::Checker SettingsConstraints::getMergeTreeChecker(std::string_view short_name) const
|
||||||
|
{
|
||||||
|
auto it = constraints.find(settingFullName<MergeTreeSettings>(short_name));
|
||||||
|
if (it == constraints.end())
|
||||||
|
return Checker(); // Allowed
|
||||||
|
return Checker(it->second);
|
||||||
|
}
|
||||||
|
|
||||||
bool SettingsConstraints::Constraint::operator==(const Constraint & other) const
|
bool SettingsConstraints::Constraint::operator==(const Constraint & other) const
|
||||||
{
|
{
|
||||||
return writability == other.writability && min_value == other.min_value && max_value == other.max_value;
|
return writability == other.writability && min_value == other.min_value && max_value == other.max_value;
|
||||||
|
@ -12,6 +12,7 @@ namespace Poco::Util
|
|||||||
namespace DB
|
namespace DB
|
||||||
{
|
{
|
||||||
struct Settings;
|
struct Settings;
|
||||||
|
struct MergeTreeSettings;
|
||||||
struct SettingChange;
|
struct SettingChange;
|
||||||
class SettingsChanges;
|
class SettingsChanges;
|
||||||
class AccessControl;
|
class AccessControl;
|
||||||
@ -65,8 +66,9 @@ public:
|
|||||||
void clear();
|
void clear();
|
||||||
bool empty() const { return constraints.empty(); }
|
bool empty() const { return constraints.empty(); }
|
||||||
|
|
||||||
void set(const String & setting_name, const Field & min_value, const Field & max_value, SettingConstraintWritability writability);
|
void set(const String & full_name, const Field & min_value, const Field & max_value, SettingConstraintWritability writability);
|
||||||
void get(const Settings & current_settings, std::string_view setting_name, Field & min_value, Field & max_value, SettingConstraintWritability & writability) const;
|
void get(const Settings & current_settings, std::string_view short_name, Field & min_value, Field & max_value, SettingConstraintWritability & writability) const;
|
||||||
|
void get(const MergeTreeSettings & current_settings, std::string_view short_name, Field & min_value, Field & max_value, SettingConstraintWritability & writability) const;
|
||||||
|
|
||||||
void merge(const SettingsConstraints & other);
|
void merge(const SettingsConstraints & other);
|
||||||
|
|
||||||
@ -75,6 +77,10 @@ public:
|
|||||||
void check(const Settings & current_settings, const SettingsChanges & changes) const;
|
void check(const Settings & current_settings, const SettingsChanges & changes) const;
|
||||||
void check(const Settings & current_settings, SettingsChanges & changes) const;
|
void check(const Settings & current_settings, SettingsChanges & changes) const;
|
||||||
|
|
||||||
|
/// Checks whether `change` violates these constraints and throws an exception if so. (setting short name is expected inside `changes`)
|
||||||
|
void check(const MergeTreeSettings & current_settings, const SettingChange & change) const;
|
||||||
|
void check(const MergeTreeSettings & current_settings, const SettingsChanges & changes) const;
|
||||||
|
|
||||||
/// Checks whether `change` violates these and clamps the `change` if so.
|
/// Checks whether `change` violates these and clamps the `change` if so.
|
||||||
void clamp(const Settings & current_settings, SettingsChanges & changes) const;
|
void clamp(const Settings & current_settings, SettingsChanges & changes) const;
|
||||||
|
|
||||||
@ -137,8 +143,10 @@ private:
|
|||||||
};
|
};
|
||||||
|
|
||||||
bool checkImpl(const Settings & current_settings, SettingChange & change, ReactionOnViolation reaction) const;
|
bool checkImpl(const Settings & current_settings, SettingChange & change, ReactionOnViolation reaction) const;
|
||||||
|
bool checkImpl(const MergeTreeSettings & current_settings, SettingChange & change, ReactionOnViolation reaction) const;
|
||||||
|
|
||||||
Checker getChecker(const Settings & current_settings, std::string_view setting_name) const;
|
Checker getChecker(const Settings & current_settings, std::string_view setting_name) const;
|
||||||
|
Checker getMergeTreeChecker(std::string_view short_name) const;
|
||||||
|
|
||||||
// Special container for heterogeneous lookups: to avoid `String` construction during `find(std::string_view)`
|
// Special container for heterogeneous lookups: to avoid `String` construction during `find(std::string_view)`
|
||||||
using Constraints = std::unordered_map<String, Constraint, StringHash, std::equal_to<>>;
|
using Constraints = std::unordered_map<String, Constraint, StringHash, std::equal_to<>>;
|
||||||
|
@ -4,6 +4,7 @@
|
|||||||
#include <Access/User.h>
|
#include <Access/User.h>
|
||||||
#include <Access/SettingsProfile.h>
|
#include <Access/SettingsProfile.h>
|
||||||
#include <Access/AccessControl.h>
|
#include <Access/AccessControl.h>
|
||||||
|
#include <Access/resolveSetting.h>
|
||||||
#include <Access/AccessChangesNotifier.h>
|
#include <Access/AccessChangesNotifier.h>
|
||||||
#include <Dictionaries/IDictionary.h>
|
#include <Dictionaries/IDictionary.h>
|
||||||
#include <Common/Config/ConfigReloader.h>
|
#include <Common/Config/ConfigReloader.h>
|
||||||
@ -451,9 +452,9 @@ namespace
|
|||||||
for (const String & constraint_type : constraint_types)
|
for (const String & constraint_type : constraint_types)
|
||||||
{
|
{
|
||||||
if (constraint_type == "min")
|
if (constraint_type == "min")
|
||||||
profile_element.min_value = Settings::stringToValueUtil(setting_name, config.getString(path_to_name + "." + constraint_type));
|
profile_element.min_value = settingStringToValueUtil(setting_name, config.getString(path_to_name + "." + constraint_type));
|
||||||
else if (constraint_type == "max")
|
else if (constraint_type == "max")
|
||||||
profile_element.max_value = Settings::stringToValueUtil(setting_name, config.getString(path_to_name + "." + constraint_type));
|
profile_element.max_value = settingStringToValueUtil(setting_name, config.getString(path_to_name + "." + constraint_type));
|
||||||
else if (constraint_type == "readonly" || constraint_type == "const")
|
else if (constraint_type == "readonly" || constraint_type == "const")
|
||||||
{
|
{
|
||||||
writability_count++;
|
writability_count++;
|
||||||
@ -517,7 +518,7 @@ namespace
|
|||||||
|
|
||||||
SettingsProfileElement profile_element;
|
SettingsProfileElement profile_element;
|
||||||
profile_element.setting_name = setting_name;
|
profile_element.setting_name = setting_name;
|
||||||
profile_element.value = Settings::stringToValueUtil(setting_name, config.getString(profile_config + "." + key));
|
profile_element.value = settingStringToValueUtil(setting_name, config.getString(profile_config + "." + key));
|
||||||
profile->elements.emplace_back(std::move(profile_element));
|
profile->elements.emplace_back(std::move(profile_element));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
90
src/Access/resolveSetting.h
Normal file
90
src/Access/resolveSetting.h
Normal file
@ -0,0 +1,90 @@
|
|||||||
|
#pragma once
|
||||||
|
|
||||||
|
#include <Core/Settings.h>
|
||||||
|
#include <Storages/MergeTree/MergeTreeSettings.h>
|
||||||
|
|
||||||
|
//
|
||||||
|
// Settings from different classes (Settings, MergeTreeSettings) can coexist in the same "namespace".
|
||||||
|
// This is, for example, required to define settings constraints inside user profiles.
|
||||||
|
// `resolveSetting(full_name)` is used to resolve setting name and choose which class is to be used.
|
||||||
|
// Templated lambda syntax should be used:
|
||||||
|
//
|
||||||
|
// return resolveSetting(name, [] <typename T> (std::string_view name, SettingsType<T>)
|
||||||
|
// {
|
||||||
|
// return T::castValueUtil(name, value); // T will be deduced into `Settings`, `MergeTreeSettings`, ...
|
||||||
|
// });
|
||||||
|
//
|
||||||
|
|
||||||
|
namespace DB
|
||||||
|
{
|
||||||
|
|
||||||
|
constexpr std::string_view MERGE_TREE_SETTINGS_PREFIX = "merge_tree_";
|
||||||
|
|
||||||
|
template <typename T> struct SettingsType {};
|
||||||
|
|
||||||
|
// Resolve setting name and call function `f` back with short name and class
|
||||||
|
template <typename F>
|
||||||
|
auto resolveSetting(std::string_view full_name, F && f)
|
||||||
|
{
|
||||||
|
if (full_name.starts_with(MERGE_TREE_SETTINGS_PREFIX))
|
||||||
|
{
|
||||||
|
std::string_view short_name = static_cast<std::string_view>(full_name).substr(MERGE_TREE_SETTINGS_PREFIX.size());
|
||||||
|
if (MergeTreeSettings::hasBuiltin(short_name)) // Check is required because `Settings` also contain names starting with 'merge_tree_' prefix
|
||||||
|
return f(short_name, SettingsType<MergeTreeSettings>());
|
||||||
|
}
|
||||||
|
// NOTE: other setting name resolution rules are to be added here
|
||||||
|
|
||||||
|
// If no rule works - use global namespace
|
||||||
|
return f(full_name, SettingsType<Settings>());
|
||||||
|
}
|
||||||
|
|
||||||
|
inline Field settingCastValueUtil(std::string_view full_name, const Field & value)
|
||||||
|
{
|
||||||
|
return resolveSetting(full_name, [&] <typename T> (std::string_view short_name, SettingsType<T>)
|
||||||
|
{
|
||||||
|
return T::castValueUtil(short_name, value);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
inline String settingValueToStringUtil(std::string_view full_name, const Field & value)
|
||||||
|
{
|
||||||
|
return resolveSetting(full_name, [&] <typename T> (std::string_view short_name, SettingsType<T>)
|
||||||
|
{
|
||||||
|
return T::valueToStringUtil(short_name, value);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
inline Field settingStringToValueUtil(std::string_view full_name, const String & str)
|
||||||
|
{
|
||||||
|
return resolveSetting(full_name, [&] <typename T> (std::string_view short_name, SettingsType<T>)
|
||||||
|
{
|
||||||
|
return T::stringToValueUtil(short_name, str);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
inline bool settingIsBuiltin(std::string_view full_name)
|
||||||
|
{
|
||||||
|
return resolveSetting(full_name, [&] <typename T> (std::string_view short_name, SettingsType<T>)
|
||||||
|
{
|
||||||
|
return T::hasBuiltin(short_name);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
template <typename T>
|
||||||
|
inline String settingFullName(std::string_view short_name);
|
||||||
|
|
||||||
|
template <>
|
||||||
|
inline String settingFullName<Settings>(std::string_view short_name)
|
||||||
|
{
|
||||||
|
return String(short_name);
|
||||||
|
}
|
||||||
|
|
||||||
|
template <>
|
||||||
|
inline String settingFullName<MergeTreeSettings>(std::string_view short_name)
|
||||||
|
{
|
||||||
|
String full_name(MERGE_TREE_SETTINGS_PREFIX);
|
||||||
|
full_name += short_name; // Just because you cannot concatenate `std::string_view` and `std::string` using operator+ in C++20 yet
|
||||||
|
return full_name;
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
@ -77,7 +77,10 @@ protected:
|
|||||||
|
|
||||||
static bool getFlag(ConstAggregateDataPtr __restrict place) noexcept
|
static bool getFlag(ConstAggregateDataPtr __restrict place) noexcept
|
||||||
{
|
{
|
||||||
return result_is_nullable ? place[0] : true;
|
if constexpr (result_is_nullable)
|
||||||
|
return place[0];
|
||||||
|
else
|
||||||
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
public:
|
public:
|
||||||
@ -98,9 +101,10 @@ public:
|
|||||||
|
|
||||||
DataTypePtr getReturnType() const override
|
DataTypePtr getReturnType() const override
|
||||||
{
|
{
|
||||||
return result_is_nullable
|
if constexpr (result_is_nullable)
|
||||||
? makeNullable(nested_function->getReturnType())
|
return makeNullable(nested_function->getReturnType());
|
||||||
: nested_function->getReturnType();
|
else
|
||||||
|
return nested_function->getReturnType();
|
||||||
}
|
}
|
||||||
|
|
||||||
void create(AggregateDataPtr __restrict place) const override
|
void create(AggregateDataPtr __restrict place) const override
|
||||||
@ -136,7 +140,8 @@ public:
|
|||||||
|
|
||||||
void merge(AggregateDataPtr __restrict place, ConstAggregateDataPtr rhs, Arena * arena) const override
|
void merge(AggregateDataPtr __restrict place, ConstAggregateDataPtr rhs, Arena * arena) const override
|
||||||
{
|
{
|
||||||
if (result_is_nullable && getFlag(rhs))
|
if constexpr (result_is_nullable)
|
||||||
|
if (getFlag(rhs))
|
||||||
setFlag(place);
|
setFlag(place);
|
||||||
|
|
||||||
nested_function->merge(nestedPlace(place), nestedPlace(rhs), arena);
|
nested_function->merge(nestedPlace(place), nestedPlace(rhs), arena);
|
||||||
@ -472,7 +477,7 @@ public:
|
|||||||
final_flags = std::make_unique<UInt8[]>(row_end);
|
final_flags = std::make_unique<UInt8[]>(row_end);
|
||||||
final_flags_ptr = final_flags.get();
|
final_flags_ptr = final_flags.get();
|
||||||
|
|
||||||
bool included_elements = 0;
|
size_t included_elements = 0;
|
||||||
const auto & flags = assert_cast<const ColumnUInt8 &>(*columns[if_argument_pos]).getData();
|
const auto & flags = assert_cast<const ColumnUInt8 &>(*columns[if_argument_pos]).getData();
|
||||||
for (size_t i = row_begin; i < row_end; i++)
|
for (size_t i = row_begin; i < row_end; i++)
|
||||||
{
|
{
|
||||||
|
@ -3,6 +3,8 @@
|
|||||||
#include <AggregateFunctions/AggregateFunctionFactory.h>
|
#include <AggregateFunctions/AggregateFunctionFactory.h>
|
||||||
#include <AggregateFunctions/IAggregateFunction.h>
|
#include <AggregateFunctions/IAggregateFunction.h>
|
||||||
|
|
||||||
|
#include <Interpreters/Context.h>
|
||||||
|
|
||||||
#include <Analyzer/InDepthQueryTreeVisitor.h>
|
#include <Analyzer/InDepthQueryTreeVisitor.h>
|
||||||
#include <Analyzer/ColumnNode.h>
|
#include <Analyzer/ColumnNode.h>
|
||||||
#include <Analyzer/FunctionNode.h>
|
#include <Analyzer/FunctionNode.h>
|
||||||
@ -56,7 +58,7 @@ public:
|
|||||||
auto & count_distinct_argument_column_typed = count_distinct_argument_column->as<ColumnNode &>();
|
auto & count_distinct_argument_column_typed = count_distinct_argument_column->as<ColumnNode &>();
|
||||||
|
|
||||||
/// Build subquery SELECT count_distinct_argument_column FROM table_expression GROUP BY count_distinct_argument_column
|
/// Build subquery SELECT count_distinct_argument_column FROM table_expression GROUP BY count_distinct_argument_column
|
||||||
auto subquery = std::make_shared<QueryNode>();
|
auto subquery = std::make_shared<QueryNode>(Context::createCopy(query_node->getContext()));
|
||||||
subquery->getJoinTree() = query_node->getJoinTree();
|
subquery->getJoinTree() = query_node->getJoinTree();
|
||||||
subquery->getProjection().getNodes().push_back(count_distinct_argument_column);
|
subquery->getProjection().getNodes().push_back(count_distinct_argument_column);
|
||||||
subquery->getGroupBy().getNodes().push_back(count_distinct_argument_column);
|
subquery->getGroupBy().getNodes().push_back(count_distinct_argument_column);
|
||||||
|
@ -6,6 +6,7 @@
|
|||||||
#include <Analyzer/InDepthQueryTreeVisitor.h>
|
#include <Analyzer/InDepthQueryTreeVisitor.h>
|
||||||
#include <Analyzer/ConstantNode.h>
|
#include <Analyzer/ConstantNode.h>
|
||||||
#include <Analyzer/FunctionNode.h>
|
#include <Analyzer/FunctionNode.h>
|
||||||
|
#include <Interpreters/Context.h>
|
||||||
|
|
||||||
namespace DB
|
namespace DB
|
||||||
{
|
{
|
||||||
@ -16,7 +17,8 @@ namespace
|
|||||||
class NormalizeCountVariantsVisitor : public InDepthQueryTreeVisitor<NormalizeCountVariantsVisitor>
|
class NormalizeCountVariantsVisitor : public InDepthQueryTreeVisitor<NormalizeCountVariantsVisitor>
|
||||||
{
|
{
|
||||||
public:
|
public:
|
||||||
static void visitImpl(QueryTreeNodePtr & node)
|
explicit NormalizeCountVariantsVisitor(ContextPtr context_) : context(std::move(context_)) {}
|
||||||
|
void visitImpl(QueryTreeNodePtr & node)
|
||||||
{
|
{
|
||||||
auto * function_node = node->as<FunctionNode>();
|
auto * function_node = node->as<FunctionNode>();
|
||||||
if (!function_node || !function_node->isAggregateFunction() || (function_node->getFunctionName() != "count" && function_node->getFunctionName() != "sum"))
|
if (!function_node || !function_node->isAggregateFunction() || (function_node->getFunctionName() != "count" && function_node->getFunctionName() != "sum"))
|
||||||
@ -39,13 +41,16 @@ public:
|
|||||||
}
|
}
|
||||||
else if (function_node->getFunctionName() == "sum" &&
|
else if (function_node->getFunctionName() == "sum" &&
|
||||||
first_argument_constant_literal.getType() == Field::Types::UInt64 &&
|
first_argument_constant_literal.getType() == Field::Types::UInt64 &&
|
||||||
first_argument_constant_literal.get<UInt64>() == 1)
|
first_argument_constant_literal.get<UInt64>() == 1 &&
|
||||||
|
!context->getSettingsRef().aggregate_functions_null_for_empty)
|
||||||
{
|
{
|
||||||
resolveAsCountAggregateFunction(*function_node);
|
resolveAsCountAggregateFunction(*function_node);
|
||||||
function_node->getArguments().getNodes().clear();
|
function_node->getArguments().getNodes().clear();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
private:
|
private:
|
||||||
|
ContextPtr context;
|
||||||
|
|
||||||
static inline void resolveAsCountAggregateFunction(FunctionNode & function_node)
|
static inline void resolveAsCountAggregateFunction(FunctionNode & function_node)
|
||||||
{
|
{
|
||||||
auto function_result_type = function_node.getResultType();
|
auto function_result_type = function_node.getResultType();
|
||||||
@ -59,9 +64,9 @@ private:
|
|||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
void NormalizeCountVariantsPass::run(QueryTreeNodePtr query_tree_node, ContextPtr)
|
void NormalizeCountVariantsPass::run(QueryTreeNodePtr query_tree_node, ContextPtr context)
|
||||||
{
|
{
|
||||||
NormalizeCountVariantsVisitor visitor;
|
NormalizeCountVariantsVisitor visitor(context);
|
||||||
visitor.visit(query_tree_node);
|
visitor.visit(query_tree_node);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -647,6 +647,11 @@ struct IdentifierResolveScope
|
|||||||
subquery_depth = parent_scope->subquery_depth;
|
subquery_depth = parent_scope->subquery_depth;
|
||||||
context = parent_scope->context;
|
context = parent_scope->context;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (auto * union_node = scope_node->as<UnionNode>())
|
||||||
|
context = union_node->getContext();
|
||||||
|
else if (auto * query_node = scope_node->as<QueryNode>())
|
||||||
|
context = query_node->getContext();
|
||||||
}
|
}
|
||||||
|
|
||||||
QueryTreeNodePtr scope_node;
|
QueryTreeNodePtr scope_node;
|
||||||
@ -974,6 +979,8 @@ public:
|
|||||||
void resolve(QueryTreeNodePtr node, const QueryTreeNodePtr & table_expression, ContextPtr context)
|
void resolve(QueryTreeNodePtr node, const QueryTreeNodePtr & table_expression, ContextPtr context)
|
||||||
{
|
{
|
||||||
IdentifierResolveScope scope(node, nullptr /*parent_scope*/);
|
IdentifierResolveScope scope(node, nullptr /*parent_scope*/);
|
||||||
|
|
||||||
|
if (!scope.context)
|
||||||
scope.context = context;
|
scope.context = context;
|
||||||
|
|
||||||
auto node_type = node->getNodeType();
|
auto node_type = node->getNodeType();
|
||||||
@ -2843,6 +2850,14 @@ IdentifierResolveResult QueryAnalyzer::tryResolveIdentifierInParentScopes(const
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/** Nested subqueries cannot access outer subqueries table expressions from JOIN tree because
|
||||||
|
* that can prevent resolution of table expression from CTE.
|
||||||
|
*
|
||||||
|
* Example: WITH a AS (SELECT number FROM numbers(1)), b AS (SELECT number FROM a) SELECT * FROM a as l, b as r;
|
||||||
|
*/
|
||||||
|
if (identifier_lookup.isTableExpressionLookup())
|
||||||
|
identifier_resolve_settings.allow_to_check_join_tree = false;
|
||||||
|
|
||||||
while (scope_to_check != nullptr)
|
while (scope_to_check != nullptr)
|
||||||
{
|
{
|
||||||
auto lookup_result = tryResolveIdentifier(identifier_lookup, *scope_to_check, identifier_resolve_settings);
|
auto lookup_result = tryResolveIdentifier(identifier_lookup, *scope_to_check, identifier_resolve_settings);
|
||||||
@ -4042,7 +4057,7 @@ ProjectionNames QueryAnalyzer::resolveFunction(QueryTreeNodePtr & node, Identifi
|
|||||||
|
|
||||||
auto constant_data_type = std::make_shared<DataTypeUInt64>();
|
auto constant_data_type = std::make_shared<DataTypeUInt64>();
|
||||||
|
|
||||||
auto in_subquery = std::make_shared<QueryNode>();
|
auto in_subquery = std::make_shared<QueryNode>(Context::createCopy(scope.context));
|
||||||
in_subquery->getProjection().getNodes().push_back(std::make_shared<ConstantNode>(1UL, constant_data_type));
|
in_subquery->getProjection().getNodes().push_back(std::make_shared<ConstantNode>(1UL, constant_data_type));
|
||||||
in_subquery->getJoinTree() = exists_subquery_argument;
|
in_subquery->getJoinTree() = exists_subquery_argument;
|
||||||
in_subquery->getLimit() = std::make_shared<ConstantNode>(1UL, constant_data_type);
|
in_subquery->getLimit() = std::make_shared<ConstantNode>(1UL, constant_data_type);
|
||||||
@ -4095,7 +4110,7 @@ ProjectionNames QueryAnalyzer::resolveFunction(QueryTreeNodePtr & node, Identifi
|
|||||||
projection_columns.emplace_back(column.name, column.type);
|
projection_columns.emplace_back(column.name, column.type);
|
||||||
}
|
}
|
||||||
|
|
||||||
auto in_second_argument_query_node = std::make_shared<QueryNode>();
|
auto in_second_argument_query_node = std::make_shared<QueryNode>(Context::createCopy(scope.context));
|
||||||
in_second_argument_query_node->setIsSubquery(true);
|
in_second_argument_query_node->setIsSubquery(true);
|
||||||
in_second_argument_query_node->getProjectionNode() = std::move(column_nodes_to_select);
|
in_second_argument_query_node->getProjectionNode() = std::move(column_nodes_to_select);
|
||||||
in_second_argument_query_node->getJoinTree() = std::move(in_second_argument);
|
in_second_argument_query_node->getJoinTree() = std::move(in_second_argument);
|
||||||
@ -5756,14 +5771,6 @@ void QueryAnalyzer::resolveQuery(const QueryTreeNodePtr & query_node, Identifier
|
|||||||
max_subquery_depth);
|
max_subquery_depth);
|
||||||
|
|
||||||
auto & query_node_typed = query_node->as<QueryNode &>();
|
auto & query_node_typed = query_node->as<QueryNode &>();
|
||||||
|
|
||||||
if (query_node_typed.hasSettingsChanges())
|
|
||||||
{
|
|
||||||
auto updated_scope_context = Context::createCopy(scope.context);
|
|
||||||
updated_scope_context->applySettingsChanges(query_node_typed.getSettingsChanges());
|
|
||||||
scope.context = std::move(updated_scope_context);
|
|
||||||
}
|
|
||||||
|
|
||||||
const auto & settings = scope.context->getSettingsRef();
|
const auto & settings = scope.context->getSettingsRef();
|
||||||
|
|
||||||
if (settings.group_by_use_nulls)
|
if (settings.group_by_use_nulls)
|
||||||
|
@ -56,7 +56,7 @@ public:
|
|||||||
if (!isInt64OrUInt64FieldType(constant_value_literal.getType()))
|
if (!isInt64OrUInt64FieldType(constant_value_literal.getType()))
|
||||||
return;
|
return;
|
||||||
|
|
||||||
if (constant_value_literal.get<UInt64>() != 1)
|
if (constant_value_literal.get<UInt64>() != 1 || context->getSettingsRef().aggregate_functions_null_for_empty)
|
||||||
return;
|
return;
|
||||||
|
|
||||||
function_node_arguments_nodes[0] = std::move(function_node_arguments_nodes[1]);
|
function_node_arguments_nodes[0] = std::move(function_node_arguments_nodes[1]);
|
||||||
|
@ -21,8 +21,10 @@
|
|||||||
namespace DB
|
namespace DB
|
||||||
{
|
{
|
||||||
|
|
||||||
QueryNode::QueryNode()
|
QueryNode::QueryNode(ContextMutablePtr context_, SettingsChanges settings_changes_)
|
||||||
: IQueryTreeNode(children_size)
|
: IQueryTreeNode(children_size)
|
||||||
|
, context(std::move(context_))
|
||||||
|
, settings_changes(std::move(settings_changes_))
|
||||||
{
|
{
|
||||||
children[with_child_index] = std::make_shared<ListNode>();
|
children[with_child_index] = std::make_shared<ListNode>();
|
||||||
children[projection_child_index] = std::make_shared<ListNode>();
|
children[projection_child_index] = std::make_shared<ListNode>();
|
||||||
@ -32,6 +34,10 @@ QueryNode::QueryNode()
|
|||||||
children[limit_by_child_index] = std::make_shared<ListNode>();
|
children[limit_by_child_index] = std::make_shared<ListNode>();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
QueryNode::QueryNode(ContextMutablePtr context_)
|
||||||
|
: QueryNode(context_, {} /*settings_changes*/)
|
||||||
|
{}
|
||||||
|
|
||||||
void QueryNode::dumpTreeImpl(WriteBuffer & buffer, FormatState & format_state, size_t indent) const
|
void QueryNode::dumpTreeImpl(WriteBuffer & buffer, FormatState & format_state, size_t indent) const
|
||||||
{
|
{
|
||||||
buffer << std::string(indent, ' ') << "QUERY id: " << format_state.getNodeId(this);
|
buffer << std::string(indent, ' ') << "QUERY id: " << format_state.getNodeId(this);
|
||||||
@ -222,7 +228,7 @@ void QueryNode::updateTreeHashImpl(HashState & state) const
|
|||||||
|
|
||||||
QueryTreeNodePtr QueryNode::cloneImpl() const
|
QueryTreeNodePtr QueryNode::cloneImpl() const
|
||||||
{
|
{
|
||||||
auto result_query_node = std::make_shared<QueryNode>();
|
auto result_query_node = std::make_shared<QueryNode>(context);
|
||||||
|
|
||||||
result_query_node->is_subquery = is_subquery;
|
result_query_node->is_subquery = is_subquery;
|
||||||
result_query_node->is_cte = is_cte;
|
result_query_node->is_cte = is_cte;
|
||||||
|
@ -10,6 +10,8 @@
|
|||||||
#include <Analyzer/ListNode.h>
|
#include <Analyzer/ListNode.h>
|
||||||
#include <Analyzer/TableExpressionModifiers.h>
|
#include <Analyzer/TableExpressionModifiers.h>
|
||||||
|
|
||||||
|
#include <Interpreters/Context_fwd.h>
|
||||||
|
|
||||||
namespace DB
|
namespace DB
|
||||||
{
|
{
|
||||||
|
|
||||||
@ -61,7 +63,41 @@ using QueryNodePtr = std::shared_ptr<QueryNode>;
|
|||||||
class QueryNode final : public IQueryTreeNode
|
class QueryNode final : public IQueryTreeNode
|
||||||
{
|
{
|
||||||
public:
|
public:
|
||||||
explicit QueryNode();
|
/// Construct query node with context and changed settings
|
||||||
|
explicit QueryNode(ContextMutablePtr context_, SettingsChanges settings_changes_);
|
||||||
|
|
||||||
|
/// Construct query node with context
|
||||||
|
explicit QueryNode(ContextMutablePtr context_);
|
||||||
|
|
||||||
|
/// Get context
|
||||||
|
ContextPtr getContext() const
|
||||||
|
{
|
||||||
|
return context;
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get mutable context
|
||||||
|
const ContextMutablePtr & getMutableContext() const
|
||||||
|
{
|
||||||
|
return context;
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get mutable context
|
||||||
|
ContextMutablePtr & getMutableContext()
|
||||||
|
{
|
||||||
|
return context;
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns true if query node has settings changes, false otherwise
|
||||||
|
bool hasSettingsChanges() const
|
||||||
|
{
|
||||||
|
return !settings_changes.empty();
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get query node settings changes
|
||||||
|
const SettingsChanges & getSettingsChanges() const
|
||||||
|
{
|
||||||
|
return settings_changes;
|
||||||
|
}
|
||||||
|
|
||||||
/// Returns true if query node is subquery, false otherwise
|
/// Returns true if query node is subquery, false otherwise
|
||||||
bool isSubquery() const
|
bool isSubquery() const
|
||||||
@ -513,24 +549,6 @@ public:
|
|||||||
return children[offset_child_index];
|
return children[offset_child_index];
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Returns true if query node has settings changes specified, false otherwise
|
|
||||||
bool hasSettingsChanges() const
|
|
||||||
{
|
|
||||||
return !settings_changes.empty();
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Get query node settings changes
|
|
||||||
const SettingsChanges & getSettingsChanges() const
|
|
||||||
{
|
|
||||||
return settings_changes;
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Set query node settings changes value
|
|
||||||
void setSettingsChanges(SettingsChanges settings_changes_value)
|
|
||||||
{
|
|
||||||
settings_changes = std::move(settings_changes_value);
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Get query node projection columns
|
/// Get query node projection columns
|
||||||
const NamesAndTypes & getProjectionColumns() const
|
const NamesAndTypes & getProjectionColumns() const
|
||||||
{
|
{
|
||||||
@ -572,6 +590,7 @@ private:
|
|||||||
|
|
||||||
std::string cte_name;
|
std::string cte_name;
|
||||||
NamesAndTypes projection_columns;
|
NamesAndTypes projection_columns;
|
||||||
|
ContextMutablePtr context;
|
||||||
SettingsChanges settings_changes;
|
SettingsChanges settings_changes;
|
||||||
|
|
||||||
static constexpr size_t with_child_index = 0;
|
static constexpr size_t with_child_index = 0;
|
||||||
|
@ -77,75 +77,90 @@ public:
|
|||||||
}
|
}
|
||||||
|
|
||||||
private:
|
private:
|
||||||
QueryTreeNodePtr buildSelectOrUnionExpression(const ASTPtr & select_or_union_query, bool is_subquery, const std::string & cte_name) const;
|
QueryTreeNodePtr buildSelectOrUnionExpression(const ASTPtr & select_or_union_query,
|
||||||
|
bool is_subquery,
|
||||||
|
const std::string & cte_name,
|
||||||
|
const ContextPtr & context) const;
|
||||||
|
|
||||||
QueryTreeNodePtr buildSelectWithUnionExpression(const ASTPtr & select_with_union_query, bool is_subquery, const std::string & cte_name) const;
|
QueryTreeNodePtr buildSelectWithUnionExpression(const ASTPtr & select_with_union_query,
|
||||||
|
bool is_subquery,
|
||||||
|
const std::string & cte_name,
|
||||||
|
const ContextPtr & context) const;
|
||||||
|
|
||||||
QueryTreeNodePtr buildSelectIntersectExceptQuery(const ASTPtr & select_intersect_except_query, bool is_subquery, const std::string & cte_name) const;
|
QueryTreeNodePtr buildSelectIntersectExceptQuery(const ASTPtr & select_intersect_except_query,
|
||||||
|
bool is_subquery,
|
||||||
|
const std::string & cte_name,
|
||||||
|
const ContextPtr & context) const;
|
||||||
|
|
||||||
QueryTreeNodePtr buildSelectExpression(const ASTPtr & select_query, bool is_subquery, const std::string & cte_name) const;
|
QueryTreeNodePtr buildSelectExpression(const ASTPtr & select_query,
|
||||||
|
bool is_subquery,
|
||||||
|
const std::string & cte_name,
|
||||||
|
const ContextPtr & context) const;
|
||||||
|
|
||||||
QueryTreeNodePtr buildSortList(const ASTPtr & order_by_expression_list) const;
|
QueryTreeNodePtr buildSortList(const ASTPtr & order_by_expression_list, const ContextPtr & context) const;
|
||||||
|
|
||||||
QueryTreeNodePtr buildInterpolateList(const ASTPtr & interpolate_expression_list) const;
|
QueryTreeNodePtr buildInterpolateList(const ASTPtr & interpolate_expression_list, const ContextPtr & context) const;
|
||||||
|
|
||||||
QueryTreeNodePtr buildWindowList(const ASTPtr & window_definition_list) const;
|
QueryTreeNodePtr buildWindowList(const ASTPtr & window_definition_list, const ContextPtr & context) const;
|
||||||
|
|
||||||
QueryTreeNodePtr buildExpressionList(const ASTPtr & expression_list) const;
|
QueryTreeNodePtr buildExpressionList(const ASTPtr & expression_list, const ContextPtr & context) const;
|
||||||
|
|
||||||
QueryTreeNodePtr buildExpression(const ASTPtr & expression) const;
|
QueryTreeNodePtr buildExpression(const ASTPtr & expression, const ContextPtr & context) const;
|
||||||
|
|
||||||
QueryTreeNodePtr buildWindow(const ASTPtr & window_definition) const;
|
QueryTreeNodePtr buildWindow(const ASTPtr & window_definition, const ContextPtr & context) const;
|
||||||
|
|
||||||
QueryTreeNodePtr buildJoinTree(const ASTPtr & tables_in_select_query) const;
|
QueryTreeNodePtr buildJoinTree(const ASTPtr & tables_in_select_query, const ContextPtr & context) const;
|
||||||
|
|
||||||
ColumnTransformersNodes buildColumnTransformers(const ASTPtr & matcher_expression, size_t start_child_index) const;
|
ColumnTransformersNodes buildColumnTransformers(const ASTPtr & matcher_expression, size_t start_child_index, const ContextPtr & context) const;
|
||||||
|
|
||||||
ASTPtr query;
|
ASTPtr query;
|
||||||
ContextPtr context;
|
|
||||||
QueryTreeNodePtr query_tree_node;
|
QueryTreeNodePtr query_tree_node;
|
||||||
|
|
||||||
};
|
};
|
||||||
|
|
||||||
QueryTreeBuilder::QueryTreeBuilder(ASTPtr query_, ContextPtr context_)
|
QueryTreeBuilder::QueryTreeBuilder(ASTPtr query_, ContextPtr context_)
|
||||||
: query(query_->clone())
|
: query(query_->clone())
|
||||||
, context(std::move(context_))
|
|
||||||
{
|
{
|
||||||
if (query->as<ASTSelectWithUnionQuery>() ||
|
if (query->as<ASTSelectWithUnionQuery>() ||
|
||||||
query->as<ASTSelectIntersectExceptQuery>() ||
|
query->as<ASTSelectIntersectExceptQuery>() ||
|
||||||
query->as<ASTSelectQuery>())
|
query->as<ASTSelectQuery>())
|
||||||
query_tree_node = buildSelectOrUnionExpression(query, false /*is_subquery*/, {} /*cte_name*/);
|
query_tree_node = buildSelectOrUnionExpression(query, false /*is_subquery*/, {} /*cte_name*/, context_);
|
||||||
else if (query->as<ASTExpressionList>())
|
else if (query->as<ASTExpressionList>())
|
||||||
query_tree_node = buildExpressionList(query);
|
query_tree_node = buildExpressionList(query, context_);
|
||||||
else
|
else
|
||||||
query_tree_node = buildExpression(query);
|
query_tree_node = buildExpression(query, context_);
|
||||||
}
|
}
|
||||||
|
|
||||||
QueryTreeNodePtr QueryTreeBuilder::buildSelectOrUnionExpression(const ASTPtr & select_or_union_query, bool is_subquery, const std::string & cte_name) const
|
QueryTreeNodePtr QueryTreeBuilder::buildSelectOrUnionExpression(const ASTPtr & select_or_union_query,
|
||||||
|
bool is_subquery,
|
||||||
|
const std::string & cte_name,
|
||||||
|
const ContextPtr & context) const
|
||||||
{
|
{
|
||||||
QueryTreeNodePtr query_node;
|
QueryTreeNodePtr query_node;
|
||||||
|
|
||||||
if (select_or_union_query->as<ASTSelectWithUnionQuery>())
|
if (select_or_union_query->as<ASTSelectWithUnionQuery>())
|
||||||
query_node = buildSelectWithUnionExpression(select_or_union_query, is_subquery /*is_subquery*/, cte_name /*cte_name*/);
|
query_node = buildSelectWithUnionExpression(select_or_union_query, is_subquery /*is_subquery*/, cte_name /*cte_name*/, context);
|
||||||
else if (select_or_union_query->as<ASTSelectIntersectExceptQuery>())
|
else if (select_or_union_query->as<ASTSelectIntersectExceptQuery>())
|
||||||
query_node = buildSelectIntersectExceptQuery(select_or_union_query, is_subquery /*is_subquery*/, cte_name /*cte_name*/);
|
query_node = buildSelectIntersectExceptQuery(select_or_union_query, is_subquery /*is_subquery*/, cte_name /*cte_name*/, context);
|
||||||
else if (select_or_union_query->as<ASTSelectQuery>())
|
else if (select_or_union_query->as<ASTSelectQuery>())
|
||||||
query_node = buildSelectExpression(select_or_union_query, is_subquery /*is_subquery*/, cte_name /*cte_name*/);
|
query_node = buildSelectExpression(select_or_union_query, is_subquery /*is_subquery*/, cte_name /*cte_name*/, context);
|
||||||
else
|
else
|
||||||
throw Exception(ErrorCodes::UNSUPPORTED_METHOD, "SELECT or UNION query {} is not supported", select_or_union_query->formatForErrorMessage());
|
throw Exception(ErrorCodes::UNSUPPORTED_METHOD, "SELECT or UNION query {} is not supported", select_or_union_query->formatForErrorMessage());
|
||||||
|
|
||||||
return query_node;
|
return query_node;
|
||||||
}
|
}
|
||||||
|
|
||||||
QueryTreeNodePtr QueryTreeBuilder::buildSelectWithUnionExpression(const ASTPtr & select_with_union_query, bool is_subquery, const std::string & cte_name) const
|
QueryTreeNodePtr QueryTreeBuilder::buildSelectWithUnionExpression(const ASTPtr & select_with_union_query,
|
||||||
|
bool is_subquery,
|
||||||
|
const std::string & cte_name,
|
||||||
|
const ContextPtr & context) const
|
||||||
{
|
{
|
||||||
auto & select_with_union_query_typed = select_with_union_query->as<ASTSelectWithUnionQuery &>();
|
auto & select_with_union_query_typed = select_with_union_query->as<ASTSelectWithUnionQuery &>();
|
||||||
auto & select_lists = select_with_union_query_typed.list_of_selects->as<ASTExpressionList &>();
|
auto & select_lists = select_with_union_query_typed.list_of_selects->as<ASTExpressionList &>();
|
||||||
|
|
||||||
if (select_lists.children.size() == 1)
|
if (select_lists.children.size() == 1)
|
||||||
return buildSelectOrUnionExpression(select_lists.children[0], is_subquery, cte_name);
|
return buildSelectOrUnionExpression(select_lists.children[0], is_subquery, cte_name, context);
|
||||||
|
|
||||||
auto union_node = std::make_shared<UnionNode>(select_with_union_query_typed.union_mode);
|
auto union_node = std::make_shared<UnionNode>(Context::createCopy(context), select_with_union_query_typed.union_mode);
|
||||||
union_node->setIsSubquery(is_subquery);
|
union_node->setIsSubquery(is_subquery);
|
||||||
union_node->setIsCTE(!cte_name.empty());
|
union_node->setIsCTE(!cte_name.empty());
|
||||||
union_node->setCTEName(cte_name);
|
union_node->setCTEName(cte_name);
|
||||||
@ -156,20 +171,23 @@ QueryTreeNodePtr QueryTreeBuilder::buildSelectWithUnionExpression(const ASTPtr &
|
|||||||
for (size_t i = 0; i < select_lists_children_size; ++i)
|
for (size_t i = 0; i < select_lists_children_size; ++i)
|
||||||
{
|
{
|
||||||
auto & select_list_node = select_lists.children[i];
|
auto & select_list_node = select_lists.children[i];
|
||||||
QueryTreeNodePtr query_node = buildSelectOrUnionExpression(select_list_node, false /*is_subquery*/, {} /*cte_name*/);
|
QueryTreeNodePtr query_node = buildSelectOrUnionExpression(select_list_node, false /*is_subquery*/, {} /*cte_name*/, context);
|
||||||
union_node->getQueries().getNodes().push_back(std::move(query_node));
|
union_node->getQueries().getNodes().push_back(std::move(query_node));
|
||||||
}
|
}
|
||||||
|
|
||||||
return union_node;
|
return union_node;
|
||||||
}
|
}
|
||||||
|
|
||||||
QueryTreeNodePtr QueryTreeBuilder::buildSelectIntersectExceptQuery(const ASTPtr & select_intersect_except_query, bool is_subquery, const std::string & cte_name) const
|
QueryTreeNodePtr QueryTreeBuilder::buildSelectIntersectExceptQuery(const ASTPtr & select_intersect_except_query,
|
||||||
|
bool is_subquery,
|
||||||
|
const std::string & cte_name,
|
||||||
|
const ContextPtr & context) const
|
||||||
{
|
{
|
||||||
auto & select_intersect_except_query_typed = select_intersect_except_query->as<ASTSelectIntersectExceptQuery &>();
|
auto & select_intersect_except_query_typed = select_intersect_except_query->as<ASTSelectIntersectExceptQuery &>();
|
||||||
auto select_lists = select_intersect_except_query_typed.getListOfSelects();
|
auto select_lists = select_intersect_except_query_typed.getListOfSelects();
|
||||||
|
|
||||||
if (select_lists.size() == 1)
|
if (select_lists.size() == 1)
|
||||||
return buildSelectExpression(select_lists[0], is_subquery, cte_name);
|
return buildSelectExpression(select_lists[0], is_subquery, cte_name, context);
|
||||||
|
|
||||||
SelectUnionMode union_mode;
|
SelectUnionMode union_mode;
|
||||||
if (select_intersect_except_query_typed.final_operator == ASTSelectIntersectExceptQuery::Operator::INTERSECT_ALL)
|
if (select_intersect_except_query_typed.final_operator == ASTSelectIntersectExceptQuery::Operator::INTERSECT_ALL)
|
||||||
@ -183,7 +201,7 @@ QueryTreeNodePtr QueryTreeBuilder::buildSelectIntersectExceptQuery(const ASTPtr
|
|||||||
else
|
else
|
||||||
throw Exception(ErrorCodes::LOGICAL_ERROR, "UNION type is not initialized");
|
throw Exception(ErrorCodes::LOGICAL_ERROR, "UNION type is not initialized");
|
||||||
|
|
||||||
auto union_node = std::make_shared<UnionNode>(union_mode);
|
auto union_node = std::make_shared<UnionNode>(Context::createCopy(context), union_mode);
|
||||||
union_node->setIsSubquery(is_subquery);
|
union_node->setIsSubquery(is_subquery);
|
||||||
union_node->setIsCTE(!cte_name.empty());
|
union_node->setIsCTE(!cte_name.empty());
|
||||||
union_node->setCTEName(cte_name);
|
union_node->setCTEName(cte_name);
|
||||||
@ -194,17 +212,32 @@ QueryTreeNodePtr QueryTreeBuilder::buildSelectIntersectExceptQuery(const ASTPtr
|
|||||||
for (size_t i = 0; i < select_lists_size; ++i)
|
for (size_t i = 0; i < select_lists_size; ++i)
|
||||||
{
|
{
|
||||||
auto & select_list_node = select_lists[i];
|
auto & select_list_node = select_lists[i];
|
||||||
QueryTreeNodePtr query_node = buildSelectOrUnionExpression(select_list_node, false /*is_subquery*/, {} /*cte_name*/);
|
QueryTreeNodePtr query_node = buildSelectOrUnionExpression(select_list_node, false /*is_subquery*/, {} /*cte_name*/, context);
|
||||||
union_node->getQueries().getNodes().push_back(std::move(query_node));
|
union_node->getQueries().getNodes().push_back(std::move(query_node));
|
||||||
}
|
}
|
||||||
|
|
||||||
return union_node;
|
return union_node;
|
||||||
}
|
}
|
||||||
|
|
||||||
QueryTreeNodePtr QueryTreeBuilder::buildSelectExpression(const ASTPtr & select_query, bool is_subquery, const std::string & cte_name) const
|
QueryTreeNodePtr QueryTreeBuilder::buildSelectExpression(const ASTPtr & select_query,
|
||||||
|
bool is_subquery,
|
||||||
|
const std::string & cte_name,
|
||||||
|
const ContextPtr & context) const
|
||||||
{
|
{
|
||||||
const auto & select_query_typed = select_query->as<ASTSelectQuery &>();
|
const auto & select_query_typed = select_query->as<ASTSelectQuery &>();
|
||||||
auto current_query_tree = std::make_shared<QueryNode>();
|
|
||||||
|
auto updated_context = Context::createCopy(context);
|
||||||
|
auto select_settings = select_query_typed.settings();
|
||||||
|
SettingsChanges settings_changes;
|
||||||
|
|
||||||
|
if (select_settings)
|
||||||
|
{
|
||||||
|
auto & set_query = select_settings->as<ASTSetQuery &>();
|
||||||
|
updated_context->applySettingsChanges(set_query.changes);
|
||||||
|
settings_changes = set_query.changes;
|
||||||
|
}
|
||||||
|
|
||||||
|
auto current_query_tree = std::make_shared<QueryNode>(std::move(updated_context), std::move(settings_changes));
|
||||||
|
|
||||||
current_query_tree->setIsSubquery(is_subquery);
|
current_query_tree->setIsSubquery(is_subquery);
|
||||||
current_query_tree->setIsCTE(!cte_name.empty());
|
current_query_tree->setIsCTE(!cte_name.empty());
|
||||||
@ -218,30 +251,25 @@ QueryTreeNodePtr QueryTreeBuilder::buildSelectExpression(const ASTPtr & select_q
|
|||||||
current_query_tree->setIsGroupByAll(select_query_typed.group_by_all);
|
current_query_tree->setIsGroupByAll(select_query_typed.group_by_all);
|
||||||
current_query_tree->setOriginalAST(select_query);
|
current_query_tree->setOriginalAST(select_query);
|
||||||
|
|
||||||
auto select_settings = select_query_typed.settings();
|
auto current_context = current_query_tree->getContext();
|
||||||
if (select_settings)
|
|
||||||
{
|
|
||||||
auto & set_query = select_settings->as<ASTSetQuery &>();
|
|
||||||
current_query_tree->setSettingsChanges(set_query.changes);
|
|
||||||
}
|
|
||||||
|
|
||||||
current_query_tree->getJoinTree() = buildJoinTree(select_query_typed.tables());
|
current_query_tree->getJoinTree() = buildJoinTree(select_query_typed.tables(), current_context);
|
||||||
|
|
||||||
auto select_with_list = select_query_typed.with();
|
auto select_with_list = select_query_typed.with();
|
||||||
if (select_with_list)
|
if (select_with_list)
|
||||||
current_query_tree->getWithNode() = buildExpressionList(select_with_list);
|
current_query_tree->getWithNode() = buildExpressionList(select_with_list, current_context);
|
||||||
|
|
||||||
auto select_expression_list = select_query_typed.select();
|
auto select_expression_list = select_query_typed.select();
|
||||||
if (select_expression_list)
|
if (select_expression_list)
|
||||||
current_query_tree->getProjectionNode() = buildExpressionList(select_expression_list);
|
current_query_tree->getProjectionNode() = buildExpressionList(select_expression_list, current_context);
|
||||||
|
|
||||||
auto prewhere_expression = select_query_typed.prewhere();
|
auto prewhere_expression = select_query_typed.prewhere();
|
||||||
if (prewhere_expression)
|
if (prewhere_expression)
|
||||||
current_query_tree->getPrewhere() = buildExpression(prewhere_expression);
|
current_query_tree->getPrewhere() = buildExpression(prewhere_expression, current_context);
|
||||||
|
|
||||||
auto where_expression = select_query_typed.where();
|
auto where_expression = select_query_typed.where();
|
||||||
if (where_expression)
|
if (where_expression)
|
||||||
current_query_tree->getWhere() = buildExpression(where_expression);
|
current_query_tree->getWhere() = buildExpression(where_expression, current_context);
|
||||||
|
|
||||||
auto group_by_list = select_query_typed.groupBy();
|
auto group_by_list = select_query_typed.groupBy();
|
||||||
if (group_by_list)
|
if (group_by_list)
|
||||||
@ -254,56 +282,56 @@ QueryTreeNodePtr QueryTreeBuilder::buildSelectExpression(const ASTPtr & select_q
|
|||||||
|
|
||||||
for (auto & grouping_sets_keys : group_by_children)
|
for (auto & grouping_sets_keys : group_by_children)
|
||||||
{
|
{
|
||||||
auto grouping_sets_keys_list_node = buildExpressionList(grouping_sets_keys);
|
auto grouping_sets_keys_list_node = buildExpressionList(grouping_sets_keys, current_context);
|
||||||
current_query_tree->getGroupBy().getNodes().emplace_back(std::move(grouping_sets_keys_list_node));
|
current_query_tree->getGroupBy().getNodes().emplace_back(std::move(grouping_sets_keys_list_node));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
{
|
{
|
||||||
current_query_tree->getGroupByNode() = buildExpressionList(group_by_list);
|
current_query_tree->getGroupByNode() = buildExpressionList(group_by_list, current_context);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
auto having_expression = select_query_typed.having();
|
auto having_expression = select_query_typed.having();
|
||||||
if (having_expression)
|
if (having_expression)
|
||||||
current_query_tree->getHaving() = buildExpression(having_expression);
|
current_query_tree->getHaving() = buildExpression(having_expression, current_context);
|
||||||
|
|
||||||
auto window_list = select_query_typed.window();
|
auto window_list = select_query_typed.window();
|
||||||
if (window_list)
|
if (window_list)
|
||||||
current_query_tree->getWindowNode() = buildWindowList(window_list);
|
current_query_tree->getWindowNode() = buildWindowList(window_list, current_context);
|
||||||
|
|
||||||
auto select_order_by_list = select_query_typed.orderBy();
|
auto select_order_by_list = select_query_typed.orderBy();
|
||||||
if (select_order_by_list)
|
if (select_order_by_list)
|
||||||
current_query_tree->getOrderByNode() = buildSortList(select_order_by_list);
|
current_query_tree->getOrderByNode() = buildSortList(select_order_by_list, current_context);
|
||||||
|
|
||||||
auto interpolate_list = select_query_typed.interpolate();
|
auto interpolate_list = select_query_typed.interpolate();
|
||||||
if (interpolate_list)
|
if (interpolate_list)
|
||||||
current_query_tree->getInterpolate() = buildInterpolateList(interpolate_list);
|
current_query_tree->getInterpolate() = buildInterpolateList(interpolate_list, current_context);
|
||||||
|
|
||||||
auto select_limit_by_limit = select_query_typed.limitByLength();
|
auto select_limit_by_limit = select_query_typed.limitByLength();
|
||||||
if (select_limit_by_limit)
|
if (select_limit_by_limit)
|
||||||
current_query_tree->getLimitByLimit() = buildExpression(select_limit_by_limit);
|
current_query_tree->getLimitByLimit() = buildExpression(select_limit_by_limit, current_context);
|
||||||
|
|
||||||
auto select_limit_by_offset = select_query_typed.limitOffset();
|
auto select_limit_by_offset = select_query_typed.limitOffset();
|
||||||
if (select_limit_by_offset)
|
if (select_limit_by_offset)
|
||||||
current_query_tree->getLimitByOffset() = buildExpression(select_limit_by_offset);
|
current_query_tree->getLimitByOffset() = buildExpression(select_limit_by_offset, current_context);
|
||||||
|
|
||||||
auto select_limit_by = select_query_typed.limitBy();
|
auto select_limit_by = select_query_typed.limitBy();
|
||||||
if (select_limit_by)
|
if (select_limit_by)
|
||||||
current_query_tree->getLimitByNode() = buildExpressionList(select_limit_by);
|
current_query_tree->getLimitByNode() = buildExpressionList(select_limit_by, current_context);
|
||||||
|
|
||||||
auto select_limit = select_query_typed.limitLength();
|
auto select_limit = select_query_typed.limitLength();
|
||||||
if (select_limit)
|
if (select_limit)
|
||||||
current_query_tree->getLimit() = buildExpression(select_limit);
|
current_query_tree->getLimit() = buildExpression(select_limit, current_context);
|
||||||
|
|
||||||
auto select_offset = select_query_typed.limitOffset();
|
auto select_offset = select_query_typed.limitOffset();
|
||||||
if (select_offset)
|
if (select_offset)
|
||||||
current_query_tree->getOffset() = buildExpression(select_offset);
|
current_query_tree->getOffset() = buildExpression(select_offset, current_context);
|
||||||
|
|
||||||
return current_query_tree;
|
return current_query_tree;
|
||||||
}
|
}
|
||||||
|
|
||||||
QueryTreeNodePtr QueryTreeBuilder::buildSortList(const ASTPtr & order_by_expression_list) const
|
QueryTreeNodePtr QueryTreeBuilder::buildSortList(const ASTPtr & order_by_expression_list, const ContextPtr & context) const
|
||||||
{
|
{
|
||||||
auto list_node = std::make_shared<ListNode>();
|
auto list_node = std::make_shared<ListNode>();
|
||||||
|
|
||||||
@ -324,7 +352,7 @@ QueryTreeNodePtr QueryTreeBuilder::buildSortList(const ASTPtr & order_by_express
|
|||||||
collator = std::make_shared<Collator>(order_by_element.collation->as<ASTLiteral &>().value.get<String &>());
|
collator = std::make_shared<Collator>(order_by_element.collation->as<ASTLiteral &>().value.get<String &>());
|
||||||
|
|
||||||
const auto & sort_expression_ast = order_by_element.children.at(0);
|
const auto & sort_expression_ast = order_by_element.children.at(0);
|
||||||
auto sort_expression = buildExpression(sort_expression_ast);
|
auto sort_expression = buildExpression(sort_expression_ast, context);
|
||||||
auto sort_node = std::make_shared<SortNode>(std::move(sort_expression),
|
auto sort_node = std::make_shared<SortNode>(std::move(sort_expression),
|
||||||
sort_direction,
|
sort_direction,
|
||||||
nulls_sort_direction,
|
nulls_sort_direction,
|
||||||
@ -332,11 +360,11 @@ QueryTreeNodePtr QueryTreeBuilder::buildSortList(const ASTPtr & order_by_express
|
|||||||
order_by_element.with_fill);
|
order_by_element.with_fill);
|
||||||
|
|
||||||
if (order_by_element.fill_from)
|
if (order_by_element.fill_from)
|
||||||
sort_node->getFillFrom() = buildExpression(order_by_element.fill_from);
|
sort_node->getFillFrom() = buildExpression(order_by_element.fill_from, context);
|
||||||
if (order_by_element.fill_to)
|
if (order_by_element.fill_to)
|
||||||
sort_node->getFillTo() = buildExpression(order_by_element.fill_to);
|
sort_node->getFillTo() = buildExpression(order_by_element.fill_to, context);
|
||||||
if (order_by_element.fill_step)
|
if (order_by_element.fill_step)
|
||||||
sort_node->getFillStep() = buildExpression(order_by_element.fill_step);
|
sort_node->getFillStep() = buildExpression(order_by_element.fill_step, context);
|
||||||
|
|
||||||
list_node->getNodes().push_back(std::move(sort_node));
|
list_node->getNodes().push_back(std::move(sort_node));
|
||||||
}
|
}
|
||||||
@ -344,7 +372,7 @@ QueryTreeNodePtr QueryTreeBuilder::buildSortList(const ASTPtr & order_by_express
|
|||||||
return list_node;
|
return list_node;
|
||||||
}
|
}
|
||||||
|
|
||||||
QueryTreeNodePtr QueryTreeBuilder::buildInterpolateList(const ASTPtr & interpolate_expression_list) const
|
QueryTreeNodePtr QueryTreeBuilder::buildInterpolateList(const ASTPtr & interpolate_expression_list, const ContextPtr & context) const
|
||||||
{
|
{
|
||||||
auto list_node = std::make_shared<ListNode>();
|
auto list_node = std::make_shared<ListNode>();
|
||||||
|
|
||||||
@ -355,7 +383,7 @@ QueryTreeNodePtr QueryTreeBuilder::buildInterpolateList(const ASTPtr & interpola
|
|||||||
{
|
{
|
||||||
const auto & interpolate_element = expression->as<const ASTInterpolateElement &>();
|
const auto & interpolate_element = expression->as<const ASTInterpolateElement &>();
|
||||||
auto expression_to_interpolate = std::make_shared<IdentifierNode>(Identifier(interpolate_element.column));
|
auto expression_to_interpolate = std::make_shared<IdentifierNode>(Identifier(interpolate_element.column));
|
||||||
auto interpolate_expression = buildExpression(interpolate_element.expr);
|
auto interpolate_expression = buildExpression(interpolate_element.expr, context);
|
||||||
auto interpolate_node = std::make_shared<InterpolateNode>(std::move(expression_to_interpolate), std::move(interpolate_expression));
|
auto interpolate_node = std::make_shared<InterpolateNode>(std::move(expression_to_interpolate), std::move(interpolate_expression));
|
||||||
|
|
||||||
list_node->getNodes().push_back(std::move(interpolate_node));
|
list_node->getNodes().push_back(std::move(interpolate_node));
|
||||||
@ -364,7 +392,7 @@ QueryTreeNodePtr QueryTreeBuilder::buildInterpolateList(const ASTPtr & interpola
|
|||||||
return list_node;
|
return list_node;
|
||||||
}
|
}
|
||||||
|
|
||||||
QueryTreeNodePtr QueryTreeBuilder::buildWindowList(const ASTPtr & window_definition_list) const
|
QueryTreeNodePtr QueryTreeBuilder::buildWindowList(const ASTPtr & window_definition_list, const ContextPtr & context) const
|
||||||
{
|
{
|
||||||
auto list_node = std::make_shared<ListNode>();
|
auto list_node = std::make_shared<ListNode>();
|
||||||
|
|
||||||
@ -375,7 +403,7 @@ QueryTreeNodePtr QueryTreeBuilder::buildWindowList(const ASTPtr & window_definit
|
|||||||
{
|
{
|
||||||
const auto & window_list_element_typed = window_list_element->as<const ASTWindowListElement &>();
|
const auto & window_list_element_typed = window_list_element->as<const ASTWindowListElement &>();
|
||||||
|
|
||||||
auto window_node = buildWindow(window_list_element_typed.definition);
|
auto window_node = buildWindow(window_list_element_typed.definition, context);
|
||||||
window_node->setAlias(window_list_element_typed.name);
|
window_node->setAlias(window_list_element_typed.name);
|
||||||
|
|
||||||
list_node->getNodes().push_back(std::move(window_node));
|
list_node->getNodes().push_back(std::move(window_node));
|
||||||
@ -384,7 +412,7 @@ QueryTreeNodePtr QueryTreeBuilder::buildWindowList(const ASTPtr & window_definit
|
|||||||
return list_node;
|
return list_node;
|
||||||
}
|
}
|
||||||
|
|
||||||
QueryTreeNodePtr QueryTreeBuilder::buildExpressionList(const ASTPtr & expression_list) const
|
QueryTreeNodePtr QueryTreeBuilder::buildExpressionList(const ASTPtr & expression_list, const ContextPtr & context) const
|
||||||
{
|
{
|
||||||
auto list_node = std::make_shared<ListNode>();
|
auto list_node = std::make_shared<ListNode>();
|
||||||
|
|
||||||
@ -393,14 +421,14 @@ QueryTreeNodePtr QueryTreeBuilder::buildExpressionList(const ASTPtr & expression
|
|||||||
|
|
||||||
for (auto & expression : expression_list_typed.children)
|
for (auto & expression : expression_list_typed.children)
|
||||||
{
|
{
|
||||||
auto expression_node = buildExpression(expression);
|
auto expression_node = buildExpression(expression, context);
|
||||||
list_node->getNodes().push_back(std::move(expression_node));
|
list_node->getNodes().push_back(std::move(expression_node));
|
||||||
}
|
}
|
||||||
|
|
||||||
return list_node;
|
return list_node;
|
||||||
}
|
}
|
||||||
|
|
||||||
QueryTreeNodePtr QueryTreeBuilder::buildExpression(const ASTPtr & expression) const
|
QueryTreeNodePtr QueryTreeBuilder::buildExpression(const ASTPtr & expression, const ContextPtr & context) const
|
||||||
{
|
{
|
||||||
QueryTreeNodePtr result;
|
QueryTreeNodePtr result;
|
||||||
|
|
||||||
@ -411,13 +439,13 @@ QueryTreeNodePtr QueryTreeBuilder::buildExpression(const ASTPtr & expression) co
|
|||||||
}
|
}
|
||||||
else if (const auto * asterisk = expression->as<ASTAsterisk>())
|
else if (const auto * asterisk = expression->as<ASTAsterisk>())
|
||||||
{
|
{
|
||||||
auto column_transformers = buildColumnTransformers(expression, 0 /*start_child_index*/);
|
auto column_transformers = buildColumnTransformers(expression, 0 /*start_child_index*/, context);
|
||||||
result = std::make_shared<MatcherNode>(std::move(column_transformers));
|
result = std::make_shared<MatcherNode>(std::move(column_transformers));
|
||||||
}
|
}
|
||||||
else if (const auto * qualified_asterisk = expression->as<ASTQualifiedAsterisk>())
|
else if (const auto * qualified_asterisk = expression->as<ASTQualifiedAsterisk>())
|
||||||
{
|
{
|
||||||
auto & qualified_identifier = qualified_asterisk->children.at(0)->as<ASTTableIdentifier &>();
|
auto & qualified_identifier = qualified_asterisk->children.at(0)->as<ASTTableIdentifier &>();
|
||||||
auto column_transformers = buildColumnTransformers(expression, 1 /*start_child_index*/);
|
auto column_transformers = buildColumnTransformers(expression, 1 /*start_child_index*/, context);
|
||||||
result = std::make_shared<MatcherNode>(Identifier(qualified_identifier.name_parts), std::move(column_transformers));
|
result = std::make_shared<MatcherNode>(Identifier(qualified_identifier.name_parts), std::move(column_transformers));
|
||||||
}
|
}
|
||||||
else if (const auto * ast_literal = expression->as<ASTLiteral>())
|
else if (const auto * ast_literal = expression->as<ASTLiteral>())
|
||||||
@ -466,7 +494,7 @@ QueryTreeNodePtr QueryTreeBuilder::buildExpression(const ASTPtr & expression) co
|
|||||||
}
|
}
|
||||||
|
|
||||||
const auto & lambda_expression = lambda_arguments_and_expression.at(1);
|
const auto & lambda_expression = lambda_arguments_and_expression.at(1);
|
||||||
auto lambda_expression_node = buildExpression(lambda_expression);
|
auto lambda_expression_node = buildExpression(lambda_expression, context);
|
||||||
|
|
||||||
result = std::make_shared<LambdaNode>(std::move(lambda_arguments), std::move(lambda_expression_node));
|
result = std::make_shared<LambdaNode>(std::move(lambda_arguments), std::move(lambda_expression_node));
|
||||||
}
|
}
|
||||||
@ -478,20 +506,20 @@ QueryTreeNodePtr QueryTreeBuilder::buildExpression(const ASTPtr & expression) co
|
|||||||
{
|
{
|
||||||
const auto & function_parameters_list = function->parameters->as<ASTExpressionList>()->children;
|
const auto & function_parameters_list = function->parameters->as<ASTExpressionList>()->children;
|
||||||
for (const auto & argument : function_parameters_list)
|
for (const auto & argument : function_parameters_list)
|
||||||
function_node->getParameters().getNodes().push_back(buildExpression(argument));
|
function_node->getParameters().getNodes().push_back(buildExpression(argument, context));
|
||||||
}
|
}
|
||||||
|
|
||||||
if (function->arguments)
|
if (function->arguments)
|
||||||
{
|
{
|
||||||
const auto & function_arguments_list = function->arguments->as<ASTExpressionList>()->children;
|
const auto & function_arguments_list = function->arguments->as<ASTExpressionList>()->children;
|
||||||
for (const auto & argument : function_arguments_list)
|
for (const auto & argument : function_arguments_list)
|
||||||
function_node->getArguments().getNodes().push_back(buildExpression(argument));
|
function_node->getArguments().getNodes().push_back(buildExpression(argument, context));
|
||||||
}
|
}
|
||||||
|
|
||||||
if (function->is_window_function)
|
if (function->is_window_function)
|
||||||
{
|
{
|
||||||
if (function->window_definition)
|
if (function->window_definition)
|
||||||
function_node->getWindowNode() = buildWindow(function->window_definition);
|
function_node->getWindowNode() = buildWindow(function->window_definition, context);
|
||||||
else
|
else
|
||||||
function_node->getWindowNode() = std::make_shared<IdentifierNode>(Identifier(function->window_name));
|
function_node->getWindowNode() = std::make_shared<IdentifierNode>(Identifier(function->window_name));
|
||||||
}
|
}
|
||||||
@ -502,20 +530,20 @@ QueryTreeNodePtr QueryTreeBuilder::buildExpression(const ASTPtr & expression) co
|
|||||||
else if (const auto * subquery = expression->as<ASTSubquery>())
|
else if (const auto * subquery = expression->as<ASTSubquery>())
|
||||||
{
|
{
|
||||||
auto subquery_query = subquery->children[0];
|
auto subquery_query = subquery->children[0];
|
||||||
auto query_node = buildSelectWithUnionExpression(subquery_query, true /*is_subquery*/, {} /*cte_name*/);
|
auto query_node = buildSelectWithUnionExpression(subquery_query, true /*is_subquery*/, {} /*cte_name*/, context);
|
||||||
|
|
||||||
result = std::move(query_node);
|
result = std::move(query_node);
|
||||||
}
|
}
|
||||||
else if (const auto * with_element = expression->as<ASTWithElement>())
|
else if (const auto * with_element = expression->as<ASTWithElement>())
|
||||||
{
|
{
|
||||||
auto with_element_subquery = with_element->subquery->as<ASTSubquery &>().children.at(0);
|
auto with_element_subquery = with_element->subquery->as<ASTSubquery &>().children.at(0);
|
||||||
auto query_node = buildSelectWithUnionExpression(with_element_subquery, true /*is_subquery*/, with_element->name /*cte_name*/);
|
auto query_node = buildSelectWithUnionExpression(with_element_subquery, true /*is_subquery*/, with_element->name /*cte_name*/, context);
|
||||||
|
|
||||||
result = std::move(query_node);
|
result = std::move(query_node);
|
||||||
}
|
}
|
||||||
else if (const auto * columns_regexp_matcher = expression->as<ASTColumnsRegexpMatcher>())
|
else if (const auto * columns_regexp_matcher = expression->as<ASTColumnsRegexpMatcher>())
|
||||||
{
|
{
|
||||||
auto column_transformers = buildColumnTransformers(expression, 0 /*start_child_index*/);
|
auto column_transformers = buildColumnTransformers(expression, 0 /*start_child_index*/, context);
|
||||||
result = std::make_shared<MatcherNode>(columns_regexp_matcher->getMatcher(), std::move(column_transformers));
|
result = std::make_shared<MatcherNode>(columns_regexp_matcher->getMatcher(), std::move(column_transformers));
|
||||||
}
|
}
|
||||||
else if (const auto * columns_list_matcher = expression->as<ASTColumnsListMatcher>())
|
else if (const auto * columns_list_matcher = expression->as<ASTColumnsListMatcher>())
|
||||||
@ -529,13 +557,13 @@ QueryTreeNodePtr QueryTreeBuilder::buildExpression(const ASTPtr & expression) co
|
|||||||
column_list_identifiers.emplace_back(Identifier{column_list_identifier.name_parts});
|
column_list_identifiers.emplace_back(Identifier{column_list_identifier.name_parts});
|
||||||
}
|
}
|
||||||
|
|
||||||
auto column_transformers = buildColumnTransformers(expression, 0 /*start_child_index*/);
|
auto column_transformers = buildColumnTransformers(expression, 0 /*start_child_index*/, context);
|
||||||
result = std::make_shared<MatcherNode>(std::move(column_list_identifiers), std::move(column_transformers));
|
result = std::make_shared<MatcherNode>(std::move(column_list_identifiers), std::move(column_transformers));
|
||||||
}
|
}
|
||||||
else if (const auto * qualified_columns_regexp_matcher = expression->as<ASTQualifiedColumnsRegexpMatcher>())
|
else if (const auto * qualified_columns_regexp_matcher = expression->as<ASTQualifiedColumnsRegexpMatcher>())
|
||||||
{
|
{
|
||||||
auto & qualified_identifier = qualified_columns_regexp_matcher->children.at(0)->as<ASTTableIdentifier &>();
|
auto & qualified_identifier = qualified_columns_regexp_matcher->children.at(0)->as<ASTTableIdentifier &>();
|
||||||
auto column_transformers = buildColumnTransformers(expression, 1 /*start_child_index*/);
|
auto column_transformers = buildColumnTransformers(expression, 1 /*start_child_index*/, context);
|
||||||
result = std::make_shared<MatcherNode>(Identifier(qualified_identifier.name_parts), qualified_columns_regexp_matcher->getMatcher(), std::move(column_transformers));
|
result = std::make_shared<MatcherNode>(Identifier(qualified_identifier.name_parts), qualified_columns_regexp_matcher->getMatcher(), std::move(column_transformers));
|
||||||
}
|
}
|
||||||
else if (const auto * qualified_columns_list_matcher = expression->as<ASTQualifiedColumnsListMatcher>())
|
else if (const auto * qualified_columns_list_matcher = expression->as<ASTQualifiedColumnsListMatcher>())
|
||||||
@ -551,7 +579,7 @@ QueryTreeNodePtr QueryTreeBuilder::buildExpression(const ASTPtr & expression) co
|
|||||||
column_list_identifiers.emplace_back(Identifier{column_list_identifier.name_parts});
|
column_list_identifiers.emplace_back(Identifier{column_list_identifier.name_parts});
|
||||||
}
|
}
|
||||||
|
|
||||||
auto column_transformers = buildColumnTransformers(expression, 1 /*start_child_index*/);
|
auto column_transformers = buildColumnTransformers(expression, 1 /*start_child_index*/, context);
|
||||||
result = std::make_shared<MatcherNode>(Identifier(qualified_identifier.name_parts), std::move(column_list_identifiers), std::move(column_transformers));
|
result = std::make_shared<MatcherNode>(Identifier(qualified_identifier.name_parts), std::move(column_list_identifiers), std::move(column_transformers));
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
@ -567,7 +595,7 @@ QueryTreeNodePtr QueryTreeBuilder::buildExpression(const ASTPtr & expression) co
|
|||||||
return result;
|
return result;
|
||||||
}
|
}
|
||||||
|
|
||||||
QueryTreeNodePtr QueryTreeBuilder::buildWindow(const ASTPtr & window_definition) const
|
QueryTreeNodePtr QueryTreeBuilder::buildWindow(const ASTPtr & window_definition, const ContextPtr & context) const
|
||||||
{
|
{
|
||||||
const auto & window_definition_typed = window_definition->as<const ASTWindowDefinition &>();
|
const auto & window_definition_typed = window_definition->as<const ASTWindowDefinition &>();
|
||||||
WindowFrame window_frame;
|
WindowFrame window_frame;
|
||||||
@ -586,23 +614,23 @@ QueryTreeNodePtr QueryTreeBuilder::buildWindow(const ASTPtr & window_definition)
|
|||||||
window_node->setParentWindowName(window_definition_typed.parent_window_name);
|
window_node->setParentWindowName(window_definition_typed.parent_window_name);
|
||||||
|
|
||||||
if (window_definition_typed.partition_by)
|
if (window_definition_typed.partition_by)
|
||||||
window_node->getPartitionByNode() = buildExpressionList(window_definition_typed.partition_by);
|
window_node->getPartitionByNode() = buildExpressionList(window_definition_typed.partition_by, context);
|
||||||
|
|
||||||
if (window_definition_typed.order_by)
|
if (window_definition_typed.order_by)
|
||||||
window_node->getOrderByNode() = buildSortList(window_definition_typed.order_by);
|
window_node->getOrderByNode() = buildSortList(window_definition_typed.order_by, context);
|
||||||
|
|
||||||
if (window_definition_typed.frame_begin_offset)
|
if (window_definition_typed.frame_begin_offset)
|
||||||
window_node->getFrameBeginOffsetNode() = buildExpression(window_definition_typed.frame_begin_offset);
|
window_node->getFrameBeginOffsetNode() = buildExpression(window_definition_typed.frame_begin_offset, context);
|
||||||
|
|
||||||
if (window_definition_typed.frame_end_offset)
|
if (window_definition_typed.frame_end_offset)
|
||||||
window_node->getFrameEndOffsetNode() = buildExpression(window_definition_typed.frame_end_offset);
|
window_node->getFrameEndOffsetNode() = buildExpression(window_definition_typed.frame_end_offset, context);
|
||||||
|
|
||||||
window_node->setOriginalAST(window_definition);
|
window_node->setOriginalAST(window_definition);
|
||||||
|
|
||||||
return window_node;
|
return window_node;
|
||||||
}
|
}
|
||||||
|
|
||||||
QueryTreeNodePtr QueryTreeBuilder::buildJoinTree(const ASTPtr & tables_in_select_query) const
|
QueryTreeNodePtr QueryTreeBuilder::buildJoinTree(const ASTPtr & tables_in_select_query, const ContextPtr & context) const
|
||||||
{
|
{
|
||||||
if (!tables_in_select_query)
|
if (!tables_in_select_query)
|
||||||
{
|
{
|
||||||
@ -668,7 +696,7 @@ QueryTreeNodePtr QueryTreeBuilder::buildJoinTree(const ASTPtr & tables_in_select
|
|||||||
auto & subquery_expression = table_expression.subquery->as<ASTSubquery &>();
|
auto & subquery_expression = table_expression.subquery->as<ASTSubquery &>();
|
||||||
const auto & select_with_union_query = subquery_expression.children[0];
|
const auto & select_with_union_query = subquery_expression.children[0];
|
||||||
|
|
||||||
auto node = buildSelectWithUnionExpression(select_with_union_query, true /*is_subquery*/, {} /*cte_name*/);
|
auto node = buildSelectWithUnionExpression(select_with_union_query, true /*is_subquery*/, {} /*cte_name*/, context);
|
||||||
node->setAlias(subquery_expression.tryGetAlias());
|
node->setAlias(subquery_expression.tryGetAlias());
|
||||||
node->setOriginalAST(select_with_union_query);
|
node->setOriginalAST(select_with_union_query);
|
||||||
|
|
||||||
@ -694,9 +722,9 @@ QueryTreeNodePtr QueryTreeBuilder::buildJoinTree(const ASTPtr & tables_in_select
|
|||||||
for (const auto & argument : function_arguments_list)
|
for (const auto & argument : function_arguments_list)
|
||||||
{
|
{
|
||||||
if (argument->as<ASTSelectQuery>() || argument->as<ASTSelectWithUnionQuery>() || argument->as<ASTSelectIntersectExceptQuery>())
|
if (argument->as<ASTSelectQuery>() || argument->as<ASTSelectWithUnionQuery>() || argument->as<ASTSelectIntersectExceptQuery>())
|
||||||
node->getArguments().getNodes().push_back(buildSelectOrUnionExpression(argument, false /*is_subquery*/, {} /*cte_name*/));
|
node->getArguments().getNodes().push_back(buildSelectOrUnionExpression(argument, false /*is_subquery*/, {} /*cte_name*/, context));
|
||||||
else
|
else
|
||||||
node->getArguments().getNodes().push_back(buildExpression(argument));
|
node->getArguments().getNodes().push_back(buildExpression(argument, context));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -726,9 +754,9 @@ QueryTreeNodePtr QueryTreeBuilder::buildJoinTree(const ASTPtr & tables_in_select
|
|||||||
QueryTreeNodePtr join_expression;
|
QueryTreeNodePtr join_expression;
|
||||||
|
|
||||||
if (table_join.using_expression_list)
|
if (table_join.using_expression_list)
|
||||||
join_expression = buildExpressionList(table_join.using_expression_list);
|
join_expression = buildExpressionList(table_join.using_expression_list, context);
|
||||||
else if (table_join.on_expression)
|
else if (table_join.on_expression)
|
||||||
join_expression = buildExpression(table_join.on_expression);
|
join_expression = buildExpression(table_join.on_expression, context);
|
||||||
|
|
||||||
const auto & settings = context->getSettingsRef();
|
const auto & settings = context->getSettingsRef();
|
||||||
auto join_default_strictness = settings.join_default_strictness;
|
auto join_default_strictness = settings.join_default_strictness;
|
||||||
@ -785,7 +813,7 @@ QueryTreeNodePtr QueryTreeBuilder::buildJoinTree(const ASTPtr & tables_in_select
|
|||||||
auto last_table_expression = std::move(table_expressions.back());
|
auto last_table_expression = std::move(table_expressions.back());
|
||||||
table_expressions.pop_back();
|
table_expressions.pop_back();
|
||||||
|
|
||||||
auto array_join_expressions_list = buildExpressionList(array_join_expression.expression_list);
|
auto array_join_expressions_list = buildExpressionList(array_join_expression.expression_list, context);
|
||||||
auto array_join_node = std::make_shared<ArrayJoinNode>(std::move(last_table_expression), std::move(array_join_expressions_list), is_left_array_join);
|
auto array_join_node = std::make_shared<ArrayJoinNode>(std::move(last_table_expression), std::move(array_join_expressions_list), is_left_array_join);
|
||||||
|
|
||||||
/** Original AST is not set because it will contain only array join part and does
|
/** Original AST is not set because it will contain only array join part and does
|
||||||
@ -805,7 +833,7 @@ QueryTreeNodePtr QueryTreeBuilder::buildJoinTree(const ASTPtr & tables_in_select
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
ColumnTransformersNodes QueryTreeBuilder::buildColumnTransformers(const ASTPtr & matcher_expression, size_t start_child_index) const
|
ColumnTransformersNodes QueryTreeBuilder::buildColumnTransformers(const ASTPtr & matcher_expression, size_t start_child_index, const ContextPtr & context) const
|
||||||
{
|
{
|
||||||
ColumnTransformersNodes column_transformers;
|
ColumnTransformersNodes column_transformers;
|
||||||
size_t children_size = matcher_expression->children.size();
|
size_t children_size = matcher_expression->children.size();
|
||||||
@ -818,14 +846,14 @@ ColumnTransformersNodes QueryTreeBuilder::buildColumnTransformers(const ASTPtr &
|
|||||||
{
|
{
|
||||||
if (apply_transformer->lambda)
|
if (apply_transformer->lambda)
|
||||||
{
|
{
|
||||||
auto lambda_query_tree_node = buildExpression(apply_transformer->lambda);
|
auto lambda_query_tree_node = buildExpression(apply_transformer->lambda, context);
|
||||||
column_transformers.emplace_back(std::make_shared<ApplyColumnTransformerNode>(std::move(lambda_query_tree_node)));
|
column_transformers.emplace_back(std::make_shared<ApplyColumnTransformerNode>(std::move(lambda_query_tree_node)));
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
{
|
{
|
||||||
auto function_node = std::make_shared<FunctionNode>(apply_transformer->func_name);
|
auto function_node = std::make_shared<FunctionNode>(apply_transformer->func_name);
|
||||||
if (apply_transformer->parameters)
|
if (apply_transformer->parameters)
|
||||||
function_node->getParametersNode() = buildExpressionList(apply_transformer->parameters);
|
function_node->getParametersNode() = buildExpressionList(apply_transformer->parameters, context);
|
||||||
|
|
||||||
column_transformers.emplace_back(std::make_shared<ApplyColumnTransformerNode>(std::move(function_node)));
|
column_transformers.emplace_back(std::make_shared<ApplyColumnTransformerNode>(std::move(function_node)));
|
||||||
}
|
}
|
||||||
@ -856,7 +884,7 @@ ColumnTransformersNodes QueryTreeBuilder::buildColumnTransformers(const ASTPtr &
|
|||||||
for (const auto & replace_transformer_child : replace_transformer->children)
|
for (const auto & replace_transformer_child : replace_transformer->children)
|
||||||
{
|
{
|
||||||
auto & replacement = replace_transformer_child->as<ASTColumnsReplaceTransformer::Replacement &>();
|
auto & replacement = replace_transformer_child->as<ASTColumnsReplaceTransformer::Replacement &>();
|
||||||
replacements.emplace_back(ReplaceColumnTransformerNode::Replacement{replacement.name, buildExpression(replacement.expr)});
|
replacements.emplace_back(ReplaceColumnTransformerNode::Replacement{replacement.name, buildExpression(replacement.expr, context)});
|
||||||
}
|
}
|
||||||
|
|
||||||
column_transformers.emplace_back(std::make_shared<ReplaceColumnTransformerNode>(replacements, replace_transformer->is_strict));
|
column_transformers.emplace_back(std::make_shared<ReplaceColumnTransformerNode>(replacements, replace_transformer->is_strict));
|
||||||
|
@ -13,6 +13,8 @@ namespace DB
|
|||||||
* AST that represent query ASTSelectWithUnionQuery, ASTSelectIntersectExceptQuery, ASTSelectQuery.
|
* AST that represent query ASTSelectWithUnionQuery, ASTSelectIntersectExceptQuery, ASTSelectQuery.
|
||||||
* AST that represent a list of expressions ASTExpressionList.
|
* AST that represent a list of expressions ASTExpressionList.
|
||||||
* AST that represent expression ASTIdentifier, ASTAsterisk, ASTLiteral, ASTFunction.
|
* AST that represent expression ASTIdentifier, ASTAsterisk, ASTLiteral, ASTFunction.
|
||||||
|
*
|
||||||
|
* For QUERY and UNION nodes contexts are created with respect to specified SETTINGS.
|
||||||
*/
|
*/
|
||||||
QueryTreeNodePtr buildQueryTree(ASTPtr query, ContextPtr context);
|
QueryTreeNodePtr buildQueryTree(ASTPtr query, ContextPtr context);
|
||||||
|
|
||||||
|
@ -3,8 +3,6 @@
|
|||||||
#include <Common/SipHash.h>
|
#include <Common/SipHash.h>
|
||||||
#include <Common/FieldVisitorToString.h>
|
#include <Common/FieldVisitorToString.h>
|
||||||
|
|
||||||
#include <Core/NamesAndTypes.h>
|
|
||||||
|
|
||||||
#include <IO/WriteBuffer.h>
|
#include <IO/WriteBuffer.h>
|
||||||
#include <IO/WriteHelpers.h>
|
#include <IO/WriteHelpers.h>
|
||||||
#include <IO/Operators.h>
|
#include <IO/Operators.h>
|
||||||
@ -18,9 +16,12 @@
|
|||||||
#include <Parsers/ASTFunction.h>
|
#include <Parsers/ASTFunction.h>
|
||||||
|
|
||||||
#include <Core/ColumnWithTypeAndName.h>
|
#include <Core/ColumnWithTypeAndName.h>
|
||||||
|
#include <Core/NamesAndTypes.h>
|
||||||
|
|
||||||
#include <DataTypes/getLeastSupertype.h>
|
#include <DataTypes/getLeastSupertype.h>
|
||||||
|
|
||||||
|
#include <Interpreters/Context.h>
|
||||||
|
|
||||||
#include <Analyzer/QueryNode.h>
|
#include <Analyzer/QueryNode.h>
|
||||||
#include <Analyzer/Utils.h>
|
#include <Analyzer/Utils.h>
|
||||||
|
|
||||||
@ -33,8 +34,9 @@ namespace ErrorCodes
|
|||||||
extern const int BAD_ARGUMENTS;
|
extern const int BAD_ARGUMENTS;
|
||||||
}
|
}
|
||||||
|
|
||||||
UnionNode::UnionNode(SelectUnionMode union_mode_)
|
UnionNode::UnionNode(ContextMutablePtr context_, SelectUnionMode union_mode_)
|
||||||
: IQueryTreeNode(children_size)
|
: IQueryTreeNode(children_size)
|
||||||
|
, context(std::move(context_))
|
||||||
, union_mode(union_mode_)
|
, union_mode(union_mode_)
|
||||||
{
|
{
|
||||||
if (union_mode == SelectUnionMode::UNION_DEFAULT ||
|
if (union_mode == SelectUnionMode::UNION_DEFAULT ||
|
||||||
@ -129,7 +131,7 @@ void UnionNode::updateTreeHashImpl(HashState & state) const
|
|||||||
|
|
||||||
QueryTreeNodePtr UnionNode::cloneImpl() const
|
QueryTreeNodePtr UnionNode::cloneImpl() const
|
||||||
{
|
{
|
||||||
auto result_union_node = std::make_shared<UnionNode>(union_mode);
|
auto result_union_node = std::make_shared<UnionNode>(context, union_mode);
|
||||||
|
|
||||||
result_union_node->is_subquery = is_subquery;
|
result_union_node->is_subquery = is_subquery;
|
||||||
result_union_node->is_cte = is_cte;
|
result_union_node->is_cte = is_cte;
|
||||||
|
@ -3,12 +3,14 @@
|
|||||||
#include <Core/NamesAndTypes.h>
|
#include <Core/NamesAndTypes.h>
|
||||||
#include <Core/Field.h>
|
#include <Core/Field.h>
|
||||||
|
|
||||||
|
#include <Parsers/SelectUnionMode.h>
|
||||||
|
|
||||||
#include <Analyzer/Identifier.h>
|
#include <Analyzer/Identifier.h>
|
||||||
#include <Analyzer/IQueryTreeNode.h>
|
#include <Analyzer/IQueryTreeNode.h>
|
||||||
#include <Analyzer/ListNode.h>
|
#include <Analyzer/ListNode.h>
|
||||||
#include <Analyzer/TableExpressionModifiers.h>
|
#include <Analyzer/TableExpressionModifiers.h>
|
||||||
|
|
||||||
#include <Parsers/SelectUnionMode.h>
|
#include <Interpreters/Context_fwd.h>
|
||||||
|
|
||||||
namespace DB
|
namespace DB
|
||||||
{
|
{
|
||||||
@ -37,8 +39,26 @@ using UnionNodePtr = std::shared_ptr<UnionNode>;
|
|||||||
class UnionNode final : public IQueryTreeNode
|
class UnionNode final : public IQueryTreeNode
|
||||||
{
|
{
|
||||||
public:
|
public:
|
||||||
/// Construct union node with normalized union mode
|
/// Construct union node with context and normalized union mode
|
||||||
explicit UnionNode(SelectUnionMode union_mode_);
|
explicit UnionNode(ContextMutablePtr context_, SelectUnionMode union_mode_);
|
||||||
|
|
||||||
|
/// Get context
|
||||||
|
ContextPtr getContext() const
|
||||||
|
{
|
||||||
|
return context;
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get mutable context
|
||||||
|
const ContextMutablePtr & getMutableContext() const
|
||||||
|
{
|
||||||
|
return context;
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get mutable context
|
||||||
|
ContextMutablePtr & getMutableContext()
|
||||||
|
{
|
||||||
|
return context;
|
||||||
|
}
|
||||||
|
|
||||||
/// Returns true if union node is subquery, false otherwise
|
/// Returns true if union node is subquery, false otherwise
|
||||||
bool isSubquery() const
|
bool isSubquery() const
|
||||||
@ -129,6 +149,7 @@ private:
|
|||||||
bool is_subquery = false;
|
bool is_subquery = false;
|
||||||
bool is_cte = false;
|
bool is_cte = false;
|
||||||
std::string cte_name;
|
std::string cte_name;
|
||||||
|
ContextMutablePtr context;
|
||||||
SelectUnionMode union_mode;
|
SelectUnionMode union_mode;
|
||||||
|
|
||||||
static constexpr size_t queries_child_index = 0;
|
static constexpr size_t queries_child_index = 0;
|
||||||
|
@ -1,5 +1,9 @@
|
|||||||
add_subdirectory(StringUtils)
|
add_subdirectory(StringUtils)
|
||||||
|
|
||||||
|
if (ENABLE_BENCHMARKS)
|
||||||
|
add_subdirectory(benchmarks)
|
||||||
|
endif()
|
||||||
|
|
||||||
if (ENABLE_EXAMPLES)
|
if (ENABLE_EXAMPLES)
|
||||||
add_subdirectory(examples)
|
add_subdirectory(examples)
|
||||||
endif()
|
endif()
|
||||||
|
@ -1204,6 +1204,11 @@ public:
|
|||||||
return res;
|
return res;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
template <typename DateOrTime>
|
||||||
|
inline DateTimeComponents toDateTimeComponents(DateOrTime v) const
|
||||||
|
{
|
||||||
|
return toDateTimeComponents(lut[toLUTIndex(v)].date);
|
||||||
|
}
|
||||||
|
|
||||||
inline UInt64 toNumYYYYMMDDhhmmss(Time t) const
|
inline UInt64 toNumYYYYMMDDhhmmss(Time t) const
|
||||||
{
|
{
|
||||||
|
@ -3,7 +3,6 @@
|
|||||||
#include "Epoll.h"
|
#include "Epoll.h"
|
||||||
#include <Common/Exception.h>
|
#include <Common/Exception.h>
|
||||||
#include <unistd.h>
|
#include <unistd.h>
|
||||||
#include <Common/logger_useful.h>
|
|
||||||
|
|
||||||
namespace DB
|
namespace DB
|
||||||
{
|
{
|
||||||
@ -70,9 +69,6 @@ size_t Epoll::getManyReady(int max_events, epoll_event * events_out, bool blocki
|
|||||||
|
|
||||||
if (ready_size == -1 && errno != EINTR)
|
if (ready_size == -1 && errno != EINTR)
|
||||||
throwFromErrno("Error in epoll_wait", DB::ErrorCodes::EPOLL_ERROR);
|
throwFromErrno("Error in epoll_wait", DB::ErrorCodes::EPOLL_ERROR);
|
||||||
|
|
||||||
if (errno == EINTR)
|
|
||||||
LOG_TEST(&Poco::Logger::get("Epoll"), "EINTR");
|
|
||||||
}
|
}
|
||||||
while (ready_size <= 0 && (ready_size != 0 || blocking));
|
while (ready_size <= 0 && (ready_size != 0 || blocking));
|
||||||
|
|
||||||
|
@ -637,8 +637,9 @@
|
|||||||
M(666, CANNOT_USE_CACHE) \
|
M(666, CANNOT_USE_CACHE) \
|
||||||
M(667, NOT_INITIALIZED) \
|
M(667, NOT_INITIALIZED) \
|
||||||
M(668, INVALID_STATE) \
|
M(668, INVALID_STATE) \
|
||||||
M(669, UNKNOWN_NAMED_COLLECTION) \
|
M(669, NAMED_COLLECTION_DOESNT_EXIST) \
|
||||||
M(670, NAMED_COLLECTION_ALREADY_EXISTS) \
|
M(670, NAMED_COLLECTION_ALREADY_EXISTS) \
|
||||||
|
M(671, NAMED_COLLECTION_IS_IMMUTABLE) \
|
||||||
\
|
\
|
||||||
M(999, KEEPER_EXCEPTION) \
|
M(999, KEEPER_EXCEPTION) \
|
||||||
M(1000, POCO_EXCEPTION) \
|
M(1000, POCO_EXCEPTION) \
|
||||||
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user