diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index d0a6189313d..24b3e178651 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -211,12 +211,12 @@ jobs: REPO_COPY: ${{runner.temp}}/build_check/ClickHouse CACHES_PATH: ${{runner.temp}}/../ccaches CHECK_NAME: 'ClickHouse build check (actions)' - BUILD_NUMBER: 0 + BUILD_NAME: 'package_release' run: | sudo rm -fr $TEMP_PATH mkdir -p $TEMP_PATH cp -r $GITHUB_WORKSPACE $TEMP_PATH - cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER + cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME - name: Upload build URLs to artifacts uses: actions/upload-artifact@v2 with: @@ -250,12 +250,12 @@ jobs: REPO_COPY: ${{runner.temp}}/build_check/ClickHouse CACHES_PATH: ${{runner.temp}}/../ccaches CHECK_NAME: 'ClickHouse build check (actions)' - BUILD_NUMBER: 8 + BUILD_NAME: 'binary_release' run: | sudo rm -fr $TEMP_PATH mkdir -p $TEMP_PATH cp -r $GITHUB_WORKSPACE $TEMP_PATH - cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER + cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME - name: Upload build URLs to artifacts uses: actions/upload-artifact@v2 with: @@ -289,12 +289,12 @@ jobs: REPO_COPY: ${{runner.temp}}/build_check/ClickHouse CACHES_PATH: ${{runner.temp}}/../ccaches CHECK_NAME: 'ClickHouse build check (actions)' - BUILD_NUMBER: 3 + BUILD_NAME: 'package_asan' run: | sudo rm -fr $TEMP_PATH mkdir -p $TEMP_PATH cp -r $GITHUB_WORKSPACE $TEMP_PATH - cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER + cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME - name: Upload build URLs to artifacts uses: actions/upload-artifact@v2 with: @@ -328,12 +328,12 @@ jobs: REPO_COPY: ${{runner.temp}}/build_check/ClickHouse CACHES_PATH: ${{runner.temp}}/../ccaches CHECK_NAME: 'ClickHouse build check (actions)' - BUILD_NUMBER: 4 + BUILD_NAME: 'package_ubsan' run: | sudo rm -fr $TEMP_PATH mkdir -p $TEMP_PATH cp -r $GITHUB_WORKSPACE $TEMP_PATH - cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER + cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME - name: Upload build URLs to artifacts uses: actions/upload-artifact@v2 with: @@ -367,12 +367,12 @@ jobs: REPO_COPY: ${{runner.temp}}/build_check/ClickHouse CACHES_PATH: ${{runner.temp}}/../ccaches CHECK_NAME: 'ClickHouse build check (actions)' - BUILD_NUMBER: 5 + BUILD_NAME: 'package_tsan' run: | sudo rm -fr $TEMP_PATH mkdir -p $TEMP_PATH cp -r $GITHUB_WORKSPACE $TEMP_PATH - cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER + cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME - name: Upload build URLs to artifacts uses: actions/upload-artifact@v2 with: @@ -406,12 +406,12 @@ jobs: REPO_COPY: ${{runner.temp}}/build_check/ClickHouse CACHES_PATH: ${{runner.temp}}/../ccaches CHECK_NAME: 'ClickHouse build check (actions)' - BUILD_NUMBER: 6 + BUILD_NAME: 'package_msan' run: | sudo rm -fr $TEMP_PATH mkdir -p $TEMP_PATH cp -r $GITHUB_WORKSPACE $TEMP_PATH - cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER + cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME - name: Upload build URLs to artifacts uses: actions/upload-artifact@v2 with: @@ -445,12 +445,12 @@ jobs: REPO_COPY: ${{runner.temp}}/build_check/ClickHouse CACHES_PATH: ${{runner.temp}}/../ccaches CHECK_NAME: 'ClickHouse build check (actions)' - 
BUILD_NUMBER: 7 + BUILD_NAME: 'package_debug' run: | sudo rm -fr $TEMP_PATH mkdir -p $TEMP_PATH cp -r $GITHUB_WORKSPACE $TEMP_PATH - cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER + cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME - name: Upload build URLs to artifacts uses: actions/upload-artifact@v2 with: @@ -486,13 +486,13 @@ jobs: IMAGES_PATH: ${{runner.temp}}/images_path REPO_COPY: ${{runner.temp}}/build_check/ClickHouse CACHES_PATH: ${{runner.temp}}/../ccaches - CHECK_NAME: 'ClickHouse special build check (actions)' - BUILD_NUMBER: 1 + CHECK_NAME: 'ClickHouse build check (actions)' + BUILD_NAME: 'binary_splitted' run: | sudo rm -fr $TEMP_PATH mkdir -p $TEMP_PATH cp -r $GITHUB_WORKSPACE $TEMP_PATH - cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER + cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME - name: Upload build URLs to artifacts uses: actions/upload-artifact@v2 with: diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml index 9a0da9c3172..18969eeedd1 100644 --- a/.github/workflows/master.yml +++ b/.github/workflows/master.yml @@ -84,7 +84,7 @@ jobs: sudo rm -fr $TEMP_PATH mkdir -p $TEMP_PATH cp -r $GITHUB_WORKSPACE $TEMP_PATH - cd $REPO_COPY/tests/ci && python3 compatibility_check.py 0 + cd $REPO_COPY/tests/ci && python3 compatibility_check.py - name: Cleanup if: always() run: | @@ -142,12 +142,12 @@ jobs: REPO_COPY: ${{runner.temp}}/build_check/ClickHouse CACHES_PATH: ${{runner.temp}}/../ccaches CHECK_NAME: 'ClickHouse build check (actions)' - BUILD_NUMBER: 0 + BUILD_NAME: 'package_release' run: | sudo rm -fr $TEMP_PATH mkdir -p $TEMP_PATH cp -r $GITHUB_WORKSPACE $TEMP_PATH - cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER + cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME - name: Upload build URLs to artifacts uses: actions/upload-artifact@v2 with: @@ -181,12 +181,12 @@ jobs: REPO_COPY: ${{runner.temp}}/build_check/ClickHouse CACHES_PATH: ${{runner.temp}}/../ccaches CHECK_NAME: 'ClickHouse build check (actions)' - BUILD_NUMBER: 8 + BUILD_NAME: 'binary_release' run: | sudo rm -fr $TEMP_PATH mkdir -p $TEMP_PATH cp -r $GITHUB_WORKSPACE $TEMP_PATH - cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER + cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME - name: Upload build URLs to artifacts uses: actions/upload-artifact@v2 with: @@ -219,12 +219,12 @@ jobs: REPO_COPY: ${{runner.temp}}/build_check/ClickHouse CACHES_PATH: ${{runner.temp}}/../ccaches CHECK_NAME: 'ClickHouse build check (actions)' - BUILD_NUMBER: 3 + BUILD_NAME: 'package_asan' run: | sudo rm -fr $TEMP_PATH mkdir -p $TEMP_PATH cp -r $GITHUB_WORKSPACE $TEMP_PATH - cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER + cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME - name: Upload build URLs to artifacts uses: actions/upload-artifact@v2 with: @@ -257,12 +257,12 @@ jobs: REPO_COPY: ${{runner.temp}}/build_check/ClickHouse CACHES_PATH: ${{runner.temp}}/../ccaches CHECK_NAME: 'ClickHouse build check (actions)' - BUILD_NUMBER: 4 + BUILD_NAME: 'package_ubsan' run: | sudo rm -fr $TEMP_PATH mkdir -p $TEMP_PATH cp -r $GITHUB_WORKSPACE $TEMP_PATH - cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER + cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME - name: Upload build URLs to artifacts uses: 
actions/upload-artifact@v2 with: @@ -295,12 +295,12 @@ jobs: REPO_COPY: ${{runner.temp}}/build_check/ClickHouse CACHES_PATH: ${{runner.temp}}/../ccaches CHECK_NAME: 'ClickHouse build check (actions)' - BUILD_NUMBER: 5 + BUILD_NAME: 'package_tsan' run: | sudo rm -fr $TEMP_PATH mkdir -p $TEMP_PATH cp -r $GITHUB_WORKSPACE $TEMP_PATH - cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER + cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME - name: Upload build URLs to artifacts uses: actions/upload-artifact@v2 with: @@ -333,12 +333,12 @@ jobs: REPO_COPY: ${{runner.temp}}/build_check/ClickHouse CACHES_PATH: ${{runner.temp}}/../ccaches CHECK_NAME: 'ClickHouse build check (actions)' - BUILD_NUMBER: 6 + BUILD_NAME: 'package_msan' run: | sudo rm -fr $TEMP_PATH mkdir -p $TEMP_PATH cp -r $GITHUB_WORKSPACE $TEMP_PATH - cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER + cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME - name: Upload build URLs to artifacts uses: actions/upload-artifact@v2 with: @@ -371,12 +371,12 @@ jobs: REPO_COPY: ${{runner.temp}}/build_check/ClickHouse CACHES_PATH: ${{runner.temp}}/../ccaches CHECK_NAME: 'ClickHouse build check (actions)' - BUILD_NUMBER: 7 + BUILD_NAME: 'package_debug' run: | sudo rm -fr $TEMP_PATH mkdir -p $TEMP_PATH cp -r $GITHUB_WORKSPACE $TEMP_PATH - cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER + cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME - name: Upload build URLs to artifacts uses: actions/upload-artifact@v2 with: @@ -412,13 +412,13 @@ jobs: IMAGES_PATH: ${{runner.temp}}/images_path REPO_COPY: ${{runner.temp}}/build_check/ClickHouse CACHES_PATH: ${{runner.temp}}/../ccaches - CHECK_NAME: 'ClickHouse special build check (actions)' - BUILD_NUMBER: 1 + CHECK_NAME: 'ClickHouse build check (actions)' + BUILD_NAME: 'binary_splitted' run: | sudo rm -fr $TEMP_PATH mkdir -p $TEMP_PATH cp -r $GITHUB_WORKSPACE $TEMP_PATH - cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER + cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME - name: Upload build URLs to artifacts uses: actions/upload-artifact@v2 with: diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml index e279ae91588..988db77e62a 100644 --- a/.github/workflows/release_branches.yml +++ b/.github/workflows/release_branches.yml @@ -41,7 +41,7 @@ jobs: sudo rm -fr $TEMP_PATH mkdir -p $TEMP_PATH cp -r $GITHUB_WORKSPACE $TEMP_PATH - cd $REPO_COPY/tests/ci && python3 compatibility_check.py 0 + cd $REPO_COPY/tests/ci && python3 compatibility_check.py - name: Cleanup if: always() run: | @@ -72,12 +72,12 @@ jobs: REPO_COPY: ${{runner.temp}}/build_check/ClickHouse CACHES_PATH: ${{runner.temp}}/../ccaches CHECK_NAME: 'ClickHouse build check (actions)' - BUILD_NUMBER: 0 + BUILD_NAME: 'package_release' run: | sudo rm -fr $TEMP_PATH mkdir -p $TEMP_PATH cp -r $GITHUB_WORKSPACE $TEMP_PATH - cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER + cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME - name: Upload build URLs to artifacts uses: actions/upload-artifact@v2 with: @@ -110,12 +110,12 @@ jobs: REPO_COPY: ${{runner.temp}}/build_check/ClickHouse CACHES_PATH: ${{runner.temp}}/../ccaches CHECK_NAME: 'ClickHouse build check (actions)' - BUILD_NUMBER: 3 + BUILD_NAME: 'package_asan' run: | sudo rm -fr $TEMP_PATH mkdir -p $TEMP_PATH cp -r 
$GITHUB_WORKSPACE $TEMP_PATH - cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER + cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME - name: Upload build URLs to artifacts uses: actions/upload-artifact@v2 with: @@ -148,12 +148,12 @@ jobs: REPO_COPY: ${{runner.temp}}/build_check/ClickHouse CACHES_PATH: ${{runner.temp}}/../ccaches CHECK_NAME: 'ClickHouse build check (actions)' - BUILD_NUMBER: 4 + BUILD_NAME: 'package_ubsan' run: | sudo rm -fr $TEMP_PATH mkdir -p $TEMP_PATH cp -r $GITHUB_WORKSPACE $TEMP_PATH - cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER + cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME - name: Upload build URLs to artifacts uses: actions/upload-artifact@v2 with: @@ -186,12 +186,12 @@ jobs: REPO_COPY: ${{runner.temp}}/build_check/ClickHouse CACHES_PATH: ${{runner.temp}}/../ccaches CHECK_NAME: 'ClickHouse build check (actions)' - BUILD_NUMBER: 5 + BUILD_NAME: 'package_tsan' run: | sudo rm -fr $TEMP_PATH mkdir -p $TEMP_PATH cp -r $GITHUB_WORKSPACE $TEMP_PATH - cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER + cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME - name: Upload build URLs to artifacts uses: actions/upload-artifact@v2 with: @@ -224,12 +224,12 @@ jobs: REPO_COPY: ${{runner.temp}}/build_check/ClickHouse CACHES_PATH: ${{runner.temp}}/../ccaches CHECK_NAME: 'ClickHouse build check (actions)' - BUILD_NUMBER: 6 + BUILD_NAME: 'package_msan' run: | sudo rm -fr $TEMP_PATH mkdir -p $TEMP_PATH cp -r $GITHUB_WORKSPACE $TEMP_PATH - cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER + cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME - name: Upload build URLs to artifacts uses: actions/upload-artifact@v2 with: @@ -262,12 +262,12 @@ jobs: REPO_COPY: ${{runner.temp}}/build_check/ClickHouse CACHES_PATH: ${{runner.temp}}/../ccaches CHECK_NAME: 'ClickHouse build check (actions)' - BUILD_NUMBER: 7 + BUILD_NAME: 'package_debug' run: | sudo rm -fr $TEMP_PATH mkdir -p $TEMP_PATH cp -r $GITHUB_WORKSPACE $TEMP_PATH - cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER + cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME - name: Upload build URLs to artifacts uses: actions/upload-artifact@v2 with: diff --git a/.gitmodules b/.gitmodules index 5672b89b4d2..a9178493cdf 100644 --- a/.gitmodules +++ b/.gitmodules @@ -17,7 +17,7 @@ [submodule "contrib/zlib-ng"] path = contrib/zlib-ng url = https://github.com/ClickHouse-Extras/zlib-ng.git - branch = clickhouse-new + branch = clickhouse-2.0.x [submodule "contrib/googletest"] path = contrib/googletest url = https://github.com/google/googletest.git @@ -135,9 +135,6 @@ [submodule "contrib/flatbuffers"] path = contrib/flatbuffers url = https://github.com/ClickHouse-Extras/flatbuffers.git -[submodule "contrib/libc-headers"] - path = contrib/libc-headers - url = https://github.com/ClickHouse-Extras/libc-headers.git [submodule "contrib/replxx"] path = contrib/replxx url = https://github.com/ClickHouse-Extras/replxx.git diff --git a/CMakeLists.txt b/CMakeLists.txt index 6df7b24abb1..7a3991bc93c 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -223,7 +223,7 @@ if (OS_DARWIN) # from a _specific_ library, which is what we need. set(WHOLE_ARCHIVE -force_load) # The `-noall_load` flag is the default and now obsolete. 
- set(NO_WHOLE_ARCHIVE "") + set(NO_WHOLE_ARCHIVE "-undefined,error") # Effectively, a no-op. Here to avoid empty "-Wl, " sequence to be generated in the command line. else () set(WHOLE_ARCHIVE --whole-archive) set(NO_WHOLE_ARCHIVE --no-whole-archive) diff --git a/cmake/linux/default_libs.cmake b/cmake/linux/default_libs.cmake index 23374916418..426ae482ea3 100644 --- a/cmake/linux/default_libs.cmake +++ b/cmake/linux/default_libs.cmake @@ -29,14 +29,6 @@ message(STATUS "Default libraries: ${DEFAULT_LIBS}") set(CMAKE_CXX_STANDARD_LIBRARIES ${DEFAULT_LIBS}) set(CMAKE_C_STANDARD_LIBRARIES ${DEFAULT_LIBS}) -# glibc-compatibility library relies to constant version of libc headers -# (because minor changes in function attributes between different glibc versions will introduce incompatibilities) -# This is for x86_64. For other architectures we have separate toolchains. -if (ARCH_AMD64 AND NOT CMAKE_CROSSCOMPILING) - set(CMAKE_C_STANDARD_INCLUDE_DIRECTORIES ${ClickHouse_SOURCE_DIR}/contrib/libc-headers/x86_64-linux-gnu ${ClickHouse_SOURCE_DIR}/contrib/libc-headers) - set(CMAKE_CXX_STANDARD_INCLUDE_DIRECTORIES ${ClickHouse_SOURCE_DIR}/contrib/libc-headers/x86_64-linux-gnu ${ClickHouse_SOURCE_DIR}/contrib/libc-headers) -endif () - # Unfortunately '-pthread' doesn't work with '-nodefaultlibs'. # Just make sure we have pthreads at all. set(THREADS_PREFER_PTHREAD_FLAG ON) diff --git a/cmake/tools.cmake b/cmake/tools.cmake index c87887ddddd..eb3624f3b3b 100644 --- a/cmake/tools.cmake +++ b/cmake/tools.cmake @@ -22,9 +22,10 @@ if (COMPILER_GCC) elseif (COMPILER_CLANG) # Require minimum version of clang/apple-clang if (CMAKE_CXX_COMPILER_ID MATCHES "AppleClang") - # If you are developer you can figure out what exact versions of AppleClang are Ok, - # simply remove the following line. - message (FATAL_ERROR "AppleClang is not supported, you should install clang from brew. See the instruction: https://clickhouse.com/docs/en/development/build-osx/") + # (Experimental!) Specify "-DALLOW_APPLECLANG=ON" when running CMake configuration step, if you want to experiment with using it. + if (NOT ALLOW_APPLECLANG AND NOT DEFINED ENV{ALLOW_APPLECLANG}) + message (FATAL_ERROR "AppleClang is not supported, you should install clang from brew. See the instruction: https://clickhouse.com/docs/en/development/build-osx/") + endif () # AppleClang 10.0.1 (Xcode 10.2) corresponds to LLVM/Clang upstream version 7.0.0 # AppleClang 11.0.0 (Xcode 11.0) corresponds to LLVM/Clang upstream version 8.0.0 diff --git a/contrib/CMakeLists.txt b/contrib/CMakeLists.txt index 278dba9bb70..eb898af2ea6 100644 --- a/contrib/CMakeLists.txt +++ b/contrib/CMakeLists.txt @@ -301,9 +301,10 @@ endif() # instead of controlling it via CMAKE_FOLDER. function (ensure_target_rooted_in _target _folder) - # Skip INTERFACE library targets, since FOLDER property is not available for them. + # Skip aliases and INTERFACE library targets, since FOLDER property is not available/writable for them. 
+ get_target_property (_target_aliased "${_target}" ALIASED_TARGET) get_target_property (_target_type "${_target}" TYPE) - if (_target_type STREQUAL "INTERFACE_LIBRARY") + if (_target_aliased OR _target_type STREQUAL "INTERFACE_LIBRARY") return () endif () diff --git a/contrib/libc-headers b/contrib/libc-headers deleted file mode 160000 index aa5429bf67a..00000000000 --- a/contrib/libc-headers +++ /dev/null @@ -1 +0,0 @@ -Subproject commit aa5429bf67a346e48ad60efd88bcefc286644bf3 diff --git a/contrib/libcxx-cmake/CMakeLists.txt b/contrib/libcxx-cmake/CMakeLists.txt index 2ec6dbff1a1..332fb0411cd 100644 --- a/contrib/libcxx-cmake/CMakeLists.txt +++ b/contrib/libcxx-cmake/CMakeLists.txt @@ -73,6 +73,11 @@ target_compile_options(cxx PRIVATE -w) target_link_libraries(cxx PUBLIC cxxabi) +# For __udivmodti4, __divmodti4. +if (OS_DARWIN AND COMPILER_GCC) + target_link_libraries(cxx PRIVATE gcc) +endif () + install( TARGETS cxx EXPORT global diff --git a/contrib/sentry-native-cmake/CMakeLists.txt b/contrib/sentry-native-cmake/CMakeLists.txt index 3b0057afe94..f4e946cf797 100644 --- a/contrib/sentry-native-cmake/CMakeLists.txt +++ b/contrib/sentry-native-cmake/CMakeLists.txt @@ -28,11 +28,16 @@ set (SRCS ${SRC_DIR}/src/sentry_unix_pageallocator.c ${SRC_DIR}/src/path/sentry_path_unix.c ${SRC_DIR}/src/symbolizer/sentry_symbolizer_unix.c - ${SRC_DIR}/src/modulefinder/sentry_modulefinder_linux.c ${SRC_DIR}/src/transports/sentry_transport_curl.c ${SRC_DIR}/src/backends/sentry_backend_none.c ) +if(APPLE) + list(APPEND SRCS ${SRC_DIR}/src/modulefinder/sentry_modulefinder_apple.c) +else() + list(APPEND SRCS ${SRC_DIR}/src/modulefinder/sentry_modulefinder_linux.c) +endif() + add_library(sentry ${SRCS}) add_library(sentry::sentry ALIAS sentry) diff --git a/contrib/simdjson-cmake/CMakeLists.txt b/contrib/simdjson-cmake/CMakeLists.txt index 862d8dc50f8..bb9a5844def 100644 --- a/contrib/simdjson-cmake/CMakeLists.txt +++ b/contrib/simdjson-cmake/CMakeLists.txt @@ -6,4 +6,6 @@ add_library(simdjson ${SIMDJSON_SRC}) target_include_directories(simdjson SYSTEM PUBLIC "${SIMDJSON_INCLUDE_DIR}" PRIVATE "${SIMDJSON_SRC_DIR}") # simdjson is using its own CPU dispatching and get confused if we enable AVX/AVX2 flags. 
-target_compile_options(simdjson PRIVATE -mno-avx -mno-avx2) +if(ARCH_AMD64) + target_compile_options(simdjson PRIVATE -mno-avx -mno-avx2) +endif() diff --git a/contrib/zlib-ng b/contrib/zlib-ng index 6a5e93b9007..bffad6f6fe7 160000 --- a/contrib/zlib-ng +++ b/contrib/zlib-ng @@ -1 +1 @@ -Subproject commit 6a5e93b9007782115f7f7e5235dedc81c4f1facb +Subproject commit bffad6f6fe74d6a2f92e2668390664a926c68733 diff --git a/docker/builder/Dockerfile b/docker/builder/Dockerfile index 5a998ffbe3e..49c40d576e7 100644 --- a/docker/builder/Dockerfile +++ b/docker/builder/Dockerfile @@ -1,8 +1,10 @@ FROM ubuntu:20.04 -ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=13 +# ARG for quick switch to a given ubuntu mirror +ARG apt_archive="http://archive.ubuntu.com" +RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list -RUN sed -i 's|http://archive|http://ru.archive|g' /etc/apt/sources.list +ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=13 RUN apt-get update \ && apt-get install ca-certificates lsb-release wget gnupg apt-transport-https \ diff --git a/docker/client/Dockerfile b/docker/client/Dockerfile index 6f9a957852e..e84cb601c0f 100644 --- a/docker/client/Dockerfile +++ b/docker/client/Dockerfile @@ -1,10 +1,12 @@ FROM ubuntu:18.04 +# ARG for quick switch to a given ubuntu mirror +ARG apt_archive="http://archive.ubuntu.com" +RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list + ARG repository="deb https://repo.clickhouse.com/deb/stable/ main/" ARG version=21.12.1.* -RUN sed -i 's|http://archive|http://ru.archive|g' /etc/apt/sources.list - RUN apt-get update \ && apt-get install --yes --no-install-recommends \ apt-transport-https \ diff --git a/docker/docs/builder/Dockerfile b/docker/docs/builder/Dockerfile index 8afddefa41a..50e3fadf9ac 100644 --- a/docker/docs/builder/Dockerfile +++ b/docker/docs/builder/Dockerfile @@ -1,9 +1,11 @@ # docker build -t clickhouse/docs-build . FROM ubuntu:20.04 -ENV LANG=C.UTF-8 +# ARG for quick switch to a given ubuntu mirror +ARG apt_archive="http://archive.ubuntu.com" +RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list -RUN sed -i 's|http://archive|http://ru.archive|g' /etc/apt/sources.list +ENV LANG=C.UTF-8 RUN apt-get update \ && DEBIAN_FRONTEND=noninteractive apt-get install --yes --no-install-recommends \ diff --git a/docker/packager/binary/Dockerfile b/docker/packager/binary/Dockerfile index 6f654e44190..6a6d0e7212c 100644 --- a/docker/packager/binary/Dockerfile +++ b/docker/packager/binary/Dockerfile @@ -1,9 +1,11 @@ # docker build -t clickhouse/binary-builder . FROM ubuntu:20.04 -ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=13 +# ARG for quick switch to a given ubuntu mirror +ARG apt_archive="http://archive.ubuntu.com" +RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list -RUN sed -i 's|http://archive|http://ru.archive|g' /etc/apt/sources.list +ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=13 RUN apt-get update \ && apt-get install \ diff --git a/docker/packager/deb/Dockerfile b/docker/packager/deb/Dockerfile index a3a4c2c554d..873edfe4afc 100644 --- a/docker/packager/deb/Dockerfile +++ b/docker/packager/deb/Dockerfile @@ -1,9 +1,11 @@ # docker build -t clickhouse/deb-builder . 
FROM ubuntu:20.04 -ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=13 +# ARG for quick switch to a given ubuntu mirror +ARG apt_archive="http://archive.ubuntu.com" +RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list -RUN sed -i 's|http://archive|http://ru.archive|g' /etc/apt/sources.list +ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=13 RUN apt-get update \ && apt-get install ca-certificates lsb-release wget gnupg apt-transport-https \ diff --git a/docker/server/Dockerfile b/docker/server/Dockerfile index 04842e7a3de..96e7e73af33 100644 --- a/docker/server/Dockerfile +++ b/docker/server/Dockerfile @@ -1,5 +1,9 @@ FROM ubuntu:20.04 +# ARG for quick switch to a given ubuntu mirror +ARG apt_archive="http://archive.ubuntu.com" +RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list + ARG repository="deb https://repo.clickhouse.com/deb/stable/ main/" ARG version=21.12.1.* ARG gosu_ver=1.10 @@ -26,8 +30,6 @@ ARG DEBIAN_FRONTEND=noninteractive # installed to prevent picking those uid / gid by some unrelated software. # The same uid / gid (101) is used both for alpine and ubuntu. -RUN sed -i 's|http://archive|http://ru.archive|g' /etc/apt/sources.list - RUN groupadd -r clickhouse --gid=101 \ && useradd -r -g clickhouse --uid=101 --home-dir=/var/lib/clickhouse --shell=/bin/bash clickhouse \ && apt-get update \ diff --git a/docker/test/base/Dockerfile b/docker/test/base/Dockerfile index daacbe2bbfb..a661f8875a2 100644 --- a/docker/test/base/Dockerfile +++ b/docker/test/base/Dockerfile @@ -1,9 +1,11 @@ # docker build -t clickhouse/test-base . FROM clickhouse/test-util -ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=13 +# ARG for quick switch to a given ubuntu mirror +ARG apt_archive="http://archive.ubuntu.com" +RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list -RUN sed -i 's|http://archive|http://ru.archive|g' /etc/apt/sources.list +ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=13 RUN apt-get update \ && apt-get install ca-certificates lsb-release wget gnupg apt-transport-https \ diff --git a/docker/test/codebrowser/Dockerfile b/docker/test/codebrowser/Dockerfile index 94aa321252b..25fabca67b5 100644 --- a/docker/test/codebrowser/Dockerfile +++ b/docker/test/codebrowser/Dockerfile @@ -2,7 +2,9 @@ # docker run --volume=path_to_repo:/repo_folder --volume=path_to_result:/test_output clickhouse/codebrowser FROM clickhouse/binary-builder -RUN sed -i 's|http://archive|http://ru.archive|g' /etc/apt/sources.list +# ARG for quick switch to a given ubuntu mirror +ARG apt_archive="http://archive.ubuntu.com" +RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list RUN apt-get update && apt-get --yes --allow-unauthenticated install clang-9 libllvm9 libclang-9-dev diff --git a/docker/test/fasttest/Dockerfile b/docker/test/fasttest/Dockerfile index 798910fb952..6fa5b0aa9db 100644 --- a/docker/test/fasttest/Dockerfile +++ b/docker/test/fasttest/Dockerfile @@ -1,9 +1,11 @@ # docker build -t clickhouse/fasttest . 
FROM clickhouse/test-util -ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=13 +# ARG for quick switch to a given ubuntu mirror +ARG apt_archive="http://archive.ubuntu.com" +RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list -RUN sed -i 's|http://archive|http://ru.archive|g' /etc/apt/sources.list +ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=13 RUN apt-get update \ && apt-get install ca-certificates lsb-release wget gnupg apt-transport-https \ diff --git a/docker/test/fasttest/run.sh b/docker/test/fasttest/run.sh index 10c896c15f2..6e4e1dc5af8 100755 --- a/docker/test/fasttest/run.sh +++ b/docker/test/fasttest/run.sh @@ -174,7 +174,6 @@ function clone_submodules contrib/double-conversion contrib/libcxx contrib/libcxxabi - contrib/libc-headers contrib/lz4 contrib/zstd contrib/fastops diff --git a/docker/test/fuzzer/Dockerfile b/docker/test/fuzzer/Dockerfile index 6444e745c47..c602cba50aa 100644 --- a/docker/test/fuzzer/Dockerfile +++ b/docker/test/fuzzer/Dockerfile @@ -1,12 +1,14 @@ # docker build -t clickhouse/fuzzer . FROM clickhouse/test-base +# ARG for quick switch to a given ubuntu mirror +ARG apt_archive="http://archive.ubuntu.com" +RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list + ENV LANG=C.UTF-8 ENV TZ=Europe/Moscow RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone -RUN sed -i 's|http://archive|http://ru.archive|g' /etc/apt/sources.list - RUN apt-get update \ && DEBIAN_FRONTEND=noninteractive apt-get install --yes --no-install-recommends \ ca-certificates \ diff --git a/docker/test/integration/runner/Dockerfile b/docker/test/integration/runner/Dockerfile index 58c77d285d9..5695be70b9a 100644 --- a/docker/test/integration/runner/Dockerfile +++ b/docker/test/integration/runner/Dockerfile @@ -1,7 +1,9 @@ # docker build -t clickhouse/integration-tests-runner . FROM ubuntu:20.04 -RUN sed -i 's|http://archive|http://ru.archive|g' /etc/apt/sources.list +# ARG for quick switch to a given ubuntu mirror +ARG apt_archive="http://archive.ubuntu.com" +RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list RUN apt-get update \ && env DEBIAN_FRONTEND=noninteractive apt-get install --yes \ diff --git a/docker/test/performance-comparison/Dockerfile b/docker/test/performance-comparison/Dockerfile index 73d9454ab7f..eddaf969f33 100644 --- a/docker/test/performance-comparison/Dockerfile +++ b/docker/test/performance-comparison/Dockerfile @@ -1,12 +1,14 @@ # docker build -t clickhouse/performance-comparison . FROM ubuntu:18.04 +# ARG for quick switch to a given ubuntu mirror +ARG apt_archive="http://archive.ubuntu.com" +RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list + ENV LANG=C.UTF-8 ENV TZ=Europe/Moscow RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone -RUN sed -i 's|http://archive|http://ru.archive|g' /etc/apt/sources.list - RUN apt-get update \ && DEBIAN_FRONTEND=noninteractive apt-get install --yes --no-install-recommends \ bash \ diff --git a/docker/test/sqlancer/Dockerfile b/docker/test/sqlancer/Dockerfile index e73fd03fb6d..0821d516e23 100644 --- a/docker/test/sqlancer/Dockerfile +++ b/docker/test/sqlancer/Dockerfile @@ -1,7 +1,9 @@ # docker build -t clickhouse/sqlancer-test . 
FROM ubuntu:20.04 -RUN sed -i 's|http://archive|http://ru.archive|g' /etc/apt/sources.list +# ARG for quick switch to a given ubuntu mirror +ARG apt_archive="http://archive.ubuntu.com" +RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list RUN apt-get update --yes && env DEBIAN_FRONTEND=noninteractive apt-get install wget unzip git default-jdk maven python3 --yes --no-install-recommends RUN wget https://github.com/sqlancer/sqlancer/archive/master.zip -O /sqlancer.zip diff --git a/docker/test/style/Dockerfile b/docker/test/style/Dockerfile index 39d7bb8a9e6..824af0de022 100644 --- a/docker/test/style/Dockerfile +++ b/docker/test/style/Dockerfile @@ -1,7 +1,9 @@ # docker build -t clickhouse/style-test . FROM ubuntu:20.04 -RUN sed -i 's|http://archive|http://ru.archive|g' /etc/apt/sources.list +# ARG for quick switch to a given ubuntu mirror +ARG apt_archive="http://archive.ubuntu.com" +RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list RUN apt-get update && env DEBIAN_FRONTEND=noninteractive apt-get install --yes \ shellcheck \ diff --git a/docker/test/testflows/runner/Dockerfile b/docker/test/testflows/runner/Dockerfile index 91d0eb844d9..8ea3cd46973 100644 --- a/docker/test/testflows/runner/Dockerfile +++ b/docker/test/testflows/runner/Dockerfile @@ -1,7 +1,9 @@ # docker build -t clickhouse/testflows-runner . FROM ubuntu:20.04 -RUN sed -i 's|http://archive|http://ru.archive|g' /etc/apt/sources.list +# ARG for quick switch to a given ubuntu mirror +ARG apt_archive="http://archive.ubuntu.com" +RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list RUN apt-get update \ && env DEBIAN_FRONTEND=noninteractive apt-get install --yes \ diff --git a/docs/en/development/build-osx.md b/docs/en/development/build-osx.md index 0cd69312bb6..a110e63801c 100644 --- a/docs/en/development/build-osx.md +++ b/docs/en/development/build-osx.md @@ -3,15 +3,14 @@ toc_priority: 65 toc_title: Build on Mac OS X --- -# You don't have to build ClickHouse - -You can install ClickHouse as follows: https://clickhouse.com/#quick-start -Choose Mac x86 or M1. - # How to Build ClickHouse on Mac OS X {#how-to-build-clickhouse-on-mac-os-x} -Build should work on x86_64 (Intel) and arm64 (Apple Silicon) based macOS 10.15 (Catalina) and higher with Homebrew's vanilla Clang. -It is always recommended to use `clang` compiler. It is possible to use XCode's `AppleClang` or `gcc` but it's strongly discouraged. +!!! info "You don't have to build ClickHouse yourself!" + You can install pre-built ClickHouse as described in [Quick Start](https://clickhouse.com/#quick-start). + Follow `macOS (Intel)` or `macOS (Apple silicon)` installation instructions. + +Build should work on x86_64 (Intel) and arm64 (Apple silicon) based macOS 10.15 (Catalina) and higher with Homebrew's vanilla Clang. +It is always recommended to use vanilla `clang` compiler. It is possible to use XCode's `apple-clang` or `gcc` but it's strongly discouraged. ## Install Homebrew {#install-homebrew} @@ -33,8 +32,6 @@ sudo rm -rf /Library/Developer/CommandLineTools sudo xcode-select --install ``` -Reboot. 
- ## Install Required Compilers, Tools, and Libraries {#install-required-compilers-tools-and-libraries} ``` bash @@ -51,40 +48,41 @@ git clone --recursive git@github.com:ClickHouse/ClickHouse.git ## Build ClickHouse {#build-clickhouse} -To build using Homebrew's vanilla Clang compiler: +To build using Homebrew's vanilla Clang compiler (the only **recommended** way): ``` bash cd ClickHouse rm -rf build mkdir build cd build -cmake -DCMAKE_C_COMPILER=$(brew --prefix llvm)/bin/clang -DCMAKE_CXX_COMPILER=$(brew --prefix llvm)/bin/clang++ -DCMAKE_BUILD_TYPE=RelWithDebInfo .. +cmake -DCMAKE_C_COMPILER=$(brew --prefix llvm)/bin/clang -DCMAKE_CXX_COMPILER=$(brew --prefix llvm)/bin/clang++ -DCMAKE_AR=$(brew --prefix llvm)/bin/llvm-ar -DCMAKE_RANLIB=$(brew --prefix llvm)/bin/llvm-ranlib -DOBJCOPY_PATH=$(brew --prefix llvm)/bin/llvm-objcopy -DCMAKE_BUILD_TYPE=RelWithDebInfo .. cmake --build . --config RelWithDebInfo -cd .. +# The resulting binary will be created at: ./programs/clickhouse ``` -To build using Xcode's native AppleClang compiler (this option is strongly not recommended; use the option above): +To build using Xcode's native AppleClang compiler in Xcode IDE (this option is only for development builds and workflows, and is **not recommended** unless you know what you are doing): ``` bash cd ClickHouse rm -rf build mkdir build cd build -cmake -DCMAKE_BUILD_TYPE=RelWithDebInfo .. -cmake --build . --config RelWithDebInfo -cd .. +XCODE_IDE=1 ALLOW_APPLECLANG=1 cmake -G Xcode -DCMAKE_BUILD_TYPE=Debug -DENABLE_JEMALLOC=OFF .. +cmake --open . +# ...then, in Xcode IDE select ALL_BUILD scheme and start the building process. +# The resulting binary will be created at: ./programs/Debug/clickhouse ``` -To build using Homebrew's vanilla GCC compiler (this option is absolutely not recommended, I'm wondering why do we ever have it): +To build using Homebrew's vanilla GCC compiler (this option is only for development experiments, and is **absolutely not recommended** unless you really know what you are doing): ``` bash cd ClickHouse rm -rf build mkdir build cd build -cmake -DCMAKE_C_COMPILER=$(brew --prefix gcc)/bin/gcc-11 -DCMAKE_CXX_COMPILER=$(brew --prefix gcc)/bin/g++-11 -DCMAKE_BUILD_TYPE=RelWithDebInfo .. +cmake -DCMAKE_C_COMPILER=$(brew --prefix gcc)/bin/gcc-11 -DCMAKE_CXX_COMPILER=$(brew --prefix gcc)/bin/g++-11 -DCMAKE_AR=$(brew --prefix gcc)/bin/gcc-ar-11 -DCMAKE_RANLIB=$(brew --prefix gcc)/bin/gcc-ranlib-11 -DOBJCOPY_PATH=$(brew --prefix binutils)/bin/objcopy -DCMAKE_BUILD_TYPE=RelWithDebInfo .. cmake --build . --config RelWithDebInfo -cd .. +# The resulting binary will be created at: ./programs/clickhouse ``` ## Caveats {#caveats} @@ -140,9 +138,9 @@ sudo launchctl load -w /Library/LaunchDaemons/limit.maxfiles.plist To check if it’s working, use the `ulimit -n` or `launchctl limit maxfiles` commands. 
-## Run ClickHouse server: +## Running ClickHouse server -``` +``` bash cd ClickHouse ./build/programs/clickhouse-server --config-file ./programs/server/config.xml ``` diff --git a/docs/en/engines/table-engines/integrations/kafka.md b/docs/en/engines/table-engines/integrations/kafka.md index 879e919e823..9fdc43f3f18 100644 --- a/docs/en/engines/table-engines/integrations/kafka.md +++ b/docs/en/engines/table-engines/integrations/kafka.md @@ -133,8 +133,7 @@ Example: SELECT level, sum(total) FROM daily GROUP BY level; ``` - -To improve performance, received messages are grouped into blocks the size of [max_insert_block_size](../../../operations/server-configuration-parameters/settings.md#settings-max_insert_block_size). If the block wasn’t formed within [stream_flush_interval_ms](../../../operations/server-configuration-parameters/settings.md) milliseconds, the data will be flushed to the table regardless of the completeness of the block. +To improve performance, received messages are grouped into blocks the size of [max_insert_block_size](../../../operations/settings/settings/#settings-max_insert_block_size). If the block wasn’t formed within [stream_flush_interval_ms](../../../operations/settings/settings/#stream-flush-interval-ms) milliseconds, the data will be flushed to the table regardless of the completeness of the block. To stop receiving topic data or to change the conversion logic, detach the materialized view: @@ -192,6 +191,6 @@ Example: **See Also** - [Virtual columns](../../../engines/table-engines/index.md#table_engines-virtual_columns) -- [background_schedule_pool_size](../../../operations/settings/settings.md#background_schedule_pool_size) +- [background_message_broker_schedule_pool_size](../../../operations/settings/settings.md#background_message_broker_schedule_pool_size) [Original article](https://clickhouse.com/docs/en/engines/table-engines/integrations/kafka/) diff --git a/docs/en/interfaces/formats.md b/docs/en/interfaces/formats.md index c16db5c3db2..245a0c8fe89 100644 --- a/docs/en/interfaces/formats.md +++ b/docs/en/interfaces/formats.md @@ -24,6 +24,8 @@ The supported formats are: | [CSVWithNames](#csvwithnames) | ✔ | ✔ | | [CSVWithNamesAndTypes](#csvwithnamesandtypes) | ✔ | ✔ | | [CustomSeparated](#format-customseparated) | ✔ | ✔ | +| [CustomSeparatedWithNames](#customseparatedwithnames) | ✔ | ✔ | +| [CustomSeparatedWithNamesAndTypes](#customseparatedwithnamesandtypes) | ✔ | ✔ | | [Values](#data-format-values) | ✔ | ✔ | | [Vertical](#vertical) | ✗ | ✔ | | [JSON](#json) | ✗ | ✔ | @@ -429,8 +431,17 @@ Also prints two header rows with column names and types, similar to [TabSeparate ## CustomSeparated {#format-customseparated} -Similar to [Template](#format-template), but it prints or reads all columns and uses escaping rule from setting `format_custom_escaping_rule` and delimiters from settings `format_custom_field_delimiter`, `format_custom_row_before_delimiter`, `format_custom_row_after_delimiter`, `format_custom_row_between_delimiter`, `format_custom_result_before_delimiter` and `format_custom_result_after_delimiter`, not from format strings. -There is also `CustomSeparatedIgnoreSpaces` format, which is similar to `TemplateIgnoreSpaces`. 
+Similar to [Template](#format-template), but it prints or reads all names and types of columns and uses the escaping rule from the [format_custom_escaping_rule](../operations/settings/settings.md#format-custom-escaping-rule) setting and delimiters from the [format_custom_field_delimiter](../operations/settings/settings.md#format-custom-field-delimiter), [format_custom_row_before_delimiter](../operations/settings/settings.md#format-custom-row-before-delimiter), [format_custom_row_after_delimiter](../operations/settings/settings.md#format-custom-row-after-delimiter), [format_custom_row_between_delimiter](../operations/settings/settings.md#format-custom-row-between-delimiter), [format_custom_result_before_delimiter](../operations/settings/settings.md#format-custom-result-before-delimiter) and [format_custom_result_after_delimiter](../operations/settings/settings.md#format-custom-result-after-delimiter) settings, not from format strings. + +There is also the `CustomSeparatedIgnoreSpaces` format, which is similar to [TemplateIgnoreSpaces](#templateignorespaces). + +## CustomSeparatedWithNames {#customseparatedwithnames} + +Also prints the header row with column names, similar to [TabSeparatedWithNames](#tabseparatedwithnames). + +## CustomSeparatedWithNamesAndTypes {#customseparatedwithnamesandtypes} + +Also prints two header rows with column names and types, similar to [TabSeparatedWithNamesAndTypes](#tabseparatedwithnamesandtypes). ## JSON {#json} @@ -1536,14 +1547,17 @@ Each line of imported data is parsed according to the regular expression. When working with the `Regexp` format, you can use the following settings: -- `format_regexp` — [String](../sql-reference/data-types/string.md). Contains regular expression in the [re2](https://github.com/google/re2/wiki/Syntax) format. -- `format_regexp_escaping_rule` — [String](../sql-reference/data-types/string.md). The following escaping rules are supported: - - CSV (similarly to [CSV](#csv)) - - JSON (similarly to [JSONEachRow](#jsoneachrow)) - - Escaped (similarly to [TSV](#tabseparated)) - - Quoted (similarly to [Values](#data-format-values)) - - Raw (extracts subpatterns as a whole, no escaping rules) -- `format_regexp_skip_unmatched` — [UInt8](../sql-reference/data-types/int-uint.md). Defines the need to throw an exeption in case the `format_regexp` expression does not match the imported data. Can be set to `0` or `1`. +- `format_regexp` — [String](../sql-reference/data-types/string.md). Contains a regular expression in the [re2](https://github.com/google/re2/wiki/Syntax) format. + +- `format_regexp_escaping_rule` — [String](../sql-reference/data-types/string.md). The following escaping rules are supported: + + - CSV (similarly to [CSV](#csv)) + - JSON (similarly to [JSONEachRow](#jsoneachrow)) + - Escaped (similarly to [TSV](#tabseparated)) + - Quoted (similarly to [Values](#data-format-values)) + - Raw (extracts subpatterns as a whole, no escaping rules, similarly to [TSVRaw](#tabseparatedraw)) + +- `format_regexp_skip_unmatched` — [UInt8](../sql-reference/data-types/int-uint.md). Defines whether to throw an exception when the `format_regexp` expression does not match the imported data. Can be set to `0` or `1`.
**Usage** diff --git a/docs/en/operations/server-configuration-parameters/settings.md b/docs/en/operations/server-configuration-parameters/settings.md index d60aac84673..717ab4e14b7 100644 --- a/docs/en/operations/server-configuration-parameters/settings.md +++ b/docs/en/operations/server-configuration-parameters/settings.md @@ -69,8 +69,6 @@ If no conditions met for a data part, ClickHouse uses the `lz4` compression. ``` - ## custom_settings_prefixes {#custom_settings_prefixes} diff --git a/docs/en/operations/settings/settings.md b/docs/en/operations/settings/settings.md index 74a0f3a8c0d..a1c2b2fbf50 100644 --- a/docs/en/operations/settings/settings.md +++ b/docs/en/operations/settings/settings.md @@ -4071,3 +4071,54 @@ Possible values: - 0 — Big files read with only copying data from kernel to userspace. Default value: `0`. + +## format_custom_escaping_rule {#format-custom-escaping-rule} + +Sets the field escaping rule for [CustomSeparated](../../interfaces/formats.md#format-customseparated) data format. + +Possible values: + +- `'Escaped'` — Similarly to [TSV](../../interfaces/formats.md#tabseparated). +- `'Quoted'` — Similarly to [Values](../../interfaces/formats.md#data-format-values). +- `'CSV'` — Similarly to [CSV](../../interfaces/formats.md#csv). +- `'JSON'` — Similarly to [JSONEachRow](../../interfaces/formats.md#jsoneachrow). +- `'XML'` — Similarly to [XML](../../interfaces/formats.md#xml). +- `'Raw'` — Extracts subpatterns as a whole, no escaping rules, similarly to [TSVRaw](../../interfaces/formats.md#tabseparatedraw). + +Default value: `'Escaped'`. + +## format_custom_field_delimiter {#format-custom-field-delimiter} + +Sets the character that is interpreted as a delimiter between the fields for [CustomSeparated](../../interfaces/formats.md#format-customseparated) data format. + +Default value: `'\t'`. + +## format_custom_row_before_delimiter {#format-custom-row-before-delimiter} + +Sets the character that is interpreted as a delimiter before the field of the first column for [CustomSeparated](../../interfaces/formats.md#format-customseparated) data format. + +Default value: `''`. + +## format_custom_row_after_delimiter {#format-custom-row-after-delimiter} + +Sets the character that is interpreted as a delimiter after the field of the last column for [CustomSeparated](../../interfaces/formats.md#format-customseparated) data format. + +Default value: `'\n'`. + +## format_custom_row_between_delimiter {#format-custom-row-between-delimiter} + +Sets the character that is interpreted as a delimiter between the rows for [CustomSeparated](../../interfaces/formats.md#format-customseparated) data format. + +Default value: `''`. + +## format_custom_result_before_delimiter {#format-custom-result-before-delimiter} + +Sets the character that is interpreted as a prefix before the result set for [CustomSeparated](../../interfaces/formats.md#format-customseparated) data format. + +Default value: `''`. + +## format_custom_result_after_delimiter {#format-custom-result-after-delimiter} + +Sets the character that is interpreted as a suffix after the result set for [CustomSeparated](../../interfaces/formats.md#format-customseparated) data format. + +Default value: `''`. 
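The `format_custom_*` settings added above can be passed to `clickhouse-client` the same way as other format settings (compare the existing `--format_csv_delimiter` example in `formats.md`). A minimal sketch, not part of this patch; the query, delimiter, and escaping-rule values are illustrative only:

```bash
# Export a few rows in CustomSeparated, overriding the field delimiter and
# the escaping rule documented above via ordinary client flags.
clickhouse-client \
    --format_custom_escaping_rule="CSV" \
    --format_custom_field_delimiter=";" \
    --query="SELECT number AS n, concat('value_', toString(number)) AS s FROM numbers(3) FORMAT CustomSeparated"
```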
diff --git a/docs/en/operations/utilities/clickhouse-format.md b/docs/en/operations/utilities/clickhouse-format.md index edba55689e7..333f127e125 100644 --- a/docs/en/operations/utilities/clickhouse-format.md +++ b/docs/en/operations/utilities/clickhouse-format.md @@ -1,98 +1,114 @@ ---- -toc_priority: 65 -toc_title: clickhouse-format ---- - -# clickhouse-format {#clickhouse-format} - -Allows formatting input queries. - -Keys: - -- `--help` or`-h` — Produce help message. -- `--hilite` — Add syntax highlight with ANSI terminal escape sequences. -- `--oneline` — Format in single line. -- `--quiet` or `-q` — Just check syntax, no output on success. -- `--multiquery` or `-n` — Allow multiple queries in the same file. -- `--obfuscate` — Obfuscate instead of formatting. -- `--seed ` — Seed arbitrary string that determines the result of obfuscation. -- `--backslash` — Add a backslash at the end of each line of the formatted query. Can be useful when you copy a query from web or somewhere else with multiple lines, and want to execute it in command line. - -## Examples {#examples} - -1. Highlighting and single line: - -```bash -$ clickhouse-format --oneline --hilite <<< "SELECT sum(number) FROM numbers(5);" -``` - -Result: - -```sql -SELECT sum(number) FROM numbers(5) -``` - -2. Multiqueries: - -```bash -$ clickhouse-format -n <<< "SELECT * FROM (SELECT 1 AS x UNION ALL SELECT 1 UNION DISTINCT SELECT 3);" -``` - -Result: - -```text -SELECT * -FROM -( - SELECT 1 AS x - UNION ALL - SELECT 1 - UNION DISTINCT - SELECT 3 -) -; -``` - -3. Obfuscating: - -```bash -$ clickhouse-format --seed Hello --obfuscate <<< "SELECT cost_first_screen BETWEEN a AND b, CASE WHEN x >= 123 THEN y ELSE NULL END;" -``` - -Result: - -```text -SELECT treasury_mammoth_hazelnut BETWEEN nutmeg AND span, CASE WHEN chive >= 116 THEN switching ELSE ANYTHING END; -``` - -Same query and another seed string: - -```bash -$ clickhouse-format --seed World --obfuscate <<< "SELECT cost_first_screen BETWEEN a AND b, CASE WHEN x >= 123 THEN y ELSE NULL END;" -``` - -Result: - -```text -SELECT horse_tape_summer BETWEEN folklore AND moccasins, CASE WHEN intestine >= 116 THEN nonconformist ELSE FORESTRY END; -``` - -4. Adding backslash: - -```bash -$ clickhouse-format --backslash <<< "SELECT * FROM (SELECT 1 AS x UNION ALL SELECT 1 UNION DISTINCT SELECT 3);" -``` - -Result: - -```text -SELECT * \ -FROM \ -( \ - SELECT 1 AS x \ - UNION ALL \ - SELECT 1 \ - UNION DISTINCT \ - SELECT 3 \ -) -``` +--- +toc_priority: 65 +toc_title: clickhouse-format +--- + +# clickhouse-format {#clickhouse-format} + +Allows formatting input queries. + +Keys: + +- `--help` or`-h` — Produce help message. +- `--query` — Format queries of any length and complexity. +- `--hilite` — Add syntax highlight with ANSI terminal escape sequences. +- `--oneline` — Format in single line. +- `--quiet` or `-q` — Just check syntax, no output on success. +- `--multiquery` or `-n` — Allow multiple queries in the same file. +- `--obfuscate` — Obfuscate instead of formatting. +- `--seed ` — Seed arbitrary string that determines the result of obfuscation. +- `--backslash` — Add a backslash at the end of each line of the formatted query. Can be useful when you copy a query from web or somewhere else with multiple lines, and want to execute it in command line. + +## Examples {#examples} + +1. 
Formatting a query: + +```bash +$ clickhouse-format --query "select number from numbers(10) where number%2 order by number desc;" +``` + +Result: + +```text +SELECT number +FROM numbers(10) +WHERE number % 2 +ORDER BY number DESC +``` + +2. Highlighting and single line: + +```bash +$ clickhouse-format --oneline --hilite <<< "SELECT sum(number) FROM numbers(5);" +``` + +Result: + +```sql +SELECT sum(number) FROM numbers(5) +``` + +3. Multiqueries: + +```bash +$ clickhouse-format -n <<< "SELECT * FROM (SELECT 1 AS x UNION ALL SELECT 1 UNION DISTINCT SELECT 3);" +``` + +Result: + +```text +SELECT * +FROM +( + SELECT 1 AS x + UNION ALL + SELECT 1 + UNION DISTINCT + SELECT 3 +) +; +``` + +4. Obfuscating: + +```bash +$ clickhouse-format --seed Hello --obfuscate <<< "SELECT cost_first_screen BETWEEN a AND b, CASE WHEN x >= 123 THEN y ELSE NULL END;" +``` + +Result: + +```text +SELECT treasury_mammoth_hazelnut BETWEEN nutmeg AND span, CASE WHEN chive >= 116 THEN switching ELSE ANYTHING END; +``` + +Same query and another seed string: + +```bash +$ clickhouse-format --seed World --obfuscate <<< "SELECT cost_first_screen BETWEEN a AND b, CASE WHEN x >= 123 THEN y ELSE NULL END;" +``` + +Result: + +```text +SELECT horse_tape_summer BETWEEN folklore AND moccasins, CASE WHEN intestine >= 116 THEN nonconformist ELSE FORESTRY END; +``` + +5. Adding backslash: + +```bash +$ clickhouse-format --backslash <<< "SELECT * FROM (SELECT 1 AS x UNION ALL SELECT 1 UNION DISTINCT SELECT 3);" +``` + +Result: + +```text +SELECT * \ +FROM \ +( \ + SELECT 1 AS x \ + UNION ALL \ + SELECT 1 \ + UNION DISTINCT \ + SELECT 3 \ +) +``` diff --git a/docs/en/sql-reference/aggregate-functions/reference/exponentialmovingaverage.md b/docs/en/sql-reference/aggregate-functions/reference/exponentialmovingaverage.md new file mode 100644 index 00000000000..cfc9b6cd58e --- /dev/null +++ b/docs/en/sql-reference/aggregate-functions/reference/exponentialmovingaverage.md @@ -0,0 +1,148 @@ +--- +toc_priority: 108 +--- + +## exponentialMovingAverage {#exponential-moving-average} + +Calculates the exponential moving average of values for the specified time window. + +**Syntax** + +```sql +exponentialMovingAverage(x)(value, timestamp) +``` + +Each `value` corresponds to its `timestamp`. The half-life `x` is the time lag at which the exponential weights decay by one-half. The function returns a weighted average: the older the time point, the less weight the corresponding value carries. + +**Arguments** + +- `value` — Value. [Integer](../../../sql-reference/data-types/int-uint.md), [Float](../../../sql-reference/data-types/float.md) or [Decimal](../../../sql-reference/data-types/decimal.md). +- `timestamp` — Timestamp. [Integer](../../../sql-reference/data-types/int-uint.md), [Float](../../../sql-reference/data-types/float.md) or [Decimal](../../../sql-reference/data-types/decimal.md). + +**Parameters** + +- `x` — Half-life period. [Integer](../../../sql-reference/data-types/int-uint.md), [Float](../../../sql-reference/data-types/float.md) or [Decimal](../../../sql-reference/data-types/decimal.md). + +**Returned values** + +- Returns an [exponentially smoothed moving average](https://en.wikipedia.org/wiki/Moving_average#Exponential_moving_average) of the values for the past `x` time at the latest point in time. + +Type: [Float64](../../../sql-reference/data-types/float.md#float32-float64).
+ +**Examples** + +Input table: + +``` text +┌──temperature─┬─timestamp──┐ +│ 95 │ 1 │ +│ 95 │ 2 │ +│ 95 │ 3 │ +│ 96 │ 4 │ +│ 96 │ 5 │ +│ 96 │ 6 │ +│ 96 │ 7 │ +│ 97 │ 8 │ +│ 97 │ 9 │ +│ 97 │ 10 │ +│ 97 │ 11 │ +│ 98 │ 12 │ +│ 98 │ 13 │ +│ 98 │ 14 │ +│ 98 │ 15 │ +│ 99 │ 16 │ +│ 99 │ 17 │ +│ 99 │ 18 │ +│ 100 │ 19 │ +│ 100 │ 20 │ +└──────────────┴────────────┘ +``` + +Query: + +```sql +SELECT exponentialMovingAverage(5)(temperature, timestamp); +``` + +Result: + +``` text +┌──exponentialMovingAverage(5)(temperature, timestamp)──┐ +│ 92.25779635374204 │ +└───────────────────────────────────────────────────────┘ +``` + +Query: + +```sql +SELECT + value, + time, + round(exp_smooth, 3), + bar(exp_smooth, 0, 1, 50) AS bar +FROM +( + SELECT + (number = 0) OR (number >= 25) AS value, + number AS time, + exponentialMovingAverage(10)(value, time) OVER (Rows BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS exp_smooth + FROM numbers(50) +) +``` + +Result: + +``` text +┌─value─┬─time─┬─round(exp_smooth, 3)─┬─bar────────────────────────────────────────┐ +│ 1 │ 0 │ 0.067 │ ███▎ │ +│ 0 │ 1 │ 0.062 │ ███ │ +│ 0 │ 2 │ 0.058 │ ██▊ │ +│ 0 │ 3 │ 0.054 │ ██▋ │ +│ 0 │ 4 │ 0.051 │ ██▌ │ +│ 0 │ 5 │ 0.047 │ ██▎ │ +│ 0 │ 6 │ 0.044 │ ██▏ │ +│ 0 │ 7 │ 0.041 │ ██ │ +│ 0 │ 8 │ 0.038 │ █▊ │ +│ 0 │ 9 │ 0.036 │ █▋ │ +│ 0 │ 10 │ 0.033 │ █▋ │ +│ 0 │ 11 │ 0.031 │ █▌ │ +│ 0 │ 12 │ 0.029 │ █▍ │ +│ 0 │ 13 │ 0.027 │ █▎ │ +│ 0 │ 14 │ 0.025 │ █▎ │ +│ 0 │ 15 │ 0.024 │ █▏ │ +│ 0 │ 16 │ 0.022 │ █ │ +│ 0 │ 17 │ 0.021 │ █ │ +│ 0 │ 18 │ 0.019 │ ▊ │ +│ 0 │ 19 │ 0.018 │ ▊ │ +│ 0 │ 20 │ 0.017 │ ▋ │ +│ 0 │ 21 │ 0.016 │ ▋ │ +│ 0 │ 22 │ 0.015 │ ▋ │ +│ 0 │ 23 │ 0.014 │ ▋ │ +│ 0 │ 24 │ 0.013 │ ▋ │ +│ 1 │ 25 │ 0.079 │ ███▊ │ +│ 1 │ 26 │ 0.14 │ ███████ │ +│ 1 │ 27 │ 0.198 │ █████████▊ │ +│ 1 │ 28 │ 0.252 │ ████████████▌ │ +│ 1 │ 29 │ 0.302 │ ███████████████ │ +│ 1 │ 30 │ 0.349 │ █████████████████▍ │ +│ 1 │ 31 │ 0.392 │ ███████████████████▌ │ +│ 1 │ 32 │ 0.433 │ █████████████████████▋ │ +│ 1 │ 33 │ 0.471 │ ███████████████████████▌ │ +│ 1 │ 34 │ 0.506 │ █████████████████████████▎ │ +│ 1 │ 35 │ 0.539 │ ██████████████████████████▊ │ +│ 1 │ 36 │ 0.57 │ ████████████████████████████▌ │ +│ 1 │ 37 │ 0.599 │ █████████████████████████████▊ │ +│ 1 │ 38 │ 0.626 │ ███████████████████████████████▎ │ +│ 1 │ 39 │ 0.651 │ ████████████████████████████████▌ │ +│ 1 │ 40 │ 0.674 │ █████████████████████████████████▋ │ +│ 1 │ 41 │ 0.696 │ ██████████████████████████████████▋ │ +│ 1 │ 42 │ 0.716 │ ███████████████████████████████████▋ │ +│ 1 │ 43 │ 0.735 │ ████████████████████████████████████▋ │ +│ 1 │ 44 │ 0.753 │ █████████████████████████████████████▋ │ +│ 1 │ 45 │ 0.77 │ ██████████████████████████████████████▍ │ +│ 1 │ 46 │ 0.785 │ ███████████████████████████████████████▎ │ +│ 1 │ 47 │ 0.8 │ ███████████████████████████████████████▊ │ +│ 1 │ 48 │ 0.813 │ ████████████████████████████████████████▋ │ +│ 1 │ 49 │ 0.825 │ █████████████████████████████████████████▎│ +└───────┴──────┴──────────────────────┴────────────────────────────────────────────┘ +``` diff --git a/docs/en/sql-reference/functions/hash-functions.md b/docs/en/sql-reference/functions/hash-functions.md index 21ed8d33098..573e47ead54 100644 --- a/docs/en/sql-reference/functions/hash-functions.md +++ b/docs/en/sql-reference/functions/hash-functions.md @@ -89,9 +89,39 @@ SELECT sipHash64(array('e','x','a'), 'mple', 10, toDateTime('2019-06-15 23:00:00 ## sipHash128 {#hash_functions-siphash128} -Calculates SipHash from a string. -Accepts a String-type argument. Returns FixedString(16). 
-Differs from sipHash64 in that the final xor-folding state is only done up to 128 bits. +Produces a 128-bit [SipHash](https://131002.net/siphash/) hash value. Differs from [sipHash64](#hash_functions-siphash64) in that the final xor-folding state is done up to 128 bits. + +**Syntax** + +``` sql +sipHash128(par1,...) +``` + +**Arguments** + +The function takes a variable number of input parameters. Arguments can be any of the [supported data types](../../sql-reference/data-types/index.md). + +**Returned value** + +A 128-bit `SipHash` hash value. + +Type: [FixedString(16)](../../sql-reference/data-types/fixedstring.md). + +**Example** + +Query: + +``` sql +SELECT hex(sipHash128('foo', '\x01', 3)); +``` + +Result: + +``` text +┌─hex(sipHash128('foo', '', 3))────┐ +│ 9DE516A64A414D4B1B609415E4523F24 │ +└──────────────────────────────────┘ +``` ## cityHash64 {#cityhash64} @@ -459,28 +489,36 @@ SELECT murmurHash3_32(array('e','x','a'), 'mple', 10, toDateTime('2019-06-15 23: Produces a 128-bit [MurmurHash3](https://github.com/aappleby/smhasher) hash value. +**Syntax** + ``` sql -murmurHash3_128( expr ) +murmurHash3_128(expr) ``` **Arguments** -- `expr` — [Expressions](../../sql-reference/syntax.md#syntax-expressions) returning a [String](../../sql-reference/data-types/string.md)-type value. +- `expr` — A list of [expressions](../../sql-reference/syntax.md#syntax-expressions). [String](../../sql-reference/data-types/string.md). -**Returned Value** +**Returned value** -A [FixedString(16)](../../sql-reference/data-types/fixedstring.md) data type hash value. +A 128-bit `MurmurHash3` hash value. + +Type: [FixedString(16)](../../sql-reference/data-types/fixedstring.md). **Example** +Query: + ``` sql -SELECT hex(murmurHash3_128('example_string')) AS MurmurHash3, toTypeName(MurmurHash3) AS type; +SELECT hex(murmurHash3_128('foo', 'foo', 'foo')); ``` +Result: + ``` text -┌─MurmurHash3──────────────────────┬─type───┐ -│ 368A1A311CB7342253354B548E7E7E71 │ String │ -└──────────────────────────────────┴────────┘ +┌─hex(murmurHash3_128('foo', 'foo', 'foo'))─┐ +│ F8F7AD9B6CD4CF117A71E277E2EC2931 │ +└───────────────────────────────────────────┘ ``` ## xxHash32, xxHash64 {#hash-functions-xxhash32} diff --git a/docs/ru/interfaces/formats.md b/docs/ru/interfaces/formats.md index 70f1a7bcb2e..79d760271f5 100644 --- a/docs/ru/interfaces/formats.md +++ b/docs/ru/interfaces/formats.md @@ -20,6 +20,8 @@ ClickHouse может принимать (`INSERT`) и отдавать (`SELECT | [CSV](#csv) | ✔ | ✔ | | [CSVWithNames](#csvwithnames) | ✔ | ✔ | | [CustomSeparated](#format-customseparated) | ✔ | ✔ | +| [CustomSeparatedWithNames](#customseparatedwithnames) | ✔ | ✔ | +| [CustomSeparatedWithNamesAndTypes](#customseparatedwithnamesandtypes) | ✔ | ✔ | | [Values](#data-format-values) | ✔ | ✔ | | [Vertical](#vertical) | ✗ | ✔ | | [JSON](#json) | ✗ | ✔ | @@ -368,8 +370,17 @@ $ clickhouse-client --format_csv_delimiter="|" --query="INSERT INTO test.csv FOR ## CustomSeparated {#format-customseparated} -Аналогичен [Template](#format-template), но выводит (или считывает) все столбцы, используя для них правило экранирования из настройки `format_custom_escaping_rule` и разделители из настроек `format_custom_field_delimiter`, `format_custom_row_before_delimiter`, `format_custom_row_after_delimiter`, `format_custom_row_between_delimiter`, `format_custom_result_before_delimiter` и `format_custom_result_after_delimiter`, а не из форматных строк. -Также существует формат `CustomSeparatedIgnoreSpaces`, аналогичный `TemplateIgnoreSpaces`. 
+Аналогичен [Template](#format-template), но выводит (или считывает) все имена и типы столбцов, используя для них правило экранирования из настройки [format_custom_escaping_rule](../operations/settings/settings.md#format-custom-escaping-rule) и разделители из настроек [format_custom_field_delimiter](../operations/settings/settings.md#format-custom-field-delimiter), [format_custom_row_before_delimiter](../operations/settings/settings.md#format-custom-row-before-delimiter), [format_custom_row_after_delimiter](../operations/settings/settings.md#format-custom-row-after-delimiter), [format_custom_row_between_delimiter](../operations/settings/settings.md#format-custom-row-between-delimiter), [format_custom_result_before_delimiter](../operations/settings/settings.md#format-custom-result-before-delimiter) и [format_custom_result_after_delimiter](../operations/settings/settings.md#format-custom-result-after-delimiter), а не из форматных строк. + +Также существует формат `CustomSeparatedIgnoreSpaces`, аналогичный формату [TemplateIgnoreSpaces](#templateignorespaces). + +## CustomSeparatedWithNames {#customseparatedwithnames} + +Выводит также заголовок с именами столбцов, аналогичен формату [TabSeparatedWithNames](#tabseparatedwithnames). + +## CustomSeparatedWithNamesAndTypes {#customseparatedwithnamesandtypes} + +Выводит также два заголовка с именами и типами столбцов, аналогичен формату [TabSeparatedWithNamesAndTypes](#tabseparatedwithnamesandtypes). ## JSON {#json} @@ -1399,14 +1410,17 @@ SELECT * FROM line_as_string; При работе с форматом `Regexp` можно использовать следующие параметры: -- `format_regexp` — [String](../sql-reference/data-types/string.md). Строка с регулярным выражением в формате [re2](https://github.com/google/re2/wiki/Syntax). -- `format_regexp_escaping_rule` — [String](../sql-reference/data-types/string.md). Правило сериализации. Поддерживаются следующие правила: - - CSV (как в [CSV](#csv)) - - JSON (как в [JSONEachRow](#jsoneachrow)) - - Escaped (как в [TSV](#tabseparated)) - - Quoted (как в [Values](#data-format-values)) - - Raw (данные импортируются как есть, без сериализации) -- `format_regexp_skip_unmatched` — [UInt8](../sql-reference/data-types/int-uint.md). Признак, будет ли генерироваться исключение в случае, если импортируемые данные не соответствуют регулярному выражению `format_regexp`. Может принимать значение `0` или `1`. +- `format_regexp` — [String](../sql-reference/data-types/string.md). Строка с регулярным выражением в формате [re2](https://github.com/google/re2/wiki/Syntax). + +- `format_regexp_escaping_rule` — [String](../sql-reference/data-types/string.md). Правило экранирования. Поддерживаются следующие правила: + + - CSV (как в формате [CSV](#csv)) + - JSON (как в формате [JSONEachRow](#jsoneachrow)) + - Escaped (как в формате [TSV](#tabseparated)) + - Quoted (как в формате [Values](#data-format-values)) + - Raw (данные импортируются как есть, без экранирования, как в формате [TSVRaw](#tabseparatedraw)) + +- `format_regexp_skip_unmatched` — [UInt8](../sql-reference/data-types/int-uint.md). Признак, будет ли генерироваться исключение в случае, если импортируемые данные не соответствуют регулярному выражению `format_regexp`. Может принимать значение `0` или `1`. 
**Использование** diff --git a/docs/ru/operations/server-configuration-parameters/settings.md b/docs/ru/operations/server-configuration-parameters/settings.md index 7d9804927e9..c194c70ebbc 100644 --- a/docs/ru/operations/server-configuration-parameters/settings.md +++ b/docs/ru/operations/server-configuration-parameters/settings.md @@ -131,7 +131,7 @@ ClickHouse проверяет условия для `min_part_size` и `min_part ```xml - 0123456789101 + 012345678910 ``` diff --git a/docs/ru/operations/settings/settings.md b/docs/ru/operations/settings/settings.md index 9b62618e184..be4137731c0 100644 --- a/docs/ru/operations/settings/settings.md +++ b/docs/ru/operations/settings/settings.md @@ -3830,3 +3830,54 @@ SELECT * FROM positional_arguments ORDER BY 2,3; - 0 — большие файлы считываются только с копированием данных из ядра в пространство пользователей. Значение по умолчанию: `0`. + +## format_custom_escaping_rule {#format-custom-escaping-rule} + +Устанавливает правило экранирования данных формата [CustomSeparated](../../interfaces/formats.md#format-customseparated). + +Возможные значения: + +- `'Escaped'` — как в формате [TSV](../../interfaces/formats.md#tabseparated). +- `'Quoted'` — как в формате [Values](../../interfaces/formats.md#data-format-values). +- `'CSV'` — как в формате [CSV](../../interfaces/formats.md#csv). +- `'JSON'` — как в формате [JSONEachRow](../../interfaces/formats.md#jsoneachrow). +- `'XML'` — как в формате [XML](../../interfaces/formats.md#xml). +- `'Raw'` — данные импортируются как есть, без экранирования, как в формате [TSVRaw](../../interfaces/formats.md#tabseparatedraw). + +Значение по умолчанию: `'Escaped'`. + +## format_custom_field_delimiter {#format-custom-field-delimiter} + +Задает символ, который интерпретируется как разделитель между полями данных формата [CustomSeparated](../../interfaces/formats.md#format-customseparated). + +Значение по умолчанию: `'\t'`. + +## format_custom_row_before_delimiter {#format-custom-row-before-delimiter} + +Задает символ, который интерпретируется как разделитель перед полем первого столбца данных формата [CustomSeparated](../../interfaces/formats.md#format-customseparated). + +Значение по умолчанию: `''`. + +## format_custom_row_after_delimiter {#format-custom-row-after-delimiter} + +Задает символ, который интерпретируется как разделитель после поля последнего столбца данных формата [CustomSeparated](../../interfaces/formats.md#format-customseparated). + +Значение по умолчанию: `'\n'`. + +## format_custom_row_between_delimiter {#format-custom-row-between-delimiter} + +Задает символ, который интерпретируется как разделитель между строками данных формата [CustomSeparated](../../interfaces/formats.md#format-customseparated). + +Значение по умолчанию: `''`. + +## format_custom_result_before_delimiter {#format-custom-result-before-delimiter} + +Задает символ, который интерпретируется как префикс перед результирующим набором данных формата [CustomSeparated](../../interfaces/formats.md#format-customseparated). + +Значение по умолчанию: `''`. + +## format_custom_result_after_delimiter {#format-custom-result-after-delimiter} + +Задает символ, который интерпретируется как суффикс после результирующего набора данных формата [CustomSeparated](../../interfaces/formats.md#format-customseparated). + +Значение по умолчанию: `''`. 
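
To see how the `format_custom_*` settings above fit together, here is a minimal sketch (not part of the patch; the delimiter values are arbitrary and chosen only for illustration):

```sql
-- Illustrative only: delimiter values are arbitrary.
SET format_custom_escaping_rule = 'CSV',
    format_custom_field_delimiter = ';',
    format_custom_row_before_delimiter = '<',
    format_custom_row_after_delimiter = '>\n';

SELECT number AS n, toString(number) AS s
FROM numbers(3)
FORMAT CustomSeparated;
```

With these settings the output should look roughly like the following (strings are quoted because the escaping rule is `CSV`):

```text
<0;"0">
<1;"1">
<2;"2">
```
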
diff --git a/docs/ru/operations/utilities/clickhouse-format.md b/docs/ru/operations/utilities/clickhouse-format.md index 876c741e0ac..0c8132caf77 100644 --- a/docs/ru/operations/utilities/clickhouse-format.md +++ b/docs/ru/operations/utilities/clickhouse-format.md @@ -1,98 +1,114 @@ ---- -toc_priority: 65 -toc_title: clickhouse-format ---- - -# clickhouse-format {#clickhouse-format} - -Позволяет форматировать входящие запросы. - -Ключи: - -- `--help` или`-h` — выводит описание ключей. -- `--hilite` — добавляет подсветку синтаксиса с экранированием символов. -- `--oneline` — форматирование в одну строку. -- `--quiet` или `-q` — проверяет синтаксис без вывода результата. -- `--multiquery` or `-n` — поддерживает несколько запросов в одной строке. -- `--obfuscate` — обфусцирует вместо форматирования. -- `--seed <строка>` — задает строку, которая определяет результат обфускации. -- `--backslash` — добавляет обратный слеш в конце каждой строки отформатированного запроса. Удобно использовать если многострочный запрос скопирован из интернета или другого источника и его нужно выполнить из командной строки. - -## Примеры {#examples} - -1. Подсветка синтаксиса и форматирование в одну строку: - -```bash -$ clickhouse-format --oneline --hilite <<< "SELECT sum(number) FROM numbers(5);" -``` - -Результат: - -```sql -SELECT sum(number) FROM numbers(5) -``` - -2. Несколько запросов в одной строке: - -```bash -$ clickhouse-format -n <<< "SELECT * FROM (SELECT 1 AS x UNION ALL SELECT 1 UNION DISTINCT SELECT 3);" -``` - -Результат: - -```text -SELECT * -FROM -( - SELECT 1 AS x - UNION ALL - SELECT 1 - UNION DISTINCT - SELECT 3 -) -; -``` - -3. Обфускация: - -```bash -$ clickhouse-format --seed Hello --obfuscate <<< "SELECT cost_first_screen BETWEEN a AND b, CASE WHEN x >= 123 THEN y ELSE NULL END;" -``` - -Результат: - -```text -SELECT treasury_mammoth_hazelnut BETWEEN nutmeg AND span, CASE WHEN chive >= 116 THEN switching ELSE ANYTHING END; -``` - -Тот же запрос с другой инициализацией обфускатора: - -```bash -$ clickhouse-format --seed World --obfuscate <<< "SELECT cost_first_screen BETWEEN a AND b, CASE WHEN x >= 123 THEN y ELSE NULL END;" -``` - -Результат: - -```text -SELECT horse_tape_summer BETWEEN folklore AND moccasins, CASE WHEN intestine >= 116 THEN nonconformist ELSE FORESTRY END; -``` - -4. Добавление обратного слеша: - -```bash -$ clickhouse-format --backslash <<< "SELECT * FROM (SELECT 1 AS x UNION ALL SELECT 1 UNION DISTINCT SELECT 3);" -``` - -Результат: - -```text -SELECT * \ -FROM \ -( \ - SELECT 1 AS x \ - UNION ALL \ - SELECT 1 \ - UNION DISTINCT \ - SELECT 3 \ -) -``` +--- +toc_priority: 65 +toc_title: clickhouse-format +--- + +# clickhouse-format {#clickhouse-format} + +Позволяет форматировать входящие запросы. + +Ключи: + +- `--help` или`-h` — выводит описание ключей. +- `--query` — форматирует запрос любой длины и сложности. +- `--hilite` — добавляет подсветку синтаксиса с экранированием символов. +- `--oneline` — форматирование в одну строку. +- `--quiet` или `-q` — проверяет синтаксис без вывода результата. +- `--multiquery` or `-n` — поддерживает несколько запросов в одной строке. +- `--obfuscate` — обфусцирует вместо форматирования. +- `--seed <строка>` — задает строку, которая определяет результат обфускации. +- `--backslash` — добавляет обратный слеш в конце каждой строки отформатированного запроса. Удобно использовать если многострочный запрос скопирован из интернета или другого источника и его нужно выполнить из командной строки. + +## Примеры {#examples} + +1. 
Форматирование запроса: + +```bash +$ clickhouse-format --query "select number from numbers(10) where number%2 order by number desc;" +``` + +Результат: + +```text +SELECT number +FROM numbers(10) +WHERE number % 2 +ORDER BY number DESC +``` + +2. Подсветка синтаксиса и форматирование в одну строку: + +```bash +$ clickhouse-format --oneline --hilite <<< "SELECT sum(number) FROM numbers(5);" +``` + +Результат: + +```sql +SELECT sum(number) FROM numbers(5) +``` + +3. Несколько запросов в одной строке: + +```bash +$ clickhouse-format -n <<< "SELECT * FROM (SELECT 1 AS x UNION ALL SELECT 1 UNION DISTINCT SELECT 3);" +``` + +Результат: + +```text +SELECT * +FROM +( + SELECT 1 AS x + UNION ALL + SELECT 1 + UNION DISTINCT + SELECT 3 +) +; +``` + +4. Обфускация: + +```bash +$ clickhouse-format --seed Hello --obfuscate <<< "SELECT cost_first_screen BETWEEN a AND b, CASE WHEN x >= 123 THEN y ELSE NULL END;" +``` + +Результат: + +```text +SELECT treasury_mammoth_hazelnut BETWEEN nutmeg AND span, CASE WHEN chive >= 116 THEN switching ELSE ANYTHING END; +``` + +Тот же запрос с другой инициализацией обфускатора: + +```bash +$ clickhouse-format --seed World --obfuscate <<< "SELECT cost_first_screen BETWEEN a AND b, CASE WHEN x >= 123 THEN y ELSE NULL END;" +``` + +Результат: + +```text +SELECT horse_tape_summer BETWEEN folklore AND moccasins, CASE WHEN intestine >= 116 THEN nonconformist ELSE FORESTRY END; +``` + +5. Добавление обратного слеша: + +```bash +$ clickhouse-format --backslash <<< "SELECT * FROM (SELECT 1 AS x UNION ALL SELECT 1 UNION DISTINCT SELECT 3);" +``` + +Результат: + +```text +SELECT * \ +FROM \ +( \ + SELECT 1 AS x \ + UNION ALL \ + SELECT 1 \ + UNION DISTINCT \ + SELECT 3 \ +) +``` diff --git a/docs/ru/sql-reference/aggregate-functions/reference/exponentialmovingaverage.md b/docs/ru/sql-reference/aggregate-functions/reference/exponentialmovingaverage.md new file mode 100644 index 00000000000..40ec14b6182 --- /dev/null +++ b/docs/ru/sql-reference/aggregate-functions/reference/exponentialmovingaverage.md @@ -0,0 +1,148 @@ +--- +toc_priority: 108 +--- + +## exponentialMovingAverage {#exponential-moving-average} + +Вычисляет экспоненциальное скользящее среднее за определенный промежуток времени. + +**Синтакис:** + +```sql +exponentialMovingAverage(x)(value, timestamp) +``` + +Каждой точке `timestamp` на временном отрезке соответствует определенное значение `value`. Период полураспада — временной интервал `х`, через который вес значений уменьшается в 2 раза. Функция возвращает взвешенное среднее: чем старше временная точка, тем c меньшим весом считается соответствующее ей значение. + +**Аргументы** + +- `value` — входные значения. [Integer](../../../sql-reference/data-types/int-uint.md), [Float](../../../sql-reference/data-types/float.md) или [Decimal](../../../sql-reference/data-types/decimal.md). +- `timestamp` — параметр для упорядочивания значений. [Integer](../../../sql-reference/data-types/int-uint.md), [Float](../../../sql-reference/data-types/float.md) или [Decimal](../../../sql-reference/data-types/decimal.md). + +**Параметры** + +- `x` — период полураспада. [Integer](../../../sql-reference/data-types/int-uint.md), [Float](../../../sql-reference/data-types/float.md) или [Decimal](../../../sql-reference/data-types/decimal.md). + +**Возвращаемые значения** + +- Возвращает [экспоненциальное скользящее среднее](https://ru.wikipedia.org/wiki/Скользящая_средняя#Экспоненциально_взвешенное_скользящее_среднее) за прошедшее время `x` в последний момент времени. 
+ +Тип: [Float64](../../../sql-reference/data-types/float.md#float32-float64). + +**Пример** + +Исходная таблица: + +``` text +┌──temperature─┬─timestamp──┐ +│ 95 │ 1 │ +│ 95 │ 2 │ +│ 95 │ 3 │ +│ 96 │ 4 │ +│ 96 │ 5 │ +│ 96 │ 6 │ +│ 96 │ 7 │ +│ 97 │ 8 │ +│ 97 │ 9 │ +│ 97 │ 10 │ +│ 97 │ 11 │ +│ 98 │ 12 │ +│ 98 │ 13 │ +│ 98 │ 14 │ +│ 98 │ 15 │ +│ 99 │ 16 │ +│ 99 │ 17 │ +│ 99 │ 18 │ +│ 100 │ 19 │ +│ 100 │ 20 │ +└──────────────┴────────────┘ +``` + +Запрос: + +```sql +SELECT exponentialMovingAverage(5)(temperature, timestamp); +``` + +Результат: + +``` text +┌──exponentialMovingAverage(5)(temperature, timestamp)──┐ +│ 92.25779635374204 │ +└───────────────────────────────────────────────────────┘ +``` + +Запрос: + +```sql +SELECT + value, + time, + round(exp_smooth, 3), + bar(exp_smooth, 0, 1, 50) AS bar +FROM +( + SELECT + (number = 0) OR (number >= 25) AS value, + number AS time, + exponentialMovingAverage(10)(value, time) OVER (Rows BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS exp_smooth + FROM numbers(50) +) +``` + +Результат: + +``` text +┌─value─┬─time─┬─round(exp_smooth, 3)─┬─bar────────────────────────────────────────┐ +│ 1 │ 0 │ 0.067 │ ███▎ │ +│ 0 │ 1 │ 0.062 │ ███ │ +│ 0 │ 2 │ 0.058 │ ██▊ │ +│ 0 │ 3 │ 0.054 │ ██▋ │ +│ 0 │ 4 │ 0.051 │ ██▌ │ +│ 0 │ 5 │ 0.047 │ ██▎ │ +│ 0 │ 6 │ 0.044 │ ██▏ │ +│ 0 │ 7 │ 0.041 │ ██ │ +│ 0 │ 8 │ 0.038 │ █▊ │ +│ 0 │ 9 │ 0.036 │ █▋ │ +│ 0 │ 10 │ 0.033 │ █▋ │ +│ 0 │ 11 │ 0.031 │ █▌ │ +│ 0 │ 12 │ 0.029 │ █▍ │ +│ 0 │ 13 │ 0.027 │ █▎ │ +│ 0 │ 14 │ 0.025 │ █▎ │ +│ 0 │ 15 │ 0.024 │ █▏ │ +│ 0 │ 16 │ 0.022 │ █ │ +│ 0 │ 17 │ 0.021 │ █ │ +│ 0 │ 18 │ 0.019 │ ▊ │ +│ 0 │ 19 │ 0.018 │ ▊ │ +│ 0 │ 20 │ 0.017 │ ▋ │ +│ 0 │ 21 │ 0.016 │ ▋ │ +│ 0 │ 22 │ 0.015 │ ▋ │ +│ 0 │ 23 │ 0.014 │ ▋ │ +│ 0 │ 24 │ 0.013 │ ▋ │ +│ 1 │ 25 │ 0.079 │ ███▊ │ +│ 1 │ 26 │ 0.14 │ ███████ │ +│ 1 │ 27 │ 0.198 │ █████████▊ │ +│ 1 │ 28 │ 0.252 │ ████████████▌ │ +│ 1 │ 29 │ 0.302 │ ███████████████ │ +│ 1 │ 30 │ 0.349 │ █████████████████▍ │ +│ 1 │ 31 │ 0.392 │ ███████████████████▌ │ +│ 1 │ 32 │ 0.433 │ █████████████████████▋ │ +│ 1 │ 33 │ 0.471 │ ███████████████████████▌ │ +│ 1 │ 34 │ 0.506 │ █████████████████████████▎ │ +│ 1 │ 35 │ 0.539 │ ██████████████████████████▊ │ +│ 1 │ 36 │ 0.57 │ ████████████████████████████▌ │ +│ 1 │ 37 │ 0.599 │ █████████████████████████████▊ │ +│ 1 │ 38 │ 0.626 │ ███████████████████████████████▎ │ +│ 1 │ 39 │ 0.651 │ ████████████████████████████████▌ │ +│ 1 │ 40 │ 0.674 │ █████████████████████████████████▋ │ +│ 1 │ 41 │ 0.696 │ ██████████████████████████████████▋ │ +│ 1 │ 42 │ 0.716 │ ███████████████████████████████████▋ │ +│ 1 │ 43 │ 0.735 │ ████████████████████████████████████▋ │ +│ 1 │ 44 │ 0.753 │ █████████████████████████████████████▋ │ +│ 1 │ 45 │ 0.77 │ ██████████████████████████████████████▍ │ +│ 1 │ 46 │ 0.785 │ ███████████████████████████████████████▎ │ +│ 1 │ 47 │ 0.8 │ ███████████████████████████████████████▊ │ +│ 1 │ 48 │ 0.813 │ ████████████████████████████████████████▋ │ +│ 1 │ 49 │ 0.825 │ █████████████████████████████████████████▎│ +└───────┴──────┴──────────────────────┴────────────────────────────────────────────┘ +``` \ No newline at end of file diff --git a/docs/ru/sql-reference/dictionaries/external-dictionaries/external-dicts-dict-sources.md b/docs/ru/sql-reference/dictionaries/external-dictionaries/external-dicts-dict-sources.md index 3baef3f5ba7..f25ce4c203d 100644 --- a/docs/ru/sql-reference/dictionaries/external-dictionaries/external-dicts-dict-sources.md +++ b/docs/ru/sql-reference/dictionaries/external-dictionaries/external-dicts-dict-sources.md 
@@ -748,7 +748,7 @@ SOURCE(REDIS( !!! info "Примечание" Поля `column_family` или `where` не могут быть использованы вместе с полем `query`. Также обязательно должен быть один из источников данных: `column_family` или `query`. -### PosgreSQL {#dicts-external_dicts_dict_sources-postgresql} +### PostgreSQL {#dicts-external_dicts_dict_sources-postgresql} Пример настройки: diff --git a/docs/ru/sql-reference/functions/hash-functions.md b/docs/ru/sql-reference/functions/hash-functions.md index c7a738acd04..06e97181be0 100644 --- a/docs/ru/sql-reference/functions/hash-functions.md +++ b/docs/ru/sql-reference/functions/hash-functions.md @@ -89,9 +89,39 @@ SELECT sipHash64(array('e','x','a'), 'mple', 10, toDateTime('2019-06-15 23:00:00 ## sipHash128 {#hash_functions-siphash128} -Вычисляет SipHash от строки. -Принимает аргумент типа String. Возвращает FixedString(16). -Отличается от sipHash64 тем, что финальный xor-folding состояния делается только до 128 бит. +Генерирует 128-битное хеш-значение [SipHash](https://131002.net/siphash/). Отличается от [sipHash64](#hash_functions-siphash64) тем, что финальный xor-folding состояния делается до 128 бит. + +**Синтаксис** + +``` sql +sipHash128(par1,...) +``` + +**Аргументы** + +Функция принимает переменное число входных параметров. Аргументы могут быть любого [поддерживаемого типа данных](../../sql-reference/functions/hash-functions.md). + +**Возвращаемое значение** + +128-битное хеш-значение `SipHash`. + +Тип: [FixedString(16)](../../sql-reference/data-types/fixedstring.md). + +**Пример** + +Запрос: + +``` sql +SELECT hex(sipHash128('foo', '\x01', 3)); +``` + +Результат: + +``` text +┌─hex(sipHash128('foo', '', 3))────┐ +│ 9DE516A64A414D4B1B609415E4523F24 │ +└──────────────────────────────────┘ +``` ## cityHash64 {#cityhash64} @@ -459,30 +489,38 @@ SELECT murmurHash3_32(array('e','x','a'), 'mple', 10, toDateTime('2019-06-15 23: ## murmurHash3_128 {#murmurhash3-128} -Генерирует значение [MurmurHash3](https://github.com/aappleby/smhasher). +Генерирует 128-битное хеш-значение [MurmurHash3](https://github.com/aappleby/smhasher). + +**Синтаксис** ``` sql -murmurHash3_128( expr ) +murmurHash3_128(expr) ``` **Аргументы** -- `expr` — [выражение](../syntax.md#syntax-expressions), возвращающее значение типа [String](../../sql-reference/functions/hash-functions.md). +- `expr` — список [выражений](../../sql-reference/syntax.md#syntax-expressions). [String](../../sql-reference/data-types/string.md). **Возвращаемое значение** -Хэш-значение типа [FixedString(16)](../../sql-reference/functions/hash-functions.md). +128-битное значение хеш-значение `MurmurHash3`. + +Тип: [FixedString(16)](../../sql-reference/data-types/fixedstring.md). **Пример** +Запрос: + ``` sql -SELECT hex(murmurHash3_128('example_string')) AS MurmurHash3, toTypeName(MurmurHash3) AS type; +SELECT hex(murmurHash3_128('foo', 'foo', 'foo')); ``` +Результат: + ``` text -┌─MurmurHash3──────────────────────┬─type───┐ -│ 368A1A311CB7342253354B548E7E7E71 │ String │ -└──────────────────────────────────┴────────┘ +┌─hex(murmurHash3_128('foo', 'foo', 'foo'))─┐ +│ F8F7AD9B6CD4CF117A71E277E2EC2931 │ +└───────────────────────────────────────────┘ ``` ## xxHash32, xxHash64 {#hash-functions-xxhash32-xxhash64} diff --git a/programs/install/Install.cpp b/programs/install/Install.cpp index 556932510fb..a017355cfdd 100644 --- a/programs/install/Install.cpp +++ b/programs/install/Install.cpp @@ -492,8 +492,9 @@ int mainEntryClickHouseInstall(int argc, char ** argv) /// Override the default paths. /// Data paths. 
+ const std::string data_file = config_d / "data-paths.xml"; + if (!fs::exists(data_file)) { - std::string data_file = config_d / "data-paths.xml"; WriteBufferFromFile out(data_file); out << "\n" " " << data_path.string() << "\n" @@ -503,12 +504,14 @@ int mainEntryClickHouseInstall(int argc, char ** argv) "\n"; out.sync(); out.finalize(); + fs::permissions(data_file, fs::perms::owner_read, fs::perm_options::replace); fmt::print("Data path configuration override is saved to file {}.\n", data_file); } /// Logger. + const std::string logger_file = config_d / "logger.xml"; + if (!fs::exists(logger_file)) { - std::string logger_file = config_d / "logger.xml"; WriteBufferFromFile out(logger_file); out << "\n" " \n" @@ -518,12 +521,14 @@ int mainEntryClickHouseInstall(int argc, char ** argv) "\n"; out.sync(); out.finalize(); + fs::permissions(logger_file, fs::perms::owner_read, fs::perm_options::replace); fmt::print("Log path configuration override is saved to file {}.\n", logger_file); } /// User directories. + const std::string user_directories_file = config_d / "user-directories.xml"; + if (!fs::exists(user_directories_file)) { - std::string user_directories_file = config_d / "user-directories.xml"; WriteBufferFromFile out(user_directories_file); out << "\n" " \n" @@ -534,12 +539,14 @@ int mainEntryClickHouseInstall(int argc, char ** argv) "\n"; out.sync(); out.finalize(); + fs::permissions(user_directories_file, fs::perms::owner_read, fs::perm_options::replace); fmt::print("User directory path configuration override is saved to file {}.\n", user_directories_file); } /// OpenSSL. + const std::string openssl_file = config_d / "openssl.xml"; + if (!fs::exists(openssl_file)) { - std::string openssl_file = config_d / "openssl.xml"; WriteBufferFromFile out(openssl_file); out << "\n" " \n" @@ -552,6 +559,7 @@ int mainEntryClickHouseInstall(int argc, char ** argv) "\n"; out.sync(); out.finalize(); + fs::permissions(openssl_file, fs::perms::owner_read, fs::perm_options::replace); fmt::print("OpenSSL path configuration override is saved to file {}.\n", openssl_file); } } @@ -761,12 +769,13 @@ int mainEntryClickHouseInstall(int argc, char ** argv) #if defined(__linux__) fmt::print("Setting capabilities for clickhouse binary. This is optional.\n"); std::string command = fmt::format("command -v setcap >/dev/null" - " && echo > {0} && chmod a+x {0} && {0} && setcap 'cap_net_admin,cap_ipc_lock,cap_sys_nice+ep' {0} && {0} && rm {0}" - " && setcap 'cap_net_admin,cap_ipc_lock,cap_sys_nice+ep' {1}" + " && command -v capsh >/dev/null" + " && capsh --has-p=cap_net_admin,cap_ipc_lock,cap_sys_nice+ep >/dev/null 2>&1" + " && setcap 'cap_net_admin,cap_ipc_lock,cap_sys_nice+ep' {0}" " || echo \"Cannot set 'net_admin' or 'ipc_lock' or 'sys_nice' capability for clickhouse binary." " This is optional. Taskstats accounting will be disabled." 
" To enable taskstats accounting you may add the required capability later manually.\"", - "/tmp/test_setcap.sh", fs::canonical(main_bin_path).string()); + fs::canonical(main_bin_path).string()); executeScript(command); #endif diff --git a/programs/keeper/keeper_config.xml b/programs/keeper/keeper_config.xml index 02d5be70563..8b4d4274e6a 100644 --- a/programs/keeper/keeper_config.xml +++ b/programs/keeper/keeper_config.xml @@ -49,7 +49,7 @@ localhost - 44444 + 9234 diff --git a/programs/keeper/keeper_embedded.xml b/programs/keeper/keeper_embedded.xml index 9d08a3ce910..d887b95a735 100644 --- a/programs/keeper/keeper_embedded.xml +++ b/programs/keeper/keeper_embedded.xml @@ -14,7 +14,7 @@ 1 localhost - 44444 + 9234 diff --git a/programs/main.cpp b/programs/main.cpp index 8c70dcacc99..a71131c59ec 100644 --- a/programs/main.cpp +++ b/programs/main.cpp @@ -328,7 +328,11 @@ struct Checker { checkRequiredInstructions(); } -} checker __attribute__((init_priority(101))); /// Run before other static initializers. +} checker +#ifndef __APPLE__ + __attribute__((init_priority(101))) /// Run before other static initializers. +#endif +; } diff --git a/src/AggregateFunctions/AggregateFunctionCount.h b/src/AggregateFunctions/AggregateFunctionCount.h index 022789cc96c..1567bda5e79 100644 --- a/src/AggregateFunctions/AggregateFunctionCount.h +++ b/src/AggregateFunctions/AggregateFunctionCount.h @@ -203,6 +203,21 @@ public: data(place).count += !assert_cast(*columns[0]).isNullAt(row_num); } + void addBatchSinglePlace( + size_t batch_size, AggregateDataPtr place, const IColumn ** columns, Arena *, ssize_t if_argument_pos) const override + { + auto & nc = assert_cast(*columns[0]); + if (if_argument_pos >= 0) + { + const auto & flags = assert_cast(*columns[if_argument_pos]).getData(); + data(place).count += countBytesInFilterWithNull(flags, nc.getNullMapData().data()); + } + else + { + data(place).count += batch_size - countBytesInFilter(nc.getNullMapData().data(), batch_size); + } + } + void merge(AggregateDataPtr __restrict place, ConstAggregateDataPtr rhs, Arena *) const override { data(place).count += data(rhs).count; diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index b098a478c9e..709a8f97df0 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -549,10 +549,5 @@ if (ENABLE_TESTS AND USE_GTEST) clickhouse_common_zookeeper string_utils) - # For __udivmodti4 referenced in Core/tests/gtest_DecimalFunctions.cpp - if (OS_DARWIN AND COMPILER_GCC) - target_link_libraries(unit_tests_dbms PRIVATE gcc) - endif () - add_check(unit_tests_dbms) endif () diff --git a/src/Common/Elf.cpp b/src/Common/Elf.cpp index ebf4e38ba13..7f6232c5525 100644 --- a/src/Common/Elf.cpp +++ b/src/Common/Elf.cpp @@ -119,6 +119,24 @@ std::optional Elf::findSectionByName(const char * name) const String Elf::getBuildID() const { + /// Section headers are the first choice for a debuginfo file + if (String build_id; iterateSections([&build_id](const Section & section, size_t) + { + if (section.header.sh_type == SHT_NOTE) + { + build_id = Elf::getBuildID(section.begin(), section.size()); + if (!build_id.empty()) + { + return true; + } + } + return false; + })) + { + return build_id; + } + + /// fallback to PHDR for (size_t idx = 0; idx < header->e_phnum; ++idx) { const ElfPhdr & phdr = program_headers[idx]; @@ -126,6 +144,7 @@ String Elf::getBuildID() const if (phdr.p_type == PT_NOTE) return getBuildID(mapped + phdr.p_offset, phdr.p_filesz); } + return {}; } diff --git a/src/Common/Elf.h b/src/Common/Elf.h index 
90783ddc18d..30a28f6e9a9 100644 --- a/src/Common/Elf.h +++ b/src/Common/Elf.h @@ -54,7 +54,8 @@ public: const char * end() const { return mapped + elf_size; } size_t size() const { return elf_size; } - /// Obtain build id from PT_NOTES section of program headers. Return empty string if does not exist. + /// Obtain build id from SHT_NOTE of section headers (fallback to PT_NOTES section of program headers). + /// Return empty string if does not exist. /// The string is returned in binary. Note that "readelf -n ./clickhouse-server" prints it in hex. String getBuildID() const; static String getBuildID(const char * nhdr_pos, size_t size); diff --git a/src/Common/ThreadStatus.h b/src/Common/ThreadStatus.h index d22f6e1574b..97ddda1ea30 100644 --- a/src/Common/ThreadStatus.h +++ b/src/Common/ThreadStatus.h @@ -165,7 +165,7 @@ protected: std::function fatal_error_callback; /// It is used to avoid enabling the query profiler when you have multiple ThreadStatus in the same thread - bool query_profiled_enabled = true; + bool query_profiler_enabled = true; /// Requires access to query_id. friend class MemoryTrackerThreadSwitcher; @@ -207,7 +207,8 @@ public: void disableProfiling() { - query_profiled_enabled = false; + assert(!query_profiler_real && !query_profiler_cpu); + query_profiler_enabled = false; } /// Starts new query and create new thread group for it, current thread becomes master thread of the query diff --git a/src/Functions/FunctionsAES.h b/src/Functions/FunctionsAES.h index 58a7947a135..d3796081f18 100644 --- a/src/Functions/FunctionsAES.h +++ b/src/Functions/FunctionsAES.h @@ -279,37 +279,33 @@ private: // That may lead later to reading unallocated data from underlying PaddedPODArray // due to assumption that it is safe to read up to 15 bytes past end. const auto pad_to_next_block = block_size == 1 ? 0 : 1; - for (size_t r = 0; r < input_rows_count; ++r) + for (size_t row_idx = 0; row_idx < input_rows_count; ++row_idx) { - resulting_size += (input_column->getDataAt(r).size / block_size + pad_to_next_block) * block_size + 1; + resulting_size += (input_column->getDataAt(row_idx).size / block_size + pad_to_next_block) * block_size + 1; if constexpr (mode == CipherMode::RFC5116_AEAD_AES_GCM) resulting_size += tag_size; } -#if defined(MEMORY_SANITIZER) - encrypted_result_column_data.resize_fill(resulting_size, 0xFF); -#else encrypted_result_column_data.resize(resulting_size); -#endif } auto * encrypted = encrypted_result_column_data.data(); KeyHolder key_holder; - for (size_t r = 0; r < input_rows_count; ++r) + for (size_t row_idx = 0; row_idx < input_rows_count; ++row_idx) { - const auto key_value = key_holder.setKey(key_size, key_column->getDataAt(r)); + const auto key_value = key_holder.setKey(key_size, key_column->getDataAt(row_idx)); auto iv_value = StringRef{}; if (iv_column) { - iv_value = iv_column->getDataAt(r); + iv_value = iv_column->getDataAt(row_idx); /// If the length is zero (empty string is passed) it should be treat as no IV. 
if (iv_value.size == 0) iv_value.data = nullptr; } - const StringRef input_value = input_column->getDataAt(r); + const StringRef input_value = input_column->getDataAt(row_idx); if constexpr (mode != CipherMode::MySQLCompatibility) { @@ -348,7 +344,7 @@ private: // 1.a.2 Set AAD if (aad_column) { - const auto aad_data = aad_column->getDataAt(r); + const auto aad_data = aad_column->getDataAt(row_idx); int tmp_len = 0; if (aad_data.size != 0 && EVP_EncryptUpdate(evp_ctx, nullptr, &tmp_len, reinterpret_cast(aad_data.data), aad_data.size) != 1) @@ -408,7 +404,7 @@ private: }; -/// AES_decrypt(string, key, block_mode[, init_vector]) +/// decrypt(string, key, block_mode[, init_vector]) template class FunctionDecrypt : public IFunction { @@ -471,7 +467,9 @@ private: ColumnPtr result_column; if (arguments.size() <= 3) + { result_column = doDecrypt(evp_cipher, input_rows_count, input_column, key_column, nullptr, nullptr); + } else { const auto iv_column = arguments[3].column; @@ -548,59 +546,58 @@ private: { size_t resulting_size = 0; - for (size_t r = 0; r < input_rows_count; ++r) + for (size_t row_idx = 0; row_idx < input_rows_count; ++row_idx) { - size_t string_size = input_column->getDataAt(r).size; + size_t string_size = input_column->getDataAt(row_idx).size; resulting_size += string_size + 1; /// With terminating zero. if constexpr (mode == CipherMode::RFC5116_AEAD_AES_GCM) { - if (string_size < tag_size) - throw Exception("Encrypted data is smaller than the size of additional data for AEAD mode, cannot decrypt.", - ErrorCodes::BAD_ARGUMENTS); + if (string_size > 0) + { + if (string_size < tag_size) + throw Exception("Encrypted data is smaller than the size of additional data for AEAD mode, cannot decrypt.", + ErrorCodes::BAD_ARGUMENTS); - resulting_size -= tag_size; + resulting_size -= tag_size; + } } } -#if defined(MEMORY_SANITIZER) - // Pre-fill result column with values to prevent MSAN from dropping dead on - // aes-X-ecb mode with "WARNING: MemorySanitizer: use-of-uninitialized-value". - // This is most likely to be caused by the underlying assembler implementation: - // see crypto/aes/aesni-x86_64.s, function aesni_ecb_encrypt - // which msan seems to fail instrument correctly. - decrypted_result_column_data.resize_fill(resulting_size, 0xFF); -#else decrypted_result_column_data.resize(resulting_size); -#endif } auto * decrypted = decrypted_result_column_data.data(); KeyHolder key_holder; - for (size_t r = 0; r < input_rows_count; ++r) + for (size_t row_idx = 0; row_idx < input_rows_count; ++row_idx) { // 0: prepare key if required - auto key_value = key_holder.setKey(key_size, key_column->getDataAt(r)); + auto key_value = key_holder.setKey(key_size, key_column->getDataAt(row_idx)); auto iv_value = StringRef{}; if (iv_column) { - iv_value = iv_column->getDataAt(r); + iv_value = iv_column->getDataAt(row_idx); /// If the length is zero (empty string is passed) it should be treat as no IV. if (iv_value.size == 0) iv_value.data = nullptr; } - auto input_value = input_column->getDataAt(r); + auto input_value = input_column->getDataAt(row_idx); + if constexpr (mode == CipherMode::RFC5116_AEAD_AES_GCM) { - // empty plaintext results in empty ciphertext + tag, means there should be at least tag_size bytes. 
- if (input_value.size < tag_size) - throw Exception(fmt::format("Encrypted data is too short: only {} bytes, " - "should contain at least {} bytes of a tag.", - input_value.size, block_size, tag_size), ErrorCodes::BAD_ARGUMENTS); - input_value.size -= tag_size; + if (input_value.size > 0) + { + // empty plaintext results in empty ciphertext + tag, means there should be at least tag_size bytes. + if (input_value.size < tag_size) + throw Exception(fmt::format("Encrypted data is too short: only {} bytes, " + "should contain at least {} bytes of a tag.", + input_value.size, block_size, tag_size), ErrorCodes::BAD_ARGUMENTS); + + input_value.size -= tag_size; + } } if constexpr (mode != CipherMode::MySQLCompatibility) @@ -619,8 +616,9 @@ private: } } - // Avoid extra work on empty ciphertext/plaintext for some ciphers - if (!(input_value.size == 0 && block_size == 1 && mode != CipherMode::RFC5116_AEAD_AES_GCM)) + /// Avoid extra work on empty ciphertext/plaintext. Always decrypt empty to empty. + /// This makes sense for default implementation for NULLs. + if (input_value.size > 0) { // 1: Init CTX if constexpr (mode == CipherMode::RFC5116_AEAD_AES_GCM) @@ -641,7 +639,7 @@ private: // 1.a.2: Set AAD if present if (aad_column) { - StringRef aad_data = aad_column->getDataAt(r); + StringRef aad_data = aad_column->getDataAt(row_idx); int tmp_len = 0; if (aad_data.size != 0 && EVP_DecryptUpdate(evp_ctx, nullptr, &tmp_len, reinterpret_cast(aad_data.data), aad_data.size) != 1) diff --git a/src/Interpreters/DDLWorker.cpp b/src/Interpreters/DDLWorker.cpp index 9fefcf012a9..27bb4906f1a 100644 --- a/src/Interpreters/DDLWorker.cpp +++ b/src/Interpreters/DDLWorker.cpp @@ -53,8 +53,6 @@ namespace ErrorCodes constexpr const char * TASK_PROCESSED_OUT_REASON = "Task has been already processed"; -namespace -{ /** Caveats: usage of locks in ZooKeeper is incorrect in 99% of cases, * and highlights your poor understanding of distributed systems. @@ -104,14 +102,29 @@ public: void unlock() { + if (!locked) + return; + + locked = false; + + if (zookeeper->expired()) + { + LOG_WARNING(log, "Lock is lost, because session was expired. Path: {}, message: {}", lock_path, lock_message); + return; + } + Coordination::Stat stat; std::string dummy; + /// NOTE It will throw if session expired after we checked it above bool result = zookeeper->tryGet(lock_path, dummy, &stat); if (result && stat.ephemeralOwner == zookeeper->getClientID()) zookeeper->remove(lock_path, -1); + else if (result) + throw Exception(ErrorCodes::LOGICAL_ERROR, "Lock is lost, it has another owner. Path: {}, message: {}, owner: {}, our id: {}", + lock_path, lock_message, stat.ephemeralOwner, zookeeper->getClientID()); else - LOG_WARNING(log, "Lock is lost. It is normal if session was expired. Path: {}/{}", lock_path, lock_message); + throw Exception(ErrorCodes::LOGICAL_ERROR, "Lock is lost, node does not exist. 
Path: {}, message: {}", lock_path, lock_message); } bool tryLock() @@ -119,18 +132,16 @@ public: std::string dummy; Coordination::Error code = zookeeper->tryCreate(lock_path, lock_message, zkutil::CreateMode::Ephemeral, dummy); - if (code == Coordination::Error::ZNODEEXISTS) + if (code == Coordination::Error::ZOK) { - return false; + locked = true; } - else if (code == Coordination::Error::ZOK) - { - return true; - } - else + else if (code != Coordination::Error::ZNODEEXISTS) { throw Coordination::Exception(code); } + + return locked; } private: @@ -139,6 +150,7 @@ private: std::string lock_path; std::string lock_message; Poco::Logger * log; + bool locked = false; }; @@ -148,8 +160,6 @@ std::unique_ptr createSimpleZooKeeperLock( return std::make_unique(zookeeper, lock_prefix, lock_name, lock_message); } -} - DDLWorker::DDLWorker( int pool_size_, @@ -644,6 +654,10 @@ void DDLWorker::processTask(DDLTaskBase & task, const ZooKeeperPtr & zookeeper) zookeeper->create(active_node_path, {}, zkutil::CreateMode::Ephemeral); } + /// We must hold the lock until task execution status is committed to ZooKeeper, + /// otherwise another replica may try to execute query again. + std::unique_ptr execute_on_leader_lock; + /// Step 2: Execute query from the task. if (!task.was_executed) { @@ -674,7 +688,7 @@ void DDLWorker::processTask(DDLTaskBase & task, const ZooKeeperPtr & zookeeper) if (task.execute_on_leader) { - tryExecuteQueryOnLeaderReplica(task, storage, rewritten_query, task.entry_path, zookeeper); + tryExecuteQueryOnLeaderReplica(task, storage, rewritten_query, task.entry_path, zookeeper, execute_on_leader_lock); } else { @@ -761,7 +775,8 @@ bool DDLWorker::tryExecuteQueryOnLeaderReplica( StoragePtr storage, const String & rewritten_query, const String & /*node_path*/, - const ZooKeeperPtr & zookeeper) + const ZooKeeperPtr & zookeeper, + std::unique_ptr & execute_on_leader_lock) { StorageReplicatedMergeTree * replicated_storage = dynamic_cast(storage.get()); @@ -799,7 +814,7 @@ bool DDLWorker::tryExecuteQueryOnLeaderReplica( pcg64 rng(randomSeed()); - auto lock = createSimpleZooKeeperLock(zookeeper, shard_path, "lock", task.host_id_str); + execute_on_leader_lock = createSimpleZooKeeperLock(zookeeper, shard_path, "lock", task.host_id_str); Stopwatch stopwatch; @@ -829,7 +844,7 @@ bool DDLWorker::tryExecuteQueryOnLeaderReplica( throw Exception(ErrorCodes::NOT_A_LEADER, "Cannot execute initial query on non-leader replica"); /// Any replica which is leader tries to take lock - if (status.is_leader && lock->tryLock()) + if (status.is_leader && execute_on_leader_lock->tryLock()) { /// In replicated merge tree we can have multiple leaders. 
So we can /// be "leader" and took lock, but another "leader" replica may have @@ -858,8 +873,6 @@ bool DDLWorker::tryExecuteQueryOnLeaderReplica( executed_by_us = true; break; } - - lock->unlock(); } /// Waiting for someone who will execute query and change is_executed_path node diff --git a/src/Interpreters/DDLWorker.h b/src/Interpreters/DDLWorker.h index d2b7c9d169d..0b8b0a4a4d8 100644 --- a/src/Interpreters/DDLWorker.h +++ b/src/Interpreters/DDLWorker.h @@ -38,7 +38,7 @@ struct DDLTaskBase; using DDLTaskPtr = std::unique_ptr; using ZooKeeperPtr = std::shared_ptr; class AccessRightsElements; - +class ZooKeeperLock; class DDLWorker { @@ -94,7 +94,8 @@ protected: StoragePtr storage, const String & rewritten_query, const String & node_path, - const ZooKeeperPtr & zookeeper); + const ZooKeeperPtr & zookeeper, + std::unique_ptr & execute_on_leader_lock); bool tryExecuteQuery(const String & query, DDLTaskBase & task, const ZooKeeperPtr & zookeeper); diff --git a/src/Interpreters/OptimizeIfWithConstantConditionVisitor.cpp b/src/Interpreters/OptimizeIfWithConstantConditionVisitor.cpp index 993c9945039..0440c52797c 100644 --- a/src/Interpreters/OptimizeIfWithConstantConditionVisitor.cpp +++ b/src/Interpreters/OptimizeIfWithConstantConditionVisitor.cpp @@ -28,6 +28,8 @@ static bool tryExtractConstValueFromCondition(const ASTPtr & condition, bool & v } /// cast of numeric constant in condition to UInt8 + /// Note: this solution is ad-hoc and only implemented for yandex.metrica use case. + /// We should allow any constant condition (or maybe remove this optimization completely) later. if (const auto * function = condition->as()) { if (isFunctionCast(function)) @@ -49,6 +51,16 @@ static bool tryExtractConstValueFromCondition(const ASTPtr & condition, bool & v } } } + else if (function->name == "toUInt8" || function->name == "toInt8") + { + if (const auto * expr_list = function->arguments->as()) + { + if (expr_list->children.size() != 1) + throw Exception(ErrorCodes::NUMBER_OF_ARGUMENTS_DOESNT_MATCH, "Function {} must have exactly two arguments", function->name); + + return tryExtractConstValueFromCondition(expr_list->children.at(0), value); + } + } } return false; diff --git a/src/Interpreters/SystemLog.cpp b/src/Interpreters/SystemLog.cpp index 71155760aea..fc2a5b620e2 100644 --- a/src/Interpreters/SystemLog.cpp +++ b/src/Interpreters/SystemLog.cpp @@ -96,6 +96,22 @@ std::shared_ptr createSystemLog( } +ASTPtr ISystemLog::getCreateTableQueryClean(const StorageID & table_id, ContextPtr context) +{ + DatabasePtr database = DatabaseCatalog::instance().getDatabase(table_id.database_name); + ASTPtr old_ast = database->getCreateTableQuery(table_id.table_name, context); + auto & old_create_query_ast = old_ast->as(); + /// Reset UUID + old_create_query_ast.uuid = UUIDHelpers::Nil; + /// Existing table has default settings (i.e. `index_granularity = 8192`), reset them. 
+ if (ASTStorage * storage = old_create_query_ast.storage) + { + storage->reset(storage->settings); + } + return old_ast; +} + + SystemLogs::SystemLogs(ContextPtr global_context, const Poco::Util::AbstractConfiguration & config) { query_log = createSystemLog(global_context, "system", "query_log", config, "query_log"); diff --git a/src/Interpreters/SystemLog.h b/src/Interpreters/SystemLog.h index ee3e3062e13..d6342e3973e 100644 --- a/src/Interpreters/SystemLog.h +++ b/src/Interpreters/SystemLog.h @@ -61,6 +61,7 @@ namespace DB namespace ErrorCodes { extern const int TIMEOUT_EXCEEDED; + extern const int LOGICAL_ERROR; } #define DBMS_SYSTEM_LOG_QUEUE_SIZE 1048576 @@ -83,13 +84,18 @@ class ISystemLog { public: virtual String getName() = 0; - virtual ASTPtr getCreateTableQuery() = 0; //// force -- force table creation (used for SYSTEM FLUSH LOGS) virtual void flush(bool force = false) = 0; virtual void prepareTable() = 0; virtual void startup() = 0; virtual void shutdown() = 0; virtual ~ISystemLog() = default; + + /// returns CREATE TABLE query, but with removed: + /// - UUID + /// - SETTINGS (for MergeTree) + /// That way it can be used to compare with the SystemLog::getCreateTableQuery() + static ASTPtr getCreateTableQueryClean(const StorageID & table_id, ContextPtr context); }; @@ -171,7 +177,7 @@ public: return LogElement::name(); } - ASTPtr getCreateTableQuery() override; + ASTPtr getCreateTableQuery(); protected: Poco::Logger * log; @@ -181,6 +187,8 @@ private: const StorageID table_id; const String storage_def; StoragePtr table; + String create_query; + String old_create_query; bool is_prepared = false; const size_t flush_interval_milliseconds; ThreadFromGlobalPool saving_thread; @@ -228,6 +236,7 @@ SystemLog::SystemLog( : WithContext(context_) , table_id(database_name_, table_name_) , storage_def(storage_def_) + , create_query(serializeAST(*getCreateTableQuery())) , flush_interval_milliseconds(flush_interval_milliseconds_) { assert(database_name_ == DatabaseCatalog::SYSTEM_DATABASE); @@ -520,14 +529,14 @@ void SystemLog::prepareTable() if (table) { - auto metadata_columns = table->getInMemoryMetadataPtr()->getColumns(); - auto old_query = InterpreterCreateQuery::formatColumns(metadata_columns); + if (old_create_query.empty()) + { + old_create_query = serializeAST(*getCreateTableQueryClean(table_id, getContext())); + if (old_create_query.empty()) + throw Exception(ErrorCodes::LOGICAL_ERROR, "Empty CREATE QUERY for {}", backQuoteIfNeed(table_id.table_name)); + } - auto ordinary_columns = LogElement::getNamesAndTypes(); - auto alias_columns = LogElement::getNamesAndAliases(); - auto current_query = InterpreterCreateQuery::formatColumns(ordinary_columns, alias_columns); - - if (serializeAST(*old_query) != serializeAST(*current_query)) + if (old_create_query != create_query) { /// Rename the existing table. int suffix = 0; @@ -553,9 +562,11 @@ void SystemLog::prepareTable() LOG_DEBUG( log, - "Existing table {} for system log has obsolete or different structure. Renaming it to {}", + "Existing table {} for system log has obsolete or different structure. Renaming it to {}.\nOld: {}\nNew: {}\n.", description, - backQuoteIfNeed(to.table)); + backQuoteIfNeed(to.table), + old_create_query, + create_query); auto query_context = Context::createCopy(context); query_context->makeQueryContext(); @@ -573,17 +584,17 @@ void SystemLog::prepareTable() /// Create the table. 
LOG_DEBUG(log, "Creating new table {} for {}", description, LogElement::name()); - auto create = getCreateTableQuery(); - - auto query_context = Context::createCopy(context); query_context->makeQueryContext(); - InterpreterCreateQuery interpreter(create, query_context); + auto create_query_ast = getCreateTableQuery(); + InterpreterCreateQuery interpreter(create_query_ast, query_context); interpreter.setInternal(true); interpreter.execute(); table = DatabaseCatalog::instance().getTable(table_id, getContext()); + + old_create_query.clear(); } is_prepared = true; diff --git a/src/Interpreters/ThreadStatusExt.cpp b/src/Interpreters/ThreadStatusExt.cpp index e796c2b85a1..fc6aa15a1e8 100644 --- a/src/Interpreters/ThreadStatusExt.cpp +++ b/src/Interpreters/ThreadStatusExt.cpp @@ -310,7 +310,7 @@ void ThreadStatus::resetPerformanceCountersLastUsage() void ThreadStatus::initQueryProfiler() { - if (!query_profiled_enabled) + if (!query_profiler_enabled) return; /// query profilers are useless without trace collector @@ -326,11 +326,11 @@ void ThreadStatus::initQueryProfiler() { if (settings.query_profiler_real_time_period_ns > 0) query_profiler_real = std::make_unique(thread_id, - /* period */ static_cast(settings.query_profiler_real_time_period_ns)); + /* period= */ static_cast(settings.query_profiler_real_time_period_ns)); if (settings.query_profiler_cpu_time_period_ns > 0) query_profiler_cpu = std::make_unique(thread_id, - /* period */ static_cast(settings.query_profiler_cpu_time_period_ns)); + /* period= */ static_cast(settings.query_profiler_cpu_time_period_ns)); } catch (...) { diff --git a/src/Parsers/IAST.cpp b/src/Parsers/IAST.cpp index 9a8838956f9..3a21d704eb9 100644 --- a/src/Parsers/IAST.cpp +++ b/src/Parsers/IAST.cpp @@ -14,7 +14,6 @@ namespace ErrorCodes extern const int TOO_DEEP_AST; extern const int BAD_ARGUMENTS; extern const int UNKNOWN_ELEMENT_IN_AST; - extern const int LOGICAL_ERROR; } @@ -48,23 +47,6 @@ size_t IAST::checkSize(size_t max_size) const return res; } -void IAST::reset(IAST *& field) -{ - if (field == nullptr) - return; - - const auto child = std::find_if(children.begin(), children.end(), [field](const auto & p) - { - return p.get() == field; - }); - - if (child == children.end()) - throw Exception("AST subtree not found in children", ErrorCodes::LOGICAL_ERROR); - - children.erase(child); - field = nullptr; -} - IAST::Hash IAST::getTreeHash() const { diff --git a/src/Parsers/IAST.h b/src/Parsers/IAST.h index 1c9c81ad378..2f30a1f7bee 100644 --- a/src/Parsers/IAST.h +++ b/src/Parsers/IAST.h @@ -157,7 +157,23 @@ public: set(field, child); } - void reset(IAST *& field); + template + void reset(T * & field) + { + if (field == nullptr) + return; + + const auto child = std::find_if(children.begin(), children.end(), [field](const auto & p) + { + return p.get() == field; + }); + + if (child == children.end()) + throw Exception("AST subtree not found in children", ErrorCodes::LOGICAL_ERROR); + + children.erase(child); + field = nullptr; + } /// Convert to a string. 
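
The hunk above turns `IAST::reset` into a function template so that call sites can pass a pointer-to-derived data member directly: a `Derived *&` cannot bind to an `IAST *&`, which is what the earlier `storage->reset(storage->settings)` call in `SystemLog::getCreateTableQueryClean()` relies on. A self-contained sketch of the pattern, using simplified stand-in types rather than the real ClickHouse classes:

```cpp
#include <algorithm>
#include <memory>
#include <stdexcept>
#include <vector>

/// Simplified stand-ins for IAST / ASTStorage / the settings AST; the real classes
/// in src/Parsers/ carry far more state, this only mirrors the ownership shape.
struct Node
{
    std::vector<std::shared_ptr<Node>> children;

    /// Templated reset, as in the hunk above: T is deduced, so a pointer-to-derived
    /// member such as `SettingsNode *` binds directly. The old reset(IAST *&)
    /// could not accept such a member without an intermediate base-pointer variable.
    template <typename T>
    void reset(T *& field)
    {
        if (field == nullptr)
            return;

        const auto child = std::find_if(children.begin(), children.end(),
            [field](const auto & p) { return p.get() == field; });

        if (child == children.end())
            throw std::logic_error("AST subtree not found in children");

        children.erase(child);
        field = nullptr;
    }
};

struct SettingsNode : Node {};

struct StorageNode : Node
{
    SettingsNode * settings = nullptr; /// non-owning view into children
};

int main()
{
    StorageNode storage;
    auto settings = std::make_shared<SettingsNode>();
    storage.children.push_back(settings);
    storage.settings = settings.get();

    /// Mirrors storage->reset(storage->settings) from getCreateTableQueryClean():
    /// removes the SETTINGS subtree from children and nulls the view pointer in one call.
    storage.reset(storage.settings);

    return storage.settings == nullptr ? 0 : 1;
}
```
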
diff --git a/src/Server/KeeperTCPHandler.cpp b/src/Server/KeeperTCPHandler.cpp index 6478ece1536..67abd6db13a 100644 --- a/src/Server/KeeperTCPHandler.cpp +++ b/src/Server/KeeperTCPHandler.cpp @@ -208,8 +208,14 @@ KeeperTCPHandler::KeeperTCPHandler(IServer & server_, const Poco::Net::StreamSoc , log(&Poco::Logger::get("KeeperTCPHandler")) , global_context(Context::createCopy(server.context())) , keeper_dispatcher(global_context->getKeeperDispatcher()) - , operation_timeout(0, global_context->getConfigRef().getUInt("keeper_server.operation_timeout_ms", Coordination::DEFAULT_OPERATION_TIMEOUT_MS) * 1000) - , session_timeout(0, global_context->getConfigRef().getUInt("keeper_server.session_timeout_ms", Coordination::DEFAULT_SESSION_TIMEOUT_MS) * 1000) + , operation_timeout( + 0, + global_context->getConfigRef().getUInt( + "keeper_server.coordination_settings.operation_timeout_ms", Coordination::DEFAULT_OPERATION_TIMEOUT_MS) * 1000) + , session_timeout( + 0, + global_context->getConfigRef().getUInt( + "keeper_server.coordination_settings.session_timeout_ms", Coordination::DEFAULT_SESSION_TIMEOUT_MS) * 1000) , poll_wrapper(std::make_unique(socket_)) , responses(std::make_unique(std::numeric_limits::max())) , last_op(std::make_unique(EMPTY_LAST_OP)) diff --git a/tests/ci/ast_fuzzer_check.py b/tests/ci/ast_fuzzer_check.py index d842d484841..02c81a4db31 100644 --- a/tests/ci/ast_fuzzer_check.py +++ b/tests/ci/ast_fuzzer_check.py @@ -11,8 +11,7 @@ from github import Github from s3_helper import S3Helper from get_robot_token import get_best_robot_token from pr_info import PRInfo -from ci_config import build_config_to_string -from build_download_helper import get_build_config_for_check, get_build_urls +from build_download_helper import get_build_name_for_check, get_build_urls from docker_pull_helper import get_image_with_version from commit_status_helper import post_commit_status from clickhouse_helper import ClickHouseHelper, prepare_tests_results_for_clickhouse @@ -54,11 +53,9 @@ if __name__ == "__main__": docker_image = get_image_with_version(temp_path, IMAGE_NAME) - build_config = get_build_config_for_check(check_name) - print(build_config) - build_config_str = build_config_to_string(build_config) - print(build_config_str) - urls = get_build_urls(build_config_str, reports_path) + build_name = get_build_name_for_check(check_name) + print(build_name) + urls = get_build_urls(build_name, reports_path) if not urls: raise Exception("No build URLs found") diff --git a/tests/ci/build_check.py b/tests/ci/build_check.py index 573a5cf82b4..0adad71d8d9 100644 --- a/tests/ci/build_check.py +++ b/tests/ci/build_check.py @@ -12,19 +12,17 @@ from pr_info import PRInfo from get_robot_token import get_best_robot_token from version_helper import get_version_from_repo, update_version_local from ccache_utils import get_ccache_if_not_exists, upload_ccache -from ci_config import build_config_to_string, CI_CONFIG +from ci_config import CI_CONFIG from docker_pull_helper import get_image_with_version -def get_build_config(build_check_name, build_number): +def get_build_config(build_check_name, build_name): if build_check_name == 'ClickHouse build check (actions)': build_config_name = 'build_config' - elif build_check_name == 'ClickHouse special build check (actions)': - build_config_name = 'special_build_config' else: raise Exception(f"Unknown build check name {build_check_name}") - return CI_CONFIG[build_config_name][build_number] + return CI_CONFIG[build_config_name][build_name] def _can_export_binaries(build_config): @@ 
-94,9 +92,9 @@ if __name__ == "__main__": caches_path = os.getenv("CACHES_PATH", temp_path) build_check_name = sys.argv[1] - build_number = int(sys.argv[2]) + build_name = sys.argv[2] - build_config = get_build_config(build_check_name, build_number) + build_config = get_build_config(build_check_name, build_name) if not os.path.exists(temp_path): os.makedirs(temp_path) @@ -125,7 +123,6 @@ if __name__ == "__main__": logging.info("Updated local files with version") - build_name = build_config_to_string(build_config) logging.info("Build short name %s", build_name) subprocess.check_call(f"echo 'BUILD_NAME=build_urls_{build_name}' >> $GITHUB_ENV", shell=True) @@ -161,7 +158,12 @@ if __name__ == "__main__": logging.info("Will upload cache") upload_ccache(ccache_path, s3_helper, pr_info.number, temp_path) - s3_path_prefix = str(pr_info.number) + "/" + pr_info.sha + "/" + build_check_name.lower().replace(' ', '_') + "/" + build_name + # for release pull requests we use branch names prefixes, not pr numbers + if 'release' in pr_info.labels or 'release-lts' in pr_info.labels: + s3_path_prefix = pr_info.head_ref + "/" + pr_info.sha + "/" + build_name + else: + s3_path_prefix = str(pr_info.number) + "/" + pr_info.sha + "/" + build_name + if os.path.exists(log_path): log_url = s3_helper.upload_build_file_to_s3(log_path, s3_path_prefix + "/" + os.path.basename(log_path)) logging.info("Log url %s", log_url) diff --git a/tests/ci/build_download_helper.py b/tests/ci/build_download_helper.py index 2770b737041..5ce54423e19 100644 --- a/tests/ci/build_download_helper.py +++ b/tests/ci/build_download_helper.py @@ -8,17 +8,17 @@ import time import requests -from ci_config import CI_CONFIG, build_config_to_string +from ci_config import CI_CONFIG DOWNLOAD_RETRIES_COUNT = 5 -def get_build_config_for_check(check_name): - return CI_CONFIG["tests_config"][check_name]['required_build_properties'] +def get_build_name_for_check(check_name): + return CI_CONFIG['tests_config'][check_name]['required_build'] -def get_build_urls(build_config_str, reports_path): +def get_build_urls(build_name, reports_path): for root, _, files in os.walk(reports_path): for f in files: - if build_config_str in f : + if build_name in f : logging.info("Found build report json %s", f) with open(os.path.join(root, f), 'r', encoding='utf-8') as file_handler: build_report = json.load(file_handler) @@ -72,11 +72,8 @@ def download_builds(result_path, build_urls, filter_fn): dowload_build_with_progress(url, os.path.join(result_path, fname)) def download_builds_filter(check_name, reports_path, result_path, filter_fn=lambda _: True): - build_config = get_build_config_for_check(check_name) - print(build_config) - build_config_str = build_config_to_string(build_config) - print(build_config_str) - urls = get_build_urls(build_config_str, reports_path) + build_name = get_build_name_for_check(check_name) + urls = get_build_urls(build_name, reports_path) print(urls) if not urls: diff --git a/tests/ci/ci_config.py b/tests/ci/ci_config.py index 161f50fed20..57a873aa35f 100644 --- a/tests/ci/ci_config.py +++ b/tests/ci/ci_config.py @@ -1,8 +1,8 @@ #!/usr/bin/env python3 CI_CONFIG = { - "build_config": [ - { + "build_config": { + "package_release": { "compiler": "clang-13", "build_type": "", "sanitizer": "", @@ -13,7 +13,7 @@ CI_CONFIG = { "tidy": "disable", "with_coverage": False }, - { + "performance": { "compiler": "clang-13", "build_type": "", "sanitizer": "", @@ -23,7 +23,7 @@ CI_CONFIG = { "tidy": "disable", "with_coverage": False }, - { + "binary_gcc": { 
"compiler": "gcc-11", "build_type": "", "sanitizer": "", @@ -33,7 +33,7 @@ CI_CONFIG = { "tidy": "disable", "with_coverage": False }, - { + "package_asan": { "compiler": "clang-13", "build_type": "", "sanitizer": "address", @@ -43,7 +43,7 @@ CI_CONFIG = { "tidy": "disable", "with_coverage": False }, - { + "package_ubsan": { "compiler": "clang-13", "build_type": "", "sanitizer": "undefined", @@ -53,7 +53,7 @@ CI_CONFIG = { "tidy": "disable", "with_coverage": False }, - { + "package_tsan": { "compiler": "clang-13", "build_type": "", "sanitizer": "thread", @@ -63,7 +63,7 @@ CI_CONFIG = { "tidy": "disable", "with_coverage": False }, - { + "package_msan": { "compiler": "clang-13", "build_type": "", "sanitizer": "memory", @@ -73,7 +73,7 @@ CI_CONFIG = { "tidy": "disable", "with_coverage": False }, - { + "package_debug": { "compiler": "clang-13", "build_type": "debug", "sanitizer": "", @@ -83,7 +83,7 @@ CI_CONFIG = { "tidy": "disable", "with_coverage": False }, - { + "binary_release": { "compiler": "clang-13", "build_type": "", "sanitizer": "", @@ -92,10 +92,8 @@ CI_CONFIG = { "splitted": "unsplitted", "tidy": "disable", "with_coverage": False - } - ], - "special_build_config": [ - { + }, + "package_tidy": { "compiler": "clang-13", "build_type": "debug", "sanitizer": "", @@ -105,7 +103,7 @@ CI_CONFIG = { "tidy": "enable", "with_coverage": False }, - { + "binary_splitted": { "compiler": "clang-13", "build_type": "", "sanitizer": "", @@ -115,7 +113,7 @@ CI_CONFIG = { "tidy": "disable", "with_coverage": False }, - { + "binary_darwin": { "compiler": "clang-13-darwin", "build_type": "", "sanitizer": "", @@ -125,7 +123,7 @@ CI_CONFIG = { "tidy": "disable", "with_coverage": False }, - { + "binary_aarch64": { "compiler": "clang-13-aarch64", "build_type": "", "sanitizer": "", @@ -135,7 +133,7 @@ CI_CONFIG = { "tidy": "disable", "with_coverage": False }, - { + "binary_freebsd": { "compiler": "clang-13-freebsd", "build_type": "", "sanitizer": "", @@ -145,7 +143,7 @@ CI_CONFIG = { "tidy": "disable", "with_coverage": False }, - { + "binary_darwin_aarch64": { "compiler": "clang-13-darwin-aarch64", "build_type": "", "sanitizer": "", @@ -155,7 +153,7 @@ CI_CONFIG = { "tidy": "disable", "with_coverage": False }, - { + "binary_ppc64le": { "compiler": "clang-13-ppc64le", "build_type": "", "sanitizer": "", @@ -165,550 +163,139 @@ CI_CONFIG = { "tidy": "disable", "with_coverage": False } - ], + }, "tests_config": { "Stateful tests (address, actions)": { - "required_build_properties": { - "compiler": "clang-13", - "package_type": "deb", - "build_type": "relwithdebuginfo", - "sanitizer": "address", - "bundled": "bundled", - "splitted": "unsplitted", - "clang_tidy": "disable", - "with_coverage": False - } + "required_build": "package_asan", }, "Stateful tests (thread, actions)": { - "required_build_properties": { - "compiler": "clang-13", - "package_type": "deb", - "build_type": "relwithdebuginfo", - "sanitizer": "thread", - "bundled": "bundled", - "splitted": "unsplitted", - "clang_tidy": "disable", - "with_coverage": False - } + "required_build": "package_tsan", }, "Stateful tests (memory, actions)": { - "required_build_properties": { - "compiler": "clang-13", - "package_type": "deb", - "build_type": "relwithdebuginfo", - "sanitizer": "memory", - "bundled": "bundled", - "splitted": "unsplitted", - "clang_tidy": "disable", - "with_coverage": False - } + "required_build": "package_msan", }, "Stateful tests (ubsan, actions)": { - "required_build_properties": { - "compiler": "clang-13", - "package_type": "deb", - 
"build_type": "relwithdebuginfo", - "sanitizer": "undefined", - "bundled": "bundled", - "splitted": "unsplitted", - "clang_tidy": "disable", - "with_coverage": False - } + "required_build": "package_ubsan", }, "Stateful tests (debug, actions)": { - "required_build_properties": { - "compiler": "clang-13", - "package_type": "deb", - "build_type": "debug", - "sanitizer": "none", - "bundled": "bundled", - "splitted": "unsplitted", - "clang_tidy": "disable", - "with_coverage": False - } + "required_build": "package_debug", }, "Stateful tests (release, actions)": { - "required_build_properties": { - "compiler": "clang-13", - "package_type": "deb", - "build_type": "relwithdebuginfo", - "sanitizer": "none", - "bundled": "bundled", - "splitted": "unsplitted", - "clang_tidy": "disable", - "with_coverage": False - } + "required_build": "package_release", }, "Stateful tests (release, DatabaseOrdinary, actions)": { - "required_build_properties": { - "compiler": "clang-13", - "package_type": "deb", - "build_type": "relwithdebuginfo", - "sanitizer": "none", - "bundled": "bundled", - "splitted": "unsplitted", - "clang_tidy": "disable", - "with_coverage": False - } + "required_build": "package_release", }, "Stateful tests (release, DatabaseReplicated, actions)": { - "required_build_properties": { - "compiler": "clang-13", - "package_type": "deb", - "build_type": "relwithdebuginfo", - "sanitizer": "none", - "bundled": "bundled", - "splitted": "unsplitted", - "clang_tidy": "disable", - "with_coverage": False - } + "required_build": "package_release", }, "Stateless tests (address, actions)": { - "required_build_properties": { - "compiler": "clang-13", - "package_type": "deb", - "build_type": "relwithdebuginfo", - "sanitizer": "address", - "bundled": "bundled", - "splitted": "unsplitted", - "clang_tidy": "disable", - "with_coverage": False - } + "required_build": "package_asan", }, "Stateless tests (thread, actions)": { - "required_build_properties": { - "compiler": "clang-13", - "package_type": "deb", - "build_type": "relwithdebuginfo", - "sanitizer": "thread", - "bundled": "bundled", - "splitted": "unsplitted", - "clang_tidy": "disable", - "with_coverage": False - } + "required_build": "package_tsan", }, "Stateless tests (memory, actions)": { - "required_build_properties": { - "compiler": "clang-13", - "package_type": "deb", - "build_type": "relwithdebuginfo", - "sanitizer": "memory", - "bundled": "bundled", - "splitted": "unsplitted", - "clang_tidy": "disable", - "with_coverage": False - } + "required_build": "package_msan", }, "Stateless tests (ubsan, actions)": { - "required_build_properties": { - "compiler": "clang-13", - "package_type": "deb", - "build_type": "relwithdebuginfo", - "sanitizer": "undefined", - "bundled": "bundled", - "splitted": "unsplitted", - "clang_tidy": "disable", - "with_coverage": False - } + "required_build": "package_ubsan", }, "Stateless tests (debug, actions)": { - "required_build_properties": { - "compiler": "clang-13", - "package_type": "deb", - "build_type": "debug", - "sanitizer": "none", - "bundled": "bundled", - "splitted": "unsplitted", - "clang_tidy": "disable", - "with_coverage": False - } + "required_build": "package_debug", }, "Stateless tests (release, actions)": { - "required_build_properties": { - "compiler": "clang-13", - "package_type": "deb", - "build_type": "relwithdebuginfo", - "sanitizer": "none", - "bundled": "bundled", - "splitted": "unsplitted", - "clang_tidy": "disable", - "with_coverage": False - } + "required_build": "package_release", }, "Stateless 
tests (release, wide parts enabled, actions)": { - "required_build_properties": { - "compiler": "clang-13", - "package_type": "deb", - "build_type": "relwithdebuginfo", - "sanitizer": "none", - "bundled": "bundled", - "splitted": "unsplitted", - "clang_tidy": "disable", - "with_coverage": False - } + "required_build": "package_release", }, "Stateless tests (release, DatabaseOrdinary, actions)": { - "required_build_properties": { - "compiler": "clang-13", - "package_type": "deb", - "build_type": "relwithdebuginfo", - "sanitizer": "none", - "bundled": "bundled", - "splitted": "unsplitted", - "clang_tidy": "disable", - "with_coverage": False - } + "required_build": "package_release", }, "Stateless tests (release, DatabaseReplicated, actions)": { - "required_build_properties": { - "compiler": "clang-13", - "package_type": "deb", - "build_type": "relwithdebuginfo", - "sanitizer": "none", - "bundled": "bundled", - "splitted": "unsplitted", - "clang_tidy": "disable", - "with_coverage": False - } + "required_build": "package_release", }, "Stress test (address, actions)": { - "required_build_properties": { - "compiler": "clang-13", - "package_type": "deb", - "build_type": "relwithdebuginfo", - "sanitizer": "address", - "bundled": "bundled", - "splitted": "unsplitted", - "clang_tidy": "disable", - "with_coverage": False - } + "required_build": "package_asan", }, "Stress test (thread, actions)": { - "required_build_properties": { - "compiler": "clang-13", - "package_type": "deb", - "build_type": "relwithdebuginfo", - "sanitizer": "thread", - "bundled": "bundled", - "splitted": "unsplitted", - "clang_tidy": "disable", - "with_coverage": False - } + "required_build": "package_tsan", }, "Stress test (undefined, actions)": { - "required_build_properties": { - "compiler": "clang-13", - "package_type": "deb", - "build_type": "relwithdebuginfo", - "sanitizer": "undefined", - "bundled": "bundled", - "splitted": "unsplitted", - "clang_tidy": "disable", - "with_coverage": False - } + "required_build": "package_ubsan", }, "Stress test (memory, actions)": { - "required_build_properties": { - "compiler": "clang-13", - "package_type": "deb", - "build_type": "relwithdebuginfo", - "sanitizer": "memory", - "bundled": "bundled", - "splitted": "unsplitted", - "clang_tidy": "disable", - "with_coverage": False - } + "required_build": "package_msan", }, "Stress test (debug, actions)": { - "required_build_properties": { - "compiler": "clang-13", - "package_type": "deb", - "build_type": "debug", - "sanitizer": "none", - "bundled": "bundled", - "splitted": "unsplitted", - "clang_tidy": "disable", - "with_coverage": False - } + "required_build": "package_debug", }, "Integration tests (asan, actions)": { - "required_build_properties": { - "compiler": "clang-13", - "package_type": "deb", - "build_type": "relwithdebuginfo", - "sanitizer": "address", - "bundled": "bundled", - "splitted": "unsplitted", - "clang_tidy": "disable", - "with_coverage": False - } + "required_build": "package_asan", }, "Integration tests (thread, actions)": { - "required_build_properties": { - "compiler": "clang-13", - "package_type": "deb", - "build_type": "relwithdebuginfo", - "sanitizer": "thread", - "bundled": "bundled", - "splitted": "unsplitted", - "clang_tidy": "disable", - "with_coverage": False - } + "required_build": "package_tsan", }, "Integration tests (release, actions)": { - "required_build_properties": { - "compiler": "clang-13", - "package_type": "deb", - "build_type": "relwithdebuginfo", - "sanitizer": "none", - "bundled": "bundled", - 
"splitted": "unsplitted", - "clang_tidy": "disable", - "with_coverage": False - } + "required_build": "package_release", }, "Integration tests (memory, actions)": { - "required_build_properties": { - "compiler": "clang-13", - "package_type": "deb", - "build_type": "relwithdebuginfo", - "sanitizer": "memory", - "bundled": "bundled", - "splitted": "unsplitted", - "clang_tidy": "disable", - "with_coverage": False - } + "required_build": "package_msan", }, "Integration tests flaky check (asan, actions)": { - "required_build_properties": { - "compiler": "clang-13", - "package_type": "deb", - "build_type": "relwithdebuginfo", - "sanitizer": "address", - "bundled": "bundled", - "splitted": "unsplitted", - "clang_tidy": "disable", - "with_coverage": False - } + "required_build": "package_asan", }, "Compatibility check (actions)": { - "required_build_properties": { - "compiler": "clang-13", - "package_type": "deb", - "build_type": "relwithdebuginfo", - "sanitizer": "none", - "bundled": "bundled", - "splitted": "unsplitted", - "clang_tidy": "disable", - "with_coverage": False - } + "required_build": "package_release", }, "Split build smoke test (actions)": { - "required_build_properties": { - "compiler": "clang-13", - "package_type": "binary", - "build_type": "relwithdebuginfo", - "sanitizer": "none", - "bundled": "bundled", - "splitted": "splitted", - "clang_tidy": "disable", - "with_coverage": False - } + "required_build": "binary_splitted", }, "Testflows check (actions)": { - "required_build_properties": { - "compiler": "clang-13", - "package_type": "deb", - "build_type": "relwithdebuginfo", - "sanitizer": "none", - "bundled": "bundled", - "splitted": "unsplitted", - "clang_tidy": "disable", - "with_coverage": False - } + "required_build": "package_release", }, "Unit tests (release-gcc, actions)": { - "required_build_properties": { - "compiler": "gcc-11", - "package_type": "deb", - "build_type": "relwithdebuginfo", - "sanitizer": "none", - "bundled": "bundled", - "splitted": "unsplitted", - "clang_tidy": "disable", - "with_coverage": False - } + "required_build": "binary_gcc", }, "Unit tests (release-clang, actions)": { - "required_build_properties": { - "compiler": "clang-13", - "package_type": "binary", - "build_type": "relwithdebuginfo", - "sanitizer": "none", - "bundled": "bundled", - "splitted": "unsplitted", - "clang_tidy": "disable", - "with_coverage": False - } + "required_build": "binary_release", }, "Unit tests (asan, actions)": { - "required_build_properties": { - "compiler": "clang-13", - "package_type": "deb", - "build_type": "relwithdebuginfo", - "sanitizer": "address", - "bundled": "bundled", - "splitted": "unsplitted", - "clang_tidy": "disable", - "with_coverage": False - } + "required_build": "package_asan", }, "Unit tests (msan, actions)": { - "required_build_properties": { - "compiler": "clang-13", - "package_type": "deb", - "build_type": "relwithdebuginfo", - "sanitizer": "memory", - "bundled": "bundled", - "splitted": "unsplitted", - "clang_tidy": "disable", - "with_coverage": False - } + "required_build": "package_msan", }, "Unit tests (tsan, actions)": { - "required_build_properties": { - "compiler": "clang-13", - "package_type": "deb", - "build_type": "relwithdebuginfo", - "sanitizer": "thread", - "bundled": "bundled", - "splitted": "unsplitted", - "clang_tidy": "disable", - "with_coverage": False - } + "required_build": "package_tsan", }, "Unit tests (ubsan, actions)": { - "required_build_properties": { - "compiler": "clang-13", - "package_type": "deb", - "build_type": 
"relwithdebuginfo", - "sanitizer": "undefined", - "bundled": "bundled", - "splitted": "unsplitted", - "clang_tidy": "disable", - "with_coverage": False - } + "required_build": "package_ubsan", }, "AST fuzzer (debug, actions)": { - "required_build_properties": { - "compiler": "clang-13", - "package_type": "deb", - "build_type": "debug", - "sanitizer": "none", - "bundled": "bundled", - "splitted": "unsplitted", - "clang_tidy": "disable", - "with_coverage": False - } + "required_build": "package_debug", }, "AST fuzzer (ASan, actions)": { - "required_build_properties": { - "compiler": "clang-13", - "package_type": "deb", - "build_type": "relwithdebuginfo", - "sanitizer": "address", - "bundled": "bundled", - "splitted": "unsplitted", - "clang_tidy": "disable", - "with_coverage": False - } + "required_build": "package_asan", }, "AST fuzzer (MSan, actions)": { - "required_build_properties": { - "compiler": "clang-13", - "package_type": "deb", - "build_type": "relwithdebuginfo", - "sanitizer": "memory", - "bundled": "bundled", - "splitted": "unsplitted", - "clang_tidy": "disable", - "with_coverage": False - } + "required_build": "package_msan", }, "AST fuzzer (TSan, actions)": { - "required_build_properties": { - "compiler": "clang-13", - "package_type": "deb", - "build_type": "relwithdebuginfo", - "sanitizer": "thread", - "bundled": "bundled", - "splitted": "unsplitted", - "clang_tidy": "disable", - "with_coverage": False - } + "required_build": "package_tsan", }, "AST fuzzer (UBSan, actions)": { - "required_build_properties": { - "compiler": "clang-13", - "package_type": "deb", - "build_type": "relwithdebuginfo", - "sanitizer": "undefined", - "bundled": "bundled", - "splitted": "unsplitted", - "clang_tidy": "disable", - "with_coverage": False - } + "required_build": "package_ubsan", }, "Release (actions)": { - "required_build_properties": { - "compiler": "clang-13", - "package_type": "deb", - "build_type": "relwithdebuginfo", - "sanitizer": "none", - "bundled": "bundled", - "splitted": "unsplitted", - "clang_tidy": "disable", - "with_coverage": False - } + "required_build": "package_release", }, "Stateless tests flaky check (address, actions)": { - "required_build_properties": { - "compiler": "clang-13", - "package_type": "deb", - "build_type": "relwithdebuginfo", - "sanitizer": "address", - "bundled": "bundled", - "splitted": "unsplitted", - "clang_tidy": "disable", - "with_coverage": False - } + "required_build": "package_asan", }, "ClickHouse Keeper Jepsen (actions)": { - "required_build_properties": { - "compiler": "clang-13", - "package_type": "binary", - "build_type": "relwithdebuginfo", - "sanitizer": "none", - "bundled": "bundled", - "splitted": "unsplitted", - "clang_tidy": "disable", - "with_coverage": False - } + "required_build": "binary_release", } } } - -def build_config_to_string(build_config): - if build_config["package_type"] == "performance": - return "performance" - - return "_".join([ - build_config['compiler'], - build_config['build_type'] if build_config['build_type'] else "relwithdebuginfo", - build_config['sanitizer'] if build_config['sanitizer'] else "none", - build_config['bundled'], - build_config['splitted'], - 'tidy' if 'tidy' in build_config and build_config['tidy'] == 'enable' else 'notidy', - 'with_coverage' if 'with_coverage' in build_config and build_config['with_coverage'] else 'without_coverage', - build_config['package_type'], - ]) diff --git a/tests/ci/metrics_lambda/app.py b/tests/ci/metrics_lambda/app.py index 4bf967a51e1..d776aa2be49 100644 --- 
a/tests/ci/metrics_lambda/app.py +++ b/tests/ci/metrics_lambda/app.py @@ -163,7 +163,7 @@ def main(github_secret_key, github_app_id, push_to_cloudwatch, delete_offline_ru if delete_offline_runners: print("Going to delete offline runners") for runner in runners: - if runner.offline: + if runner.offline and not runner.busy: print("Deleting runner", runner) delete_runner(access_token, runner) diff --git a/tests/ci/pr_info.py b/tests/ci/pr_info.py index c10bf0f6ecd..a445fef0ab2 100644 --- a/tests/ci/pr_info.py +++ b/tests/ci/pr_info.py @@ -6,7 +6,7 @@ import requests from unidiff import PatchSet -DIFF_IN_DOCUMENTATION_EXT = [".html", ".md", ".yml", ".txt", ".css", ".js", ".xml", ".ico", ".conf", ".svg", ".png", ".jpg", ".py", ".sh"] +DIFF_IN_DOCUMENTATION_EXT = [".html", ".md", ".yml", ".txt", ".css", ".js", ".xml", ".ico", ".conf", ".svg", ".png", ".jpg", ".py", ".sh", ".json"] def get_pr_for_commit(sha, ref): try_get_pr_url = f"https://api.github.com/repos/{os.getenv('GITHUB_REPOSITORY', 'ClickHouse/ClickHouse')}/commits/{sha}/pulls" diff --git a/tests/config/config.d/database_replicated.xml b/tests/config/config.d/database_replicated.xml index 1ad43b9880a..3fea87c4fd1 100644 --- a/tests/config/config.d/database_replicated.xml +++ b/tests/config/config.d/database_replicated.xml @@ -34,14 +34,14 @@ 1 localhost - 44444 + 9234 true 3 2 localhost - 44445 + 9235 true true 2 @@ -49,7 +49,7 @@ 3 localhost - 44446 + 9236 true true 1 diff --git a/tests/config/config.d/keeper_port.xml b/tests/config/config.d/keeper_port.xml index 85d4bc0f04f..21163ece190 100644 --- a/tests/config/config.d/keeper_port.xml +++ b/tests/config/config.d/keeper_port.xml @@ -22,7 +22,7 @@ 1 localhost - 44444 + 9234 diff --git a/tests/integration/test_config_xml_full/configs/config.d/keeper_port.xml b/tests/integration/test_config_xml_full/configs/config.d/keeper_port.xml index cee4d338231..aada51c50b5 100644 --- a/tests/integration/test_config_xml_full/configs/config.d/keeper_port.xml +++ b/tests/integration/test_config_xml_full/configs/config.d/keeper_port.xml @@ -16,7 +16,7 @@ 1 localhost - 44444 + 9234 diff --git a/tests/integration/test_config_xml_main/configs/config.d/keeper_port.yaml b/tests/integration/test_config_xml_main/configs/config.d/keeper_port.yaml index 91723bc372f..3019e530e84 100644 --- a/tests/integration/test_config_xml_main/configs/config.d/keeper_port.yaml +++ b/tests/integration/test_config_xml_main/configs/config.d/keeper_port.yaml @@ -11,5 +11,5 @@ keeper_server: server: id: 1 hostname: localhost - port: 44444 + port: 9234 diff --git a/tests/integration/test_config_xml_yaml_mix/configs/config.d/keeper_port.xml b/tests/integration/test_config_xml_yaml_mix/configs/config.d/keeper_port.xml index cee4d338231..aada51c50b5 100644 --- a/tests/integration/test_config_xml_yaml_mix/configs/config.d/keeper_port.xml +++ b/tests/integration/test_config_xml_yaml_mix/configs/config.d/keeper_port.xml @@ -16,7 +16,7 @@ 1 localhost - 44444 + 9234 diff --git a/tests/integration/test_config_yaml_full/configs/config.d/keeper_port.yaml b/tests/integration/test_config_yaml_full/configs/config.d/keeper_port.yaml index 91723bc372f..3019e530e84 100644 --- a/tests/integration/test_config_yaml_full/configs/config.d/keeper_port.yaml +++ b/tests/integration/test_config_yaml_full/configs/config.d/keeper_port.yaml @@ -11,5 +11,5 @@ keeper_server: server: id: 1 hostname: localhost - port: 44444 + port: 9234 diff --git a/tests/integration/test_config_yaml_main/configs/config.d/keeper_port.xml 
b/tests/integration/test_config_yaml_main/configs/config.d/keeper_port.xml index cee4d338231..aada51c50b5 100644 --- a/tests/integration/test_config_yaml_main/configs/config.d/keeper_port.xml +++ b/tests/integration/test_config_yaml_main/configs/config.d/keeper_port.xml @@ -16,7 +16,7 @@ 1 localhost - 44444 + 9234 diff --git a/tests/integration/test_keeper_auth/configs/keeper_config.xml b/tests/integration/test_keeper_auth/configs/keeper_config.xml index 1a01b79cebd..926cdd3a0f9 100644 --- a/tests/integration/test_keeper_auth/configs/keeper_config.xml +++ b/tests/integration/test_keeper_auth/configs/keeper_config.xml @@ -21,7 +21,7 @@ 1 localhost - 44444 + 9234 diff --git a/tests/integration/test_keeper_back_to_back/configs/enable_keeper.xml b/tests/integration/test_keeper_back_to_back/configs/enable_keeper.xml index 30a56c6e1e1..9649117e4b1 100644 --- a/tests/integration/test_keeper_back_to_back/configs/enable_keeper.xml +++ b/tests/integration/test_keeper_back_to_back/configs/enable_keeper.xml @@ -20,7 +20,7 @@ 1 localhost - 44444 + 9234 diff --git a/tests/integration/test_keeper_clickhouse_hard_restart/configs/enable_keeper.xml b/tests/integration/test_keeper_clickhouse_hard_restart/configs/enable_keeper.xml new file mode 100644 index 00000000000..c1d38a1de52 --- /dev/null +++ b/tests/integration/test_keeper_clickhouse_hard_restart/configs/enable_keeper.xml @@ -0,0 +1,22 @@ + + + 9181 + 1 + /var/lib/clickhouse/coordination/log + /var/lib/clickhouse/coordination/snapshots + + + 5000 + 10000 + trace + + + + + 1 + node1 + 9234 + + + + diff --git a/tests/integration/test_keeper_clickhouse_hard_restart/configs/keeper_conf.xml b/tests/integration/test_keeper_clickhouse_hard_restart/configs/keeper_conf.xml new file mode 100644 index 00000000000..ebb0d98ddf4 --- /dev/null +++ b/tests/integration/test_keeper_clickhouse_hard_restart/configs/keeper_conf.xml @@ -0,0 +1,8 @@ + + + + node1 + 9181 + + + diff --git a/tests/integration/test_keeper_four_word_command/configs/enable_keeper1.xml b/tests/integration/test_keeper_four_word_command/configs/enable_keeper1.xml index 258e58b558e..fcb3553bb98 100644 --- a/tests/integration/test_keeper_four_word_command/configs/enable_keeper1.xml +++ b/tests/integration/test_keeper_four_word_command/configs/enable_keeper1.xml @@ -17,14 +17,14 @@ 1 node1 - 44444 + 9234 true 3 2 node2 - 44444 + 9234 false true 2 @@ -32,7 +32,7 @@ 3 node3 - 44444 + 9234 false true 1 diff --git a/tests/integration/test_keeper_four_word_command/configs/enable_keeper2.xml b/tests/integration/test_keeper_four_word_command/configs/enable_keeper2.xml index f49fb819256..65011cd2637 100644 --- a/tests/integration/test_keeper_four_word_command/configs/enable_keeper2.xml +++ b/tests/integration/test_keeper_four_word_command/configs/enable_keeper2.xml @@ -17,14 +17,14 @@ 1 node1 - 44444 + 9234 true 3 2 node2 - 44444 + 9234 false true 2 @@ -32,7 +32,7 @@ 3 node3 - 44444 + 9234 false true 1 diff --git a/tests/integration/test_keeper_four_word_command/configs/enable_keeper3.xml b/tests/integration/test_keeper_four_word_command/configs/enable_keeper3.xml index 4606dce367f..a0d8c99f3d8 100644 --- a/tests/integration/test_keeper_four_word_command/configs/enable_keeper3.xml +++ b/tests/integration/test_keeper_four_word_command/configs/enable_keeper3.xml @@ -17,14 +17,14 @@ 1 node1 - 44444 + 9234 true 3 2 node2 - 44444 + 9234 false true 2 @@ -32,7 +32,7 @@ 3 node3 - 44444 + 9234 false true 1 diff --git a/tests/integration/test_keeper_four_word_command/configs/keeper_config_with_white_list.xml 
b/tests/integration/test_keeper_four_word_command/configs/keeper_config_with_white_list.xml index 4263cdb38c9..a0c735fd5db 100644 --- a/tests/integration/test_keeper_four_word_command/configs/keeper_config_with_white_list.xml +++ b/tests/integration/test_keeper_four_word_command/configs/keeper_config_with_white_list.xml @@ -7,17 +7,17 @@ 1 node1 - 44444 + 9234 2 node2 - 44444 + 9234 3 node3 - 44444 + 9234 diff --git a/tests/integration/test_keeper_four_word_command/configs/keeper_config_with_white_list_all.xml b/tests/integration/test_keeper_four_word_command/configs/keeper_config_with_white_list_all.xml index 635afa70b85..2aa58b25130 100644 --- a/tests/integration/test_keeper_four_word_command/configs/keeper_config_with_white_list_all.xml +++ b/tests/integration/test_keeper_four_word_command/configs/keeper_config_with_white_list_all.xml @@ -7,17 +7,17 @@ 1 node1 - 44444 + 9234 2 node2 - 44444 + 9234 3 node3 - 44444 + 9234 diff --git a/tests/integration/test_keeper_four_word_command/configs/keeper_config_without_white_list.xml b/tests/integration/test_keeper_four_word_command/configs/keeper_config_without_white_list.xml index 084b9a63654..697acaebe58 100644 --- a/tests/integration/test_keeper_four_word_command/configs/keeper_config_without_white_list.xml +++ b/tests/integration/test_keeper_four_word_command/configs/keeper_config_without_white_list.xml @@ -6,17 +6,17 @@ 1 node1 - 44444 + 9234 2 node2 - 44444 + 9234 3 node3 - 44444 + 9234 diff --git a/tests/integration/test_keeper_internal_secure/configs/enable_secure_keeper1.xml b/tests/integration/test_keeper_internal_secure/configs/enable_secure_keeper1.xml index 039d864711c..986b503ebe3 100644 --- a/tests/integration/test_keeper_internal_secure/configs/enable_secure_keeper1.xml +++ b/tests/integration/test_keeper_internal_secure/configs/enable_secure_keeper1.xml @@ -17,14 +17,14 @@ 1 node1 - 44444 + 9234 true 3 2 node2 - 44444 + 9234 true true 2 @@ -32,7 +32,7 @@ 3 node3 - 44444 + 9234 true true 1 diff --git a/tests/integration/test_keeper_internal_secure/configs/enable_secure_keeper2.xml b/tests/integration/test_keeper_internal_secure/configs/enable_secure_keeper2.xml index a3c20e095f3..652b1992f46 100644 --- a/tests/integration/test_keeper_internal_secure/configs/enable_secure_keeper2.xml +++ b/tests/integration/test_keeper_internal_secure/configs/enable_secure_keeper2.xml @@ -17,14 +17,14 @@ 1 node1 - 44444 + 9234 true 3 2 node2 - 44444 + 9234 true true 2 @@ -32,7 +32,7 @@ 3 node3 - 44444 + 9234 true true 1 diff --git a/tests/integration/test_keeper_internal_secure/configs/enable_secure_keeper3.xml b/tests/integration/test_keeper_internal_secure/configs/enable_secure_keeper3.xml index 867c48ba250..6507f97473b 100644 --- a/tests/integration/test_keeper_internal_secure/configs/enable_secure_keeper3.xml +++ b/tests/integration/test_keeper_internal_secure/configs/enable_secure_keeper3.xml @@ -17,14 +17,14 @@ 1 node1 - 44444 + 9234 true 3 2 node2 - 44444 + 9234 true true 2 @@ -32,7 +32,7 @@ 3 node3 - 44444 + 9234 true true 1 diff --git a/tests/integration/test_keeper_multinode_blocade_leader/configs/enable_keeper1.xml b/tests/integration/test_keeper_multinode_blocade_leader/configs/enable_keeper1.xml index 9acc30f02fa..17455ed12f5 100644 --- a/tests/integration/test_keeper_multinode_blocade_leader/configs/enable_keeper1.xml +++ b/tests/integration/test_keeper_multinode_blocade_leader/configs/enable_keeper1.xml @@ -16,14 +16,14 @@ 1 node1 - 44444 + 9234 true 3 2 node2 - 44444 + 9234 true true 2 @@ -31,7 +31,7 @@ 3 node3 - 44444 + 9234 true 
true 1 diff --git a/tests/integration/test_keeper_multinode_blocade_leader/configs/enable_keeper2.xml b/tests/integration/test_keeper_multinode_blocade_leader/configs/enable_keeper2.xml index 9f8c0806ee0..03a23984cc2 100644 --- a/tests/integration/test_keeper_multinode_blocade_leader/configs/enable_keeper2.xml +++ b/tests/integration/test_keeper_multinode_blocade_leader/configs/enable_keeper2.xml @@ -16,14 +16,14 @@ 1 node1 - 44444 + 9234 true 3 2 node2 - 44444 + 9234 true true 2 @@ -31,7 +31,7 @@ 3 node3 - 44444 + 9234 true true 1 diff --git a/tests/integration/test_keeper_multinode_blocade_leader/configs/enable_keeper3.xml b/tests/integration/test_keeper_multinode_blocade_leader/configs/enable_keeper3.xml index e0efa3a8d1e..a3196ac3061 100644 --- a/tests/integration/test_keeper_multinode_blocade_leader/configs/enable_keeper3.xml +++ b/tests/integration/test_keeper_multinode_blocade_leader/configs/enable_keeper3.xml @@ -16,14 +16,14 @@ 1 node1 - 44444 + 9234 true 3 2 node2 - 44444 + 9234 true true 2 @@ -31,7 +31,7 @@ 3 node3 - 44444 + 9234 true true 1 diff --git a/tests/integration/test_keeper_multinode_simple/configs/enable_keeper1.xml b/tests/integration/test_keeper_multinode_simple/configs/enable_keeper1.xml index 9acc30f02fa..17455ed12f5 100644 --- a/tests/integration/test_keeper_multinode_simple/configs/enable_keeper1.xml +++ b/tests/integration/test_keeper_multinode_simple/configs/enable_keeper1.xml @@ -16,14 +16,14 @@ 1 node1 - 44444 + 9234 true 3 2 node2 - 44444 + 9234 true true 2 @@ -31,7 +31,7 @@ 3 node3 - 44444 + 9234 true true 1 diff --git a/tests/integration/test_keeper_multinode_simple/configs/enable_keeper2.xml b/tests/integration/test_keeper_multinode_simple/configs/enable_keeper2.xml index 9f8c0806ee0..03a23984cc2 100644 --- a/tests/integration/test_keeper_multinode_simple/configs/enable_keeper2.xml +++ b/tests/integration/test_keeper_multinode_simple/configs/enable_keeper2.xml @@ -16,14 +16,14 @@ 1 node1 - 44444 + 9234 true 3 2 node2 - 44444 + 9234 true true 2 @@ -31,7 +31,7 @@ 3 node3 - 44444 + 9234 true true 1 diff --git a/tests/integration/test_keeper_multinode_simple/configs/enable_keeper3.xml b/tests/integration/test_keeper_multinode_simple/configs/enable_keeper3.xml index e0efa3a8d1e..a3196ac3061 100644 --- a/tests/integration/test_keeper_multinode_simple/configs/enable_keeper3.xml +++ b/tests/integration/test_keeper_multinode_simple/configs/enable_keeper3.xml @@ -16,14 +16,14 @@ 1 node1 - 44444 + 9234 true 3 2 node2 - 44444 + 9234 true true 2 @@ -31,7 +31,7 @@ 3 node3 - 44444 + 9234 true true 1 diff --git a/tests/integration/test_keeper_nodes_add/configs/enable_keeper1.xml b/tests/integration/test_keeper_nodes_add/configs/enable_keeper1.xml index bd695389fa6..c1d38a1de52 100644 --- a/tests/integration/test_keeper_nodes_add/configs/enable_keeper1.xml +++ b/tests/integration/test_keeper_nodes_add/configs/enable_keeper1.xml @@ -15,7 +15,7 @@ 1 node1 - 44444 + 9234 diff --git a/tests/integration/test_keeper_nodes_add/configs/enable_keeper_three_nodes_1.xml b/tests/integration/test_keeper_nodes_add/configs/enable_keeper_three_nodes_1.xml index 510424715c4..d2717283a8d 100644 --- a/tests/integration/test_keeper_nodes_add/configs/enable_keeper_three_nodes_1.xml +++ b/tests/integration/test_keeper_nodes_add/configs/enable_keeper_three_nodes_1.xml @@ -15,17 +15,17 @@ 1 node1 - 44444 + 9234 2 node2 - 44444 + 9234 3 node3 - 44444 + 9234 diff --git a/tests/integration/test_keeper_nodes_add/configs/enable_keeper_three_nodes_2.xml 
b/tests/integration/test_keeper_nodes_add/configs/enable_keeper_three_nodes_2.xml index 264601d8c98..5924ee1c2dc 100644 --- a/tests/integration/test_keeper_nodes_add/configs/enable_keeper_three_nodes_2.xml +++ b/tests/integration/test_keeper_nodes_add/configs/enable_keeper_three_nodes_2.xml @@ -15,17 +15,17 @@ 1 node1 - 44444 + 9234 2 node2 - 44444 + 9234 3 node3 - 44444 + 9234 diff --git a/tests/integration/test_keeper_nodes_add/configs/enable_keeper_three_nodes_3.xml b/tests/integration/test_keeper_nodes_add/configs/enable_keeper_three_nodes_3.xml index 7f9775939bb..d261e4f67f3 100644 --- a/tests/integration/test_keeper_nodes_add/configs/enable_keeper_three_nodes_3.xml +++ b/tests/integration/test_keeper_nodes_add/configs/enable_keeper_three_nodes_3.xml @@ -15,17 +15,17 @@ 1 node1 - 44444 + 9234 2 node2 - 44444 + 9234 3 node3 - 44444 + 9234 diff --git a/tests/integration/test_keeper_nodes_add/configs/enable_keeper_two_nodes_1.xml b/tests/integration/test_keeper_nodes_add/configs/enable_keeper_two_nodes_1.xml index 3f6baf1ccc7..697986638d7 100644 --- a/tests/integration/test_keeper_nodes_add/configs/enable_keeper_two_nodes_1.xml +++ b/tests/integration/test_keeper_nodes_add/configs/enable_keeper_two_nodes_1.xml @@ -15,12 +15,12 @@ 1 node1 - 44444 + 9234 2 node2 - 44444 + 9234 diff --git a/tests/integration/test_keeper_nodes_add/configs/enable_keeper_two_nodes_2.xml b/tests/integration/test_keeper_nodes_add/configs/enable_keeper_two_nodes_2.xml index 61c9a00ffd6..967940e1e2b 100644 --- a/tests/integration/test_keeper_nodes_add/configs/enable_keeper_two_nodes_2.xml +++ b/tests/integration/test_keeper_nodes_add/configs/enable_keeper_two_nodes_2.xml @@ -15,12 +15,12 @@ 1 node1 - 44444 + 9234 2 node2 - 44444 + 9234 diff --git a/tests/integration/test_keeper_nodes_move/configs/enable_keeper1.xml b/tests/integration/test_keeper_nodes_move/configs/enable_keeper1.xml index a8a1a17b831..1e57d42016d 100644 --- a/tests/integration/test_keeper_nodes_move/configs/enable_keeper1.xml +++ b/tests/integration/test_keeper_nodes_move/configs/enable_keeper1.xml @@ -15,18 +15,18 @@ 1 node1 - 44444 + 9234 2 node2 - 44444 + 9234 true 3 node3 - 44444 + 9234 true diff --git a/tests/integration/test_keeper_nodes_move/configs/enable_keeper2.xml b/tests/integration/test_keeper_nodes_move/configs/enable_keeper2.xml index 7820158680c..98422b41c9b 100644 --- a/tests/integration/test_keeper_nodes_move/configs/enable_keeper2.xml +++ b/tests/integration/test_keeper_nodes_move/configs/enable_keeper2.xml @@ -15,18 +15,18 @@ 1 node1 - 44444 + 9234 2 node2 - 44444 + 9234 true 3 node3 - 44444 + 9234 true diff --git a/tests/integration/test_keeper_nodes_move/configs/enable_keeper3.xml b/tests/integration/test_keeper_nodes_move/configs/enable_keeper3.xml index cd76b0d216e..43800bd2dfb 100644 --- a/tests/integration/test_keeper_nodes_move/configs/enable_keeper3.xml +++ b/tests/integration/test_keeper_nodes_move/configs/enable_keeper3.xml @@ -15,18 +15,18 @@ 1 node1 - 44444 + 9234 2 node2 - 44444 + 9234 true 3 node3 - 44444 + 9234 true diff --git a/tests/integration/test_keeper_nodes_move/configs/enable_keeper_node4_1.xml b/tests/integration/test_keeper_nodes_move/configs/enable_keeper_node4_1.xml index d99502e2959..0d7544f9a5b 100644 --- a/tests/integration/test_keeper_nodes_move/configs/enable_keeper_node4_1.xml +++ b/tests/integration/test_keeper_nodes_move/configs/enable_keeper_node4_1.xml @@ -15,18 +15,18 @@ 1 node1 - 44444 + 9234 2 node2 - 44444 + 9234 true 4 node4 - 44444 + 9234 true diff --git 
a/tests/integration/test_keeper_nodes_move/configs/enable_keeper_node4_2.xml b/tests/integration/test_keeper_nodes_move/configs/enable_keeper_node4_2.xml index 04d8776782f..65feae85e3e 100644 --- a/tests/integration/test_keeper_nodes_move/configs/enable_keeper_node4_2.xml +++ b/tests/integration/test_keeper_nodes_move/configs/enable_keeper_node4_2.xml @@ -15,18 +15,18 @@ 1 node1 - 44444 + 9234 2 node2 - 44444 + 9234 true 4 node4 - 44444 + 9234 true diff --git a/tests/integration/test_keeper_nodes_move/configs/enable_keeper_node4_4.xml b/tests/integration/test_keeper_nodes_move/configs/enable_keeper_node4_4.xml index 10abf71b9f6..2499de4fe86 100644 --- a/tests/integration/test_keeper_nodes_move/configs/enable_keeper_node4_4.xml +++ b/tests/integration/test_keeper_nodes_move/configs/enable_keeper_node4_4.xml @@ -15,18 +15,18 @@ 1 node1 - 44444 + 9234 2 node2 - 44444 + 9234 true 4 node4 - 44444 + 9234 true diff --git a/tests/integration/test_keeper_nodes_remove/configs/enable_keeper1.xml b/tests/integration/test_keeper_nodes_remove/configs/enable_keeper1.xml index a8a1a17b831..1e57d42016d 100644 --- a/tests/integration/test_keeper_nodes_remove/configs/enable_keeper1.xml +++ b/tests/integration/test_keeper_nodes_remove/configs/enable_keeper1.xml @@ -15,18 +15,18 @@ 1 node1 - 44444 + 9234 2 node2 - 44444 + 9234 true 3 node3 - 44444 + 9234 true diff --git a/tests/integration/test_keeper_nodes_remove/configs/enable_keeper2.xml b/tests/integration/test_keeper_nodes_remove/configs/enable_keeper2.xml index 7820158680c..98422b41c9b 100644 --- a/tests/integration/test_keeper_nodes_remove/configs/enable_keeper2.xml +++ b/tests/integration/test_keeper_nodes_remove/configs/enable_keeper2.xml @@ -15,18 +15,18 @@ 1 node1 - 44444 + 9234 2 node2 - 44444 + 9234 true 3 node3 - 44444 + 9234 true diff --git a/tests/integration/test_keeper_nodes_remove/configs/enable_keeper3.xml b/tests/integration/test_keeper_nodes_remove/configs/enable_keeper3.xml index cd76b0d216e..43800bd2dfb 100644 --- a/tests/integration/test_keeper_nodes_remove/configs/enable_keeper3.xml +++ b/tests/integration/test_keeper_nodes_remove/configs/enable_keeper3.xml @@ -15,18 +15,18 @@ 1 node1 - 44444 + 9234 2 node2 - 44444 + 9234 true 3 node3 - 44444 + 9234 true diff --git a/tests/integration/test_keeper_nodes_remove/configs/enable_keeper_two_nodes_1.xml b/tests/integration/test_keeper_nodes_remove/configs/enable_keeper_two_nodes_1.xml index 3f6baf1ccc7..697986638d7 100644 --- a/tests/integration/test_keeper_nodes_remove/configs/enable_keeper_two_nodes_1.xml +++ b/tests/integration/test_keeper_nodes_remove/configs/enable_keeper_two_nodes_1.xml @@ -15,12 +15,12 @@ 1 node1 - 44444 + 9234 2 node2 - 44444 + 9234 diff --git a/tests/integration/test_keeper_nodes_remove/configs/enable_keeper_two_nodes_2.xml b/tests/integration/test_keeper_nodes_remove/configs/enable_keeper_two_nodes_2.xml index 61c9a00ffd6..967940e1e2b 100644 --- a/tests/integration/test_keeper_nodes_remove/configs/enable_keeper_two_nodes_2.xml +++ b/tests/integration/test_keeper_nodes_remove/configs/enable_keeper_two_nodes_2.xml @@ -15,12 +15,12 @@ 1 node1 - 44444 + 9234 2 node2 - 44444 + 9234 diff --git a/tests/integration/test_keeper_nodes_remove/configs/enable_single_keeper1.xml b/tests/integration/test_keeper_nodes_remove/configs/enable_single_keeper1.xml index bd695389fa6..c1d38a1de52 100644 --- a/tests/integration/test_keeper_nodes_remove/configs/enable_single_keeper1.xml +++ b/tests/integration/test_keeper_nodes_remove/configs/enable_single_keeper1.xml @@ -15,7 +15,7 @@ 1 
node1 - 44444 + 9234 diff --git a/tests/integration/test_keeper_persistent_log/configs/enable_keeper.xml b/tests/integration/test_keeper_persistent_log/configs/enable_keeper.xml index e302b857666..d6166a83d2f 100644 --- a/tests/integration/test_keeper_persistent_log/configs/enable_keeper.xml +++ b/tests/integration/test_keeper_persistent_log/configs/enable_keeper.xml @@ -15,7 +15,7 @@ 1 localhost - 44444 + 9234 diff --git a/tests/integration/test_keeper_persistent_log_multinode/configs/enable_keeper1.xml b/tests/integration/test_keeper_persistent_log_multinode/configs/enable_keeper1.xml index 2ee38419581..06b70c1b5d0 100644 --- a/tests/integration/test_keeper_persistent_log_multinode/configs/enable_keeper1.xml +++ b/tests/integration/test_keeper_persistent_log_multinode/configs/enable_keeper1.xml @@ -15,14 +15,14 @@ 1 node1 - 44444 + 9234 true 3 2 node2 - 44444 + 9234 true true 2 @@ -30,7 +30,7 @@ 3 node3 - 44444 + 9234 true true 1 diff --git a/tests/integration/test_keeper_persistent_log_multinode/configs/enable_keeper2.xml b/tests/integration/test_keeper_persistent_log_multinode/configs/enable_keeper2.xml index 018fa284896..f13f8b902b4 100644 --- a/tests/integration/test_keeper_persistent_log_multinode/configs/enable_keeper2.xml +++ b/tests/integration/test_keeper_persistent_log_multinode/configs/enable_keeper2.xml @@ -15,14 +15,14 @@ 1 node1 - 44444 + 9234 true 3 2 node2 - 44444 + 9234 true true 2 @@ -30,7 +30,7 @@ 3 node3 - 44444 + 9234 true true 1 diff --git a/tests/integration/test_keeper_persistent_log_multinode/configs/enable_keeper3.xml b/tests/integration/test_keeper_persistent_log_multinode/configs/enable_keeper3.xml index 77a1d720b69..a69cabf8c54 100644 --- a/tests/integration/test_keeper_persistent_log_multinode/configs/enable_keeper3.xml +++ b/tests/integration/test_keeper_persistent_log_multinode/configs/enable_keeper3.xml @@ -15,14 +15,14 @@ 1 node1 - 44444 + 9234 true 3 2 node2 - 44444 + 9234 true true 2 @@ -30,7 +30,7 @@ 3 node3 - 44444 + 9234 true true 1 diff --git a/tests/integration/test_keeper_remove_leader/configs/enable_keeper1.xml b/tests/integration/test_keeper_remove_leader/configs/enable_keeper1.xml new file mode 100644 index 00000000000..1e57d42016d --- /dev/null +++ b/tests/integration/test_keeper_remove_leader/configs/enable_keeper1.xml @@ -0,0 +1,34 @@ + + + 9181 + 1 + /var/lib/clickhouse/coordination/log + /var/lib/clickhouse/coordination/snapshots + + + 5000 + 10000 + trace + + + + + 1 + node1 + 9234 + + + 2 + node2 + 9234 + true + + + 3 + node3 + 9234 + true + + + + diff --git a/tests/integration/test_keeper_remove_leader/configs/enable_keeper2.xml b/tests/integration/test_keeper_remove_leader/configs/enable_keeper2.xml new file mode 100644 index 00000000000..98422b41c9b --- /dev/null +++ b/tests/integration/test_keeper_remove_leader/configs/enable_keeper2.xml @@ -0,0 +1,34 @@ + + + 9181 + 2 + /var/lib/clickhouse/coordination/log + /var/lib/clickhouse/coordination/snapshots + + + 5000 + 10000 + trace + + + + + 1 + node1 + 9234 + + + 2 + node2 + 9234 + true + + + 3 + node3 + 9234 + true + + + + diff --git a/tests/integration/test_keeper_remove_leader/configs/enable_keeper3.xml b/tests/integration/test_keeper_remove_leader/configs/enable_keeper3.xml new file mode 100644 index 00000000000..43800bd2dfb --- /dev/null +++ b/tests/integration/test_keeper_remove_leader/configs/enable_keeper3.xml @@ -0,0 +1,34 @@ + + + 9181 + 3 + /var/lib/clickhouse/coordination/log + /var/lib/clickhouse/coordination/snapshots + + + 5000 + 10000 + trace + + + + + 1 + node1 + 9234 
+ + + 2 + node2 + 9234 + true + + + 3 + node3 + 9234 + true + + + + diff --git a/tests/integration/test_keeper_remove_leader/configs/enable_keeper_two_nodes_1.xml b/tests/integration/test_keeper_remove_leader/configs/enable_keeper_two_nodes_1.xml new file mode 100644 index 00000000000..d51e420f733 --- /dev/null +++ b/tests/integration/test_keeper_remove_leader/configs/enable_keeper_two_nodes_1.xml @@ -0,0 +1,28 @@ + + + 9181 + 1 + /var/lib/clickhouse/coordination/log + /var/lib/clickhouse/coordination/snapshots + + + 5000 + 10000 + trace + + + + + 2 + node2 + 9234 + + + 3 + node3 + 9234 + true + + + + diff --git a/tests/integration/test_keeper_remove_leader/configs/enable_keeper_two_nodes_2.xml b/tests/integration/test_keeper_remove_leader/configs/enable_keeper_two_nodes_2.xml new file mode 100644 index 00000000000..3f1ee1e01a8 --- /dev/null +++ b/tests/integration/test_keeper_remove_leader/configs/enable_keeper_two_nodes_2.xml @@ -0,0 +1,28 @@ + + + 9181 + 2 + /var/lib/clickhouse/coordination/log + /var/lib/clickhouse/coordination/snapshots + + + 5000 + 10000 + trace + + + + + 2 + node2 + 9234 + + + 3 + node3 + 9234 + true + + + + diff --git a/tests/integration/test_keeper_remove_leader/configs/enable_keeper_two_nodes_3.xml b/tests/integration/test_keeper_remove_leader/configs/enable_keeper_two_nodes_3.xml new file mode 100644 index 00000000000..a99bd5d5296 --- /dev/null +++ b/tests/integration/test_keeper_remove_leader/configs/enable_keeper_two_nodes_3.xml @@ -0,0 +1,28 @@ + + + 9181 + 3 + /var/lib/clickhouse/coordination/log + /var/lib/clickhouse/coordination/snapshots + + + 5000 + 10000 + trace + + + + + 2 + node2 + 9234 + + + 3 + node3 + 9234 + true + + + + diff --git a/tests/integration/test_keeper_restore_from_snapshot/configs/enable_keeper1.xml b/tests/integration/test_keeper_restore_from_snapshot/configs/enable_keeper1.xml index 6e838ed2a96..4ea543e6f31 100644 --- a/tests/integration/test_keeper_restore_from_snapshot/configs/enable_keeper1.xml +++ b/tests/integration/test_keeper_restore_from_snapshot/configs/enable_keeper1.xml @@ -18,14 +18,14 @@ 1 node1 - 44444 + 9234 true 70 2 node2 - 44444 + 9234 true true 20 @@ -33,7 +33,7 @@ 3 node3 - 44444 + 9234 true true 10 diff --git a/tests/integration/test_keeper_restore_from_snapshot/configs/enable_keeper2.xml b/tests/integration/test_keeper_restore_from_snapshot/configs/enable_keeper2.xml index 1ff565044e8..4bf3083c1fa 100644 --- a/tests/integration/test_keeper_restore_from_snapshot/configs/enable_keeper2.xml +++ b/tests/integration/test_keeper_restore_from_snapshot/configs/enable_keeper2.xml @@ -18,14 +18,14 @@ 1 node1 - 44444 + 9234 true 70 2 node2 - 44444 + 9234 true true 20 @@ -33,7 +33,7 @@ 3 node3 - 44444 + 9234 true true 10 diff --git a/tests/integration/test_keeper_restore_from_snapshot/configs/enable_keeper3.xml b/tests/integration/test_keeper_restore_from_snapshot/configs/enable_keeper3.xml index 8f022c22c3c..b9e2a2d0422 100644 --- a/tests/integration/test_keeper_restore_from_snapshot/configs/enable_keeper3.xml +++ b/tests/integration/test_keeper_restore_from_snapshot/configs/enable_keeper3.xml @@ -18,14 +18,14 @@ 1 node1 - 44444 + 9234 true 70 2 node2 - 44444 + 9234 true true 20 @@ -33,7 +33,7 @@ 3 node3 - 44444 + 9234 true true 10 diff --git a/tests/integration/test_keeper_secure_client/configs/enable_secure_keeper.xml b/tests/integration/test_keeper_secure_client/configs/enable_secure_keeper.xml index b8ef730c6ed..2c144269bcc 100644 --- a/tests/integration/test_keeper_secure_client/configs/enable_secure_keeper.xml +++ 
b/tests/integration/test_keeper_secure_client/configs/enable_secure_keeper.xml @@ -17,7 +17,7 @@ 1 localhost - 44444 + 9234 diff --git a/tests/integration/test_keeper_snapshot_small_distance/configs/keeper_config1.xml b/tests/integration/test_keeper_snapshot_small_distance/configs/keeper_config1.xml index 1e9bea6c606..ed5909d01eb 100644 --- a/tests/integration/test_keeper_snapshot_small_distance/configs/keeper_config1.xml +++ b/tests/integration/test_keeper_snapshot_small_distance/configs/keeper_config1.xml @@ -17,19 +17,19 @@ 1 node1 - 44444 + 9234 3 2 node2 - 44444 + 9234 2 3 node3 - 44444 + 9234 1 diff --git a/tests/integration/test_keeper_snapshot_small_distance/configs/keeper_config2.xml b/tests/integration/test_keeper_snapshot_small_distance/configs/keeper_config2.xml index 4fc00055ff1..896853a3713 100644 --- a/tests/integration/test_keeper_snapshot_small_distance/configs/keeper_config2.xml +++ b/tests/integration/test_keeper_snapshot_small_distance/configs/keeper_config2.xml @@ -17,19 +17,19 @@ 1 node1 - 44444 + 9234 3 2 node2 - 44444 + 9234 2 3 node3 - 44444 + 9234 1 diff --git a/tests/integration/test_keeper_snapshot_small_distance/configs/keeper_config3.xml b/tests/integration/test_keeper_snapshot_small_distance/configs/keeper_config3.xml index 89134440aa3..8d1d5c73dd8 100644 --- a/tests/integration/test_keeper_snapshot_small_distance/configs/keeper_config3.xml +++ b/tests/integration/test_keeper_snapshot_small_distance/configs/keeper_config3.xml @@ -17,19 +17,19 @@ 1 node1 - 44444 + 9234 3 2 node2 - 44444 + 9234 2 3 node3 - 44444 + 9234 1 diff --git a/tests/integration/test_keeper_snapshots/configs/enable_keeper.xml b/tests/integration/test_keeper_snapshots/configs/enable_keeper.xml index 15db48f4f37..a3217b34501 100644 --- a/tests/integration/test_keeper_snapshots/configs/enable_keeper.xml +++ b/tests/integration/test_keeper_snapshots/configs/enable_keeper.xml @@ -22,7 +22,7 @@ 1 localhost - 44444 + 9234 diff --git a/tests/integration/test_keeper_snapshots_multinode/configs/enable_keeper1.xml b/tests/integration/test_keeper_snapshots_multinode/configs/enable_keeper1.xml index 759d3e7415b..27d0d38f596 100644 --- a/tests/integration/test_keeper_snapshots_multinode/configs/enable_keeper1.xml +++ b/tests/integration/test_keeper_snapshots_multinode/configs/enable_keeper1.xml @@ -17,14 +17,14 @@ 1 node1 - 44444 + 9234 true 3 2 node2 - 44444 + 9234 true true 2 @@ -32,7 +32,7 @@ 3 node3 - 44444 + 9234 true true 1 diff --git a/tests/integration/test_keeper_snapshots_multinode/configs/enable_keeper2.xml b/tests/integration/test_keeper_snapshots_multinode/configs/enable_keeper2.xml index d4b2296d54d..dedbd312544 100644 --- a/tests/integration/test_keeper_snapshots_multinode/configs/enable_keeper2.xml +++ b/tests/integration/test_keeper_snapshots_multinode/configs/enable_keeper2.xml @@ -17,14 +17,14 @@ 1 node1 - 44444 + 9234 true 3 2 node2 - 44444 + 9234 true true 2 @@ -32,7 +32,7 @@ 3 node3 - 44444 + 9234 true true 1 diff --git a/tests/integration/test_keeper_snapshots_multinode/configs/enable_keeper3.xml b/tests/integration/test_keeper_snapshots_multinode/configs/enable_keeper3.xml index 37fa4e8b3b7..fd36458b5eb 100644 --- a/tests/integration/test_keeper_snapshots_multinode/configs/enable_keeper3.xml +++ b/tests/integration/test_keeper_snapshots_multinode/configs/enable_keeper3.xml @@ -17,14 +17,14 @@ 1 node1 - 44444 + 9234 true 3 2 node2 - 44444 + 9234 true true 2 @@ -32,7 +32,7 @@ 3 node3 - 44444 + 9234 true true 1 diff --git 
a/tests/integration/test_keeper_start_as_follower_multinode/configs/enable_keeper1.xml b/tests/integration/test_keeper_start_as_follower_multinode/configs/enable_keeper1.xml new file mode 100644 index 00000000000..1e57d42016d --- /dev/null +++ b/tests/integration/test_keeper_start_as_follower_multinode/configs/enable_keeper1.xml @@ -0,0 +1,34 @@ + + + 9181 + 1 + /var/lib/clickhouse/coordination/log + /var/lib/clickhouse/coordination/snapshots + + + 5000 + 10000 + trace + + + + + 1 + node1 + 9234 + + + 2 + node2 + 9234 + true + + + 3 + node3 + 9234 + true + + + + diff --git a/tests/integration/test_keeper_start_as_follower_multinode/configs/enable_keeper2.xml b/tests/integration/test_keeper_start_as_follower_multinode/configs/enable_keeper2.xml new file mode 100644 index 00000000000..98422b41c9b --- /dev/null +++ b/tests/integration/test_keeper_start_as_follower_multinode/configs/enable_keeper2.xml @@ -0,0 +1,34 @@ + + + 9181 + 2 + /var/lib/clickhouse/coordination/log + /var/lib/clickhouse/coordination/snapshots + + + 5000 + 10000 + trace + + + + + 1 + node1 + 9234 + + + 2 + node2 + 9234 + true + + + 3 + node3 + 9234 + true + + + + diff --git a/tests/integration/test_keeper_start_as_follower_multinode/configs/enable_keeper3.xml b/tests/integration/test_keeper_start_as_follower_multinode/configs/enable_keeper3.xml new file mode 100644 index 00000000000..43800bd2dfb --- /dev/null +++ b/tests/integration/test_keeper_start_as_follower_multinode/configs/enable_keeper3.xml @@ -0,0 +1,34 @@ + + + 9181 + 3 + /var/lib/clickhouse/coordination/log + /var/lib/clickhouse/coordination/snapshots + + + 5000 + 10000 + trace + + + + + 1 + node1 + 9234 + + + 2 + node2 + 9234 + true + + + 3 + node3 + 9234 + true + + + + diff --git a/tests/integration/test_keeper_three_nodes_start/configs/enable_keeper1.xml b/tests/integration/test_keeper_three_nodes_start/configs/enable_keeper1.xml index bc62d817074..d2159348588 100644 --- a/tests/integration/test_keeper_three_nodes_start/configs/enable_keeper1.xml +++ b/tests/integration/test_keeper_three_nodes_start/configs/enable_keeper1.xml @@ -15,17 +15,17 @@ 1 node1 - 44444 + 9234 2 node2 - 44444 + 9234 3 non_existing_node - 44444 + 9234 diff --git a/tests/integration/test_keeper_three_nodes_start/configs/enable_keeper2.xml b/tests/integration/test_keeper_three_nodes_start/configs/enable_keeper2.xml index a6c476fb449..0d9aeb191d9 100644 --- a/tests/integration/test_keeper_three_nodes_start/configs/enable_keeper2.xml +++ b/tests/integration/test_keeper_three_nodes_start/configs/enable_keeper2.xml @@ -15,17 +15,17 @@ 1 node1 - 44444 + 9234 2 node2 - 44444 + 9234 3 non_existing_node - 44444 + 9234 diff --git a/tests/integration/test_keeper_three_nodes_two_alive/configs/enable_keeper1.xml b/tests/integration/test_keeper_three_nodes_two_alive/configs/enable_keeper1.xml index 510424715c4..d2717283a8d 100644 --- a/tests/integration/test_keeper_three_nodes_two_alive/configs/enable_keeper1.xml +++ b/tests/integration/test_keeper_three_nodes_two_alive/configs/enable_keeper1.xml @@ -15,17 +15,17 @@ 1 node1 - 44444 + 9234 2 node2 - 44444 + 9234 3 node3 - 44444 + 9234 diff --git a/tests/integration/test_keeper_three_nodes_two_alive/configs/enable_keeper2.xml b/tests/integration/test_keeper_three_nodes_two_alive/configs/enable_keeper2.xml index 264601d8c98..5924ee1c2dc 100644 --- a/tests/integration/test_keeper_three_nodes_two_alive/configs/enable_keeper2.xml +++ b/tests/integration/test_keeper_three_nodes_two_alive/configs/enable_keeper2.xml @@ -15,17 +15,17 @@ 1 node1 - 44444 + 9234 
2 node2 - 44444 + 9234 3 node3 - 44444 + 9234 diff --git a/tests/integration/test_keeper_three_nodes_two_alive/configs/enable_keeper3.xml b/tests/integration/test_keeper_three_nodes_two_alive/configs/enable_keeper3.xml index 7f9775939bb..d261e4f67f3 100644 --- a/tests/integration/test_keeper_three_nodes_two_alive/configs/enable_keeper3.xml +++ b/tests/integration/test_keeper_three_nodes_two_alive/configs/enable_keeper3.xml @@ -15,17 +15,17 @@ 1 node1 - 44444 + 9234 2 node2 - 44444 + 9234 3 node3 - 44444 + 9234 diff --git a/tests/integration/test_keeper_two_nodes_cluster/configs/enable_keeper1.xml b/tests/integration/test_keeper_two_nodes_cluster/configs/enable_keeper1.xml index 70626b33130..7c2e283e89f 100644 --- a/tests/integration/test_keeper_two_nodes_cluster/configs/enable_keeper1.xml +++ b/tests/integration/test_keeper_two_nodes_cluster/configs/enable_keeper1.xml @@ -16,14 +16,14 @@ 1 node1 - 44444 + 9234 true 3 2 node2 - 44444 + 9234 true true 2 diff --git a/tests/integration/test_keeper_two_nodes_cluster/configs/enable_keeper2.xml b/tests/integration/test_keeper_two_nodes_cluster/configs/enable_keeper2.xml index 96068de33a9..618e6a04aec 100644 --- a/tests/integration/test_keeper_two_nodes_cluster/configs/enable_keeper2.xml +++ b/tests/integration/test_keeper_two_nodes_cluster/configs/enable_keeper2.xml @@ -16,14 +16,14 @@ 1 node1 - 44444 + 9234 true 3 2 node2 - 44444 + 9234 true true 2 diff --git a/tests/integration/test_keeper_zookeeper_converter/configs/keeper_config.xml b/tests/integration/test_keeper_zookeeper_converter/configs/keeper_config.xml index 612cb979bb7..9b50f2c6c41 100644 --- a/tests/integration/test_keeper_zookeeper_converter/configs/keeper_config.xml +++ b/tests/integration/test_keeper_zookeeper_converter/configs/keeper_config.xml @@ -16,7 +16,7 @@ 1 localhost - 44444 + 9234 diff --git a/tests/integration/test_system_logs_recreate/__init__.py b/tests/integration/test_system_logs_recreate/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/integration/test_system_logs_recreate/test.py b/tests/integration/test_system_logs_recreate/test.py new file mode 100644 index 00000000000..3ab0269b42e --- /dev/null +++ b/tests/integration/test_system_logs_recreate/test.py @@ -0,0 +1,70 @@ +# pylint: disable=line-too-long +# pylint: disable=unused-argument +# pylint: disable=redefined-outer-name + +import pytest +from helpers.cluster import ClickHouseCluster + +cluster = ClickHouseCluster(__file__) +node = cluster.add_instance('node_default', stay_alive=True) + +@pytest.fixture(scope='module', autouse=True) +def start_cluster(): + try: + cluster.start() + yield cluster + finally: + cluster.shutdown() + + +def test_system_logs_recreate(): + system_logs = [ + # enabled by default + 'query_log', + 'query_thread_log', + 'part_log', + 'trace_log', + 'metric_log', + ] + + node.query('SYSTEM FLUSH LOGS') + for table in system_logs: + assert 'ENGINE = MergeTree' in node.query(f'SHOW CREATE TABLE system.{table}') + assert 'ENGINE = Null' not in node.query(f'SHOW CREATE TABLE system.{table}') + assert len(node.query(f"SHOW TABLES FROM system LIKE '{table}%'").strip().split('\n')) == 1 + + # NOTE: we use zzz- prefix to make it the last file, + # so that it will be applied last. 
+ for table in system_logs: + node.exec_in_container(['bash', '-c', f"""echo " + + <{table}> + ENGINE = Null + + + + " > /etc/clickhouse-server/config.d/zzz-override-{table}.xml + """]) + + node.restart_clickhouse() + node.query('SYSTEM FLUSH LOGS') + for table in system_logs: + assert 'ENGINE = MergeTree' not in node.query(f'SHOW CREATE TABLE system.{table}') + assert 'ENGINE = Null' in node.query(f'SHOW CREATE TABLE system.{table}') + assert len(node.query(f"SHOW TABLES FROM system LIKE '{table}%'").strip().split('\n')) == 2 + + for table in system_logs: + node.exec_in_container(['rm', f'/etc/clickhouse-server/config.d/zzz-override-{table}.xml']) + + node.restart_clickhouse() + node.query('SYSTEM FLUSH LOGS') + for table in system_logs: + assert 'ENGINE = MergeTree' in node.query(f'SHOW CREATE TABLE system.{table}') + assert 'ENGINE = Null' not in node.query(f'SHOW CREATE TABLE system.{table}') + assert len(node.query(f"SHOW TABLES FROM system LIKE '{table}%'").strip().split('\n')) == 3 + + node.query('SYSTEM FLUSH LOGS') + # Ensure that there were no superfluous RENAMEs + # IOW that the table is created only when the structure is indeed different. + for table in system_logs: + assert len(node.query(f"SHOW TABLES FROM system LIKE '{table}%'").strip().split('\n')) == 3 diff --git a/tests/performance/count.xml b/tests/performance/count.xml index b75fd4e4df5..bd9ef2baf80 100644 --- a/tests/performance/count.xml +++ b/tests/performance/count.xml @@ -4,6 +4,8 @@ INSERT INTO data SELECT number, 1 from numbers(10000000) SELECT count() FROM data + SELECT count(k) FROM (SELECT k FROM data, numbers(20) _a) + SELECT count(k) FROM (SELECT toNullable(k) as k FROM data, numbers(20) _a) DROP TABLE IF EXISTS data diff --git a/tests/performance/countIf.xml b/tests/performance/countIf.xml index a2209d5ed22..87616235226 100644 --- a/tests/performance/countIf.xml +++ b/tests/performance/countIf.xml @@ -1,3 +1,21 @@ SELECT countIf(number % 2) FROM numbers(100000000) + + + SELECT countIf(key IS NOT NULL) + FROM + ( + SELECT materialize(toNullable(1)) AS key + FROM numbers(100000000) + ) + + + + SELECT countIf(key IS NOT NULL) + FROM + ( + SELECT materialize(CAST(NULL, 'Nullable(Int8)')) AS key + FROM numbers(100000000) + ) + diff --git a/tests/queries/0_stateless/02124_encrypt_decrypt_nullable.reference b/tests/queries/0_stateless/02124_encrypt_decrypt_nullable.reference new file mode 100644 index 00000000000..5a3daa6efb8 --- /dev/null +++ b/tests/queries/0_stateless/02124_encrypt_decrypt_nullable.reference @@ -0,0 +1,16 @@ +aes_encrypt_mysql +\N +D1B43643E1D0E9390E39BA4EAE150851 +aes_decrypt_mysql +\N +48656C6C6F20576F726C6421 +encrypt +aes-256-ecb \N +aes-256-gcm \N +aes-256-ecb D1B43643E1D0E9390E39BA4EAE150851 +aes-256-gcm 219E6478A1A3BB5B686DA4BAD70323F192EFEDCCBBD6F49E78A7E2F6 +decrypt +aes-256-ecb \N +aes-256-gcm \N +aes-256-ecb Hello World! +aes-256-gcm Hello World! diff --git a/tests/queries/0_stateless/02124_encrypt_decrypt_nullable.sql b/tests/queries/0_stateless/02124_encrypt_decrypt_nullable.sql new file mode 100644 index 00000000000..a029b4afaaf --- /dev/null +++ b/tests/queries/0_stateless/02124_encrypt_decrypt_nullable.sql @@ -0,0 +1,57 @@ +-- Tags: no-fasttest +-- Tag no-fasttest: Depends on OpenSSL + +------------------------------------------------------------------------------- +-- Validate that encrypt/decrypt (and mysql versions) work against Nullable(String). +-- null gets encrypted/decrypted as null, non-null encrypted/decrypted as usual.
+------------------------------------------------------------------------------- +-- using CAST(null as Nullable(String)) since that is the easiest way to produce `Nullable(String)` with a `null` value + +----------------------------------------------------------------------------------------------------------------------------------- +-- MySQL compatibility +SELECT 'aes_encrypt_mysql'; + +SELECT aes_encrypt_mysql('aes-256-ecb', CAST(null as Nullable(String)), 'test_key________________________'); + +WITH 'aes-256-ecb' as mode, 'Hello World!' as plaintext, 'test_key________________________' as key +SELECT hex(aes_encrypt_mysql(mode, toNullable(plaintext), key)); + +SELECT 'aes_decrypt_mysql'; + +SELECT aes_decrypt_mysql('aes-256-ecb', CAST(null as Nullable(String)), 'test_key________________________'); + +WITH 'aes-256-ecb' as mode, unhex('D1B43643E1D0E9390E39BA4EAE150851') as ciphertext, 'test_key________________________' as key +SELECT hex(aes_decrypt_mysql(mode, toNullable(ciphertext), key)); + +----------------------------------------------------------------------------------------------------------------------------------- +-- encrypt both non-null and null values of Nullable(String) +SELECT 'encrypt'; + +WITH 'aes-256-ecb' as mode, 'test_key________________________' as key +SELECT mode, encrypt(mode, CAST(null as Nullable(String)), key); + +WITH 'aes-256-gcm' as mode, 'test_key________________________' as key, 'test_iv_____' as iv +SELECT mode, encrypt(mode, CAST(null as Nullable(String)), key, iv); + +WITH 'aes-256-ecb' as mode, 'test_key________________________' as key +SELECT mode, hex(encrypt(mode, toNullable('Hello World!'), key)); + +WITH 'aes-256-gcm' as mode, 'test_key________________________' as key, 'test_iv_____' as iv +SELECT mode, hex(encrypt(mode, toNullable('Hello World!'), key, iv)); + +----------------------------------------------------------------------------------------------------------------------------------- +-- decrypt both non-null and null values of Nullable(String) + +SELECT 'decrypt'; + +WITH 'aes-256-ecb' as mode, 'test_key________________________' as key +SELECT mode, decrypt(mode, CAST(null as Nullable(String)), key); + +WITH 'aes-256-gcm' as mode, 'test_key________________________' as key, 'test_iv_____' as iv +SELECT mode, decrypt(mode, CAST(null as Nullable(String)), key, iv); + +WITH 'aes-256-ecb' as mode, unhex('D1B43643E1D0E9390E39BA4EAE150851') as ciphertext, 'test_key________________________' as key +SELECT mode, decrypt(mode, toNullable(ciphertext), key); + +WITH 'aes-256-gcm' as mode, unhex('219E6478A1A3BB5B686DA4BAD70323F192EFEDCCBBD6F49E78A7E2F6') as ciphertext, 'test_key________________________' as key, 'test_iv_____' as iv +SELECT mode, decrypt(mode, toNullable(ciphertext), key, iv); diff --git a/tests/queries/0_stateless/02125_constant_if_condition_and_not_existing_column.reference b/tests/queries/0_stateless/02125_constant_if_condition_and_not_existing_column.reference new file mode 100644 index 00000000000..67f2590a0c6 --- /dev/null +++ b/tests/queries/0_stateless/02125_constant_if_condition_and_not_existing_column.reference @@ -0,0 +1,8 @@ +42 +42 +42 +42 +42 +42 +42 +42 diff --git a/tests/queries/0_stateless/02125_constant_if_condition_and_not_existing_column.sql b/tests/queries/0_stateless/02125_constant_if_condition_and_not_existing_column.sql new file mode 100644 index 00000000000..ad3d417bc26 --- /dev/null +++ b/tests/queries/0_stateless/02125_constant_if_condition_and_not_existing_column.sql @@ -0,0 +1,14 @@ +drop table if exists test; +-- these queries
do not have to pass, but they work historically +-- let's support this while we can, see #31687 +create table test (x String) Engine=StripeLog; +insert into test values (0); +select if(0, y, 42) from test; +select if(1, 42, y) from test; +select if(toUInt8(0), y, 42) from test; +select if(toInt8(0), y, 42) from test; +select if(toUInt8(1), 42, y) from test; +select if(toInt8(1), 42, y) from test; +select if(toUInt8(toUInt8(0)), y, 42) from test; +select if(cast(cast(0, 'UInt8'), 'UInt8'), y, 42) from test; +drop table if exists t; diff --git a/tests/queries/0_stateless/02125_query_views_log.reference b/tests/queries/0_stateless/02125_query_views_log.reference new file mode 100644 index 00000000000..3ae4af9b4d0 --- /dev/null +++ b/tests/queries/0_stateless/02125_query_views_log.reference @@ -0,0 +1,24 @@ +-- { echo } +select view_name, read_rows, read_bytes, written_rows, written_bytes from system.query_views_log where startsWith(view_name, currentDatabase() || '.mv') order by view_name format Vertical; +Row 1: +────── +view_name: default.mv1 +read_rows: 1000000 +read_bytes: 4000000 +written_rows: 1000000 +written_bytes: 4000000 + +Row 2: +────── +view_name: default.mv2 +read_rows: 1000000 +read_bytes: 4000000 +written_rows: 1000000 +written_bytes: 4000000 +select read_rows, read_bytes, written_rows, written_bytes from system.query_log where type = 'QueryFinish' and query_kind = 'Insert' and current_database = currentDatabase() format Vertical; +Row 1: +────── +read_rows: 1000000 +read_bytes: 8000000 +written_rows: 3000000 +written_bytes: 12000000 diff --git a/tests/queries/0_stateless/02125_query_views_log.sql b/tests/queries/0_stateless/02125_query_views_log.sql new file mode 100644 index 00000000000..d2d19b76a1f --- /dev/null +++ b/tests/queries/0_stateless/02125_query_views_log.sql @@ -0,0 +1,16 @@ +drop table if exists src; +drop table if exists dst; +drop table if exists mv1; +drop table if exists mv2; + +create table src (key Int) engine=Null(); +create table dst (key Int) engine=Null(); +create materialized view mv1 to dst as select * from src; +create materialized view mv2 to dst as select * from src; + +insert into src select * from numbers(1e6) settings log_queries=1, max_untracked_memory=0, parallel_view_processing=1; +system flush logs; + +-- { echo } +select view_name, read_rows, read_bytes, written_rows, written_bytes from system.query_views_log where startsWith(view_name, currentDatabase() || '.mv') order by view_name format Vertical; +select read_rows, read_bytes, written_rows, written_bytes from system.query_log where type = 'QueryFinish' and query_kind = 'Insert' and current_database = currentDatabase() format Vertical; diff --git a/website/templates/index/quickstart.html b/website/templates/index/quickstart.html index 2201160f239..100261ab548 100644 --- a/website/templates/index/quickstart.html +++ b/website/templates/index/quickstart.html @@ -20,10 +20,10 @@ Linux (ARM)