Merge branch 'ClickHouse:master' into master

commit 93efc75dc7
Author: michael1589
Date: 2021-11-30 15:05:24 +08:00 (committed via GitHub)
156 changed files with 1931 additions and 1147 deletions


@@ -211,12 +211,12 @@ jobs:
       REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
       CACHES_PATH: ${{runner.temp}}/../ccaches
       CHECK_NAME: 'ClickHouse build check (actions)'
-      BUILD_NUMBER: 0
+      BUILD_NAME: 'package_release'
     run: |
       sudo rm -fr $TEMP_PATH
       mkdir -p $TEMP_PATH
       cp -r $GITHUB_WORKSPACE $TEMP_PATH
-      cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER
+      cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
   - name: Upload build URLs to artifacts
     uses: actions/upload-artifact@v2
     with:
@@ -250,12 +250,12 @@ jobs:
       REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
       CACHES_PATH: ${{runner.temp}}/../ccaches
       CHECK_NAME: 'ClickHouse build check (actions)'
-      BUILD_NUMBER: 8
+      BUILD_NAME: 'binary_release'
     run: |
       sudo rm -fr $TEMP_PATH
       mkdir -p $TEMP_PATH
       cp -r $GITHUB_WORKSPACE $TEMP_PATH
-      cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER
+      cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
   - name: Upload build URLs to artifacts
     uses: actions/upload-artifact@v2
     with:
@@ -289,12 +289,12 @@ jobs:
       REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
       CACHES_PATH: ${{runner.temp}}/../ccaches
       CHECK_NAME: 'ClickHouse build check (actions)'
-      BUILD_NUMBER: 3
+      BUILD_NAME: 'package_asan'
     run: |
       sudo rm -fr $TEMP_PATH
       mkdir -p $TEMP_PATH
       cp -r $GITHUB_WORKSPACE $TEMP_PATH
-      cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER
+      cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
   - name: Upload build URLs to artifacts
     uses: actions/upload-artifact@v2
     with:
@@ -328,12 +328,12 @@ jobs:
       REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
       CACHES_PATH: ${{runner.temp}}/../ccaches
       CHECK_NAME: 'ClickHouse build check (actions)'
-      BUILD_NUMBER: 4
+      BUILD_NAME: 'package_ubsan'
     run: |
       sudo rm -fr $TEMP_PATH
       mkdir -p $TEMP_PATH
       cp -r $GITHUB_WORKSPACE $TEMP_PATH
-      cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER
+      cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
   - name: Upload build URLs to artifacts
     uses: actions/upload-artifact@v2
     with:
@@ -367,12 +367,12 @@ jobs:
       REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
       CACHES_PATH: ${{runner.temp}}/../ccaches
       CHECK_NAME: 'ClickHouse build check (actions)'
-      BUILD_NUMBER: 5
+      BUILD_NAME: 'package_tsan'
     run: |
       sudo rm -fr $TEMP_PATH
       mkdir -p $TEMP_PATH
       cp -r $GITHUB_WORKSPACE $TEMP_PATH
-      cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER
+      cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
   - name: Upload build URLs to artifacts
     uses: actions/upload-artifact@v2
     with:
@@ -406,12 +406,12 @@ jobs:
       REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
       CACHES_PATH: ${{runner.temp}}/../ccaches
       CHECK_NAME: 'ClickHouse build check (actions)'
-      BUILD_NUMBER: 6
+      BUILD_NAME: 'package_msan'
     run: |
       sudo rm -fr $TEMP_PATH
       mkdir -p $TEMP_PATH
       cp -r $GITHUB_WORKSPACE $TEMP_PATH
-      cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER
+      cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
   - name: Upload build URLs to artifacts
     uses: actions/upload-artifact@v2
     with:
@@ -445,12 +445,12 @@ jobs:
       REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
       CACHES_PATH: ${{runner.temp}}/../ccaches
       CHECK_NAME: 'ClickHouse build check (actions)'
-      BUILD_NUMBER: 7
+      BUILD_NAME: 'package_debug'
     run: |
       sudo rm -fr $TEMP_PATH
       mkdir -p $TEMP_PATH
       cp -r $GITHUB_WORKSPACE $TEMP_PATH
-      cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER
+      cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
   - name: Upload build URLs to artifacts
     uses: actions/upload-artifact@v2
     with:
@@ -486,13 +486,13 @@ jobs:
       IMAGES_PATH: ${{runner.temp}}/images_path
       REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
       CACHES_PATH: ${{runner.temp}}/../ccaches
-      CHECK_NAME: 'ClickHouse special build check (actions)'
-      BUILD_NUMBER: 1
+      CHECK_NAME: 'ClickHouse build check (actions)'
+      BUILD_NAME: 'binary_splitted'
     run: |
       sudo rm -fr $TEMP_PATH
       mkdir -p $TEMP_PATH
       cp -r $GITHUB_WORKSPACE $TEMP_PATH
-      cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER
+      cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
   - name: Upload build URLs to artifacts
     uses: actions/upload-artifact@v2
     with:


@@ -84,7 +84,7 @@ jobs:
       sudo rm -fr $TEMP_PATH
       mkdir -p $TEMP_PATH
       cp -r $GITHUB_WORKSPACE $TEMP_PATH
-      cd $REPO_COPY/tests/ci && python3 compatibility_check.py 0
+      cd $REPO_COPY/tests/ci && python3 compatibility_check.py
   - name: Cleanup
     if: always()
     run: |
@@ -142,12 +142,12 @@ jobs:
       REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
       CACHES_PATH: ${{runner.temp}}/../ccaches
       CHECK_NAME: 'ClickHouse build check (actions)'
-      BUILD_NUMBER: 0
+      BUILD_NAME: 'package_release'
     run: |
       sudo rm -fr $TEMP_PATH
       mkdir -p $TEMP_PATH
       cp -r $GITHUB_WORKSPACE $TEMP_PATH
-      cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER
+      cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
   - name: Upload build URLs to artifacts
     uses: actions/upload-artifact@v2
     with:
@@ -181,12 +181,12 @@ jobs:
       REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
       CACHES_PATH: ${{runner.temp}}/../ccaches
       CHECK_NAME: 'ClickHouse build check (actions)'
-      BUILD_NUMBER: 8
+      BUILD_NAME: 'binary_release'
     run: |
       sudo rm -fr $TEMP_PATH
       mkdir -p $TEMP_PATH
       cp -r $GITHUB_WORKSPACE $TEMP_PATH
-      cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER
+      cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
   - name: Upload build URLs to artifacts
     uses: actions/upload-artifact@v2
     with:
@@ -219,12 +219,12 @@ jobs:
       REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
       CACHES_PATH: ${{runner.temp}}/../ccaches
       CHECK_NAME: 'ClickHouse build check (actions)'
-      BUILD_NUMBER: 3
+      BUILD_NAME: 'package_asan'
     run: |
       sudo rm -fr $TEMP_PATH
       mkdir -p $TEMP_PATH
       cp -r $GITHUB_WORKSPACE $TEMP_PATH
-      cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER
+      cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
   - name: Upload build URLs to artifacts
     uses: actions/upload-artifact@v2
     with:
@@ -257,12 +257,12 @@ jobs:
       REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
       CACHES_PATH: ${{runner.temp}}/../ccaches
       CHECK_NAME: 'ClickHouse build check (actions)'
-      BUILD_NUMBER: 4
+      BUILD_NAME: 'package_ubsan'
     run: |
       sudo rm -fr $TEMP_PATH
       mkdir -p $TEMP_PATH
       cp -r $GITHUB_WORKSPACE $TEMP_PATH
-      cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER
+      cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
   - name: Upload build URLs to artifacts
     uses: actions/upload-artifact@v2
     with:
@@ -295,12 +295,12 @@ jobs:
       REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
       CACHES_PATH: ${{runner.temp}}/../ccaches
       CHECK_NAME: 'ClickHouse build check (actions)'
-      BUILD_NUMBER: 5
+      BUILD_NAME: 'package_tsan'
     run: |
       sudo rm -fr $TEMP_PATH
       mkdir -p $TEMP_PATH
       cp -r $GITHUB_WORKSPACE $TEMP_PATH
-      cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER
+      cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
   - name: Upload build URLs to artifacts
     uses: actions/upload-artifact@v2
     with:
@@ -333,12 +333,12 @@ jobs:
       REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
       CACHES_PATH: ${{runner.temp}}/../ccaches
       CHECK_NAME: 'ClickHouse build check (actions)'
-      BUILD_NUMBER: 6
+      BUILD_NAME: 'package_msan'
     run: |
       sudo rm -fr $TEMP_PATH
       mkdir -p $TEMP_PATH
       cp -r $GITHUB_WORKSPACE $TEMP_PATH
-      cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER
+      cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
   - name: Upload build URLs to artifacts
     uses: actions/upload-artifact@v2
     with:
@@ -371,12 +371,12 @@ jobs:
       REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
       CACHES_PATH: ${{runner.temp}}/../ccaches
       CHECK_NAME: 'ClickHouse build check (actions)'
-      BUILD_NUMBER: 7
+      BUILD_NAME: 'package_debug'
     run: |
       sudo rm -fr $TEMP_PATH
       mkdir -p $TEMP_PATH
       cp -r $GITHUB_WORKSPACE $TEMP_PATH
-      cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER
+      cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
   - name: Upload build URLs to artifacts
     uses: actions/upload-artifact@v2
     with:
@@ -412,13 +412,13 @@ jobs:
       IMAGES_PATH: ${{runner.temp}}/images_path
       REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
       CACHES_PATH: ${{runner.temp}}/../ccaches
-      CHECK_NAME: 'ClickHouse special build check (actions)'
-      BUILD_NUMBER: 1
+      CHECK_NAME: 'ClickHouse build check (actions)'
+      BUILD_NAME: 'binary_splitted'
     run: |
       sudo rm -fr $TEMP_PATH
       mkdir -p $TEMP_PATH
       cp -r $GITHUB_WORKSPACE $TEMP_PATH
-      cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER
+      cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
   - name: Upload build URLs to artifacts
     uses: actions/upload-artifact@v2
     with:


@@ -41,7 +41,7 @@ jobs:
       sudo rm -fr $TEMP_PATH
       mkdir -p $TEMP_PATH
       cp -r $GITHUB_WORKSPACE $TEMP_PATH
-      cd $REPO_COPY/tests/ci && python3 compatibility_check.py 0
+      cd $REPO_COPY/tests/ci && python3 compatibility_check.py
   - name: Cleanup
     if: always()
     run: |
@@ -72,12 +72,12 @@ jobs:
       REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
       CACHES_PATH: ${{runner.temp}}/../ccaches
       CHECK_NAME: 'ClickHouse build check (actions)'
-      BUILD_NUMBER: 0
+      BUILD_NAME: 'package_release'
     run: |
       sudo rm -fr $TEMP_PATH
       mkdir -p $TEMP_PATH
       cp -r $GITHUB_WORKSPACE $TEMP_PATH
-      cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER
+      cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
   - name: Upload build URLs to artifacts
     uses: actions/upload-artifact@v2
     with:
@@ -110,12 +110,12 @@ jobs:
       REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
       CACHES_PATH: ${{runner.temp}}/../ccaches
       CHECK_NAME: 'ClickHouse build check (actions)'
-      BUILD_NUMBER: 3
+      BUILD_NAME: 'package_asan'
     run: |
       sudo rm -fr $TEMP_PATH
       mkdir -p $TEMP_PATH
       cp -r $GITHUB_WORKSPACE $TEMP_PATH
-      cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER
+      cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
   - name: Upload build URLs to artifacts
     uses: actions/upload-artifact@v2
     with:
@@ -148,12 +148,12 @@ jobs:
       REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
       CACHES_PATH: ${{runner.temp}}/../ccaches
       CHECK_NAME: 'ClickHouse build check (actions)'
-      BUILD_NUMBER: 4
+      BUILD_NAME: 'package_ubsan'
     run: |
       sudo rm -fr $TEMP_PATH
       mkdir -p $TEMP_PATH
       cp -r $GITHUB_WORKSPACE $TEMP_PATH
-      cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER
+      cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
   - name: Upload build URLs to artifacts
     uses: actions/upload-artifact@v2
     with:
@@ -186,12 +186,12 @@ jobs:
       REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
       CACHES_PATH: ${{runner.temp}}/../ccaches
       CHECK_NAME: 'ClickHouse build check (actions)'
-      BUILD_NUMBER: 5
+      BUILD_NAME: 'package_tsan'
     run: |
       sudo rm -fr $TEMP_PATH
       mkdir -p $TEMP_PATH
       cp -r $GITHUB_WORKSPACE $TEMP_PATH
-      cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER
+      cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
   - name: Upload build URLs to artifacts
     uses: actions/upload-artifact@v2
     with:
@@ -224,12 +224,12 @@ jobs:
       REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
       CACHES_PATH: ${{runner.temp}}/../ccaches
       CHECK_NAME: 'ClickHouse build check (actions)'
-      BUILD_NUMBER: 6
+      BUILD_NAME: 'package_msan'
     run: |
       sudo rm -fr $TEMP_PATH
       mkdir -p $TEMP_PATH
       cp -r $GITHUB_WORKSPACE $TEMP_PATH
-      cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER
+      cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
   - name: Upload build URLs to artifacts
     uses: actions/upload-artifact@v2
     with:
@@ -262,12 +262,12 @@ jobs:
       REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
       CACHES_PATH: ${{runner.temp}}/../ccaches
       CHECK_NAME: 'ClickHouse build check (actions)'
-      BUILD_NUMBER: 7
+      BUILD_NAME: 'package_debug'
     run: |
       sudo rm -fr $TEMP_PATH
       mkdir -p $TEMP_PATH
       cp -r $GITHUB_WORKSPACE $TEMP_PATH
-      cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER
+      cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
   - name: Upload build URLs to artifacts
     uses: actions/upload-artifact@v2
     with:

.gitmodules

@@ -17,7 +17,7 @@
 [submodule "contrib/zlib-ng"]
 	path = contrib/zlib-ng
 	url = https://github.com/ClickHouse-Extras/zlib-ng.git
-	branch = clickhouse-new
+	branch = clickhouse-2.0.x
 [submodule "contrib/googletest"]
 	path = contrib/googletest
 	url = https://github.com/google/googletest.git
@@ -135,9 +135,6 @@
 [submodule "contrib/flatbuffers"]
 	path = contrib/flatbuffers
 	url = https://github.com/ClickHouse-Extras/flatbuffers.git
-[submodule "contrib/libc-headers"]
-	path = contrib/libc-headers
-	url = https://github.com/ClickHouse-Extras/libc-headers.git
 [submodule "contrib/replxx"]
 	path = contrib/replxx
 	url = https://github.com/ClickHouse-Extras/replxx.git


@@ -223,7 +223,7 @@ if (OS_DARWIN)
     # from a _specific_ library, which is what we need.
     set(WHOLE_ARCHIVE -force_load)
     # The `-noall_load` flag is the default and now obsolete.
-    set(NO_WHOLE_ARCHIVE "")
+    set(NO_WHOLE_ARCHIVE "-undefined,error") # Effectively, a no-op. Here to avoid empty "-Wl, " sequence to be generated in the command line.
 else ()
     set(WHOLE_ARCHIVE --whole-archive)
     set(NO_WHOLE_ARCHIVE --no-whole-archive)


@@ -29,14 +29,6 @@ message(STATUS "Default libraries: ${DEFAULT_LIBS}")
 set(CMAKE_CXX_STANDARD_LIBRARIES ${DEFAULT_LIBS})
 set(CMAKE_C_STANDARD_LIBRARIES ${DEFAULT_LIBS})
 
-# glibc-compatibility library relies to constant version of libc headers
-# (because minor changes in function attributes between different glibc versions will introduce incompatibilities)
-# This is for x86_64. For other architectures we have separate toolchains.
-if (ARCH_AMD64 AND NOT CMAKE_CROSSCOMPILING)
-    set(CMAKE_C_STANDARD_INCLUDE_DIRECTORIES ${ClickHouse_SOURCE_DIR}/contrib/libc-headers/x86_64-linux-gnu ${ClickHouse_SOURCE_DIR}/contrib/libc-headers)
-    set(CMAKE_CXX_STANDARD_INCLUDE_DIRECTORIES ${ClickHouse_SOURCE_DIR}/contrib/libc-headers/x86_64-linux-gnu ${ClickHouse_SOURCE_DIR}/contrib/libc-headers)
-endif ()
-
 # Unfortunately '-pthread' doesn't work with '-nodefaultlibs'.
 # Just make sure we have pthreads at all.
 set(THREADS_PREFER_PTHREAD_FLAG ON)


@@ -22,9 +22,10 @@ if (COMPILER_GCC)
 elseif (COMPILER_CLANG)
     # Require minimum version of clang/apple-clang
     if (CMAKE_CXX_COMPILER_ID MATCHES "AppleClang")
-        # If you are developer you can figure out what exact versions of AppleClang are Ok,
-        # simply remove the following line.
-        message (FATAL_ERROR "AppleClang is not supported, you should install clang from brew. See the instruction: https://clickhouse.com/docs/en/development/build-osx/")
+        # (Experimental!) Specify "-DALLOW_APPLECLANG=ON" when running CMake configuration step, if you want to experiment with using it.
+        if (NOT ALLOW_APPLECLANG AND NOT DEFINED ENV{ALLOW_APPLECLANG})
+            message (FATAL_ERROR "AppleClang is not supported, you should install clang from brew. See the instruction: https://clickhouse.com/docs/en/development/build-osx/")
+        endif ()
 
     # AppleClang 10.0.1 (Xcode 10.2) corresponds to LLVM/Clang upstream version 7.0.0
     # AppleClang 11.0.0 (Xcode 11.0) corresponds to LLVM/Clang upstream version 8.0.0
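The new guard checks both a CMake variable and an environment variable, so either invocation below should unblock AppleClang (a sketch; the environment-variable form also appears in the macOS build docs updated later in this commit):

```bash
# Option 1: pass the CMake option suggested by the comment in the check
cmake -DALLOW_APPLECLANG=ON ..
# Option 2: set the environment variable instead
ALLOW_APPLECLANG=1 cmake ..
```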


@@ -301,9 +301,10 @@ endif()
 # instead of controlling it via CMAKE_FOLDER.
 function (ensure_target_rooted_in _target _folder)
-    # Skip INTERFACE library targets, since FOLDER property is not available for them.
+    # Skip aliases and INTERFACE library targets, since FOLDER property is not available/writable for them.
+    get_target_property (_target_aliased "${_target}" ALIASED_TARGET)
     get_target_property (_target_type "${_target}" TYPE)
-    if (_target_type STREQUAL "INTERFACE_LIBRARY")
+    if (_target_aliased OR _target_type STREQUAL "INTERFACE_LIBRARY")
         return ()
     endif ()

@@ -1 +0,0 @@
-Subproject commit aa5429bf67a346e48ad60efd88bcefc286644bf3


@@ -73,6 +73,11 @@ target_compile_options(cxx PRIVATE -w)
 target_link_libraries(cxx PUBLIC cxxabi)
 
+# For __udivmodti4, __divmodti4.
+if (OS_DARWIN AND COMPILER_GCC)
+    target_link_libraries(cxx PRIVATE gcc)
+endif ()
+
 install(
     TARGETS cxx
     EXPORT global


@@ -28,11 +28,16 @@ set (SRCS
     ${SRC_DIR}/src/sentry_unix_pageallocator.c
     ${SRC_DIR}/src/path/sentry_path_unix.c
     ${SRC_DIR}/src/symbolizer/sentry_symbolizer_unix.c
-    ${SRC_DIR}/src/modulefinder/sentry_modulefinder_linux.c
     ${SRC_DIR}/src/transports/sentry_transport_curl.c
     ${SRC_DIR}/src/backends/sentry_backend_none.c
 )
 
+if(APPLE)
+    list(APPEND SRCS ${SRC_DIR}/src/modulefinder/sentry_modulefinder_apple.c)
+else()
+    list(APPEND SRCS ${SRC_DIR}/src/modulefinder/sentry_modulefinder_linux.c)
+endif()
+
 add_library(sentry ${SRCS})
 add_library(sentry::sentry ALIAS sentry)


@@ -6,4 +6,6 @@ add_library(simdjson ${SIMDJSON_SRC})
 target_include_directories(simdjson SYSTEM PUBLIC "${SIMDJSON_INCLUDE_DIR}" PRIVATE "${SIMDJSON_SRC_DIR}")
 
 # simdjson is using its own CPU dispatching and get confused if we enable AVX/AVX2 flags.
-target_compile_options(simdjson PRIVATE -mno-avx -mno-avx2)
+if(ARCH_AMD64)
+    target_compile_options(simdjson PRIVATE -mno-avx -mno-avx2)
+endif()

contrib/zlib-ng

@@ -1 +1 @@
-Subproject commit 6a5e93b9007782115f7f7e5235dedc81c4f1facb
+Subproject commit bffad6f6fe74d6a2f92e2668390664a926c68733


@@ -1,8 +1,10 @@
 FROM ubuntu:20.04
-ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=13
-RUN sed -i 's|http://archive|http://ru.archive|g' /etc/apt/sources.list
+# ARG for quick switch to a given ubuntu mirror
+ARG apt_archive="http://archive.ubuntu.com"
+RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list
+
+ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=13
 
 RUN apt-get update \
     && apt-get install ca-certificates lsb-release wget gnupg apt-transport-https \
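The same `apt_archive` ARG is threaded through every Dockerfile below; presumably it is exercised at build time roughly like this (image tag hypothetical):

```bash
# Point apt at a nearby mirror without editing the Dockerfile,
# e.g. restoring the previously hard-coded Russian mirror:
docker build --build-arg apt_archive="http://ru.archive.ubuntu.com" -t clickhouse/some-image .
```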


@@ -1,10 +1,12 @@
 FROM ubuntu:18.04
+
+# ARG for quick switch to a given ubuntu mirror
+ARG apt_archive="http://archive.ubuntu.com"
+RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list
 
 ARG repository="deb https://repo.clickhouse.com/deb/stable/ main/"
 ARG version=21.12.1.*
-RUN sed -i 's|http://archive|http://ru.archive|g' /etc/apt/sources.list
-
 RUN apt-get update \
     && apt-get install --yes --no-install-recommends \
         apt-transport-https \


@@ -1,9 +1,11 @@
 # docker build -t clickhouse/docs-build .
 FROM ubuntu:20.04
-ENV LANG=C.UTF-8
-RUN sed -i 's|http://archive|http://ru.archive|g' /etc/apt/sources.list
+# ARG for quick switch to a given ubuntu mirror
+ARG apt_archive="http://archive.ubuntu.com"
+RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list
+
+ENV LANG=C.UTF-8
 
 RUN apt-get update \
     && DEBIAN_FRONTEND=noninteractive apt-get install --yes --no-install-recommends \


@@ -1,9 +1,11 @@
 # docker build -t clickhouse/binary-builder .
 FROM ubuntu:20.04
-ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=13
-RUN sed -i 's|http://archive|http://ru.archive|g' /etc/apt/sources.list
+# ARG for quick switch to a given ubuntu mirror
+ARG apt_archive="http://archive.ubuntu.com"
+RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list
+
+ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=13
 
 RUN apt-get update \
     && apt-get install \


@@ -1,9 +1,11 @@
 # docker build -t clickhouse/deb-builder .
 FROM ubuntu:20.04
-ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=13
-RUN sed -i 's|http://archive|http://ru.archive|g' /etc/apt/sources.list
+# ARG for quick switch to a given ubuntu mirror
+ARG apt_archive="http://archive.ubuntu.com"
+RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list
+
+ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=13
 
 RUN apt-get update \
     && apt-get install ca-certificates lsb-release wget gnupg apt-transport-https \


@@ -1,5 +1,9 @@
 FROM ubuntu:20.04
+
+# ARG for quick switch to a given ubuntu mirror
+ARG apt_archive="http://archive.ubuntu.com"
+RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list
 
 ARG repository="deb https://repo.clickhouse.com/deb/stable/ main/"
 ARG version=21.12.1.*
 ARG gosu_ver=1.10
@@ -26,8 +30,6 @@ ARG DEBIAN_FRONTEND=noninteractive
 # installed to prevent picking those uid / gid by some unrelated software.
 # The same uid / gid (101) is used both for alpine and ubuntu.
-RUN sed -i 's|http://archive|http://ru.archive|g' /etc/apt/sources.list
-
 RUN groupadd -r clickhouse --gid=101 \
     && useradd -r -g clickhouse --uid=101 --home-dir=/var/lib/clickhouse --shell=/bin/bash clickhouse \
     && apt-get update \


@@ -1,9 +1,11 @@
 # docker build -t clickhouse/test-base .
 FROM clickhouse/test-util
-ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=13
-RUN sed -i 's|http://archive|http://ru.archive|g' /etc/apt/sources.list
+# ARG for quick switch to a given ubuntu mirror
+ARG apt_archive="http://archive.ubuntu.com"
+RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list
+
+ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=13
 
 RUN apt-get update \
     && apt-get install ca-certificates lsb-release wget gnupg apt-transport-https \


@@ -2,7 +2,9 @@
 # docker run --volume=path_to_repo:/repo_folder --volume=path_to_result:/test_output clickhouse/codebrowser
 FROM clickhouse/binary-builder
 
-RUN sed -i 's|http://archive|http://ru.archive|g' /etc/apt/sources.list
+# ARG for quick switch to a given ubuntu mirror
+ARG apt_archive="http://archive.ubuntu.com"
+RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list
 
 RUN apt-get update && apt-get --yes --allow-unauthenticated install clang-9 libllvm9 libclang-9-dev


@@ -1,9 +1,11 @@
 # docker build -t clickhouse/fasttest .
 FROM clickhouse/test-util
-ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=13
-RUN sed -i 's|http://archive|http://ru.archive|g' /etc/apt/sources.list
+# ARG for quick switch to a given ubuntu mirror
+ARG apt_archive="http://archive.ubuntu.com"
+RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list
+
+ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=13
 
 RUN apt-get update \
     && apt-get install ca-certificates lsb-release wget gnupg apt-transport-https \


@@ -174,7 +174,6 @@ function clone_submodules
     contrib/double-conversion
     contrib/libcxx
     contrib/libcxxabi
-    contrib/libc-headers
     contrib/lz4
     contrib/zstd
     contrib/fastops


@@ -1,12 +1,14 @@
 # docker build -t clickhouse/fuzzer .
 FROM clickhouse/test-base
+
+# ARG for quick switch to a given ubuntu mirror
+ARG apt_archive="http://archive.ubuntu.com"
+RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list
 
 ENV LANG=C.UTF-8
 ENV TZ=Europe/Moscow
 RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
 
-RUN sed -i 's|http://archive|http://ru.archive|g' /etc/apt/sources.list
-
 RUN apt-get update \
     && DEBIAN_FRONTEND=noninteractive apt-get install --yes --no-install-recommends \
         ca-certificates \


@@ -1,7 +1,9 @@
 # docker build -t clickhouse/integration-tests-runner .
 FROM ubuntu:20.04
 
-RUN sed -i 's|http://archive|http://ru.archive|g' /etc/apt/sources.list
+# ARG for quick switch to a given ubuntu mirror
+ARG apt_archive="http://archive.ubuntu.com"
+RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list
 
 RUN apt-get update \
     && env DEBIAN_FRONTEND=noninteractive apt-get install --yes \


@@ -1,12 +1,14 @@
 # docker build -t clickhouse/performance-comparison .
 FROM ubuntu:18.04
+
+# ARG for quick switch to a given ubuntu mirror
+ARG apt_archive="http://archive.ubuntu.com"
+RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list
 
 ENV LANG=C.UTF-8
 ENV TZ=Europe/Moscow
 RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
 
-RUN sed -i 's|http://archive|http://ru.archive|g' /etc/apt/sources.list
-
 RUN apt-get update \
     && DEBIAN_FRONTEND=noninteractive apt-get install --yes --no-install-recommends \
         bash \


@@ -1,7 +1,9 @@
 # docker build -t clickhouse/sqlancer-test .
 FROM ubuntu:20.04
 
-RUN sed -i 's|http://archive|http://ru.archive|g' /etc/apt/sources.list
+# ARG for quick switch to a given ubuntu mirror
+ARG apt_archive="http://archive.ubuntu.com"
+RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list
 
 RUN apt-get update --yes && env DEBIAN_FRONTEND=noninteractive apt-get install wget unzip git default-jdk maven python3 --yes --no-install-recommends
 RUN wget https://github.com/sqlancer/sqlancer/archive/master.zip -O /sqlancer.zip


@@ -1,7 +1,9 @@
 # docker build -t clickhouse/style-test .
 FROM ubuntu:20.04
 
-RUN sed -i 's|http://archive|http://ru.archive|g' /etc/apt/sources.list
+# ARG for quick switch to a given ubuntu mirror
+ARG apt_archive="http://archive.ubuntu.com"
+RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list
 
 RUN apt-get update && env DEBIAN_FRONTEND=noninteractive apt-get install --yes \
     shellcheck \


@@ -1,7 +1,9 @@
 # docker build -t clickhouse/testflows-runner .
 FROM ubuntu:20.04
 
-RUN sed -i 's|http://archive|http://ru.archive|g' /etc/apt/sources.list
+# ARG for quick switch to a given ubuntu mirror
+ARG apt_archive="http://archive.ubuntu.com"
+RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list
 
 RUN apt-get update \
     && env DEBIAN_FRONTEND=noninteractive apt-get install --yes \


@@ -3,15 +3,14 @@ toc_priority: 65
 toc_title: Build on Mac OS X
 ---
 
-# You don't have to build ClickHouse
-
-You can install ClickHouse as follows: https://clickhouse.com/#quick-start
-Choose Mac x86 or M1.
-
 # How to Build ClickHouse on Mac OS X {#how-to-build-clickhouse-on-mac-os-x}
 
-Build should work on x86_64 (Intel) and arm64 (Apple Silicon) based macOS 10.15 (Catalina) and higher with Homebrew's vanilla Clang.
-It is always recommended to use `clang` compiler. It is possible to use XCode's `AppleClang` or `gcc` but it's strongly discouraged.
+!!! info "You don't have to build ClickHouse yourself!"
+    You can install pre-built ClickHouse as described in [Quick Start](https://clickhouse.com/#quick-start).
+    Follow `macOS (Intel)` or `macOS (Apple silicon)` installation instructions.
+
+Build should work on x86_64 (Intel) and arm64 (Apple silicon) based macOS 10.15 (Catalina) and higher with Homebrew's vanilla Clang.
+It is always recommended to use vanilla `clang` compiler. It is possible to use XCode's `apple-clang` or `gcc` but it's strongly discouraged.
 
 ## Install Homebrew {#install-homebrew}
@@ -33,8 +32,6 @@ sudo rm -rf /Library/Developer/CommandLineTools
 sudo xcode-select --install
 ```
 
-Reboot.
-
 ## Install Required Compilers, Tools, and Libraries {#install-required-compilers-tools-and-libraries}
 
 ``` bash
@@ -51,40 +48,41 @@ git clone --recursive git@github.com:ClickHouse/ClickHouse.git
 ## Build ClickHouse {#build-clickhouse}
 
-To build using Homebrew's vanilla Clang compiler:
+To build using Homebrew's vanilla Clang compiler (the only **recommended** way):
 
 ``` bash
 cd ClickHouse
 rm -rf build
 mkdir build
 cd build
-cmake -DCMAKE_C_COMPILER=$(brew --prefix llvm)/bin/clang -DCMAKE_CXX_COMPILER=$(brew --prefix llvm)/bin/clang++ -DCMAKE_BUILD_TYPE=RelWithDebInfo ..
+cmake -DCMAKE_C_COMPILER=$(brew --prefix llvm)/bin/clang -DCMAKE_CXX_COMPILER=$(brew --prefix llvm)/bin/clang++ -DCMAKE_AR=$(brew --prefix llvm)/bin/llvm-ar -DCMAKE_RANLIB=$(brew --prefix llvm)/bin/llvm-ranlib -DOBJCOPY_PATH=$(brew --prefix llvm)/bin/llvm-objcopy -DCMAKE_BUILD_TYPE=RelWithDebInfo ..
 cmake --build . --config RelWithDebInfo
-cd ..
+# The resulting binary will be created at: ./programs/clickhouse
 ```
 
-To build using Xcode's native AppleClang compiler (this option is strongly not recommended; use the option above):
+To build using Xcode's native AppleClang compiler in Xcode IDE (this option is only for development builds and workflows, and is **not recommended** unless you know what you are doing):
 
 ``` bash
 cd ClickHouse
 rm -rf build
 mkdir build
 cd build
-cmake -DCMAKE_BUILD_TYPE=RelWithDebInfo ..
-cmake --build . --config RelWithDebInfo
-cd ..
+XCODE_IDE=1 ALLOW_APPLECLANG=1 cmake -G Xcode -DCMAKE_BUILD_TYPE=Debug -DENABLE_JEMALLOC=OFF ..
+cmake --open .
+# ...then, in Xcode IDE select ALL_BUILD scheme and start the building process.
+# The resulting binary will be created at: ./programs/Debug/clickhouse
 ```
 
-To build using Homebrew's vanilla GCC compiler (this option is absolutely not recommended, I'm wondering why do we ever have it):
+To build using Homebrew's vanilla GCC compiler (this option is only for development experiments, and is **absolutely not recommended** unless you really know what you are doing):
 
 ``` bash
 cd ClickHouse
 rm -rf build
 mkdir build
 cd build
-cmake -DCMAKE_C_COMPILER=$(brew --prefix gcc)/bin/gcc-11 -DCMAKE_CXX_COMPILER=$(brew --prefix gcc)/bin/g++-11 -DCMAKE_BUILD_TYPE=RelWithDebInfo ..
+cmake -DCMAKE_C_COMPILER=$(brew --prefix gcc)/bin/gcc-11 -DCMAKE_CXX_COMPILER=$(brew --prefix gcc)/bin/g++-11 -DCMAKE_AR=$(brew --prefix gcc)/bin/gcc-ar-11 -DCMAKE_RANLIB=$(brew --prefix gcc)/bin/gcc-ranlib-11 -DOBJCOPY_PATH=$(brew --prefix binutils)/bin/objcopy -DCMAKE_BUILD_TYPE=RelWithDebInfo ..
 cmake --build . --config RelWithDebInfo
-cd ..
+# The resulting binary will be created at: ./programs/clickhouse
 ```
 
 ## Caveats {#caveats}
@@ -140,9 +138,9 @@ sudo launchctl load -w /Library/LaunchDaemons/limit.maxfiles.plist
 To check if it's working, use the `ulimit -n` or `launchctl limit maxfiles` commands.
 
-## Run ClickHouse server:
+## Running ClickHouse server
 
-```
+``` bash
 cd ClickHouse
 ./build/programs/clickhouse-server --config-file ./programs/server/config.xml
 ```


@@ -133,8 +133,7 @@ Example:
 SELECT level, sum(total) FROM daily GROUP BY level;
 ```
 
-To improve performance, received messages are grouped into blocks the size of [max_insert_block_size](../../../operations/settings/settings/#settings-max_insert_block_size). If the block wasn't formed within [stream_flush_interval_ms](../../../operations/settings/settings/#stream-flush-interval-ms) milliseconds, the data will be flushed to the table regardless of the completeness of the block.
+To improve performance, received messages are grouped into blocks the size of [max_insert_block_size](../../../operations/server-configuration-parameters/settings.md#settings-max_insert_block_size). If the block wasn't formed within [stream_flush_interval_ms](../../../operations/server-configuration-parameters/settings.md) milliseconds, the data will be flushed to the table regardless of the completeness of the block.
 
 To stop receiving topic data or to change the conversion logic, detach the materialized view:
@@ -192,6 +191,6 @@ Example:
 **See Also**
 
 - [Virtual columns](../../../engines/table-engines/index.md#table_engines-virtual_columns)
-- [background_schedule_pool_size](../../../operations/settings/settings.md#background_schedule_pool_size)
+- [background_message_broker_schedule_pool_size](../../../operations/settings/settings.md#background_message_broker_schedule_pool_size)
 
 [Original article](https://clickhouse.com/docs/en/engines/table-engines/integrations/kafka/) <!--hide-->
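The detach step mentioned in the hunk above looks roughly like this (a sketch; `consumer` is a hypothetical materialized-view name):

```sql
DETACH TABLE consumer;
-- adjust the target table or the conversion logic here
ATTACH TABLE consumer;
```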


@@ -24,6 +24,8 @@ The supported formats are:
 | [CSVWithNames](#csvwithnames) | ✔ | ✔ |
 | [CSVWithNamesAndTypes](#csvwithnamesandtypes) | ✔ | ✔ |
 | [CustomSeparated](#format-customseparated) | ✔ | ✔ |
+| [CustomSeparatedWithNames](#customseparatedwithnames) | ✔ | ✔ |
+| [CustomSeparatedWithNamesAndTypes](#customseparatedwithnamesandtypes) | ✔ | ✔ |
 | [Values](#data-format-values) | ✔ | ✔ |
 | [Vertical](#vertical) | ✗ | ✔ |
 | [JSON](#json) | ✗ | ✔ |
@@ -429,8 +431,17 @@ Also prints two header rows with column names and types, similar to [TabSeparated
 ## CustomSeparated {#format-customseparated}
 
-Similar to [Template](#format-template), but it prints or reads all columns and uses escaping rule from setting `format_custom_escaping_rule` and delimiters from settings `format_custom_field_delimiter`, `format_custom_row_before_delimiter`, `format_custom_row_after_delimiter`, `format_custom_row_between_delimiter`, `format_custom_result_before_delimiter` and `format_custom_result_after_delimiter`, not from format strings.
+Similar to [Template](#format-template), but it prints or reads all names and types of columns and uses escaping rule from [format_custom_escaping_rule](../operations/settings/settings.md#format-custom-escaping-rule) setting and delimiters from [format_custom_field_delimiter](../operations/settings/settings.md#format-custom-field-delimiter), [format_custom_row_before_delimiter](../operations/settings/settings.md#format-custom-row-before-delimiter), [format_custom_row_after_delimiter](../operations/settings/settings.md#format-custom-row-after-delimiter), [format_custom_row_between_delimiter](../operations/settings/settings.md#format-custom-row-between-delimiter), [format_custom_result_before_delimiter](../operations/settings/settings.md#format-custom-result-before-delimiter) and [format_custom_result_after_delimiter](../operations/settings/settings.md#format-custom-result-after-delimiter) settings, not from format strings.
 
-There is also `CustomSeparatedIgnoreSpaces` format, which is similar to `TemplateIgnoreSpaces`.
+There is also `CustomSeparatedIgnoreSpaces` format, which is similar to [TemplateIgnoreSpaces](#templateignorespaces).
+
+## CustomSeparatedWithNames {#customseparatedwithnames}
+
+Also prints the header row with column names, similar to [TabSeparatedWithNames](#tabseparatedwithnames).
+
+## CustomSeparatedWithNamesAndTypes {#customseparatedwithnamesandtypes}
+
+Also prints two header rows with column names and types, similar to [TabSeparatedWithNamesAndTypes](#tabseparatedwithnamesandtypes).
 
 ## JSON {#json}
@@ -1537,12 +1548,15 @@ Each line of imported data is parsed according to the regular expression.
 When working with the `Regexp` format, you can use the following settings:
 
 - `format_regexp` — [String](../sql-reference/data-types/string.md). Contains regular expression in the [re2](https://github.com/google/re2/wiki/Syntax) format.
 - `format_regexp_escaping_rule` — [String](../sql-reference/data-types/string.md). The following escaping rules are supported:
     - CSV (similarly to [CSV](#csv))
     - JSON (similarly to [JSONEachRow](#jsoneachrow))
     - Escaped (similarly to [TSV](#tabseparated))
     - Quoted (similarly to [Values](#data-format-values))
-    - Raw (extracts subpatterns as a whole, no escaping rules)
+    - Raw (extracts subpatterns as a whole, no escaping rules, similarly to [TSVRaw](#tabseparatedraw))
 - `format_regexp_skip_unmatched` — [UInt8](../sql-reference/data-types/int-uint.md). Defines the need to throw an exception in case the `format_regexp` expression does not match the imported data. Can be set to `0` or `1`.
 
 **Usage**
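As a quick illustration of the new formats together with the settings they read (a sketch, not taken from the commit):

```sql
SET format_custom_escaping_rule = 'CSV',
    format_custom_field_delimiter = ';';

SELECT number AS n, toString(number) AS s
FROM numbers(2)
FORMAT CustomSeparatedWithNames;
-- Expected shape: a header row with the column names "n" and "s",
-- then the data rows, with ';' between fields and CSV-style escaping.
```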


@@ -69,7 +69,6 @@ If no conditions met for a data part, ClickHouse uses the `lz4` compression.
 </compression>
 ```
 
-<!--
 ## encryption {#server-settings-encryption}
 
 Configures a command to obtain a key to be used by [encryption codecs](../../sql-reference/statements/create/table.md#create-query-encryption-codecs). Key (or keys) should be written in environment variables or set in the configuration file.
@@ -150,7 +148,6 @@ Or it can be set in hex:
 Everything mentioned above can be applied for `aes_256_gcm_siv` (but the key must be 32 bytes long).
--->
 
 ## custom_settings_prefixes {#custom_settings_prefixes}


@@ -4071,3 +4071,54 @@ Possible values:
 - 0 — Big files read with only copying data from kernel to userspace.
 
 Default value: `0`.
+
+## format_custom_escaping_rule {#format-custom-escaping-rule}
+
+Sets the field escaping rule for [CustomSeparated](../../interfaces/formats.md#format-customseparated) data format.
+
+Possible values:
+
+- `'Escaped'` — Similarly to [TSV](../../interfaces/formats.md#tabseparated).
+- `'Quoted'` — Similarly to [Values](../../interfaces/formats.md#data-format-values).
+- `'CSV'` — Similarly to [CSV](../../interfaces/formats.md#csv).
+- `'JSON'` — Similarly to [JSONEachRow](../../interfaces/formats.md#jsoneachrow).
+- `'XML'` — Similarly to [XML](../../interfaces/formats.md#xml).
+- `'Raw'` — Extracts subpatterns as a whole, no escaping rules, similarly to [TSVRaw](../../interfaces/formats.md#tabseparatedraw).
+
+Default value: `'Escaped'`.
+
+## format_custom_field_delimiter {#format-custom-field-delimiter}
+
+Sets the character that is interpreted as a delimiter between the fields for [CustomSeparated](../../interfaces/formats.md#format-customseparated) data format.
+
+Default value: `'\t'`.
+
+## format_custom_row_before_delimiter {#format-custom-row-before-delimiter}
+
+Sets the character that is interpreted as a delimiter before the field of the first column for [CustomSeparated](../../interfaces/formats.md#format-customseparated) data format.
+
+Default value: `''`.
+
+## format_custom_row_after_delimiter {#format-custom-row-after-delimiter}
+
+Sets the character that is interpreted as a delimiter after the field of the last column for [CustomSeparated](../../interfaces/formats.md#format-customseparated) data format.
+
+Default value: `'\n'`.
+
+## format_custom_row_between_delimiter {#format-custom-row-between-delimiter}
+
+Sets the character that is interpreted as a delimiter between the rows for [CustomSeparated](../../interfaces/formats.md#format-customseparated) data format.
+
+Default value: `''`.
+
+## format_custom_result_before_delimiter {#format-custom-result-before-delimiter}
+
+Sets the character that is interpreted as a prefix before the result set for [CustomSeparated](../../interfaces/formats.md#format-customseparated) data format.
+
+Default value: `''`.
+
+## format_custom_result_after_delimiter {#format-custom-result-after-delimiter}
+
+Sets the character that is interpreted as a suffix after the result set for [CustomSeparated](../../interfaces/formats.md#format-customseparated) data format.
+
+Default value: `''`.
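A sketch combining several of these delimiters (illustrative values only; the exact rendering depends on the escaping rule and delimiter settings):

```sql
SET format_custom_escaping_rule = 'Quoted',
    format_custom_row_before_delimiter = '<row>',
    format_custom_row_after_delimiter = '</row>\n',
    format_custom_result_before_delimiter = '<result>\n',
    format_custom_result_after_delimiter = '</result>\n';

SELECT number FROM numbers(2) FORMAT CustomSeparated;
-- Roughly: <result>, then <row>0</row> and <row>1</row> one per line, then </result>.
```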


@@ -10,6 +10,7 @@ Allows formatting input queries.
 Keys:
 
 - `--help` or `-h` — Produce help message.
+- `--query` — Format queries of any length and complexity.
 - `--hilite` — Add syntax highlight with ANSI terminal escape sequences.
 - `--oneline` — Format in single line.
 - `--quiet` or `-q` — Just check syntax, no output on success.
@@ -20,7 +21,22 @@ Keys:
 ## Examples {#examples}
 
-1. Highlighting and single line:
+1. Formatting a query:
+
+```bash
+$ clickhouse-format --query "select number from numbers(10) where number%2 order by number desc;"
+```
+
+Result:
+
+```text
+SELECT number
+FROM numbers(10)
+WHERE number % 2
+ORDER BY number DESC
+```
+
+2. Highlighting and single line:
 
 ```bash
 $ clickhouse-format --oneline --hilite <<< "SELECT sum(number) FROM numbers(5);"
@@ -32,7 +48,7 @@ Result:
 SELECT sum(number) FROM numbers(5)
 ```
 
-2. Multiqueries:
+3. Multiqueries:
 
 ```bash
 $ clickhouse-format -n <<< "SELECT * FROM (SELECT 1 AS x UNION ALL SELECT 1 UNION DISTINCT SELECT 3);"
@@ -53,7 +69,7 @@ FROM
 ;
 ```
 
-3. Obfuscating:
+4. Obfuscating:
 
 ```bash
 $ clickhouse-format --seed Hello --obfuscate <<< "SELECT cost_first_screen BETWEEN a AND b, CASE WHEN x >= 123 THEN y ELSE NULL END;"
@@ -77,7 +93,7 @@ Result:
 SELECT horse_tape_summer BETWEEN folklore AND moccasins, CASE WHEN intestine >= 116 THEN nonconformist ELSE FORESTRY END;
 ```
 
-4. Adding backslash:
+5. Adding backslash:
 
 ```bash
 $ clickhouse-format --backslash <<< "SELECT * FROM (SELECT 1 AS x UNION ALL SELECT 1 UNION DISTINCT SELECT 3);"


@@ -0,0 +1,148 @@
---
toc_priority: 108
---
## exponentialMovingAverage {#exponential-moving-average}
Calculates the exponential moving average of values for a given period of time.
**Syntax**
```sql
exponentialMovingAverage(x)(value, timestamp)
```
Each `value` corresponds to a particular `timestamp`. The half-life `x` is the time lag at which the exponential weights decay by one-half. The function returns a weighted average: the older the time point, the less weight the corresponding value carries.
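Spelling out the half-life statement above as a formula (an illustration of the weighting only, not necessarily the exact implementation):

$$ w_i \propto 2^{-(t_{\mathrm{latest}} - t_i)/x} $$

so a point that is exactly `x` time units older than the latest timestamp contributes half the weight of the latest point.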
**Arguments**
- `value` — Value. [Integer](../../../sql-reference/data-types/int-uint.md), [Float](../../../sql-reference/data-types/float.md) or [Decimal](../../../sql-reference/data-types/decimal.md).
- `timestamp` — Timestamp. [Integer](../../../sql-reference/data-types/int-uint.md), [Float](../../../sql-reference/data-types/float.md) or [Decimal](../../../sql-reference/data-types/decimal.md).
**Parameters**
- `x` — Half-life period. [Integer](../../../sql-reference/data-types/int-uint.md), [Float](../../../sql-reference/data-types/float.md) or [Decimal](../../../sql-reference/data-types/decimal.md).
**Returned values**
- Returns an [exponentially smoothed moving average](https://en.wikipedia.org/wiki/Moving_average#Exponential_moving_average) of the values over the past `x` time units at the latest point in time.
Type: [Float64](../../../sql-reference/data-types/float.md#float32-float64).
**Examples**
Input table:
``` text
┌──temperature─┬─timestamp──┐
│ 95 │ 1 │
│ 95 │ 2 │
│ 95 │ 3 │
│ 96 │ 4 │
│ 96 │ 5 │
│ 96 │ 6 │
│ 96 │ 7 │
│ 97 │ 8 │
│ 97 │ 9 │
│ 97 │ 10 │
│ 97 │ 11 │
│ 98 │ 12 │
│ 98 │ 13 │
│ 98 │ 14 │
│ 98 │ 15 │
│ 99 │ 16 │
│ 99 │ 17 │
│ 99 │ 18 │
│ 100 │ 19 │
│ 100 │ 20 │
└──────────────┴────────────┘
```
Query:
```sql
SELECT exponentialMovingAverage(5)(temperature, timestamp);
```
Result:
``` text
┌──exponentialMovingAverage(5)(temperature, timestamp)──┐
│ 92.25779635374204 │
└───────────────────────────────────────────────────────┘
```
Query:
```sql
SELECT
value,
time,
round(exp_smooth, 3),
bar(exp_smooth, 0, 1, 50) AS bar
FROM
(
SELECT
(number = 0) OR (number >= 25) AS value,
number AS time,
exponentialMovingAverage(10)(value, time) OVER (Rows BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS exp_smooth
FROM numbers(50)
)
```
Result:
``` text
┌─value─┬─time─┬─round(exp_smooth, 3)─┬─bar────────────────────────────────────────┐
│ 1 │ 0 │ 0.067 │ ███▎ │
│ 0 │ 1 │ 0.062 │ ███ │
│ 0 │ 2 │ 0.058 │ ██▊ │
│ 0 │ 3 │ 0.054 │ ██▋ │
│ 0 │ 4 │ 0.051 │ ██▌ │
│ 0 │ 5 │ 0.047 │ ██▎ │
│ 0 │ 6 │ 0.044 │ ██▏ │
│ 0 │ 7 │ 0.041 │ ██ │
│ 0 │ 8 │ 0.038 │ █▊ │
│ 0 │ 9 │ 0.036 │ █▋ │
│ 0 │ 10 │ 0.033 │ █▋ │
│ 0 │ 11 │ 0.031 │ █▌ │
│ 0 │ 12 │ 0.029 │ █▍ │
│ 0 │ 13 │ 0.027 │ █▎ │
│ 0 │ 14 │ 0.025 │ █▎ │
│ 0 │ 15 │ 0.024 │ █▏ │
│ 0 │ 16 │ 0.022 │ █ │
│ 0 │ 17 │ 0.021 │ █ │
│ 0 │ 18 │ 0.019 │ ▊ │
│ 0 │ 19 │ 0.018 │ ▊ │
│ 0 │ 20 │ 0.017 │ ▋ │
│ 0 │ 21 │ 0.016 │ ▋ │
│ 0 │ 22 │ 0.015 │ ▋ │
│ 0 │ 23 │ 0.014 │ ▋ │
│ 0 │ 24 │ 0.013 │ ▋ │
│ 1 │ 25 │ 0.079 │ ███▊ │
│ 1 │ 26 │ 0.14 │ ███████ │
│ 1 │ 27 │ 0.198 │ █████████▊ │
│ 1 │ 28 │ 0.252 │ ████████████▌ │
│ 1 │ 29 │ 0.302 │ ███████████████ │
│ 1 │ 30 │ 0.349 │ █████████████████▍ │
│ 1 │ 31 │ 0.392 │ ███████████████████▌ │
│ 1 │ 32 │ 0.433 │ █████████████████████▋ │
│ 1 │ 33 │ 0.471 │ ███████████████████████▌ │
│ 1 │ 34 │ 0.506 │ █████████████████████████▎ │
│ 1 │ 35 │ 0.539 │ ██████████████████████████▊ │
│ 1 │ 36 │ 0.57 │ ████████████████████████████▌ │
│ 1 │ 37 │ 0.599 │ █████████████████████████████▊ │
│ 1 │ 38 │ 0.626 │ ███████████████████████████████▎ │
│ 1 │ 39 │ 0.651 │ ████████████████████████████████▌ │
│ 1 │ 40 │ 0.674 │ █████████████████████████████████▋ │
│ 1 │ 41 │ 0.696 │ ██████████████████████████████████▋ │
│ 1 │ 42 │ 0.716 │ ███████████████████████████████████▋ │
│ 1 │ 43 │ 0.735 │ ████████████████████████████████████▋ │
│ 1 │ 44 │ 0.753 │ █████████████████████████████████████▋ │
│ 1 │ 45 │ 0.77 │ ██████████████████████████████████████▍ │
│ 1 │ 46 │ 0.785 │ ███████████████████████████████████████▎ │
│ 1 │ 47 │ 0.8 │ ███████████████████████████████████████▊ │
│ 1 │ 48 │ 0.813 │ ████████████████████████████████████████▋ │
│ 1 │ 49 │ 0.825 │ █████████████████████████████████████████▎│
└───────┴──────┴──────────────────────┴────────────────────────────────────────────┘
```
View File
@ -89,9 +89,39 @@ SELECT sipHash64(array('e','x','a'), 'mple', 10, toDateTime('2019-06-15 23:00:00
## sipHash128 {#hash_functions-siphash128} ## sipHash128 {#hash_functions-siphash128}
Calculates SipHash from a string. Produces a 128-bit [SipHash](https://131002.net/siphash/) hash value. Differs from [sipHash64](#hash_functions-siphash64) in that the final xor-folding of the state is done up to 128 bits.
Accepts a String-type argument. Returns FixedString(16).
Differs from sipHash64 in that the final xor-folding of the state is only done up to 128 bits. **Syntax**
``` sql
sipHash128(par1,...)
```
**Arguments**
The function takes a variable number of input parameters. Arguments can be any of the [supported data types](../../sql-reference/data-types/index.md).
**Returned value**
A 128-bit `SipHash` hash value.
Type: [FixedString(16)](../../sql-reference/data-types/fixedstring.md).
**Example**
Query:
``` sql
SELECT hex(sipHash128('foo', '\x01', 3));
```
Result:
``` text
┌─hex(sipHash128('foo', '', 3))────┐
│ 9DE516A64A414D4B1B609415E4523F24 │
└──────────────────────────────────┘
```
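To make the size difference with [sipHash64](#hash_functions-siphash64) concrete, here is a small comparison query (a sketch; the exact digests are illustrative, but the lengths are not):

```sql
SELECT
    hex(sipHash64('abc'))  AS h64,  -- 16 hex characters = 64 bits
    hex(sipHash128('abc')) AS h128; -- 32 hex characters = 128 bits
```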
## cityHash64 {#cityhash64} ## cityHash64 {#cityhash64}
@ -459,28 +489,36 @@ SELECT murmurHash3_32(array('e','x','a'), 'mple', 10, toDateTime('2019-06-15 23:
Produces a 128-bit [MurmurHash3](https://github.com/aappleby/smhasher) hash value. Produces a 128-bit [MurmurHash3](https://github.com/aappleby/smhasher) hash value.
**Syntax**
``` sql ``` sql
murmurHash3_128( expr ) murmurHash3_128(expr)
``` ```
**Arguments** **Arguments**
- `expr`[Expressions](../../sql-reference/syntax.md#syntax-expressions) returning a [String](../../sql-reference/data-types/string.md)-type value. - `expr`A list of [expressions](../../sql-reference/syntax.md#syntax-expressions). [String](../../sql-reference/data-types/string.md).
**Returned Value** **Returned value**
A [FixedString(16)](../../sql-reference/data-types/fixedstring.md) data type hash value. A 128-bit `MurmurHash3` hash value.
Type: [FixedString(16)](../../sql-reference/data-types/fixedstring.md).
**Example** **Example**
Query:
``` sql ``` sql
SELECT hex(murmurHash3_128('example_string')) AS MurmurHash3, toTypeName(MurmurHash3) AS type; SELECT hex(murmurHash3_128('foo', 'foo', 'foo'));
``` ```
Result:
``` text ``` text
┌─MurmurHash3──────────────────────┬─type───┐ ┌─hex(murmurHash3_128('foo', 'foo', 'foo'))─┐
368A1A311CB7342253354B548E7E7E71 │ String F8F7AD9B6CD4CF117A71E277E2EC2931
└──────────────────────────────────────────┘ └──────────────────────────────────────────┘
``` ```
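Because the arguments are combined in order, swapping them should change the digest; a quick sanity check (a hypothetical sketch, not part of the original reference):

```sql
SELECT
    hex(murmurHash3_128('a', 'b')) AS ab,
    hex(murmurHash3_128('b', 'a')) AS ba,
    ab = ba AS same; -- expected: same = 0, argument order matters
```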
## xxHash32, xxHash64 {#hash-functions-xxhash32} ## xxHash32, xxHash64 {#hash-functions-xxhash32}
View File
@ -20,6 +20,8 @@ ClickHouse can accept (`INSERT`) and return (`SELECT
| [CSV](#csv) | ✔ | ✔ | | [CSV](#csv) | ✔ | ✔ |
| [CSVWithNames](#csvwithnames) | ✔ | ✔ | | [CSVWithNames](#csvwithnames) | ✔ | ✔ |
| [CustomSeparated](#format-customseparated) | ✔ | ✔ | | [CustomSeparated](#format-customseparated) | ✔ | ✔ |
| [CustomSeparatedWithNames](#customseparatedwithnames) | ✔ | ✔ |
| [CustomSeparatedWithNamesAndTypes](#customseparatedwithnamesandtypes) | ✔ | ✔ |
| [Values](#data-format-values) | ✔ | ✔ | | [Values](#data-format-values) | ✔ | ✔ |
| [Vertical](#vertical) | ✗ | ✔ | | [Vertical](#vertical) | ✗ | ✔ |
| [JSON](#json) | ✗ | ✔ | | [JSON](#json) | ✗ | ✔ |
@ -368,8 +370,17 @@ $ clickhouse-client --format_csv_delimiter="|" --query="INSERT INTO test.csv FOR
## CustomSeparated {#format-customseparated} ## CustomSeparated {#format-customseparated}
Similar to [Template](#format-template), but prints (or reads) all columns, using the escaping rule from the `format_custom_escaping_rule` setting and the delimiters from the `format_custom_field_delimiter`, `format_custom_row_before_delimiter`, `format_custom_row_after_delimiter`, `format_custom_row_between_delimiter`, `format_custom_result_before_delimiter` and `format_custom_result_after_delimiter` settings, rather than from format strings. Similar to [Template](#format-template), but prints (or reads) all names and types of columns, using the escaping rule from the [format_custom_escaping_rule](../operations/settings/settings.md#format-custom-escaping-rule) setting and the delimiters from the [format_custom_field_delimiter](../operations/settings/settings.md#format-custom-field-delimiter), [format_custom_row_before_delimiter](../operations/settings/settings.md#format-custom-row-before-delimiter), [format_custom_row_after_delimiter](../operations/settings/settings.md#format-custom-row-after-delimiter), [format_custom_row_between_delimiter](../operations/settings/settings.md#format-custom-row-between-delimiter), [format_custom_result_before_delimiter](../operations/settings/settings.md#format-custom-result-before-delimiter) and [format_custom_result_after_delimiter](../operations/settings/settings.md#format-custom-result-after-delimiter) settings, rather than from format strings.
There is also the `CustomSeparatedIgnoreSpaces` format, which is similar to `TemplateIgnoreSpaces`.
There is also the `CustomSeparatedIgnoreSpaces` format, which is similar to the [TemplateIgnoreSpaces](#templateignorespaces) format.
## CustomSeparatedWithNames {#customseparatedwithnames}
Also prints a header row with column names, similar to the [TabSeparatedWithNames](#tabseparatedwithnames) format.
## CustomSeparatedWithNamesAndTypes {#customseparatedwithnamesandtypes}
Also prints two header rows with column names and types, similar to the [TabSeparatedWithNamesAndTypes](#tabseparatedwithnamesandtypes) format.
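A minimal sketch of how these header formats combine with the custom-delimiter settings described in the settings reference (the `SET` names are the ones introduced by this change; the exact quoting of the output depends on the chosen escaping rule):

```sql
SET format_custom_escaping_rule = 'CSV',
    format_custom_field_delimiter = ';';

SELECT 1 AS x, 'hello' AS s
FORMAT CustomSeparatedWithNames;

-- Expected shape of the output: a header row with column names, then data rows:
-- "x";"s"
-- 1;"hello"
```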
## JSON {#json} ## JSON {#json}
@ -1400,12 +1411,15 @@ SELECT * FROM line_as_string;
When working with the `Regexp` format, you can use the following settings: When working with the `Regexp` format, you can use the following settings:
- `format_regexp` — [String](../sql-reference/data-types/string.md). Regular expression string in the [re2](https://github.com/google/re2/wiki/Syntax) format. - `format_regexp` — [String](../sql-reference/data-types/string.md). Regular expression string in the [re2](https://github.com/google/re2/wiki/Syntax) format.
- `format_regexp_escaping_rule` — [String](../sql-reference/data-types/string.md). Serialization rule. The following rules are supported:
- CSV (as in [CSV](#csv)) - `format_regexp_escaping_rule` — [String](../sql-reference/data-types/string.md). Escaping rule. The following rules are supported:
- JSON (as in [JSONEachRow](#jsoneachrow))
- Escaped (as in [TSV](#tabseparated)) - CSV (as in the [CSV](#csv) format)
- Quoted (as in [Values](#data-format-values)) - JSON (as in the [JSONEachRow](#jsoneachrow) format)
- Raw (data is imported as-is, without serialization) - Escaped (as in the [TSV](#tabseparated) format)
- Quoted (as in the [Values](#data-format-values) format)
- Raw (data is imported as-is, without escaping, as in the [TSVRaw](#tabseparatedraw) format)
- `format_regexp_skip_unmatched` — [UInt8](../sql-reference/data-types/int-uint.md). Flag indicating whether an exception is thrown if the imported data does not match the `format_regexp` regular expression. Can be `0` or `1`. - `format_regexp_skip_unmatched` — [UInt8](../sql-reference/data-types/int-uint.md). Flag indicating whether an exception is thrown if the imported data does not match the `format_regexp` regular expression. Can be `0` or `1`.
**Usage** **Usage**
View File
@ -131,7 +131,7 @@ ClickHouse checks the conditions for `min_part_size` and `min_part
```xml ```xml
<encryption_codecs> <encryption_codecs>
<aes_128_gcm_siv> <aes_128_gcm_siv>
<nonce>0123456789101</nonce> <nonce>012345678910</nonce>
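<!-- Assumption worth stating: AES-128-GCM-SIV uses a 96-bit (12-byte) nonce, which is why the corrected value is exactly 12 characters. -->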
</aes_128_gcm_siv> </aes_128_gcm_siv>
</encryption_codecs> </encryption_codecs>
``` ```
View File
@ -3830,3 +3830,54 @@ SELECT * FROM positional_arguments ORDER BY 2,3;
- 0 — Big files are read only by copying data from the kernel to user space. - 0 — Big files are read only by copying data from the kernel to user space.
Default value: `0`. Default value: `0`.
## format_custom_escaping_rule {#format-custom-escaping-rule}
Sets the escaping rule for data in the [CustomSeparated](../../interfaces/formats.md#format-customseparated) format.
Possible values:
- `'Escaped'` — as in the [TSV](../../interfaces/formats.md#tabseparated) format.
- `'Quoted'` — as in the [Values](../../interfaces/formats.md#data-format-values) format.
- `'CSV'` — as in the [CSV](../../interfaces/formats.md#csv) format.
- `'JSON'` — as in the [JSONEachRow](../../interfaces/formats.md#jsoneachrow) format.
- `'XML'` — as in the [XML](../../interfaces/formats.md#xml) format.
- `'Raw'` — data is imported as-is, without escaping, as in the [TSVRaw](../../interfaces/formats.md#tabseparatedraw) format.
Default value: `'Escaped'`.
## format_custom_field_delimiter {#format-custom-field-delimiter}
Sets the character that is interpreted as the delimiter between fields in [CustomSeparated](../../interfaces/formats.md#format-customseparated) format data.
Default value: `'\t'`.
## format_custom_row_before_delimiter {#format-custom-row-before-delimiter}
Sets the character that is interpreted as the delimiter before the field of the first column in [CustomSeparated](../../interfaces/formats.md#format-customseparated) format data.
Default value: `''`.
## format_custom_row_after_delimiter {#format-custom-row-after-delimiter}
Sets the character that is interpreted as the delimiter after the field of the last column in [CustomSeparated](../../interfaces/formats.md#format-customseparated) format data.
Default value: `'\n'`.
## format_custom_row_between_delimiter {#format-custom-row-between-delimiter}
Sets the character that is interpreted as the delimiter between rows in [CustomSeparated](../../interfaces/formats.md#format-customseparated) format data.
Default value: `''`.
## format_custom_result_before_delimiter {#format-custom-result-before-delimiter}
Sets the character that is interpreted as the prefix before the result set in [CustomSeparated](../../interfaces/formats.md#format-customseparated) format data.
Default value: `''`.
## format_custom_result_after_delimiter {#format-custom-result-after-delimiter}
Sets the character that is interpreted as the suffix after the result set in [CustomSeparated](../../interfaces/formats.md#format-customseparated) format data.
Default value: `''`.
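Taken together, a short sketch of how the row delimiters compose (assuming the defaults above for every setting not shown):

```sql
SET format_custom_row_before_delimiter = '<',
    format_custom_row_after_delimiter  = '>\n';

SELECT number FROM numbers(2)
FORMAT CustomSeparated;

-- Expected shape of the output:
-- <0>
-- <1>
```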
View File
@ -10,6 +10,7 @@ toc_title: clickhouse-format
Keys: Keys:
- `--help` or `-h` — Prints a description of the keys. - `--help` or `-h` — Prints a description of the keys.
- `--query` — Formats a query of any length and complexity.
- `--hilite` — Adds syntax highlighting using escape characters. - `--hilite` — Adds syntax highlighting using escape characters.
- `--oneline` — Formats the query on a single line. - `--oneline` — Formats the query on a single line.
- `--quiet` or `-q` — Checks the syntax without printing the result. - `--quiet` or `-q` — Checks the syntax without printing the result.
@ -20,7 +21,22 @@ toc_title: clickhouse-format
## Examples {#examples} ## Examples {#examples}
1. Syntax highlighting and single-line formatting: 1. Formatting a query:
```bash
$ clickhouse-format --query "select number from numbers(10) where number%2 order by number desc;"
```
Result:
```text
SELECT number
FROM numbers(10)
WHERE number % 2
ORDER BY number DESC
```
2. Syntax highlighting and single-line formatting:
```bash ```bash
$ clickhouse-format --oneline --hilite <<< "SELECT sum(number) FROM numbers(5);" $ clickhouse-format --oneline --hilite <<< "SELECT sum(number) FROM numbers(5);"
@ -32,7 +48,7 @@ $ clickhouse-format --oneline --hilite <<< "SELECT sum(number) FROM numbers(5);"
SELECT sum(number) FROM numbers(5) SELECT sum(number) FROM numbers(5)
``` ```
2. Multiple queries in a single line: 3. Multiple queries in a single line:
```bash ```bash
$ clickhouse-format -n <<< "SELECT * FROM (SELECT 1 AS x UNION ALL SELECT 1 UNION DISTINCT SELECT 3);" $ clickhouse-format -n <<< "SELECT * FROM (SELECT 1 AS x UNION ALL SELECT 1 UNION DISTINCT SELECT 3);"
@ -53,7 +69,7 @@ FROM
; ;
``` ```
3. Obfuscation: 4. Obfuscation:
```bash ```bash
$ clickhouse-format --seed Hello --obfuscate <<< "SELECT cost_first_screen BETWEEN a AND b, CASE WHEN x >= 123 THEN y ELSE NULL END;" $ clickhouse-format --seed Hello --obfuscate <<< "SELECT cost_first_screen BETWEEN a AND b, CASE WHEN x >= 123 THEN y ELSE NULL END;"
@ -77,7 +93,7 @@ $ clickhouse-format --seed World --obfuscate <<< "SELECT cost_first_screen BETWE
SELECT horse_tape_summer BETWEEN folklore AND moccasins, CASE WHEN intestine >= 116 THEN nonconformist ELSE FORESTRY END; SELECT horse_tape_summer BETWEEN folklore AND moccasins, CASE WHEN intestine >= 116 THEN nonconformist ELSE FORESTRY END;
``` ```
4. Adding a backslash: 5. Adding a backslash:
```bash ```bash
$ clickhouse-format --backslash <<< "SELECT * FROM (SELECT 1 AS x UNION ALL SELECT 1 UNION DISTINCT SELECT 3);" $ clickhouse-format --backslash <<< "SELECT * FROM (SELECT 1 AS x UNION ALL SELECT 1 UNION DISTINCT SELECT 3);"
View File
@ -0,0 +1,148 @@
---
toc_priority: 108
---
## exponentialMovingAverage {#exponential-moving-average}
Calculates the exponential moving average of values for a given period of time.
**Syntax**
```sql
exponentialMovingAverage(x)(value, timestamp)
```
Each `value` corresponds to a particular `timestamp` point on the time interval. The half-life `x` is the time interval over which the weights decrease by a factor of two. The function returns a weighted average: the older the time point, the less weight its corresponding value carries.
**Arguments**
- `value` — Input values. [Integer](../../../sql-reference/data-types/int-uint.md), [Float](../../../sql-reference/data-types/float.md) or [Decimal](../../../sql-reference/data-types/decimal.md).
- `timestamp` — Parameter for ordering the values. [Integer](../../../sql-reference/data-types/int-uint.md), [Float](../../../sql-reference/data-types/float.md) or [Decimal](../../../sql-reference/data-types/decimal.md).
**Parameters**
- `x` — Half-life period. [Integer](../../../sql-reference/data-types/int-uint.md), [Float](../../../sql-reference/data-types/float.md) or [Decimal](../../../sql-reference/data-types/decimal.md).
**Returned values**
- Returns an [exponentially smoothed moving average](https://ru.wikipedia.org/wiki/Скользящая_средняя#Экспоненциально_взвешенное_скользящее_среднее) of the values for the past `x` time at the latest point in time.
Type: [Float64](../../../sql-reference/data-types/float.md#float32-float64).
**Example**
Input table:
``` text
┌──temperature─┬─timestamp──┐
│ 95 │ 1 │
│ 95 │ 2 │
│ 95 │ 3 │
│ 96 │ 4 │
│ 96 │ 5 │
│ 96 │ 6 │
│ 96 │ 7 │
│ 97 │ 8 │
│ 97 │ 9 │
│ 97 │ 10 │
│ 97 │ 11 │
│ 98 │ 12 │
│ 98 │ 13 │
│ 98 │ 14 │
│ 98 │ 15 │
│ 99 │ 16 │
│ 99 │ 17 │
│ 99 │ 18 │
│ 100 │ 19 │
│ 100 │ 20 │
└──────────────┴────────────┘
```
Query:
```sql
SELECT exponentialMovingAverage(5)(temperature, timestamp);
```
Result:
``` text
┌──exponentialMovingAverage(5)(temperature, timestamp)──┐
│ 92.25779635374204 │
└───────────────────────────────────────────────────────┘
```
Query:
```sql
SELECT
value,
time,
round(exp_smooth, 3),
bar(exp_smooth, 0, 1, 50) AS bar
FROM
(
SELECT
(number = 0) OR (number >= 25) AS value,
number AS time,
exponentialMovingAverage(10)(value, time) OVER (Rows BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS exp_smooth
FROM numbers(50)
)
```
Result:
``` text
┌─value─┬─time─┬─round(exp_smooth, 3)─┬─bar────────────────────────────────────────┐
│ 1 │ 0 │ 0.067 │ ███▎ │
│ 0 │ 1 │ 0.062 │ ███ │
│ 0 │ 2 │ 0.058 │ ██▊ │
│ 0 │ 3 │ 0.054 │ ██▋ │
│ 0 │ 4 │ 0.051 │ ██▌ │
│ 0 │ 5 │ 0.047 │ ██▎ │
│ 0 │ 6 │ 0.044 │ ██▏ │
│ 0 │ 7 │ 0.041 │ ██ │
│ 0 │ 8 │ 0.038 │ █▊ │
│ 0 │ 9 │ 0.036 │ █▋ │
│ 0 │ 10 │ 0.033 │ █▋ │
│ 0 │ 11 │ 0.031 │ █▌ │
│ 0 │ 12 │ 0.029 │ █▍ │
│ 0 │ 13 │ 0.027 │ █▎ │
│ 0 │ 14 │ 0.025 │ █▎ │
│ 0 │ 15 │ 0.024 │ █▏ │
│ 0 │ 16 │ 0.022 │ █ │
│ 0 │ 17 │ 0.021 │ █ │
│ 0 │ 18 │ 0.019 │ ▊ │
│ 0 │ 19 │ 0.018 │ ▊ │
│ 0 │ 20 │ 0.017 │ ▋ │
│ 0 │ 21 │ 0.016 │ ▋ │
│ 0 │ 22 │ 0.015 │ ▋ │
│ 0 │ 23 │ 0.014 │ ▋ │
│ 0 │ 24 │ 0.013 │ ▋ │
│ 1 │ 25 │ 0.079 │ ███▊ │
│ 1 │ 26 │ 0.14 │ ███████ │
│ 1 │ 27 │ 0.198 │ █████████▊ │
│ 1 │ 28 │ 0.252 │ ████████████▌ │
│ 1 │ 29 │ 0.302 │ ███████████████ │
│ 1 │ 30 │ 0.349 │ █████████████████▍ │
│ 1 │ 31 │ 0.392 │ ███████████████████▌ │
│ 1 │ 32 │ 0.433 │ █████████████████████▋ │
│ 1 │ 33 │ 0.471 │ ███████████████████████▌ │
│ 1 │ 34 │ 0.506 │ █████████████████████████▎ │
│ 1 │ 35 │ 0.539 │ ██████████████████████████▊ │
│ 1 │ 36 │ 0.57 │ ████████████████████████████▌ │
│ 1 │ 37 │ 0.599 │ █████████████████████████████▊ │
│ 1 │ 38 │ 0.626 │ ███████████████████████████████▎ │
│ 1 │ 39 │ 0.651 │ ████████████████████████████████▌ │
│ 1 │ 40 │ 0.674 │ █████████████████████████████████▋ │
│ 1 │ 41 │ 0.696 │ ██████████████████████████████████▋ │
│ 1 │ 42 │ 0.716 │ ███████████████████████████████████▋ │
│ 1 │ 43 │ 0.735 │ ████████████████████████████████████▋ │
│ 1 │ 44 │ 0.753 │ █████████████████████████████████████▋ │
│ 1 │ 45 │ 0.77 │ ██████████████████████████████████████▍ │
│ 1 │ 46 │ 0.785 │ ███████████████████████████████████████▎ │
│ 1 │ 47 │ 0.8 │ ███████████████████████████████████████▊ │
│ 1 │ 48 │ 0.813 │ ████████████████████████████████████████▋ │
│ 1 │ 49 │ 0.825 │ █████████████████████████████████████████▎│
└───────┴──────┴──────────────────────┴────────────────────────────────────────────┘
```
View File
@ -748,7 +748,7 @@ SOURCE(REDIS(
!!! info "Note" !!! info "Note"
The `column_family` and `where` fields cannot be used together with the `query` field. One of the data sources is required: `column_family` or `query`. The `column_family` and `where` fields cannot be used together with the `query` field. One of the data sources is required: `column_family` or `query`.
### PosgreSQL {#dicts-external_dicts_dict_sources-postgresql} ### PostgreSQL {#dicts-external_dicts_dict_sources-postgresql}
Configuration example: Configuration example:
View File
@ -89,9 +89,39 @@ SELECT sipHash64(array('e','x','a'), 'mple', 10, toDateTime('2019-06-15 23:00:00
## sipHash128 {#hash_functions-siphash128} ## sipHash128 {#hash_functions-siphash128}
Calculates SipHash from a string. Produces a 128-bit [SipHash](https://131002.net/siphash/) hash value. Differs from [sipHash64](#hash_functions-siphash64) in that the final xor-folding of the state is done up to 128 bits.
Accepts a String-type argument. Returns FixedString(16).
Differs from sipHash64 in that the final xor-folding of the state is only done up to 128 bits. **Syntax**
``` sql
sipHash128(par1,...)
```
**Arguments**
The function takes a variable number of input parameters. Arguments can be any of the [supported data types](../../sql-reference/functions/hash-functions.md).
**Returned value**
A 128-bit `SipHash` hash value.
Type: [FixedString(16)](../../sql-reference/data-types/fixedstring.md).
**Example**
Query:
``` sql
SELECT hex(sipHash128('foo', '\x01', 3));
```
Result:
``` text
┌─hex(sipHash128('foo', '', 3))────┐
│ 9DE516A64A414D4B1B609415E4523F24 │
└──────────────────────────────────┘
```
## cityHash64 {#cityhash64} ## cityHash64 {#cityhash64}
@ -459,30 +489,38 @@ SELECT murmurHash3_32(array('e','x','a'), 'mple', 10, toDateTime('2019-06-15 23:
## murmurHash3_128 {#murmurhash3-128} ## murmurHash3_128 {#murmurhash3-128}
Produces a [MurmurHash3](https://github.com/aappleby/smhasher) hash value. Produces a 128-bit [MurmurHash3](https://github.com/aappleby/smhasher) hash value.
**Syntax**
``` sql ``` sql
murmurHash3_128( expr ) murmurHash3_128(expr)
``` ```
**Arguments** **Arguments**
- `expr` — An [expression](../syntax.md#syntax-expressions) returning a [String](../../sql-reference/functions/hash-functions.md)-type value. - `expr` — A list of [expressions](../../sql-reference/syntax.md#syntax-expressions). [String](../../sql-reference/data-types/string.md).
**Returned value** **Returned value**
A hash value of the [FixedString(16)](../../sql-reference/functions/hash-functions.md) type. A 128-bit `MurmurHash3` hash value.
Type: [FixedString(16)](../../sql-reference/data-types/fixedstring.md).
**Example** **Example**
Query:
``` sql ``` sql
SELECT hex(murmurHash3_128('example_string')) AS MurmurHash3, toTypeName(MurmurHash3) AS type; SELECT hex(murmurHash3_128('foo', 'foo', 'foo'));
``` ```
Result:
``` text ``` text
┌─MurmurHash3──────────────────────┬─type───┐ ┌─hex(murmurHash3_128('foo', 'foo', 'foo'))─┐
368A1A311CB7342253354B548E7E7E71 │ String F8F7AD9B6CD4CF117A71E277E2EC2931
└──────────────────────────────────────────┘ └──────────────────────────────────────────┘
``` ```
## xxHash32, xxHash64 {#hash-functions-xxhash32-xxhash64} ## xxHash32, xxHash64 {#hash-functions-xxhash32-xxhash64}
View File
@ -492,8 +492,9 @@ int mainEntryClickHouseInstall(int argc, char ** argv)
/// Override the default paths. /// Override the default paths.
/// Data paths. /// Data paths.
const std::string data_file = config_d / "data-paths.xml";
if (!fs::exists(data_file))
{ {
std::string data_file = config_d / "data-paths.xml";
WriteBufferFromFile out(data_file); WriteBufferFromFile out(data_file);
out << "<clickhouse>\n" out << "<clickhouse>\n"
" <path>" << data_path.string() << "</path>\n" " <path>" << data_path.string() << "</path>\n"
@ -503,12 +504,14 @@ int mainEntryClickHouseInstall(int argc, char ** argv)
"</clickhouse>\n"; "</clickhouse>\n";
out.sync(); out.sync();
out.finalize(); out.finalize();
fs::permissions(data_file, fs::perms::owner_read, fs::perm_options::replace);
fmt::print("Data path configuration override is saved to file {}.\n", data_file); fmt::print("Data path configuration override is saved to file {}.\n", data_file);
} }
/// Logger. /// Logger.
const std::string logger_file = config_d / "logger.xml";
if (!fs::exists(logger_file))
{ {
std::string logger_file = config_d / "logger.xml";
WriteBufferFromFile out(logger_file); WriteBufferFromFile out(logger_file);
out << "<clickhouse>\n" out << "<clickhouse>\n"
" <logger>\n" " <logger>\n"
@ -518,12 +521,14 @@ int mainEntryClickHouseInstall(int argc, char ** argv)
"</clickhouse>\n"; "</clickhouse>\n";
out.sync(); out.sync();
out.finalize(); out.finalize();
fs::permissions(logger_file, fs::perms::owner_read, fs::perm_options::replace);
fmt::print("Log path configuration override is saved to file {}.\n", logger_file); fmt::print("Log path configuration override is saved to file {}.\n", logger_file);
} }
/// User directories. /// User directories.
const std::string user_directories_file = config_d / "user-directories.xml";
if (!fs::exists(user_directories_file))
{ {
std::string user_directories_file = config_d / "user-directories.xml";
WriteBufferFromFile out(user_directories_file); WriteBufferFromFile out(user_directories_file);
out << "<clickhouse>\n" out << "<clickhouse>\n"
" <user_directories>\n" " <user_directories>\n"
@ -534,12 +539,14 @@ int mainEntryClickHouseInstall(int argc, char ** argv)
"</clickhouse>\n"; "</clickhouse>\n";
out.sync(); out.sync();
out.finalize(); out.finalize();
fs::permissions(user_directories_file, fs::perms::owner_read, fs::perm_options::replace);
fmt::print("User directory path configuration override is saved to file {}.\n", user_directories_file); fmt::print("User directory path configuration override is saved to file {}.\n", user_directories_file);
} }
/// OpenSSL. /// OpenSSL.
const std::string openssl_file = config_d / "openssl.xml";
if (!fs::exists(openssl_file))
{ {
std::string openssl_file = config_d / "openssl.xml";
WriteBufferFromFile out(openssl_file); WriteBufferFromFile out(openssl_file);
out << "<clickhouse>\n" out << "<clickhouse>\n"
" <openSSL>\n" " <openSSL>\n"
@ -552,6 +559,7 @@ int mainEntryClickHouseInstall(int argc, char ** argv)
"</clickhouse>\n"; "</clickhouse>\n";
out.sync(); out.sync();
out.finalize(); out.finalize();
fs::permissions(openssl_file, fs::perms::owner_read, fs::perm_options::replace);
fmt::print("OpenSSL path configuration override is saved to file {}.\n", openssl_file); fmt::print("OpenSSL path configuration override is saved to file {}.\n", openssl_file);
} }
} }
@ -761,12 +769,13 @@ int mainEntryClickHouseInstall(int argc, char ** argv)
#if defined(__linux__) #if defined(__linux__)
fmt::print("Setting capabilities for clickhouse binary. This is optional.\n"); fmt::print("Setting capabilities for clickhouse binary. This is optional.\n");
std::string command = fmt::format("command -v setcap >/dev/null" std::string command = fmt::format("command -v setcap >/dev/null"
" && echo > {0} && chmod a+x {0} && {0} && setcap 'cap_net_admin,cap_ipc_lock,cap_sys_nice+ep' {0} && {0} && rm {0}" " && command -v capsh >/dev/null"
" && setcap 'cap_net_admin,cap_ipc_lock,cap_sys_nice+ep' {1}" " && capsh --has-p=cap_net_admin,cap_ipc_lock,cap_sys_nice+ep >/dev/null 2>&1"
" && setcap 'cap_net_admin,cap_ipc_lock,cap_sys_nice+ep' {0}"
" || echo \"Cannot set 'net_admin' or 'ipc_lock' or 'sys_nice' capability for clickhouse binary." " || echo \"Cannot set 'net_admin' or 'ipc_lock' or 'sys_nice' capability for clickhouse binary."
" This is optional. Taskstats accounting will be disabled." " This is optional. Taskstats accounting will be disabled."
" To enable taskstats accounting you may add the required capability later manually.\"", " To enable taskstats accounting you may add the required capability later manually.\"",
"/tmp/test_setcap.sh", fs::canonical(main_bin_path).string()); fs::canonical(main_bin_path).string());
executeScript(command); executeScript(command);
#endif #endif
View File
@ -49,7 +49,7 @@
<!-- Internal port and hostname --> <!-- Internal port and hostname -->
<hostname>localhost</hostname> <hostname>localhost</hostname>
<port>44444</port> <port>9234</port>
</server> </server>
<!-- Add more servers here --> <!-- Add more servers here -->
View File
@ -14,7 +14,7 @@
<server> <server>
<id>1</id> <id>1</id>
<hostname>localhost</hostname> <hostname>localhost</hostname>
<port>44444</port> <port>9234</port>
</server> </server>
</raft_configuration> </raft_configuration>
</keeper_server> </keeper_server>
View File
@ -328,7 +328,11 @@ struct Checker
{ {
checkRequiredInstructions(); checkRequiredInstructions();
} }
} checker __attribute__((init_priority(101))); /// Run before other static initializers. } checker
#ifndef __APPLE__
__attribute__((init_priority(101))) /// Run before other static initializers.
#endif
;
} }
View File
@ -203,6 +203,21 @@ public:
data(place).count += !assert_cast<const ColumnNullable &>(*columns[0]).isNullAt(row_num); data(place).count += !assert_cast<const ColumnNullable &>(*columns[0]).isNullAt(row_num);
} }
void addBatchSinglePlace(
size_t batch_size, AggregateDataPtr place, const IColumn ** columns, Arena *, ssize_t if_argument_pos) const override
{
auto & nc = assert_cast<const ColumnNullable &>(*columns[0]);
if (if_argument_pos >= 0)
{
const auto & flags = assert_cast<const ColumnUInt8 &>(*columns[if_argument_pos]).getData();
data(place).count += countBytesInFilterWithNull(flags, nc.getNullMapData().data());
}
else
{
data(place).count += batch_size - countBytesInFilter(nc.getNullMapData().data(), batch_size);
}
}
void merge(AggregateDataPtr __restrict place, ConstAggregateDataPtr rhs, Arena *) const override void merge(AggregateDataPtr __restrict place, ConstAggregateDataPtr rhs, Arena *) const override
{ {
data(place).count += data(rhs).count; data(place).count += data(rhs).count;
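For context, this batched path computes the usual SQL semantics of `count` over a `Nullable` column, i.e. NULL rows are skipped. A minimal illustration (hypothetical data built with the `values` table function):

```sql
SELECT count(x) AS cnt
FROM values('x Nullable(UInt8)', 1, NULL, 3);
-- Expected: cnt = 2, since the NULL row is not counted
```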
View File
@ -549,10 +549,5 @@ if (ENABLE_TESTS AND USE_GTEST)
clickhouse_common_zookeeper clickhouse_common_zookeeper
string_utils) string_utils)
# For __udivmodti4 referenced in Core/tests/gtest_DecimalFunctions.cpp
if (OS_DARWIN AND COMPILER_GCC)
target_link_libraries(unit_tests_dbms PRIVATE gcc)
endif ()
add_check(unit_tests_dbms) add_check(unit_tests_dbms)
endif () endif ()
View File
@ -119,6 +119,24 @@ std::optional<Elf::Section> Elf::findSectionByName(const char * name) const
String Elf::getBuildID() const String Elf::getBuildID() const
{ {
/// Section headers are the first choice for a debuginfo file
if (String build_id; iterateSections([&build_id](const Section & section, size_t)
{
if (section.header.sh_type == SHT_NOTE)
{
build_id = Elf::getBuildID(section.begin(), section.size());
if (!build_id.empty())
{
return true;
}
}
return false;
}))
{
return build_id;
}
/// fallback to PHDR
for (size_t idx = 0; idx < header->e_phnum; ++idx) for (size_t idx = 0; idx < header->e_phnum; ++idx)
{ {
const ElfPhdr & phdr = program_headers[idx]; const ElfPhdr & phdr = program_headers[idx];
@ -126,6 +144,7 @@ String Elf::getBuildID() const
if (phdr.p_type == PT_NOTE) if (phdr.p_type == PT_NOTE)
return getBuildID(mapped + phdr.p_offset, phdr.p_filesz); return getBuildID(mapped + phdr.p_offset, phdr.p_filesz);
} }
return {}; return {};
} }
View File
@ -54,7 +54,8 @@ public:
const char * end() const { return mapped + elf_size; } const char * end() const { return mapped + elf_size; }
size_t size() const { return elf_size; } size_t size() const { return elf_size; }
/// Obtain build id from PT_NOTES section of program headers. Return empty string if does not exist. /// Obtain build id from SHT_NOTE of section headers (fallback to PT_NOTES section of program headers).
/// Return empty string if does not exist.
/// The string is returned in binary. Note that "readelf -n ./clickhouse-server" prints it in hex. /// The string is returned in binary. Note that "readelf -n ./clickhouse-server" prints it in hex.
String getBuildID() const; String getBuildID() const;
static String getBuildID(const char * nhdr_pos, size_t size); static String getBuildID(const char * nhdr_pos, size_t size);
View File
@ -165,7 +165,7 @@ protected:
std::function<void()> fatal_error_callback; std::function<void()> fatal_error_callback;
/// It is used to avoid enabling the query profiler when you have multiple ThreadStatus in the same thread /// It is used to avoid enabling the query profiler when you have multiple ThreadStatus in the same thread
bool query_profiled_enabled = true; bool query_profiler_enabled = true;
/// Requires access to query_id. /// Requires access to query_id.
friend class MemoryTrackerThreadSwitcher; friend class MemoryTrackerThreadSwitcher;
@ -207,7 +207,8 @@ public:
void disableProfiling() void disableProfiling()
{ {
query_profiled_enabled = false; assert(!query_profiler_real && !query_profiler_cpu);
query_profiler_enabled = false;
} }
/// Starts new query and create new thread group for it, current thread becomes master thread of the query /// Starts new query and create new thread group for it, current thread becomes master thread of the query
View File
@ -279,37 +279,33 @@ private:
// That may lead later to reading unallocated data from underlying PaddedPODArray // That may lead later to reading unallocated data from underlying PaddedPODArray
// due to assumption that it is safe to read up to 15 bytes past end. // due to assumption that it is safe to read up to 15 bytes past end.
const auto pad_to_next_block = block_size == 1 ? 0 : 1; const auto pad_to_next_block = block_size == 1 ? 0 : 1;
for (size_t r = 0; r < input_rows_count; ++r) for (size_t row_idx = 0; row_idx < input_rows_count; ++row_idx)
{ {
resulting_size += (input_column->getDataAt(r).size / block_size + pad_to_next_block) * block_size + 1; resulting_size += (input_column->getDataAt(row_idx).size / block_size + pad_to_next_block) * block_size + 1;
if constexpr (mode == CipherMode::RFC5116_AEAD_AES_GCM) if constexpr (mode == CipherMode::RFC5116_AEAD_AES_GCM)
resulting_size += tag_size; resulting_size += tag_size;
} }
#if defined(MEMORY_SANITIZER)
encrypted_result_column_data.resize_fill(resulting_size, 0xFF);
#else
encrypted_result_column_data.resize(resulting_size); encrypted_result_column_data.resize(resulting_size);
#endif
} }
auto * encrypted = encrypted_result_column_data.data(); auto * encrypted = encrypted_result_column_data.data();
KeyHolder<mode> key_holder; KeyHolder<mode> key_holder;
for (size_t r = 0; r < input_rows_count; ++r) for (size_t row_idx = 0; row_idx < input_rows_count; ++row_idx)
{ {
const auto key_value = key_holder.setKey(key_size, key_column->getDataAt(r)); const auto key_value = key_holder.setKey(key_size, key_column->getDataAt(row_idx));
auto iv_value = StringRef{}; auto iv_value = StringRef{};
if (iv_column) if (iv_column)
{ {
iv_value = iv_column->getDataAt(r); iv_value = iv_column->getDataAt(row_idx);
/// If the length is zero (empty string is passed) it should be treated as no IV. /// If the length is zero (empty string is passed) it should be treated as no IV.
if (iv_value.size == 0) if (iv_value.size == 0)
iv_value.data = nullptr; iv_value.data = nullptr;
} }
const StringRef input_value = input_column->getDataAt(r); const StringRef input_value = input_column->getDataAt(row_idx);
if constexpr (mode != CipherMode::MySQLCompatibility) if constexpr (mode != CipherMode::MySQLCompatibility)
{ {
@ -348,7 +344,7 @@ private:
// 1.a.2 Set AAD // 1.a.2 Set AAD
if (aad_column) if (aad_column)
{ {
const auto aad_data = aad_column->getDataAt(r); const auto aad_data = aad_column->getDataAt(row_idx);
int tmp_len = 0; int tmp_len = 0;
if (aad_data.size != 0 && EVP_EncryptUpdate(evp_ctx, nullptr, &tmp_len, if (aad_data.size != 0 && EVP_EncryptUpdate(evp_ctx, nullptr, &tmp_len,
reinterpret_cast<const unsigned char *>(aad_data.data), aad_data.size) != 1) reinterpret_cast<const unsigned char *>(aad_data.data), aad_data.size) != 1)
@ -408,7 +404,7 @@ private:
}; };
/// AES_decrypt(string, key, block_mode[, init_vector]) /// decrypt(string, key, block_mode[, init_vector])
template <typename Impl> template <typename Impl>
class FunctionDecrypt : public IFunction class FunctionDecrypt : public IFunction
{ {
@ -471,7 +467,9 @@ private:
ColumnPtr result_column; ColumnPtr result_column;
if (arguments.size() <= 3) if (arguments.size() <= 3)
{
result_column = doDecrypt(evp_cipher, input_rows_count, input_column, key_column, nullptr, nullptr); result_column = doDecrypt(evp_cipher, input_rows_count, input_column, key_column, nullptr, nullptr);
}
else else
{ {
const auto iv_column = arguments[3].column; const auto iv_column = arguments[3].column;
@ -548,12 +546,14 @@ private:
{ {
size_t resulting_size = 0; size_t resulting_size = 0;
for (size_t r = 0; r < input_rows_count; ++r) for (size_t row_idx = 0; row_idx < input_rows_count; ++row_idx)
{ {
size_t string_size = input_column->getDataAt(r).size; size_t string_size = input_column->getDataAt(row_idx).size;
resulting_size += string_size + 1; /// With terminating zero. resulting_size += string_size + 1; /// With terminating zero.
if constexpr (mode == CipherMode::RFC5116_AEAD_AES_GCM) if constexpr (mode == CipherMode::RFC5116_AEAD_AES_GCM)
{
if (string_size > 0)
{ {
if (string_size < tag_size) if (string_size < tag_size)
throw Exception("Encrypted data is smaller than the size of additional data for AEAD mode, cannot decrypt.", throw Exception("Encrypted data is smaller than the size of additional data for AEAD mode, cannot decrypt.",
@ -562,46 +562,43 @@ private:
resulting_size -= tag_size; resulting_size -= tag_size;
} }
} }
}
#if defined(MEMORY_SANITIZER)
// Pre-fill result column with values to prevent MSAN from dropping dead on
// aes-X-ecb mode with "WARNING: MemorySanitizer: use-of-uninitialized-value".
// This is most likely to be caused by the underlying assembler implementation:
// see crypto/aes/aesni-x86_64.s, function aesni_ecb_encrypt
// which msan seems to fail instrument correctly.
decrypted_result_column_data.resize_fill(resulting_size, 0xFF);
#else
decrypted_result_column_data.resize(resulting_size); decrypted_result_column_data.resize(resulting_size);
#endif
} }
auto * decrypted = decrypted_result_column_data.data(); auto * decrypted = decrypted_result_column_data.data();
KeyHolder<mode> key_holder; KeyHolder<mode> key_holder;
for (size_t r = 0; r < input_rows_count; ++r) for (size_t row_idx = 0; row_idx < input_rows_count; ++row_idx)
{ {
// 0: prepare key if required // 0: prepare key if required
auto key_value = key_holder.setKey(key_size, key_column->getDataAt(r)); auto key_value = key_holder.setKey(key_size, key_column->getDataAt(row_idx));
auto iv_value = StringRef{}; auto iv_value = StringRef{};
if (iv_column) if (iv_column)
{ {
iv_value = iv_column->getDataAt(r); iv_value = iv_column->getDataAt(row_idx);
/// If the length is zero (empty string is passed) it should be treated as no IV. /// If the length is zero (empty string is passed) it should be treated as no IV.
if (iv_value.size == 0) if (iv_value.size == 0)
iv_value.data = nullptr; iv_value.data = nullptr;
} }
auto input_value = input_column->getDataAt(r); auto input_value = input_column->getDataAt(row_idx);
if constexpr (mode == CipherMode::RFC5116_AEAD_AES_GCM) if constexpr (mode == CipherMode::RFC5116_AEAD_AES_GCM)
{
if (input_value.size > 0)
{ {
// empty plaintext results in empty ciphertext + tag, means there should be at least tag_size bytes. // empty plaintext results in empty ciphertext + tag, means there should be at least tag_size bytes.
if (input_value.size < tag_size) if (input_value.size < tag_size)
throw Exception(fmt::format("Encrypted data is too short: only {} bytes, " throw Exception(fmt::format("Encrypted data is too short: only {} bytes, "
"should contain at least {} bytes of a tag.", "should contain at least {} bytes of a tag.",
input_value.size, block_size, tag_size), ErrorCodes::BAD_ARGUMENTS); input_value.size, block_size, tag_size), ErrorCodes::BAD_ARGUMENTS);
input_value.size -= tag_size; input_value.size -= tag_size;
} }
}
if constexpr (mode != CipherMode::MySQLCompatibility) if constexpr (mode != CipherMode::MySQLCompatibility)
{ {
@ -619,8 +616,9 @@ private:
} }
} }
// Avoid extra work on empty ciphertext/plaintext for some ciphers /// Avoid extra work on empty ciphertext/plaintext. Always decrypt empty to empty.
if (!(input_value.size == 0 && block_size == 1 && mode != CipherMode::RFC5116_AEAD_AES_GCM)) /// This makes sense for default implementation for NULLs.
if (input_value.size > 0)
{ {
// 1: Init CTX // 1: Init CTX
if constexpr (mode == CipherMode::RFC5116_AEAD_AES_GCM) if constexpr (mode == CipherMode::RFC5116_AEAD_AES_GCM)
@ -641,7 +639,7 @@ private:
// 1.a.2: Set AAD if present // 1.a.2: Set AAD if present
if (aad_column) if (aad_column)
{ {
StringRef aad_data = aad_column->getDataAt(r); StringRef aad_data = aad_column->getDataAt(row_idx);
int tmp_len = 0; int tmp_len = 0;
if (aad_data.size != 0 && EVP_DecryptUpdate(evp_ctx, nullptr, &tmp_len, if (aad_data.size != 0 && EVP_DecryptUpdate(evp_ctx, nullptr, &tmp_len,
reinterpret_cast<const unsigned char *>(aad_data.data), aad_data.size) != 1) reinterpret_cast<const unsigned char *>(aad_data.data), aad_data.size) != 1)
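The user-visible effect of the empty-input short-circuit above can be sketched as follows (assuming the standard `decrypt` function; `aes-128-ecb` is chosen only because it needs no IV):

```sql
SELECT decrypt('aes-128-ecb', '', '1234567890123456') AS plain;
-- Expected: plain = '' — empty ciphertext now always decrypts to an empty
-- string, which is what the default implementation for NULLs relies on.
```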
View File
@ -53,8 +53,6 @@ namespace ErrorCodes
constexpr const char * TASK_PROCESSED_OUT_REASON = "Task has been already processed"; constexpr const char * TASK_PROCESSED_OUT_REASON = "Task has been already processed";
namespace
{
/** Caveats: usage of locks in ZooKeeper is incorrect in 99% of cases, /** Caveats: usage of locks in ZooKeeper is incorrect in 99% of cases,
* and highlights your poor understanding of distributed systems. * and highlights your poor understanding of distributed systems.
@ -104,14 +102,29 @@ public:
void unlock() void unlock()
{ {
if (!locked)
return;
locked = false;
if (zookeeper->expired())
{
LOG_WARNING(log, "Lock is lost, because session was expired. Path: {}, message: {}", lock_path, lock_message);
return;
}
Coordination::Stat stat; Coordination::Stat stat;
std::string dummy; std::string dummy;
/// NOTE It will throw if session expired after we checked it above
bool result = zookeeper->tryGet(lock_path, dummy, &stat); bool result = zookeeper->tryGet(lock_path, dummy, &stat);
if (result && stat.ephemeralOwner == zookeeper->getClientID()) if (result && stat.ephemeralOwner == zookeeper->getClientID())
zookeeper->remove(lock_path, -1); zookeeper->remove(lock_path, -1);
else if (result)
throw Exception(ErrorCodes::LOGICAL_ERROR, "Lock is lost, it has another owner. Path: {}, message: {}, owner: {}, our id: {}",
lock_path, lock_message, stat.ephemeralOwner, zookeeper->getClientID());
else else
LOG_WARNING(log, "Lock is lost. It is normal if session was expired. Path: {}/{}", lock_path, lock_message); throw Exception(ErrorCodes::LOGICAL_ERROR, "Lock is lost, node does not exist. Path: {}, message: {}", lock_path, lock_message);
} }
bool tryLock() bool tryLock()
@ -119,18 +132,16 @@ public:
std::string dummy; std::string dummy;
Coordination::Error code = zookeeper->tryCreate(lock_path, lock_message, zkutil::CreateMode::Ephemeral, dummy); Coordination::Error code = zookeeper->tryCreate(lock_path, lock_message, zkutil::CreateMode::Ephemeral, dummy);
if (code == Coordination::Error::ZNODEEXISTS) if (code == Coordination::Error::ZOK)
{ {
return false; locked = true;
} }
else if (code == Coordination::Error::ZOK) else if (code != Coordination::Error::ZNODEEXISTS)
{
return true;
}
else
{ {
throw Coordination::Exception(code); throw Coordination::Exception(code);
} }
return locked;
} }
private: private:
@ -139,6 +150,7 @@ private:
std::string lock_path; std::string lock_path;
std::string lock_message; std::string lock_message;
Poco::Logger * log; Poco::Logger * log;
bool locked = false;
}; };
@ -148,8 +160,6 @@ std::unique_ptr<ZooKeeperLock> createSimpleZooKeeperLock(
return std::make_unique<ZooKeeperLock>(zookeeper, lock_prefix, lock_name, lock_message); return std::make_unique<ZooKeeperLock>(zookeeper, lock_prefix, lock_name, lock_message);
} }
}
DDLWorker::DDLWorker( DDLWorker::DDLWorker(
int pool_size_, int pool_size_,
@ -644,6 +654,10 @@ void DDLWorker::processTask(DDLTaskBase & task, const ZooKeeperPtr & zookeeper)
zookeeper->create(active_node_path, {}, zkutil::CreateMode::Ephemeral); zookeeper->create(active_node_path, {}, zkutil::CreateMode::Ephemeral);
} }
/// We must hold the lock until task execution status is committed to ZooKeeper,
/// otherwise another replica may try to execute the query again.
std::unique_ptr<ZooKeeperLock> execute_on_leader_lock;
/// Step 2: Execute query from the task. /// Step 2: Execute query from the task.
if (!task.was_executed) if (!task.was_executed)
{ {
@ -674,7 +688,7 @@ void DDLWorker::processTask(DDLTaskBase & task, const ZooKeeperPtr & zookeeper)
if (task.execute_on_leader) if (task.execute_on_leader)
{ {
tryExecuteQueryOnLeaderReplica(task, storage, rewritten_query, task.entry_path, zookeeper); tryExecuteQueryOnLeaderReplica(task, storage, rewritten_query, task.entry_path, zookeeper, execute_on_leader_lock);
} }
else else
{ {
@ -761,7 +775,8 @@ bool DDLWorker::tryExecuteQueryOnLeaderReplica(
StoragePtr storage, StoragePtr storage,
const String & rewritten_query, const String & rewritten_query,
const String & /*node_path*/, const String & /*node_path*/,
const ZooKeeperPtr & zookeeper) const ZooKeeperPtr & zookeeper,
std::unique_ptr<ZooKeeperLock> & execute_on_leader_lock)
{ {
StorageReplicatedMergeTree * replicated_storage = dynamic_cast<StorageReplicatedMergeTree *>(storage.get()); StorageReplicatedMergeTree * replicated_storage = dynamic_cast<StorageReplicatedMergeTree *>(storage.get());
@ -799,7 +814,7 @@ bool DDLWorker::tryExecuteQueryOnLeaderReplica(
pcg64 rng(randomSeed()); pcg64 rng(randomSeed());
auto lock = createSimpleZooKeeperLock(zookeeper, shard_path, "lock", task.host_id_str); execute_on_leader_lock = createSimpleZooKeeperLock(zookeeper, shard_path, "lock", task.host_id_str);
Stopwatch stopwatch; Stopwatch stopwatch;
@ -829,7 +844,7 @@ bool DDLWorker::tryExecuteQueryOnLeaderReplica(
throw Exception(ErrorCodes::NOT_A_LEADER, "Cannot execute initial query on non-leader replica"); throw Exception(ErrorCodes::NOT_A_LEADER, "Cannot execute initial query on non-leader replica");
/// Any replica which is leader tries to take lock /// Any replica which is leader tries to take lock
if (status.is_leader && lock->tryLock()) if (status.is_leader && execute_on_leader_lock->tryLock())
{ {
/// In replicated merge tree we can have multiple leaders. So we can /// In replicated merge tree we can have multiple leaders. So we can
/// be "leader" and took lock, but another "leader" replica may have /// be "leader" and took lock, but another "leader" replica may have
@ -858,8 +873,6 @@ bool DDLWorker::tryExecuteQueryOnLeaderReplica(
executed_by_us = true; executed_by_us = true;
break; break;
} }
lock->unlock();
} }
/// Waiting for someone who will execute query and change is_executed_path node /// Waiting for someone who will execute query and change is_executed_path node
View File
@ -38,7 +38,7 @@ struct DDLTaskBase;
using DDLTaskPtr = std::unique_ptr<DDLTaskBase>; using DDLTaskPtr = std::unique_ptr<DDLTaskBase>;
using ZooKeeperPtr = std::shared_ptr<zkutil::ZooKeeper>; using ZooKeeperPtr = std::shared_ptr<zkutil::ZooKeeper>;
class AccessRightsElements; class AccessRightsElements;
class ZooKeeperLock;
class DDLWorker class DDLWorker
{ {
@ -94,7 +94,8 @@ protected:
StoragePtr storage, StoragePtr storage,
const String & rewritten_query, const String & rewritten_query,
const String & node_path, const String & node_path,
const ZooKeeperPtr & zookeeper); const ZooKeeperPtr & zookeeper,
std::unique_ptr<ZooKeeperLock> & execute_on_leader_lock);
bool tryExecuteQuery(const String & query, DDLTaskBase & task, const ZooKeeperPtr & zookeeper); bool tryExecuteQuery(const String & query, DDLTaskBase & task, const ZooKeeperPtr & zookeeper);
View File
@ -28,6 +28,8 @@ static bool tryExtractConstValueFromCondition(const ASTPtr & condition, bool & v
} }
/// cast of numeric constant in condition to UInt8 /// cast of numeric constant in condition to UInt8
/// Note: this solution is ad-hoc and only implemented for yandex.metrica use case.
/// We should allow any constant condition (or maybe remove this optimization completely) later.
if (const auto * function = condition->as<ASTFunction>()) if (const auto * function = condition->as<ASTFunction>())
{ {
if (isFunctionCast(function)) if (isFunctionCast(function))
@ -49,6 +51,16 @@ static bool tryExtractConstValueFromCondition(const ASTPtr & condition, bool & v
} }
} }
} }
else if (function->name == "toUInt8" || function->name == "toInt8")
{
if (const auto * expr_list = function->arguments->as<ASTExpressionList>())
{
if (expr_list->children.size() != 1)
throw Exception(ErrorCodes::NUMBER_OF_ARGUMENTS_DOESNT_MATCH, "Function {} must have exactly one argument", function->name);
return tryExtractConstValueFromCondition(expr_list->children.at(0), value);
}
}
} }
return false; return false;
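A sketch of the intended effect (hypothetical query; the fold applies wherever this helper is used on constant conditions, e.g. in `if`):

```sql
EXPLAIN SYNTAX SELECT if(toUInt8(1), 'yes', 'no');
-- One would expect the condition to be recognized as the constant 1
-- and the expression folded to: SELECT 'yes'
```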
View File
@ -96,6 +96,22 @@ std::shared_ptr<TSystemLog> createSystemLog(
} }
ASTPtr ISystemLog::getCreateTableQueryClean(const StorageID & table_id, ContextPtr context)
{
DatabasePtr database = DatabaseCatalog::instance().getDatabase(table_id.database_name);
ASTPtr old_ast = database->getCreateTableQuery(table_id.table_name, context);
auto & old_create_query_ast = old_ast->as<ASTCreateQuery &>();
/// Reset UUID
old_create_query_ast.uuid = UUIDHelpers::Nil;
/// Existing table has default settings (i.e. `index_granularity = 8192`), reset them.
if (ASTStorage * storage = old_create_query_ast.storage)
{
storage->reset(storage->settings);
}
return old_ast;
}
SystemLogs::SystemLogs(ContextPtr global_context, const Poco::Util::AbstractConfiguration & config) SystemLogs::SystemLogs(ContextPtr global_context, const Poco::Util::AbstractConfiguration & config)
{ {
query_log = createSystemLog<QueryLog>(global_context, "system", "query_log", config, "query_log"); query_log = createSystemLog<QueryLog>(global_context, "system", "query_log", config, "query_log");
View File
@ -61,6 +61,7 @@ namespace DB
namespace ErrorCodes namespace ErrorCodes
{ {
extern const int TIMEOUT_EXCEEDED; extern const int TIMEOUT_EXCEEDED;
extern const int LOGICAL_ERROR;
} }
#define DBMS_SYSTEM_LOG_QUEUE_SIZE 1048576 #define DBMS_SYSTEM_LOG_QUEUE_SIZE 1048576
@ -83,13 +84,18 @@ class ISystemLog
{ {
public: public:
virtual String getName() = 0; virtual String getName() = 0;
virtual ASTPtr getCreateTableQuery() = 0;
//// force -- force table creation (used for SYSTEM FLUSH LOGS) //// force -- force table creation (used for SYSTEM FLUSH LOGS)
virtual void flush(bool force = false) = 0; virtual void flush(bool force = false) = 0;
virtual void prepareTable() = 0; virtual void prepareTable() = 0;
virtual void startup() = 0; virtual void startup() = 0;
virtual void shutdown() = 0; virtual void shutdown() = 0;
virtual ~ISystemLog() = default; virtual ~ISystemLog() = default;
/// returns CREATE TABLE query, but with removed:
/// - UUID
/// - SETTINGS (for MergeTree)
/// That way it can be used to compare with the SystemLog::getCreateTableQuery()
static ASTPtr getCreateTableQueryClean(const StorageID & table_id, ContextPtr context);
}; };
@ -171,7 +177,7 @@ public:
return LogElement::name(); return LogElement::name();
} }
ASTPtr getCreateTableQuery() override; ASTPtr getCreateTableQuery();
protected: protected:
Poco::Logger * log; Poco::Logger * log;
@ -181,6 +187,8 @@ private:
const StorageID table_id; const StorageID table_id;
const String storage_def; const String storage_def;
StoragePtr table; StoragePtr table;
String create_query;
String old_create_query;
bool is_prepared = false; bool is_prepared = false;
const size_t flush_interval_milliseconds; const size_t flush_interval_milliseconds;
ThreadFromGlobalPool saving_thread; ThreadFromGlobalPool saving_thread;
@ -228,6 +236,7 @@ SystemLog<LogElement>::SystemLog(
: WithContext(context_) : WithContext(context_)
, table_id(database_name_, table_name_) , table_id(database_name_, table_name_)
, storage_def(storage_def_) , storage_def(storage_def_)
, create_query(serializeAST(*getCreateTableQuery()))
, flush_interval_milliseconds(flush_interval_milliseconds_) , flush_interval_milliseconds(flush_interval_milliseconds_)
{ {
assert(database_name_ == DatabaseCatalog::SYSTEM_DATABASE); assert(database_name_ == DatabaseCatalog::SYSTEM_DATABASE);
@ -520,14 +529,14 @@ void SystemLog<LogElement>::prepareTable()
if (table) if (table)
{ {
auto metadata_columns = table->getInMemoryMetadataPtr()->getColumns(); if (old_create_query.empty())
auto old_query = InterpreterCreateQuery::formatColumns(metadata_columns); {
old_create_query = serializeAST(*getCreateTableQueryClean(table_id, getContext()));
if (old_create_query.empty())
throw Exception(ErrorCodes::LOGICAL_ERROR, "Empty CREATE QUERY for {}", backQuoteIfNeed(table_id.table_name));
}
auto ordinary_columns = LogElement::getNamesAndTypes(); if (old_create_query != create_query)
auto alias_columns = LogElement::getNamesAndAliases();
auto current_query = InterpreterCreateQuery::formatColumns(ordinary_columns, alias_columns);
if (serializeAST(*old_query) != serializeAST(*current_query))
{ {
/// Rename the existing table. /// Rename the existing table.
int suffix = 0; int suffix = 0;
@ -553,9 +562,11 @@ void SystemLog<LogElement>::prepareTable()
LOG_DEBUG( LOG_DEBUG(
log, log,
"Existing table {} for system log has obsolete or different structure. Renaming it to {}", "Existing table {} for system log has obsolete or different structure. Renaming it to {}.\nOld: {}\nNew: {}\n.",
description, description,
backQuoteIfNeed(to.table)); backQuoteIfNeed(to.table),
old_create_query,
create_query);
auto query_context = Context::createCopy(context); auto query_context = Context::createCopy(context);
query_context->makeQueryContext(); query_context->makeQueryContext();
@ -573,17 +584,17 @@ void SystemLog<LogElement>::prepareTable()
/// Create the table. /// Create the table.
LOG_DEBUG(log, "Creating new table {} for {}", description, LogElement::name()); LOG_DEBUG(log, "Creating new table {} for {}", description, LogElement::name());
auto create = getCreateTableQuery();
auto query_context = Context::createCopy(context); auto query_context = Context::createCopy(context);
query_context->makeQueryContext(); query_context->makeQueryContext();
InterpreterCreateQuery interpreter(create, query_context); auto create_query_ast = getCreateTableQuery();
InterpreterCreateQuery interpreter(create_query_ast, query_context);
interpreter.setInternal(true); interpreter.setInternal(true);
interpreter.execute(); interpreter.execute();
table = DatabaseCatalog::instance().getTable(table_id, getContext()); table = DatabaseCatalog::instance().getTable(table_id, getContext());
old_create_query.clear();
} }
is_prepared = true; is_prepared = true;
View File
@ -310,7 +310,7 @@ void ThreadStatus::resetPerformanceCountersLastUsage()
void ThreadStatus::initQueryProfiler() void ThreadStatus::initQueryProfiler()
{ {
if (!query_profiled_enabled) if (!query_profiler_enabled)
return; return;
/// query profilers are useless without trace collector /// query profilers are useless without trace collector
@ -326,11 +326,11 @@ void ThreadStatus::initQueryProfiler()
{ {
if (settings.query_profiler_real_time_period_ns > 0) if (settings.query_profiler_real_time_period_ns > 0)
query_profiler_real = std::make_unique<QueryProfilerReal>(thread_id, query_profiler_real = std::make_unique<QueryProfilerReal>(thread_id,
/* period */ static_cast<UInt32>(settings.query_profiler_real_time_period_ns)); /* period= */ static_cast<UInt32>(settings.query_profiler_real_time_period_ns));
if (settings.query_profiler_cpu_time_period_ns > 0) if (settings.query_profiler_cpu_time_period_ns > 0)
query_profiler_cpu = std::make_unique<QueryProfilerCPU>(thread_id, query_profiler_cpu = std::make_unique<QueryProfilerCPU>(thread_id,
/* period */ static_cast<UInt32>(settings.query_profiler_cpu_time_period_ns)); /* period= */ static_cast<UInt32>(settings.query_profiler_cpu_time_period_ns));
} }
catch (...) catch (...)
{ {
View File
@@ -14,7 +14,6 @@ namespace ErrorCodes
    extern const int TOO_DEEP_AST;
    extern const int BAD_ARGUMENTS;
    extern const int UNKNOWN_ELEMENT_IN_AST;
-    extern const int LOGICAL_ERROR;
}

@@ -48,23 +47,6 @@ size_t IAST::checkSize(size_t max_size) const
    return res;
}

-void IAST::reset(IAST *& field)
-{
-    if (field == nullptr)
-        return;
-
-    const auto child = std::find_if(children.begin(), children.end(), [field](const auto & p)
-    {
-        return p.get() == field;
-    });
-
-    if (child == children.end())
-        throw Exception("AST subtree not found in children", ErrorCodes::LOGICAL_ERROR);
-
-    children.erase(child);
-    field = nullptr;
-}
-
IAST::Hash IAST::getTreeHash() const
{


@@ -157,7 +157,23 @@ public:
        set(field, child);
    }

-    void reset(IAST *& field);
+    template <typename T>
+    void reset(T * & field)
+    {
+        if (field == nullptr)
+            return;
+
+        const auto child = std::find_if(children.begin(), children.end(), [field](const auto & p)
+        {
+            return p.get() == field;
+        });
+
+        if (child == children.end())
+            throw Exception("AST subtree not found in children", ErrorCodes::LOGICAL_ERROR);
+
+        children.erase(child);
+        field = nullptr;
+    }

    /// Convert to a string.


@@ -208,8 +208,14 @@ KeeperTCPHandler::KeeperTCPHandler(IServer & server_, const Poco::Net::StreamSoc
    , log(&Poco::Logger::get("KeeperTCPHandler"))
    , global_context(Context::createCopy(server.context()))
    , keeper_dispatcher(global_context->getKeeperDispatcher())
-    , operation_timeout(0, global_context->getConfigRef().getUInt("keeper_server.operation_timeout_ms", Coordination::DEFAULT_OPERATION_TIMEOUT_MS) * 1000)
-    , session_timeout(0, global_context->getConfigRef().getUInt("keeper_server.session_timeout_ms", Coordination::DEFAULT_SESSION_TIMEOUT_MS) * 1000)
+    , operation_timeout(
+        0,
+        global_context->getConfigRef().getUInt(
+            "keeper_server.coordination_settings.operation_timeout_ms", Coordination::DEFAULT_OPERATION_TIMEOUT_MS) * 1000)
+    , session_timeout(
+        0,
+        global_context->getConfigRef().getUInt(
+            "keeper_server.coordination_settings.session_timeout_ms", Coordination::DEFAULT_SESSION_TIMEOUT_MS) * 1000)
    , poll_wrapper(std::make_unique<SocketInterruptablePollWrapper>(socket_))
    , responses(std::make_unique<ThreadSafeResponseQueue>(std::numeric_limits<size_t>::max()))
    , last_op(std::make_unique<LastOp>(EMPTY_LAST_OP))


@@ -11,8 +11,7 @@ from github import Github
from s3_helper import S3Helper
from get_robot_token import get_best_robot_token
from pr_info import PRInfo
-from ci_config import build_config_to_string
-from build_download_helper import get_build_config_for_check, get_build_urls
+from build_download_helper import get_build_name_for_check, get_build_urls
from docker_pull_helper import get_image_with_version
from commit_status_helper import post_commit_status
from clickhouse_helper import ClickHouseHelper, prepare_tests_results_for_clickhouse
@@ -54,11 +53,9 @@ if __name__ == "__main__":
    docker_image = get_image_with_version(temp_path, IMAGE_NAME)

-    build_config = get_build_config_for_check(check_name)
-    print(build_config)
-    build_config_str = build_config_to_string(build_config)
-    print(build_config_str)
-    urls = get_build_urls(build_config_str, reports_path)
+    build_name = get_build_name_for_check(check_name)
+    print(build_name)
+    urls = get_build_urls(build_name, reports_path)
    if not urls:
        raise Exception("No build URLs found")


@@ -12,19 +12,17 @@ from pr_info import PRInfo
from get_robot_token import get_best_robot_token
from version_helper import get_version_from_repo, update_version_local
from ccache_utils import get_ccache_if_not_exists, upload_ccache
-from ci_config import build_config_to_string, CI_CONFIG
+from ci_config import CI_CONFIG
from docker_pull_helper import get_image_with_version

-def get_build_config(build_check_name, build_number):
+def get_build_config(build_check_name, build_name):
    if build_check_name == 'ClickHouse build check (actions)':
        build_config_name = 'build_config'
-    elif build_check_name == 'ClickHouse special build check (actions)':
-        build_config_name = 'special_build_config'
    else:
        raise Exception(f"Unknown build check name {build_check_name}")

-    return CI_CONFIG[build_config_name][build_number]
+    return CI_CONFIG[build_config_name][build_name]

def _can_export_binaries(build_config):
@@ -94,9 +92,9 @@ if __name__ == "__main__":
    caches_path = os.getenv("CACHES_PATH", temp_path)

    build_check_name = sys.argv[1]
-    build_number = int(sys.argv[2])
+    build_name = sys.argv[2]

-    build_config = get_build_config(build_check_name, build_number)
+    build_config = get_build_config(build_check_name, build_name)

    if not os.path.exists(temp_path):
        os.makedirs(temp_path)
@@ -125,7 +123,6 @@ if __name__ == "__main__":
    logging.info("Updated local files with version")

-    build_name = build_config_to_string(build_config)
    logging.info("Build short name %s", build_name)
    subprocess.check_call(f"echo 'BUILD_NAME=build_urls_{build_name}' >> $GITHUB_ENV", shell=True)
@@ -161,7 +158,12 @@ if __name__ == "__main__":
        logging.info("Will upload cache")
        upload_ccache(ccache_path, s3_helper, pr_info.number, temp_path)

-    s3_path_prefix = str(pr_info.number) + "/" + pr_info.sha + "/" + build_check_name.lower().replace(' ', '_') + "/" + build_name
+    # for release pull requests we use branch names prefixes, not pr numbers
+    if 'release' in pr_info.labels or 'release-lts' in pr_info.labels:
+        s3_path_prefix = pr_info.head_ref + "/" + pr_info.sha + "/" + build_name
+    else:
+        s3_path_prefix = str(pr_info.number) + "/" + pr_info.sha + "/" + build_name

    if os.path.exists(log_path):
        log_url = s3_helper.upload_build_file_to_s3(log_path, s3_path_prefix + "/" + os.path.basename(log_path))
        logging.info("Log url %s", log_url)
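
The hunk above changes two things worth calling out: builds are now addressed by a stable name instead of a positional index, and release pull requests are keyed in S3 by branch name rather than PR number. A minimal sketch of the new prefix rule, assuming a pr_info object with number, sha, head_ref and labels attributes like the PRInfo used above (the helper name is illustrative, not part of the commit):

    def choose_s3_path_prefix(pr_info, build_name):
        # Release PRs carry a 'release' or 'release-lts' label and are keyed
        # by branch name; ordinary PRs are keyed by their number.
        if 'release' in pr_info.labels or 'release-lts' in pr_info.labels:
            return f"{pr_info.head_ref}/{pr_info.sha}/{build_name}"
        return f"{pr_info.number}/{pr_info.sha}/{build_name}"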


@@ -8,17 +8,17 @@ import time
import requests

-from ci_config import CI_CONFIG, build_config_to_string
+from ci_config import CI_CONFIG

DOWNLOAD_RETRIES_COUNT = 5

-def get_build_config_for_check(check_name):
-    return CI_CONFIG["tests_config"][check_name]['required_build_properties']
+def get_build_name_for_check(check_name):
+    return CI_CONFIG['tests_config'][check_name]['required_build']

-def get_build_urls(build_config_str, reports_path):
+def get_build_urls(build_name, reports_path):
    for root, _, files in os.walk(reports_path):
        for f in files:
-            if build_config_str in f :
+            if build_name in f :
                logging.info("Found build report json %s", f)
                with open(os.path.join(root, f), 'r', encoding='utf-8') as file_handler:
                    build_report = json.load(file_handler)
@@ -72,11 +72,8 @@ def download_builds(result_path, build_urls, filter_fn):
        dowload_build_with_progress(url, os.path.join(result_path, fname))

def download_builds_filter(check_name, reports_path, result_path, filter_fn=lambda _: True):
-    build_config = get_build_config_for_check(check_name)
-    print(build_config)
-    build_config_str = build_config_to_string(build_config)
-    print(build_config_str)
-    urls = get_build_urls(build_config_str, reports_path)
+    build_name = get_build_name_for_check(check_name)
+    urls = get_build_urls(build_name, reports_path)
    print(urls)

    if not urls:
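
After this change a test check resolves to its build artifacts in two hops: check name -> build name -> report files whose names contain the build name. A condensed sketch of that chain, assuming the reshaped CI_CONFIG shown in the next file (the helper is illustrative, not the repo's exact code):

    import os
    from ci_config import CI_CONFIG

    def report_files_for_check(check_name, reports_path):
        # e.g. 'Stateless tests (address, actions)' -> 'package_asan'
        build_name = CI_CONFIG['tests_config'][check_name]['required_build']
        matches = []
        for root, _, files in os.walk(reports_path):
            matches.extend(os.path.join(root, f) for f in files if build_name in f)
        return matches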


@@ -1,8 +1,8 @@
#!/usr/bin/env python3

CI_CONFIG = {
-    "build_config": [
-        {
+    "build_config": {
+        "package_release": {
            "compiler": "clang-13",
            "build_type": "",
            "sanitizer": "",
@@ -13,7 +13,7 @@ CI_CONFIG = {
            "tidy": "disable",
            "with_coverage": False
        },
-        {
+        "performance": {
            "compiler": "clang-13",
            "build_type": "",
            "sanitizer": "",
@@ -23,7 +23,7 @@ CI_CONFIG = {
            "tidy": "disable",
            "with_coverage": False
        },
-        {
+        "binary_gcc": {
            "compiler": "gcc-11",
            "build_type": "",
            "sanitizer": "",
@@ -33,7 +33,7 @@ CI_CONFIG = {
            "tidy": "disable",
            "with_coverage": False
        },
-        {
+        "package_asan": {
            "compiler": "clang-13",
            "build_type": "",
            "sanitizer": "address",
@@ -43,7 +43,7 @@ CI_CONFIG = {
            "tidy": "disable",
            "with_coverage": False
        },
-        {
+        "package_ubsan": {
            "compiler": "clang-13",
            "build_type": "",
            "sanitizer": "undefined",
@@ -53,7 +53,7 @@ CI_CONFIG = {
            "tidy": "disable",
            "with_coverage": False
        },
-        {
+        "package_tsan": {
            "compiler": "clang-13",
            "build_type": "",
            "sanitizer": "thread",
@@ -63,7 +63,7 @@ CI_CONFIG = {
            "tidy": "disable",
            "with_coverage": False
        },
-        {
+        "package_msan": {
            "compiler": "clang-13",
            "build_type": "",
            "sanitizer": "memory",
@@ -73,7 +73,7 @@ CI_CONFIG = {
            "tidy": "disable",
            "with_coverage": False
        },
-        {
+        "package_debug": {
            "compiler": "clang-13",
            "build_type": "debug",
            "sanitizer": "",
@@ -83,7 +83,7 @@ CI_CONFIG = {
            "tidy": "disable",
            "with_coverage": False
        },
-        {
+        "binary_release": {
            "compiler": "clang-13",
            "build_type": "",
            "sanitizer": "",
@@ -92,10 +92,8 @@ CI_CONFIG = {
            "splitted": "unsplitted",
            "tidy": "disable",
            "with_coverage": False
-        }
-    ],
-    "special_build_config": [
-        {
+        },
+        "package_tidy": {
            "compiler": "clang-13",
            "build_type": "debug",
            "sanitizer": "",
@@ -105,7 +103,7 @@ CI_CONFIG = {
            "tidy": "enable",
            "with_coverage": False
        },
-        {
+        "binary_splitted": {
            "compiler": "clang-13",
            "build_type": "",
            "sanitizer": "",
@@ -115,7 +113,7 @@ CI_CONFIG = {
            "tidy": "disable",
            "with_coverage": False
        },
-        {
+        "binary_darwin": {
            "compiler": "clang-13-darwin",
            "build_type": "",
            "sanitizer": "",
@@ -125,7 +123,7 @@ CI_CONFIG = {
            "tidy": "disable",
            "with_coverage": False
        },
-        {
+        "binary_aarch64": {
            "compiler": "clang-13-aarch64",
            "build_type": "",
            "sanitizer": "",
@@ -135,7 +133,7 @@ CI_CONFIG = {
            "tidy": "disable",
            "with_coverage": False
        },
-        {
+        "binary_freebsd": {
            "compiler": "clang-13-freebsd",
            "build_type": "",
            "sanitizer": "",
@@ -145,7 +143,7 @@ CI_CONFIG = {
            "tidy": "disable",
            "with_coverage": False
        },
-        {
+        "binary_darwin_aarch64": {
            "compiler": "clang-13-darwin-aarch64",
            "build_type": "",
            "sanitizer": "",
@@ -155,7 +153,7 @@ CI_CONFIG = {
            "tidy": "disable",
            "with_coverage": False
        },
-        {
+        "binary_ppc64le": {
            "compiler": "clang-13-ppc64le",
            "build_type": "",
            "sanitizer": "",
@@ -165,550 +163,139 @@ CI_CONFIG = {
            "tidy": "disable",
            "with_coverage": False
        }
-    ],
+    },
    "tests_config": {
        "Stateful tests (address, actions)": {
-            "required_build_properties": {
-                "compiler": "clang-13",
-                "package_type": "deb",
-                "build_type": "relwithdebuginfo",
-                "sanitizer": "address",
-                "bundled": "bundled",
-                "splitted": "unsplitted",
-                "clang_tidy": "disable",
-                "with_coverage": False
-            }
+            "required_build": "package_asan",
        },
        "Stateful tests (thread, actions)": {
-            "required_build_properties": {
-                "compiler": "clang-13",
-                "package_type": "deb",
-                "build_type": "relwithdebuginfo",
-                "sanitizer": "thread",
-                "bundled": "bundled",
-                "splitted": "unsplitted",
-                "clang_tidy": "disable",
-                "with_coverage": False
-            }
+            "required_build": "package_tsan",
        },
        "Stateful tests (memory, actions)": {
-            "required_build_properties": {
-                "compiler": "clang-13",
-                "package_type": "deb",
-                "build_type": "relwithdebuginfo",
-                "sanitizer": "memory",
-                "bundled": "bundled",
-                "splitted": "unsplitted",
-                "clang_tidy": "disable",
-                "with_coverage": False
-            }
+            "required_build": "package_msan",
        },
        "Stateful tests (ubsan, actions)": {
-            "required_build_properties": {
-                "compiler": "clang-13",
-                "package_type": "deb",
-                "build_type": "relwithdebuginfo",
-                "sanitizer": "undefined",
-                "bundled": "bundled",
-                "splitted": "unsplitted",
-                "clang_tidy": "disable",
-                "with_coverage": False
-            }
+            "required_build": "package_ubsan",
        },
        "Stateful tests (debug, actions)": {
-            "required_build_properties": {
-                "compiler": "clang-13",
-                "package_type": "deb",
-                "build_type": "debug",
-                "sanitizer": "none",
-                "bundled": "bundled",
-                "splitted": "unsplitted",
-                "clang_tidy": "disable",
-                "with_coverage": False
-            }
+            "required_build": "package_debug",
        },
        "Stateful tests (release, actions)": {
-            "required_build_properties": {
-                "compiler": "clang-13",
-                "package_type": "deb",
-                "build_type": "relwithdebuginfo",
-                "sanitizer": "none",
-                "bundled": "bundled",
-                "splitted": "unsplitted",
-                "clang_tidy": "disable",
-                "with_coverage": False
-            }
+            "required_build": "package_release",
        },
        "Stateful tests (release, DatabaseOrdinary, actions)": {
-            "required_build_properties": {
-                "compiler": "clang-13",
-                "package_type": "deb",
-                "build_type": "relwithdebuginfo",
-                "sanitizer": "none",
-                "bundled": "bundled",
-                "splitted": "unsplitted",
-                "clang_tidy": "disable",
-                "with_coverage": False
-            }
+            "required_build": "package_release",
        },
        "Stateful tests (release, DatabaseReplicated, actions)": {
-            "required_build_properties": {
-                "compiler": "clang-13",
-                "package_type": "deb",
-                "build_type": "relwithdebuginfo",
-                "sanitizer": "none",
-                "bundled": "bundled",
-                "splitted": "unsplitted",
-                "clang_tidy": "disable",
-                "with_coverage": False
-            }
+            "required_build": "package_release",
        },
        "Stateless tests (address, actions)": {
-            "required_build_properties": {
-                "compiler": "clang-13",
-                "package_type": "deb",
-                "build_type": "relwithdebuginfo",
-                "sanitizer": "address",
-                "bundled": "bundled",
-                "splitted": "unsplitted",
-                "clang_tidy": "disable",
-                "with_coverage": False
-            }
+            "required_build": "package_asan",
        },
        "Stateless tests (thread, actions)": {
-            "required_build_properties": {
-                "compiler": "clang-13",
-                "package_type": "deb",
-                "build_type": "relwithdebuginfo",
-                "sanitizer": "thread",
-                "bundled": "bundled",
-                "splitted": "unsplitted",
-                "clang_tidy": "disable",
-                "with_coverage": False
-            }
+            "required_build": "package_tsan",
        },
        "Stateless tests (memory, actions)": {
-            "required_build_properties": {
-                "compiler": "clang-13",
-                "package_type": "deb",
-                "build_type": "relwithdebuginfo",
-                "sanitizer": "memory",
-                "bundled": "bundled",
-                "splitted": "unsplitted",
-                "clang_tidy": "disable",
-                "with_coverage": False
-            }
+            "required_build": "package_msan",
        },
        "Stateless tests (ubsan, actions)": {
-            "required_build_properties": {
-                "compiler": "clang-13",
-                "package_type": "deb",
-                "build_type": "relwithdebuginfo",
-                "sanitizer": "undefined",
-                "bundled": "bundled",
-                "splitted": "unsplitted",
-                "clang_tidy": "disable",
-                "with_coverage": False
-            }
+            "required_build": "package_ubsan",
        },
        "Stateless tests (debug, actions)": {
-            "required_build_properties": {
-                "compiler": "clang-13",
-                "package_type": "deb",
-                "build_type": "debug",
-                "sanitizer": "none",
-                "bundled": "bundled",
-                "splitted": "unsplitted",
-                "clang_tidy": "disable",
-                "with_coverage": False
-            }
+            "required_build": "package_debug",
        },
        "Stateless tests (release, actions)": {
-            "required_build_properties": {
-                "compiler": "clang-13",
-                "package_type": "deb",
-                "build_type": "relwithdebuginfo",
-                "sanitizer": "none",
-                "bundled": "bundled",
-                "splitted": "unsplitted",
-                "clang_tidy": "disable",
-                "with_coverage": False
-            }
+            "required_build": "package_release",
        },
        "Stateless tests (release, wide parts enabled, actions)": {
-            "required_build_properties": {
-                "compiler": "clang-13",
-                "package_type": "deb",
-                "build_type": "relwithdebuginfo",
-                "sanitizer": "none",
-                "bundled": "bundled",
-                "splitted": "unsplitted",
-                "clang_tidy": "disable",
-                "with_coverage": False
-            }
+            "required_build": "package_release",
        },
        "Stateless tests (release, DatabaseOrdinary, actions)": {
-            "required_build_properties": {
-                "compiler": "clang-13",
-                "package_type": "deb",
-                "build_type": "relwithdebuginfo",
-                "sanitizer": "none",
-                "bundled": "bundled",
-                "splitted": "unsplitted",
-                "clang_tidy": "disable",
-                "with_coverage": False
-            }
+            "required_build": "package_release",
        },
        "Stateless tests (release, DatabaseReplicated, actions)": {
-            "required_build_properties": {
-                "compiler": "clang-13",
-                "package_type": "deb",
-                "build_type": "relwithdebuginfo",
-                "sanitizer": "none",
-                "bundled": "bundled",
-                "splitted": "unsplitted",
-                "clang_tidy": "disable",
-                "with_coverage": False
-            }
+            "required_build": "package_release",
        },
        "Stress test (address, actions)": {
-            "required_build_properties": {
-                "compiler": "clang-13",
-                "package_type": "deb",
-                "build_type": "relwithdebuginfo",
-                "sanitizer": "address",
-                "bundled": "bundled",
-                "splitted": "unsplitted",
-                "clang_tidy": "disable",
-                "with_coverage": False
-            }
+            "required_build": "package_asan",
        },
        "Stress test (thread, actions)": {
-            "required_build_properties": {
-                "compiler": "clang-13",
-                "package_type": "deb",
-                "build_type": "relwithdebuginfo",
-                "sanitizer": "thread",
-                "bundled": "bundled",
-                "splitted": "unsplitted",
-                "clang_tidy": "disable",
-                "with_coverage": False
-            }
+            "required_build": "package_tsan",
        },
        "Stress test (undefined, actions)": {
-            "required_build_properties": {
-                "compiler": "clang-13",
-                "package_type": "deb",
-                "build_type": "relwithdebuginfo",
-                "sanitizer": "undefined",
-                "bundled": "bundled",
-                "splitted": "unsplitted",
-                "clang_tidy": "disable",
-                "with_coverage": False
-            }
+            "required_build": "package_ubsan",
        },
        "Stress test (memory, actions)": {
-            "required_build_properties": {
-                "compiler": "clang-13",
-                "package_type": "deb",
-                "build_type": "relwithdebuginfo",
-                "sanitizer": "memory",
-                "bundled": "bundled",
-                "splitted": "unsplitted",
-                "clang_tidy": "disable",
-                "with_coverage": False
-            }
+            "required_build": "package_msan",
        },
        "Stress test (debug, actions)": {
-            "required_build_properties": {
-                "compiler": "clang-13",
-                "package_type": "deb",
-                "build_type": "debug",
-                "sanitizer": "none",
-                "bundled": "bundled",
-                "splitted": "unsplitted",
-                "clang_tidy": "disable",
-                "with_coverage": False
-            }
+            "required_build": "package_debug",
        },
        "Integration tests (asan, actions)": {
-            "required_build_properties": {
-                "compiler": "clang-13",
-                "package_type": "deb",
-                "build_type": "relwithdebuginfo",
-                "sanitizer": "address",
-                "bundled": "bundled",
-                "splitted": "unsplitted",
-                "clang_tidy": "disable",
-                "with_coverage": False
-            }
+            "required_build": "package_asan",
        },
        "Integration tests (thread, actions)": {
-            "required_build_properties": {
-                "compiler": "clang-13",
-                "package_type": "deb",
-                "build_type": "relwithdebuginfo",
-                "sanitizer": "thread",
-                "bundled": "bundled",
-                "splitted": "unsplitted",
-                "clang_tidy": "disable",
-                "with_coverage": False
-            }
+            "required_build": "package_tsan",
        },
        "Integration tests (release, actions)": {
-            "required_build_properties": {
-                "compiler": "clang-13",
-                "package_type": "deb",
-                "build_type": "relwithdebuginfo",
-                "sanitizer": "none",
-                "bundled": "bundled",
-                "splitted": "unsplitted",
-                "clang_tidy": "disable",
-                "with_coverage": False
-            }
+            "required_build": "package_release",
        },
        "Integration tests (memory, actions)": {
-            "required_build_properties": {
-                "compiler": "clang-13",
-                "package_type": "deb",
-                "build_type": "relwithdebuginfo",
-                "sanitizer": "memory",
-                "bundled": "bundled",
-                "splitted": "unsplitted",
-                "clang_tidy": "disable",
-                "with_coverage": False
-            }
+            "required_build": "package_msan",
        },
        "Integration tests flaky check (asan, actions)": {
-            "required_build_properties": {
-                "compiler": "clang-13",
-                "package_type": "deb",
-                "build_type": "relwithdebuginfo",
-                "sanitizer": "address",
-                "bundled": "bundled",
-                "splitted": "unsplitted",
-                "clang_tidy": "disable",
-                "with_coverage": False
-            }
+            "required_build": "package_asan",
        },
        "Compatibility check (actions)": {
-            "required_build_properties": {
-                "compiler": "clang-13",
-                "package_type": "deb",
-                "build_type": "relwithdebuginfo",
-                "sanitizer": "none",
-                "bundled": "bundled",
-                "splitted": "unsplitted",
-                "clang_tidy": "disable",
-                "with_coverage": False
-            }
+            "required_build": "package_release",
        },
        "Split build smoke test (actions)": {
-            "required_build_properties": {
-                "compiler": "clang-13",
-                "package_type": "binary",
-                "build_type": "relwithdebuginfo",
-                "sanitizer": "none",
-                "bundled": "bundled",
-                "splitted": "splitted",
-                "clang_tidy": "disable",
-                "with_coverage": False
-            }
+            "required_build": "binary_splitted",
        },
        "Testflows check (actions)": {
-            "required_build_properties": {
-                "compiler": "clang-13",
-                "package_type": "deb",
-                "build_type": "relwithdebuginfo",
-                "sanitizer": "none",
-                "bundled": "bundled",
-                "splitted": "unsplitted",
-                "clang_tidy": "disable",
-                "with_coverage": False
-            }
+            "required_build": "package_release",
        },
        "Unit tests (release-gcc, actions)": {
-            "required_build_properties": {
-                "compiler": "gcc-11",
-                "package_type": "deb",
-                "build_type": "relwithdebuginfo",
-                "sanitizer": "none",
-                "bundled": "bundled",
-                "splitted": "unsplitted",
-                "clang_tidy": "disable",
-                "with_coverage": False
-            }
+            "required_build": "binary_gcc",
        },
        "Unit tests (release-clang, actions)": {
-            "required_build_properties": {
-                "compiler": "clang-13",
-                "package_type": "binary",
-                "build_type": "relwithdebuginfo",
-                "sanitizer": "none",
-                "bundled": "bundled",
-                "splitted": "unsplitted",
-                "clang_tidy": "disable",
-                "with_coverage": False
-            }
+            "required_build": "binary_release",
        },
        "Unit tests (asan, actions)": {
-            "required_build_properties": {
-                "compiler": "clang-13",
-                "package_type": "deb",
-                "build_type": "relwithdebuginfo",
-                "sanitizer": "address",
-                "bundled": "bundled",
-                "splitted": "unsplitted",
-                "clang_tidy": "disable",
-                "with_coverage": False
-            }
+            "required_build": "package_asan",
        },
        "Unit tests (msan, actions)": {
-            "required_build_properties": {
-                "compiler": "clang-13",
-                "package_type": "deb",
-                "build_type": "relwithdebuginfo",
-                "sanitizer": "memory",
-                "bundled": "bundled",
-                "splitted": "unsplitted",
-                "clang_tidy": "disable",
-                "with_coverage": False
-            }
+            "required_build": "package_msan",
        },
        "Unit tests (tsan, actions)": {
-            "required_build_properties": {
-                "compiler": "clang-13",
-                "package_type": "deb",
-                "build_type": "relwithdebuginfo",
-                "sanitizer": "thread",
-                "bundled": "bundled",
-                "splitted": "unsplitted",
-                "clang_tidy": "disable",
-                "with_coverage": False
-            }
+            "required_build": "package_tsan",
        },
        "Unit tests (ubsan, actions)": {
-            "required_build_properties": {
-                "compiler": "clang-13",
-                "package_type": "deb",
-                "build_type": "relwithdebuginfo",
-                "sanitizer": "undefined",
-                "bundled": "bundled",
-                "splitted": "unsplitted",
-                "clang_tidy": "disable",
-                "with_coverage": False
-            }
+            "required_build": "package_ubsan",
        },
        "AST fuzzer (debug, actions)": {
-            "required_build_properties": {
-                "compiler": "clang-13",
-                "package_type": "deb",
-                "build_type": "debug",
-                "sanitizer": "none",
-                "bundled": "bundled",
-                "splitted": "unsplitted",
-                "clang_tidy": "disable",
-                "with_coverage": False
-            }
+            "required_build": "package_debug",
        },
        "AST fuzzer (ASan, actions)": {
-            "required_build_properties": {
-                "compiler": "clang-13",
-                "package_type": "deb",
-                "build_type": "relwithdebuginfo",
-                "sanitizer": "address",
-                "bundled": "bundled",
-                "splitted": "unsplitted",
-                "clang_tidy": "disable",
-                "with_coverage": False
-            }
+            "required_build": "package_asan",
        },
        "AST fuzzer (MSan, actions)": {
-            "required_build_properties": {
-                "compiler": "clang-13",
-                "package_type": "deb",
-                "build_type": "relwithdebuginfo",
-                "sanitizer": "memory",
-                "bundled": "bundled",
-                "splitted": "unsplitted",
-                "clang_tidy": "disable",
-                "with_coverage": False
-            }
+            "required_build": "package_msan",
        },
        "AST fuzzer (TSan, actions)": {
-            "required_build_properties": {
-                "compiler": "clang-13",
-                "package_type": "deb",
-                "build_type": "relwithdebuginfo",
-                "sanitizer": "thread",
-                "bundled": "bundled",
-                "splitted": "unsplitted",
-                "clang_tidy": "disable",
-                "with_coverage": False
-            }
+            "required_build": "package_tsan",
        },
        "AST fuzzer (UBSan, actions)": {
-            "required_build_properties": {
-                "compiler": "clang-13",
-                "package_type": "deb",
-                "build_type": "relwithdebuginfo",
-                "sanitizer": "undefined",
-                "bundled": "bundled",
-                "splitted": "unsplitted",
-                "clang_tidy": "disable",
-                "with_coverage": False
-            }
+            "required_build": "package_ubsan",
        },
        "Release (actions)": {
-            "required_build_properties": {
-                "compiler": "clang-13",
-                "package_type": "deb",
-                "build_type": "relwithdebuginfo",
-                "sanitizer": "none",
-                "bundled": "bundled",
-                "splitted": "unsplitted",
-                "clang_tidy": "disable",
-                "with_coverage": False
-            }
+            "required_build": "package_release",
        },
        "Stateless tests flaky check (address, actions)": {
-            "required_build_properties": {
-                "compiler": "clang-13",
-                "package_type": "deb",
-                "build_type": "relwithdebuginfo",
-                "sanitizer": "address",
-                "bundled": "bundled",
-                "splitted": "unsplitted",
-                "clang_tidy": "disable",
-                "with_coverage": False
-            }
+            "required_build": "package_asan",
        },
        "ClickHouse Keeper Jepsen (actions)": {
-            "required_build_properties": {
-                "compiler": "clang-13",
-                "package_type": "binary",
-                "build_type": "relwithdebuginfo",
-                "sanitizer": "none",
-                "bundled": "bundled",
-                "splitted": "unsplitted",
-                "clang_tidy": "disable",
-                "with_coverage": False
-            }
+            "required_build": "binary_release",
        }
    }
}
-
-def build_config_to_string(build_config):
-    if build_config["package_type"] == "performance":
-        return "performance"
-
-    return "_".join([
-        build_config['compiler'],
-        build_config['build_type'] if build_config['build_type'] else "relwithdebuginfo",
-        build_config['sanitizer'] if build_config['sanitizer'] else "none",
-        build_config['bundled'],
-        build_config['splitted'],
-        'tidy' if 'tidy' in build_config and build_config['tidy'] == 'enable' else 'notidy',
-        'with_coverage' if 'with_coverage' in build_config and build_config['with_coverage'] else 'without_coverage',
-        build_config['package_type'],
-    ])
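
With build_config keyed by name and every test entry reduced to a required_build reference, the obvious invariant is that each reference points at an existing build, which is what made build_config_to_string and the positional indices removable. A small consistency check one could run over the new layout (illustrative, not part of the commit):

    from ci_config import CI_CONFIG

    def validate_ci_config(config=CI_CONFIG):
        build_names = set(config['build_config'])
        for check, params in config['tests_config'].items():
            build = params['required_build']
            assert build in build_names, f"{check} references unknown build '{build}'"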


@@ -163,7 +163,7 @@ def main(github_secret_key, github_app_id, push_to_cloudwatch, delete_offline_ru
    if delete_offline_runners:
        print("Going to delete offline runners")
        for runner in runners:
-            if runner.offline:
+            if runner.offline and not runner.busy:
                print("Deleting runner", runner)
                delete_runner(access_token, runner)
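
The added guard means a runner is reclaimed only when it is both offline and idle; a runner that drops its connection mid-job can still report busy, and deleting it would orphan the job. The filter, restated as a sketch assuming runner objects with offline and busy attributes as in the loop above:

    def runners_to_delete(runners):
        # An offline-but-busy runner may be mid-job after a transient disconnect.
        return [r for r in runners if r.offline and not r.busy]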


@@ -6,7 +6,7 @@ import requests
from unidiff import PatchSet

-DIFF_IN_DOCUMENTATION_EXT = [".html", ".md", ".yml", ".txt", ".css", ".js", ".xml", ".ico", ".conf", ".svg", ".png", ".jpg", ".py", ".sh"]
+DIFF_IN_DOCUMENTATION_EXT = [".html", ".md", ".yml", ".txt", ".css", ".js", ".xml", ".ico", ".conf", ".svg", ".png", ".jpg", ".py", ".sh", ".json"]

def get_pr_for_commit(sha, ref):
    try_get_pr_url = f"https://api.github.com/repos/{os.getenv('GITHUB_REPOSITORY', 'ClickHouse/ClickHouse')}/commits/{sha}/pulls"
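
Adding ".json" means a change set touching only files with these extensions still counts as documentation-only. A hypothetical helper showing how such an extension list is typically applied (the repo's real check may differ):

    import os

    def is_docs_only(changed_files, doc_exts):
        # doc_exts would be DIFF_IN_DOCUMENTATION_EXT from above
        return all(os.path.splitext(f)[1] in doc_exts for f in changed_files)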


@@ -34,14 +34,14 @@
        <server>
            <id>1</id>
            <hostname>localhost</hostname>
-            <port>44444</port>
+            <port>9234</port>
            <can_become_leader>true</can_become_leader>
            <priority>3</priority>
        </server>
        <server>
            <id>2</id>
            <hostname>localhost</hostname>
-            <port>44445</port>
+            <port>9235</port>
            <can_become_leader>true</can_become_leader>
            <start_as_follower>true</start_as_follower>
            <priority>2</priority>
@@ -49,7 +49,7 @@
        <server>
            <id>3</id>
            <hostname>localhost</hostname>
-            <port>44446</port>
+            <port>9236</port>
            <can_become_leader>true</can_become_leader>
            <start_as_follower>true</start_as_follower>
            <priority>1</priority>


@@ -22,7 +22,7 @@
        <server>
            <id>1</id>
            <hostname>localhost</hostname>
-            <port>44444</port>
+            <port>9234</port>
        </server>
    </raft_configuration>
</keeper_server>


@@ -16,7 +16,7 @@
        <server>
            <id>1</id>
            <hostname>localhost</hostname>
-            <port>44444</port>
+            <port>9234</port>
        </server>
    </raft_configuration>
</keeper_server>


@@ -11,5 +11,5 @@ keeper_server:
    server:
        id: 1
        hostname: localhost
-        port: 44444
+        port: 9234


@@ -16,7 +16,7 @@
        <server>
            <id>1</id>
            <hostname>localhost</hostname>
-            <port>44444</port>
+            <port>9234</port>
        </server>
    </raft_configuration>
</keeper_server>


@@ -11,5 +11,5 @@ keeper_server:
    server:
        id: 1
        hostname: localhost
-        port: 44444
+        port: 9234


@@ -16,7 +16,7 @@
        <server>
            <id>1</id>
            <hostname>localhost</hostname>
-            <port>44444</port>
+            <port>9234</port>
        </server>
    </raft_configuration>
</keeper_server>


@@ -21,7 +21,7 @@
        <server>
            <id>1</id>
            <hostname>localhost</hostname>
-            <port>44444</port>
+            <port>9234</port>
        </server>
    </raft_configuration>
</keeper_server>


@@ -20,7 +20,7 @@
        <server>
            <id>1</id>
            <hostname>localhost</hostname>
-            <port>44444</port>
+            <port>9234</port>
        </server>
    </raft_configuration>
</keeper_server>


@@ -0,0 +1,22 @@
+<clickhouse>
+    <keeper_server>
+        <tcp_port>9181</tcp_port>
+        <server_id>1</server_id>
+        <log_storage_path>/var/lib/clickhouse/coordination/log</log_storage_path>
+        <snapshot_storage_path>/var/lib/clickhouse/coordination/snapshots</snapshot_storage_path>
+
+        <coordination_settings>
+            <operation_timeout_ms>5000</operation_timeout_ms>
+            <session_timeout_ms>10000</session_timeout_ms>
+            <raft_logs_level>trace</raft_logs_level>
+        </coordination_settings>
+
+        <raft_configuration>
+            <server>
+                <id>1</id>
+                <hostname>node1</hostname>
+                <port>9234</port>
+            </server>
+        </raft_configuration>
+    </keeper_server>
+</clickhouse>


@@ -0,0 +1,8 @@
+<clickhouse>
+    <zookeeper>
+        <node index="1">
+            <host>node1</host>
+            <port>9181</port>
+        </node>
+    </zookeeper>
+</clickhouse>


@@ -17,14 +17,14 @@
        <server>
            <id>1</id>
            <hostname>node1</hostname>
-            <port>44444</port>
+            <port>9234</port>
            <can_become_leader>true</can_become_leader>
            <priority>3</priority>
        </server>
        <server>
            <id>2</id>
            <hostname>node2</hostname>
-            <port>44444</port>
+            <port>9234</port>
            <can_become_leader>false</can_become_leader>
            <start_as_follower>true</start_as_follower>
            <priority>2</priority>
@@ -32,7 +32,7 @@
        <server>
            <id>3</id>
            <hostname>node3</hostname>
-            <port>44444</port>
+            <port>9234</port>
            <can_become_leader>false</can_become_leader>
            <start_as_follower>true</start_as_follower>
            <priority>1</priority>


@@ -17,14 +17,14 @@
        <server>
            <id>1</id>
            <hostname>node1</hostname>
-            <port>44444</port>
+            <port>9234</port>
            <can_become_leader>true</can_become_leader>
            <priority>3</priority>
        </server>
        <server>
            <id>2</id>
            <hostname>node2</hostname>
-            <port>44444</port>
+            <port>9234</port>
            <can_become_leader>false</can_become_leader>
            <start_as_follower>true</start_as_follower>
            <priority>2</priority>
@@ -32,7 +32,7 @@
        <server>
            <id>3</id>
            <hostname>node3</hostname>
-            <port>44444</port>
+            <port>9234</port>
            <can_become_leader>false</can_become_leader>
            <start_as_follower>true</start_as_follower>
            <priority>1</priority>


@@ -17,14 +17,14 @@
        <server>
            <id>1</id>
            <hostname>node1</hostname>
-            <port>44444</port>
+            <port>9234</port>
            <can_become_leader>true</can_become_leader>
            <priority>3</priority>
        </server>
        <server>
            <id>2</id>
            <hostname>node2</hostname>
-            <port>44444</port>
+            <port>9234</port>
            <can_become_leader>false</can_become_leader>
            <start_as_follower>true</start_as_follower>
            <priority>2</priority>
@@ -32,7 +32,7 @@
        <server>
            <id>3</id>
            <hostname>node3</hostname>
-            <port>44444</port>
+            <port>9234</port>
            <can_become_leader>false</can_become_leader>
            <start_as_follower>true</start_as_follower>
            <priority>1</priority>


@@ -7,17 +7,17 @@
        <server>
            <id>1</id>
            <hostname>node1</hostname>
-            <port>44444</port>
+            <port>9234</port>
        </server>
        <server>
            <id>2</id>
            <hostname>node2</hostname>
-            <port>44444</port>
+            <port>9234</port>
        </server>
        <server>
            <id>3</id>
            <hostname>node3</hostname>
-            <port>44444</port>
+            <port>9234</port>
        </server>
    </raft_configuration>
</keeper_server>


@@ -7,17 +7,17 @@
        <server>
            <id>1</id>
            <hostname>node1</hostname>
-            <port>44444</port>
+            <port>9234</port>
        </server>
        <server>
            <id>2</id>
            <hostname>node2</hostname>
-            <port>44444</port>
+            <port>9234</port>
        </server>
        <server>
            <id>3</id>
            <hostname>node3</hostname>
-            <port>44444</port>
+            <port>9234</port>
        </server>
    </raft_configuration>
</keeper_server>


@@ -6,17 +6,17 @@
        <server>
            <id>1</id>
            <hostname>node1</hostname>
-            <port>44444</port>
+            <port>9234</port>
        </server>
        <server>
            <id>2</id>
            <hostname>node2</hostname>
-            <port>44444</port>
+            <port>9234</port>
        </server>
        <server>
            <id>3</id>
            <hostname>node3</hostname>
-            <port>44444</port>
+            <port>9234</port>
        </server>
    </raft_configuration>
</keeper_server>


@@ -17,14 +17,14 @@
        <server>
            <id>1</id>
            <hostname>node1</hostname>
-            <port>44444</port>
+            <port>9234</port>
            <can_become_leader>true</can_become_leader>
            <priority>3</priority>
        </server>
        <server>
            <id>2</id>
            <hostname>node2</hostname>
-            <port>44444</port>
+            <port>9234</port>
            <can_become_leader>true</can_become_leader>
            <start_as_follower>true</start_as_follower>
            <priority>2</priority>
@@ -32,7 +32,7 @@
        <server>
            <id>3</id>
            <hostname>node3</hostname>
-            <port>44444</port>
+            <port>9234</port>
            <can_become_leader>true</can_become_leader>
            <start_as_follower>true</start_as_follower>
            <priority>1</priority>


@@ -17,14 +17,14 @@
        <server>
            <id>1</id>
            <hostname>node1</hostname>
-            <port>44444</port>
+            <port>9234</port>
            <can_become_leader>true</can_become_leader>
            <priority>3</priority>
        </server>
        <server>
            <id>2</id>
            <hostname>node2</hostname>
-            <port>44444</port>
+            <port>9234</port>
            <can_become_leader>true</can_become_leader>
            <start_as_follower>true</start_as_follower>
            <priority>2</priority>
@@ -32,7 +32,7 @@
        <server>
            <id>3</id>
            <hostname>node3</hostname>
-            <port>44444</port>
+            <port>9234</port>
            <can_become_leader>true</can_become_leader>
            <start_as_follower>true</start_as_follower>
            <priority>1</priority>


@@ -17,14 +17,14 @@
        <server>
            <id>1</id>
            <hostname>node1</hostname>
-            <port>44444</port>
+            <port>9234</port>
            <can_become_leader>true</can_become_leader>
            <priority>3</priority>
        </server>
        <server>
            <id>2</id>
            <hostname>node2</hostname>
-            <port>44444</port>
+            <port>9234</port>
            <can_become_leader>true</can_become_leader>
            <start_as_follower>true</start_as_follower>
            <priority>2</priority>
@@ -32,7 +32,7 @@
        <server>
            <id>3</id>
            <hostname>node3</hostname>
-            <port>44444</port>
+            <port>9234</port>
            <can_become_leader>true</can_become_leader>
            <start_as_follower>true</start_as_follower>
            <priority>1</priority>


@@ -16,14 +16,14 @@
        <server>
            <id>1</id>
            <hostname>node1</hostname>
-            <port>44444</port>
+            <port>9234</port>
            <can_become_leader>true</can_become_leader>
            <priority>3</priority>
        </server>
        <server>
            <id>2</id>
            <hostname>node2</hostname>
-            <port>44444</port>
+            <port>9234</port>
            <can_become_leader>true</can_become_leader>
            <start_as_follower>true</start_as_follower>
            <priority>2</priority>
@@ -31,7 +31,7 @@
        <server>
            <id>3</id>
            <hostname>node3</hostname>
-            <port>44444</port>
+            <port>9234</port>
            <can_become_leader>true</can_become_leader>
            <start_as_follower>true</start_as_follower>
            <priority>1</priority>


@@ -16,14 +16,14 @@
        <server>
            <id>1</id>
            <hostname>node1</hostname>
-            <port>44444</port>
+            <port>9234</port>
            <can_become_leader>true</can_become_leader>
            <priority>3</priority>
        </server>
        <server>
            <id>2</id>
            <hostname>node2</hostname>
-            <port>44444</port>
+            <port>9234</port>
            <can_become_leader>true</can_become_leader>
            <start_as_follower>true</start_as_follower>
            <priority>2</priority>
@@ -31,7 +31,7 @@
        <server>
            <id>3</id>
            <hostname>node3</hostname>
-            <port>44444</port>
+            <port>9234</port>
            <can_become_leader>true</can_become_leader>
            <start_as_follower>true</start_as_follower>
            <priority>1</priority>


@@ -16,14 +16,14 @@
        <server>
            <id>1</id>
            <hostname>node1</hostname>
-            <port>44444</port>
+            <port>9234</port>
            <can_become_leader>true</can_become_leader>
            <priority>3</priority>
        </server>
        <server>
            <id>2</id>
            <hostname>node2</hostname>
-            <port>44444</port>
+            <port>9234</port>
            <can_become_leader>true</can_become_leader>
            <start_as_follower>true</start_as_follower>
            <priority>2</priority>
@@ -31,7 +31,7 @@
        <server>
            <id>3</id>
            <hostname>node3</hostname>
-            <port>44444</port>
+            <port>9234</port>
            <can_become_leader>true</can_become_leader>
            <start_as_follower>true</start_as_follower>
            <priority>1</priority>


@@ -16,14 +16,14 @@
        <server>
            <id>1</id>
            <hostname>node1</hostname>
-            <port>44444</port>
+            <port>9234</port>
            <can_become_leader>true</can_become_leader>
            <priority>3</priority>
        </server>
        <server>
            <id>2</id>
            <hostname>node2</hostname>
-            <port>44444</port>
+            <port>9234</port>
            <can_become_leader>true</can_become_leader>
            <start_as_follower>true</start_as_follower>
            <priority>2</priority>
@@ -31,7 +31,7 @@
        <server>
            <id>3</id>
            <hostname>node3</hostname>
-            <port>44444</port>
+            <port>9234</port>
            <can_become_leader>true</can_become_leader>
            <start_as_follower>true</start_as_follower>
            <priority>1</priority>


@@ -16,14 +16,14 @@
        <server>
            <id>1</id>
            <hostname>node1</hostname>
-            <port>44444</port>
+            <port>9234</port>
            <can_become_leader>true</can_become_leader>
            <priority>3</priority>
        </server>
        <server>
            <id>2</id>
            <hostname>node2</hostname>
-            <port>44444</port>
+            <port>9234</port>
            <can_become_leader>true</can_become_leader>
            <start_as_follower>true</start_as_follower>
            <priority>2</priority>
@@ -31,7 +31,7 @@
        <server>
            <id>3</id>
            <hostname>node3</hostname>
-            <port>44444</port>
+            <port>9234</port>
            <can_become_leader>true</can_become_leader>
            <start_as_follower>true</start_as_follower>
            <priority>1</priority>


@@ -16,14 +16,14 @@
        <server>
            <id>1</id>
            <hostname>node1</hostname>
-            <port>44444</port>
+            <port>9234</port>
            <can_become_leader>true</can_become_leader>
            <priority>3</priority>
        </server>
        <server>
            <id>2</id>
            <hostname>node2</hostname>
-            <port>44444</port>
+            <port>9234</port>
            <can_become_leader>true</can_become_leader>
            <start_as_follower>true</start_as_follower>
            <priority>2</priority>
@@ -31,7 +31,7 @@
        <server>
            <id>3</id>
            <hostname>node3</hostname>
-            <port>44444</port>
+            <port>9234</port>
            <can_become_leader>true</can_become_leader>
            <start_as_follower>true</start_as_follower>
            <priority>1</priority>


@@ -15,7 +15,7 @@
        <server>
            <id>1</id>
            <hostname>node1</hostname>
-            <port>44444</port>
+            <port>9234</port>
        </server>
    </raft_configuration>
</keeper_server>


@@ -15,17 +15,17 @@
        <server>
            <id>1</id>
            <hostname>node1</hostname>
-            <port>44444</port>
+            <port>9234</port>
        </server>
        <server>
            <id>2</id>
            <hostname>node2</hostname>
-            <port>44444</port>
+            <port>9234</port>
        </server>
        <server>
            <id>3</id>
            <hostname>node3</hostname>
-            <port>44444</port>
+            <port>9234</port>
        </server>
    </raft_configuration>
</keeper_server>


@@ -15,17 +15,17 @@
        <server>
            <id>1</id>
            <hostname>node1</hostname>
-            <port>44444</port>
+            <port>9234</port>
        </server>
        <server>
            <id>2</id>
            <hostname>node2</hostname>
-            <port>44444</port>
+            <port>9234</port>
        </server>
        <server>
            <id>3</id>
            <hostname>node3</hostname>
-            <port>44444</port>
+            <port>9234</port>
        </server>
    </raft_configuration>
</keeper_server>


@@ -15,17 +15,17 @@
        <server>
            <id>1</id>
            <hostname>node1</hostname>
-            <port>44444</port>
+            <port>9234</port>
        </server>
        <server>
            <id>2</id>
            <hostname>node2</hostname>
-            <port>44444</port>
+            <port>9234</port>
        </server>
        <server>
            <id>3</id>
            <hostname>node3</hostname>
-            <port>44444</port>
+            <port>9234</port>
        </server>
    </raft_configuration>
</keeper_server>


@@ -15,12 +15,12 @@
        <server>
            <id>1</id>
            <hostname>node1</hostname>
-            <port>44444</port>
+            <port>9234</port>
        </server>
        <server>
            <id>2</id>
            <hostname>node2</hostname>
-            <port>44444</port>
+            <port>9234</port>
        </server>
    </raft_configuration>
</keeper_server>

Some files were not shown because too many files have changed in this diff.