diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml
new file mode 100644
index 00000000000..c83d3f6d5bd
--- /dev/null
+++ b/.github/workflows/backport.yml
@@ -0,0 +1,35 @@
+name: CherryPick
+concurrency:
+ group: cherry-pick
+on: # yamllint disable-line rule:truthy
+ schedule:
+ - cron: '0 */3 * * *'
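+# the cron above fires every 3 hours; the "cherry-pick" concurrency group keeps overlapping runs from executing in parallel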
+jobs:
+ CherryPick:
+ runs-on: [self-hosted, style-checker]
+ steps:
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ with:
+ token: ${{secrets.ROBOT_CLICKHOUSE_COMMIT_TOKEN}}
+ fetch-depth: 0
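+ # a full clone is needed to cherry-pick commits across release branches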
+ - name: Cherry pick
+ env:
+ TEMP_PATH: ${{runner.temp}}/cherry_pick
+ ROBOT_CLICKHOUSE_SSH_KEY: ${{secrets.ROBOT_CLICKHOUSE_SSH_KEY}}
+ REPO_OWNER: "ClickHouse"
+ REPO_NAME: "ClickHouse"
+ REPO_TEAM: "core"
+ run: |
+ sudo pip install GitPython
+ cd $GITHUB_WORKSPACE/tests/ci
+ python3 cherry_pick.py
+ - name: Cleanup
+ if: always()
+ run: |
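+ # "||:" discards the exit code, so cleanup never fails when no containers are running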
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
diff --git a/.github/workflows/cancel.yml b/.github/workflows/cancel.yml
index 03e1007b841..9b96a1dbffa 100644
--- a/.github/workflows/cancel.yml
+++ b/.github/workflows/cancel.yml
@@ -1,7 +1,7 @@
name: Cancel
on: # yamllint disable-line rule:truthy
workflow_run:
- workflows: ["CIGithubActions"]
+ workflows: ["CIGithubActions", "ReleaseCI"]
types:
- requested
jobs:
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 0a4beb50708..24b3e178651 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -9,6 +9,9 @@ on: # yamllint disable-line rule:truthy
- opened
branches:
- master
+##########################################################################################
+##################################### SMALL CHECKS #######################################
+##########################################################################################
jobs:
CheckLabels:
runs-on: [self-hosted, style-checker]
@@ -82,160 +85,6 @@ jobs:
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
- BuilderDebDebug:
- needs: DockerHubPush
- if: ${{ !contains(github.event.pull_request.labels.*.name, 'pr-documentation') && !contains(github.event.pull_request.labels.*.name, 'pr-doc-fix') }}
- runs-on: [self-hosted, builder]
- steps:
- - name: Download changed images
- uses: actions/download-artifact@v2
- with:
- name: changed_images
- path: ${{ runner.temp }}/images_path
- - name: Check out repository code
- uses: actions/checkout@v2
- with:
- submodules: 'recursive'
- fetch-depth: 0 # otherwise we will have no info about contributors
- - name: Build
- env:
- TEMP_PATH: ${{runner.temp}}/build_check
- IMAGES_PATH: ${{runner.temp}}/images_path
- REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
- CACHES_PATH: ${{runner.temp}}/../ccaches
- CHECK_NAME: 'ClickHouse build check (actions)'
- BUILD_NUMBER: 7
- run: |
- sudo rm -fr $TEMP_PATH
- mkdir -p $TEMP_PATH
- cp -r $GITHUB_WORKSPACE $TEMP_PATH
- cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER
- - name: Upload build URLs to artifacts
- uses: actions/upload-artifact@v2
- with:
- name: ${{ env.BUILD_NAME }}
- path: ${{ runner.temp }}/build_check/${{ env.BUILD_NAME }}.json
- - name: Cleanup
- if: always()
- run: |
- docker kill $(docker ps -q) ||:
- docker rm -f $(docker ps -a -q) ||:
- sudo rm -fr $TEMP_PATH
- BuilderReport:
- needs: [BuilderDebDebug]
- runs-on: [self-hosted, style-checker]
- steps:
- - name: Download json reports
- uses: actions/download-artifact@v2
- with:
- path: ${{runner.temp}}/reports_dir
- - name: Check out repository code
- uses: actions/checkout@v2
- - name: Report Builder
- env:
- TEMP_PATH: ${{runner.temp}}/report_check
- REPORTS_PATH: ${{runner.temp}}/reports_dir
- CHECK_NAME: 'ClickHouse build check (actions)'
- run: |
- sudo rm -fr $TEMP_PATH
- mkdir -p $TEMP_PATH
- cd $GITHUB_WORKSPACE/tests/ci
- python3 build_report_check.py "$CHECK_NAME"
- - name: Cleanup
- if: always()
- run: |
- docker kill $(docker ps -q) ||:
- docker rm -f $(docker ps -a -q) ||:
- sudo rm -fr $TEMP_PATH
- FunctionalStatelessTestDebug:
- needs: [BuilderDebDebug]
- runs-on: [self-hosted, func-tester]
- steps:
- - name: Download json reports
- uses: actions/download-artifact@v2
- with:
- path: ${{runner.temp}}/reports_dir
- - name: Check out repository code
- uses: actions/checkout@v2
- - name: Functional test
- env:
- TEMP_PATH: ${{runner.temp}}/stateless_debug
- REPORTS_PATH: ${{runner.temp}}/reports_dir
- CHECK_NAME: 'Stateless tests (debug, actions)'
- REPO_COPY: ${{runner.temp}}/stateless_debug/ClickHouse
- REQUIRED_BUILD_NUMBER: 7
- KILL_TIMEOUT: 10800
- run: |
- sudo rm -fr $TEMP_PATH
- mkdir -p $TEMP_PATH
- cp -r $GITHUB_WORKSPACE $TEMP_PATH
- cd $REPO_COPY/tests/ci
- python3 functional_test_check.py "$CHECK_NAME" $REQUIRED_BUILD_NUMBER $KILL_TIMEOUT
- - name: Cleanup
- if: always()
- run: |
- docker kill $(docker ps -q) ||:
- docker rm -f $(docker ps -a -q) ||:
- sudo rm -fr $TEMP_PATH
- FunctionalStatefulTestDebug:
- needs: [BuilderDebDebug]
- runs-on: [self-hosted, func-tester]
- steps:
- - name: Download json reports
- uses: actions/download-artifact@v2
- with:
- path: ${{runner.temp}}/reports_dir
- - name: Check out repository code
- uses: actions/checkout@v2
- - name: Functional test
- env:
- TEMP_PATH: ${{runner.temp}}/stateful_debug
- REPORTS_PATH: ${{runner.temp}}/reports_dir
- CHECK_NAME: 'Stateful tests (debug, actions)'
- REPO_COPY: ${{runner.temp}}/stateful_debug/ClickHouse
- REQUIRED_BUILD_NUMBER: 7
- KILL_TIMEOUT: 3600
- run: |
- sudo rm -fr $TEMP_PATH
- mkdir -p $TEMP_PATH
- cp -r $GITHUB_WORKSPACE $TEMP_PATH
- cd $REPO_COPY/tests/ci
- python3 functional_test_check.py "$CHECK_NAME" $REQUIRED_BUILD_NUMBER $KILL_TIMEOUT
- - name: Cleanup
- if: always()
- run: |
- docker kill $(docker ps -q) ||:
- docker rm -f $(docker ps -a -q) ||:
- sudo rm -fr $TEMP_PATH
- StressTestDebug:
- needs: [BuilderDebDebug]
- runs-on: [self-hosted, stress-tester]
- steps:
- - name: Download json reports
- uses: actions/download-artifact@v2
- with:
- path: ${{runner.temp}}/reports_dir
- - name: Check out repository code
- uses: actions/checkout@v2
- - name: Stress test
- env:
- TEMP_PATH: ${{runner.temp}}/stress_debug
- REPORTS_PATH: ${{runner.temp}}/reports_dir
- CHECK_NAME: 'Stress tests (debug, actions)'
- REPO_COPY: ${{runner.temp}}/stress_debug/ClickHouse
- REQUIRED_BUILD_NUMBER: 7
- run: |
- sudo rm -fr $TEMP_PATH
- mkdir -p $TEMP_PATH
- cp -r $GITHUB_WORKSPACE $TEMP_PATH
- cd $REPO_COPY/tests/ci
- python3 stress_check.py "$CHECK_NAME" $REQUIRED_BUILD_NUMBER
- - name: Cleanup
- if: always()
- run: |
- docker kill $(docker ps -q) ||:
- docker rm -f $(docker ps -a -q) ||:
- sudo rm -fr $TEMP_PATH
FastTest:
needs: DockerHubPush
if: ${{ !contains(github.event.pull_request.labels.*.name, 'pr-documentation') && !contains(github.event.pull_request.labels.*.name, 'pr-doc-fix') }}
@@ -259,8 +108,1413 @@ jobs:
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
+ PVSCheck:
+ needs: DockerHubPush
+ if: ${{ !contains(github.event.pull_request.labels.*.name, 'pr-documentation') && !contains(github.event.pull_request.labels.*.name, 'pr-doc-fix') }}
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ with:
+ submodules: 'recursive'
+ - name: PVS Check
+ env:
+ TEMP_PATH: ${{runner.temp}}/pvs_check
+ REPO_COPY: ${{runner.temp}}/pvs_check/ClickHouse
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci && python3 pvs_check.py
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ CompatibilityCheck:
+ needs: [BuilderDebRelease]
+ if: ${{ !contains(github.event.pull_request.labels.*.name, 'pr-documentation') && !contains(github.event.pull_request.labels.*.name, 'pr-doc-fix') }}
+ runs-on: [self-hosted, style-checker]
+ steps:
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: CompatibilityCheck
+ env:
+ TEMP_PATH: ${{runner.temp}}/compatibility_check
+ REPO_COPY: ${{runner.temp}}/compatibility_check/ClickHouse
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci && python3 compatibility_check.py
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ SplitBuildSmokeTest:
+ needs: [BuilderDebSplitted]
+ if: ${{ !contains(github.event.pull_request.labels.*.name, 'pr-documentation') && !contains(github.event.pull_request.labels.*.name, 'pr-doc-fix') }}
+ runs-on: [self-hosted, style-checker]
+ steps:
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Split build check
+ env:
+ TEMP_PATH: ${{runner.temp}}/split_build_check
+ REPO_COPY: ${{runner.temp}}/split_build_check/ClickHouse
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci && python3 split_build_smoke_check.py
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+#########################################################################################
+#################################### ORDINARY BUILDS ####################################
+#########################################################################################
+ BuilderDebRelease:
+ needs: [DockerHubPush, FastTest]
+ if: ${{ !contains(github.event.pull_request.labels.*.name, 'pr-documentation') && !contains(github.event.pull_request.labels.*.name, 'pr-doc-fix') }}
+ runs-on: [self-hosted, builder]
+ steps:
+ - name: Download changed images
+ uses: actions/download-artifact@v2
+ with:
+ name: changed_images
+ path: ${{ runner.temp }}/images_path
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ with:
+ submodules: 'recursive'
+ fetch-depth: 0 # otherwise we will have no info about contributors
+ - name: Build
+ env:
+ TEMP_PATH: ${{runner.temp}}/build_check
+ IMAGES_PATH: ${{runner.temp}}/images_path
+ REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
+ CACHES_PATH: ${{runner.temp}}/../ccaches
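+ # the ccache directory sits one level above TEMP_PATH, so "rm -fr $TEMP_PATH" leaves it intact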
+ CHECK_NAME: 'ClickHouse build check (actions)'
+ BUILD_NAME: 'package_release'
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
+ - name: Upload build URLs to artifacts
+ uses: actions/upload-artifact@v2
+ with:
+ name: ${{ env.BUILD_NAME }}
+ path: ${{ runner.temp }}/build_check/${{ env.BUILD_NAME }}.json
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ BuilderBinRelease:
+ needs: [DockerHubPush, FastTest]
+ if: ${{ !contains(github.event.pull_request.labels.*.name, 'pr-documentation') && !contains(github.event.pull_request.labels.*.name, 'pr-doc-fix') }}
+ runs-on: [self-hosted, builder]
+ steps:
+ - name: Download changed images
+ uses: actions/download-artifact@v2
+ with:
+ name: changed_images
+ path: ${{ runner.temp }}/images_path
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ with:
+ submodules: 'recursive'
+ fetch-depth: 0 # otherwise we will have no info about contributors
+ - name: Build
+ env:
+ TEMP_PATH: ${{runner.temp}}/build_check
+ IMAGES_PATH: ${{runner.temp}}/images_path
+ REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
+ CACHES_PATH: ${{runner.temp}}/../ccaches
+ CHECK_NAME: 'ClickHouse build check (actions)'
+ BUILD_NAME: 'binary_release'
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
+ - name: Upload build URLs to artifacts
+ uses: actions/upload-artifact@v2
+ with:
+ name: ${{ env.BUILD_NAME }}
+ path: ${{ runner.temp }}/build_check/${{ env.BUILD_NAME }}.json
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ BuilderDebAsan:
+ needs: [DockerHubPush, FastTest]
+ if: ${{ !contains(github.event.pull_request.labels.*.name, 'pr-documentation') && !contains(github.event.pull_request.labels.*.name, 'pr-doc-fix') }}
+ runs-on: [self-hosted, builder]
+ steps:
+ - name: Download changed images
+ uses: actions/download-artifact@v2
+ with:
+ name: changed_images
+ path: ${{ runner.temp }}/images_path
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ with:
+ submodules: 'recursive'
+ fetch-depth: 0 # otherwise we will have no info about contributors
+ - name: Build
+ env:
+ TEMP_PATH: ${{runner.temp}}/build_check
+ IMAGES_PATH: ${{runner.temp}}/images_path
+ REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
+ CACHES_PATH: ${{runner.temp}}/../ccaches
+ CHECK_NAME: 'ClickHouse build check (actions)'
+ BUILD_NAME: 'package_asan'
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
+ - name: Upload build URLs to artifacts
+ uses: actions/upload-artifact@v2
+ with:
+ name: ${{ env.BUILD_NAME }}
+ path: ${{ runner.temp }}/build_check/${{ env.BUILD_NAME }}.json
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ BuilderDebUBsan:
+ needs: [DockerHubPush, FastTest]
+ if: ${{ !contains(github.event.pull_request.labels.*.name, 'pr-documentation') && !contains(github.event.pull_request.labels.*.name, 'pr-doc-fix') }}
+ runs-on: [self-hosted, builder]
+ steps:
+ - name: Download changed images
+ uses: actions/download-artifact@v2
+ with:
+ name: changed_images
+ path: ${{ runner.temp }}/images_path
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ with:
+ submodules: 'recursive'
+ fetch-depth: 0 # otherwise we will have no info about contributors
+ - name: Build
+ env:
+ TEMP_PATH: ${{runner.temp}}/build_check
+ IMAGES_PATH: ${{runner.temp}}/images_path
+ REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
+ CACHES_PATH: ${{runner.temp}}/../ccaches
+ CHECK_NAME: 'ClickHouse build check (actions)'
+ BUILD_NAME: 'package_ubsan'
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
+ - name: Upload build URLs to artifacts
+ uses: actions/upload-artifact@v2
+ with:
+ name: ${{ env.BUILD_NAME }}
+ path: ${{ runner.temp }}/build_check/${{ env.BUILD_NAME }}.json
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ BuilderDebTsan:
+ needs: [DockerHubPush, FastTest]
+ if: ${{ !contains(github.event.pull_request.labels.*.name, 'pr-documentation') && !contains(github.event.pull_request.labels.*.name, 'pr-doc-fix') }}
+ runs-on: [self-hosted, builder]
+ steps:
+ - name: Download changed images
+ uses: actions/download-artifact@v2
+ with:
+ name: changed_images
+ path: ${{ runner.temp }}/images_path
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ with:
+ submodules: 'recursive'
+ fetch-depth: 0 # otherwise we will have no info about contributors
+ - name: Build
+ env:
+ TEMP_PATH: ${{runner.temp}}/build_check
+ IMAGES_PATH: ${{runner.temp}}/images_path
+ REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
+ CACHES_PATH: ${{runner.temp}}/../ccaches
+ CHECK_NAME: 'ClickHouse build check (actions)'
+ BUILD_NAME: 'package_tsan'
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
+ - name: Upload build URLs to artifacts
+ uses: actions/upload-artifact@v2
+ with:
+ name: ${{ env.BUILD_NAME }}
+ path: ${{ runner.temp }}/build_check/${{ env.BUILD_NAME }}.json
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ BuilderDebMsan:
+ needs: [DockerHubPush, FastTest]
+ if: ${{ !contains(github.event.pull_request.labels.*.name, 'pr-documentation') && !contains(github.event.pull_request.labels.*.name, 'pr-doc-fix') }}
+ runs-on: [self-hosted, builder]
+ steps:
+ - name: Download changed images
+ uses: actions/download-artifact@v2
+ with:
+ name: changed_images
+ path: ${{ runner.temp }}/images_path
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ with:
+ submodules: 'recursive'
+ fetch-depth: 0 # otherwise we will have no info about contributors
+ - name: Build
+ env:
+ TEMP_PATH: ${{runner.temp}}/build_check
+ IMAGES_PATH: ${{runner.temp}}/images_path
+ REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
+ CACHES_PATH: ${{runner.temp}}/../ccaches
+ CHECK_NAME: 'ClickHouse build check (actions)'
+ BUILD_NAME: 'package_msan'
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
+ - name: Upload build URLs to artifacts
+ uses: actions/upload-artifact@v2
+ with:
+ name: ${{ env.BUILD_NAME }}
+ path: ${{ runner.temp }}/build_check/${{ env.BUILD_NAME }}.json
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ BuilderDebDebug:
+ needs: [DockerHubPush, FastTest]
+ if: ${{ !contains(github.event.pull_request.labels.*.name, 'pr-documentation') && !contains(github.event.pull_request.labels.*.name, 'pr-doc-fix') }}
+ runs-on: [self-hosted, builder]
+ steps:
+ - name: Download changed images
+ uses: actions/download-artifact@v2
+ with:
+ name: changed_images
+ path: ${{ runner.temp }}/images_path
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ with:
+ submodules: 'recursive'
+ fetch-depth: 0 # otherwise we will have no info about contributors
+ - name: Build
+ env:
+ TEMP_PATH: ${{runner.temp}}/build_check
+ IMAGES_PATH: ${{runner.temp}}/images_path
+ REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
+ CACHES_PATH: ${{runner.temp}}/../ccaches
+ CHECK_NAME: 'ClickHouse build check (actions)'
+ BUILD_NAME: 'package_debug'
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
+ - name: Upload build URLs to artifacts
+ uses: actions/upload-artifact@v2
+ with:
+ name: ${{ env.BUILD_NAME }}
+ path: ${{ runner.temp }}/build_check/${{ env.BUILD_NAME }}.json
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+##########################################################################################
+##################################### SPECIAL BUILDS #####################################
+##########################################################################################
+ BuilderDebSplitted:
+ needs: [DockerHubPush, FastTest]
+ if: ${{ !contains(github.event.pull_request.labels.*.name, 'pr-documentation') && !contains(github.event.pull_request.labels.*.name, 'pr-doc-fix') }}
+ runs-on: [self-hosted, builder]
+ steps:
+ - name: Download changed images
+ uses: actions/download-artifact@v2
+ with:
+ name: changed_images
+ path: ${{ runner.temp }}/images_path
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ with:
+ submodules: 'recursive'
+ fetch-depth: 0 # otherwise we will have no info about contributors
+ - name: Build
+ env:
+ TEMP_PATH: ${{runner.temp}}/build_check
+ IMAGES_PATH: ${{runner.temp}}/images_path
+ REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
+ CACHES_PATH: ${{runner.temp}}/../ccaches
+ CHECK_NAME: 'ClickHouse build check (actions)'
+ BUILD_NAME: 'binary_splitted'
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
+ - name: Upload build URLs to artifacts
+ uses: actions/upload-artifact@v2
+ with:
+ name: ${{ env.BUILD_NAME }}
+ path: ${{ runner.temp }}/build_check/${{ env.BUILD_NAME }}.json
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+############################################################################################
+##################################### BUILD REPORTER #######################################
+############################################################################################
+ BuilderReport:
+ needs:
+ - BuilderDebRelease
+ - BuilderBinRelease
+ - BuilderDebAsan
+ - BuilderDebTsan
+ - BuilderDebUBsan
+ - BuilderDebMsan
+ - BuilderDebDebug
+ - BuilderDebSplitted
+ runs-on: [self-hosted, style-checker]
+ if: always()
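+ # always() lets the report run even if some of the builds above failed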
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Report Builder
+ env:
+ TEMP_PATH: ${{runner.temp}}/report_check
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'ClickHouse build check (actions)'
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cd $GITHUB_WORKSPACE/tests/ci
+ python3 build_report_check.py "$CHECK_NAME"
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+##############################################################################################
+########################### FUNCTIONAL STATELESS TESTS #######################################
+##############################################################################################
+ FunctionalStatelessTestRelease:
+ needs: [BuilderDebRelease]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Functional test
+ env:
+ TEMP_PATH: ${{runner.temp}}/stateless_release
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Stateless tests (release, actions)'
+ REPO_COPY: ${{runner.temp}}/stateless_release/ClickHouse
+ KILL_TIMEOUT: 10800
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ FunctionalStatelessTestAsan:
+ needs: [BuilderDebAsan]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Functional test
+ env:
+ TEMP_PATH: ${{runner.temp}}/stateless_asan
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Stateless tests (address, actions)'
+ REPO_COPY: ${{runner.temp}}/stateless_asan/ClickHouse
+ KILL_TIMEOUT: 10800
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ FunctionalStatelessTestTsan:
+ needs: [BuilderDebTsan]
+ # tests can consume more than 60GB of memory,
+ # so use bigger server
+ runs-on: [self-hosted, stress-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Functional test
+ env:
+ TEMP_PATH: ${{runner.temp}}/stateless_tsan
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Stateless tests (thread, actions)'
+ REPO_COPY: ${{runner.temp}}/stateless_tsan/ClickHouse
+ KILL_TIMEOUT: 10800
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ FunctionalStatelessTestUBsan:
+ needs: [BuilderDebUBsan]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Functional test
+ env:
+ TEMP_PATH: ${{runner.temp}}/stateless_ubsan
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Stateless tests (ubsan, actions)'
+ REPO_COPY: ${{runner.temp}}/stateless_ubsan/ClickHouse
+ KILL_TIMEOUT: 10800
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ FunctionalStatelessTestMsan:
+ needs: [BuilderDebMsan]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Functional test
+ env:
+ TEMP_PATH: ${{runner.temp}}/stateless_memory
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Stateless tests (memory, actions)'
+ REPO_COPY: ${{runner.temp}}/stateless_memory/ClickHouse
+ KILL_TIMEOUT: 10800
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ FunctionalStatelessTestDebug:
+ needs: [BuilderDebDebug]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Functional test
+ env:
+ TEMP_PATH: ${{runner.temp}}/stateless_debug
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Stateless tests (debug, actions)'
+ REPO_COPY: ${{runner.temp}}/stateless_debug/ClickHouse
+ KILL_TIMEOUT: 10800
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ FunctionalStatelessTestFlakyCheck:
+ needs: [BuilderDebAsan]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Functional test
+ env:
+ TEMP_PATH: ${{runner.temp}}/stateless_flaky_asan
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Stateless tests flaky check (address, actions)'
+ REPO_COPY: ${{runner.temp}}/stateless_flaky_asan/ClickHouse
+ KILL_TIMEOUT: 3600
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+##############################################################################################
+############################ FUNCTIONAL STATEFUL TESTS #######################################
+##############################################################################################
+ FunctionalStatefulTestRelease:
+ needs: [BuilderDebRelease]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Functional test
+ env:
+ TEMP_PATH: ${{runner.temp}}/stateful_release
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Stateful tests (release, actions)'
+ REPO_COPY: ${{runner.temp}}/stateful_release/ClickHouse
+ KILL_TIMEOUT: 3600
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ FunctionalStatefulTestAsan:
+ needs: [BuilderDebAsan]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Functional test
+ env:
+ TEMP_PATH: ${{runner.temp}}/stateful_asan
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Stateful tests (address, actions)'
+ REPO_COPY: ${{runner.temp}}/stateful_asan/ClickHouse
+ KILL_TIMEOUT: 3600
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ FunctionalStatefulTestTsan:
+ needs: [BuilderDebTsan]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Functional test
+ env:
+ TEMP_PATH: ${{runner.temp}}/stateful_tsan
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Stateful tests (thread, actions)'
+ REPO_COPY: ${{runner.temp}}/stateful_tsan/ClickHouse
+ KILL_TIMEOUT: 3600
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ FunctionalStatefulTestMsan:
+ needs: [BuilderDebMsan]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Functional test
+ env:
+ TEMP_PATH: ${{runner.temp}}/stateful_msan
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Stateful tests (memory, actions)'
+ REPO_COPY: ${{runner.temp}}/stateful_msan/ClickHouse
+ KILL_TIMEOUT: 3600
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ FunctionalStatefulTestUBsan:
+ needs: [BuilderDebUBsan]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Functional test
+ env:
+ TEMP_PATH: ${{runner.temp}}/stateful_ubsan
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Stateful tests (ubsan, actions)'
+ REPO_COPY: ${{runner.temp}}/stateful_ubsan/ClickHouse
+ KILL_TIMEOUT: 3600
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ FunctionalStatefulTestDebug:
+ needs: [BuilderDebDebug]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Functional test
+ env:
+ TEMP_PATH: ${{runner.temp}}/stateful_debug
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Stateful tests (debug, actions)'
+ REPO_COPY: ${{runner.temp}}/stateful_debug/ClickHouse
+ KILL_TIMEOUT: 3600
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+##############################################################################################
+######################################### STRESS TESTS #######################################
+##############################################################################################
+ StressTestAsan:
+ needs: [BuilderDebAsan]
+ runs-on: [self-hosted, stress-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Stress test
+ env:
+ TEMP_PATH: ${{runner.temp}}/stress_address
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Stress test (address, actions)'
+ REPO_COPY: ${{runner.temp}}/stress_address/ClickHouse
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 stress_check.py "$CHECK_NAME"
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ StressTestTsan:
+ needs: [BuilderDebTsan]
+ runs-on: [self-hosted, stress-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Stress test
+ env:
+ TEMP_PATH: ${{runner.temp}}/stress_thread
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Stress test (thread, actions)'
+ REPO_COPY: ${{runner.temp}}/stress_thread/ClickHouse
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 stress_check.py "$CHECK_NAME"
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ StressTestMsan:
+ needs: [BuilderDebMsan]
+ runs-on: [self-hosted, stress-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Stress test
+ env:
+ TEMP_PATH: ${{runner.temp}}/stress_memory
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Stress test (memory, actions)'
+ REPO_COPY: ${{runner.temp}}/stress_memory/ClickHouse
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 stress_check.py "$CHECK_NAME"
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ StressTestUBsan:
+ needs: [BuilderDebUBsan]
+ runs-on: [self-hosted, stress-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Stress test
+ env:
+ TEMP_PATH: ${{runner.temp}}/stress_undefined
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Stress test (undefined, actions)'
+ REPO_COPY: ${{runner.temp}}/stress_undefined/ClickHouse
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 stress_check.py "$CHECK_NAME"
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ StressTestDebug:
+ needs: [BuilderDebDebug]
+ runs-on: [self-hosted, stress-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Stress test
+ env:
+ TEMP_PATH: ${{runner.temp}}/stress_debug
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Stress test (debug, actions)'
+ REPO_COPY: ${{runner.temp}}/stress_debug/ClickHouse
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 stress_check.py "$CHECK_NAME"
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+##############################################################################################
+##################################### AST FUZZERS ############################################
+##############################################################################################
+ ASTFuzzerTestAsan:
+ needs: [BuilderDebAsan]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Fuzzer
+ env:
+ TEMP_PATH: ${{runner.temp}}/ast_fuzzer_asan
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'AST fuzzer (ASan, actions)'
+ REPO_COPY: ${{runner.temp}}/ast_fuzzer_asan/ClickHouse
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 ast_fuzzer_check.py "$CHECK_NAME"
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ ASTFuzzerTestTsan:
+ needs: [BuilderDebTsan]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Fuzzer
+ env:
+ TEMP_PATH: ${{runner.temp}}/ast_fuzzer_tsan
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'AST fuzzer (TSan, actions)'
+ REPO_COPY: ${{runner.temp}}/ast_fuzzer_tsan/ClickHouse
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 ast_fuzzer_check.py "$CHECK_NAME"
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ ASTFuzzerTestUBSan:
+ needs: [BuilderDebUBsan]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Fuzzer
+ env:
+ TEMP_PATH: ${{runner.temp}}/ast_fuzzer_ubsan
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'AST fuzzer (UBSan, actions)'
+ REPO_COPY: ${{runner.temp}}/ast_fuzzer_ubsan/ClickHouse
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 ast_fuzzer_check.py "$CHECK_NAME"
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ ASTFuzzerTestMSan:
+ needs: [BuilderDebMsan]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Fuzzer
+ env:
+ TEMP_PATH: ${{runner.temp}}/ast_fuzzer_msan
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'AST fuzzer (MSan, actions)'
+ REPO_COPY: ${{runner.temp}}/ast_fuzzer_msan/ClickHouse
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 ast_fuzzer_check.py "$CHECK_NAME"
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ ASTFuzzerTestDebug:
+ needs: [BuilderDebDebug]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Fuzzer
+ env:
+ TEMP_PATH: ${{runner.temp}}/ast_fuzzer_debug
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'AST fuzzer (debug, actions)'
+ REPO_COPY: ${{runner.temp}}/ast_fuzzer_debug/ClickHouse
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 ast_fuzzer_check.py "$CHECK_NAME"
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+#############################################################################################
+############################# INTEGRATION TESTS #############################################
+#############################################################################################
+ IntegrationTestsAsan:
+ needs: [BuilderDebAsan, FunctionalStatelessTestAsan]
+ runs-on: [self-hosted, stress-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Integration test
+ env:
+ TEMP_PATH: ${{runner.temp}}/integration_tests_asan
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Integration tests (asan, actions)'
+ REPO_COPY: ${{runner.temp}}/integration_tests_asan/ClickHouse
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 integration_test_check.py "$CHECK_NAME"
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ IntegrationTestsTsan:
+ needs: [BuilderDebTsan, FunctionalStatelessTestTsan]
+ runs-on: [self-hosted, stress-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Integration test
+ env:
+ TEMP_PATH: ${{runner.temp}}/integration_tests_tsan
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Integration tests (thread, actions)'
+ REPO_COPY: ${{runner.temp}}/integration_tests_tsan/ClickHouse
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 integration_test_check.py "$CHECK_NAME"
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ IntegrationTestsRelease:
+ needs: [BuilderDebRelease, FunctionalStatelessTestRelease]
+ runs-on: [self-hosted, stress-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Integration test
+ env:
+ TEMP_PATH: ${{runner.temp}}/integration_tests_release
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Integration tests (release, actions)'
+ REPO_COPY: ${{runner.temp}}/integration_tests_release/ClickHouse
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 integration_test_check.py "$CHECK_NAME"
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ IntegrationTestsFlakyCheck:
+ needs: [BuilderDebAsan]
+ runs-on: [self-hosted, stress-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Integration test
+ env:
+ TEMP_PATH: ${{runner.temp}}/integration_tests_asan_flaky_check
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Integration tests flaky check (asan, actions)'
+ REPO_COPY: ${{runner.temp}}/integration_tests_asan_flaky_check/ClickHouse
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 integration_test_check.py "$CHECK_NAME"
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+#############################################################################################
+#################################### UNIT TESTS #############################################
+#############################################################################################
+ UnitTestsAsan:
+ needs: [BuilderDebAsan]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Unit test
+ env:
+ TEMP_PATH: ${{runner.temp}}/unit_tests_asan
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Unit tests (asan, actions)'
+ REPO_COPY: ${{runner.temp}}/unit_tests_asan/ClickHouse
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 unit_tests_check.py "$CHECK_NAME"
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ UnitTestsReleaseClang:
+ needs: [BuilderBinRelease]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Unit test
+ env:
+ TEMP_PATH: ${{runner.temp}}/unit_tests_release_clang
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Unit tests (release-clang, actions)'
+ REPO_COPY: ${{runner.temp}}/unit_tests_release_clang/ClickHouse
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 unit_tests_check.py "$CHECK_NAME"
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ UnitTestsTsan:
+ needs: [BuilderDebTsan]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Unit test
+ env:
+ TEMP_PATH: ${{runner.temp}}/unit_tests_tsan
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Unit tests (tsan, actions)'
+ REPO_COPY: ${{runner.temp}}/unit_tests_tsan/ClickHouse
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 unit_tests_check.py "$CHECK_NAME"
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ UnitTestsMsan:
+ needs: [BuilderDebMsan]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Unit test
+ env:
+ TEMP_PATH: ${{runner.temp}}/unit_tests_msan
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Unit tests (msan, actions)'
+ REPO_COPY: ${{runner.temp}}/unit_tests_msan/ClickHouse
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 unit_tests_check.py "$CHECK_NAME"
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ UnitTestsUBsan:
+ needs: [BuilderDebUBsan]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Unit test
+ env:
+ TEMP_PATH: ${{runner.temp}}/unit_tests_ubsan
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Unit tests (ubsan, actions)'
+ REPO_COPY: ${{runner.temp}}/unit_tests_ubsan/ClickHouse
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 unit_tests_check.py "$CHECK_NAME"
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
FinishCheck:
- needs: [StyleCheck, DockerHubPush, CheckLabels, BuilderReport, FastTest, FunctionalStatelessTestDebug, FunctionalStatefulTestDebug, DocsCheck, StressTestDebug]
+ needs:
+ - StyleCheck
+ - DockerHubPush
+ - CheckLabels
+ - BuilderReport
+ - FastTest
+ - FunctionalStatelessTestDebug
+ - FunctionalStatelessTestRelease
+ - FunctionalStatelessTestAsan
+ - FunctionalStatelessTestTsan
+ - FunctionalStatelessTestMsan
+ - FunctionalStatelessTestUBsan
+ - FunctionalStatefulTestDebug
+ - FunctionalStatefulTestRelease
+ - FunctionalStatefulTestAsan
+ - FunctionalStatefulTestTsan
+ - FunctionalStatefulTestMsan
+ - FunctionalStatefulTestUBsan
+ - DocsCheck
+ - StressTestDebug
+ - StressTestAsan
+ - StressTestTsan
+ - StressTestMsan
+ - StressTestUBsan
+ - ASTFuzzerTestDebug
+ - ASTFuzzerTestAsan
+ - ASTFuzzerTestTsan
+ - ASTFuzzerTestMSan
+ - ASTFuzzerTestUBSan
+ - IntegrationTestsAsan
+ - IntegrationTestsRelease
+ - IntegrationTestsTsan
+ - PVSCheck
+ - UnitTestsAsan
+ - UnitTestsTsan
+ - UnitTestsMsan
+ - UnitTestsUBsan
+ - UnitTestsReleaseClang
+ - SplitBuildSmokeTest
+ - CompatibilityCheck
+ - IntegrationTestsFlakyCheck
runs-on: [self-hosted, style-checker]
steps:
- name: Check out repository code
diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml
new file mode 100644
index 00000000000..18969eeedd1
--- /dev/null
+++ b/.github/workflows/master.yml
@@ -0,0 +1,1385 @@
+name: MasterCI
+on: # yamllint disable-line rule:truthy
+ push:
+ branches:
+ - 'master'
+jobs:
+ DockerHubPush:
+ runs-on: [self-hosted, style-checker]
+ steps:
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Images check
+ run: |
+ cd $GITHUB_WORKSPACE/tests/ci
+ python3 docker_images_check.py
+ - name: Upload images files to artifacts
+ uses: actions/upload-artifact@v2
+ with:
+ name: changed_images
+ path: ${{ runner.temp }}/docker_images_check/changed_images.json
+ StyleCheck:
+ needs: DockerHubPush
+ runs-on: [self-hosted, style-checker]
+ steps:
+ - name: Download changed images
+ uses: actions/download-artifact@v2
+ with:
+ name: changed_images
+ path: ${{ runner.temp }}/style_check
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Style Check
+ env:
+ TEMP_PATH: ${{ runner.temp }}/style_check
+ run: |
+ cd $GITHUB_WORKSPACE/tests/ci
+ python3 style_check.py
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ PVSCheck:
+ needs: DockerHubPush
+ if: ${{ !contains(github.event.pull_request.labels.*.name, 'pr-documentation') && !contains(github.event.pull_request.labels.*.name, 'pr-doc-fix') }}
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ with:
+ submodules: 'recursive'
+ - name: PVS Check
+ env:
+ TEMP_PATH: ${{runner.temp}}/pvs_check
+ REPO_COPY: ${{runner.temp}}/pvs_check/ClickHouse
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci && python3 pvs_check.py
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ CompatibilityCheck:
+ needs: [BuilderDebRelease]
+ runs-on: [self-hosted, style-checker]
+ steps:
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: CompatibilityCheck
+ env:
+ TEMP_PATH: ${{runner.temp}}/compatibility_check
+ REPO_COPY: ${{runner.temp}}/compatibility_check/ClickHouse
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci && python3 compatibility_check.py
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ SplitBuildSmokeTest:
+ needs: [BuilderDebSplitted]
+ if: ${{ !contains(github.event.pull_request.labels.*.name, 'pr-documentation') && !contains(github.event.pull_request.labels.*.name, 'pr-doc-fix') }}
+ runs-on: [self-hosted, style-checker]
+ steps:
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Split build check
+ env:
+ TEMP_PATH: ${{runner.temp}}/split_build_check
+ REPO_COPY: ${{runner.temp}}/split_build_check/ClickHouse
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci && python3 split_build_smoke_check.py
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+#########################################################################################
+#################################### ORDINARY BUILDS ####################################
+#########################################################################################
+ BuilderDebRelease:
+ needs: [DockerHubPush]
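+ # unlike the PR workflow, builds on master start right after DockerHubPush without a FastTest gate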
+ runs-on: [self-hosted, builder]
+ steps:
+ - name: Download changed images
+ uses: actions/download-artifact@v2
+ with:
+ name: changed_images
+ path: ${{ runner.temp }}/images_path
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ with:
+ submodules: 'recursive'
+ fetch-depth: 0 # otherwise we will have no info about contributors
+ - name: Build
+ env:
+ TEMP_PATH: ${{runner.temp}}/build_check
+ IMAGES_PATH: ${{runner.temp}}/images_path
+ REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
+ CACHES_PATH: ${{runner.temp}}/../ccaches
+ CHECK_NAME: 'ClickHouse build check (actions)'
+ BUILD_NAME: 'package_release'
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
+ - name: Upload build URLs to artifacts
+ uses: actions/upload-artifact@v2
+ with:
+ name: ${{ env.BUILD_NAME }}
+ path: ${{ runner.temp }}/build_check/${{ env.BUILD_NAME }}.json
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ BuilderBinRelease:
+ needs: [DockerHubPush]
+ if: ${{ !contains(github.event.pull_request.labels.*.name, 'pr-documentation') && !contains(github.event.pull_request.labels.*.name, 'pr-doc-fix') }}
+ runs-on: [self-hosted, builder]
+ steps:
+ - name: Download changed images
+ uses: actions/download-artifact@v2
+ with:
+ name: changed_images
+ path: ${{ runner.temp }}/images_path
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ with:
+ submodules: 'recursive'
+ fetch-depth: 0 # otherwise we will have no info about contributors
+ - name: Build
+ env:
+ TEMP_PATH: ${{runner.temp}}/build_check
+ IMAGES_PATH: ${{runner.temp}}/images_path
+ REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
+ CACHES_PATH: ${{runner.temp}}/../ccaches
+ CHECK_NAME: 'ClickHouse build check (actions)'
+ BUILD_NAME: 'binary_release'
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
+ - name: Upload build URLs to artifacts
+ uses: actions/upload-artifact@v2
+ with:
+ name: ${{ env.BUILD_NAME }}
+ path: ${{ runner.temp }}/build_check/${{ env.BUILD_NAME }}.json
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ BuilderDebAsan:
+ needs: [DockerHubPush]
+ runs-on: [self-hosted, builder]
+ steps:
+ - name: Download changed images
+ uses: actions/download-artifact@v2
+ with:
+ name: changed_images
+ path: ${{ runner.temp }}/images_path
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ with:
+ submodules: 'recursive'
+ fetch-depth: 0 # otherwise we will have no info about contributors
+ - name: Build
+ env:
+ TEMP_PATH: ${{runner.temp}}/build_check
+ IMAGES_PATH: ${{runner.temp}}/images_path
+ REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
+ CACHES_PATH: ${{runner.temp}}/../ccaches
+ CHECK_NAME: 'ClickHouse build check (actions)'
+ BUILD_NAME: 'package_asan'
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
+ - name: Upload build URLs to artifacts
+ uses: actions/upload-artifact@v2
+ with:
+ name: ${{ env.BUILD_NAME }}
+ path: ${{ runner.temp }}/build_check/${{ env.BUILD_NAME }}.json
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ BuilderDebUBsan:
+ needs: [DockerHubPush]
+ runs-on: [self-hosted, builder]
+ steps:
+ - name: Download changed images
+ uses: actions/download-artifact@v2
+ with:
+ name: changed_images
+ path: ${{ runner.temp }}/images_path
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ with:
+ submodules: 'recursive'
+ fetch-depth: 0 # otherwise we will have no info about contributors
+ - name: Build
+ env:
+ TEMP_PATH: ${{runner.temp}}/build_check
+ IMAGES_PATH: ${{runner.temp}}/images_path
+ REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
+ CACHES_PATH: ${{runner.temp}}/../ccaches
+ CHECK_NAME: 'ClickHouse build check (actions)'
+ BUILD_NAME: 'package_ubsan'
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
+ - name: Upload build URLs to artifacts
+ uses: actions/upload-artifact@v2
+ with:
+ name: ${{ env.BUILD_NAME }}
+ path: ${{ runner.temp }}/build_check/${{ env.BUILD_NAME }}.json
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ BuilderDebTsan:
+ needs: [DockerHubPush]
+ runs-on: [self-hosted, builder]
+ steps:
+ - name: Download changed images
+ uses: actions/download-artifact@v2
+ with:
+ name: changed_images
+ path: ${{ runner.temp }}/images_path
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ with:
+ submodules: 'recursive'
+ fetch-depth: 0 # otherwise we will have no info about contributors
+ - name: Build
+ env:
+ TEMP_PATH: ${{runner.temp}}/build_check
+ IMAGES_PATH: ${{runner.temp}}/images_path
+ REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
+ CACHES_PATH: ${{runner.temp}}/../ccaches
+ CHECK_NAME: 'ClickHouse build check (actions)'
+ BUILD_NAME: 'package_tsan'
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
+ - name: Upload build URLs to artifacts
+ uses: actions/upload-artifact@v2
+ with:
+ name: ${{ env.BUILD_NAME }}
+ path: ${{ runner.temp }}/build_check/${{ env.BUILD_NAME }}.json
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ BuilderDebMsan:
+ needs: [DockerHubPush]
+ runs-on: [self-hosted, builder]
+ steps:
+ - name: Download changed images
+ uses: actions/download-artifact@v2
+ with:
+ name: changed_images
+ path: ${{ runner.temp }}/images_path
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ with:
+ submodules: 'recursive'
+ fetch-depth: 0 # otherwise we will have no info about contributors
+ - name: Build
+ env:
+ TEMP_PATH: ${{runner.temp}}/build_check
+ IMAGES_PATH: ${{runner.temp}}/images_path
+ REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
+ CACHES_PATH: ${{runner.temp}}/../ccaches
+ CHECK_NAME: 'ClickHouse build check (actions)'
+ BUILD_NAME: 'package_msan'
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
+ - name: Upload build URLs to artifacts
+ uses: actions/upload-artifact@v2
+ with:
+ name: ${{ env.BUILD_NAME }}
+ path: ${{ runner.temp }}/build_check/${{ env.BUILD_NAME }}.json
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ BuilderDebDebug:
+ needs: [DockerHubPush]
+ runs-on: [self-hosted, builder]
+ steps:
+ - name: Download changed images
+ uses: actions/download-artifact@v2
+ with:
+ name: changed_images
+ path: ${{ runner.temp }}/images_path
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ with:
+ submodules: 'recursive'
+ fetch-depth: 0 # otherwise we will have no info about contributors
+ - name: Build
+ env:
+ TEMP_PATH: ${{runner.temp}}/build_check
+ IMAGES_PATH: ${{runner.temp}}/images_path
+ REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
+ CACHES_PATH: ${{runner.temp}}/../ccaches
+ CHECK_NAME: 'ClickHouse build check (actions)'
+ BUILD_NAME: 'package_debug'
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
+ - name: Upload build URLs to artifacts
+ uses: actions/upload-artifact@v2
+ with:
+ name: ${{ env.BUILD_NAME }}
+ path: ${{ runner.temp }}/build_check/${{ env.BUILD_NAME }}.json
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+##########################################################################################
+##################################### SPECIAL BUILDS #####################################
+##########################################################################################
+ BuilderDebSplitted:
+ needs: [DockerHubPush]
+ if: ${{ !contains(github.event.pull_request.labels.*.name, 'pr-documentation') && !contains(github.event.pull_request.labels.*.name, 'pr-doc-fix') }}
+ runs-on: [self-hosted, builder]
+ steps:
+ - name: Download changed images
+ uses: actions/download-artifact@v2
+ with:
+ name: changed_images
+ path: ${{ runner.temp }}/images_path
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ with:
+ submodules: 'recursive'
+ fetch-depth: 0 # otherwise we will have no info about contributors
+ - name: Build
+ env:
+ TEMP_PATH: ${{runner.temp}}/build_check
+ IMAGES_PATH: ${{runner.temp}}/images_path
+ REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
+ CACHES_PATH: ${{runner.temp}}/../ccaches
+ CHECK_NAME: 'ClickHouse build check (actions)'
+ BUILD_NAME: 'binary_splitted'
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
+ - name: Upload build URLs to artifacts
+ uses: actions/upload-artifact@v2
+ with:
+ name: ${{ env.BUILD_NAME }}
+ path: ${{ runner.temp }}/build_check/${{ env.BUILD_NAME }}.json
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+############################################################################################
+##################################### BUILD REPORTER #######################################
+############################################################################################
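+# BuilderReport downloads all build-URL artifacts uploaded above and
+# publishes a single combined status for 'ClickHouse build check (actions)'
+# via build_report_check.py.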
+ BuilderReport:
+ needs:
+ - BuilderDebRelease
+ - BuilderBinRelease
+ - BuilderDebAsan
+ - BuilderDebTsan
+ - BuilderDebUBsan
+ - BuilderDebMsan
+ - BuilderDebDebug
+ - BuilderDebSplitted
+ runs-on: [self-hosted, style-checker]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Report Builder
+ env:
+ TEMP_PATH: ${{runner.temp}}/report_check
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'ClickHouse build check (actions)'
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cd $GITHUB_WORKSPACE/tests/ci
+ python3 build_report_check.py "$CHECK_NAME"
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+##############################################################################################
+########################### FUNCTIONAL STATELESS TESTS #######################################
+##############################################################################################
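+# KILL_TIMEOUT is forwarded to functional_test_check.py; the value is
+# assumed to be in seconds (10800 = 3 hours for the stateless suites).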
+ FunctionalStatelessTestRelease:
+ needs: [BuilderDebRelease]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Functional test
+ env:
+ TEMP_PATH: ${{runner.temp}}/stateless_release
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Stateless tests (release, actions)'
+ REPO_COPY: ${{runner.temp}}/stateless_release/ClickHouse
+ KILL_TIMEOUT: 10800
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ FunctionalStatelessTestAsan:
+ needs: [BuilderDebAsan]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Functional test
+ env:
+ TEMP_PATH: ${{runner.temp}}/stateless_asan
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Stateless tests (address, actions)'
+ REPO_COPY: ${{runner.temp}}/stateless_asan/ClickHouse
+ KILL_TIMEOUT: 10800
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ FunctionalStatelessTestTsan:
+ needs: [BuilderDebTsan]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Functional test
+ env:
+ TEMP_PATH: ${{runner.temp}}/stateless_tsan
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Stateless tests (thread, actions)'
+ REPO_COPY: ${{runner.temp}}/stateless_tsan/ClickHouse
+ KILL_TIMEOUT: 10800
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ FunctionalStatelessTestUBsan:
+ needs: [BuilderDebUBsan]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Functional test
+ env:
+ TEMP_PATH: ${{runner.temp}}/stateless_ubsan
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Stateless tests (ubsan, actions)'
+ REPO_COPY: ${{runner.temp}}/stateless_ubsan/ClickHouse
+ KILL_TIMEOUT: 10800
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ FunctionalStatelessTestMsan:
+ needs: [BuilderDebMsan]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Functional test
+ env:
+ TEMP_PATH: ${{runner.temp}}/stateless_memory
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Stateless tests (memory, actions)'
+ REPO_COPY: ${{runner.temp}}/stateless_memory/ClickHouse
+ KILL_TIMEOUT: 10800
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ FunctionalStatelessTestDebug:
+ needs: [BuilderDebDebug]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Functional test
+ env:
+ TEMP_PATH: ${{runner.temp}}/stateless_debug
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Stateless tests (debug, actions)'
+ REPO_COPY: ${{runner.temp}}/stateless_debug/ClickHouse
+ KILL_TIMEOUT: 10800
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+##############################################################################################
+############################ FUNCTIONAL STATEFUL TESTS #######################################
+##############################################################################################
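+# Stateful suites are expected to finish faster than the stateless ones,
+# hence the shorter KILL_TIMEOUT (3600 = 1 hour).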
+ FunctionalStatefulTestRelease:
+ needs: [BuilderDebRelease]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Functional test
+ env:
+ TEMP_PATH: ${{runner.temp}}/stateful_release
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Stateful tests (release, actions)'
+ REPO_COPY: ${{runner.temp}}/stateful_release/ClickHouse
+ KILL_TIMEOUT: 3600
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ FunctionalStatefulTestAsan:
+ needs: [BuilderDebAsan]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Functional test
+ env:
+ TEMP_PATH: ${{runner.temp}}/stateful_asan
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Stateful tests (address, actions)'
+ REPO_COPY: ${{runner.temp}}/stateful_asan/ClickHouse
+ KILL_TIMEOUT: 3600
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ FunctionalStatefulTestTsan:
+ needs: [BuilderDebTsan]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Functional test
+ env:
+ TEMP_PATH: ${{runner.temp}}/stateful_tsan
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Stateful tests (thread, actions)'
+ REPO_COPY: ${{runner.temp}}/stateful_tsan/ClickHouse
+ KILL_TIMEOUT: 3600
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ FunctionalStatefulTestMsan:
+ needs: [BuilderDebMsan]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Functional test
+ env:
+ TEMP_PATH: ${{runner.temp}}/stateful_msan
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Stateful tests (memory, actions)'
+ REPO_COPY: ${{runner.temp}}/stateful_msan/ClickHouse
+ KILL_TIMEOUT: 3600
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ FunctionalStatefulTestUBsan:
+ needs: [BuilderDebUBsan]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Functional test
+ env:
+ TEMP_PATH: ${{runner.temp}}/stateful_ubsan
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Stateful tests (ubsan, actions)'
+ REPO_COPY: ${{runner.temp}}/stateful_ubsan/ClickHouse
+ KILL_TIMEOUT: 3600
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ FunctionalStatefulTestDebug:
+ needs: [BuilderDebDebug]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Functional test
+ env:
+ TEMP_PATH: ${{runner.temp}}/stateful_debug
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Stateful tests (debug, actions)'
+ REPO_COPY: ${{runner.temp}}/stateful_debug/ClickHouse
+ KILL_TIMEOUT: 3600
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+##############################################################################################
+######################################### STRESS TESTS #######################################
+##############################################################################################
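+# Stress tests reuse the sanitizer packages built above; stress_check.py
+# takes no KILL_TIMEOUT and presumably bounds the run duration itself.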
+ StressTestAsan:
+ needs: [BuilderDebAsan]
+ runs-on: [self-hosted, stress-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Stress test
+ env:
+ TEMP_PATH: ${{runner.temp}}/stress_address
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Stress test (address, actions)'
+ REPO_COPY: ${{runner.temp}}/stress_address/ClickHouse
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 stress_check.py "$CHECK_NAME"
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ StressTestTsan:
+ needs: [BuilderDebTsan]
+ runs-on: [self-hosted, stress-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Stress test
+ env:
+ TEMP_PATH: ${{runner.temp}}/stress_thread
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Stress test (thread, actions)'
+ REPO_COPY: ${{runner.temp}}/stress_thread/ClickHouse
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 stress_check.py "$CHECK_NAME"
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ StressTestMsan:
+ needs: [BuilderDebMsan]
+ runs-on: [self-hosted, stress-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Stress test
+ env:
+ TEMP_PATH: ${{runner.temp}}/stress_memory
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Stress test (memory, actions)'
+ REPO_COPY: ${{runner.temp}}/stress_memory/ClickHouse
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 stress_check.py "$CHECK_NAME"
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ StressTestUBsan:
+ needs: [BuilderDebUBsan]
+ runs-on: [self-hosted, stress-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Stress test
+ env:
+ TEMP_PATH: ${{runner.temp}}/stress_undefined
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Stress test (undefined, actions)'
+ REPO_COPY: ${{runner.temp}}/stress_undefined/ClickHouse
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 stress_check.py "$CHECK_NAME"
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ StressTestDebug:
+ needs: [BuilderDebDebug]
+ runs-on: [self-hosted, stress-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Stress test
+ env:
+ TEMP_PATH: ${{runner.temp}}/stress_debug
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Stress test (debug, actions)'
+ REPO_COPY: ${{runner.temp}}/stress_debug/ClickHouse
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 stress_check.py "$CHECK_NAME"
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+#############################################################################################
+############################# INTEGRATION TESTS #############################################
+#############################################################################################
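+# Integration tests require both the corresponding build and its stateless
+# suite, so they only start once basic functionality has already passed.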
+ IntegrationTestsAsan:
+ needs: [BuilderDebAsan, FunctionalStatelessTestAsan]
+ runs-on: [self-hosted, stress-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Integration test
+ env:
+ TEMP_PATH: ${{runner.temp}}/integration_tests_asan
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Integration tests (asan, actions)'
+ REPO_COPY: ${{runner.temp}}/integration_tests_asan/ClickHouse
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 integration_test_check.py "$CHECK_NAME"
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ IntegrationTestsTsan:
+ needs: [BuilderDebTsan, FunctionalStatelessTestTsan]
+ runs-on: [self-hosted, stress-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Integration test
+ env:
+ TEMP_PATH: ${{runner.temp}}/integration_tests_tsan
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Integration tests (thread, actions)'
+ REPO_COPY: ${{runner.temp}}/integration_tests_tsan/ClickHouse
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 integration_test_check.py "$CHECK_NAME"
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ IntegrationTestsRelease:
+ needs: [BuilderDebRelease, FunctionalStatelessTestRelease]
+ runs-on: [self-hosted, stress-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Integration test
+ env:
+ TEMP_PATH: ${{runner.temp}}/integration_tests_release
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Integration tests (release, actions)'
+ REPO_COPY: ${{runner.temp}}/integration_tests_release/ClickHouse
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 integration_test_check.py "$CHECK_NAME"
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+##############################################################################################
+##################################### AST FUZZERS ############################################
+##############################################################################################
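+# Each AST fuzzer job runs ast_fuzzer_check.py against one sanitizer or
+# debug build, exercising the server with randomly mutated queries.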
+ ASTFuzzerTestAsan:
+ needs: [BuilderDebAsan]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Fuzzer
+ env:
+ TEMP_PATH: ${{runner.temp}}/ast_fuzzer_asan
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'AST fuzzer (ASan, actions)'
+ REPO_COPY: ${{runner.temp}}/ast_fuzzer_asan/ClickHouse
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 ast_fuzzer_check.py "$CHECK_NAME"
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ ASTFuzzerTestTsan:
+ needs: [BuilderDebTsan]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Fuzzer
+ env:
+ TEMP_PATH: ${{runner.temp}}/ast_fuzzer_tsan
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'AST fuzzer (TSan, actions)'
+ REPO_COPY: ${{runner.temp}}/ast_fuzzer_tsan/ClickHouse
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 ast_fuzzer_check.py "$CHECK_NAME"
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ ASTFuzzerTestUBSan:
+ needs: [BuilderDebUBsan]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Fuzzer
+ env:
+ TEMP_PATH: ${{runner.temp}}/ast_fuzzer_ubsan
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'AST fuzzer (UBSan, actions)'
+ REPO_COPY: ${{runner.temp}}/ast_fuzzer_ubsan/ClickHouse
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 ast_fuzzer_check.py "$CHECK_NAME"
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ ASTFuzzerTestMSan:
+ needs: [BuilderDebMsan]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Fuzzer
+ env:
+ TEMP_PATH: ${{runner.temp}}/ast_fuzzer_msan
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'AST fuzzer (MSan, actions)'
+ REPO_COPY: ${{runner.temp}}/ast_fuzzer_msan/ClickHouse
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 ast_fuzzer_check.py "$CHECK_NAME"
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ ASTFuzzerTestDebug:
+ needs: [BuilderDebDebug]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Fuzzer
+ env:
+ TEMP_PATH: ${{runner.temp}}/ast_fuzzer_debug
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'AST fuzzer (debug, actions)'
+ REPO_COPY: ${{runner.temp}}/ast_fuzzer_debug/ClickHouse
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 ast_fuzzer_check.py "$CHECK_NAME"
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+#############################################################################################
+#################################### UNIT TESTS #############################################
+#############################################################################################
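+# unit_tests_check.py runs the unit-test binary from each build; note that
+# UnitTestsReleaseClang consumes the binary_release artifact from
+# BuilderBinRelease rather than a deb package.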
+ UnitTestsAsan:
+ needs: [BuilderDebAsan]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Unit test
+ env:
+ TEMP_PATH: ${{runner.temp}}/unit_tests_asan
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Unit tests (asan, actions)'
+ REPO_COPY: ${{runner.temp}}/unit_tests_asan/ClickHouse
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 unit_tests_check.py "$CHECK_NAME"
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ UnitTestsReleaseClang:
+ needs: [BuilderBinRelease]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Unit test
+ env:
+ TEMP_PATH: ${{runner.temp}}/unit_tests_release_clang
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Unit tests (release-clang, actions)'
+ REPO_COPY: ${{runner.temp}}/unit_tests_release_clang/ClickHouse
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 unit_tests_check.py "$CHECK_NAME"
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ UnitTestsTsan:
+ needs: [BuilderDebTsan]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Unit test
+ env:
+ TEMP_PATH: ${{runner.temp}}/unit_tests_tsan
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Unit tests (tsan, actions)'
+ REPO_COPY: ${{runner.temp}}/unit_tests_tsan/ClickHouse
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 unit_tests_check.py "$CHECK_NAME"
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ UnitTestsMsan:
+ needs: [BuilderDebMsan]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Unit test
+ env:
+ TEMP_PATH: ${{runner.temp}}/unit_tests_msan
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Unit tests (msan, actions)'
+ REPO_COPY: ${{runner.temp}}/unit_tests_msan/ClickHouse
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 unit_tests_check.py "$CHECK_NAME"
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ UnitTestsUBsan:
+ needs: [BuilderDebUBsan]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Unit test
+ env:
+ TEMP_PATH: ${{runner.temp}}/unit_tests_ubsan
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Unit tests (ubsan, actions)'
+ REPO_COPY: ${{runner.temp}}/unit_tests_ubsan/ClickHouse
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 unit_tests_check.py "$CHECK_NAME"
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
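+# FinishCheck is the final aggregation job: every check above must appear in
+# its `needs` list so that finish_check.py only reports overall success once
+# the whole matrix has completed.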
+ FinishCheck:
+ needs:
+ - DockerHubPush
+ - BuilderReport
+ - FunctionalStatelessTestDebug
+ - FunctionalStatelessTestRelease
+ - FunctionalStatelessTestAsan
+ - FunctionalStatelessTestTsan
+ - FunctionalStatelessTestMsan
+ - FunctionalStatelessTestUBsan
+ - FunctionalStatefulTestDebug
+ - FunctionalStatefulTestRelease
+ - FunctionalStatefulTestAsan
+ - FunctionalStatefulTestTsan
+ - FunctionalStatefulTestMsan
+ - FunctionalStatefulTestUBsan
+ - StressTestDebug
+ - StressTestAsan
+ - StressTestTsan
+ - StressTestMsan
+ - StressTestUBsan
+ - IntegrationTestsAsan
+ - IntegrationTestsRelease
+ - IntegrationTestsTsan
+ - CompatibilityCheck
+ - ASTFuzzerTestDebug
+ - ASTFuzzerTestAsan
+ - ASTFuzzerTestTsan
+ - ASTFuzzerTestMSan
+ - ASTFuzzerTestUBSan
+ - UnitTestsAsan
+ - UnitTestsTsan
+ - UnitTestsMsan
+ - UnitTestsUBsan
+ - UnitTestsReleaseClang
+ - SplitBuildSmokeTest
+ - PVSCheck
+ runs-on: [self-hosted, style-checker]
+ steps:
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Finish label
+ run: |
+ cd $GITHUB_WORKSPACE/tests/ci
+ python3 finish_check.py
diff --git a/.github/workflows/release_branches.yml b/.github/workflows/release_branches.yml
new file mode 100644
index 00000000000..988db77e62a
--- /dev/null
+++ b/.github/workflows/release_branches.yml
@@ -0,0 +1,933 @@
+name: ReleaseCI
+on: # yamllint disable-line rule:truthy
+ push:
+ branches:
+ - '21.**'
+ - '22.**'
+ - '23.**'
+ - '24.**'
+ - 'backport/**'
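+# These patterns cover release branches (e.g. 21.11) and backport branches.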
+jobs:
+ DockerHubPush:
+ runs-on: [self-hosted, style-checker]
+ steps:
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Images check
+ run: |
+ cd $GITHUB_WORKSPACE/tests/ci
+ python3 docker_images_check.py
+ - name: Upload images files to artifacts
+ uses: actions/upload-artifact@v2
+ with:
+ name: changed_images
+ path: ${{ runner.temp }}/docker_images_check/changed_images.json
+ CompatibilityCheck:
+ needs: [BuilderDebRelease]
+ runs-on: [self-hosted, style-checker]
+ steps:
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: CompatibilityCheck
+ env:
+ TEMP_PATH: ${{runner.temp}}/compatibility_check
+ REPO_COPY: ${{runner.temp}}/compatibility_check/ClickHouse
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci && python3 compatibility_check.py
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+#########################################################################################
+#################################### ORDINARY BUILDS ####################################
+#########################################################################################
+ BuilderDebRelease:
+ needs: [DockerHubPush]
+ runs-on: [self-hosted, builder]
+ steps:
+ - name: Download changed images
+ uses: actions/download-artifact@v2
+ with:
+ name: changed_images
+ path: ${{ runner.temp }}/images_path
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ with:
+ submodules: 'recursive'
+ fetch-depth: 0 # otherwise we will have no info about contributors
+ - name: Build
+ env:
+ TEMP_PATH: ${{runner.temp}}/build_check
+ IMAGES_PATH: ${{runner.temp}}/images_path
+ REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
+ CACHES_PATH: ${{runner.temp}}/../ccaches
+ CHECK_NAME: 'ClickHouse build check (actions)'
+ BUILD_NAME: 'package_release'
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
+ - name: Upload build URLs to artifacts
+ uses: actions/upload-artifact@v2
+ with:
+ name: ${{ env.BUILD_NAME }}
+ path: ${{ runner.temp }}/build_check/${{ env.BUILD_NAME }}.json
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ BuilderDebAsan:
+ needs: [DockerHubPush]
+ runs-on: [self-hosted, builder]
+ steps:
+ - name: Download changed images
+ uses: actions/download-artifact@v2
+ with:
+ name: changed_images
+ path: ${{ runner.temp }}/images_path
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ with:
+ submodules: 'recursive'
+ fetch-depth: 0 # otherwise we will have no info about contributors
+ - name: Build
+ env:
+ TEMP_PATH: ${{runner.temp}}/build_check
+ IMAGES_PATH: ${{runner.temp}}/images_path
+ REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
+ CACHES_PATH: ${{runner.temp}}/../ccaches
+ CHECK_NAME: 'ClickHouse build check (actions)'
+ BUILD_NAME: 'package_asan'
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
+ - name: Upload build URLs to artifacts
+ uses: actions/upload-artifact@v2
+ with:
+ name: ${{ env.BUILD_NAME }}
+ path: ${{ runner.temp }}/build_check/${{ env.BUILD_NAME }}.json
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ BuilderDebUBsan:
+ needs: [DockerHubPush]
+ runs-on: [self-hosted, builder]
+ steps:
+ - name: Download changed images
+ uses: actions/download-artifact@v2
+ with:
+ name: changed_images
+ path: ${{ runner.temp }}/images_path
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ with:
+ submodules: 'recursive'
+ fetch-depth: 0 # otherwise we will have no info about contributors
+ - name: Build
+ env:
+ TEMP_PATH: ${{runner.temp}}/build_check
+ IMAGES_PATH: ${{runner.temp}}/images_path
+ REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
+ CACHES_PATH: ${{runner.temp}}/../ccaches
+ CHECK_NAME: 'ClickHouse build check (actions)'
+ BUILD_NAME: 'package_ubsan'
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
+ - name: Upload build URLs to artifacts
+ uses: actions/upload-artifact@v2
+ with:
+ name: ${{ env.BUILD_NAME }}
+ path: ${{ runner.temp }}/build_check/${{ env.BUILD_NAME }}.json
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ BuilderDebTsan:
+ needs: [DockerHubPush]
+ runs-on: [self-hosted, builder]
+ steps:
+ - name: Download changed images
+ uses: actions/download-artifact@v2
+ with:
+ name: changed_images
+ path: ${{ runner.temp }}/images_path
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ with:
+ submodules: 'recursive'
+ fetch-depth: 0 # otherwise we will have no info about contributors
+ - name: Build
+ env:
+ TEMP_PATH: ${{runner.temp}}/build_check
+ IMAGES_PATH: ${{runner.temp}}/images_path
+ REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
+ CACHES_PATH: ${{runner.temp}}/../ccaches
+ CHECK_NAME: 'ClickHouse build check (actions)'
+ BUILD_NAME: 'package_tsan'
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
+ - name: Upload build URLs to artifacts
+ uses: actions/upload-artifact@v2
+ with:
+ name: ${{ env.BUILD_NAME }}
+ path: ${{ runner.temp }}/build_check/${{ env.BUILD_NAME }}.json
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ BuilderDebMsan:
+ needs: [DockerHubPush]
+ runs-on: [self-hosted, builder]
+ steps:
+ - name: Download changed images
+ uses: actions/download-artifact@v2
+ with:
+ name: changed_images
+ path: ${{ runner.temp }}/images_path
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ with:
+ submodules: 'recursive'
+ fetch-depth: 0 # otherwise we will have no info about contributors
+ - name: Build
+ env:
+ TEMP_PATH: ${{runner.temp}}/build_check
+ IMAGES_PATH: ${{runner.temp}}/images_path
+ REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
+ CACHES_PATH: ${{runner.temp}}/../ccaches
+ CHECK_NAME: 'ClickHouse build check (actions)'
+ BUILD_NAME: 'package_msan'
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
+ - name: Upload build URLs to artifacts
+ uses: actions/upload-artifact@v2
+ with:
+ name: ${{ env.BUILD_NAME }}
+ path: ${{ runner.temp }}/build_check/${{ env.BUILD_NAME }}.json
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ BuilderDebDebug:
+ needs: [DockerHubPush]
+ runs-on: [self-hosted, builder]
+ steps:
+ - name: Download changed images
+ uses: actions/download-artifact@v2
+ with:
+ name: changed_images
+ path: ${{ runner.temp }}/images_path
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ with:
+ submodules: 'recursive'
+ fetch-depth: 0 # otherwise we will have no info about contributors
+ - name: Build
+ env:
+ TEMP_PATH: ${{runner.temp}}/build_check
+ IMAGES_PATH: ${{runner.temp}}/images_path
+ REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
+ CACHES_PATH: ${{runner.temp}}/../ccaches
+ CHECK_NAME: 'ClickHouse build check (actions)'
+ BUILD_NAME: 'package_debug'
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
+ - name: Upload build URLs to artifacts
+ uses: actions/upload-artifact@v2
+ with:
+ name: ${{ env.BUILD_NAME }}
+ path: ${{ runner.temp }}/build_check/${{ env.BUILD_NAME }}.json
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+############################################################################################
+##################################### BUILD REPORTER #######################################
+############################################################################################
+ BuilderReport:
+ needs:
+ - BuilderDebRelease
+ - BuilderDebAsan
+ - BuilderDebTsan
+ - BuilderDebUBsan
+ - BuilderDebMsan
+ - BuilderDebDebug
+ runs-on: [self-hosted, style-checker]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Report Builder
+ env:
+ TEMP_PATH: ${{runner.temp}}/report_check
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'ClickHouse build check (actions)'
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cd $GITHUB_WORKSPACE/tests/ci
+ python3 build_report_check.py "$CHECK_NAME"
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+##############################################################################################
+########################### FUNCTIONAL STATELESS TESTS #######################################
+##############################################################################################
+ FunctionalStatelessTestRelease:
+ needs: [BuilderDebRelease]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Functional test
+ env:
+ TEMP_PATH: ${{runner.temp}}/stateless_release
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Stateless tests (release, actions)'
+ REPO_COPY: ${{runner.temp}}/stateless_release/ClickHouse
+ KILL_TIMEOUT: 10800
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ FunctionalStatelessTestAsan:
+ needs: [BuilderDebAsan]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Functional test
+ env:
+ TEMP_PATH: ${{runner.temp}}/stateless_asan
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Stateless tests (address, actions)'
+ REPO_COPY: ${{runner.temp}}/stateless_asan/ClickHouse
+ KILL_TIMEOUT: 10800
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ FunctionalStatelessTestTsan:
+ needs: [BuilderDebTsan]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Functional test
+ env:
+ TEMP_PATH: ${{runner.temp}}/stateless_tsan
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Stateless tests (thread, actions)'
+ REPO_COPY: ${{runner.temp}}/stateless_tsan/ClickHouse
+ KILL_TIMEOUT: 10800
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ FunctionalStatelessTestUBsan:
+ needs: [BuilderDebUBsan]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Functional test
+ env:
+ TEMP_PATH: ${{runner.temp}}/stateless_ubsan
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Stateless tests (ubsan, actions)'
+ REPO_COPY: ${{runner.temp}}/stateless_ubsan/ClickHouse
+ KILL_TIMEOUT: 10800
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ FunctionalStatelessTestMsan:
+ needs: [BuilderDebMsan]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Functional test
+ env:
+ TEMP_PATH: ${{runner.temp}}/stateless_memory
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Stateless tests (memory, actions)'
+ REPO_COPY: ${{runner.temp}}/stateless_memory/ClickHouse
+ KILL_TIMEOUT: 10800
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ FunctionalStatelessTestDebug:
+ needs: [BuilderDebDebug]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Functional test
+ env:
+ TEMP_PATH: ${{runner.temp}}/stateless_debug
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Stateless tests (debug, actions)'
+ REPO_COPY: ${{runner.temp}}/stateless_debug/ClickHouse
+ KILL_TIMEOUT: 10800
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+##############################################################################################
+############################ FUNCTIONAL STATEFUL TESTS #######################################
+##############################################################################################
+ FunctionalStatefulTestRelease:
+ needs: [BuilderDebRelease]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Functional test
+ env:
+ TEMP_PATH: ${{runner.temp}}/stateful_release
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Stateful tests (release, actions)'
+ REPO_COPY: ${{runner.temp}}/stateful_release/ClickHouse
+ KILL_TIMEOUT: 3600
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ FunctionalStatefulTestAsan:
+ needs: [BuilderDebAsan]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Functional test
+ env:
+ TEMP_PATH: ${{runner.temp}}/stateful_asan
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Stateful tests (address, actions)'
+ REPO_COPY: ${{runner.temp}}/stateful_asan/ClickHouse
+ KILL_TIMEOUT: 3600
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ FunctionalStatefulTestTsan:
+ needs: [BuilderDebTsan]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Functional test
+ env:
+ TEMP_PATH: ${{runner.temp}}/stateful_tsan
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Stateful tests (thread, actions)'
+ REPO_COPY: ${{runner.temp}}/stateful_tsan/ClickHouse
+ KILL_TIMEOUT: 3600
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ FunctionalStatefulTestMsan:
+ needs: [BuilderDebMsan]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Functional test
+ env:
+ TEMP_PATH: ${{runner.temp}}/stateful_msan
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Stateful tests (memory, actions)'
+ REPO_COPY: ${{runner.temp}}/stateful_msan/ClickHouse
+ KILL_TIMEOUT: 3600
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ FunctionalStatefulTestUBsan:
+ needs: [BuilderDebUBsan]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Functional test
+ env:
+ TEMP_PATH: ${{runner.temp}}/stateful_ubsan
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Stateful tests (ubsan, actions)'
+ REPO_COPY: ${{runner.temp}}/stateful_ubsan/ClickHouse
+ KILL_TIMEOUT: 3600
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ FunctionalStatefulTestDebug:
+ needs: [BuilderDebDebug]
+ runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Functional test
+ env:
+ TEMP_PATH: ${{runner.temp}}/stateful_debug
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Stateful tests (debug, actions)'
+ REPO_COPY: ${{runner.temp}}/stateful_debug/ClickHouse
+ KILL_TIMEOUT: 3600
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+##############################################################################################
+######################################### STRESS TESTS #######################################
+##############################################################################################
+ StressTestAsan:
+ needs: [BuilderDebAsan]
+ runs-on: [self-hosted, stress-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Stress test
+ env:
+ TEMP_PATH: ${{runner.temp}}/stress_asan
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Stress test (address, actions)'
+ REPO_COPY: ${{runner.temp}}/stress_asan/ClickHouse
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 stress_check.py "$CHECK_NAME"
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ StressTestTsan:
+ needs: [BuilderDebTsan]
+ runs-on: [self-hosted, stress-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Stress test
+ env:
+ TEMP_PATH: ${{runner.temp}}/stress_thread
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Stress test (thread, actions)'
+ REPO_COPY: ${{runner.temp}}/stress_thread/ClickHouse
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 stress_check.py "$CHECK_NAME"
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ StressTestMsan:
+ needs: [BuilderDebMsan]
+ runs-on: [self-hosted, stress-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Stress test
+ env:
+ TEMP_PATH: ${{runner.temp}}/stress_memory
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Stress test (memory, actions)'
+ REPO_COPY: ${{runner.temp}}/stress_memory/ClickHouse
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 stress_check.py "$CHECK_NAME"
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ StressTestUBsan:
+ needs: [BuilderDebUBsan]
+ runs-on: [self-hosted, stress-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Stress test
+ env:
+ TEMP_PATH: ${{runner.temp}}/stress_undefined
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Stress test (undefined, actions)'
+ REPO_COPY: ${{runner.temp}}/stress_undefined/ClickHouse
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 stress_check.py "$CHECK_NAME"
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ StressTestDebug:
+ needs: [BuilderDebDebug]
+ runs-on: [self-hosted, stress-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Stress test
+ env:
+ TEMP_PATH: ${{runner.temp}}/stress_debug
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Stress test (debug, actions)'
+ REPO_COPY: ${{runner.temp}}/stress_debug/ClickHouse
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 stress_check.py "$CHECK_NAME"
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+#############################################################################################
+############################# INTEGRATION TESTS #############################################
+#############################################################################################
+ IntegrationTestsAsan:
+ needs: [BuilderDebAsan, FunctionalStatelessTestAsan]
+ runs-on: [self-hosted, stress-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Integration test
+ env:
+ TEMP_PATH: ${{runner.temp}}/integration_tests_asan
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Integration tests (asan, actions)'
+ REPO_COPY: ${{runner.temp}}/integration_tests_asan/ClickHouse
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 integration_test_check.py "$CHECK_NAME"
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ IntegrationTestsTsan:
+ needs: [BuilderDebTsan, FunctionalStatelessTestTsan]
+ runs-on: [self-hosted, stress-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Integration test
+ env:
+ TEMP_PATH: ${{runner.temp}}/integration_tests_tsan
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Integration tests (thread, actions)'
+ REPO_COPY: ${{runner.temp}}/integration_tests_tsan/ClickHouse
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 integration_test_check.py "$CHECK_NAME"
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
+ IntegrationTestsRelease:
+ needs: [BuilderDebRelease, FunctionalStatelessTestRelease]
+ runs-on: [self-hosted, stress-tester]
+ steps:
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{runner.temp}}/reports_dir
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Integration test
+ env:
+ TEMP_PATH: ${{runner.temp}}/integration_tests_release
+ REPORTS_PATH: ${{runner.temp}}/reports_dir
+ CHECK_NAME: 'Integration tests (release, actions)'
+ REPO_COPY: ${{runner.temp}}/integration_tests_release/ClickHouse
+ run: |
+ sudo rm -fr $TEMP_PATH
+ mkdir -p $TEMP_PATH
+ cp -r $GITHUB_WORKSPACE $TEMP_PATH
+ cd $REPO_COPY/tests/ci
+ python3 integration_test_check.py "$CHECK_NAME"
+ - name: Cleanup
+ if: always()
+ run: |
+ docker kill $(docker ps -q) ||:
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr $TEMP_PATH
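+#############################################################################################
+##################################### FINISH CHECK ##########################################
+#############################################################################################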
+ FinishCheck:
+ needs:
+ - DockerHubPush
+ - BuilderReport
+ - FunctionalStatelessTestDebug
+ - FunctionalStatelessTestRelease
+ - FunctionalStatelessTestAsan
+ - FunctionalStatelessTestTsan
+ - FunctionalStatelessTestMsan
+ - FunctionalStatelessTestUBsan
+ - FunctionalStatefulTestDebug
+ - FunctionalStatefulTestRelease
+ - FunctionalStatefulTestAsan
+ - FunctionalStatefulTestTsan
+ - FunctionalStatefulTestMsan
+ - FunctionalStatefulTestUBsan
+ - StressTestDebug
+ - StressTestAsan
+ - StressTestTsan
+ - StressTestMsan
+ - StressTestUBsan
+ - IntegrationTestsAsan
+ - IntegrationTestsRelease
+ - IntegrationTestsTsan
+ - CompatibilityCheck
+ runs-on: [self-hosted, style-checker]
+ steps:
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Finish label
+ run: |
+ cd $GITHUB_WORKSPACE/tests/ci
+ python3 finish_check.py
diff --git a/.gitmodules b/.gitmodules
index 8ad81b5094f..5672b89b4d2 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -140,7 +140,7 @@
url = https://github.com/ClickHouse-Extras/libc-headers.git
[submodule "contrib/replxx"]
path = contrib/replxx
- url = https://github.com/AmokHuginnsson/replxx.git
+ url = https://github.com/ClickHouse-Extras/replxx.git
[submodule "contrib/avro"]
path = contrib/avro
url = https://github.com/ClickHouse-Extras/avro.git
@@ -171,12 +171,6 @@
[submodule "contrib/sentry-native"]
path = contrib/sentry-native
url = https://github.com/ClickHouse-Extras/sentry-native.git
-[submodule "contrib/gcem"]
- path = contrib/gcem
- url = https://github.com/kthohr/gcem.git
-[submodule "contrib/stats"]
- path = contrib/stats
- url = https://github.com/kthohr/stats.git
[submodule "contrib/krb5"]
path = contrib/krb5
url = https://github.com/ClickHouse-Extras/krb5
diff --git a/CHANGELOG.md b/CHANGELOG.md
index adaaa0f1bc7..f34725448f2 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,269 @@
+### ClickHouse release v21.11, 2021-11-09
+
+#### Backward Incompatible Change
+
+* Change order of json_path and json arguments in SQL/JSON functions (to be consistent with the standard). Closes [#30449](https://github.com/ClickHouse/ClickHouse/issues/30449). [#30474](https://github.com/ClickHouse/ClickHouse/pull/30474) ([Kseniia Sumarokova](https://github.com/kssenii)).
+* Remove `MergeTree` table setting `write_final_mark`. It will always be `true`. [#30455](https://github.com/ClickHouse/ClickHouse/pull/30455) ([Kseniia Sumarokova](https://github.com/kssenii)). No action is required; all tables are compatible with the new version.
+* Function `bayesAB` is removed. Please help to bring this function back, refreshed. This closes [#26233](https://github.com/ClickHouse/ClickHouse/issues/26233). [#29934](https://github.com/ClickHouse/ClickHouse/pull/29934) ([alexey-milovidov](https://github.com/alexey-milovidov)).
+* This is relevant only if you already started using the experimental `clickhouse-keeper` support. ClickHouse Keeper snapshots are now compressed with the `ZSTD` codec by default instead of custom ClickHouse LZ4 block compression. This behavior can be turned off with the `compress_snapshots_with_zstd_format` coordination setting (it must be equal on all quorum replicas). Backward incompatibility is quite rare and may happen only when a new node sends a snapshot (which happens during recovery) to an old node that is unable to read snapshots in ZSTD format. [#29417](https://github.com/ClickHouse/ClickHouse/pull/29417) ([alesapin](https://github.com/alesapin)).
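+
+A short illustration of the new argument order (the JSON literal and path here are only an example): the JSON document now comes first and the path second, as in the standard.
+
+```sql
+SELECT JSON_VALUE('{"a": {"b": 1}}', '$.a.b'); -- returns '1'
+```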
+
+#### New Feature
+
+* New asynchronous INSERT mode allows accumulating inserted data and storing it in a single batch in the background (see the sketch after this list). On the client it can be enabled by the setting `async_insert` for `INSERT` queries with data inlined in the query or in a separate buffer (e.g. for `INSERT` queries via the HTTP protocol). If `wait_for_async_insert` is true (the default), the client will wait until the data is flushed to the table. On the server side it is controlled by the settings `async_insert_threads`, `async_insert_max_data_size` and `async_insert_busy_timeout_ms`. Implements [#18282](https://github.com/ClickHouse/ClickHouse/issues/18282). [#27537](https://github.com/ClickHouse/ClickHouse/pull/27537) ([Anton Popov](https://github.com/CurtizJ)). [#20557](https://github.com/ClickHouse/ClickHouse/pull/20557) ([Ivan](https://github.com/abyss7)). Notes on performance: with asynchronous inserts you can do up to around 10 000 individual INSERT queries per second, so it is still recommended to insert in batches if you want to achieve performance of up to millions of inserted rows per second.
+* Add interactive mode for `clickhouse-local`, so you can just run `clickhouse-local` to get a command-line ClickHouse interface without connecting to a server, and process data from files and external data sources. Also merge the code of `clickhouse-client` and `clickhouse-local` together. Closes [#7203](https://github.com/ClickHouse/ClickHouse/issues/7203). Closes [#25516](https://github.com/ClickHouse/ClickHouse/issues/25516). Closes [#22401](https://github.com/ClickHouse/ClickHouse/issues/22401). [#26231](https://github.com/ClickHouse/ClickHouse/pull/26231) ([Kseniia Sumarokova](https://github.com/kssenii)).
+* Added support for executable (scriptable) user defined functions. These are UDFs that can be written in any programming language. [#28803](https://github.com/ClickHouse/ClickHouse/pull/28803) ([Maksim Kita](https://github.com/kitaisreal)).
+* Allow predefined connections to external data sources. This avoids specifying credentials or addresses when using external data sources; they can be referenced by name instead. Closes [#28367](https://github.com/ClickHouse/ClickHouse/issues/28367). [#28577](https://github.com/ClickHouse/ClickHouse/pull/28577) ([Kseniia Sumarokova](https://github.com/kssenii)).
+* Added `INFORMATION_SCHEMA` database with `SCHEMATA`, `TABLES`, `VIEWS` and `COLUMNS` views over the corresponding tables in the `system` database. Closes [#9770](https://github.com/ClickHouse/ClickHouse/issues/9770). [#28691](https://github.com/ClickHouse/ClickHouse/pull/28691) ([tavplubix](https://github.com/tavplubix)).
+* Support `EXISTS (subquery)`. Closes [#6852](https://github.com/ClickHouse/ClickHouse/issues/6852). [#29731](https://github.com/ClickHouse/ClickHouse/pull/29731) ([Kseniia Sumarokova](https://github.com/kssenii)).
+* Session logging for audit. Logging all successful and failed login and logout events to a new `system.session_log` table. [#22415](https://github.com/ClickHouse/ClickHouse/pull/22415) ([Vasily Nemkov](https://github.com/Enmk)) ([Vitaly Baranov](https://github.com/vitlibar)).
+* Support multidimensional cosine distance and euclidean distance functions; L1, L2, Lp, Linf distances and norms. Scalar product on tuples and various arithmetic operators on tuples. This fully closes [#4509](https://github.com/ClickHouse/ClickHouse/issues/4509) and even more. [#27933](https://github.com/ClickHouse/ClickHouse/pull/27933) ([Alexey Boykov](https://github.com/mathalex)).
+* Add support for compression and decompression for `INTO OUTFILE` and `FROM INFILE` (with autodetect or with additional optional parameter). [#27135](https://github.com/ClickHouse/ClickHouse/pull/27135) ([Filatenkov Artur](https://github.com/FArthur-cmd)).
+* Add CORS (Cross-Origin Resource Sharing) support with HTTP `OPTIONS` request. This means Grafana will now work with serverless requests without kludges. Closes [#18693](https://github.com/ClickHouse/ClickHouse/issues/18693). [#29155](https://github.com/ClickHouse/ClickHouse/pull/29155) ([Filatenkov Artur](https://github.com/FArthur-cmd)).
+* Queries with JOIN ON now support disjunctions (OR). [#21320](https://github.com/ClickHouse/ClickHouse/pull/21320) ([Ilya Golshtein](https://github.com/ilejn)).
+* Added function `tokens` that allows splitting a string into tokens using non-alphanumeric ASCII characters as separators. [#29981](https://github.com/ClickHouse/ClickHouse/pull/29981) ([Maksim Kita](https://github.com/kitaisreal)). Added function `ngrams` to extract ngrams from text. Closes [#29699](https://github.com/ClickHouse/ClickHouse/issues/29699). [#29738](https://github.com/ClickHouse/ClickHouse/pull/29738) ([Maksim Kita](https://github.com/kitaisreal)).
+* Add functions for Unicode normalization: `normalizeUTF8NFC`, `normalizeUTF8NFD`, `normalizeUTF8NFKC`, `normalizeUTF8NFKD` functions. [#28633](https://github.com/ClickHouse/ClickHouse/pull/28633) ([darkkeks](https://github.com/darkkeks)).
+* Streaming consumption of application log files in ClickHouse with `FileLog` table engine. It's like `Kafka` or `RabbitMQ` engine but for append-only and rotated logs in local filesystem. Closes [#6953](https://github.com/ClickHouse/ClickHouse/issues/6953). [#25969](https://github.com/ClickHouse/ClickHouse/pull/25969) ([flynn](https://github.com/ucasfl)) ([Kseniia Sumarokova](https://github.com/kssenii)).
+* Add `CapnProto` output format, refactor `CapnProto` input format. [#29291](https://github.com/ClickHouse/ClickHouse/pull/29291) ([Kruglov Pavel](https://github.com/Avogar)).
+* Allow to write number in query as binary literal. Example `SELECT 0b001;`. [#29304](https://github.com/ClickHouse/ClickHouse/pull/29304) ([Maksim Kita](https://github.com/kitaisreal)).
+* Added `hashed_array` dictionary type. It saves memory when using dictionaries with multiple attributes. Closes [#30236](https://github.com/ClickHouse/ClickHouse/issues/30236). [#30242](https://github.com/ClickHouse/ClickHouse/pull/30242) ([Maksim Kita](https://github.com/kitaisreal)).
+* Added `JSONExtractKeys` function. [#30056](https://github.com/ClickHouse/ClickHouse/pull/30056) ([Vitaly](https://github.com/orloffv)).
+* Add a function `getOSKernelVersion` - it returns a string with OS kernel version. [#29755](https://github.com/ClickHouse/ClickHouse/pull/29755) ([Memo](https://github.com/Joeywzr)).
+* Added `MD4` and `SHA384` functions. MD4 is an obsolete and insecure hash function, it can be used only in rare cases when MD4 is already being used in some legacy system and you need to get exactly the same result. [#29602](https://github.com/ClickHouse/ClickHouse/pull/29602) ([Nikita Tikhomirov](https://github.com/NSTikhomirov)).
+* HSTS can be enabled for the ClickHouse HTTP server by setting `hsts_max_age` in the configuration file to a positive number. [#29516](https://github.com/ClickHouse/ClickHouse/pull/29516) ([凌涛](https://github.com/lingtaolf)).
+* Huawei OBS Storage support. Closes [#24294](https://github.com/ClickHouse/ClickHouse/issues/24294). [#29511](https://github.com/ClickHouse/ClickHouse/pull/29511) ([kevin wan](https://github.com/MaxWk)).
+* New function `mapContainsKeyLike` to check whether a map contains a key matching a simple regular expression. [#29471](https://github.com/ClickHouse/ClickHouse/pull/29471) ([凌涛](https://github.com/lingtaolf)). New function `mapExtractKeyLike` to get a sub-map containing only the elements whose keys match the specified pattern. [#30793](https://github.com/ClickHouse/ClickHouse/pull/30793) ([凌涛](https://github.com/lingtaolf)).
+* Implemented `ALTER TABLE x MODIFY COMMENT`. [#29264](https://github.com/ClickHouse/ClickHouse/pull/29264) ([Vasily Nemkov](https://github.com/Enmk)).
+* Adds H3 inspection functions that are missing from ClickHouse but are available via the H3 API: https://h3geo.org/docs/api/inspection. [#29209](https://github.com/ClickHouse/ClickHouse/pull/29209) ([Bharat Nallan](https://github.com/bharatnc)).
+* Allow non-replicated ALTER TABLE FETCH and ATTACH in Replicated databases. [#29202](https://github.com/ClickHouse/ClickHouse/pull/29202) ([Kevin Michel](https://github.com/kmichel-aiven)).
+* Added a setting `output_format_csv_null_representation`: This is the same as `output_format_tsv_null_representation` but is for CSV output. [#29123](https://github.com/ClickHouse/ClickHouse/pull/29123) ([PHO](https://github.com/depressed-pho)).
+* Added function `zookeeperSessionUptime()` which returns uptime of current ZooKeeper session in seconds. [#28983](https://github.com/ClickHouse/ClickHouse/pull/28983) ([tavplubix](https://github.com/tavplubix)).
+* Implements the `h3ToGeoBoundary` function. [#28952](https://github.com/ClickHouse/ClickHouse/pull/28952) ([Ivan Veselov](https://github.com/fuzzERot)).
+* Add aggregate function `exponentialMovingAverage` that can be used as a window function. This closes [#27511](https://github.com/ClickHouse/ClickHouse/issues/27511). [#28914](https://github.com/ClickHouse/ClickHouse/pull/28914) ([alexey-milovidov](https://github.com/alexey-milovidov)).
+* Allow to include subcolumns of table columns into `DESCRIBE` query result (can be enabled by setting `describe_include_subcolumns`). [#28905](https://github.com/ClickHouse/ClickHouse/pull/28905) ([Anton Popov](https://github.com/CurtizJ)).
+* Added option `send_chunk_header` to `Executable` and `ExecutablePool`. If this option is true, the chunk row count followed by a line break is sent to the client before each chunk. [#28833](https://github.com/ClickHouse/ClickHouse/pull/28833) ([Maksim Kita](https://github.com/kitaisreal)).
+* Skip indexes `tokenbf_v1` and `ngrambf_v1` now support `Map` with a key of `String` or `FixedString` type. This enhances data skipping in queries with a map key filter. For example, given the table `CREATE TABLE map_tokenbf (row_id UInt32, map Map(String, String), INDEX map_tokenbf map TYPE ngrambf_v1(4, 256, 2, 0) GRANULARITY 1) ENGINE = MergeTree() ORDER BY row_id`, the query `SELECT * FROM map_tokenbf WHERE map['K'] = 'V'` will skip granules that do not contain the key `K`. How many rows are skipped depends on the `granularity` and `index_granularity` you set. [#28511](https://github.com/ClickHouse/ClickHouse/pull/28511) ([凌涛](https://github.com/lingtaolf)).
+* Send profile events from server to client. New packet type `ProfileEvents` was introduced. Closes [#26177](https://github.com/ClickHouse/ClickHouse/issues/26177). [#28364](https://github.com/ClickHouse/ClickHouse/pull/28364) ([Dmitry Novik](https://github.com/novikd)).
+* Bit shift operations for `FixedString` and `String` data types. This closes [#27763](https://github.com/ClickHouse/ClickHouse/issues/27763). [#28325](https://github.com/ClickHouse/ClickHouse/pull/28325) ([小路](https://github.com/nicelulu)).
+* Support dynamically adding and deleting tables for replication from PostgreSQL in the `MaterializedPostgreSQL` database engine. Support ALTER for database settings. Closes [#27573](https://github.com/ClickHouse/ClickHouse/issues/27573). [#28301](https://github.com/ClickHouse/ClickHouse/pull/28301) ([Kseniia Sumarokova](https://github.com/kssenii)).
+* Added function `accurateCastOrDefault(x, T)`. Closes [#21330](https://github.com/ClickHouse/ClickHouse/issues/21330). Author @taiyang-li. [#23028](https://github.com/ClickHouse/ClickHouse/pull/23028) ([Maksim Kita](https://github.com/kitaisreal)).
+* Add functions `toUUIDOrDefault`, `toUInt8/16/32/64/256OrDefault`, `toInt8/16/32/64/128/256OrDefault`, which let the user define a default value (not NULL) to return when string parsing fails. [#21330](https://github.com/ClickHouse/ClickHouse/pull/21330) ([taiyang-li](https://github.com/taiyang-li)).
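+
+A minimal sketch of the asynchronous INSERT mode described above, assuming a hypothetical table `t` with a single numeric column (`async_insert` and `wait_for_async_insert` are the settings named in the entry):
+
+```sql
+SET async_insert = 1, wait_for_async_insert = 1; -- enable for this session
+INSERT INTO t VALUES (1); -- accumulated and flushed to the table in a single background batch
+```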
+
+#### Performance Improvement
+
+* Background merges can be preempted by each other and are scheduled with appropriate priorities, so long-running merges won't prevent short merges from proceeding. This is needed for better scheduling and control of merge execution, and it reduces the chance of getting a "too many parts" error. [#22381](https://github.com/ClickHouse/ClickHouse/issues/22381). [#25165](https://github.com/ClickHouse/ClickHouse/pull/25165) ([Nikita Mikhaylov](https://github.com/nikitamikhaylov)). Added the ability to execute more merges and mutations than the number of threads in the background pool. Merges and mutations will be executed step by step according to their sizes (smaller ones are prioritized higher). The ratio of the number of tasks to the number of threads is controlled by the setting `background_merges_mutations_concurrency_ratio`, 2 by default. [#29140](https://github.com/ClickHouse/ClickHouse/pull/29140) ([Nikita Mikhaylov](https://github.com/nikitamikhaylov)).
+* Allow to use asynchronous reads for remote filesystems and lower the number of seeks while reading from them. This improves performance tremendously and makes the experimental `web` and `s3` disks work faster than EBS under certain conditions. [#29205](https://github.com/ClickHouse/ClickHouse/pull/29205) ([Kseniia Sumarokova](https://github.com/kssenii)). In the meantime, the `web` disk type (a static dataset hosted on a web server) has graduated from experimental to production ready.
+* Queries with `INTO OUTFILE` in `clickhouse-client` will use multiple threads. Fix the issue with flickering progress-bar when using `INTO OUTFILE`. This closes [#30873](https://github.com/ClickHouse/ClickHouse/issues/30873). This closes [#30872](https://github.com/ClickHouse/ClickHouse/issues/30872). [#30886](https://github.com/ClickHouse/ClickHouse/pull/30886) ([alexey-milovidov](https://github.com/alexey-milovidov)).
+* Reduce the amount of redundant compressed data read from disk for some types of `SELECT` queries (only for the `MergeTree` engine family). [#30111](https://github.com/ClickHouse/ClickHouse/pull/30111) ([alesapin](https://github.com/alesapin)).
+* Remove some redundant `seek` calls while reading compressed blocks in the `MergeTree` table engine family. [#29766](https://github.com/ClickHouse/ClickHouse/pull/29766) ([alesapin](https://github.com/alesapin)).
+* Make the `url` table function process multiple URLs in parallel. This closes [#29670](https://github.com/ClickHouse/ClickHouse/issues/29670) and closes [#29671](https://github.com/ClickHouse/ClickHouse/issues/29671). [#29673](https://github.com/ClickHouse/ClickHouse/pull/29673) ([alexey-milovidov](https://github.com/alexey-milovidov)).
+* Improve performance of aggregation in order of primary key (with enabled setting `optimize_aggregation_in_order`). [#30266](https://github.com/ClickHouse/ClickHouse/pull/30266) ([Anton Popov](https://github.com/CurtizJ)).
+* Now ClickHouse uses a DNS cache while communicating with external S3. [#29999](https://github.com/ClickHouse/ClickHouse/pull/29999) ([alesapin](https://github.com/alesapin)).
+* Add support for pushdown of `IS NULL`/`IS NOT NULL` to external databases (e.g. MySQL). [#29463](https://github.com/ClickHouse/ClickHouse/pull/29463) ([Azat Khuzhin](https://github.com/azat)). Transform `isNull`/`isNotNull` to `IS NULL`/`IS NOT NULL` (for external DBs, e.g. MySQL). [#29446](https://github.com/ClickHouse/ClickHouse/pull/29446) ([Azat Khuzhin](https://github.com/azat)).
+* SELECT queries from Dictionary tables will use multiple threads. [#30500](https://github.com/ClickHouse/ClickHouse/pull/30500) ([Maksim Kita](https://github.com/kitaisreal)).
+* Improve performance for filtering (WHERE operation) of `Decimal` columns. [#30431](https://github.com/ClickHouse/ClickHouse/pull/30431) ([Jun Jin](https://github.com/vesslanjin)).
+* Replace branchy code in the filter operation with a better implementation using popcnt/ctz, which has better performance. [#29881](https://github.com/ClickHouse/ClickHouse/pull/29881) ([Jun Jin](https://github.com/vesslanjin)).
+* Improve the filter bytemask generator function (used for the WHERE operator) with SSE/AVX2/AVX512 instructions. Note that by default ClickHouse only uses SSE, so this is only relevant for custom builds. [#30014](https://github.com/ClickHouse/ClickHouse/pull/30014) ([jasperzhu](https://github.com/jinjunzh)). [#30670](https://github.com/ClickHouse/ClickHouse/pull/30670) ([jasperzhu](https://github.com/jinjunzh)).
+* Improve the performance of the SUM aggregate function for Nullable floating-point numbers. [#28906](https://github.com/ClickHouse/ClickHouse/pull/28906) ([Raúl Marín](https://github.com/Algunenano)).
+* Speed up the part loading process when multiple disks are in use. The idea is similar to https://github.com/ClickHouse/ClickHouse/pull/16423. A production environment showed an improvement from 24 min to 16 min. [#28363](https://github.com/ClickHouse/ClickHouse/pull/28363) ([Amos Bird](https://github.com/amosbird)).
+* Reduce default settings for S3 multipart upload part size to lower memory usage. [#28679](https://github.com/ClickHouse/ClickHouse/pull/28679) ([ianton-ru](https://github.com/ianton-ru)).
+* Speed up `bitmapAnd` function. [#28332](https://github.com/ClickHouse/ClickHouse/pull/28332) ([dddounaiking](https://github.com/OodounaikingoO)).
+* Removed sub-optimal mutation notifications in `StorageMergeTree` when merges are still going. [#27552](https://github.com/ClickHouse/ClickHouse/pull/27552) ([Vladimir Chebotarev](https://github.com/excitoon)).
+* Attempt to improve performance of string comparison. [#28767](https://github.com/ClickHouse/ClickHouse/pull/28767) ([alexey-milovidov](https://github.com/alexey-milovidov)).
+* The primary key index and partition filter can now work with tuples. [#29281](https://github.com/ClickHouse/ClickHouse/pull/29281) ([凌涛](https://github.com/lingtaolf)).
+* If a query has multiple quantile aggregate functions with the same arguments but different level parameters, they will be fused together and executed in one pass if the setting `optimize_syntax_fuse_functions` is enabled (see the sketch after this list). [#26657](https://github.com/ClickHouse/ClickHouse/pull/26657) ([hexiaoting](https://github.com/hexiaoting)).
+* Now min-max aggregation over the first expression of the primary key is optimized by projection. This is for [#329](https://github.com/ClickHouse/ClickHouse/issues/329). [#29918](https://github.com/ClickHouse/ClickHouse/pull/29918) ([Amos Bird](https://github.com/amosbird)).
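+
+A sketch of the quantile fusion mentioned above, assuming a hypothetical table `t` with a numeric column `x`:
+
+```sql
+SET optimize_syntax_fuse_functions = 1;
+-- Same argument, different levels: computed together in a single pass.
+SELECT quantile(0.5)(x), quantile(0.9)(x) FROM t;
+```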
+
+#### Experimental Feature
+
+* Add ability to change nodes configuration (in `.xml` file) for ClickHouse Keeper. [#30372](https://github.com/ClickHouse/ClickHouse/pull/30372) ([alesapin](https://github.com/alesapin)).
+* Add `sparkbar` aggregate function. This closes [#26175](https://github.com/ClickHouse/ClickHouse/issues/26175). [#27481](https://github.com/ClickHouse/ClickHouse/pull/27481) ([小路](https://github.com/nicelulu)). Note: there is one flaw in this function; the behaviour will be changed in future releases. A usage sketch follows this list.
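+
+A usage sketch for `sparkbar`, assuming a hypothetical table `hits` with a `Date` column `event_date`; the parameter is the number of buckets:
+
+```sql
+SELECT sparkbar(9)(event_date, cnt)
+FROM (SELECT event_date, count() AS cnt FROM hits GROUP BY event_date);
+```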
+
+#### Improvement
+
+* Allow the user to change log levels without a restart. [#29586](https://github.com/ClickHouse/ClickHouse/pull/29586) ([Nikolay Degterinsky](https://github.com/evillique)).
+* Multiple improvements for SQL user-defined functions. Queries for manipulating SQL user-defined functions now support the ON CLUSTER clause. Example `CREATE FUNCTION test_function ON CLUSTER 'cluster' AS x -> x + 1;`. Closes [#30666](https://github.com/ClickHouse/ClickHouse/issues/30666). [#30734](https://github.com/ClickHouse/ClickHouse/pull/30734) ([Maksim Kita](https://github.com/kitaisreal)). Support `CREATE OR REPLACE` and `CREATE IF NOT EXISTS` syntaxes. [#30454](https://github.com/ClickHouse/ClickHouse/pull/30454) ([Maksim Kita](https://github.com/kitaisreal)). Added `DROP IF EXISTS` support. Example `DROP FUNCTION IF EXISTS test_function`. [#30437](https://github.com/ClickHouse/ClickHouse/pull/30437) ([Maksim Kita](https://github.com/kitaisreal)). Support lambdas. Example `CREATE FUNCTION lambda_function AS x -> arrayMap(element -> element * 2, x);`. [#30435](https://github.com/ClickHouse/ClickHouse/pull/30435) ([Maksim Kita](https://github.com/kitaisreal)). Support SQL user-defined functions in `clickhouse-local`. [#30179](https://github.com/ClickHouse/ClickHouse/pull/30179) ([Maksim Kita](https://github.com/kitaisreal)).
+* Enable the per-query memory profiler (with `memory_profiler_step` = 4 MiB) globally. [#29455](https://github.com/ClickHouse/ClickHouse/pull/29455) ([Azat Khuzhin](https://github.com/azat)).
+* Added columns `data_compressed_bytes`, `data_uncompressed_bytes`, `marks_bytes` into `system.data_skipping_indices`. Added columns `secondary_indices_compressed_bytes`, `secondary_indices_uncompressed_bytes`, `secondary_indices_marks_bytes` into `system.parts`. Closes [#29697](https://github.com/ClickHouse/ClickHouse/issues/29697). [#29896](https://github.com/ClickHouse/ClickHouse/pull/29896) ([Maksim Kita](https://github.com/kitaisreal)).
+* Add a `table` alias to `system.tables` and a `database` alias to `system.databases`. [#29677](https://github.com/ClickHouse/ClickHouse/issues/29677). [#29882](https://github.com/ClickHouse/ClickHouse/pull/29882) ([kevin wan](https://github.com/MaxWk)).
+* Correctly resolve interdependencies between tables on server startup. Closes [#8004](https://github.com/ClickHouse/ClickHouse/issues/8004), closes [#15170](https://github.com/ClickHouse/ClickHouse/issues/15170). [#28373](https://github.com/ClickHouse/ClickHouse/pull/28373) ([tavplubix](https://github.com/tavplubix)).
+* Avoid error "Division by zero" when denominator is Nullable in functions `divide`, `intDiv` and `modulo`. Closes [#22621](https://github.com/ClickHouse/ClickHouse/issues/22621). [#28352](https://github.com/ClickHouse/ClickHouse/pull/28352) ([Kruglov Pavel](https://github.com/Avogar)).
+* Allow to parse values of `Date` data type in text formats as `YYYYMMDD` in addition to `YYYY-MM-DD`. This closes [#30870](https://github.com/ClickHouse/ClickHouse/issues/30870). [#30871](https://github.com/ClickHouse/ClickHouse/pull/30871) ([alexey-milovidov](https://github.com/alexey-milovidov)).
+* Web UI: render bars in table cells. [#29792](https://github.com/ClickHouse/ClickHouse/pull/29792) ([alexey-milovidov](https://github.com/alexey-milovidov)).
+* Users can now create dictionaries with comments: `CREATE DICTIONARY ... COMMENT 'value'`. [#29899](https://github.com/ClickHouse/ClickHouse/pull/29899) ([Vasily Nemkov](https://github.com/Enmk)). Users can now also set a comment on a database in the `CREATE DATABASE` statement. [#29429](https://github.com/ClickHouse/ClickHouse/pull/29429) ([Vasily Nemkov](https://github.com/Enmk)).
+* Introduce `compiled_expression_cache_elements_size` setting. If you will ever want to use this setting, you will already know what it does. [#30667](https://github.com/ClickHouse/ClickHouse/pull/30667) ([Maksim Kita](https://github.com/kitaisreal)).
+* `clickhouse-format` now supports the option `--query`. In previous versions you had to pass the query via stdin. [#29325](https://github.com/ClickHouse/ClickHouse/pull/29325) ([凌涛](https://github.com/lingtaolf)).
+* Support `ALTER TABLE` for tables in `Memory` databases. Memory databases are used in `clickhouse-local`. [#30866](https://github.com/ClickHouse/ClickHouse/pull/30866) ([tavplubix](https://github.com/tavplubix)).
+* Arrays of all serializable types are now supported by `arrayStringConcat`. [#30840](https://github.com/ClickHouse/ClickHouse/pull/30840) ([Nickita Taranov](https://github.com/nickitat)).
+* ClickHouse now takes docker/cgroups limits into account when determining the amount of system memory. See [#25662](https://github.com/ClickHouse/ClickHouse/issues/25662). [#30574](https://github.com/ClickHouse/ClickHouse/pull/30574) ([Pavel Medvedev](https://github.com/pmed)).
+* Fetching the table structure for PostgreSQL databases is more reliable now. [#30477](https://github.com/ClickHouse/ClickHouse/pull/30477) ([Kseniia Sumarokova](https://github.com/kssenii)).
+* Full support of positional arguments in GROUP BY and ORDER BY. [#30433](https://github.com/ClickHouse/ClickHouse/pull/30433) ([Kseniia Sumarokova](https://github.com/kssenii)).
+* Allow extracting a non-string element as a string using `JSONExtractString` (see the sketch after this list). This is for [pull/25452#issuecomment-927123287](https://github.com/ClickHouse/ClickHouse/pull/25452#issuecomment-927123287). [#30426](https://github.com/ClickHouse/ClickHouse/pull/30426) ([Amos Bird](https://github.com/amosbird)).
+* Added an ability to use FINAL clause in SELECT queries from `GraphiteMergeTree`. [#30360](https://github.com/ClickHouse/ClickHouse/pull/30360) ([Nikita Mikhaylov](https://github.com/nikitamikhaylov)).
+* Minor improvements in replica cloning and enqueuing fetch for broken parts, that should avoid extremely rare hanging of `GET_PART` entries in replication queue. [#30346](https://github.com/ClickHouse/ClickHouse/pull/30346) ([tavplubix](https://github.com/tavplubix)).
+* Allow symlinks to files in `user_files` directory for file table function. [#30309](https://github.com/ClickHouse/ClickHouse/pull/30309) ([Kseniia Sumarokova](https://github.com/kssenii)).
+* Fixed comparison of `Date32` with `Date`, `DateTime`, `DateTime64` and `String`. [#30219](https://github.com/ClickHouse/ClickHouse/pull/30219) ([liang.huang](https://github.com/lhuang09287750)).
+* Allow to remove `SAMPLE BY` expression from `MergeTree` tables (`ALTER TABLE <table> REMOVE SAMPLE BY`). [#30180](https://github.com/ClickHouse/ClickHouse/pull/30180) ([Anton Popov](https://github.com/CurtizJ)).
+* Now `Keeper` (as part of `clickhouse-server`) will start asynchronously if it can connect to some other node. [#30170](https://github.com/ClickHouse/ClickHouse/pull/30170) ([alesapin](https://github.com/alesapin)).
+* Now `clickhouse-client` supports native multi-line editing. [#30143](https://github.com/ClickHouse/ClickHouse/pull/30143) ([Amos Bird](https://github.com/amosbird)).
+* `polygon` dictionaries (reverse geocoding): added support for reading the dictionary content with a SELECT query if the setting `store_polygon_key_column` = true. Closes [#30090](https://github.com/ClickHouse/ClickHouse/issues/30090). [#30142](https://github.com/ClickHouse/ClickHouse/pull/30142) ([Maksim Kita](https://github.com/kitaisreal)).
+* Add ClickHouse logo to Play UI. [#29674](https://github.com/ClickHouse/ClickHouse/pull/29674) ([alexey-milovidov](https://github.com/alexey-milovidov)).
+* Better exception message while reading column from Arrow-supported formats like `Arrow`, `ArrowStream`, `Parquet` and `ORC`. This closes [#29926](https://github.com/ClickHouse/ClickHouse/issues/29926). [#29927](https://github.com/ClickHouse/ClickHouse/pull/29927) ([alexey-milovidov](https://github.com/alexey-milovidov)).
+* Fix data-race between flush and startup in `Buffer` tables. This can appear in tests. [#29930](https://github.com/ClickHouse/ClickHouse/pull/29930) ([Azat Khuzhin](https://github.com/azat)).
+* Fix `lock-order-inversion` between `DROP TABLE` for `DatabaseMemory` and `LiveView`. Live View is an experimental feature. Memory database is used in clickhouse-local. [#29929](https://github.com/ClickHouse/ClickHouse/pull/29929) ([Azat Khuzhin](https://github.com/azat)).
+* Fix lock-order-inversion between periodic dictionary reload and config reload. [#29928](https://github.com/ClickHouse/ClickHouse/pull/29928) ([Azat Khuzhin](https://github.com/azat)).
+* Update zoneinfo files to 2021c. [#29925](https://github.com/ClickHouse/ClickHouse/pull/29925) ([alexey-milovidov](https://github.com/alexey-milovidov)).
+* Add ability to configure retries and delays between them for `clickhouse-copier`. [#29921](https://github.com/ClickHouse/ClickHouse/pull/29921) ([Azat Khuzhin](https://github.com/azat)).
+* Add the `shutdown_wait_unfinished_queries` server setting to allow waiting for running queries for up to the `shutdown_wait_unfinished` time. This is for [#24451](https://github.com/ClickHouse/ClickHouse/issues/24451). [#29914](https://github.com/ClickHouse/ClickHouse/pull/29914) ([Amos Bird](https://github.com/amosbird)).
+* Add the ability to trace peak memory usage (with the new trace_type `MemoryPeak` in `system.trace_log`). [#29858](https://github.com/ClickHouse/ClickHouse/pull/29858) ([Azat Khuzhin](https://github.com/azat)).
+* PostgreSQL foreign tables: added the partitioned table prefix 'p' to the query for fetching the replica identity index. [#29828](https://github.com/ClickHouse/ClickHouse/pull/29828) ([Shoh Jahon](https://github.com/Shohjahon)).
+* Apply `max_untracked_memory`/`memory_profiler_step`/`memory_profiler_sample_probability` during mutate/merge to profile memory usage during merges. [#29681](https://github.com/ClickHouse/ClickHouse/pull/29681) ([Azat Khuzhin](https://github.com/azat)).
+* Query obfuscator: `clickhouse-format --obfuscate` now works with more types of queries. [#29672](https://github.com/ClickHouse/ClickHouse/pull/29672) ([alexey-milovidov](https://github.com/alexey-milovidov)).
+* Fixed the issue: `clickhouse-format --obfuscate` cannot process queries with embedded dictionaries (functions `regionTo...`). [#29667](https://github.com/ClickHouse/ClickHouse/pull/29667) ([alexey-milovidov](https://github.com/alexey-milovidov)).
+* Fix incorrect Nullable processing of JSON functions. This fixes [#29615](https://github.com/ClickHouse/ClickHouse/issues/29615) . Mark as improvement because https://github.com/ClickHouse/ClickHouse/pull/28012 is not released. [#29659](https://github.com/ClickHouse/ClickHouse/pull/29659) ([Amos Bird](https://github.com/amosbird)).
+* Increase `listen_backlog` by default (to match the default in newer Linux kernels). [#29643](https://github.com/ClickHouse/ClickHouse/pull/29643) ([Azat Khuzhin](https://github.com/azat)).
+* Reload dictionaries, models, and user-defined executable functions if the server config sections `dictionaries_config`, `models_config`, `user_defined_executable_functions_config` change. Closes [#28142](https://github.com/ClickHouse/ClickHouse/issues/28142). [#29529](https://github.com/ClickHouse/ClickHouse/pull/29529) ([Maksim Kita](https://github.com/kitaisreal)).
+* Get rid of pointless restriction on projection name. Now projection name can start with `tmp_`. [#29520](https://github.com/ClickHouse/ClickHouse/pull/29520) ([Amos Bird](https://github.com/amosbird)).
+* Fixed `There is no query or query context has expired` error in mutations with nested subqueries. Do not allow subqueries in mutation if table is replicated and `allow_nondeterministic_mutations` setting is disabled. [#29495](https://github.com/ClickHouse/ClickHouse/pull/29495) ([tavplubix](https://github.com/tavplubix)).
+* Apply config changes to `max_concurrent_queries` during runtime (no need to restart). [#29414](https://github.com/ClickHouse/ClickHouse/pull/29414) ([Raúl Marín](https://github.com/Algunenano)).
+* Added setting `use_skip_indexes`. [#29405](https://github.com/ClickHouse/ClickHouse/pull/29405) ([Maksim Kita](https://github.com/kitaisreal)).
+* Add support for `FREEZE`ing in-memory parts (for backups). [#29376](https://github.com/ClickHouse/ClickHouse/pull/29376) ([Mo Xuan](https://github.com/mo-avatar)).
+* Pass through the initial query_id for `clickhouse-benchmark` (previously, if you ran a remote query via `clickhouse-benchmark`, queries on shards were not linked to the initial query via `initial_query_id`). [#29364](https://github.com/ClickHouse/ClickHouse/pull/29364) ([Azat Khuzhin](https://github.com/azat)).
+* Skip indexes `tokenbf_v1` and `ngrambf_v1`: added support for the `Array` data type with a key of `String` or `FixedString` type. [#29280](https://github.com/ClickHouse/ClickHouse/pull/29280) ([Maksim Kita](https://github.com/kitaisreal)). Skip indexes `tokenbf_v1` and `ngrambf_v1`: added support for the `Map` data type with a key of `String` or `FixedString` type. Author @lingtaolf. [#29220](https://github.com/ClickHouse/ClickHouse/pull/29220) ([Maksim Kita](https://github.com/kitaisreal)).
+* Function `has`: added support for `Map` data type. [#29267](https://github.com/ClickHouse/ClickHouse/pull/29267) ([Maksim Kita](https://github.com/kitaisreal)).
+* Add a `compress_logs` setting for clickhouse-keeper which allows compressing clickhouse-keeper logs (for the replicated state machine) with `ZSTD`. Implements: [#26977](https://github.com/ClickHouse/ClickHouse/issues/26977). [#29223](https://github.com/ClickHouse/ClickHouse/pull/29223) ([alesapin](https://github.com/alesapin)).
+* Add a setting `external_table_strict_query` - it will force passing the whole WHERE expression in queries to foreign databases even if it is incompatible. [#29206](https://github.com/ClickHouse/ClickHouse/pull/29206) ([Azat Khuzhin](https://github.com/azat)).
+* Disable projections when `ARRAY JOIN` is used. In previous versions projection analysis may break aliases in array join. [#29139](https://github.com/ClickHouse/ClickHouse/pull/29139) ([Amos Bird](https://github.com/amosbird)).
+* Support more types in `MsgPack` input/output format. [#29077](https://github.com/ClickHouse/ClickHouse/pull/29077) ([Kruglov Pavel](https://github.com/Avogar)).
+* Allow to input and output `LowCardinality` columns in `ORC` input/output format. [#29062](https://github.com/ClickHouse/ClickHouse/pull/29062) ([Kruglov Pavel](https://github.com/Avogar)).
+* Select from `system.distributed_ddl_queue` might show incorrect values, it's fixed. [#29061](https://github.com/ClickHouse/ClickHouse/pull/29061) ([tavplubix](https://github.com/tavplubix)).
+* Correct behaviour with unknown methods for HTTP connection. Solves [#29050](https://github.com/ClickHouse/ClickHouse/issues/29050). [#29057](https://github.com/ClickHouse/ClickHouse/pull/29057) ([Filatenkov Artur](https://github.com/FArthur-cmd)).
+* `clickhouse-keeper`: Fix bug in `clickhouse-keeper-converter` which can lead to some data loss while restoring from ZooKeeper logs (not snapshot). [#29030](https://github.com/ClickHouse/ClickHouse/pull/29030) ([小路](https://github.com/nicelulu)). Fix bug in `clickhouse-keeper-converter` which can lead to incorrect ZooKeeper log deserialization. [#29071](https://github.com/ClickHouse/ClickHouse/pull/29071) ([小路](https://github.com/nicelulu)).
+* Apply settings from `CREATE ... AS SELECT` queries (fixes: [#28810](https://github.com/ClickHouse/ClickHouse/issues/28810)). [#28962](https://github.com/ClickHouse/ClickHouse/pull/28962) ([Azat Khuzhin](https://github.com/azat)).
+* Respect default database setting for ALTER TABLE ... ON CLUSTER ... REPLACE/MOVE PARTITION FROM/TO ... [#28955](https://github.com/ClickHouse/ClickHouse/pull/28955) ([anneji-dev](https://github.com/anneji-dev)).
+* gRPC protocol: allow changing server-side compression from the client. [#28953](https://github.com/ClickHouse/ClickHouse/pull/28953) ([Vitaly Baranov](https://github.com/vitlibar)).
+* Skip "no data" exception when reading thermal sensors for asynchronous metrics. This closes [#28852](https://github.com/ClickHouse/ClickHouse/issues/28852). [#28882](https://github.com/ClickHouse/ClickHouse/pull/28882) ([alexey-milovidov](https://github.com/alexey-milovidov)).
+* Fixed logical race condition that might cause `Dictionary not found` error for existing dictionary in rare cases. [#28853](https://github.com/ClickHouse/ClickHouse/pull/28853) ([tavplubix](https://github.com/tavplubix)).
+* Relax nested function for If-combinator check (but forbid nested identical combinators). [#28828](https://github.com/ClickHouse/ClickHouse/pull/28828) ([Azat Khuzhin](https://github.com/azat)).
+* Fix possible uncaught exception during server termination. [#28761](https://github.com/ClickHouse/ClickHouse/pull/28761) ([Azat Khuzhin](https://github.com/azat)).
+* Forbid cleaning of tmp directories that can be used by an active mutation/merge if mutation/merge is extraordinarily long. [#28760](https://github.com/ClickHouse/ClickHouse/pull/28760) ([Azat Khuzhin](https://github.com/azat)).
+* Allow optimization `optimize_arithmetic_operations_in_aggregate_functions = 1` when alias is used. [#28746](https://github.com/ClickHouse/ClickHouse/pull/28746) ([Amos Bird](https://github.com/amosbird)).
+* Implement the `detach_not_byte_identical_parts` setting for `ReplicatedMergeTree`, which will detach instead of remove parts that are not byte-identical (after merge/mutate). [#28708](https://github.com/ClickHouse/ClickHouse/pull/28708) ([Azat Khuzhin](https://github.com/azat)).
+* Implement `max_suspicious_broken_parts_bytes` setting for `MergeTree` (to limit total size of all broken parts, default is `1GiB`). [#28707](https://github.com/ClickHouse/ClickHouse/pull/28707) ([Azat Khuzhin](https://github.com/azat)).
+* Enable expanding macros in `RabbitMQ` table settings. [#28683](https://github.com/ClickHouse/ClickHouse/pull/28683) ([Vitaly Baranov](https://github.com/vitlibar)).
+* Restore the possibility to read data of a table using the `Log` engine in multiple threads. [#28125](https://github.com/ClickHouse/ClickHouse/pull/28125) ([Vitaly Baranov](https://github.com/vitlibar)).
+* Fix misbehavior of NULL column handling in JSON functions. This fixes [#27930](https://github.com/ClickHouse/ClickHouse/issues/27930). [#28012](https://github.com/ClickHouse/ClickHouse/pull/28012) ([Amos Bird](https://github.com/amosbird)).
+* Allow to set the size of Mark/Uncompressed cache for skip indices separately from columns. [#27961](https://github.com/ClickHouse/ClickHouse/pull/27961) ([Amos Bird](https://github.com/amosbird)).
+* Allow to mix JOIN with `USING` with other JOIN types. [#23881](https://github.com/ClickHouse/ClickHouse/pull/23881) ([darkkeks](https://github.com/darkkeks)).
+* Update aws-sdk submodule for throttling in Yandex Cloud S3. [#30646](https://github.com/ClickHouse/ClickHouse/pull/30646) ([ianton-ru](https://github.com/ianton-ru)).
+* Fix releasing query ID and session ID at the end of query processing while handing gRPC call. [#29954](https://github.com/ClickHouse/ClickHouse/pull/29954) ([Vitaly Baranov](https://github.com/vitlibar)).
+* Fix shutdown of `AccessControlManager` to fix flaky test. [#29951](https://github.com/ClickHouse/ClickHouse/pull/29951) ([Vitaly Baranov](https://github.com/vitlibar)).
+* Fix failed assertion in reading from `HDFS`. Update libhdfs3 library to be able to run in tests in debug. Closes [#29251](https://github.com/ClickHouse/ClickHouse/issues/29251). Closes [#27814](https://github.com/ClickHouse/ClickHouse/issues/27814). [#29276](https://github.com/ClickHouse/ClickHouse/pull/29276) ([Kseniia Sumarokova](https://github.com/kssenii)).
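+
+A sketch of the `JSONExtractString` behaviour referenced above; a non-string element is now returned as its string representation:
+
+```sql
+SELECT JSONExtractString('{"a": 123}', 'a'); -- returns '123'
+```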
+
+
+#### Build/Testing/Packaging Improvement
+
+* Add support for FreeBSD builds for Aarch64 machines. [#29952](https://github.com/ClickHouse/ClickHouse/pull/29952) ([MikaelUrankar](https://github.com/MikaelUrankar)).
+* Recursive submodules are no longer needed for ClickHouse. [#30315](https://github.com/ClickHouse/ClickHouse/pull/30315) ([alexey-milovidov](https://github.com/alexey-milovidov)).
+* ClickHouse can be statically built with Musl. This is added as an experiment; it does not support building `odbc-bridge`, `library-bridge`, integration with CatBoost and some libraries. [#30248](https://github.com/ClickHouse/ClickHouse/pull/30248) ([alexey-milovidov](https://github.com/alexey-milovidov)).
+* Enable `Protobuf`, `Arrow`, `ORC`, `Parquet` for `AArch64` and `Darwin` (macOS) builds. This closes [#29248](https://github.com/ClickHouse/ClickHouse/issues/29248). This closes [#28018](https://github.com/ClickHouse/ClickHouse/issues/28018). [#30015](https://github.com/ClickHouse/ClickHouse/pull/30015) ([alexey-milovidov](https://github.com/alexey-milovidov)).
+* Add cross-build for PowerPC (powerpc64le). This closes [#9589](https://github.com/ClickHouse/ClickHouse/issues/9589). Enable support for interaction with MySQL for AArch64 and PowerPC. This closes [#26301](https://github.com/ClickHouse/ClickHouse/issues/26301). [#30010](https://github.com/ClickHouse/ClickHouse/pull/30010) ([alexey-milovidov](https://github.com/alexey-milovidov)).
+* Leave only required files in cross-compile toolchains. Include them as submodules (earlier they were downloaded as tarballs). [#29974](https://github.com/ClickHouse/ClickHouse/pull/29974) ([alexey-milovidov](https://github.com/alexey-milovidov)).
+* Implemented structure-aware fuzzing approach in ClickHouse for select statement parser. [#30012](https://github.com/ClickHouse/ClickHouse/pull/30012) ([Paul](https://github.com/PaulCher)).
+* Turn on the experimental constexpr expressions evaluator for clang to speed up template code compilation. [#29668](https://github.com/ClickHouse/ClickHouse/pull/29668) ([myrrc](https://github.com/myrrc)).
+* Add the ability to compile using a newer version of glibc without using new symbols. [#29594](https://github.com/ClickHouse/ClickHouse/pull/29594) ([Azat Khuzhin](https://github.com/azat)).
+* Reduce the Debug build binary size via a clang optimization option. [#28736](https://github.com/ClickHouse/ClickHouse/pull/28736) ([flynn](https://github.com/ucasfl)).
+* Now all images for CI will be placed in a separate Docker Hub repo. [#28656](https://github.com/ClickHouse/ClickHouse/pull/28656) ([alesapin](https://github.com/alesapin)).
+* Improve support for build with clang-13. [#28046](https://github.com/ClickHouse/ClickHouse/pull/28046) ([Sergei Semin](https://github.com/syominsergey)).
+* Add ability to print raw profile events to `clickhouse-client` (This can be useful for debugging and for testing). [#30064](https://github.com/ClickHouse/ClickHouse/pull/30064) ([Azat Khuzhin](https://github.com/azat)).
+* Add time dependency for clickhouse-server unit (systemd and sysvinit init). [#28891](https://github.com/ClickHouse/ClickHouse/pull/28891) ([Azat Khuzhin](https://github.com/azat)).
+* Reload stacktrace cache when symbol is reloaded. [#28137](https://github.com/ClickHouse/ClickHouse/pull/28137) ([Amos Bird](https://github.com/amosbird)).
+
+#### Bug Fix
+
+* Functions for case-insensitive search in UTF-8 strings like `positionCaseInsensitiveUTF8` and `countSubstringsCaseInsensitiveUTF8` might find substrings that actually do not match in very rare cases; it's fixed. [#30663](https://github.com/ClickHouse/ClickHouse/pull/30663) ([tavplubix](https://github.com/tavplubix)).
+* Fix reading from empty file on encrypted disk. [#30494](https://github.com/ClickHouse/ClickHouse/pull/30494) ([Vitaly Baranov](https://github.com/vitlibar)).
+* Fix transformation of disjunctions chain to `IN` (controlled by settings `optimize_min_equality_disjunction_chain_length`) in distributed queries with settings `legacy_column_name_of_tuple_literal = 0`. [#28658](https://github.com/ClickHouse/ClickHouse/pull/28658) ([Anton Popov](https://github.com/CurtizJ)).
+* Allow using a materialized column as the sharding key in a distributed table even if `insert_allow_materialized_columns = 0`. [#28637](https://github.com/ClickHouse/ClickHouse/pull/28637) ([Vitaly Baranov](https://github.com/vitlibar)).
+* Fix `ORDER BY ... WITH FILL` with set `TO` and `FROM` and no rows in result set. [#30888](https://github.com/ClickHouse/ClickHouse/pull/30888) ([Anton Popov](https://github.com/CurtizJ)).
+* Fix set index not used in AND/OR expressions when there are more than two operands. This fixes [#30416](https://github.com/ClickHouse/ClickHouse/issues/30416) . [#30887](https://github.com/ClickHouse/ClickHouse/pull/30887) ([Amos Bird](https://github.com/amosbird)).
+* Fix crash when a projection with a hashing function is materialized. This fixes [#30861](https://github.com/ClickHouse/ClickHouse/issues/30861). The issue is similar to https://github.com/ClickHouse/ClickHouse/pull/28560, which stems from a lack of proper understanding of the invariant of the header's emptiness. [#30877](https://github.com/ClickHouse/ClickHouse/pull/30877) ([Amos Bird](https://github.com/amosbird)).
+* Fixed ambiguity when extracting auxiliary ZooKeeper name from ZooKeeper path in `ReplicatedMergeTree`. Previously server might fail to start with `Unknown auxiliary ZooKeeper name` if ZooKeeper path contains a colon. Fixes [#29052](https://github.com/ClickHouse/ClickHouse/issues/29052). Also it was allowed to specify ZooKeeper path that does not start with slash, but now it's deprecated and creation of new tables with such path is not allowed. Slashes and colons in auxiliary ZooKeeper names are not allowed too. [#30822](https://github.com/ClickHouse/ClickHouse/pull/30822) ([tavplubix](https://github.com/tavplubix)).
+* Clean the temporary directory when `localBackup` fails for some reason. [#30797](https://github.com/ClickHouse/ClickHouse/pull/30797) ([ianton-ru](https://github.com/ianton-ru)).
+* Fixed a race condition between `REPLACE/MOVE PARTITION` and background merge in non-replicated `MergeTree` that might cause a part of moved/replaced data to remain in partition. Fixes [#29327](https://github.com/ClickHouse/ClickHouse/issues/29327). [#30717](https://github.com/ClickHouse/ClickHouse/pull/30717) ([tavplubix](https://github.com/tavplubix)).
+* Fix PREWHERE with WHERE in case of always true PREWHERE. [#30668](https://github.com/ClickHouse/ClickHouse/pull/30668) ([Azat Khuzhin](https://github.com/azat)).
+* The limit push-down optimization could cause an error `Cannot find column`. Fixes [#30438](https://github.com/ClickHouse/ClickHouse/issues/30438). [#30562](https://github.com/ClickHouse/ClickHouse/pull/30562) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
+* Add missing parentheses for `isNotNull`/`isNull` rewrites to `IS [NOT] NULL` (fixes queries that have something like `isNotNull(1)+isNotNull(2)`). [#30520](https://github.com/ClickHouse/ClickHouse/pull/30520) ([Azat Khuzhin](https://github.com/azat)).
+* Fix deadlock on ALTER with scalar subquery to the same table, close [#30461](https://github.com/ClickHouse/ClickHouse/issues/30461). [#30492](https://github.com/ClickHouse/ClickHouse/pull/30492) ([Vladimir C](https://github.com/vdimir)).
+* Fixed segfault which might happen if session expired during execution of REPLACE PARTITION. [#30432](https://github.com/ClickHouse/ClickHouse/pull/30432) ([tavplubix](https://github.com/tavplubix)).
+* Queries with a condition like `IN (subquery)` could return an incorrect result in case an aggregate projection was applied. Fixed creation of sets for projections. [#30310](https://github.com/ClickHouse/ClickHouse/pull/30310) ([Amos Bird](https://github.com/amosbird)).
+* Fix column alias resolution of JOIN queries when projection is enabled. This fixes [#30146](https://github.com/ClickHouse/ClickHouse/issues/30146). [#30293](https://github.com/ClickHouse/ClickHouse/pull/30293) ([Amos Bird](https://github.com/amosbird)).
+* Fix a deficiency in the `replaceRegexpAll` function. [#30292](https://github.com/ClickHouse/ClickHouse/pull/30292) ([Memo](https://github.com/Joeywzr)).
+* Fix parsing of the `preallocate` option from layout config in `ComplexKeyHashedDictionary` and `ComplexKeySparseHashedDictionary`. [#30246](https://github.com/ClickHouse/ClickHouse/pull/30246) ([Maksim Kita](https://github.com/kitaisreal)).
+* Fix `[I]LIKE` function. Closes [#28661](https://github.com/ClickHouse/ClickHouse/issues/28661). [#30244](https://github.com/ClickHouse/ClickHouse/pull/30244) ([Nikolay Degterinsky](https://github.com/evillique)).
+* Fix crash with short-circuit evaluation and `LowCardinality` in `multiIf`. [#30243](https://github.com/ClickHouse/ClickHouse/pull/30243) ([Raúl Marín](https://github.com/Algunenano)).
+* Fix `bytes_allocated` calculation for nullable attributes in `FlatDictionary` and `HashedDictionary`. [#30238](https://github.com/ClickHouse/ClickHouse/pull/30238) ([Maksim Kita](https://github.com/kitaisreal)).
+* Allow identifiers starting with numbers in multiple joins. [#30230](https://github.com/ClickHouse/ClickHouse/pull/30230) ([Vladimir C](https://github.com/vdimir)).
+* Fix reading from `MergeTree` with `max_read_buffer_size = 0` (when the user wants to shoot himself in the foot) (can lead to exceptions `Can't adjust last granule`, `LOGICAL_ERROR`, or even data loss). [#30192](https://github.com/ClickHouse/ClickHouse/pull/30192) ([Azat Khuzhin](https://github.com/azat)).
+* Fix `pread_fake_async`/`pread_threadpool` with `min_bytes_to_use_direct_io`. [#30191](https://github.com/ClickHouse/ClickHouse/pull/30191) ([Azat Khuzhin](https://github.com/azat)).
+* Fix `INSERT SELECT` incorrectly filling a `MATERIALIZED` column based on a `Nullable` column. [#30189](https://github.com/ClickHouse/ClickHouse/pull/30189) ([Azat Khuzhin](https://github.com/azat)).
+* Support nullable arguments in function `initializeAggregation`. [#30177](https://github.com/ClickHouse/ClickHouse/pull/30177) ([Anton Popov](https://github.com/CurtizJ)).
+* Fix error `Port is already connected` for queries with `GLOBAL IN` and `WITH TOTALS`. Only for 21.9 and 21.10. [#30086](https://github.com/ClickHouse/ClickHouse/pull/30086) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
+* Fix race between MOVE PARTITION and merges/mutations for MergeTree. [#30074](https://github.com/ClickHouse/ClickHouse/pull/30074) ([Azat Khuzhin](https://github.com/azat)).
+* A dropped `Memory` database might reappear after server restart; this is fixed ([#29795](https://github.com/ClickHouse/ClickHouse/issues/29795)). Also added the `force_remove_data_recursively_on_drop` setting as a workaround for the `Directory not empty` error when dropping an `Ordinary` database (because it's not possible to remove data leftovers manually in a cloud environment). [#30054](https://github.com/ClickHouse/ClickHouse/pull/30054) ([tavplubix](https://github.com/tavplubix)).
+* Fix crash of `SAMPLE BY tuple()`, closes [#30004](https://github.com/ClickHouse/ClickHouse/issues/30004). [#30016](https://github.com/ClickHouse/ClickHouse/pull/30016) ([flynn](https://github.com/ucasfl)).
+* Fix issue [#29965](https://github.com/ClickHouse/ClickHouse/issues/29965). [#29976](https://github.com/ClickHouse/ClickHouse/pull/29976) ([hexiaoting](https://github.com/hexiaoting)).
+* Fix possible data-race between `FileChecker` and `StorageLog`/`StorageStripeLog`. [#29959](https://github.com/ClickHouse/ClickHouse/pull/29959) ([Azat Khuzhin](https://github.com/azat)).
+* Fix data-race between `LogSink::writeMarks()` and `LogSource` in `StorageLog`. [#29946](https://github.com/ClickHouse/ClickHouse/pull/29946) ([Azat Khuzhin](https://github.com/azat)).
+* Fix a potential resource leak in the concurrent query limit of MergeTree tables, introduced in https://github.com/ClickHouse/ClickHouse/pull/19544. [#29879](https://github.com/ClickHouse/ClickHouse/pull/29879) ([Amos Bird](https://github.com/amosbird)).
+* Fix system tables recreation check (fails to detect changes in enum values). [#29857](https://github.com/ClickHouse/ClickHouse/pull/29857) ([Azat Khuzhin](https://github.com/azat)).
+* MaterializedMySQL: Fix an issue where if the connection to MySQL was lost, only parts of a transaction could be processed. [#29837](https://github.com/ClickHouse/ClickHouse/pull/29837) ([Håvard Kvålen](https://github.com/havardk)).
+* Avoid the `Timeout exceeded: elapsed 18446744073.709553 seconds` error that might happen in extremely rare cases, presumably due to some bug in the kernel. Fixes [#29154](https://github.com/ClickHouse/ClickHouse/issues/29154). [#29811](https://github.com/ClickHouse/ClickHouse/pull/29811) ([tavplubix](https://github.com/tavplubix)).
+* Fix bad cast in the `ATTACH TABLE ... FROM 'path'` query when a non-string literal is used instead of a path. It might lead to reading of uninitialized memory. [#29790](https://github.com/ClickHouse/ClickHouse/pull/29790) ([alexey-milovidov](https://github.com/alexey-milovidov)).
+* Fix concurrent access to `LowCardinality` during `GROUP BY` (in combination with `Buffer` tables it may lead to troubles). [#29782](https://github.com/ClickHouse/ClickHouse/pull/29782) ([Azat Khuzhin](https://github.com/azat)).
+* Fix incorrect `GROUP BY` (multiple rows with the same keys in result) in case of distributed query when shards had mixed versions `<= 21.3` and `>= 21.4`, `GROUP BY` key had several columns all with fixed size, and two-level aggregation was activated (see `group_by_two_level_threshold` and `group_by_two_level_threshold_bytes`). Fixes [#29580](https://github.com/ClickHouse/ClickHouse/issues/29580). [#29735](https://github.com/ClickHouse/ClickHouse/pull/29735) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
+* Fixed incorrect behaviour of setting `materialized_postgresql_tables_list` at server restart. Found in [#28529](https://github.com/ClickHouse/ClickHouse/issues/28529). [#29686](https://github.com/ClickHouse/ClickHouse/pull/29686) ([Kseniia Sumarokova](https://github.com/kssenii)).
+* Fix a bug where a condition in a filter predicate could be lost after the push-down optimization. [#29625](https://github.com/ClickHouse/ClickHouse/pull/29625) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
+* Fix JIT expression compilation with aliases and short-circuit expression evaluation. Closes [#29403](https://github.com/ClickHouse/ClickHouse/issues/29403). [#29574](https://github.com/ClickHouse/ClickHouse/pull/29574) ([Maksim Kita](https://github.com/kitaisreal)).
+* Fix a rare segfault in the `ALTER MODIFY` query when using an incorrect table identifier in a `DEFAULT` expression like `x.y.z...`. Fixes [#29184](https://github.com/ClickHouse/ClickHouse/issues/29184). [#29573](https://github.com/ClickHouse/ClickHouse/pull/29573) ([alesapin](https://github.com/alesapin)).
+* Fix nullptr dereference for `GROUP BY WITH TOTALS HAVING` (when the column from `HAVING` wasn't selected). [#29553](https://github.com/ClickHouse/ClickHouse/pull/29553) ([Azat Khuzhin](https://github.com/azat)).
+* Avoid deadlocks when reading and writing to `Join` table engine tables at the same time. [#29544](https://github.com/ClickHouse/ClickHouse/pull/29544) ([Raúl Marín](https://github.com/Algunenano)).
+* Fix a bug in the `pathStartsWith` check caused by incorrect usage of `std::mismatch`: "The behavior is undefined if the second range is shorter than the first range." [#29531](https://github.com/ClickHouse/ClickHouse/pull/29531) ([Kseniia Sumarokova](https://github.com/kssenii)).
+* Add retries in the ODBC bridge for the `Invalid cursor state` error, which is retriable. Closes [#29473](https://github.com/ClickHouse/ClickHouse/issues/29473). [#29518](https://github.com/ClickHouse/ClickHouse/pull/29518) ([Kseniia Sumarokova](https://github.com/kssenii)).
+* Fixed incorrect table name parsing on loading of `Lazy` database. Fixes [#29456](https://github.com/ClickHouse/ClickHouse/issues/29456). [#29476](https://github.com/ClickHouse/ClickHouse/pull/29476) ([tavplubix](https://github.com/tavplubix)).
+* Fix possible `Block structure mismatch` for subqueries with pushed-down `HAVING` predicate. Fixes [#29010](https://github.com/ClickHouse/ClickHouse/issues/29010). [#29475](https://github.com/ClickHouse/ClickHouse/pull/29475) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
+* Fix Logical error `Cannot capture columns` in functions greatest/least. Closes [#29334](https://github.com/ClickHouse/ClickHouse/issues/29334). [#29454](https://github.com/ClickHouse/ClickHouse/pull/29454) ([Kruglov Pavel](https://github.com/Avogar)).
+* RocksDB table engine: fix a race condition during multiple DB opening (and bring back some tests that trigger the problem on CI). [#29393](https://github.com/ClickHouse/ClickHouse/pull/29393) ([Azat Khuzhin](https://github.com/azat)).
+* Fix replicated access storage not shutting down cleanly when misconfigured. [#29388](https://github.com/ClickHouse/ClickHouse/pull/29388) ([Kevin Michel](https://github.com/kmichel-aiven)).
+* Remove window function `nth_value` as it is not memory-safe. This closes [#29347](https://github.com/ClickHouse/ClickHouse/issues/29347). [#29348](https://github.com/ClickHouse/ClickHouse/pull/29348) ([alexey-milovidov](https://github.com/alexey-milovidov)).
+* Fix vertical merges of projection parts. This fixes [#29253](https://github.com/ClickHouse/ClickHouse/issues/29253). This PR also fixes several projection merge/mutation issues introduced in https://github.com/ClickHouse/ClickHouse/pull/25165. [#29337](https://github.com/ClickHouse/ClickHouse/pull/29337) ([Amos Bird](https://github.com/amosbird)).
+* Fix hanging DDL queries on Replicated database while adding a new replica. [#29328](https://github.com/ClickHouse/ClickHouse/pull/29328) ([Kevin Michel](https://github.com/kmichel-aiven)).
+* Fix connection timeouts (`send_timeout`/`receive_timeout`). [#29282](https://github.com/ClickHouse/ClickHouse/pull/29282) ([Azat Khuzhin](https://github.com/azat)).
+* Fix possible `Table columns structure in ZooKeeper is different from local table structure` exception while recreating or creating new replicas of `ReplicatedMergeTree`, when one of the table columns has a default expression with case-insensitive functions. [#29266](https://github.com/ClickHouse/ClickHouse/pull/29266) ([Anton Popov](https://github.com/CurtizJ)).
+* Send a normal `Database doesn't exist` error (`UNKNOWN_DATABASE`) to the client (via TCP) instead of `Attempt to read after eof` (`ATTEMPT_TO_READ_AFTER_EOF`). [#29229](https://github.com/ClickHouse/ClickHouse/pull/29229) ([Azat Khuzhin](https://github.com/azat)).
+* Fix segfault while inserting into column with type LowCardinality(Nullable) in Avro input format. [#29132](https://github.com/ClickHouse/ClickHouse/pull/29132) ([Kruglov Pavel](https://github.com/Avogar)).
+* Do not allow reusing previous credentials in case of inter-server secret (before this fix, an `INSERT` via Buffer/Kafka to a Distributed table with an interserver secret configured for that cluster could re-use the previously set user for that connection). [#29060](https://github.com/ClickHouse/ClickHouse/pull/29060) ([Azat Khuzhin](https://github.com/azat)).
+* Handle `any_join_distinct_right_table_keys` when joining with a dictionary, close [#29007](https://github.com/ClickHouse/ClickHouse/issues/29007). [#29014](https://github.com/ClickHouse/ClickHouse/pull/29014) ([Vladimir C](https://github.com/vdimir)).
+* Fix the "Not found column ... in block" error when joining on an alias column, close [#26980](https://github.com/ClickHouse/ClickHouse/issues/26980). [#29008](https://github.com/ClickHouse/ClickHouse/pull/29008) ([Vladimir C](https://github.com/vdimir)).
+* Fix the number of threads used in a `GLOBAL IN` subquery (it was executed in a single thread since the [#19414](https://github.com/ClickHouse/ClickHouse/issues/19414) bugfix). [#28997](https://github.com/ClickHouse/ClickHouse/pull/28997) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
+* Fix bad optimizations of ORDER BY if it contains WITH FILL. This closes [#28908](https://github.com/ClickHouse/ClickHouse/issues/28908). This closes [#26049](https://github.com/ClickHouse/ClickHouse/issues/26049). [#28910](https://github.com/ClickHouse/ClickHouse/pull/28910) ([alexey-milovidov](https://github.com/alexey-milovidov)).
+* Fix higher-order array functions (`SIGSEGV` for `arrayCompact`/`ILLEGAL_COLUMN` for `arrayDifference`/`arrayCumSumNonNegative`) with consts. [#28904](https://github.com/ClickHouse/ClickHouse/pull/28904) ([Azat Khuzhin](https://github.com/azat)).
+* Fix waiting for mutation with `mutations_sync=2`. [#28889](https://github.com/ClickHouse/ClickHouse/pull/28889) ([Azat Khuzhin](https://github.com/azat)).
+* Fix queries to external databases (i.e. MySQL) with multiple columns in `IN` (e.g. `(k, v) IN ((1, 2))`). [#28888](https://github.com/ClickHouse/ClickHouse/pull/28888) ([Azat Khuzhin](https://github.com/azat)).
+* Fix a bug with `LowCardinality` in short-circuit function evaluation. Closes [#28884](https://github.com/ClickHouse/ClickHouse/issues/28884). [#28887](https://github.com/ClickHouse/ClickHouse/pull/28887) ([Kruglov Pavel](https://github.com/Avogar)).
+* Fix reading of subcolumns from compact parts. [#28873](https://github.com/ClickHouse/ClickHouse/pull/28873) ([Anton Popov](https://github.com/CurtizJ)).
+* Fixed a race condition between `DROP PART` and `REPLACE/MOVE PARTITION` that might cause replicas to diverge in rare cases. [#28864](https://github.com/ClickHouse/ClickHouse/pull/28864) ([tavplubix](https://github.com/tavplubix)).
+* Fix expressions compilation with short circuit evaluation. [#28821](https://github.com/ClickHouse/ClickHouse/pull/28821) ([Azat Khuzhin](https://github.com/azat)).
+* Fix extremely rare case when ReplicatedMergeTree replicas can diverge after hard reboot of all replicas. The error looks like `Part ... intersects (previous|next) part ...`. [#28817](https://github.com/ClickHouse/ClickHouse/pull/28817) ([alesapin](https://github.com/alesapin)).
+* Better check for connection usability and also catch any exception in `RabbitMQ` shutdown just in case. [#28797](https://github.com/ClickHouse/ClickHouse/pull/28797) ([Kseniia Sumarokova](https://github.com/kssenii)).
+* Fix benign race condition in ReplicatedMergeTreeQueue. Shouldn't be visible for user, but can lead to subtle bugs. [#28734](https://github.com/ClickHouse/ClickHouse/pull/28734) ([alesapin](https://github.com/alesapin)).
+* Fix possible crash for `SELECT` with partially created aggregate projection in case of exception. [#28700](https://github.com/ClickHouse/ClickHouse/pull/28700) ([Amos Bird](https://github.com/amosbird)).
+* Fix a coredump when creating distributed tables with wrong parameters. [#28686](https://github.com/ClickHouse/ClickHouse/pull/28686) ([Zhiyong Wang](https://github.com/ljcui)).
+* Add `Settings.Names` and `Settings.Values` aliases for the `system.processes` table. [#28685](https://github.com/ClickHouse/ClickHouse/pull/28685) ([Vitaly](https://github.com/orloffv)).
+* Support for S2 Geometry library: Fix the number of arguments required by `s2RectAdd` and `s2RectContains` functions. [#28663](https://github.com/ClickHouse/ClickHouse/pull/28663) ([Bharat Nallan](https://github.com/bharatnc)).
+* Fix invalid constant type conversion when Nullable or LowCardinality primary key is used. [#28636](https://github.com/ClickHouse/ClickHouse/pull/28636) ([Amos Bird](https://github.com/amosbird)).
+* Fix "Column is not under aggregate function and not in GROUP BY" with PREWHERE (Fixes: [#28461](https://github.com/ClickHouse/ClickHouse/issues/28461)). [#28502](https://github.com/ClickHouse/ClickHouse/pull/28502) ([Azat Khuzhin](https://github.com/azat)).
+
+
### ClickHouse release v21.10, 2021-10-16
#### Backward Incompatible Change
diff --git a/CMakeLists.txt b/CMakeLists.txt
index ede70b09d94..6df7b24abb1 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -169,9 +169,7 @@ endif ()
include (cmake/check_flags.cmake)
include (cmake/add_warning.cmake)
-if (NOT MSVC)
- set (COMMON_WARNING_FLAGS "${COMMON_WARNING_FLAGS} -Wall") # -Werror and many more is also added inside cmake/warnings.cmake
-endif ()
+set (COMMON_WARNING_FLAGS "${COMMON_WARNING_FLAGS} -Wall") # -Werror and many more is also added inside cmake/warnings.cmake
if (COMPILER_CLANG)
# clang: warning: argument unused during compilation: '-specs=/usr/share/dpkg/no-pie-compile.specs' [-Wunused-command-line-argument]
@@ -201,7 +199,7 @@ endif ()
option(ENABLE_TESTS "Provide unit_test_dbms target with Google.Test unit tests" ON)
option(ENABLE_EXAMPLES "Build all example programs in 'examples' subdirectories" OFF)
-if (OS_LINUX AND (ARCH_AMD64 OR ARCH_AARCH64) AND NOT UNBUNDLED AND MAKE_STATIC_LIBRARIES AND NOT SPLIT_SHARED_LIBRARIES AND NOT USE_MUSL)
+if (OS_LINUX AND (ARCH_AMD64 OR ARCH_AARCH64) AND MAKE_STATIC_LIBRARIES AND NOT SPLIT_SHARED_LIBRARIES AND NOT USE_MUSL)
# Only for Linux, x86_64 or aarch64.
option(GLIBC_COMPATIBILITY "Enable compatibility with older glibc libraries." ON)
elseif(GLIBC_COMPATIBILITY)
@@ -219,30 +217,6 @@ endif()
# Make sure the final executable has symbols exported
set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -rdynamic")
-find_program (OBJCOPY_PATH NAMES "llvm-objcopy" "llvm-objcopy-13" "llvm-objcopy-12" "llvm-objcopy-11" "llvm-objcopy-10" "llvm-objcopy-9" "llvm-objcopy-8" "objcopy")
-
-if (NOT OBJCOPY_PATH AND OS_DARWIN)
- find_program (BREW_PATH NAMES "brew")
- if (BREW_PATH)
- execute_process (COMMAND ${BREW_PATH} --prefix llvm ERROR_QUIET OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE LLVM_PREFIX)
- if (LLVM_PREFIX)
- find_program (OBJCOPY_PATH NAMES "llvm-objcopy" PATHS "${LLVM_PREFIX}/bin" NO_DEFAULT_PATH)
- endif ()
- if (NOT OBJCOPY_PATH)
- execute_process (COMMAND ${BREW_PATH} --prefix binutils ERROR_QUIET OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE BINUTILS_PREFIX)
- if (BINUTILS_PREFIX)
- find_program (OBJCOPY_PATH NAMES "objcopy" PATHS "${BINUTILS_PREFIX}/bin" NO_DEFAULT_PATH)
- endif ()
- endif ()
- endif ()
-endif ()
-
-if (OBJCOPY_PATH)
- message (STATUS "Using objcopy: ${OBJCOPY_PATH}")
-else ()
- message (FATAL_ERROR "Cannot find objcopy.")
-endif ()
-
if (OS_DARWIN)
# The `-all_load` flag forces loading of all symbols from all libraries,
# and leads to multiply-defined symbols. This flag allows force loading
@@ -278,6 +252,13 @@ if (NOT CMAKE_BUILD_TYPE_UC STREQUAL "RELEASE")
endif ()
endif()
+if (CMAKE_BUILD_TYPE_UC STREQUAL "DEBUG")
+ set(USE_DEBUG_HELPERS ON)
+else ()
+ set(USE_DEBUG_HELPERS OFF)
+endif()
+option(USE_DEBUG_HELPERS "Enable debug helpers" ${USE_DEBUG_HELPERS})
+
# Create BuildID when using lld. For other linkers it is created by default.
if (LINKER_NAME MATCHES "lld$")
# SHA1 is not cryptographically secure but it is the best what lld is offering.
@@ -312,6 +293,10 @@ include(cmake/cpu_features.cmake)
# Enable it explicitly.
set (COMPILER_FLAGS "${COMPILER_FLAGS} -fasynchronous-unwind-tables")
+# Reproducible builds
+set (COMPILER_FLAGS "${COMPILER_FLAGS} -ffile-prefix-map=${CMAKE_SOURCE_DIR}=.")
+set (CMAKE_ASM_FLAGS "${CMAKE_ASM_FLAGS} -ffile-prefix-map=${CMAKE_SOURCE_DIR}=.")
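+# (This maps the absolute source directory to "." in debug info and __FILE__
+# expansions, so builds from different checkout paths can be bit-identical,
+# assuming the same toolchain and flags.)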
+
if (${CMAKE_VERSION} VERSION_LESS "3.12.4")
# CMake < 3.12 doesn't support setting 20 as a C++ standard version.
# We will add C++ standard controlling flag in CMAKE_CXX_FLAGS manually for now.
@@ -392,6 +377,8 @@ if (COMPILER_CLANG)
option(ENABLE_THINLTO "Clang-specific link time optimization" ON)
endif()
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fstrict-vtable-pointers")
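+ # (clang-only: allows the optimizer to assume vtable pointers are not rewritten
+ # behind its back, enabling more aggressive devirtualization)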
+
# Set new experimental pass manager, it's a performance, build time and binary size win.
# Can be removed after https://reviews.llvm.org/D66490 merged and released to at least two versions of clang.
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fexperimental-new-pass-manager")
@@ -402,32 +389,13 @@ if (COMPILER_CLANG)
# completely.
if (ENABLE_THINLTO AND NOT ENABLE_TESTS AND NOT SANITIZE)
# Link time optimization
- set (CMAKE_C_FLAGS_RELWITHDEBINFO "${CMAKE_C_FLAGS_RELWITHDEBINFO} -flto=thin")
- set (CMAKE_CXX_FLAGS_RELWITHDEBINFO "${CMAKE_CXX_FLAGS_RELWITHDEBINFO} -flto=thin")
- set (CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO "${CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO} -flto=thin")
+ set (CMAKE_C_FLAGS_RELWITHDEBINFO "${CMAKE_C_FLAGS_RELWITHDEBINFO} -flto=thin -fwhole-program-vtables")
+ set (CMAKE_CXX_FLAGS_RELWITHDEBINFO "${CMAKE_CXX_FLAGS_RELWITHDEBINFO} -flto=thin -fwhole-program-vtables")
+ set (CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO "${CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO} -flto=thin -fwhole-program-vtables")
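+ # (-fwhole-program-vtables enables whole-program devirtualization; it is only
+ # sound together with (Thin)LTO, hence it is added to the LTO flags above.)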
elseif (ENABLE_THINLTO)
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot enable ThinLTO")
endif ()
- # Always prefer llvm tools when using clang. For instance, we cannot use GNU ar when llvm LTO is enabled
- find_program (LLVM_AR_PATH NAMES "llvm-ar" "llvm-ar-13" "llvm-ar-12" "llvm-ar-11" "llvm-ar-10" "llvm-ar-9" "llvm-ar-8")
-
- if (LLVM_AR_PATH)
- message(STATUS "Using llvm-ar: ${LLVM_AR_PATH}.")
- set (CMAKE_AR ${LLVM_AR_PATH})
- else ()
- message(WARNING "Cannot find llvm-ar. System ar will be used instead. It does not work with ThinLTO.")
- endif ()
-
- find_program (LLVM_RANLIB_PATH NAMES "llvm-ranlib" "llvm-ranlib-13" "llvm-ranlib-12" "llvm-ranlib-11" "llvm-ranlib-10" "llvm-ranlib-9" "llvm-ranlib-8")
-
- if (LLVM_RANLIB_PATH)
- message(STATUS "Using llvm-ranlib: ${LLVM_RANLIB_PATH}.")
- set (CMAKE_RANLIB ${LLVM_RANLIB_PATH})
- else ()
- message(WARNING "Cannot find llvm-ranlib. System ranlib will be used instead. It does not work with ThinLTO.")
- endif ()
-
elseif (ENABLE_THINLTO)
message (${RECONFIGURE_MESSAGE_LEVEL} "ThinLTO is only available with CLang")
endif ()
@@ -435,20 +403,7 @@ endif ()
# Turns on all external libs like s3, kafka, ODBC, ...
option(ENABLE_LIBRARIES "Enable all external libraries by default" ON)
-# We recommend avoiding this mode for production builds because we can't guarantee
-# all needed libraries exist in your system.
-# This mode exists for enthusiastic developers who are searching for trouble.
-# The whole idea of using unknown version of libraries from the OS distribution is deeply flawed.
-# Useful for maintainers of OS packages.
-option (UNBUNDLED "Use system libraries instead of ones in contrib/" OFF)
-
-if (UNBUNDLED)
- set(NOT_UNBUNDLED OFF)
-else ()
- set(NOT_UNBUNDLED ON)
-endif ()
-
-if (UNBUNDLED OR NOT (OS_LINUX OR OS_DARWIN))
+if (NOT (OS_LINUX OR OS_DARWIN))
# Using system libs can cause a lot of warnings in includes (on macro expansion).
option(WERROR "Enable -Werror compiler option" OFF)
else ()
@@ -494,19 +449,6 @@ else ()
set (CMAKE_POSITION_INDEPENDENT_CODE ON)
endif ()
-# https://github.com/include-what-you-use/include-what-you-use
-option (USE_INCLUDE_WHAT_YOU_USE "Automatically reduce unneeded includes in source code (external tool)" OFF)
-
-if (USE_INCLUDE_WHAT_YOU_USE)
- find_program(IWYU_PATH NAMES include-what-you-use iwyu)
- if (NOT IWYU_PATH)
- message(FATAL_ERROR "Could not find the program include-what-you-use")
- endif()
- if (${CMAKE_VERSION} VERSION_LESS "3.3.0")
- message(FATAL_ERROR "include-what-you-use requires CMake version at least 3.3.")
- endif()
-endif ()
-
if (ENABLE_TESTS)
message (STATUS "Unit tests are enabled")
else()
@@ -527,7 +469,6 @@ message (STATUS
USE_STATIC_LIBRARIES=${USE_STATIC_LIBRARIES}
MAKE_STATIC_LIBRARIES=${MAKE_STATIC_LIBRARIES}
SPLIT_SHARED=${SPLIT_SHARED_LIBRARIES}
- UNBUNDLED=${UNBUNDLED}
CCACHE=${CCACHE_FOUND} ${CCACHE_VERSION}")
include (GNUInstallDirs)
@@ -590,7 +531,6 @@ include (cmake/find/avro.cmake)
include (cmake/find/msgpack.cmake)
include (cmake/find/cassandra.cmake)
include (cmake/find/sentry.cmake)
-include (cmake/find/stats.cmake)
include (cmake/find/datasketches.cmake)
include (cmake/find/libprotobuf-mutator.cmake)
@@ -608,8 +548,6 @@ include (cmake/find/mysqlclient.cmake)
# When testing for memory leaks with Valgrind, don't link tcmalloc or jemalloc.
-include (cmake/print_flags.cmake)
-
if (TARGET global-group)
install (EXPORT global DESTINATION cmake)
endif ()
@@ -624,7 +562,7 @@ macro (add_executable target)
# invoke built-in add_executable
# explicitly acquire and interpose malloc symbols by clickhouse_malloc
# if GLIBC_COMPATIBILITY is ON and ENABLE_THINLTO is on then provide the memcpy symbol explicitly to neutralize ThinLTO's libcall generation.
- if (GLIBC_COMPATIBILITY AND ENABLE_THINLTO)
+ if (ARCH_AMD64 AND GLIBC_COMPATIBILITY AND ENABLE_THINLTO)
_add_executable (${ARGV} $<TARGET_OBJECTS:clickhouse_malloc> $<TARGET_OBJECTS:memcpy>)
else ()
_add_executable (${ARGV} $<TARGET_OBJECTS:clickhouse_malloc>)
@@ -661,6 +599,7 @@ include_directories(${ConfigIncludePath})
# Add as many warnings as possible for our own code.
include (cmake/warnings.cmake)
+include (cmake/print_flags.cmake)
add_subdirectory (base)
add_subdirectory (src)
diff --git a/PreLoad.cmake b/PreLoad.cmake
index 0a25a55e7bf..9fba896d72e 100644
--- a/PreLoad.cmake
+++ b/PreLoad.cmake
@@ -18,3 +18,26 @@ if (NOT DEFINED ENV{CLION_IDE} AND NOT DEFINED ENV{XCODE_IDE})
set(CMAKE_GENERATOR "Ninja" CACHE INTERNAL "" FORCE)
endif ()
endif()
+
+
+# Default toolchain - this is needed to avoid dependency on OS files.
+execute_process(COMMAND uname -s OUTPUT_VARIABLE OS)
+execute_process(COMMAND uname -m OUTPUT_VARIABLE ARCH)
+
+if (OS MATCHES "Linux"
+ AND NOT DEFINED CMAKE_TOOLCHAIN_FILE
+ AND NOT DISABLE_HERMETIC_BUILD
+ AND ($ENV{CC} MATCHES ".*clang.*" OR CMAKE_C_COMPILER MATCHES ".*clang.*")
+ AND (USE_STATIC_LIBRARIES OR NOT DEFINED USE_STATIC_LIBRARIES))
+
+ if (ARCH MATCHES "amd64|x86_64")
+ set (CMAKE_TOOLCHAIN_FILE "cmake/linux/toolchain-x86_64.cmake" CACHE INTERNAL "" FORCE)
+ elseif (ARCH MATCHES "^(aarch64.*|AARCH64.*|arm64.*|ARM64.*)")
+ set (CMAKE_TOOLCHAIN_FILE "cmake/linux/toolchain-aarch64.cmake" CACHE INTERNAL "" FORCE)
+ elseif (ARCH MATCHES "^(ppc64le.*|PPC64LE.*)")
+ set (CMAKE_TOOLCHAIN_FILE "cmake/linux/toolchain-ppc64le.cmake" CACHE INTERNAL "" FORCE)
+ else ()
+ message (FATAL_ERROR "Unsupported architecture: ${ARCH}")
+ endif ()
+
+endif()
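+
+# The hermetic toolchain can be bypassed by configuring with
+# -DDISABLE_HERMETIC_BUILD=1 or by passing an explicit -DCMAKE_TOOLCHAIN_FILE=...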
diff --git a/SECURITY.md b/SECURITY.md
index 6b82c97e7d9..1872d67a529 100644
--- a/SECURITY.md
+++ b/SECURITY.md
@@ -19,9 +19,11 @@ The following versions of ClickHouse server are currently being supported with s
| 21.4 | :x: |
| 21.5 | :x: |
| 21.6 | :x: |
-| 21.7 | ✅ |
+| 21.7 | :x: |
| 21.8 | ✅ |
| 21.9 | ✅ |
+| 21.10 | ✅ |
+| 21.11 | ✅ |
## Reporting a Vulnerability
diff --git a/base/base/CMakeLists.txt b/base/base/CMakeLists.txt
index 281374b7121..000233738f7 100644
--- a/base/base/CMakeLists.txt
+++ b/base/base/CMakeLists.txt
@@ -29,7 +29,8 @@ elseif (ENABLE_READLINE)
endif ()
if (USE_DEBUG_HELPERS)
- set (INCLUDE_DEBUG_HELPERS "-include \"${ClickHouse_SOURCE_DIR}/base/base/iostream_debug_helpers.h\"")
+ get_target_property(MAGIC_ENUM_INCLUDE_DIR magic_enum INTERFACE_INCLUDE_DIRECTORIES)
+ set (INCLUDE_DEBUG_HELPERS "-I\"${MAGIC_ENUM_INCLUDE_DIR}\" -include \"${ClickHouse_SOURCE_DIR}/base/base/iostream_debug_helpers.h\"")
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${INCLUDE_DEBUG_HELPERS}")
endif ()
diff --git a/base/base/CachedFn.h b/base/base/CachedFn.h
index ed1dbdbd48c..19b2a8ce2c0 100644
--- a/base/base/CachedFn.h
+++ b/base/base/CachedFn.h
@@ -18,8 +18,8 @@ struct CachedFn
{
private:
using Traits = FnTraits;
- using DecayedArgs = TLMap<std::decay_t, typename Traits::Args>;
- using Key = TLChangeRoot<std::tuple, DecayedArgs>;
+ using DecayedArgs = TypeListMap<std::decay_t, typename Traits::Args>;
+ using Key = TypeListChangeRoot<std::tuple, DecayedArgs>;
using Result = typename Traits::Ret;
std::map<Key, Result> cache; // Can't use hashmap as tuples are unhashable by default
diff --git a/base/base/FnTraits.h b/base/base/FnTraits.h
index 6b8dc7cd0d2..78e1fa62ebb 100644
--- a/base/base/FnTraits.h
+++ b/base/base/FnTraits.h
@@ -1,6 +1,6 @@
#pragma once
-#include "Typelist.h"
+#include "TypeList.h"
namespace detail
{
@@ -14,7 +14,7 @@ struct FnTraits
static constexpr bool value = std::is_invocable_r_v;
using Ret = R;
- using Args = Typelist<A...>;
+ using Args = TypeList<A...>;
};
template
diff --git a/base/base/LocalDateTime.h b/base/base/LocalDateTime.h
index 0edc12374bb..282a56ac640 100644
--- a/base/base/LocalDateTime.h
+++ b/base/base/LocalDateTime.h
@@ -108,6 +108,11 @@ public:
LocalDate toDate() const { return LocalDate(m_year, m_month, m_day); }
LocalDateTime toStartOfDate() const { return LocalDateTime(m_year, m_month, m_day, 0, 0, 0); }
+ time_t to_time_t(const DateLUTImpl & time_zone = DateLUT::instance()) const
+ {
+ return time_zone.makeDateTime(m_year, m_month, m_day, m_hour, m_minute, m_second);
+ }
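+
+ /// Illustrative usage (not part of the original change; assumes the usual
+ /// component constructor and named DateLUT instances):
+ ///     LocalDateTime dt(2021, 11, 1, 12, 0, 0);
+ ///     time_t ts = dt.to_time_t();                         // server default time zone
+ ///     time_t ts_utc = dt.to_time_t(DateLUT::instance("UTC"));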
+
std::string toString() const
{
std::string s{"0000-00-00 00:00:00"};
diff --git a/base/base/ReplxxLineReader.cpp b/base/base/ReplxxLineReader.cpp
index 38867faf5d5..3c2ac1f8891 100644
--- a/base/base/ReplxxLineReader.cpp
+++ b/base/base/ReplxxLineReader.cpp
@@ -25,6 +25,16 @@ void trim(String & s)
s.erase(std::find_if(s.rbegin(), s.rend(), [](int ch) { return !std::isspace(ch); }).base(), s.end());
}
+std::string getEditor()
+{
+ const char * editor = std::getenv("EDITOR");
+
+ if (!editor || !*editor)
+ editor = "vim";
+
+ return editor;
+}
+
/// Copied from replxx::src/util.cxx::now_ms_str() under the terms of 3-clause BSD license of Replxx.
/// Copyright (c) 2017-2018, Marcin Konarski (amok at codestation.org)
/// Copyright (c) 2010, Salvatore Sanfilippo (antirez at gmail dot com)
@@ -123,6 +133,7 @@ ReplxxLineReader::ReplxxLineReader(
Patterns delimiters_,
replxx::Replxx::highlighter_callback_t highlighter_)
: LineReader(history_file_path_, multiline_, std::move(extenders_), std::move(delimiters_)), highlighter(std::move(highlighter_))
+ , editor(getEditor())
{
using namespace std::placeholders;
using Replxx = replxx::Replxx;
@@ -236,14 +247,13 @@ void ReplxxLineReader::addToHistory(const String & line)
rx.print("Unlock of history file failed: %s\n", errnoToString(errno).c_str());
}
-int ReplxxLineReader::execute(const std::string & command)
+/// See comments in ShellCommand::executeImpl()
+/// (for the vfork via dlsym())
+int ReplxxLineReader::executeEditor(const std::string & path)
{
- std::vector argv0("sh", &("sh"[3]));
- std::vector argv1("-c", &("-c"[3]));
- std::vector argv2(command.data(), command.data() + command.size() + 1);
-
- const char * filename = "/bin/sh";
- char * const argv[] = {argv0.data(), argv1.data(), argv2.data(), nullptr};
+ std::vector argv0(editor.data(), editor.data() + editor.size() + 1);
+ std::vector argv1(path.data(), path.data() + path.size() + 1);
+ char * const argv[] = {argv0.data(), argv1.data(), nullptr};
static void * real_vfork = dlsym(RTLD_DEFAULT, "vfork");
if (!real_vfork)
@@ -260,6 +270,7 @@ int ReplxxLineReader::execute(const std::string & command)
return -1;
}
+ /// Child
if (0 == pid)
{
sigset_t mask;
@@ -267,16 +278,26 @@ int ReplxxLineReader::execute(const std::string & command)
sigprocmask(0, nullptr, &mask);
sigprocmask(SIG_UNBLOCK, &mask, nullptr);
- execv(filename, argv);
+ execvp(editor.c_str(), argv);
+ rx.print("Cannot execute %s: %s\n", editor.c_str(), errnoToString(errno).c_str());
_exit(-1);
}
int status = 0;
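+ /// waitpid() may be interrupted by a signal (EINTR), so retry until the child actually exits.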
- if (-1 == waitpid(pid, &status, 0))
+ do
{
- rx.print("Cannot waitpid: %s\n", errnoToString(errno).c_str());
- return -1;
- }
+ int exited_pid = waitpid(pid, &status, 0);
+ if (exited_pid == -1)
+ {
+ if (errno == EINTR)
+ continue;
+
+ rx.print("Cannot waitpid: %s\n", errnoToString(errno).c_str());
+ return -1;
+ }
+ else
+ break;
+ } while (true);
return status;
}
@@ -290,10 +311,6 @@ void ReplxxLineReader::openEditor()
return;
}
- const char * editor = std::getenv("EDITOR");
- if (!editor || !*editor)
- editor = "vim";
-
replxx::Replxx::State state(rx.get_state());
size_t bytes_written = 0;
@@ -316,7 +333,7 @@ void ReplxxLineReader::openEditor()
return;
}
- if (0 == execute(fmt::format("{} {}", editor, filename)))
+ if (0 == executeEditor(filename))
{
try
{
diff --git a/base/base/ReplxxLineReader.h b/base/base/ReplxxLineReader.h
index 9aa32a1e26d..d4cc7de1e7a 100644
--- a/base/base/ReplxxLineReader.h
+++ b/base/base/ReplxxLineReader.h
@@ -22,7 +22,7 @@ public:
private:
InputStatus readOneLine(const String & prompt) override;
void addToHistory(const String & line) override;
- int execute(const std::string & command);
+ int executeEditor(const std::string & path);
void openEditor();
replxx::Replxx rx;
@@ -31,4 +31,6 @@ private:
// used to call flock() to synchronize multiple clients using same history file
int history_file_fd = -1;
bool bracketed_paste_enabled = false;
+
+ std::string editor;
};
diff --git a/base/base/TypeList.h b/base/base/TypeList.h
new file mode 100644
index 00000000000..244403b1c6b
--- /dev/null
+++ b/base/base/TypeList.h
@@ -0,0 +1,44 @@
+#pragma once
+
+#include <cstddef>
+#include <type_traits>
+#include <utility>
+#include "defines.h"
+#include "TypePair.h"
+
+/// General-purpose typelist. Easy on compilation times as it does not use recursion.
+template <typename ...Args>
+struct TypeList { static constexpr size_t size = sizeof...(Args); };
+
+namespace TypeListUtils /// In some contexts it's more handy to use functions instead of aliases
+{
+ template <typename ...LArgs, typename ...RArgs>
+ constexpr TypeList<LArgs..., RArgs...> concat(TypeList<LArgs...>, TypeList<RArgs...>) { return {}; }
+
+ template <typename T, typename ...Args>
+ constexpr TypeList<T, Args...> prepend(TypeList<Args...>) { return {}; }
+
+ template <typename T, typename ...Args>
+ constexpr TypeList<Args..., T> append(TypeList<Args...>) { return {}; }
+
+ template <template <typename> typename F, typename ...Args>
+ constexpr TypeList<F<Args>...> map(TypeList<Args...>) { return {}; }
+
+ template <template <typename...> typename Root, typename ...Args>
+ constexpr Root<Args...> changeRoot(TypeList<Args...>) { return {}; }
+
+ template <typename F, typename ...Args>
+ constexpr void forEach(TypeList<Args...>, F && f) { (std::forward<F>(f)(Id<Args>{}), ...); }
+}
+
+template <typename TypeListLeft, typename TypeListRight>
+using TypeListConcat = decltype(TypeListUtils::concat(TypeListLeft{}, TypeListRight{}));
+
+template <typename T, typename List> using TypeListPrepend = decltype(TypeListUtils::prepend<T>(List{}));
+template <typename T, typename List> using TypeListAppend = decltype(TypeListUtils::append<T>(List{}));
+
+template <template <typename> typename F, typename List>
+using TypeListMap = decltype(TypeListUtils::map<F>(List{}));
+
+template <template <typename...> typename Root, typename List>
+using TypeListChangeRoot = decltype(TypeListUtils::changeRoot<Root>(List{}));
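+
+/// Illustrative usage (not part of the original file):
+///     using L = TypeListConcat<TypeList<int, float>, TypeList<double>>; // TypeList<int, float, double>
+///     static_assert(L::size == 3);
+///     TypeListUtils::forEach(L{}, []<typename T>(Id<T>) { /* instantiate something per T */ });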
diff --git a/base/base/TypeLists.h b/base/base/TypeLists.h
new file mode 100644
index 00000000000..6c1283d054c
--- /dev/null
+++ b/base/base/TypeLists.h
@@ -0,0 +1,21 @@
+#pragma once
+
+#include "TypeList.h"
+#include "extended_types.h"
+#include "Decimal.h"
+#include "UUID.h"
+
+namespace DB
+{
+
+using TypeListNativeInt = TypeList<UInt8, UInt16, UInt32, UInt64, Int8, Int16, Int32, Int64>;
+using TypeListFloat = TypeList<Float32, Float64>;
+using TypeListNativeNumber = TypeListConcat<TypeListNativeInt, TypeListFloat>;
+using TypeListWideInt = TypeList<UInt128, Int128, UInt256, Int256>;
+using TypeListInt = TypeListConcat<TypeListNativeInt, TypeListWideInt>;
+using TypeListIntAndFloat = TypeListConcat<TypeListInt, TypeListFloat>;
+using TypeListDecimal = TypeList<Decimal32, Decimal64, Decimal128, Decimal256>;
+using TypeListNumber = TypeListConcat<TypeListIntAndFloat, TypeListDecimal>;
+using TypeListNumberWithUUID = TypeListAppend<UUID, TypeListNumber>;
+
+}
diff --git a/base/base/TypePair.h b/base/base/TypePair.h
index 54f31831241..8c2f380618c 100644
--- a/base/base/TypePair.h
+++ b/base/base/TypePair.h
@@ -1,4 +1,4 @@
#pragma once
-template struct TypePair { };
-template struct Id { };
+template struct TypePair {};
+template struct Id {};
diff --git a/base/base/Typelist.h b/base/base/Typelist.h
deleted file mode 100644
index 74abaf9e388..00000000000
--- a/base/base/Typelist.h
+++ /dev/null
@@ -1,44 +0,0 @@
-#pragma once
-
-#include <cstddef>
-#include <type_traits>
-#include <utility>
-#include "defines.h"
-#include "TypePair.h"
-
-/// General-purpose typelist. Easy on compilation times as it does not use recursion.
-template <class ...Args>
-struct Typelist { static constexpr size_t size = sizeof...(Args); };
-
-namespace TLUtils /// In some contexts it's more handy to use functions instead of aliases
-{
-template <class ...LArgs, class ...RArgs>
-constexpr Typelist<LArgs..., RArgs...> concat(Typelist<LArgs...>, Typelist<RArgs...>) { return {}; }
-
-template <class T, class ...Args>
-constexpr Typelist<T, Args...> prepend(Typelist<Args...>) { return {}; }
-
-template <class T, class ...Args>
-constexpr Typelist<Args..., T> append(Typelist<Args...>) { return {}; }
-
-template <template <class> class F, class ...Args>
-constexpr Typelist<F<Args>...> map(Typelist<Args...>) { return {}; }
-
-template <template <class...> class Root, class ...Args>
-constexpr Root<Args...> changeRoot(Typelist<Args...>) { return {}; }
-
-template <class F, class ...Args>
-constexpr void forEach(Typelist<Args...>, F && f) { (std::forward<F>(f)(Id<Args>{}), ...); }
-}
-
-template <class TLLeft, class TLRight>
-using TLConcat = decltype(TLUtils::concat(TLLeft{}, TLRight{}));
-
-template <class T, class Typelist> using TLPrepend = decltype(TLUtils::prepend<T>(Typelist{}));
-template <class T, class Typelist> using TLAppend = decltype(TLUtils::append<T>(Typelist{}));
-
-template <template <class> class F, class Typelist>
-using TLMap = decltype(TLUtils::map<F>(Typelist{}));
-
-template <template <class...> class Root, class Typelist>
-using TLChangeRoot = decltype(TLUtils::changeRoot<Root>(Typelist{}));
diff --git a/base/base/Typelists.h b/base/base/Typelists.h
deleted file mode 100644
index d5d58be96cc..00000000000
--- a/base/base/Typelists.h
+++ /dev/null
@@ -1,18 +0,0 @@
-#pragma once
-
-#include "Typelist.h"
-#include "extended_types.h"
-#include "Decimal.h"
-#include "UUID.h"
-
-namespace DB
-{
-using TLIntegral = Typelist<UInt8, UInt16, UInt32, UInt64, Int8, Int16, Int32, Int64, Float32, Float64>;
-using TLExtendedIntegral = Typelist<UInt128, Int128, UInt256, Int256>;
-using TLDecimals = Typelist<Decimal32, Decimal64, Decimal128, Decimal256>;
-
-using TLIntegralWithExtended = TLConcat<TLIntegral, TLExtendedIntegral>;
-
-using TLNumbers = TLConcat<TLIntegralWithExtended, TLDecimals>;
-using TLNumbersWithUUID = TLAppend<UUID, TLNumbers>;
-}
diff --git a/base/base/UUID.h b/base/base/UUID.h
index 8b1ed16ea2f..d6982b55faa 100644
--- a/base/base/UUID.h
+++ b/base/base/UUID.h
@@ -5,5 +5,5 @@
namespace DB
{
-using UUID = StrongTypedef<UInt128, struct UUIDTag>;
+ using UUID = StrongTypedef<UInt128, struct UUIDTag>;
}
diff --git a/base/base/defines.h b/base/base/defines.h
index 21a3c09f532..bd98e99f5b9 100644
--- a/base/base/defines.h
+++ b/base/base/defines.h
@@ -28,8 +28,8 @@
#define NO_INLINE __attribute__((__noinline__))
#define MAY_ALIAS __attribute__((__may_alias__))
-#if !defined(__x86_64__) && !defined(__aarch64__) && !defined(__PPC__)
-# error "The only supported platforms are x86_64 and AArch64, PowerPC (work in progress)"
+#if !defined(__x86_64__) && !defined(__aarch64__) && !defined(__PPC__) && !(defined(__riscv) && (__riscv_xlen == 64))
+# error "The only supported platforms are x86_64 and AArch64, PowerPC (work in progress) and RISC-V 64 (experimental)"
#endif
/// Check for presence of address sanitizer
diff --git a/base/base/getMemoryAmount.cpp b/base/base/getMemoryAmount.cpp
index 8240f82fc67..9e1d2ac3279 100644
--- a/base/base/getMemoryAmount.cpp
+++ b/base/base/getMemoryAmount.cpp
@@ -16,17 +16,6 @@
*/
uint64_t getMemoryAmountOrZero()
{
-#if defined(OS_LINUX)
- // Try to lookup at the Cgroup limit
- std::ifstream cgroup_limit("/sys/fs/cgroup/memory/memory.limit_in_bytes");
- if (cgroup_limit.is_open())
- {
- uint64_t amount = 0; // in case of read error
- cgroup_limit >> amount;
- return amount;
- }
-#endif
-
int64_t num_pages = sysconf(_SC_PHYS_PAGES);
if (num_pages <= 0)
return 0;
@@ -35,7 +24,22 @@ uint64_t getMemoryAmountOrZero()
if (page_size <= 0)
return 0;
- return num_pages * page_size;
+ uint64_t memory_amount = num_pages * page_size;
+
+#if defined(OS_LINUX)
+ // Try to look up the cgroup memory limit
+ std::ifstream cgroup_limit("/sys/fs/cgroup/memory/memory.limit_in_bytes");
+ if (cgroup_limit.is_open())
+ {
+ uint64_t memory_limit = 0; // in case of read error
+ cgroup_limit >> memory_limit;
+ if (memory_limit > 0 && memory_limit < memory_amount)
+ memory_amount = memory_limit;
+ }
+#endif
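+ /// Note: an "unlimited" cgroup reports a huge limit (on the order of 2^63),
+ /// which the `memory_limit < memory_amount` check above filters out naturally.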
+
+ return memory_amount;
+
}
diff --git a/base/base/iostream_debug_helpers.h b/base/base/iostream_debug_helpers.h
index 4362d832970..3a3f1a741ad 100644
--- a/base/base/iostream_debug_helpers.h
+++ b/base/base/iostream_debug_helpers.h
@@ -6,6 +6,7 @@
#include
#include
#include
+#include <magic_enum.hpp>
/** Usage:
*
@@ -61,6 +62,13 @@ std::enable_if_t & dumpImpl(Out & out, T && x, std::decay_t<
}
+template <typename Out, typename T>
+std::enable_if_t<std::is_enum_v<std::decay_t<T>>, Out> &
+dumpImpl(Out & out, T && x)
+{
+ return out << magic_enum::enum_name(x);
+}
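+
+/// With this overload, DUMPVAR(some_enum) prints the enumerator name
+/// (via magic_enum::enum_name) instead of the underlying integer value.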
+
/// string and const char * - output not as container or pointer.
template
@@ -131,15 +139,26 @@ Out & dumpValue(Out & out, T && x)
template <typename Out, typename T>
Out & dump(Out & out, const char * name, T && x)
{
+ // When dumping a string literal, printing the name and the demangled type is irrelevant.
+ if constexpr (std::is_same_v<const char *, std::decay_t<std::remove_reference_t<T>>>)
+ {
+ const auto name_len = strlen(name);
+ const auto value_len = strlen(x);
+ // `name` is the same as quoted `x`
+ if (name_len > 2 && value_len > 0 && name[0] == '"' && name[name_len - 1] == '"'
+ && strncmp(name + 1, x, std::min(value_len, name_len) - 1) == 0)
+ return out << x;
+ }
+
out << demangle(typeid(x).name()) << " " << name << " = ";
- return dumpValue(out, x);
+ return dumpValue(out, x) << "; ";
}
#ifdef __clang__
#pragma clang diagnostic ignored "-Wgnu-zero-variadic-macro-arguments"
#endif
-#define DUMPVAR(VAR) ::dump(std::cerr, #VAR, (VAR)); std::cerr << "; ";
+#define DUMPVAR(VAR) ::dump(std::cerr, #VAR, (VAR));
#define DUMPHEAD std::cerr << __FILE__ << ':' << __LINE__ << " [ " << getThreadId() << " ] ";
#define DUMPTAIL std::cerr << '\n';
diff --git a/base/daemon/BaseDaemon.cpp b/base/daemon/BaseDaemon.cpp
index 70915a520b3..524fc5bbb49 100644
--- a/base/daemon/BaseDaemon.cpp
+++ b/base/daemon/BaseDaemon.cpp
@@ -13,30 +13,21 @@
#if defined(__linux__)
#include
#endif
-#include
#include
#include
#include
-#include
#include
#include
#include
#include
-#include
#include
#include
-#include
-#include
-#include
#include
#include
#include
#include
-#include
-#include
-#include
#include
#include
@@ -56,13 +47,15 @@
#include
#include
#include
-#include
#include
#include
#include
#include
#include
+#include
+#include
+
#include
#if defined(OS_DARWIN)
@@ -675,6 +668,34 @@ void BaseDaemon::initialize(Application & self)
if ((!log_path.empty() && is_daemon) || config().has("logger.stderr"))
{
std::string stderr_path = config().getString("logger.stderr", log_path + "/stderr.log");
+
+ /// Check that stderr is writable before freopen(),
+ /// since freopen() will make stderr invalid on error,
+ /// and logging to stderr will be broken,
+ /// so the following code (that is used in every program) will not write anything:
+ ///
+ /// int main(int argc, char ** argv)
+ /// {
+ /// try
+ /// {
+ /// DB::SomeApp app;
+ /// return app.run(argc, argv);
+ /// }
+ /// catch (...)
+ /// {
+ /// std::cerr << DB::getCurrentExceptionMessage(true) << "\n";
+ /// return 1;
+ /// }
+ /// }
+ if (access(stderr_path.c_str(), W_OK))
+ {
+ int fd;
+ if ((fd = creat(stderr_path.c_str(), 0600)) == -1 && errno != EEXIST)
+ throw Poco::OpenFileException("File " + stderr_path + " (logger.stderr) is not writable");
+ if (fd != -1)
+ ::close(fd);
+ }
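+ /// (creat() + close() here is only a writability probe: it creates the file
+ /// if it does not exist yet, without keeping the descriptor.)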
+
if (!freopen(stderr_path.c_str(), "a+", stderr))
throw Poco::OpenFileException("Cannot attach stderr to " + stderr_path);
@@ -973,6 +994,14 @@ void BaseDaemon::setupWatchdog()
memcpy(argv0, new_process_name, std::min(strlen(new_process_name), original_process_name.size()));
}
+ /// If streaming compression of logs is used then we write watchdog logs to cerr
+ if (config().getRawString("logger.stream_compress", "false") == "true")
+ {
+ Poco::AutoPtr<OwnPatternFormatter> pf = new OwnPatternFormatter;
+ Poco::AutoPtr<DB::OwnFormattingChannel> log = new DB::OwnFormattingChannel(pf, new Poco::ConsoleChannel(std::cerr));
+ logger().setChannel(log);
+ }
+
logger().information(fmt::format("Will watch for the process with pid {}", pid));
/// Forward signals to the child process.
diff --git a/base/daemon/GraphiteWriter.cpp b/base/daemon/GraphiteWriter.cpp
index 176b48372d9..74b4d707d43 100644
--- a/base/daemon/GraphiteWriter.cpp
+++ b/base/daemon/GraphiteWriter.cpp
@@ -37,11 +37,3 @@ GraphiteWriter::GraphiteWriter(const std::string & config_name, const std::strin
root_path += sub_path;
}
}
-
-
-std::string GraphiteWriter::getPerServerPath(const std::string & server_name, const std::string & root_path)
-{
- std::string path = root_path + "." + server_name;
- std::replace(path.begin() + root_path.size() + 1, path.end(), '.', '_');
- return path;
-}
diff --git a/base/daemon/GraphiteWriter.h b/base/daemon/GraphiteWriter.h
index ef44d82bced..cdabac7e17c 100644
--- a/base/daemon/GraphiteWriter.h
+++ b/base/daemon/GraphiteWriter.h
@@ -8,10 +8,10 @@
#include
-/// пишет в Graphite данные в формате
+/// Writes to Graphite in the following format
/// path value timestamp\n
-/// path может иметь любую вложенность. Директории разделяются с помощью "."
-/// у нас принят следующий формат path - root_path.server_name.sub_path.key
+/// path can be arbitrarily nested. Elements are separated by '.'
+/// Example: root_path.server_name.sub_path.key
class GraphiteWriter
{
public:
@@ -32,8 +32,6 @@ public:
writeImpl(key_val_vec, timestamp, custom_root_path);
}
- /// возвращает путь root_path.server_name
- static std::string getPerServerPath(const std::string & server_name, const std::string & root_path = "one_min");
private:
template
void writeImpl(const T & data, time_t timestamp, const std::string & custom_root_path)
diff --git a/base/loggers/Loggers.cpp b/base/loggers/Loggers.cpp
index 0f41296819e..5eb9ef95176 100644
--- a/base/loggers/Loggers.cpp
+++ b/base/loggers/Loggers.cpp
@@ -62,7 +62,13 @@ void Loggers::buildLoggers(Poco::Util::AbstractConfiguration & config, Poco::Log
if (!log_path.empty())
{
createDirectory(log_path);
- std::cerr << "Logging " << log_level_string << " to " << log_path << std::endl;
+
+ std::string ext;
+ if (config.getRawString("logger.stream_compress", "false") == "true")
+ ext = ".lz4";
+
+ std::cerr << "Logging " << log_level_string << " to " << log_path << ext << std::endl;
+
auto log_level = Poco::Logger::parseLevel(log_level_string);
if (log_level > max_log_level)
{
@@ -75,6 +81,7 @@ void Loggers::buildLoggers(Poco::Util::AbstractConfiguration & config, Poco::Log
log_file->setProperty(Poco::FileChannel::PROP_ROTATION, config.getRawString("logger.size", "100M"));
log_file->setProperty(Poco::FileChannel::PROP_ARCHIVE, "number");
log_file->setProperty(Poco::FileChannel::PROP_COMPRESS, config.getRawString("logger.compress", "true"));
+ log_file->setProperty(Poco::FileChannel::PROP_STREAMCOMPRESS, config.getRawString("logger.stream_compress", "false"));
log_file->setProperty(Poco::FileChannel::PROP_PURGECOUNT, config.getRawString("logger.count", "1"));
log_file->setProperty(Poco::FileChannel::PROP_FLUSH, config.getRawString("logger.flush", "true"));
log_file->setProperty(Poco::FileChannel::PROP_ROTATEONOPEN, config.getRawString("logger.rotateOnOpen", "false"));
@@ -100,13 +107,18 @@ void Loggers::buildLoggers(Poco::Util::AbstractConfiguration & config, Poco::Log
max_log_level = errorlog_level;
}
- std::cerr << "Logging errors to " << errorlog_path << std::endl;
+ std::string ext;
+ if (config.getRawString("logger.stream_compress", "false") == "true")
+ ext = ".lz4";
+
+ std::cerr << "Logging errors to " << errorlog_path << ext << std::endl;
error_log_file = new Poco::FileChannel;
error_log_file->setProperty(Poco::FileChannel::PROP_PATH, fs::weakly_canonical(errorlog_path));
error_log_file->setProperty(Poco::FileChannel::PROP_ROTATION, config.getRawString("logger.size", "100M"));
error_log_file->setProperty(Poco::FileChannel::PROP_ARCHIVE, "number");
error_log_file->setProperty(Poco::FileChannel::PROP_COMPRESS, config.getRawString("logger.compress", "true"));
+ error_log_file->setProperty(Poco::FileChannel::PROP_STREAMCOMPRESS, config.getRawString("logger.stream_compress", "false"));
error_log_file->setProperty(Poco::FileChannel::PROP_PURGECOUNT, config.getRawString("logger.count", "1"));
error_log_file->setProperty(Poco::FileChannel::PROP_FLUSH, config.getRawString("logger.flush", "true"));
error_log_file->setProperty(Poco::FileChannel::PROP_ROTATEONOPEN, config.getRawString("logger.rotateOnOpen", "false"));
diff --git a/base/loggers/OwnPatternFormatter.cpp b/base/loggers/OwnPatternFormatter.cpp
index 3e1c66acba3..02a2c2e510b 100644
--- a/base/loggers/OwnPatternFormatter.cpp
+++ b/base/loggers/OwnPatternFormatter.cpp
@@ -1,16 +1,12 @@
#include "OwnPatternFormatter.h"
#include
-#include
-#include
#include
#include
#include
#include
#include
-#include
#include
-#include "Loggers.h"
OwnPatternFormatter::OwnPatternFormatter(bool color_)
diff --git a/base/mysqlxx/CMakeLists.txt b/base/mysqlxx/CMakeLists.txt
index 947f61f9051..80db50c2593 100644
--- a/base/mysqlxx/CMakeLists.txt
+++ b/base/mysqlxx/CMakeLists.txt
@@ -13,10 +13,7 @@ add_library (mysqlxx
target_include_directories (mysqlxx PUBLIC ..)
-if (USE_INTERNAL_MYSQL_LIBRARY)
- target_include_directories (mysqlxx PUBLIC "${ClickHouse_SOURCE_DIR}/contrib/mariadb-connector-c/include")
- target_include_directories (mysqlxx PUBLIC "${ClickHouse_BINARY_DIR}/contrib/mariadb-connector-c/include")
-else ()
+if (NOT USE_INTERNAL_MYSQL_LIBRARY)
set(PLATFORM_LIBRARIES ${CMAKE_DL_LIBS})
if (USE_MYSQL)
diff --git a/cmake/add_warning.cmake b/cmake/add_warning.cmake
index bc9642c9cc6..e7797fcc7a6 100644
--- a/cmake/add_warning.cmake
+++ b/cmake/add_warning.cmake
@@ -12,13 +12,13 @@ macro (add_warning flag)
if (SUPPORTS_CXXFLAG_${underscored_flag})
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -W${flag}")
else ()
- message (WARNING "Flag -W${flag} is unsupported")
+ message (STATUS "Flag -W${flag} is unsupported")
endif ()
if (SUPPORTS_CFLAG_${underscored_flag})
set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -W${flag}")
else ()
- message (WARNING "Flag -W${flag} is unsupported")
+ message (STATUS "Flag -W${flag} is unsupported")
endif ()
endmacro ()
@@ -39,7 +39,7 @@ macro (target_add_warning target flag)
if (SUPPORTS_CXXFLAG_${underscored_flag})
target_compile_options (${target} PRIVATE "-W${flag}")
else ()
- message (WARNING "Flag -W${flag} is unsupported")
+ message (STATUS "Flag -W${flag} is unsupported")
endif ()
endmacro ()
diff --git a/cmake/arch.cmake b/cmake/arch.cmake
index 00cc16fbd10..82c0d40994c 100644
--- a/cmake/arch.cmake
+++ b/cmake/arch.cmake
@@ -16,3 +16,7 @@ endif ()
if (CMAKE_SYSTEM_PROCESSOR MATCHES "^(ppc64le.*|PPC64LE.*)")
set (ARCH_PPC64LE 1)
endif ()
+if (CMAKE_SYSTEM_PROCESSOR MATCHES "riscv64")
+ set (ARCH_RISCV64 1)
+endif ()
+
diff --git a/cmake/contrib_finder.cmake b/cmake/contrib_finder.cmake
index 64c6d5f5c0a..e97fda6a6f3 100644
--- a/cmake/contrib_finder.cmake
+++ b/cmake/contrib_finder.cmake
@@ -4,7 +4,7 @@ macro(find_contrib_lib LIB_NAME)
string(TOUPPER ${LIB_NAME} LIB_NAME_UC)
string(REPLACE "-" "_" LIB_NAME_UC ${LIB_NAME_UC})
- option (USE_INTERNAL_${LIB_NAME_UC}_LIBRARY "Use bundled library ${LIB_NAME} instead of system" ${NOT_UNBUNDLED})
+ option (USE_INTERNAL_${LIB_NAME_UC}_LIBRARY "Use bundled library ${LIB_NAME} instead of system" ON)
if (NOT USE_INTERNAL_${LIB_NAME_UC}_LIBRARY)
find_package ("${LIB_NAME}")
diff --git a/cmake/cpu_features.cmake b/cmake/cpu_features.cmake
index 199d1717723..20c61ead3d2 100644
--- a/cmake/cpu_features.cmake
+++ b/cmake/cpu_features.cmake
@@ -28,6 +28,9 @@ option (ARCH_NATIVE "Add -march=native compiler flag. This makes your binaries n
if (ARCH_NATIVE)
set (COMPILER_FLAGS "${COMPILER_FLAGS} -march=native")
+elseif (ARCH_AARCH64)
+ set (COMPILER_FLAGS "${COMPILER_FLAGS} -march=armv8-a+crc")
+
else ()
set (TEST_FLAG "-mssse3")
set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0")
@@ -43,7 +46,6 @@ else ()
set (COMPILER_FLAGS "${COMPILER_FLAGS} ${TEST_FLAG}")
endif ()
-
set (TEST_FLAG "-msse4.1")
set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0")
check_cxx_source_compiles("
@@ -138,7 +140,7 @@ else ()
#include <immintrin.h>
int main() {
auto a = _mm512_setzero_epi32();
- (void)a;
+ (void)a;
auto b = _mm512_add_epi16(__m512i(), __m512i());
(void)b;
return 0;
@@ -160,9 +162,9 @@ else ()
" HAVE_BMI)
if (HAVE_BMI AND ENABLE_BMI)
set (COMPILER_FLAGS "${COMPILER_FLAGS} ${TEST_FLAG}")
- endif ()
+ endif ()
-#Limit avx2/avx512 flag for specific source build
+ # Limit avx2/avx512 flag for specific source build
set (X86_INTRINSICS_FLAGS "")
if (ENABLE_AVX2_FOR_SPEC_OP)
if (HAVE_BMI)
diff --git a/cmake/find/amqpcpp.cmake b/cmake/find/amqpcpp.cmake
index 374e6dd6d7e..e033bea439f 100644
--- a/cmake/find/amqpcpp.cmake
+++ b/cmake/find/amqpcpp.cmake
@@ -9,7 +9,7 @@ if (NOT ENABLE_AMQPCPP)
return()
endif()
-if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/AMQP-CPP/CMakeLists.txt")
+if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/AMQP-CPP/src")
message (WARNING "submodule contrib/AMQP-CPP is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal AMQP-CPP library")
set (USE_AMQPCPP 0)
diff --git a/cmake/find/avro.cmake b/cmake/find/avro.cmake
index 351fa15d2d3..a70fb92c122 100644
--- a/cmake/find/avro.cmake
+++ b/cmake/find/avro.cmake
@@ -8,10 +8,9 @@ if (NOT ENABLE_AVRO)
return()
endif()
-option (USE_INTERNAL_AVRO_LIBRARY
- "Set to FALSE to use system avro library instead of bundled" ON) # TODO: provide unbundled support
+option (USE_INTERNAL_AVRO_LIBRARY "Set to FALSE to use system avro library instead of bundled" ON)
-if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/avro/lang/c++/CMakeLists.txt")
+if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/avro/lang")
if (USE_INTERNAL_AVRO_LIBRARY)
message(WARNING "submodule contrib/avro is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot find internal avro")
diff --git a/cmake/find/brotli.cmake b/cmake/find/brotli.cmake
index 4b2ee3d6de0..6469ec04f45 100644
--- a/cmake/find/brotli.cmake
+++ b/cmake/find/brotli.cmake
@@ -7,12 +7,7 @@ if (NOT ENABLE_BROTLI)
return()
endif()
-if (UNBUNDLED)
- # Many system ship only dynamic brotly libraries, so we back off to bundled by default
- option (USE_INTERNAL_BROTLI_LIBRARY "Set to FALSE to use system libbrotli library instead of bundled" ${USE_STATIC_LIBRARIES})
-else()
- option (USE_INTERNAL_BROTLI_LIBRARY "Set to FALSE to use system libbrotli library instead of bundled" ON)
-endif()
+option (USE_INTERNAL_BROTLI_LIBRARY "Set to FALSE to use system libbrotli library instead of bundled" ON)
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/brotli/c/include/brotli/decode.h")
if (USE_INTERNAL_BROTLI_LIBRARY)
diff --git a/cmake/find/capnp.cmake b/cmake/find/capnp.cmake
index 3d0d767bd0c..fa62c64105f 100644
--- a/cmake/find/capnp.cmake
+++ b/cmake/find/capnp.cmake
@@ -7,9 +7,9 @@ if (NOT ENABLE_CAPNP)
return()
endif()
-option (USE_INTERNAL_CAPNP_LIBRARY "Set to FALSE to use system capnproto library instead of bundled" ${NOT_UNBUNDLED})
+option (USE_INTERNAL_CAPNP_LIBRARY "Set to FALSE to use system capnproto library instead of bundled" ON)
-if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/capnproto/CMakeLists.txt")
+if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/capnproto/c++")
if(USE_INTERNAL_CAPNP_LIBRARY)
message(WARNING "submodule contrib/capnproto is missing. to fix try run: \n git submodule update --init")
message(${RECONFIGURE_MESSAGE_LEVEL} "cannot find internal capnproto")
diff --git a/cmake/find/curl.cmake b/cmake/find/curl.cmake
index 0a9fcc05ccf..577b13698c2 100644
--- a/cmake/find/curl.cmake
+++ b/cmake/find/curl.cmake
@@ -7,7 +7,7 @@ if (NOT ENABLE_CURL)
return()
endif()
-option (USE_INTERNAL_CURL "Use internal curl library" ${NOT_UNBUNDLED})
+option (USE_INTERNAL_CURL "Use internal curl library" ON)
if (NOT USE_INTERNAL_CURL)
find_package (CURL)
@@ -22,8 +22,6 @@ if (NOT CURL_FOUND)
# find_package(CURL) compatibility for the following packages that uses
# find_package(CURL)/include(FindCURL):
- # - mariadb-connector-c
- # - aws-s3-cmake
# - sentry-native
set (CURL_FOUND ON CACHE BOOL "")
set (CURL_ROOT_DIR ${CURL_LIBRARY_DIR} CACHE PATH "")
diff --git a/cmake/find/cxx.cmake b/cmake/find/cxx.cmake
index f38ac77b1ea..d1f62f0ecea 100644
--- a/cmake/find/cxx.cmake
+++ b/cmake/find/cxx.cmake
@@ -1,4 +1,4 @@
-option (USE_LIBCXX "Use libc++ and libc++abi instead of libstdc++" ${NOT_UNBUNDLED})
+option (USE_LIBCXX "Use libc++ and libc++abi instead of libstdc++" ON)
if (NOT USE_LIBCXX)
if (USE_INTERNAL_LIBCXX_LIBRARY)
@@ -10,12 +10,12 @@ if (NOT USE_LIBCXX)
return()
endif()
-set(USE_INTERNAL_LIBCXX_LIBRARY_DEFAULT ${NOT_UNBUNDLED})
+set(USE_INTERNAL_LIBCXX_LIBRARY_DEFAULT ON)
option (USE_INTERNAL_LIBCXX_LIBRARY "Disable to use system libcxx and libcxxabi libraries instead of bundled"
${USE_INTERNAL_LIBCXX_LIBRARY_DEFAULT})
-if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libcxx/CMakeLists.txt")
+if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libcxx/src")
if (USE_INTERNAL_LIBCXX_LIBRARY)
message(WARNING "submodule contrib/libcxx is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal libcxx")
diff --git a/cmake/find/datasketches.cmake b/cmake/find/datasketches.cmake
index 2d7e644890a..3d0bb1d1f95 100644
--- a/cmake/find/datasketches.cmake
+++ b/cmake/find/datasketches.cmake
@@ -2,7 +2,7 @@ option (ENABLE_DATASKETCHES "Enable DataSketches" ${ENABLE_LIBRARIES})
if (ENABLE_DATASKETCHES)
-option (USE_INTERNAL_DATASKETCHES_LIBRARY "Set to FALSE to use system DataSketches library instead of bundled" ${NOT_UNBUNDLED})
+option (USE_INTERNAL_DATASKETCHES_LIBRARY "Set to FALSE to use system DataSketches library instead of bundled" ON)
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/datasketches-cpp/theta/CMakeLists.txt")
if (USE_INTERNAL_DATASKETCHES_LIBRARY)
diff --git a/cmake/find/grpc.cmake b/cmake/find/grpc.cmake
index f4b280876ef..92a85b0df04 100644
--- a/cmake/find/grpc.cmake
+++ b/cmake/find/grpc.cmake
@@ -22,7 +22,7 @@ endif()
# You can set USE_INTERNAL_GRPC_LIBRARY to OFF to force using the external gRPC framework, which should be installed in the system in this case.
# The external gRPC framework can be installed in the system by running
# sudo apt-get install libgrpc++-dev protobuf-compiler-grpc
-option(USE_INTERNAL_GRPC_LIBRARY "Set to FALSE to use system gRPC library instead of bundled. (Experimental. Set to OFF on your own risk)" ${NOT_UNBUNDLED})
+option(USE_INTERNAL_GRPC_LIBRARY "Set to FALSE to use system gRPC library instead of bundled. (Experimental. Set to OFF on your own risk)" ON)
if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/grpc/CMakeLists.txt")
if(USE_INTERNAL_GRPC_LIBRARY)
diff --git a/cmake/find/gtest.cmake b/cmake/find/gtest.cmake
index c5f987d7368..935744bcbd1 100644
--- a/cmake/find/gtest.cmake
+++ b/cmake/find/gtest.cmake
@@ -1,6 +1,6 @@
# included only if ENABLE_TESTS=1
-option (USE_INTERNAL_GTEST_LIBRARY "Set to FALSE to use system Google Test instead of bundled" ${NOT_UNBUNDLED})
+option (USE_INTERNAL_GTEST_LIBRARY "Set to FALSE to use system Google Test instead of bundled" ON)
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/googletest/googletest/CMakeLists.txt")
if (USE_INTERNAL_GTEST_LIBRARY)
diff --git a/cmake/find/icu.cmake b/cmake/find/icu.cmake
index 0b775a68eda..5ba25e93875 100644
--- a/cmake/find/icu.cmake
+++ b/cmake/find/icu.cmake
@@ -12,7 +12,7 @@ if (NOT ENABLE_ICU)
return()
endif()
-option (USE_INTERNAL_ICU_LIBRARY "Set to FALSE to use system ICU library instead of bundled" ${NOT_UNBUNDLED})
+option (USE_INTERNAL_ICU_LIBRARY "Set to FALSE to use system ICU library instead of bundled" ON)
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/icu/icu4c/LICENSE")
if (USE_INTERNAL_ICU_LIBRARY)
diff --git a/cmake/find/ldap.cmake b/cmake/find/ldap.cmake
index 0d14e2c4199..d0d1e54bfec 100644
--- a/cmake/find/ldap.cmake
+++ b/cmake/find/ldap.cmake
@@ -1,7 +1,3 @@
-if (UNBUNDLED AND USE_STATIC_LIBRARIES)
- set (ENABLE_LDAP OFF CACHE INTERNAL "")
-endif()
-
option (ENABLE_LDAP "Enable LDAP" ${ENABLE_LIBRARIES})
if (NOT ENABLE_LDAP)
@@ -11,7 +7,7 @@ if (NOT ENABLE_LDAP)
return()
endif()
-option (USE_INTERNAL_LDAP_LIBRARY "Set to FALSE to use system *LDAP library instead of bundled" ${NOT_UNBUNDLED})
+option (USE_INTERNAL_LDAP_LIBRARY "Set to FALSE to use system *LDAP library instead of bundled" ON)
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/openldap/README")
if (USE_INTERNAL_LDAP_LIBRARY)
diff --git a/cmake/find/libgsasl.cmake b/cmake/find/libgsasl.cmake
index 3aec5c0c30a..d4e1ebce629 100644
--- a/cmake/find/libgsasl.cmake
+++ b/cmake/find/libgsasl.cmake
@@ -7,12 +7,7 @@ if (NOT ENABLE_GSASL_LIBRARY)
return()
endif()
-if (UNBUNDLED)
- # when USE_STATIC_LIBRARIES we usually need to pick up hell a lot of dependencies for libgsasl
- option (USE_INTERNAL_LIBGSASL_LIBRARY "Set to FALSE to use system libgsasl library instead of bundled" ${USE_STATIC_LIBRARIES})
-else()
- option (USE_INTERNAL_LIBGSASL_LIBRARY "Set to FALSE to use system libgsasl library instead of bundled" ON)
-endif()
+option (USE_INTERNAL_LIBGSASL_LIBRARY "Set to FALSE to use system libgsasl library instead of bundled" ON)
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libgsasl/src/gsasl.h")
if (USE_INTERNAL_LIBGSASL_LIBRARY)
@@ -35,7 +30,7 @@ if (LIBGSASL_LIBRARY AND LIBGSASL_INCLUDE_DIR)
elseif (NOT MISSING_INTERNAL_LIBGSASL_LIBRARY)
set (LIBGSASL_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/libgsasl/src" "${ClickHouse_SOURCE_DIR}/contrib/libgsasl/linux_x86_64/include")
set (USE_INTERNAL_LIBGSASL_LIBRARY 1)
- set (LIBGSASL_LIBRARY libgsasl)
+ set (LIBGSASL_LIBRARY gsasl)
endif ()
if(LIBGSASL_LIBRARY AND LIBGSASL_INCLUDE_DIR)
diff --git a/cmake/find/libpqxx.cmake b/cmake/find/libpqxx.cmake
index b2a1e217b10..68dddffde70 100644
--- a/cmake/find/libpqxx.cmake
+++ b/cmake/find/libpqxx.cmake
@@ -4,7 +4,7 @@ if (NOT ENABLE_LIBPQXX)
return()
endif()
-if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libpqxx/CMakeLists.txt")
+if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libpqxx/src")
message (WARNING "submodule contrib/libpqxx is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal libpqxx library")
set (USE_LIBPQXX 0)
diff --git a/cmake/find/libxml2.cmake b/cmake/find/libxml2.cmake
index 8f7e79d84c9..e9fe7780d39 100644
--- a/cmake/find/libxml2.cmake
+++ b/cmake/find/libxml2.cmake
@@ -1,4 +1,4 @@
-option (USE_INTERNAL_LIBXML2_LIBRARY "Set to FALSE to use system libxml2 library instead of bundled" ${NOT_UNBUNDLED})
+option (USE_INTERNAL_LIBXML2_LIBRARY "Set to FALSE to use system libxml2 library instead of bundled" ON)
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libxml2/libxml.h")
if (USE_INTERNAL_LIBXML2_LIBRARY)
diff --git a/cmake/find/msgpack.cmake b/cmake/find/msgpack.cmake
index c15fedd0e30..ac52740c774 100644
--- a/cmake/find/msgpack.cmake
+++ b/cmake/find/msgpack.cmake
@@ -7,7 +7,7 @@ if(NOT ENABLE_MSGPACK)
return()
endif()
-option (USE_INTERNAL_MSGPACK_LIBRARY "Set to FALSE to use system msgpack library instead of bundled" ${NOT_UNBUNDLED})
+option (USE_INTERNAL_MSGPACK_LIBRARY "Set to FALSE to use system msgpack library instead of bundled" ON)
if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/msgpack-c/include/msgpack.hpp")
if(USE_INTERNAL_MSGPACK_LIBRARY)
diff --git a/cmake/find/mysqlclient.cmake b/cmake/find/mysqlclient.cmake
index 0af03676d71..746775410cb 100644
--- a/cmake/find/mysqlclient.cmake
+++ b/cmake/find/mysqlclient.cmake
@@ -12,7 +12,7 @@ if(NOT ENABLE_MYSQL)
return()
endif()
-option(USE_INTERNAL_MYSQL_LIBRARY "Set to FALSE to use system mysqlclient library instead of bundled" ${NOT_UNBUNDLED})
+option(USE_INTERNAL_MYSQL_LIBRARY "Set to FALSE to use system mysqlclient library instead of bundled" ON)
if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/mariadb-connector-c/README")
if(USE_INTERNAL_MYSQL_LIBRARY)
diff --git a/cmake/find/nanodbc.cmake b/cmake/find/nanodbc.cmake
index d48e294c9e5..2fa60e71f55 100644
--- a/cmake/find/nanodbc.cmake
+++ b/cmake/find/nanodbc.cmake
@@ -6,7 +6,7 @@ if (NOT USE_INTERNAL_NANODBC_LIBRARY)
message (FATAL_ERROR "Only the bundled nanodbc library can be used")
endif ()
-if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/nanodbc/CMakeLists.txt")
+if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/nanodbc/nanodbc")
message (FATAL_ERROR "submodule contrib/nanodbc is missing. to fix try run: \n git submodule update --init")
endif()
diff --git a/cmake/find/nlp.cmake b/cmake/find/nlp.cmake
index efa9b39ddae..5c10f2f24e7 100644
--- a/cmake/find/nlp.cmake
+++ b/cmake/find/nlp.cmake
@@ -13,7 +13,7 @@ if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libstemmer_c/Makefile")
return()
endif ()
-if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/wordnet-blast/CMakeLists.txt")
+if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/wordnet-blast/wnb")
message (WARNING "submodule contrib/wordnet-blast is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal wordnet-blast library, NLP functions will be disabled")
set (USE_NLP 0)
diff --git a/cmake/find/nuraft.cmake b/cmake/find/nuraft.cmake
index 59caa9e7373..c19f6774e7d 100644
--- a/cmake/find/nuraft.cmake
+++ b/cmake/find/nuraft.cmake
@@ -4,7 +4,7 @@ if (NOT ENABLE_NURAFT)
return()
endif()
-if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/NuRaft/CMakeLists.txt")
+if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/NuRaft/src")
message (WARNING "submodule contrib/NuRaft is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal NuRaft library")
set (USE_NURAFT 0)
diff --git a/cmake/find/odbc.cmake b/cmake/find/odbc.cmake
index c475e600c0d..2f06cfed941 100644
--- a/cmake/find/odbc.cmake
+++ b/cmake/find/odbc.cmake
@@ -19,7 +19,7 @@ if (NOT ENABLE_ODBC)
return()
endif()
-option (USE_INTERNAL_ODBC_LIBRARY "Use internal ODBC library" ${NOT_UNBUNDLED})
+option (USE_INTERNAL_ODBC_LIBRARY "Use internal ODBC library" ON)
if (NOT USE_INTERNAL_ODBC_LIBRARY)
find_library (LIBRARY_ODBC NAMES unixodbc odbc)
diff --git a/cmake/find/parquet.cmake b/cmake/find/parquet.cmake
index 4b56a829df5..89615710d48 100644
--- a/cmake/find/parquet.cmake
+++ b/cmake/find/parquet.cmake
@@ -13,7 +13,7 @@ if (NOT ENABLE_PARQUET)
endif()
if (NOT OS_FREEBSD) # Freebsd: ../contrib/arrow/cpp/src/arrow/util/bit-util.h:27:10: fatal error: endian.h: No such file or directory
- option(USE_INTERNAL_PARQUET_LIBRARY "Set to FALSE to use system parquet library instead of bundled" ${NOT_UNBUNDLED})
+ option(USE_INTERNAL_PARQUET_LIBRARY "Set to FALSE to use system parquet library instead of bundled" ON)
elseif(USE_INTERNAL_PARQUET_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Using internal parquet is not supported on freebsd")
endif()
diff --git a/cmake/find/protobuf.cmake b/cmake/find/protobuf.cmake
index 096288fd2ab..a2ea8ae87fc 100644
--- a/cmake/find/protobuf.cmake
+++ b/cmake/find/protobuf.cmake
@@ -11,7 +11,7 @@ endif()
# You can set USE_INTERNAL_PROTOBUF_LIBRARY to OFF to force using the external protobuf library, which should be installed in the system in this case.
# The external protobuf library can be installed in the system by running
# sudo apt-get install libprotobuf-dev protobuf-compiler libprotoc-dev
-option(USE_INTERNAL_PROTOBUF_LIBRARY "Set to FALSE to use system protobuf instead of bundled. (Experimental. Set to OFF on your own risk)" ${NOT_UNBUNDLED})
+option(USE_INTERNAL_PROTOBUF_LIBRARY "Set to FALSE to use system protobuf instead of bundled. (Experimental. Set to OFF on your own risk)" ON)
if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/protobuf/cmake/CMakeLists.txt")
if(USE_INTERNAL_PROTOBUF_LIBRARY)
diff --git a/cmake/find/rapidjson.cmake b/cmake/find/rapidjson.cmake
index 62db4695c58..cdf6761446e 100644
--- a/cmake/find/rapidjson.cmake
+++ b/cmake/find/rapidjson.cmake
@@ -6,7 +6,7 @@ if(NOT ENABLE_RAPIDJSON)
return()
endif()
-option(USE_INTERNAL_RAPIDJSON_LIBRARY "Set to FALSE to use system rapidjson library instead of bundled" ${NOT_UNBUNDLED})
+option(USE_INTERNAL_RAPIDJSON_LIBRARY "Set to FALSE to use system rapidjson library instead of bundled" ON)
if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/rapidjson/include/rapidjson/rapidjson.h")
if(USE_INTERNAL_RAPIDJSON_LIBRARY)
diff --git a/cmake/find/rdkafka.cmake b/cmake/find/rdkafka.cmake
index 5b370a42cdc..cad267bacff 100644
--- a/cmake/find/rdkafka.cmake
+++ b/cmake/find/rdkafka.cmake
@@ -7,9 +7,9 @@ if (NOT ENABLE_RDKAFKA)
return()
endif()
-option (USE_INTERNAL_RDKAFKA_LIBRARY "Set to FALSE to use system librdkafka instead of the bundled" ${NOT_UNBUNDLED})
+option (USE_INTERNAL_RDKAFKA_LIBRARY "Set to FALSE to use system librdkafka instead of the bundled" ON)
-if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/cppkafka/CMakeLists.txt")
+if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/cppkafka/src")
if(USE_INTERNAL_RDKAFKA_LIBRARY)
message (WARNING "submodule contrib/cppkafka is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal cppkafka")
@@ -18,7 +18,7 @@ if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/cppkafka/CMakeLists.txt")
set (MISSING_INTERNAL_CPPKAFKA_LIBRARY 1)
endif ()
-if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/librdkafka/CMakeLists.txt")
+if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/librdkafka/src")
if(USE_INTERNAL_RDKAFKA_LIBRARY OR MISSING_INTERNAL_CPPKAFKA_LIBRARY)
message (WARNING "submodule contrib/librdkafka is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal rdkafka")
@@ -40,7 +40,7 @@ if (NOT USE_INTERNAL_RDKAFKA_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system sasl2 library needed for static librdkafka")
endif()
endif ()
- set (CPPKAFKA_LIBRARY cppkafka) # TODO: try to use unbundled version.
+ set (CPPKAFKA_LIBRARY cppkafka)
endif ()
if (RDKAFKA_LIB AND RDKAFKA_INCLUDE_DIR)
diff --git a/cmake/find/re2.cmake b/cmake/find/re2.cmake
index 09240f33f7d..ed5c72d13fa 100644
--- a/cmake/find/re2.cmake
+++ b/cmake/find/re2.cmake
@@ -1,6 +1,6 @@
-option (USE_INTERNAL_RE2_LIBRARY "Set to FALSE to use system re2 library instead of bundled [slower]" ${NOT_UNBUNDLED})
+option (USE_INTERNAL_RE2_LIBRARY "Set to FALSE to use system re2 library instead of bundled [slower]" ON)
-if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/re2/CMakeLists.txt")
+if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/re2/re2")
if(USE_INTERNAL_RE2_LIBRARY)
message(WARNING "submodule contrib/re2 is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal re2 library")
diff --git a/cmake/find/rocksdb.cmake b/cmake/find/rocksdb.cmake
index 109eabc271b..10592d1d037 100644
--- a/cmake/find/rocksdb.cmake
+++ b/cmake/find/rocksdb.cmake
@@ -11,9 +11,9 @@ if (NOT ENABLE_ROCKSDB)
return()
endif()
-option(USE_INTERNAL_ROCKSDB_LIBRARY "Set to FALSE to use system ROCKSDB library instead of bundled" ${NOT_UNBUNDLED})
+option(USE_INTERNAL_ROCKSDB_LIBRARY "Set to FALSE to use system ROCKSDB library instead of bundled" ON)
-if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/rocksdb/CMakeLists.txt")
+if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/rocksdb/include")
if (USE_INTERNAL_ROCKSDB_LIBRARY)
message (WARNING "submodule contrib is missing. to fix try run: \n git submodule update --init")
message(${RECONFIGURE_MESSAGE_LEVEL} "cannot find internal rocksdb")
diff --git a/cmake/find/sentry.cmake b/cmake/find/sentry.cmake
index 4283e75f9ef..e08cbad1729 100644
--- a/cmake/find/sentry.cmake
+++ b/cmake/find/sentry.cmake
@@ -9,7 +9,7 @@ if (NOT EXISTS "${SENTRY_INCLUDE_DIR}/sentry.h")
return()
endif ()
-if (NOT OS_FREEBSD AND NOT SPLIT_SHARED_LIBRARIES AND NOT_UNBUNDLED AND NOT (OS_DARWIN AND COMPILER_CLANG))
+if (NOT OS_FREEBSD AND NOT SPLIT_SHARED_LIBRARIES AND NOT (OS_DARWIN AND COMPILER_CLANG))
option (USE_SENTRY "Use Sentry" ${ENABLE_LIBRARIES})
set (SENTRY_TRANSPORT "curl" CACHE STRING "")
set (SENTRY_BACKEND "none" CACHE STRING "")
@@ -18,8 +18,6 @@ if (NOT OS_FREEBSD AND NOT SPLIT_SHARED_LIBRARIES AND NOT_UNBUNDLED AND NOT (OS_
set (SENTRY_PIC OFF CACHE BOOL "")
set (BUILD_SHARED_LIBS OFF)
message (STATUS "Using sentry=${USE_SENTRY}: ${SENTRY_LIBRARY}")
-
- include_directories("${SENTRY_INCLUDE_DIR}")
elseif (USE_SENTRY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Sentry is not supported in current configuration")
endif ()
diff --git a/cmake/find/snappy.cmake b/cmake/find/snappy.cmake
index 2e1c8473904..245b3a9a2ff 100644
--- a/cmake/find/snappy.cmake
+++ b/cmake/find/snappy.cmake
@@ -1,4 +1,4 @@
-option(USE_SNAPPY "Enable snappy library" ${ENABLE_LIBRARIES})
+option(USE_SNAPPY "Enable snappy library" ON)
if(NOT USE_SNAPPY)
if (USE_INTERNAL_SNAPPY_LIBRARY)
@@ -7,7 +7,7 @@ if(NOT USE_SNAPPY)
return()
endif()
-option (USE_INTERNAL_SNAPPY_LIBRARY "Set to FALSE to use system snappy library instead of bundled" ${NOT_UNBUNDLED})
+option (USE_INTERNAL_SNAPPY_LIBRARY "Set to FALSE to use system snappy library instead of bundled" ON)
if(NOT USE_INTERNAL_SNAPPY_LIBRARY)
find_library(SNAPPY_LIBRARY snappy)
diff --git a/cmake/find/ssl.cmake b/cmake/find/ssl.cmake
index fb411b93593..1ac6a54ed20 100644
--- a/cmake/find/ssl.cmake
+++ b/cmake/find/ssl.cmake
@@ -9,7 +9,7 @@ if(NOT ENABLE_SSL)
return()
endif()
-option(USE_INTERNAL_SSL_LIBRARY "Set to FALSE to use system *ssl library instead of bundled" ${NOT_UNBUNDLED})
+option(USE_INTERNAL_SSL_LIBRARY "Set to FALSE to use system *ssl library instead of bundled" ON)
if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/boringssl/README.md")
if(USE_INTERNAL_SSL_LIBRARY)
diff --git a/cmake/find/stats.cmake b/cmake/find/stats.cmake
deleted file mode 100644
index 589da1603d5..00000000000
--- a/cmake/find/stats.cmake
+++ /dev/null
@@ -1,24 +0,0 @@
-option(ENABLE_STATS "Enable StatsLib library" ${ENABLE_LIBRARIES})
-
-if (ENABLE_STATS)
- if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/stats")
- message (WARNING "submodule contrib/stats is missing. to fix try run: \n git submodule update --init")
- set (ENABLE_STATS 0)
- set (USE_STATS 0)
- elseif (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/gcem")
- message (WARNING "submodule contrib/gcem is missing. to fix try run: \n git submodule update --init")
- set (ENABLE_STATS 0)
- set (USE_STATS 0)
- else()
- set(STATS_INCLUDE_DIR ${ClickHouse_SOURCE_DIR}/contrib/stats/include)
- set(GCEM_INCLUDE_DIR ${ClickHouse_SOURCE_DIR}/contrib/gcem/include)
- set (USE_STATS 1)
- endif()
-
- if (NOT USE_STATS)
- message (${RECONFIGURE_MESSAGE_LEVEL} "Can't enable stats library")
- endif()
-endif()
-
-message (STATUS "Using stats=${USE_STATS} : ${STATS_INCLUDE_DIR}")
-message (STATUS "Using gcem=${USE_STATS}: ${GCEM_INCLUDE_DIR}")
diff --git a/cmake/find/xz.cmake b/cmake/find/xz.cmake
index f4c230859bc..f25937fe87d 100644
--- a/cmake/find/xz.cmake
+++ b/cmake/find/xz.cmake
@@ -1,4 +1,4 @@
-option (USE_INTERNAL_XZ_LIBRARY "Set to OFF to use system xz (lzma) library instead of bundled" ${NOT_UNBUNDLED})
+option (USE_INTERNAL_XZ_LIBRARY "Set to OFF to use system xz (lzma) library instead of bundled" ON)
if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/xz/src/liblzma/api/lzma.h")
if(USE_INTERNAL_XZ_LIBRARY)
diff --git a/cmake/find/zlib.cmake b/cmake/find/zlib.cmake
index 50a5bc63d15..c2ee8217afa 100644
--- a/cmake/find/zlib.cmake
+++ b/cmake/find/zlib.cmake
@@ -1,4 +1,4 @@
-option (USE_INTERNAL_ZLIB_LIBRARY "Set to FALSE to use system zlib library instead of bundled" ${NOT_UNBUNDLED})
+option (USE_INTERNAL_ZLIB_LIBRARY "Set to FALSE to use system zlib library instead of bundled" ON)
if (NOT MSVC)
set (INTERNAL_ZLIB_NAME "zlib-ng" CACHE INTERNAL "")
@@ -29,9 +29,6 @@ if (NOT USE_INTERNAL_ZLIB_LIBRARY)
endif ()
if (NOT ZLIB_FOUND AND NOT MISSING_INTERNAL_ZLIB_LIBRARY)
- # https://github.com/zlib-ng/zlib-ng/pull/733
- # This is disabed by default
- add_compile_definitions(Z_TLS=__thread)
set (USE_INTERNAL_ZLIB_LIBRARY 1)
set (ZLIB_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/${INTERNAL_ZLIB_NAME}" "${ClickHouse_BINARY_DIR}/contrib/${INTERNAL_ZLIB_NAME}" CACHE INTERNAL "") # generated zconf.h
set (ZLIB_INCLUDE_DIRS ${ZLIB_INCLUDE_DIR}) # for poco
diff --git a/cmake/find/zstd.cmake b/cmake/find/zstd.cmake
index 812e1eed139..2b8dd53fbc3 100644
--- a/cmake/find/zstd.cmake
+++ b/cmake/find/zstd.cmake
@@ -1,4 +1,4 @@
-option (USE_INTERNAL_ZSTD_LIBRARY "Set to FALSE to use system zstd library instead of bundled" ${NOT_UNBUNDLED})
+option (USE_INTERNAL_ZSTD_LIBRARY "Set to FALSE to use system zstd library instead of bundled" ON)
if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/zstd/lib/zstd.h")
if(USE_INTERNAL_ZSTD_LIBRARY)
diff --git a/cmake/freebsd/toolchain-aarch64.cmake b/cmake/freebsd/toolchain-aarch64.cmake
index b8fdb4bbb7c..eeec635cc06 100644
--- a/cmake/freebsd/toolchain-aarch64.cmake
+++ b/cmake/freebsd/toolchain-aarch64.cmake
@@ -3,17 +3,13 @@ set (CMAKE_SYSTEM_PROCESSOR "aarch64")
set (CMAKE_C_COMPILER_TARGET "aarch64-unknown-freebsd12")
set (CMAKE_CXX_COMPILER_TARGET "aarch64-unknown-freebsd12")
set (CMAKE_ASM_COMPILER_TARGET "aarch64-unknown-freebsd12")
-set (CMAKE_SYSROOT "${CMAKE_CURRENT_LIST_DIR}/../toolchain/freebsd-aarch64")
+set (CMAKE_SYSROOT "${CMAKE_CURRENT_LIST_DIR}/../../contrib/sysroot/freebsd-aarch64")
set (CMAKE_TRY_COMPILE_TARGET_TYPE STATIC_LIBRARY) # disable linkage check - it doesn't work in CMake
-set (CMAKE_AR "/usr/bin/ar" CACHE FILEPATH "" FORCE)
-set (CMAKE_RANLIB "/usr/bin/ranlib" CACHE FILEPATH "" FORCE)
-
-set (LINKER_NAME "ld.lld" CACHE STRING "" FORCE)
-
-set (CMAKE_EXE_LINKER_FLAGS_INIT "-fuse-ld=lld")
-set (CMAKE_SHARED_LINKER_FLAGS_INIT "-fuse-ld=lld")
+# Overridden later in cmake/tools.cmake, but for some reason they must already be set here.
+set (CMAKE_AR "ar")
+set (CMAKE_RANLIB "ranlib")
set (HAS_PRE_1970_EXITCODE "0" CACHE STRING "Result from TRY_RUN" FORCE)
set (HAS_PRE_1970_EXITCODE__TRYRUN_OUTPUT "" CACHE STRING "Output from TRY_RUN" FORCE)
diff --git a/cmake/freebsd/toolchain-x86_64.cmake b/cmake/freebsd/toolchain-x86_64.cmake
index f9e45686db7..80cbeba549f 100644
--- a/cmake/freebsd/toolchain-x86_64.cmake
+++ b/cmake/freebsd/toolchain-x86_64.cmake
@@ -3,17 +3,13 @@ set (CMAKE_SYSTEM_PROCESSOR "x86_64")
set (CMAKE_C_COMPILER_TARGET "x86_64-pc-freebsd11")
set (CMAKE_CXX_COMPILER_TARGET "x86_64-pc-freebsd11")
set (CMAKE_ASM_COMPILER_TARGET "x86_64-pc-freebsd11")
-set (CMAKE_SYSROOT "${CMAKE_CURRENT_LIST_DIR}/../toolchain/freebsd-x86_64")
+set (CMAKE_SYSROOT "${CMAKE_CURRENT_LIST_DIR}/../../contrib/sysroot/freebsd-x86_64")
set (CMAKE_TRY_COMPILE_TARGET_TYPE STATIC_LIBRARY) # disable linkage check - it doesn't work in CMake
-set (CMAKE_AR "/usr/bin/ar" CACHE FILEPATH "" FORCE)
-set (CMAKE_RANLIB "/usr/bin/ranlib" CACHE FILEPATH "" FORCE)
-
-set (LINKER_NAME "ld.lld" CACHE STRING "" FORCE)
-
-set (CMAKE_EXE_LINKER_FLAGS_INIT "-fuse-ld=lld")
-set (CMAKE_SHARED_LINKER_FLAGS_INIT "-fuse-ld=lld")
+# Overridden later in cmake/tools.cmake, but for some reason they must already be set here.
+set (CMAKE_AR "ar")
+set (CMAKE_RANLIB "ranlib")
set (HAS_PRE_1970_EXITCODE "0" CACHE STRING "Result from TRY_RUN" FORCE)
set (HAS_PRE_1970_EXITCODE__TRYRUN_OUTPUT "" CACHE STRING "Output from TRY_RUN" FORCE)
diff --git a/cmake/linux/default_libs.cmake b/cmake/linux/default_libs.cmake
index 4abd0a951e1..23374916418 100644
--- a/cmake/linux/default_libs.cmake
+++ b/cmake/linux/default_libs.cmake
@@ -5,8 +5,12 @@ set (DEFAULT_LIBS "-nodefaultlibs")
# We need builtins from Clang's RT even without libcxx - for ubsan+int128.
# See https://bugs.llvm.org/show_bug.cgi?id=16404
-if (COMPILER_CLANG AND NOT CMAKE_CROSSCOMPILING)
- execute_process (COMMAND ${CMAKE_CXX_COMPILER} --print-libgcc-file-name --rtlib=compiler-rt OUTPUT_VARIABLE BUILTINS_LIBRARY OUTPUT_STRIP_TRAILING_WHITESPACE)
+if (COMPILER_CLANG)
+ execute_process (COMMAND ${CMAKE_CXX_COMPILER} --target=${CMAKE_CXX_COMPILER_TARGET} --print-libgcc-file-name --rtlib=compiler-rt OUTPUT_VARIABLE BUILTINS_LIBRARY OUTPUT_STRIP_TRAILING_WHITESPACE)
+
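+ # The path printed by clang may not exist when cross-compiling and the target's
+ # compiler-rt is not installed; fall back to libgcc in that case.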
+ if (NOT EXISTS "${BUILTINS_LIBRARY}")
+ set (BUILTINS_LIBRARY "-lgcc")
+ endif ()
else ()
set (BUILTINS_LIBRARY "-lgcc")
endif ()
@@ -28,7 +32,7 @@ set(CMAKE_C_STANDARD_LIBRARIES ${DEFAULT_LIBS})
# glibc-compatibility library relies to constant version of libc headers
# (because minor changes in function attributes between different glibc versions will introduce incompatibilities)
# This is for x86_64. For other architectures we have separate toolchains.
-if (ARCH_AMD64 AND NOT_UNBUNDLED AND NOT CMAKE_CROSSCOMPILING)
+if (ARCH_AMD64 AND NOT CMAKE_CROSSCOMPILING)
set(CMAKE_C_STANDARD_INCLUDE_DIRECTORIES ${ClickHouse_SOURCE_DIR}/contrib/libc-headers/x86_64-linux-gnu ${ClickHouse_SOURCE_DIR}/contrib/libc-headers)
set(CMAKE_CXX_STANDARD_INCLUDE_DIRECTORIES ${ClickHouse_SOURCE_DIR}/contrib/libc-headers/x86_64-linux-gnu ${ClickHouse_SOURCE_DIR}/contrib/libc-headers)
endif ()
diff --git a/cmake/linux/toolchain-aarch64.cmake b/cmake/linux/toolchain-aarch64.cmake
index fa814d8d59b..5b1e41dc871 100644
--- a/cmake/linux/toolchain-aarch64.cmake
+++ b/cmake/linux/toolchain-aarch64.cmake
@@ -6,25 +6,18 @@ set (CMAKE_C_COMPILER_TARGET "aarch64-linux-gnu")
set (CMAKE_CXX_COMPILER_TARGET "aarch64-linux-gnu")
set (CMAKE_ASM_COMPILER_TARGET "aarch64-linux-gnu")
+# Overridden later in cmake/tools.cmake, but for some reason they must already be set here.
+set (CMAKE_AR "ar")
+set (CMAKE_RANLIB "ranlib")
+
set (TOOLCHAIN_PATH "${CMAKE_CURRENT_LIST_DIR}/../../contrib/sysroot/linux-aarch64")
set (CMAKE_SYSROOT "${TOOLCHAIN_PATH}/aarch64-linux-gnu/libc")
-find_program (LLVM_AR_PATH NAMES "llvm-ar" "llvm-ar-13" "llvm-ar-12" "llvm-ar-11" "llvm-ar-10" "llvm-ar-9" "llvm-ar-8")
-find_program (LLVM_RANLIB_PATH NAMES "llvm-ranlib" "llvm-ranlib-13" "llvm-ranlib-12" "llvm-ranlib-11" "llvm-ranlib-10" "llvm-ranlib-9")
-
-set (CMAKE_AR "${LLVM_AR_PATH}" CACHE FILEPATH "" FORCE)
-set (CMAKE_RANLIB "${LLVM_RANLIB_PATH}" CACHE FILEPATH "" FORCE)
-
set (CMAKE_C_FLAGS_INIT "${CMAKE_C_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}")
set (CMAKE_CXX_FLAGS_INIT "${CMAKE_CXX_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}")
set (CMAKE_ASM_FLAGS_INIT "${CMAKE_ASM_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}")
-set (LINKER_NAME "ld.lld" CACHE STRING "" FORCE)
-
-set (CMAKE_EXE_LINKER_FLAGS_INIT "-fuse-ld=lld")
-set (CMAKE_SHARED_LINKER_FLAGS_INIT "-fuse-ld=lld")
-
set (HAS_PRE_1970_EXITCODE "0" CACHE STRING "Result from TRY_RUN" FORCE)
set (HAS_PRE_1970_EXITCODE__TRYRUN_OUTPUT "" CACHE STRING "Output from TRY_RUN" FORCE)
diff --git a/cmake/linux/toolchain-ppc64le.cmake b/cmake/linux/toolchain-ppc64le.cmake
index cf85fc20fc4..70915401046 100644
--- a/cmake/linux/toolchain-ppc64le.cmake
+++ b/cmake/linux/toolchain-ppc64le.cmake
@@ -6,25 +6,18 @@ set (CMAKE_C_COMPILER_TARGET "ppc64le-linux-gnu")
set (CMAKE_CXX_COMPILER_TARGET "ppc64le-linux-gnu")
set (CMAKE_ASM_COMPILER_TARGET "ppc64le-linux-gnu")
+# Overridden later in cmake/tools.cmake, but for some reason they must already be set here.
+set (CMAKE_AR "ar")
+set (CMAKE_RANLIB "ranlib")
+
set (TOOLCHAIN_PATH "${CMAKE_CURRENT_LIST_DIR}/../../contrib/sysroot/linux-powerpc64le")
set (CMAKE_SYSROOT "${TOOLCHAIN_PATH}/powerpc64le-linux-gnu/libc")
-find_program (LLVM_AR_PATH NAMES "llvm-ar" "llvm-ar-13" "llvm-ar-12" "llvm-ar-11" "llvm-ar-10" "llvm-ar-9" "llvm-ar-8")
-find_program (LLVM_RANLIB_PATH NAMES "llvm-ranlib" "llvm-ranlib-13" "llvm-ranlib-12" "llvm-ranlib-11" "llvm-ranlib-10" "llvm-ranlib-9")
-
-set (CMAKE_AR "${LLVM_AR_PATH}" CACHE FILEPATH "" FORCE)
-set (CMAKE_RANLIB "${LLVM_RANLIB_PATH}" CACHE FILEPATH "" FORCE)
-
set (CMAKE_C_FLAGS_INIT "${CMAKE_C_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}")
set (CMAKE_CXX_FLAGS_INIT "${CMAKE_CXX_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}")
set (CMAKE_ASM_FLAGS_INIT "${CMAKE_ASM_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}")
-set (LINKER_NAME "ld.lld" CACHE STRING "" FORCE)
-
-set (CMAKE_EXE_LINKER_FLAGS_INIT "-fuse-ld=lld")
-set (CMAKE_SHARED_LINKER_FLAGS_INIT "-fuse-ld=lld")
-
set (HAS_PRE_1970_EXITCODE "0" CACHE STRING "Result from TRY_RUN" FORCE)
set (HAS_PRE_1970_EXITCODE__TRYRUN_OUTPUT "" CACHE STRING "Output from TRY_RUN" FORCE)
diff --git a/cmake/linux/toolchain-riscv64.cmake b/cmake/linux/toolchain-riscv64.cmake
index 1ccbd3ee0da..7ce6ff43a49 100644
--- a/cmake/linux/toolchain-riscv64.cmake
+++ b/cmake/linux/toolchain-riscv64.cmake
@@ -6,24 +6,20 @@ set (CMAKE_C_COMPILER_TARGET "riscv64-linux-gnu")
set (CMAKE_CXX_COMPILER_TARGET "riscv64-linux-gnu")
set (CMAKE_ASM_COMPILER_TARGET "riscv64-linux-gnu")
+# Overridden later in cmake/tools.cmake, but for some reason they must already be set here.
+set (CMAKE_AR "ar")
+set (CMAKE_RANLIB "ranlib")
+
set (TOOLCHAIN_PATH "${CMAKE_CURRENT_LIST_DIR}/../../contrib/sysroot/linux-riscv64")
set (CMAKE_SYSROOT "${TOOLCHAIN_PATH}")
-find_program (LLVM_AR_PATH NAMES "llvm-ar" "llvm-ar-13" "llvm-ar-12" "llvm-ar-11" "llvm-ar-10" "llvm-ar-9" "llvm-ar-8")
-find_program (LLVM_RANLIB_PATH NAMES "llvm-ranlib" "llvm-ranlib-13" "llvm-ranlib-12" "llvm-ranlib-11" "llvm-ranlib-10" "llvm-ranlib-9")
-
-set (CMAKE_AR "${LLVM_AR_PATH}" CACHE FILEPATH "" FORCE)
-set (CMAKE_RANLIB "${LLVM_RANLIB_PATH}" CACHE FILEPATH "" FORCE)
-
set (CMAKE_C_FLAGS_INIT "${CMAKE_C_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}")
set (CMAKE_CXX_FLAGS_INIT "${CMAKE_CXX_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}")
set (CMAKE_ASM_FLAGS_INIT "${CMAKE_ASM_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}")
-set (LINKER_NAME "ld.lld" CACHE STRING "" FORCE)
-
-set (CMAKE_EXE_LINKER_FLAGS_INIT "-fuse-ld=lld")
-set (CMAKE_SHARED_LINKER_FLAGS_INIT "-fuse-ld=lld")
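+# Unlike the other toolchain files, this one keeps explicit -fuse-ld flags and points
+# them at BFD, presumably because lld's RISC-V support is still incomplete.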
+set (CMAKE_EXE_LINKER_FLAGS_INIT "-fuse-ld=bfd")
+set (CMAKE_SHARED_LINKER_FLAGS_INIT "-fuse-ld=bfd")
set (HAS_PRE_1970_EXITCODE "0" CACHE STRING "Result from TRY_RUN" FORCE)
set (HAS_PRE_1970_EXITCODE__TRYRUN_OUTPUT "" CACHE STRING "Output from TRY_RUN" FORCE)
diff --git a/cmake/linux/toolchain-x86_64-musl.cmake b/cmake/linux/toolchain-x86_64-musl.cmake
index 0406b5de0ba..14dba6843b9 100644
--- a/cmake/linux/toolchain-x86_64-musl.cmake
+++ b/cmake/linux/toolchain-x86_64-musl.cmake
@@ -6,25 +6,18 @@ set (CMAKE_C_COMPILER_TARGET "x86_64-linux-musl")
set (CMAKE_CXX_COMPILER_TARGET "x86_64-linux-musl")
set (CMAKE_ASM_COMPILER_TARGET "x86_64-linux-musl")
+# Overridden later in cmake/tools.cmake, but for some reason they must already be set here.
+set (CMAKE_AR "ar")
+set (CMAKE_RANLIB "ranlib")
+
set (TOOLCHAIN_PATH "${CMAKE_CURRENT_LIST_DIR}/../../contrib/sysroot/linux-x86_64-musl")
set (CMAKE_SYSROOT "${TOOLCHAIN_PATH}")
-find_program (LLVM_AR_PATH NAMES "llvm-ar" "llvm-ar-13" "llvm-ar-12" "llvm-ar-11" "llvm-ar-10" "llvm-ar-9" "llvm-ar-8")
-find_program (LLVM_RANLIB_PATH NAMES "llvm-ranlib" "llvm-ranlib-13" "llvm-ranlib-12" "llvm-ranlib-11" "llvm-ranlib-10" "llvm-ranlib-9")
-
-set (CMAKE_AR "${LLVM_AR_PATH}" CACHE FILEPATH "" FORCE)
-set (CMAKE_RANLIB "${LLVM_RANLIB_PATH}" CACHE FILEPATH "" FORCE)
-
set (CMAKE_C_FLAGS_INIT "${CMAKE_C_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}")
set (CMAKE_CXX_FLAGS_INIT "${CMAKE_CXX_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}")
set (CMAKE_ASM_FLAGS_INIT "${CMAKE_ASM_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}")
-set (LINKER_NAME "ld.lld" CACHE STRING "" FORCE)
-
-set (CMAKE_EXE_LINKER_FLAGS_INIT "-fuse-ld=lld")
-set (CMAKE_SHARED_LINKER_FLAGS_INIT "-fuse-ld=lld")
-
set (HAS_PRE_1970_EXITCODE "0" CACHE STRING "Result from TRY_RUN" FORCE)
set (HAS_PRE_1970_EXITCODE__TRYRUN_OUTPUT "" CACHE STRING "Output from TRY_RUN" FORCE)
diff --git a/cmake/linux/toolchain-x86_64.cmake b/cmake/linux/toolchain-x86_64.cmake
index 1e139cec062..879f35feb83 100644
--- a/cmake/linux/toolchain-x86_64.cmake
+++ b/cmake/linux/toolchain-x86_64.cmake
@@ -6,25 +6,18 @@ set (CMAKE_C_COMPILER_TARGET "x86_64-linux-gnu")
set (CMAKE_CXX_COMPILER_TARGET "x86_64-linux-gnu")
set (CMAKE_ASM_COMPILER_TARGET "x86_64-linux-gnu")
+# Overridden later in cmake/tools.cmake, but for some reason they must already be set here.
+set (CMAKE_AR "ar")
+set (CMAKE_RANLIB "ranlib")
+
set (TOOLCHAIN_PATH "${CMAKE_CURRENT_LIST_DIR}/../../contrib/sysroot/linux-x86_64")
set (CMAKE_SYSROOT "${TOOLCHAIN_PATH}/x86_64-linux-gnu/libc")
-find_program (LLVM_AR_PATH NAMES "llvm-ar" "llvm-ar-13" "llvm-ar-12" "llvm-ar-11" "llvm-ar-10" "llvm-ar-9" "llvm-ar-8")
-find_program (LLVM_RANLIB_PATH NAMES "llvm-ranlib" "llvm-ranlib-13" "llvm-ranlib-12" "llvm-ranlib-11" "llvm-ranlib-10" "llvm-ranlib-9")
-
-set (CMAKE_AR "${LLVM_AR_PATH}" CACHE FILEPATH "" FORCE)
-set (CMAKE_RANLIB "${LLVM_RANLIB_PATH}" CACHE FILEPATH "" FORCE)
-
set (CMAKE_C_FLAGS_INIT "${CMAKE_C_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}")
set (CMAKE_CXX_FLAGS_INIT "${CMAKE_CXX_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}")
set (CMAKE_ASM_FLAGS_INIT "${CMAKE_ASM_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}")
-set (LINKER_NAME "ld.lld" CACHE STRING "" FORCE)
-
-set (CMAKE_EXE_LINKER_FLAGS_INIT "-fuse-ld=lld")
-set (CMAKE_SHARED_LINKER_FLAGS_INIT "-fuse-ld=lld")
-
set (HAS_PRE_1970_EXITCODE "0" CACHE STRING "Result from TRY_RUN" FORCE)
set (HAS_PRE_1970_EXITCODE__TRYRUN_OUTPUT "" CACHE STRING "Output from TRY_RUN" FORCE)
diff --git a/cmake/print_flags.cmake b/cmake/print_flags.cmake
index ad6bca21933..66f2a8bfbc7 100644
--- a/cmake/print_flags.cmake
+++ b/cmake/print_flags.cmake
@@ -1,6 +1,12 @@
set (FULL_C_FLAGS "${CMAKE_C_FLAGS} ${CMAKE_C_FLAGS_${CMAKE_BUILD_TYPE_UC}}")
set (FULL_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${CMAKE_CXX_FLAGS_${CMAKE_BUILD_TYPE_UC}}")
set (FULL_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${CMAKE_EXE_LINKER_FLAGS_${CMAKE_BUILD_TYPE_UC}}")
+
message (STATUS "compiler C = ${CMAKE_C_COMPILER} ${FULL_C_FLAGS}")
message (STATUS "compiler CXX = ${CMAKE_CXX_COMPILER} ${FULL_CXX_FLAGS}")
message (STATUS "LINKER_FLAGS = ${FULL_EXE_LINKER_FLAGS}")
+
+# Reproducible builds
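+# Normalize the absolute source path out of the flags so that the recorded flags
+# do not depend on the checkout location.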
+string (REPLACE "${CMAKE_SOURCE_DIR}" "." FULL_C_FLAGS_NORMALIZED "${FULL_C_FLAGS}")
+string (REPLACE "${CMAKE_SOURCE_DIR}" "." FULL_CXX_FLAGS_NORMALIZED "${FULL_CXX_FLAGS}")
+string (REPLACE "${CMAKE_SOURCE_DIR}" "." FULL_EXE_LINKER_FLAGS_NORMALIZED "${FULL_EXE_LINKER_FLAGS}")
diff --git a/cmake/sanitize.cmake b/cmake/sanitize.cmake
index f60f7431389..f052948e731 100644
--- a/cmake/sanitize.cmake
+++ b/cmake/sanitize.cmake
@@ -78,6 +78,15 @@ if (SANITIZE)
elseif (SANITIZE STREQUAL "undefined")
set (UBSAN_FLAGS "-fsanitize=undefined -fno-sanitize-recover=all -fno-sanitize=float-divide-by-zero")
+ if (ENABLE_FUZZING)
+ # Unsigned integer overflow is well-defined behaviour from the perspective of the C++ standard,
+ # compilers and CPUs. We rely on it in hash functions like SipHash and in many other places in our codebase.
+ # This flag is needed only because fuzzers are run inside the oss-fuzz infrastructure,
+ # which passes a bunch of flags to keep the program from halting on UIO and even to silence the warnings.
+ # But for unknown reasons those flags don't work with ClickHouse, or we don't understand how to use them properly;
+ # that's why we often receive reports about UIO. The simplest way to avoid this is to just set this flag here.
+ set(UBSAN_FLAGS "${SAN_FLAGS} -fno-sanitize=unsigned-integer-overflow")
+ endif()
if (COMPILER_CLANG)
set (UBSAN_FLAGS "${UBSAN_FLAGS} -fsanitize-blacklist=${CMAKE_SOURCE_DIR}/tests/ubsan_suppressions.txt")
else()
diff --git a/cmake/tools.cmake b/cmake/tools.cmake
index 7a6e3672d17..c87887ddddd 100644
--- a/cmake/tools.cmake
+++ b/cmake/tools.cmake
@@ -1,3 +1,5 @@
+# Compiler
+
if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
set (COMPILER_GCC 1)
elseif (CMAKE_CXX_COMPILER_ID MATCHES "AppleClang")
@@ -6,6 +8,8 @@ elseif (CMAKE_CXX_COMPILER_ID MATCHES "Clang")
set (COMPILER_CLANG 1)
endif ()
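+# Print the compiler version to the configure log; handy for diagnosing toolchain issues.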
+execute_process(COMMAND ${CMAKE_CXX_COMPILER} --version)
+
if (COMPILER_GCC)
# Require minimum version of gcc
set (GCC_MINIMUM_VERSION 11)
@@ -44,8 +48,10 @@ else ()
message (WARNING "You are using an unsupported compiler. Compilation has only been tested with Clang and GCC.")
endif ()
-STRING(REGEX MATCHALL "[0-9]+" COMPILER_VERSION_LIST ${CMAKE_CXX_COMPILER_VERSION})
-LIST(GET COMPILER_VERSION_LIST 0 COMPILER_VERSION_MAJOR)
+string (REGEX MATCHALL "[0-9]+" COMPILER_VERSION_LIST ${CMAKE_CXX_COMPILER_VERSION})
+list (GET COMPILER_VERSION_LIST 0 COMPILER_VERSION_MAJOR)
+
+# Linker
# Example values: `lld-10`, `gold`.
option (LINKER_NAME "Linker name or full path")
@@ -93,3 +99,69 @@ if (LINKER_NAME)
message(STATUS "Using custom linker by name: ${LINKER_NAME}")
endif ()
+
+# Archiver
+
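+# When building with Clang, prefer the llvm-ar that matches the compiler's major version;
+# with GCC, take whichever recent llvm-ar is available. The same version-matching pattern
+# is used below for ranlib, install-name-tool and objcopy.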
+if (COMPILER_GCC)
+ find_program (LLVM_AR_PATH NAMES "llvm-ar" "llvm-ar-13" "llvm-ar-12" "llvm-ar-11")
+else ()
+ find_program (LLVM_AR_PATH NAMES "llvm-ar-${COMPILER_VERSION_MAJOR}" "llvm-ar")
+endif ()
+
+if (LLVM_AR_PATH)
+ set (CMAKE_AR "${LLVM_AR_PATH}")
+endif ()
+
+# Ranlib
+
+if (COMPILER_GCC)
+ find_program (LLVM_RANLIB_PATH NAMES "llvm-ranlib" "llvm-ranlib-13" "llvm-ranlib-12" "llvm-ranlib-11")
+else ()
+ find_program (LLVM_RANLIB_PATH NAMES "llvm-ranlib-${COMPILER_VERSION_MAJOR}" "llvm-ranlib")
+endif ()
+
+if (LLVM_RANLIB_PATH)
+ set (CMAKE_RANLIB "${LLVM_RANLIB_PATH}")
+endif ()
+
+# Install Name Tool
+
+if (COMPILER_GCC)
+ find_program (LLVM_INSTALL_NAME_TOOL_PATH NAMES "llvm-install-name-tool" "llvm-install-name-tool-13" "llvm-install-name-tool-12" "llvm-install-name-tool-11")
+else ()
+ find_program (LLVM_INSTALL_NAME_TOOL_PATH NAMES "llvm-install-name-tool-${COMPILER_VERSION_MAJOR}" "llvm-install-name-tool")
+endif ()
+
+if (LLVM_INSTALL_NAME_TOOL_PATH)
+ set (CMAKE_INSTALL_NAME_TOOL "${LLVM_INSTALL_NAME_TOOL_PATH}")
+endif ()
+
+# Objcopy
+
+if (COMPILER_GCC)
+ find_program (OBJCOPY_PATH NAMES "llvm-objcopy" "llvm-objcopy-13" "llvm-objcopy-12" "llvm-objcopy-11" "objcopy")
+else ()
+ find_program (OBJCOPY_PATH NAMES "llvm-objcopy-${COMPILER_VERSION_MAJOR}" "llvm-objcopy" "objcopy")
+endif ()
+
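+# macOS does not ship objcopy: look for llvm-objcopy inside Homebrew's llvm prefix,
+# then for plain objcopy inside Homebrew's binutils prefix.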
+if (NOT OBJCOPY_PATH AND OS_DARWIN)
+ find_program (BREW_PATH NAMES "brew")
+ if (BREW_PATH)
+ execute_process (COMMAND ${BREW_PATH} --prefix llvm ERROR_QUIET OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE LLVM_PREFIX)
+ if (LLVM_PREFIX)
+ find_program (OBJCOPY_PATH NAMES "llvm-objcopy" PATHS "${LLVM_PREFIX}/bin" NO_DEFAULT_PATH)
+ endif ()
+ if (NOT OBJCOPY_PATH)
+ execute_process (COMMAND ${BREW_PATH} --prefix binutils ERROR_QUIET OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE BINUTILS_PREFIX)
+ if (BINUTILS_PREFIX)
+ find_program (OBJCOPY_PATH NAMES "objcopy" PATHS "${BINUTILS_PREFIX}/bin" NO_DEFAULT_PATH)
+ endif ()
+ endif ()
+ endif ()
+endif ()
+
+if (OBJCOPY_PATH)
+ message (STATUS "Using objcopy: ${OBJCOPY_PATH}")
+else ()
+ message (FATAL_ERROR "Cannot find objcopy.")
+endif ()
diff --git a/cmake/warnings.cmake b/cmake/warnings.cmake
index 394c04d5c89..ecc31529dc4 100644
--- a/cmake/warnings.cmake
+++ b/cmake/warnings.cmake
@@ -7,9 +7,7 @@
# - sometimes warnings from 3rd party libraries may come from macro substitutions in our code
# and we have to wrap them with #pragma GCC/clang diagnostic ignored
-if (NOT MSVC)
- set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wextra")
-endif ()
+set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wextra")
# Add some warnings that are not available even with -Wall -Wextra -Wpedantic.
# Intended for exploration of new compiler warnings that may be found useful.
@@ -28,46 +26,6 @@ if (COMPILER_CLANG)
no_warning(zero-length-array)
no_warning(c11-extensions)
- add_warning(comma)
- add_warning(conditional-uninitialized)
- add_warning(covered-switch-default)
- add_warning(deprecated)
- add_warning(embedded-directive)
- add_warning(empty-init-stmt) # linux-only
- add_warning(extra-semi-stmt) # linux-only
- add_warning(extra-semi)
- add_warning(gnu-case-range)
- add_warning(inconsistent-missing-destructor-override)
- add_warning(newline-eof)
- add_warning(old-style-cast)
- add_warning(range-loop-analysis)
- add_warning(redundant-parens)
- add_warning(reserved-id-macro)
- add_warning(shadow-field)
- add_warning(shadow-uncaptured-local)
- add_warning(shadow)
- add_warning(string-plus-int)
- add_warning(undef)
- add_warning(unreachable-code-return)
- add_warning(unreachable-code)
- add_warning(unused-exception-parameter)
- add_warning(unused-macros)
- add_warning(unused-member-function)
- add_warning(unneeded-internal-declaration)
- add_warning(implicit-int-float-conversion)
- add_warning(no-delete-null-pointer-checks)
- add_warning(anon-enum-enum-conversion)
- add_warning(assign-enum)
- add_warning(bitwise-op-parentheses)
- add_warning(int-in-bool-context)
- add_warning(sometimes-uninitialized)
- add_warning(tautological-bitwise-compare)
-
- # XXX: libstdc++ has some of these for 3way compare
- if (USE_LIBCXX)
- add_warning(zero-as-null-pointer-constant)
- endif()
-
if (WEVERYTHING)
add_warning(everything)
no_warning(c++98-compat-pedantic)
@@ -103,6 +61,47 @@ if (COMPILER_CLANG)
endif()
# TODO Enable conversion, sign-conversion, double-promotion warnings.
+ else ()
+ add_warning(comma)
+ add_warning(conditional-uninitialized)
+ add_warning(covered-switch-default)
+ add_warning(deprecated)
+ add_warning(embedded-directive)
+ add_warning(empty-init-stmt) # linux-only
+ add_warning(extra-semi-stmt) # linux-only
+ add_warning(extra-semi)
+ add_warning(gnu-case-range)
+ add_warning(inconsistent-missing-destructor-override)
+ add_warning(newline-eof)
+ add_warning(old-style-cast)
+ add_warning(range-loop-analysis)
+ add_warning(redundant-parens)
+ add_warning(reserved-id-macro)
+ add_warning(shadow-field)
+ add_warning(shadow-uncaptured-local)
+ add_warning(shadow)
+ add_warning(string-plus-int)
+ add_warning(undef)
+ add_warning(unreachable-code-return)
+ add_warning(unreachable-code)
+ add_warning(unused-exception-parameter)
+ add_warning(unused-macros)
+ add_warning(unused-member-function)
+ add_warning(unneeded-internal-declaration)
+ add_warning(implicit-int-float-conversion)
+ add_warning(no-delete-null-pointer-checks)
+ add_warning(anon-enum-enum-conversion)
+ add_warning(assign-enum)
+ add_warning(bitwise-op-parentheses)
+ add_warning(int-in-bool-context)
+ add_warning(sometimes-uninitialized)
+ add_warning(tautological-bitwise-compare)
+
+ # XXX: libstdc++ has some of these for 3way compare
+ if (USE_LIBCXX)
+ add_warning(zero-as-null-pointer-constant)
+ endif()
+
endif ()
elseif (COMPILER_GCC)
# Add compiler options only to c++ compiler
diff --git a/contrib/CMakeLists.txt b/contrib/CMakeLists.txt
index 676654452de..278dba9bb70 100644
--- a/contrib/CMakeLists.txt
+++ b/contrib/CMakeLists.txt
@@ -51,7 +51,7 @@ if (USE_YAML_CPP)
endif()
if (USE_INTERNAL_XZ_LIBRARY)
- add_subdirectory (xz)
+ add_subdirectory (xz-cmake)
endif()
add_subdirectory (poco-cmake)
@@ -64,9 +64,7 @@ if (USE_INTERNAL_ZSTD_LIBRARY)
endif ()
if (USE_INTERNAL_RE2_LIBRARY)
- set(RE2_BUILD_TESTING 0 CACHE INTERNAL "")
- add_subdirectory (re2)
- add_subdirectory (re2_st)
+ add_subdirectory (re2-cmake)
endif ()
if (USE_INTERNAL_DOUBLE_CONVERSION_LIBRARY)
@@ -82,23 +80,10 @@ if (USE_INTERNAL_FARMHASH_LIBRARY)
endif ()
if (USE_INTERNAL_ZLIB_LIBRARY)
- set (ZLIB_ENABLE_TESTS 0 CACHE INTERNAL "")
- set (SKIP_INSTALL_ALL 1 CACHE INTERNAL "")
- set (ZLIB_COMPAT 1 CACHE INTERNAL "") # also enables WITH_GZFILEOP
- set (WITH_NATIVE_INSTRUCTIONS ${ARCH_NATIVE} CACHE INTERNAL "")
- if (OS_FREEBSD OR ARCH_I386)
- set (WITH_OPTIM 0 CACHE INTERNAL "") # Bug in assembler
- endif ()
- if (ARCH_AARCH64)
- set(WITH_NEON 1 CACHE INTERNAL "")
- set(WITH_ACLE 1 CACHE INTERNAL "")
- endif ()
-
- add_subdirectory (${INTERNAL_ZLIB_NAME})
- # We should use same defines when including zlib.h as used when zlib compiled
- target_compile_definitions (zlib PUBLIC ZLIB_COMPAT WITH_GZFILEOP)
- if (ARCH_AMD64 OR ARCH_AARCH64)
- target_compile_definitions (zlib PUBLIC X86_64 UNALIGNED_OK)
+ if (INTERNAL_ZLIB_NAME STREQUAL "zlib-ng")
+ add_subdirectory (zlib-ng-cmake)
+ else ()
+ add_subdirectory (${INTERNAL_ZLIB_NAME})
endif ()
endif ()
@@ -117,28 +102,8 @@ if (USE_INTERNAL_LDAP_LIBRARY)
add_subdirectory (openldap-cmake)
endif ()
-function(mysql_support)
- set(CLIENT_PLUGIN_CACHING_SHA2_PASSWORD STATIC)
- set(CLIENT_PLUGIN_SHA256_PASSWORD STATIC)
- set(CLIENT_PLUGIN_REMOTE_IO OFF)
- set(CLIENT_PLUGIN_DIALOG OFF)
- set(CLIENT_PLUGIN_AUTH_GSSAPI_CLIENT OFF)
- set(CLIENT_PLUGIN_CLIENT_ED25519 OFF)
- set(CLIENT_PLUGIN_MYSQL_CLEAR_PASSWORD OFF)
- set(SKIP_TESTS 1)
- if (GLIBC_COMPATIBILITY)
- set(LIBM glibc-compatibility)
- endif()
- if (USE_INTERNAL_ZLIB_LIBRARY)
- set(ZLIB_FOUND ON)
- set(ZLIB_LIBRARY ${ZLIB_LIBRARIES})
- set(WITH_EXTERNAL_ZLIB ON)
- endif()
- set(WITH_CURL OFF)
- add_subdirectory (mariadb-connector-c)
-endfunction()
-if (ENABLE_MYSQL AND USE_INTERNAL_MYSQL_LIBRARY)
- mysql_support()
+if (USE_INTERNAL_MYSQL_LIBRARY)
+ add_subdirectory (mariadb-connector-c-cmake)
endif ()
if (USE_INTERNAL_RDKAFKA_LIBRARY)
@@ -194,11 +159,7 @@ if (USE_INTERNAL_AVRO_LIBRARY)
endif()
if(USE_INTERNAL_GTEST_LIBRARY)
- set(GOOGLETEST_VERSION 1.10.0) # master
- # Google Test from sources
- add_subdirectory(${ClickHouse_SOURCE_DIR}/contrib/googletest/googletest ${CMAKE_CURRENT_BINARY_DIR}/googletest)
- # avoid problems with
- target_compile_definitions (gtest INTERFACE GTEST_HAS_POSIX_RE=0)
+ add_subdirectory(googletest-cmake)
elseif(GTEST_SRC_DIR)
add_subdirectory(${GTEST_SRC_DIR}/googletest ${CMAKE_CURRENT_BINARY_DIR}/googletest)
target_compile_definitions(gtest INTERFACE GTEST_HAS_POSIX_RE=0)
@@ -229,7 +190,7 @@ if (USE_EMBEDDED_COMPILER)
endif ()
if (USE_INTERNAL_LIBGSASL_LIBRARY)
- add_subdirectory(libgsasl)
+ add_subdirectory(libgsasl-cmake)
endif()
if (USE_INTERNAL_LIBXML2_LIBRARY)
@@ -281,14 +242,7 @@ if (USE_AMQPCPP)
add_subdirectory (amqpcpp-cmake)
endif()
if (USE_CASSANDRA)
- # Need to use C++17 since the compilation is not possible with C++20 currently.
- set (CMAKE_CXX_STANDARD_bak ${CMAKE_CXX_STANDARD})
- set (CMAKE_CXX_STANDARD 17)
-
- add_subdirectory (cassandra)
-
- set (CMAKE_CXX_STANDARD ${CMAKE_CXX_STANDARD_bak})
- unset (CMAKE_CXX_STANDARD_bak)
+ add_subdirectory (cassandra-cmake)
endif()
# Should go before:
@@ -296,16 +250,11 @@ endif()
add_subdirectory (curl-cmake)
if (USE_SENTRY)
- add_subdirectory (sentry-native)
+ add_subdirectory (sentry-native-cmake)
endif()
add_subdirectory (fmtlib-cmake)
-if (USE_STATS)
- add_subdirectory (stats-cmake)
- add_subdirectory (gcem)
-endif()
-
if (USE_KRB5)
add_subdirectory (krb5-cmake)
if (USE_CYRUS_SASL)
@@ -326,7 +275,7 @@ if (USE_NURAFT)
add_subdirectory(nuraft-cmake)
endif()
-add_subdirectory(fast_float)
+add_subdirectory(fast_float-cmake)
if (USE_NLP)
add_subdirectory(libstemmer-c-cmake)
diff --git a/contrib/arrow-cmake/CMakeLists.txt b/contrib/arrow-cmake/CMakeLists.txt
index 841c280d192..231185462dc 100644
--- a/contrib/arrow-cmake/CMakeLists.txt
+++ b/contrib/arrow-cmake/CMakeLists.txt
@@ -417,7 +417,49 @@ set(PARQUET_SRCS
#list(TRANSFORM PARQUET_SRCS PREPEND "${LIBRARY_DIR}/") # cmake 3.12
add_library(${PARQUET_LIBRARY} ${PARQUET_SRCS})
target_include_directories(${PARQUET_LIBRARY} SYSTEM PUBLIC "${ClickHouse_SOURCE_DIR}/contrib/arrow/cpp/src" "${CMAKE_CURRENT_SOURCE_DIR}/cpp/src" PRIVATE ${OPENSSL_INCLUDE_DIR})
-include("${ClickHouse_SOURCE_DIR}/contrib/thrift/build/cmake/ConfigureChecks.cmake") # makes config.h
+
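+# Hard-code the results of thrift's ConfigureChecks.cmake instead of running it;
+# these headers and functions are assumed to be available on all platforms we build for.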
+set (HAVE_ARPA_INET_H 1)
+set (HAVE_FCNTL_H 1)
+set (HAVE_GETOPT_H 1)
+set (HAVE_INTTYPES_H 1)
+set (HAVE_NETDB_H 1)
+set (HAVE_NETINET_IN_H 1)
+set (HAVE_SIGNAL_H 1)
+set (HAVE_STDINT_H 1)
+set (HAVE_UNISTD_H 1)
+set (HAVE_PTHREAD_H 1)
+set (HAVE_SYS_IOCTL_H 1)
+set (HAVE_SYS_PARAM_H 1)
+set (HAVE_SYS_RESOURCE_H 1)
+set (HAVE_SYS_SOCKET_H 1)
+set (HAVE_SYS_STAT_H 1)
+set (HAVE_SYS_TIME_H 1)
+set (HAVE_SYS_UN_H 1)
+set (HAVE_POLL_H 1)
+set (HAVE_SYS_POLL_H 1)
+set (HAVE_SYS_SELECT_H 1)
+set (HAVE_SCHED_H 1)
+set (HAVE_STRING_H 1)
+set (HAVE_STRINGS_H 1)
+set (HAVE_GETHOSTBYNAME 1)
+set (HAVE_STRERROR_R 1)
+set (HAVE_SCHED_GET_PRIORITY_MAX 1)
+set (HAVE_SCHED_GET_PRIORITY_MIN 1)
+
+if (OS_LINUX)
+ set (STRERROR_R_CHAR_P 1)
+endif ()
+
+#set(PACKAGE ${PACKAGE_NAME})
+#set(PACKAGE_STRING "${PACKAGE_NAME} ${PACKAGE_VERSION}")
+#set(VERSION ${thrift_VERSION})
+
+# generate a config.h file
+configure_file("${CMAKE_CURRENT_SOURCE_DIR}/build/cmake/config.h.in" "${CMAKE_CURRENT_BINARY_DIR}/thrift/config.h")
+
+include_directories("${CMAKE_CURRENT_BINARY_DIR}")
+
+
target_link_libraries(${PARQUET_LIBRARY} PUBLIC ${ARROW_LIBRARY} PRIVATE ${THRIFT_LIBRARY} boost::headers_only boost::regex ${OPENSSL_LIBRARIES})
if (SANITIZE STREQUAL "undefined")
diff --git a/contrib/base64 b/contrib/base64
index af9b331f2b4..9499e0c4945 160000
--- a/contrib/base64
+++ b/contrib/base64
@@ -1 +1 @@
-Subproject commit af9b331f2b4f30b41c70f3a571ff904a8251c1d3
+Subproject commit 9499e0c4945589973b9ea1bc927377cfbc84aa46
diff --git a/contrib/boost b/contrib/boost
index 79358a3106a..fcb058e1459 160000
--- a/contrib/boost
+++ b/contrib/boost
@@ -1 +1 @@
-Subproject commit 79358a3106aab6af464430ed67c7efafebf5cd6f
+Subproject commit fcb058e1459ac273ecfe7cdf72791cb1479115af
diff --git a/contrib/boost-cmake/CMakeLists.txt b/contrib/boost-cmake/CMakeLists.txt
index 27072910135..057a893e926 100644
--- a/contrib/boost-cmake/CMakeLists.txt
+++ b/contrib/boost-cmake/CMakeLists.txt
@@ -1,4 +1,4 @@
-option (USE_INTERNAL_BOOST_LIBRARY "Use internal Boost library" ${NOT_UNBUNDLED})
+option (USE_INTERNAL_BOOST_LIBRARY "Use internal Boost library" ON)
if (NOT USE_INTERNAL_BOOST_LIBRARY)
# 1.70 like in contrib/boost
@@ -196,6 +196,12 @@ if (NOT EXTERNAL_BOOST_FOUND)
"${LIBRARY_DIR}/libs/context/src/asm/make_ppc64_sysv_elf_gas.S"
"${LIBRARY_DIR}/libs/context/src/asm/ontop_ppc64_sysv_elf_gas.S"
)
+ elseif (ARCH_RISCV64)
+ set (SRCS_CONTEXT ${SRCS_CONTEXT}
+ "${LIBRARY_DIR}/libs/context/src/asm/jump_riscv64_sysv_elf_gas.S"
+ "${LIBRARY_DIR}/libs/context/src/asm/make_riscv64_sysv_elf_gas.S"
+ "${LIBRARY_DIR}/libs/context/src/asm/ontop_riscv64_sysv_elf_gas.S"
+ )
elseif(OS_DARWIN)
set (SRCS_CONTEXT ${SRCS_CONTEXT}
"${LIBRARY_DIR}/libs/context/src/asm/jump_x86_64_sysv_macho_gas.S"
diff --git a/contrib/cassandra-cmake/CMakeLists.txt b/contrib/cassandra-cmake/CMakeLists.txt
new file mode 100644
index 00000000000..a8f2bec5e2b
--- /dev/null
+++ b/contrib/cassandra-cmake/CMakeLists.txt
@@ -0,0 +1,127 @@
+# Need to use C++17 since the compilation is not possible with C++20 currently.
+set (CMAKE_CXX_STANDARD 17)
+
+set(CASS_ROOT_DIR ${CMAKE_SOURCE_DIR}/contrib/cassandra)
+set(CASS_SRC_DIR "${CASS_ROOT_DIR}/src")
+set(CASS_INCLUDE_DIR "${CASS_ROOT_DIR}/include")
+
+# Ensure functions/modules are available
+list(APPEND CMAKE_MODULE_PATH ${CASS_ROOT_DIR}/cmake)
+
+set(CASS_BUILD_SHARED 1)
+set(CASS_BUILD_STATIC 1)
+set(CASS_USE_KERBEROS 0)
+set(CASS_USE_LIBSSH2 0)
+set(CASS_USE_OPENSSL 1)
+set(CASS_USE_STD_ATOMIC 1)
+set(CASS_USE_ZLIB 1)
+
+
+file(GLOB SOURCES ${CASS_SRC_DIR}/*.cpp)
+
+if(APPLE)
+ list(REMOVE_ITEM SOURCES ${CASS_SRC_DIR}/get_time-unix.cpp ${CASS_SRC_DIR}/get_time-win.cpp)
+elseif(UNIX)
+ list(REMOVE_ITEM SOURCES ${CASS_SRC_DIR}/get_time-mac.cpp ${CASS_SRC_DIR}/get_time-win.cpp)
+elseif(WIN32)
+ list(REMOVE_ITEM SOURCES ${CASS_SRC_DIR}/get_time-mac.cpp ${CASS_SRC_DIR}/get_time-unix.cpp)
+endif()
+
+if(CASS_USE_OPENSSL)
+ list(APPEND INCLUDE_DIRS ${CASS_SRC_DIR}/ssl)
+ list(APPEND SOURCES ${CASS_SRC_DIR}/ssl/ssl_openssl_impl.cpp ${CASS_SRC_DIR}/ssl/ring_buffer_bio.cpp)
+else()
+ list(APPEND SOURCES ${CASS_SRC_DIR}/ssl/ssl_no_impl.cpp)
+endif()
+
+if(CASS_USE_KERBEROS)
+ list(APPEND INCLUDE_DIRS ${CASS_SRC_DIR}/gssapi)
+ list(APPEND SOURCES ${CASS_SRC_DIR}/gssapi/dse_auth_gssapi.cpp ${CASS_SRC_DIR}/gssapi/dse_auth_gssapi.hpp)
+endif()
+
+list(APPEND SOURCES ${CASS_SRC_DIR}/atomic/atomic_std.hpp)
+
+
+add_library(curl_hostcheck OBJECT ${CASS_SRC_DIR}/third_party/curl/hostcheck.cpp)
+add_library(hdr_histogram OBJECT ${CASS_SRC_DIR}/third_party/hdr_histogram/hdr_histogram.cpp)
+add_library(http-parser OBJECT ${CASS_SRC_DIR}/third_party/http-parser/http_parser.c)
+add_library(minizip OBJECT
+ ${CASS_SRC_DIR}/third_party/minizip/ioapi.c
+ ${CASS_SRC_DIR}/third_party/minizip/zip.c
+ ${CASS_SRC_DIR}/third_party/minizip/unzip.c)
+
+target_link_libraries(minizip zlib)
+target_compile_definitions(minizip PRIVATE "-Dz_crc_t=unsigned long")
+
+list(APPEND INCLUDE_DIRS
+ ${CASS_SRC_DIR}/third_party/curl
+ ${CASS_SRC_DIR}/third_party/hdr_histogram
+ ${CASS_SRC_DIR}/third_party/http-parser
+ ${CASS_SRC_DIR}/third_party/minizip
+ ${CASS_SRC_DIR}/third_party/mt19937_64
+ ${CASS_SRC_DIR}/third_party/rapidjson/rapidjson
+ ${CASS_SRC_DIR}/third_party/sparsehash/src)
+
+list(APPEND INCLUDE_DIRS ${CASS_INCLUDE_DIR} ${CASS_SRC_DIR})
+
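+# Pre-answered configure checks for sparsehash; fed into sparseconfig.h below.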
+set(HASH_FUN_H "functional")
+set(HASH_NAME hash)
+set(HASH_NAMESPACE "std")
+set(HAVE_INTTYPES_H 1)
+set(HAVE_STDINT_H 1)
+set(HAVE_SYS_TYPES_H 1)
+set(HAVE_MEMCPY 1)
+set(HAVE_LONG_LONG 1)
+set(HAVE_UINT16_T 1)
+
+configure_file("${CASS_SRC_DIR}/third_party/sparsehash/config.h.cmake" "${CMAKE_CURRENT_BINARY_DIR}/sparsehash/internal/sparseconfig.h")
+
+
+
+# Determine random availability
+if (OS_LINUX)
+ #set (HAVE_GETRANDOM 1) - not on every Linux kernel
+elseif (OS_FREEBSD OR OS_DARWIN)
+ set (HAVE_ARC4RANDOM 1)
+endif ()
+
+# Determine if sigpipe is available
+if (OS_LINUX)
+ set (HAVE_SIGTIMEDWAIT 1)
+elseif (OS_FREEBSD OR OS_DARWIN)
+ set (HAVE_NOSIGPIPE 1)
+endif()
+
+set (HAVE_BUILTIN_BSWAP32 1)
+set (HAVE_BUILTIN_BSWAP64 1)
+
+set(HAVE_BOOST_ATOMIC 0)
+set(HAVE_STD_ATOMIC 1)
+
+set(HAVE_KERBEROS ${CASS_USE_KERBEROS})
+set(HAVE_OPENSSL ${CASS_USE_OPENSSL})
+set(HAVE_ZLIB ${CASS_USE_ZLIB})
+
+# Generate the driver_config.hpp file
+configure_file(
+ ${CASS_ROOT_DIR}/driver_config.hpp.in
+ ${CMAKE_CURRENT_BINARY_DIR}/driver_config.hpp)
+
+
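+# Combine the driver sources with the third-party object libraries built above.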
+add_library(cassandra
+ ${SOURCES}
+ $<TARGET_OBJECTS:curl_hostcheck>
+ $<TARGET_OBJECTS:hdr_histogram>
+ $<TARGET_OBJECTS:http-parser>
+ $<TARGET_OBJECTS:minizip>)
+
+target_link_libraries(cassandra zlib)
+add_library(cassandra_static ALIAS cassandra)
+target_include_directories(cassandra PRIVATE ${CMAKE_CURRENT_BINARY_DIR} ${INCLUDE_DIRS})
+target_compile_definitions(cassandra PRIVATE CASS_BUILDING)
+
+target_link_libraries(cassandra uv)
+
+if(CASS_USE_OPENSSL)
+ target_link_libraries(cassandra ssl)
+endif()
diff --git a/contrib/cctz-cmake/CMakeLists.txt b/contrib/cctz-cmake/CMakeLists.txt
index d6697fd5d78..f0f5c103631 100644
--- a/contrib/cctz-cmake/CMakeLists.txt
+++ b/contrib/cctz-cmake/CMakeLists.txt
@@ -1,4 +1,4 @@
-option (USE_INTERNAL_CCTZ_LIBRARY "Use internal cctz library" ${NOT_UNBUNDLED})
+option (USE_INTERNAL_CCTZ_LIBRARY "Use internal cctz library" ON)
if (NOT USE_INTERNAL_CCTZ_LIBRARY)
find_library (LIBRARY_CCTZ cctz)
@@ -82,7 +82,7 @@ if (NOT EXTERNAL_CCTZ_LIBRARY_FOUND OR NOT EXTERNAL_CCTZ_LIBRARY_WORKS)
# each file in that dir (except *.tab and localtime) stores the info about a timezone
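+    # LC_ALL=C pins the sort order regardless of locale, keeping the embedded timezone list reproducible.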
execute_process(COMMAND
- bash -c "cd ${TZDIR} && find * -type f -and ! -name '*.tab' -and ! -name 'localtime' | sort | paste -sd ';' -"
+ bash -c "cd ${TZDIR} && find * -type f -and ! -name '*.tab' -and ! -name 'localtime' | LC_ALL=C sort | paste -sd ';' -"
OUTPUT_STRIP_TRAILING_WHITESPACE
OUTPUT_VARIABLE TIMEZONES)
diff --git a/contrib/fast_float-cmake/CMakeLists.txt b/contrib/fast_float-cmake/CMakeLists.txt
new file mode 100644
index 00000000000..cd945f79a20
--- /dev/null
+++ b/contrib/fast_float-cmake/CMakeLists.txt
@@ -0,0 +1,2 @@
+add_library(fast_float INTERFACE)
+target_include_directories(fast_float INTERFACE "${ClickHouse_SOURCE_DIR}/contrib/fast_float/include/")
diff --git a/contrib/gcem b/contrib/gcem
deleted file mode 160000
index 8d4f1b5d76e..00000000000
--- a/contrib/gcem
+++ /dev/null
@@ -1 +0,0 @@
-Subproject commit 8d4f1b5d76ea8f6ff12f3f4f34cda45424556b00
diff --git a/contrib/googletest-cmake/CMakeLists.txt b/contrib/googletest-cmake/CMakeLists.txt
new file mode 100644
index 00000000000..ec7ac91c471
--- /dev/null
+++ b/contrib/googletest-cmake/CMakeLists.txt
@@ -0,0 +1,11 @@
+set (SRC_DIR "${ClickHouse_SOURCE_DIR}/contrib/googletest/googletest")
+
+add_library(gtest "${SRC_DIR}/src/gtest-all.cc")
+set_target_properties(gtest PROPERTIES VERSION "1.0.0")
+target_compile_definitions (gtest INTERFACE GTEST_HAS_POSIX_RE=0)
+target_include_directories(gtest SYSTEM PUBLIC "${SRC_DIR}/include")
+target_include_directories(gtest PRIVATE "${SRC_DIR}")
+
+add_library(gtest_main "${SRC_DIR}/src/gtest_main.cc")
+set_target_properties(gtest_main PROPERTIES VERSION "1.0.0")
+target_link_libraries(gtest_main PUBLIC gtest)
diff --git a/contrib/hyperscan-cmake/CMakeLists.txt b/contrib/hyperscan-cmake/CMakeLists.txt
index 6a364da126d..248551d0b0c 100644
--- a/contrib/hyperscan-cmake/CMakeLists.txt
+++ b/contrib/hyperscan-cmake/CMakeLists.txt
@@ -17,7 +17,7 @@ if (NOT ENABLE_HYPERSCAN)
return()
endif()
-option (USE_INTERNAL_HYPERSCAN_LIBRARY "Use internal hyperscan library" ${NOT_UNBUNDLED})
+option (USE_INTERNAL_HYPERSCAN_LIBRARY "Use internal hyperscan library" ON)
if (NOT USE_INTERNAL_HYPERSCAN_LIBRARY)
find_library (LIBRARY_HYPERSCAN hs)
diff --git a/contrib/jemalloc-cmake/CMakeLists.txt b/contrib/jemalloc-cmake/CMakeLists.txt
index 30dd3baa55b..fd52ce4a4f3 100644
--- a/contrib/jemalloc-cmake/CMakeLists.txt
+++ b/contrib/jemalloc-cmake/CMakeLists.txt
@@ -1,5 +1,5 @@
if (SANITIZE OR NOT (
- ((OS_LINUX OR OS_FREEBSD) AND (ARCH_AMD64 OR ARCH_ARM OR ARCH_PPC64LE)) OR
+ ((OS_LINUX OR OS_FREEBSD) AND (ARCH_AMD64 OR ARCH_ARM OR ARCH_PPC64LE OR ARCH_RISCV64)) OR
(OS_DARWIN AND (CMAKE_BUILD_TYPE STREQUAL "RelWithDebInfo" OR CMAKE_BUILD_TYPE STREQUAL "Debug"))
))
if (ENABLE_JEMALLOC)
@@ -112,6 +112,8 @@ elseif (ARCH_ARM)
set(JEMALLOC_INCLUDE_PREFIX "${JEMALLOC_INCLUDE_PREFIX}_aarch64")
elseif (ARCH_PPC64LE)
set(JEMALLOC_INCLUDE_PREFIX "${JEMALLOC_INCLUDE_PREFIX}_ppc64le")
+elseif (ARCH_RISCV64)
+ set(JEMALLOC_INCLUDE_PREFIX "${JEMALLOC_INCLUDE_PREFIX}_riscv64")
else ()
message (FATAL_ERROR "internal jemalloc: This arch is not supported")
endif ()
diff --git a/contrib/jemalloc-cmake/include_linux_riscv64/README b/contrib/jemalloc-cmake/include_linux_riscv64/README
new file mode 100644
index 00000000000..01b65655c55
--- /dev/null
+++ b/contrib/jemalloc-cmake/include_linux_riscv64/README
@@ -0,0 +1,8 @@
+Here are pre-generated files from jemalloc on Linux RISC-V.
+You can obtain these files by running ./autogen.sh inside the jemalloc source directory.
+
+Added #define _GNU_SOURCE
+Added JEMALLOC_OVERRIDE___POSIX_MEMALIGN because why not.
+Removed JEMALLOC_HAVE_ATTR_FORMAT_GNU_PRINTF because it's non-standard.
+Removed JEMALLOC_PURGE_MADVISE_FREE because it's available only since Linux 4.5.
+Added JEMALLOC_CONFIG_MALLOC_CONF substitution
diff --git a/contrib/jemalloc-cmake/include_linux_riscv64/jemalloc/internal/jemalloc_internal_defs.h.in b/contrib/jemalloc-cmake/include_linux_riscv64/jemalloc/internal/jemalloc_internal_defs.h.in
new file mode 100644
index 00000000000..5e0135cc0d0
--- /dev/null
+++ b/contrib/jemalloc-cmake/include_linux_riscv64/jemalloc/internal/jemalloc_internal_defs.h.in
@@ -0,0 +1,367 @@
+/* include/jemalloc/internal/jemalloc_internal_defs.h. Generated from jemalloc_internal_defs.h.in by configure. */
+#ifndef JEMALLOC_INTERNAL_DEFS_H_
+#define JEMALLOC_INTERNAL_DEFS_H_
+/*
+ * If JEMALLOC_PREFIX is defined via --with-jemalloc-prefix, it will cause all
+ * public APIs to be prefixed. This makes it possible, with some care, to use
+ * multiple allocators simultaneously.
+ */
+/* #undef JEMALLOC_PREFIX */
+/* #undef JEMALLOC_CPREFIX */
+
+/*
+ * Define overrides for non-standard allocator-related functions if they are
+ * present on the system.
+ */
+#define JEMALLOC_OVERRIDE___LIBC_CALLOC
+#define JEMALLOC_OVERRIDE___LIBC_FREE
+#define JEMALLOC_OVERRIDE___LIBC_MALLOC
+#define JEMALLOC_OVERRIDE___LIBC_MEMALIGN
+#define JEMALLOC_OVERRIDE___LIBC_REALLOC
+#define JEMALLOC_OVERRIDE___LIBC_VALLOC
+/* #undef JEMALLOC_OVERRIDE___POSIX_MEMALIGN */
+
+/*
+ * JEMALLOC_PRIVATE_NAMESPACE is used as a prefix for all library-private APIs.
+ * For shared libraries, symbol visibility mechanisms prevent these symbols
+ * from being exported, but for static libraries, naming collisions are a real
+ * possibility.
+ */
+#define JEMALLOC_PRIVATE_NAMESPACE je_
+
+/*
+ * Hyper-threaded CPUs may need a special instruction inside spin loops in
+ * order to yield to another virtual CPU.
+ */
+#define CPU_SPINWAIT
+/* 1 if CPU_SPINWAIT is defined, 0 otherwise. */
+#define HAVE_CPU_SPINWAIT 0
+
+/*
+ * Number of significant bits in virtual addresses. This may be less than the
+ * total number of bits in a pointer, e.g. on x64, for which the uppermost 16
+ * bits are the same as bit 47.
+ */
+#define LG_VADDR 48
+
+/* Defined if C11 atomics are available. */
+#define JEMALLOC_C11_ATOMICS 1
+
+/* Defined if GCC __atomic atomics are available. */
+#define JEMALLOC_GCC_ATOMIC_ATOMICS 1
+/* and the 8-bit variant support. */
+#define JEMALLOC_GCC_U8_ATOMIC_ATOMICS 1
+
+/* Defined if GCC __sync atomics are available. */
+#define JEMALLOC_GCC_SYNC_ATOMICS 1
+/* and the 8-bit variant support. */
+#define JEMALLOC_GCC_U8_SYNC_ATOMICS 1
+
+/*
+ * Defined if __builtin_clz() and __builtin_clzl() are available.
+ */
+#define JEMALLOC_HAVE_BUILTIN_CLZ
+
+/*
+ * Defined if os_unfair_lock_*() functions are available, as provided by Darwin.
+ */
+/* #undef JEMALLOC_OS_UNFAIR_LOCK */
+
+/* Defined if syscall(2) is usable. */
+#define JEMALLOC_USE_SYSCALL
+
+/*
+ * Defined if secure_getenv(3) is available.
+ */
+// #define JEMALLOC_HAVE_SECURE_GETENV
+
+/*
+ * Defined if issetugid(2) is available.
+ */
+/* #undef JEMALLOC_HAVE_ISSETUGID */
+
+/* Defined if pthread_atfork(3) is available. */
+#define JEMALLOC_HAVE_PTHREAD_ATFORK
+
+/* Defined if pthread_setname_np(3) is available. */
+#define JEMALLOC_HAVE_PTHREAD_SETNAME_NP
+
+/*
+ * Defined if clock_gettime(CLOCK_MONOTONIC_COARSE, ...) is available.
+ */
+#define JEMALLOC_HAVE_CLOCK_MONOTONIC_COARSE 1
+
+/*
+ * Defined if clock_gettime(CLOCK_MONOTONIC, ...) is available.
+ */
+#define JEMALLOC_HAVE_CLOCK_MONOTONIC 1
+
+/*
+ * Defined if mach_absolute_time() is available.
+ */
+/* #undef JEMALLOC_HAVE_MACH_ABSOLUTE_TIME */
+
+/*
+ * Defined if _malloc_thread_cleanup() exists. At least in the case of
+ * FreeBSD, pthread_key_create() allocates, which if used during malloc
+ * bootstrapping will cause recursion into the pthreads library. Therefore, if
+ * _malloc_thread_cleanup() exists, use it as the basis for thread cleanup in
+ * malloc_tsd.
+ */
+/* #undef JEMALLOC_MALLOC_THREAD_CLEANUP */
+
+/*
+ * Defined if threaded initialization is known to be safe on this platform.
+ * Among other things, it must be possible to initialize a mutex without
+ * triggering allocation in order for threaded allocation to be safe.
+ */
+#define JEMALLOC_THREADED_INIT
+
+/*
+ * Defined if the pthreads implementation defines
+ * _pthread_mutex_init_calloc_cb(), in which case the function is used in order
+ * to avoid recursive allocation during mutex initialization.
+ */
+/* #undef JEMALLOC_MUTEX_INIT_CB */
+
+/* Non-empty if the tls_model attribute is supported. */
+#define JEMALLOC_TLS_MODEL __attribute__((tls_model("initial-exec")))
+
+/*
+ * JEMALLOC_DEBUG enables assertions and other sanity checks, and disables
+ * inline functions.
+ */
+/* #undef JEMALLOC_DEBUG */
+
+/* JEMALLOC_STATS enables statistics calculation. */
+#define JEMALLOC_STATS
+
+/* JEMALLOC_EXPERIMENTAL_SMALLOCX_API enables experimental smallocx API. */
+/* #undef JEMALLOC_EXPERIMENTAL_SMALLOCX_API */
+
+/* JEMALLOC_PROF enables allocation profiling. */
+/* #undef JEMALLOC_PROF */
+
+/* Use libunwind for profile backtracing if defined. */
+/* #undef JEMALLOC_PROF_LIBUNWIND */
+
+/* Use libgcc for profile backtracing if defined. */
+/* #undef JEMALLOC_PROF_LIBGCC */
+
+/* Use gcc intrinsics for profile backtracing if defined. */
+/* #undef JEMALLOC_PROF_GCC */
+
+/*
+ * JEMALLOC_DSS enables use of sbrk(2) to allocate extents from the data storage
+ * segment (DSS).
+ */
+#define JEMALLOC_DSS
+
+/* Support memory filling (junk/zero). */
+#define JEMALLOC_FILL
+
+/* Support utrace(2)-based tracing. */
+/* #undef JEMALLOC_UTRACE */
+
+/* Support optional abort() on OOM. */
+/* #undef JEMALLOC_XMALLOC */
+
+/* Support lazy locking (avoid locking unless a second thread is launched). */
+/* #undef JEMALLOC_LAZY_LOCK */
+
+/*
+ * Minimum allocation alignment is 2^LG_QUANTUM bytes (ignoring tiny size
+ * classes).
+ */
+/* #undef LG_QUANTUM */
+
+/* One page is 2^LG_PAGE bytes. */
+#define LG_PAGE 16
+
+/*
+ * One huge page is 2^LG_HUGEPAGE bytes. Note that this is defined even if the
+ * system does not explicitly support huge pages; system calls that require
+ * explicit huge page support are separately configured.
+ */
+#define LG_HUGEPAGE 29
+
+/*
+ * If defined, adjacent virtual memory mappings with identical attributes
+ * automatically coalesce, and they fragment when changes are made to subranges.
+ * This is the normal order of things for mmap()/munmap(), but on Windows
+ * VirtualAlloc()/VirtualFree() operations must be precisely matched, i.e.
+ * mappings do *not* coalesce/fragment.
+ */
+#define JEMALLOC_MAPS_COALESCE
+
+/*
+ * If defined, retain memory for later reuse by default rather than using e.g.
+ * munmap() to unmap freed extents. This is enabled on 64-bit Linux because
+ * common sequences of mmap()/munmap() calls will cause virtual memory map
+ * holes.
+ */
+#define JEMALLOC_RETAIN
+
+/* TLS is used to map arenas and magazine caches to threads. */
+#define JEMALLOC_TLS
+
+/*
+ * Used to mark unreachable code to quiet "end of non-void" compiler warnings.
+ * Don't use this directly; instead use unreachable() from util.h
+ */
+#define JEMALLOC_INTERNAL_UNREACHABLE __builtin_unreachable
+
+/*
+ * ffs*() functions to use for bitmapping. Don't use these directly; instead,
+ * use ffs_*() from util.h.
+ */
+#define JEMALLOC_INTERNAL_FFSLL __builtin_ffsll
+#define JEMALLOC_INTERNAL_FFSL __builtin_ffsl
+#define JEMALLOC_INTERNAL_FFS __builtin_ffs
+
+/*
+ * popcount*() functions to use for bitmapping.
+ */
+#define JEMALLOC_INTERNAL_POPCOUNTL __builtin_popcountl
+#define JEMALLOC_INTERNAL_POPCOUNT __builtin_popcount
+
+/*
+ * If defined, explicitly attempt to more uniformly distribute large allocation
+ * pointer alignments across all cache indices.
+ */
+#define JEMALLOC_CACHE_OBLIVIOUS
+
+/*
+ * If defined, enable logging facilities. We make this a configure option to
+ * avoid taking extra branches everywhere.
+ */
+/* #undef JEMALLOC_LOG */
+
+/*
+ * If defined, use readlinkat() (instead of readlink()) to follow
+ * /etc/malloc_conf.
+ */
+/* #undef JEMALLOC_READLINKAT */
+
+/*
+ * Darwin (OS X) uses zones to work around Mach-O symbol override shortcomings.
+ */
+/* #undef JEMALLOC_ZONE */
+
+/*
+ * Methods for determining whether the OS overcommits.
+ * JEMALLOC_PROC_SYS_VM_OVERCOMMIT_MEMORY: Linux's
+ * /proc/sys/vm.overcommit_memory file.
+ * JEMALLOC_SYSCTL_VM_OVERCOMMIT: FreeBSD's vm.overcommit sysctl.
+ */
+/* #undef JEMALLOC_SYSCTL_VM_OVERCOMMIT */
+#define JEMALLOC_PROC_SYS_VM_OVERCOMMIT_MEMORY
+
+/* Defined if madvise(2) is available. */
+#define JEMALLOC_HAVE_MADVISE
+
+/*
+ * Defined if transparent huge pages are supported via the MADV_[NO]HUGEPAGE
+ * arguments to madvise(2).
+ */
+#define JEMALLOC_HAVE_MADVISE_HUGE
+
+/*
+ * Methods for purging unused pages differ between operating systems.
+ *
+ * madvise(..., MADV_FREE) : This marks pages as being unused, such that they
+ * will be discarded rather than swapped out.
+ * madvise(..., MADV_DONTNEED) : If JEMALLOC_PURGE_MADVISE_DONTNEED_ZEROS is
+ * defined, this immediately discards pages,
+ * such that new pages will be demand-zeroed if
+ * the address region is later touched;
+ * otherwise this behaves similarly to
+ * MADV_FREE, though typically with higher
+ * system overhead.
+ */
+#define JEMALLOC_PURGE_MADVISE_FREE
+#define JEMALLOC_PURGE_MADVISE_DONTNEED
+#define JEMALLOC_PURGE_MADVISE_DONTNEED_ZEROS
+
+/* Defined if madvise(2) is available but MADV_FREE is not (x86 Linux only). */
+/* #undef JEMALLOC_DEFINE_MADVISE_FREE */
+
+/*
+ * Defined if MADV_DO[NT]DUMP is supported as an argument to madvise.
+ */
+#define JEMALLOC_MADVISE_DONTDUMP
+
+/*
+ * Defined if transparent huge pages (THPs) are supported via the
+ * MADV_[NO]HUGEPAGE arguments to madvise(2), and THP support is enabled.
+ */
+/* #undef JEMALLOC_THP */
+
+/* Define if operating system has alloca.h header. */
+#define JEMALLOC_HAS_ALLOCA_H 1
+
+/* C99 restrict keyword supported. */
+#define JEMALLOC_HAS_RESTRICT 1
+
+/* For use by hash code. */
+/* #undef JEMALLOC_BIG_ENDIAN */
+
+/* sizeof(int) == 2^LG_SIZEOF_INT. */
+#define LG_SIZEOF_INT 2
+
+/* sizeof(long) == 2^LG_SIZEOF_LONG. */
+#define LG_SIZEOF_LONG 3
+
+/* sizeof(long long) == 2^LG_SIZEOF_LONG_LONG. */
+#define LG_SIZEOF_LONG_LONG 3
+
+/* sizeof(intmax_t) == 2^LG_SIZEOF_INTMAX_T. */
+#define LG_SIZEOF_INTMAX_T 3
+
+/* glibc malloc hooks (__malloc_hook, __realloc_hook, __free_hook). */
+#define JEMALLOC_GLIBC_MALLOC_HOOK
+
+/* glibc memalign hook. */
+#define JEMALLOC_GLIBC_MEMALIGN_HOOK
+
+/* pthread support */
+#define JEMALLOC_HAVE_PTHREAD
+
+/* dlsym() support */
+#define JEMALLOC_HAVE_DLSYM
+
+/* Adaptive mutex support in pthreads. */
+#define JEMALLOC_HAVE_PTHREAD_MUTEX_ADAPTIVE_NP
+
+/* GNU specific sched_getcpu support */
+#define JEMALLOC_HAVE_SCHED_GETCPU
+
+/* GNU specific sched_setaffinity support */
+#define JEMALLOC_HAVE_SCHED_SETAFFINITY
+
+/*
+ * If defined, all the features necessary for background threads are present.
+ */
+#define JEMALLOC_BACKGROUND_THREAD 1
+
+/*
+ * If defined, jemalloc symbols are not exported (doesn't work when
+ * JEMALLOC_PREFIX is not defined).
+ */
+/* #undef JEMALLOC_EXPORT */
+
+/* config.malloc_conf options string. */
+#define JEMALLOC_CONFIG_MALLOC_CONF "@JEMALLOC_CONFIG_MALLOC_CONF@"
+
+/* If defined, jemalloc takes the malloc/free/etc. symbol names. */
+#define JEMALLOC_IS_MALLOC 1
+
+/*
+ * Defined if strerror_r returns char * if _GNU_SOURCE is defined.
+ */
+#define JEMALLOC_STRERROR_R_RETURNS_CHAR_WITH_GNU_SOURCE
+
+/* Performs additional safety checks when defined. */
+/* #undef JEMALLOC_OPT_SAFETY_CHECKS */
+
+#endif /* JEMALLOC_INTERNAL_DEFS_H_ */
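+
+/* Build-integration sketch (an assumption, mirroring how the other per-arch
+ * header templates are consumed): the CMake build is expected to substitute
+ * @JEMALLOC_CONFIG_MALLOC_CONF@ via configure_file(), roughly:
+ *
+ *   set (JEMALLOC_CONFIG_MALLOC_CONF "...")  # illustrative value
+ *   configure_file ("${JEMALLOC_INCLUDE_PREFIX}/jemalloc/internal/jemalloc_internal_defs.h.in"
+ *                   "${CMAKE_CURRENT_BINARY_DIR}/jemalloc_internal_defs.h")
+ */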
diff --git a/contrib/krb5-cmake/CMakeLists.txt b/contrib/krb5-cmake/CMakeLists.txt
index d6c3c23b14e..f7318a5bf8a 100644
--- a/contrib/krb5-cmake/CMakeLists.txt
+++ b/contrib/krb5-cmake/CMakeLists.txt
@@ -500,7 +500,6 @@ function(preprocess_et out_var)
COMMAND perl "${KRB5_SOURCE_DIR}/util/et/compile_et" -d "${KRB5_SOURCE_DIR}/util/et" ${in_f}
DEPENDS ${in_f} "${KRB5_SOURCE_DIR}/util/et/compile_et"
WORKING_DIRECTORY ${ET_PATH}
- COMMENT "Creating preprocessed file ${F_C}"
VERBATIM
)
list(APPEND result ${F_C})
@@ -526,7 +525,6 @@ add_custom_command(
add_custom_target(
ERROR_MAP_H
DEPENDS "${KRB5_SOURCE_DIR}/lib/gssapi/krb5/error_map.h"
- COMMENT "generating error_map.h"
VERBATIM
)
@@ -539,14 +537,12 @@ add_custom_command(
add_custom_target(
ERRMAP_H
DEPENDS "${KRB5_SOURCE_DIR}/lib/gssapi/generic/errmap.h"
- COMMENT "generating errmap.h"
VERBATIM
)
add_custom_target(
KRB_5_H
DEPENDS "${CMAKE_CURRENT_BINARY_DIR}/include/krb5/krb5.h"
- COMMENT "generating krb5.h"
VERBATIM
)
diff --git a/contrib/libgsasl-cmake/CMakeLists.txt b/contrib/libgsasl-cmake/CMakeLists.txt
new file mode 100644
index 00000000000..102ef12b9f5
--- /dev/null
+++ b/contrib/libgsasl-cmake/CMakeLists.txt
@@ -0,0 +1,107 @@
+set (SRC_DIR "${ClickHouse_SOURCE_DIR}/contrib/libgsasl")
+
+set(SRCS
+ ${SRC_DIR}/gl/gc-gnulib.c
+ ${SRC_DIR}/gl/printf-parse.c
+ ${SRC_DIR}/gl/c-ctype.c
+ ${SRC_DIR}/gl/float.c
+ ${SRC_DIR}/gl/printf-args.c
+ ${SRC_DIR}/gl/hmac-sha1.c
+ ${SRC_DIR}/gl/itold.c
+ ${SRC_DIR}/gl/hmac-md5.c
+ ${SRC_DIR}/gl/gc-pbkdf2-sha1.c
+ ${SRC_DIR}/gl/md5.c
+ ${SRC_DIR}/gl/base64.c
+ ${SRC_DIR}/gl/memxor.c
+ ${SRC_DIR}/gl/sha1.c
+ ${SRC_DIR}/openid20/client.c
+ ${SRC_DIR}/openid20/mechinfo.c
+ ${SRC_DIR}/openid20/server.c
+ ${SRC_DIR}/anonymous/client.c
+ ${SRC_DIR}/anonymous/mechinfo.c
+ ${SRC_DIR}/anonymous/server.c
+ ${SRC_DIR}/saml20/client.c
+ ${SRC_DIR}/saml20/mechinfo.c
+ ${SRC_DIR}/saml20/server.c
+ ${SRC_DIR}/scram/parser.c
+ ${SRC_DIR}/scram/printer.c
+ ${SRC_DIR}/scram/tokens.c
+ ${SRC_DIR}/scram/client.c
+ ${SRC_DIR}/scram/mechinfo.c
+ ${SRC_DIR}/scram/server.c
+ ${SRC_DIR}/scram/validate.c
+ ${SRC_DIR}/src/free.c
+ ${SRC_DIR}/src/supportp.c
+ ${SRC_DIR}/src/init.c
+ ${SRC_DIR}/src/mechtools.c
+ ${SRC_DIR}/src/error.c
+ ${SRC_DIR}/src/property.c
+ ${SRC_DIR}/src/done.c
+ ${SRC_DIR}/src/callback.c
+ ${SRC_DIR}/src/xstart.c
+ ${SRC_DIR}/src/xfinish.c
+ ${SRC_DIR}/src/version.c
+ ${SRC_DIR}/src/xstep.c
+ ${SRC_DIR}/src/mechname.c
+ ${SRC_DIR}/src/xcode.c
+ ${SRC_DIR}/src/crypto.c
+ ${SRC_DIR}/src/doxygen.c
+ ${SRC_DIR}/src/suggest.c
+ ${SRC_DIR}/src/saslprep.c
+ ${SRC_DIR}/src/listmech.c
+ ${SRC_DIR}/src/register.c
+ ${SRC_DIR}/src/base64.c
+ ${SRC_DIR}/src/md5pwd.c
+ ${SRC_DIR}/external/client.c
+ ${SRC_DIR}/external/mechinfo.c
+ ${SRC_DIR}/external/server.c
+ ${SRC_DIR}/securid/client.c
+ ${SRC_DIR}/securid/mechinfo.c
+ ${SRC_DIR}/securid/server.c
+ ${SRC_DIR}/plain/client.c
+ ${SRC_DIR}/plain/mechinfo.c
+ ${SRC_DIR}/plain/server.c
+ ${SRC_DIR}/cram-md5/client.c
+ ${SRC_DIR}/cram-md5/challenge.c
+ ${SRC_DIR}/cram-md5/mechinfo.c
+ ${SRC_DIR}/cram-md5/server.c
+ ${SRC_DIR}/cram-md5/digest.c
+ ${SRC_DIR}/digest-md5/client.c
+ ${SRC_DIR}/digest-md5/digesthmac.c
+ ${SRC_DIR}/digest-md5/free.c
+ ${SRC_DIR}/digest-md5/getsubopt.c
+ ${SRC_DIR}/digest-md5/mechinfo.c
+ ${SRC_DIR}/digest-md5/nonascii.c
+ ${SRC_DIR}/digest-md5/parser.c
+ ${SRC_DIR}/digest-md5/printer.c
+ ${SRC_DIR}/digest-md5/qop.c
+ ${SRC_DIR}/digest-md5/server.c
+ ${SRC_DIR}/digest-md5/session.c
+ ${SRC_DIR}/digest-md5/test-parser.c
+ ${SRC_DIR}/digest-md5/validate.c
+ ${SRC_DIR}/login/client.c
+ ${SRC_DIR}/login/mechinfo.c
+ ${SRC_DIR}/login/server.c
+)
+
+if (USE_KRB5)
+ set(SRCS ${SRCS}
+ ${SRC_DIR}/gssapi/client.c
+ ${SRC_DIR}/gssapi/mechinfo.c
+ ${SRC_DIR}/gssapi/server.c)
+endif()
+
+add_library(gsasl ${SRCS})
+
+target_include_directories(gsasl PUBLIC ${SRC_DIR})
+target_include_directories(gsasl PUBLIC ${SRC_DIR}/gl)
+target_include_directories(gsasl PUBLIC ${SRC_DIR}/src)
+target_include_directories(gsasl PUBLIC ${SRC_DIR}/digest-md5)
+target_include_directories(gsasl PRIVATE "${ClickHouse_SOURCE_DIR}/contrib/libgsasl-cmake/linux_x86_64/include")
+
+target_compile_definitions (gsasl PRIVATE HAVE_CONFIG_H=1)
+
+if (USE_KRB5)
+ target_link_libraries(gsasl PUBLIC ${KRB5_LIBRARY})
+ target_compile_definitions (gsasl PRIVATE HAVE_GSSAPI_H=1 USE_GSSAPI=1)
+endif()
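+
+# Consumer sketch (hypothetical target, not part of this change). The PUBLIC
+# include directories above propagate to dependents, so linking is enough:
+#   target_link_libraries(my_target PRIVATE gsasl)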
diff --git a/contrib/libgsasl-cmake/linux_x86_64/include/config.h b/contrib/libgsasl-cmake/linux_x86_64/include/config.h
new file mode 100644
index 00000000000..0f5c1d359ea
--- /dev/null
+++ b/contrib/libgsasl-cmake/linux_x86_64/include/config.h
@@ -0,0 +1,1091 @@
+/* config.h. Generated from config.h.in by configure. */
+/* config.h.in. Generated from configure.ac by autoheader. */
+
+/* Define if building universal (internal helper macro) */
+/* #undef AC_APPLE_UNIVERSAL_BUILD */
+
+/* Define to the number of bits in type 'ptrdiff_t'. */
+/* #undef BITSIZEOF_PTRDIFF_T */
+
+/* Define to the number of bits in type 'sig_atomic_t'. */
+/* #undef BITSIZEOF_SIG_ATOMIC_T */
+
+/* Define to the number of bits in type 'size_t'. */
+/* #undef BITSIZEOF_SIZE_T */
+
+/* Define to the number of bits in type 'wchar_t'. */
+/* #undef BITSIZEOF_WCHAR_T */
+
+/* Define to the number of bits in type 'wint_t'. */
+/* #undef BITSIZEOF_WINT_T */
+
+/* Define to one of '_getb67', 'GETB67', 'getb67' for Cray-2 and Cray-YMP
+ systems. This function is required for 'alloca.c' support on those systems.
+ */
+/* #undef CRAY_STACKSEG_END */
+
+/* Define to 1 if using 'alloca.c'. */
+/* #undef C_ALLOCA */
+
+/* Define as the bit index in the word where to find bit 0 of the exponent of
+ 'double'. */
+#define DBL_EXPBIT0_BIT 20
+
+/* Define as the word index where to find the exponent of 'double'. */
+#define DBL_EXPBIT0_WORD 1
+
+/* Define to 1 if translation of program messages to the user's native
+ language is requested. */
+#ifdef __APPLE__
+#define ENABLE_NLS 0
+#else
+#define ENABLE_NLS 1
+#endif
+
+/* Define to a C preprocessor expression that evaluates to 1 or 0, depending
+ whether the gnulib module gc-hmac-md5 shall be considered present. */
+#define GNULIB_GC_HMAC_MD5 1
+
+/* Define to a C preprocessor expression that evaluates to 1 or 0, depending
+ whether the gnulib module gc-hmac-sha1 shall be considered present. */
+#define GNULIB_GC_HMAC_SHA1 1
+
+/* Define to a C preprocessor expression that evaluates to 1 or 0, depending
+ whether the gnulib module gc-md5 shall be considered present. */
+#define GNULIB_GC_MD5 1
+
+/* Define to a C preprocessor expression that evaluates to 1 or 0, depending
+ whether the gnulib module gc-random shall be considered present. */
+#define GNULIB_GC_RANDOM 1
+
+/* Define to a C preprocessor expression that evaluates to 1 or 0, depending
+ whether the gnulib module gc-sha1 shall be considered present. */
+#define GNULIB_GC_SHA1 1
+
+/* Define to 1 when the gnulib module fdopen should be tested. */
+#define GNULIB_TEST_FDOPEN 1
+
+/* Define to 1 when the gnulib module getdelim should be tested. */
+#define GNULIB_TEST_GETDELIM 1
+
+/* Define to 1 when the gnulib module getline should be tested. */
+#define GNULIB_TEST_GETLINE 1
+
+/* Define to 1 when the gnulib module getpagesize should be tested. */
+#define GNULIB_TEST_GETPAGESIZE 1
+
+/* Define to 1 when the gnulib module memchr should be tested. */
+#define GNULIB_TEST_MEMCHR 1
+
+/* Define to 1 when the gnulib module memmem should be tested. */
+#define GNULIB_TEST_MEMMEM 1
+
+/* Define to 1 when the gnulib module realloc-posix should be tested. */
+#define GNULIB_TEST_REALLOC_POSIX 1
+
+/* Define to 1 when the gnulib module strndup should be tested. */
+#define GNULIB_TEST_STRNDUP 1
+
+/* Define to 1 when the gnulib module strnlen should be tested. */
+#define GNULIB_TEST_STRNLEN 1
+
+/* Define to 1 when the gnulib module strverscmp should be tested. */
+#define GNULIB_TEST_STRVERSCMP 1
+
+/* Define to 1 when the gnulib module vasprintf should be tested. */
+#define GNULIB_TEST_VASPRINTF 1
+
+/* Define to 1 if you don't want backwards compatibility code. */
+#define GSASL_NO_OBSOLETE 1
+
+/* Define to 1 if you have 'alloca' after including <alloca.h>, a header that
+   may be supplied by this distribution. */
+#define HAVE_ALLOCA 1
+
+/* Define to 1 if you have <alloca.h> and it should be used (not on Ultrix).
+ */
+#define HAVE_ALLOCA_H 1
+
+/* Define to 1 if you have the <bp-sym.h> header file. */
+/* #undef HAVE_BP_SYM_H */
+
+/* Define to 1 if you have the MacOS X function CFLocaleCopyCurrent in the
+ CoreFoundation framework. */
+/* #undef HAVE_CFLOCALECOPYCURRENT */
+
+/* Define to 1 if you have the MacOS X function CFPreferencesCopyAppValue in
+ the CoreFoundation framework. */
+/* #undef HAVE_CFPREFERENCESCOPYAPPVALUE */
+
+/* Define if the GNU dcgettext() function is already present or preinstalled.
+ */
+#define HAVE_DCGETTEXT 1
+
+/* Define to 1 if you have the declaration of `alarm', and to 0 if you don't.
+ */
+#define HAVE_DECL_ALARM 1
+
+/* Define to 1 if you have the declaration of `getc_unlocked', and to 0 if you
+ don't. */
+/* #undef HAVE_DECL_GETC_UNLOCKED */
+
+/* Define to 1 if you have the declaration of `getdelim', and to 0 if you
+ don't. */
+#define HAVE_DECL_GETDELIM 1
+
+/* Define to 1 if you have the declaration of `getline', and to 0 if you
+ don't. */
+#define HAVE_DECL_GETLINE 1
+
+/* Define to 1 if you have the declaration of `memmem', and to 0 if you don't.
+ */
+#define HAVE_DECL_MEMMEM 1
+
+/* Define to 1 if you have the declaration of `strndup', and to 0 if you
+ don't. */
+#define HAVE_DECL_STRNDUP 1
+
+/* Define to 1 if you have the declaration of `strnlen', and to 0 if you
+ don't. */
+#define HAVE_DECL_STRNLEN 1
+
+/* Define to 1 if you have the declaration of `_snprintf', and to 0 if you
+ don't. */
+#define HAVE_DECL__SNPRINTF 0
+
+/* Define to 1 if you have the <dlfcn.h> header file. */
+#define HAVE_DLFCN_H 1
+
+/* Define to 1 if you have the <features.h> header file. */
+#ifdef __APPLE__
+#define HAVE_FEATURES_H 0
+#else
+#define HAVE_FEATURES_H 1
+#endif
+
+/* Define to 1 if you have the `flockfile' function. */
+/* #undef HAVE_FLOCKFILE */
+
+/* Define to 1 if you have the `funlockfile' function. */
+/* #undef HAVE_FUNLOCKFILE */
+
+/* Define to 1 if you have the `getdelim' function. */
+#define HAVE_GETDELIM 1
+
+/* Define to 1 if you have the `getpagesize' function. */
+#define HAVE_GETPAGESIZE 1
+
+/* Define if the GNU gettext() function is already present or preinstalled. */
+#define HAVE_GETTEXT 1
+
+/* Define to 1 if you have the <gssapi/gssapi.h> header file. */
+/* #undef HAVE_GSSAPI_GSSAPI_H */
+
+/* Define to 1 if you have the `GSS_C_NT_HOSTBASED_SERVICE' function. */
+/* #undef HAVE_GSS_C_NT_HOSTBASED_SERVICE */
+
+/* Define to 1 if you have the `gss_decapsulate_token' function. */
+/* #undef HAVE_GSS_DECAPSULATE_TOKEN */
+
+/* Define to 1 if you have the `gss_encapsulate_token' function. */
+/* #undef HAVE_GSS_ENCAPSULATE_TOKEN */
+
+/* Define to 1 if you have the `gss_inquire_mech_for_saslname' function. */
+/* #undef HAVE_GSS_INQUIRE_MECH_FOR_SASLNAME */
+
+/* Define to 1 if you have the `gss_oid_equal' function. */
+/* #undef HAVE_GSS_OID_EQUAL */
+
+/* Define if you have the iconv() function and it works. */
+/* #undef HAVE_ICONV */
+
+/* Define if you have the 'intmax_t' type in <stdint.h> or <inttypes.h>. */
+#define HAVE_INTMAX_T 1
+
+/* Define to 1 if you have the <inttypes.h> header file. */
+#define HAVE_INTTYPES_H 1
+
+/* Define if <inttypes.h> exists, doesn't clash with <sys/types.h>, and
+   declares uintmax_t. */
+#define HAVE_INTTYPES_H_WITH_UINTMAX 1
+
+/* Define if you have the libgcrypt library. */
+/* #undef HAVE_LIBGCRYPT */
+
+/* Define if you have the libgss library. */
+/* #undef HAVE_LIBGSS */
+
+/* Define if you have the libgssapi32 library. */
+/* #undef HAVE_LIBGSSAPI32 */
+
+/* Define if you have the libidn library. */
+#define HAVE_LIBIDN 0
+
+/* Define if you have the libntlm library. */
+/* #undef HAVE_LIBNTLM */
+
+/* Define if you have the libshishi library. */
+/* #undef HAVE_LIBSHISHI */
+
+/* Define to 1 if the system has the type `long long int'. */
+#define HAVE_LONG_LONG_INT 1
+
+/* Define to 1 if mmap()'s MAP_ANONYMOUS flag is available after including
+   config.h and <sys/mman.h>. */
+#define HAVE_MAP_ANONYMOUS 1
+
+/* Define to 1 if you have the `mbrtowc' function. */
+#define HAVE_MBRTOWC 1
+
+/* Define to 1 if you have the `memmem' function. */
+#define HAVE_MEMMEM 1
+
+/* Define to 1 if you have the <memory.h> header file. */
+#define HAVE_MEMORY_H 1
+
+/* Define to 1 if <limits.h> defines the MIN and MAX macros. */
+/* #undef HAVE_MINMAX_IN_LIMITS_H */
+
+/* Define to 1 if <sys/param.h> defines the MIN and MAX macros. */
+#define HAVE_MINMAX_IN_SYS_PARAM_H 1
+
+/* Define to 1 if you have the `mprotect' function. */
+#define HAVE_MPROTECT 1
+
+/* Define to 1 on MSVC platforms that have the "invalid parameter handler"
+ concept. */
+/* #undef HAVE_MSVC_INVALID_PARAMETER_HANDLER */
+
+/* Define to 1 if you have the <OS.h> header file. */
+/* #undef HAVE_OS_H */
+
+/* Define to 1 if you have the `pr29_8z' function. */
+#define HAVE_PR29_8Z 0
+
+/* Define to 1 if you have the <pr29.h> header file. */
+#define HAVE_PR29_H 0
+
+/* Define to 1 if atoll is declared even after undefining macros. */
+#define HAVE_RAW_DECL_ATOLL 1
+
+/* Define to 1 if btowc is declared even after undefining macros. */
+#define HAVE_RAW_DECL_BTOWC 1
+
+/* Define to 1 if canonicalize_file_name is declared even after undefining
+ macros. */
+#define HAVE_RAW_DECL_CANONICALIZE_FILE_NAME 1
+
+/* Define to 1 if chdir is declared even after undefining macros. */
+#define HAVE_RAW_DECL_CHDIR 1
+
+/* Define to 1 if chown is declared even after undefining macros. */
+#define HAVE_RAW_DECL_CHOWN 1
+
+/* Define to 1 if dprintf is declared even after undefining macros. */
+#define HAVE_RAW_DECL_DPRINTF 1
+
+/* Define to 1 if dup is declared even after undefining macros. */
+#define HAVE_RAW_DECL_DUP 1
+
+/* Define to 1 if dup2 is declared even after undefining macros. */
+#define HAVE_RAW_DECL_DUP2 1
+
+/* Define to 1 if dup3 is declared even after undefining macros. */
+#define HAVE_RAW_DECL_DUP3 1
+
+/* Define to 1 if endusershell is declared even after undefining macros. */
+#define HAVE_RAW_DECL_ENDUSERSHELL 1
+
+/* Define to 1 if environ is declared even after undefining macros. */
+#define HAVE_RAW_DECL_ENVIRON 1
+
+/* Define to 1 if euidaccess is declared even after undefining macros. */
+#define HAVE_RAW_DECL_EUIDACCESS 1
+
+/* Define to 1 if faccessat is declared even after undefining macros. */
+#define HAVE_RAW_DECL_FACCESSAT 1
+
+/* Define to 1 if fchdir is declared even after undefining macros. */
+#define HAVE_RAW_DECL_FCHDIR 1
+
+/* Define to 1 if fchownat is declared even after undefining macros. */
+#define HAVE_RAW_DECL_FCHOWNAT 1
+
+/* Define to 1 if fdatasync is declared even after undefining macros. */
+#define HAVE_RAW_DECL_FDATASYNC 1
+
+/* Define to 1 if ffsl is declared even after undefining macros. */
+#define HAVE_RAW_DECL_FFSL 1
+
+/* Define to 1 if ffsll is declared even after undefining macros. */
+#define HAVE_RAW_DECL_FFSLL 1
+
+/* Define to 1 if fpurge is declared even after undefining macros. */
+/* #undef HAVE_RAW_DECL_FPURGE */
+
+/* Define to 1 if fseeko is declared even after undefining macros. */
+#define HAVE_RAW_DECL_FSEEKO 1
+
+/* Define to 1 if fsync is declared even after undefining macros. */
+#define HAVE_RAW_DECL_FSYNC 1
+
+/* Define to 1 if ftello is declared even after undefining macros. */
+#define HAVE_RAW_DECL_FTELLO 1
+
+/* Define to 1 if ftruncate is declared even after undefining macros. */
+#define HAVE_RAW_DECL_FTRUNCATE 1
+
+/* Define to 1 if getcwd is declared even after undefining macros. */
+#define HAVE_RAW_DECL_GETCWD 1
+
+/* Define to 1 if getdelim is declared even after undefining macros. */
+#define HAVE_RAW_DECL_GETDELIM 1
+
+/* Define to 1 if getdomainname is declared even after undefining macros. */
+#define HAVE_RAW_DECL_GETDOMAINNAME 1
+
+/* Define to 1 if getdtablesize is declared even after undefining macros. */
+#define HAVE_RAW_DECL_GETDTABLESIZE 1
+
+/* Define to 1 if getgroups is declared even after undefining macros. */
+#define HAVE_RAW_DECL_GETGROUPS 1
+
+/* Define to 1 if gethostname is declared even after undefining macros. */
+#define HAVE_RAW_DECL_GETHOSTNAME 1
+
+/* Define to 1 if getline is declared even after undefining macros. */
+#define HAVE_RAW_DECL_GETLINE 1
+
+/* Define to 1 if getloadavg is declared even after undefining macros. */
+#define HAVE_RAW_DECL_GETLOADAVG 1
+
+/* Define to 1 if getlogin is declared even after undefining macros. */
+#define HAVE_RAW_DECL_GETLOGIN 1
+
+/* Define to 1 if getlogin_r is declared even after undefining macros. */
+#define HAVE_RAW_DECL_GETLOGIN_R 1
+
+/* Define to 1 if getpagesize is declared even after undefining macros. */
+#define HAVE_RAW_DECL_GETPAGESIZE 1
+
+/* Define to 1 if gets is declared even after undefining macros. */
+/* #undef HAVE_RAW_DECL_GETS */
+
+/* Define to 1 if getsubopt is declared even after undefining macros. */
+#define HAVE_RAW_DECL_GETSUBOPT 1
+
+/* Define to 1 if getusershell is declared even after undefining macros. */
+#define HAVE_RAW_DECL_GETUSERSHELL 1
+
+/* Define to 1 if grantpt is declared even after undefining macros. */
+#define HAVE_RAW_DECL_GRANTPT 1
+
+/* Define to 1 if group_member is declared even after undefining macros. */
+#define HAVE_RAW_DECL_GROUP_MEMBER 1
+
+/* Define to 1 if imaxabs is declared even after undefining macros. */
+#define HAVE_RAW_DECL_IMAXABS 1
+
+/* Define to 1 if imaxdiv is declared even after undefining macros. */
+#define HAVE_RAW_DECL_IMAXDIV 1
+
+/* Define to 1 if initstate is declared even after undefining macros. */
+#define HAVE_RAW_DECL_INITSTATE 1
+
+/* Define to 1 if initstate_r is declared even after undefining macros. */
+#define HAVE_RAW_DECL_INITSTATE_R 1
+
+/* Define to 1 if isatty is declared even after undefining macros. */
+#define HAVE_RAW_DECL_ISATTY 1
+
+/* Define to 1 if lchown is declared even after undefining macros. */
+#define HAVE_RAW_DECL_LCHOWN 1
+
+/* Define to 1 if link is declared even after undefining macros. */
+#define HAVE_RAW_DECL_LINK 1
+
+/* Define to 1 if linkat is declared even after undefining macros. */
+#define HAVE_RAW_DECL_LINKAT 1
+
+/* Define to 1 if lseek is declared even after undefining macros. */
+#define HAVE_RAW_DECL_LSEEK 1
+
+/* Define to 1 if mbrlen is declared even after undefining macros. */
+#define HAVE_RAW_DECL_MBRLEN 1
+
+/* Define to 1 if mbrtowc is declared even after undefining macros. */
+#define HAVE_RAW_DECL_MBRTOWC 1
+
+/* Define to 1 if mbsinit is declared even after undefining macros. */
+#define HAVE_RAW_DECL_MBSINIT 1
+
+/* Define to 1 if mbsnrtowcs is declared even after undefining macros. */
+#define HAVE_RAW_DECL_MBSNRTOWCS 1
+
+/* Define to 1 if mbsrtowcs is declared even after undefining macros. */
+#define HAVE_RAW_DECL_MBSRTOWCS 1
+
+/* Define to 1 if memmem is declared even after undefining macros. */
+#define HAVE_RAW_DECL_MEMMEM 1
+
+/* Define to 1 if mempcpy is declared even after undefining macros. */
+#define HAVE_RAW_DECL_MEMPCPY 1
+
+/* Define to 1 if memrchr is declared even after undefining macros. */
+#define HAVE_RAW_DECL_MEMRCHR 1
+
+/* Define to 1 if mkdtemp is declared even after undefining macros. */
+#define HAVE_RAW_DECL_MKDTEMP 1
+
+/* Define to 1 if mkostemp is declared even after undefining macros. */
+#define HAVE_RAW_DECL_MKOSTEMP 1
+
+/* Define to 1 if mkostemps is declared even after undefining macros. */
+#define HAVE_RAW_DECL_MKOSTEMPS 1
+
+/* Define to 1 if mkstemp is declared even after undefining macros. */
+#define HAVE_RAW_DECL_MKSTEMP 1
+
+/* Define to 1 if mkstemps is declared even after undefining macros. */
+#define HAVE_RAW_DECL_MKSTEMPS 1
+
+/* Define to 1 if pclose is declared even after undefining macros. */
+#define HAVE_RAW_DECL_PCLOSE 1
+
+/* Define to 1 if pipe is declared even after undefining macros. */
+#define HAVE_RAW_DECL_PIPE 1
+
+/* Define to 1 if pipe2 is declared even after undefining macros. */
+#define HAVE_RAW_DECL_PIPE2 1
+
+/* Define to 1 if popen is declared even after undefining macros. */
+#define HAVE_RAW_DECL_POPEN 1
+
+/* Define to 1 if posix_openpt is declared even after undefining macros. */
+#define HAVE_RAW_DECL_POSIX_OPENPT 1
+
+/* Define to 1 if pread is declared even after undefining macros. */
+#define HAVE_RAW_DECL_PREAD 1
+
+/* Define to 1 if ptsname is declared even after undefining macros. */
+#define HAVE_RAW_DECL_PTSNAME 1
+
+/* Define to 1 if ptsname_r is declared even after undefining macros. */
+#define HAVE_RAW_DECL_PTSNAME_R 1
+
+/* Define to 1 if pwrite is declared even after undefining macros. */
+#define HAVE_RAW_DECL_PWRITE 1
+
+/* Define to 1 if random is declared even after undefining macros. */
+#define HAVE_RAW_DECL_RANDOM 1
+
+/* Define to 1 if random_r is declared even after undefining macros. */
+#define HAVE_RAW_DECL_RANDOM_R 1
+
+/* Define to 1 if rawmemchr is declared even after undefining macros. */
+#define HAVE_RAW_DECL_RAWMEMCHR 1
+
+/* Define to 1 if readlink is declared even after undefining macros. */
+#define HAVE_RAW_DECL_READLINK 1
+
+/* Define to 1 if readlinkat is declared even after undefining macros. */
+#define HAVE_RAW_DECL_READLINKAT 1
+
+/* Define to 1 if realpath is declared even after undefining macros. */
+#define HAVE_RAW_DECL_REALPATH 1
+
+/* Define to 1 if renameat is declared even after undefining macros. */
+#define HAVE_RAW_DECL_RENAMEAT 1
+
+/* Define to 1 if rmdir is declared even after undefining macros. */
+#define HAVE_RAW_DECL_RMDIR 1
+
+/* Define to 1 if rpmatch is declared even after undefining macros. */
+#define HAVE_RAW_DECL_RPMATCH 1
+
+/* Define to 1 if setenv is declared even after undefining macros. */
+#define HAVE_RAW_DECL_SETENV 1
+
+/* Define to 1 if sethostname is declared even after undefining macros. */
+#define HAVE_RAW_DECL_SETHOSTNAME 1
+
+/* Define to 1 if setstate is declared even after undefining macros. */
+#define HAVE_RAW_DECL_SETSTATE 1
+
+/* Define to 1 if setstate_r is declared even after undefining macros. */
+#define HAVE_RAW_DECL_SETSTATE_R 1
+
+/* Define to 1 if setusershell is declared even after undefining macros. */
+#define HAVE_RAW_DECL_SETUSERSHELL 1
+
+/* Define to 1 if sleep is declared even after undefining macros. */
+#define HAVE_RAW_DECL_SLEEP 1
+
+/* Define to 1 if snprintf is declared even after undefining macros. */
+#define HAVE_RAW_DECL_SNPRINTF 1
+
+/* Define to 1 if srandom is declared even after undefining macros. */
+#define HAVE_RAW_DECL_SRANDOM 1
+
+/* Define to 1 if srandom_r is declared even after undefining macros. */
+#define HAVE_RAW_DECL_SRANDOM_R 1
+
+/* Define to 1 if stpcpy is declared even after undefining macros. */
+#define HAVE_RAW_DECL_STPCPY 1
+
+/* Define to 1 if stpncpy is declared even after undefining macros. */
+#define HAVE_RAW_DECL_STPNCPY 1
+
+/* Define to 1 if strcasestr is declared even after undefining macros. */
+#define HAVE_RAW_DECL_STRCASESTR 1
+
+/* Define to 1 if strchrnul is declared even after undefining macros. */
+#define HAVE_RAW_DECL_STRCHRNUL 1
+
+/* Define to 1 if strdup is declared even after undefining macros. */
+#define HAVE_RAW_DECL_STRDUP 1
+
+/* Define to 1 if strerror_r is declared even after undefining macros. */
+#define HAVE_RAW_DECL_STRERROR_R 1
+
+/* Define to 1 if strncat is declared even after undefining macros. */
+#define HAVE_RAW_DECL_STRNCAT 1
+
+/* Define to 1 if strndup is declared even after undefining macros. */
+#define HAVE_RAW_DECL_STRNDUP 1
+
+/* Define to 1 if strnlen is declared even after undefining macros. */
+#define HAVE_RAW_DECL_STRNLEN 1
+
+/* Define to 1 if strpbrk is declared even after undefining macros. */
+#define HAVE_RAW_DECL_STRPBRK 1
+
+/* Define to 1 if strsep is declared even after undefining macros. */
+#define HAVE_RAW_DECL_STRSEP 1
+
+/* Define to 1 if strsignal is declared even after undefining macros. */
+#define HAVE_RAW_DECL_STRSIGNAL 1
+
+/* Define to 1 if strtod is declared even after undefining macros. */
+#define HAVE_RAW_DECL_STRTOD 1
+
+/* Define to 1 if strtoimax is declared even after undefining macros. */
+#define HAVE_RAW_DECL_STRTOIMAX 1
+
+/* Define to 1 if strtok_r is declared even after undefining macros. */
+#define HAVE_RAW_DECL_STRTOK_R 1
+
+/* Define to 1 if strtoll is declared even after undefining macros. */
+#define HAVE_RAW_DECL_STRTOLL 1
+
+/* Define to 1 if strtoull is declared even after undefining macros. */
+#define HAVE_RAW_DECL_STRTOULL 1
+
+/* Define to 1 if strtoumax is declared even after undefining macros. */
+#define HAVE_RAW_DECL_STRTOUMAX 1
+
+/* Define to 1 if strverscmp is declared even after undefining macros. */
+#define HAVE_RAW_DECL_STRVERSCMP 1
+
+/* Define to 1 if symlink is declared even after undefining macros. */
+#define HAVE_RAW_DECL_SYMLINK 1
+
+/* Define to 1 if symlinkat is declared even after undefining macros. */
+#define HAVE_RAW_DECL_SYMLINKAT 1
+
+/* Define to 1 if tmpfile is declared even after undefining macros. */
+#define HAVE_RAW_DECL_TMPFILE 1
+
+/* Define to 1 if ttyname_r is declared even after undefining macros. */
+#define HAVE_RAW_DECL_TTYNAME_R 1
+
+/* Define to 1 if unlink is declared even after undefining macros. */
+#define HAVE_RAW_DECL_UNLINK 1
+
+/* Define to 1 if unlinkat is declared even after undefining macros. */
+#define HAVE_RAW_DECL_UNLINKAT 1
+
+/* Define to 1 if unlockpt is declared even after undefining macros. */
+#define HAVE_RAW_DECL_UNLOCKPT 1
+
+/* Define to 1 if unsetenv is declared even after undefining macros. */
+#define HAVE_RAW_DECL_UNSETENV 1
+
+/* Define to 1 if usleep is declared even after undefining macros. */
+#define HAVE_RAW_DECL_USLEEP 1
+
+/* Define to 1 if vdprintf is declared even after undefining macros. */
+#define HAVE_RAW_DECL_VDPRINTF 1
+
+/* Define to 1 if vsnprintf is declared even after undefining macros. */
+#define HAVE_RAW_DECL_VSNPRINTF 1
+
+/* Define to 1 if wcpcpy is declared even after undefining macros. */
+#define HAVE_RAW_DECL_WCPCPY 1
+
+/* Define to 1 if wcpncpy is declared even after undefining macros. */
+#define HAVE_RAW_DECL_WCPNCPY 1
+
+/* Define to 1 if wcrtomb is declared even after undefining macros. */
+#define HAVE_RAW_DECL_WCRTOMB 1
+
+/* Define to 1 if wcscasecmp is declared even after undefining macros. */
+#define HAVE_RAW_DECL_WCSCASECMP 1
+
+/* Define to 1 if wcscat is declared even after undefining macros. */
+#define HAVE_RAW_DECL_WCSCAT 1
+
+/* Define to 1 if wcschr is declared even after undefining macros. */
+#define HAVE_RAW_DECL_WCSCHR 1
+
+/* Define to 1 if wcscmp is declared even after undefining macros. */
+#define HAVE_RAW_DECL_WCSCMP 1
+
+/* Define to 1 if wcscoll is declared even after undefining macros. */
+#define HAVE_RAW_DECL_WCSCOLL 1
+
+/* Define to 1 if wcscpy is declared even after undefining macros. */
+#define HAVE_RAW_DECL_WCSCPY 1
+
+/* Define to 1 if wcscspn is declared even after undefining macros. */
+#define HAVE_RAW_DECL_WCSCSPN 1
+
+/* Define to 1 if wcsdup is declared even after undefining macros. */
+#define HAVE_RAW_DECL_WCSDUP 1
+
+/* Define to 1 if wcslen is declared even after undefining macros. */
+#define HAVE_RAW_DECL_WCSLEN 1
+
+/* Define to 1 if wcsncasecmp is declared even after undefining macros. */
+#define HAVE_RAW_DECL_WCSNCASECMP 1
+
+/* Define to 1 if wcsncat is declared even after undefining macros. */
+#define HAVE_RAW_DECL_WCSNCAT 1
+
+/* Define to 1 if wcsncmp is declared even after undefining macros. */
+#define HAVE_RAW_DECL_WCSNCMP 1
+
+/* Define to 1 if wcsncpy is declared even after undefining macros. */
+#define HAVE_RAW_DECL_WCSNCPY 1
+
+/* Define to 1 if wcsnlen is declared even after undefining macros. */
+#define HAVE_RAW_DECL_WCSNLEN 1
+
+/* Define to 1 if wcsnrtombs is declared even after undefining macros. */
+#define HAVE_RAW_DECL_WCSNRTOMBS 1
+
+/* Define to 1 if wcspbrk is declared even after undefining macros. */
+#define HAVE_RAW_DECL_WCSPBRK 1
+
+/* Define to 1 if wcsrchr is declared even after undefining macros. */
+#define HAVE_RAW_DECL_WCSRCHR 1
+
+/* Define to 1 if wcsrtombs is declared even after undefining macros. */
+#define HAVE_RAW_DECL_WCSRTOMBS 1
+
+/* Define to 1 if wcsspn is declared even after undefining macros. */
+#define HAVE_RAW_DECL_WCSSPN 1
+
+/* Define to 1 if wcsstr is declared even after undefining macros. */
+#define HAVE_RAW_DECL_WCSSTR 1
+
+/* Define to 1 if wcstok is declared even after undefining macros. */
+#define HAVE_RAW_DECL_WCSTOK 1
+
+/* Define to 1 if wcswidth is declared even after undefining macros. */
+#define HAVE_RAW_DECL_WCSWIDTH 1
+
+/* Define to 1 if wcsxfrm is declared even after undefining macros. */
+#define HAVE_RAW_DECL_WCSXFRM 1
+
+/* Define to 1 if wctob is declared even after undefining macros. */
+#define HAVE_RAW_DECL_WCTOB 1
+
+/* Define to 1 if wcwidth is declared even after undefining macros. */
+#define HAVE_RAW_DECL_WCWIDTH 1
+
+/* Define to 1 if wmemchr is declared even after undefining macros. */
+#define HAVE_RAW_DECL_WMEMCHR 1
+
+/* Define to 1 if wmemcmp is declared even after undefining macros. */
+#define HAVE_RAW_DECL_WMEMCMP 1
+
+/* Define to 1 if wmemcpy is declared even after undefining macros. */
+#define HAVE_RAW_DECL_WMEMCPY 1
+
+/* Define to 1 if wmemmove is declared even after undefining macros. */
+#define HAVE_RAW_DECL_WMEMMOVE 1
+
+/* Define to 1 if wmemset is declared even after undefining macros. */
+#define HAVE_RAW_DECL_WMEMSET 1
+
+/* Define to 1 if _Exit is declared even after undefining macros. */
+#define HAVE_RAW_DECL__EXIT 1
+
+/* Define if the 'realloc' function is POSIX compliant. */
+#define HAVE_REALLOC_POSIX 1
+
+/* Define to 1 if 'sig_atomic_t' is a signed integer type. */
+/* #undef HAVE_SIGNED_SIG_ATOMIC_T */
+
+/* Define to 1 if 'wchar_t' is a signed integer type. */
+/* #undef HAVE_SIGNED_WCHAR_T */
+
+/* Define to 1 if 'wint_t' is a signed integer type. */
+/* #undef HAVE_SIGNED_WINT_T */
+
+/* Define to 1 if you have the `snprintf' function. */
+#define HAVE_SNPRINTF 1
+
+/* Define if the return value of the snprintf function is the number of
+ bytes (excluding the terminating NUL) that would have been produced if the
+ buffer had been large enough. */
+#define HAVE_SNPRINTF_RETVAL_C99 1
+
+/* Define to 1 if you have the <stdint.h> header file. */
+#define HAVE_STDINT_H 1
+
+/* Define if <stdint.h> exists, doesn't clash with <sys/types.h>, and declares
+   uintmax_t. */
+#define HAVE_STDINT_H_WITH_UINTMAX 1
+
+/* Define to 1 if you have the <stdlib.h> header file. */
+#define HAVE_STDLIB_H 1
+
+/* Define to 1 if you have the <strings.h> header file. */
+#define HAVE_STRINGS_H 1
+
+/* Define to 1 if you have the <string.h> header file. */
+#define HAVE_STRING_H 1
+
+/* Define to 1 if you have the `strndup' function. */
+#define HAVE_STRNDUP 1
+
+/* Define to 1 if you have the `strnlen' function. */
+#define HAVE_STRNLEN 1
+
+/* Define to 1 if you have the `strverscmp' function. */
+#define HAVE_STRVERSCMP 1
+
+/* Define to 1 if you have the <sys/bitypes.h> header file. */
+/* #undef HAVE_SYS_BITYPES_H */
+
+/* Define to 1 if you have the <sys/inttypes.h> header file. */
+/* #undef HAVE_SYS_INTTYPES_H */
+
+/* Define to 1 if you have the <sys/mman.h> header file. */
+#define HAVE_SYS_MMAN_H 1
+
+/* Define to 1 if you have the <sys/param.h> header file. */
+/* #undef HAVE_SYS_PARAM_H */
+
+/* Define to 1 if you have the <sys/stat.h> header file. */
+#define HAVE_SYS_STAT_H 1
+
+/* Define to 1 if you have the <sys/types.h> header file. */
+#define HAVE_SYS_TYPES_H 1
+
+/* Define to 1 if you have the <unistd.h> header file. */
+#define HAVE_UNISTD_H 1
+
+/* Define to 1 if the system has the type `unsigned long long int'. */
+#define HAVE_UNSIGNED_LONG_LONG_INT 1
+
+/* Define to 1 if you have the `vasnprintf' function. */
+/* #undef HAVE_VASNPRINTF */
+
+/* Define to 1 if you have the `vasprintf' function. */
+#define HAVE_VASPRINTF 1
+
+/* Define to 1 or 0, depending whether the compiler supports simple visibility
+ declarations. */
+#define HAVE_VISIBILITY 1
+
+/* Define to 1 if you have the <wchar.h> header file. */
+#define HAVE_WCHAR_H 1
+
+/* Define if you have the 'wchar_t' type. */
+#define HAVE_WCHAR_T 1
+
+/* Define to 1 if you have the `wcrtomb' function. */
+#define HAVE_WCRTOMB 1
+
+/* Define to 1 if you have the `wcslen' function. */
+#define HAVE_WCSLEN 1
+
+/* Define to 1 if you have the `wcsnlen' function. */
+#define HAVE_WCSNLEN 1
+
+/* Define if you have the 'wint_t' type. */
+#define HAVE_WINT_T 1
+
+/* Define to 1 if the system has the type `_Bool'. */
+#define HAVE__BOOL 1
+
+/* Define to 1 if you have the `_set_invalid_parameter_handler' function. */
+/* #undef HAVE__SET_INVALID_PARAMETER_HANDLER */
+
+/* Define to the sub-directory in which libtool stores uninstalled libraries.
+ */
+#define LT_OBJDIR ".libs/"
+
+/* Define to a substitute value for mmap()'s MAP_ANONYMOUS flag. */
+/* #undef MAP_ANONYMOUS */
+
+/* defined to the name of the unpredictable nonce device */
+#define NAME_OF_NONCE_DEVICE "/dev/urandom"
+
+/* defined to the name of the pseudo random device */
+#define NAME_OF_PSEUDO_RANDOM_DEVICE "/dev/urandom"
+
+/* defined to the name of the (strong) random device */
+#define NAME_OF_RANDOM_DEVICE "/dev/random"
+
+/* Name of package */
+#define PACKAGE "libgsasl"
+
+/* Define to the address where bug reports for this package should be sent. */
+#define PACKAGE_BUGREPORT "bug-gsasl@gnu.org"
+
+/* Define to the full name of this package. */
+#define PACKAGE_NAME "libgsasl"
+
+/* Define to the full name and version of this package. */
+#define PACKAGE_STRING "libgsasl 1.8.0"
+
+/* Define to the one symbol short name of this package. */
+#define PACKAGE_TARNAME "libgsasl"
+
+/* Define to the home page for this package. */
+#define PACKAGE_URL ""
+
+/* Define to the version of this package. */
+#define PACKAGE_VERSION "1.8.0"
+
+/* Define if <inttypes.h> exists and defines unusable PRI* macros. */
+/* #undef PRI_MACROS_BROKEN */
+
+/* Define to l, ll, u, ul, ull, etc., as suitable for constants of type
+ 'ptrdiff_t'. */
+/* #undef PTRDIFF_T_SUFFIX */
+
+/* Define if vasnprintf exists but is overridden by gnulib. */
+/* #undef REPLACE_VASNPRINTF */
+
+/* Define to l, ll, u, ul, ull, etc., as suitable for constants of type
+ 'sig_atomic_t'. */
+/* #undef SIG_ATOMIC_T_SUFFIX */
+
+/* Define as the maximum value of type 'size_t', if the system doesn't define
+ it. */
+#ifndef SIZE_MAX
+/* # undef SIZE_MAX */
+#endif
+
+/* Define to l, ll, u, ul, ull, etc., as suitable for constants of type
+ 'size_t'. */
+/* #undef SIZE_T_SUFFIX */
+
+/* If using the C implementation of alloca, define if you know the
+ direction of stack growth for your system; otherwise it will be
+ automatically deduced at runtime.
+ STACK_DIRECTION > 0 => grows toward higher addresses
+ STACK_DIRECTION < 0 => grows toward lower addresses
+ STACK_DIRECTION = 0 => direction of growth unknown */
+/* #undef STACK_DIRECTION */
+
+/* Define to 1 if you have the ANSI C header files. */
+#define STDC_HEADERS 1
+
+/* Define to 1 if you want ANONYMOUS. */
+#define USE_ANONYMOUS 1
+
+/* Define to 1 if you want client code. */
+#define USE_CLIENT 1
+
+/* Define to 1 if you want CRAM-MD5. */
+#define USE_CRAM_MD5 1
+
+/* Define to 1 if you want DIGEST-MD5. */
+#define USE_DIGEST_MD5 1
+
+/* Define to 1 if you want EXTERNAL. */
+#define USE_EXTERNAL 1
+
+/* Define to 1 if you want GS2. */
+/* #undef USE_GS2 */
+
+/* Define to 1 if you want LOGIN. */
+#define USE_LOGIN 1
+
+/* Define to 1 if you want NTLM. */
+/* #undef USE_NTLM */
+
+/* Define to 1 if you want OPENID20. */
+#define USE_OPENID20 1
+
+/* Define to 1 if you want PLAIN. */
+#define USE_PLAIN 1
+
+/* Define to 1 if you want SAML20. */
+#define USE_SAML20 1
+
+/* Define to 1 if you want SCRAM-SHA-1. */
+#define USE_SCRAM_SHA1 1
+
+/* Define to 1 if you want SECURID. */
+#define USE_SECURID 1
+
+/* Define to 1 if you want server code. */
+#define USE_SERVER 1
+
+/* Version number of package */
+#define VERSION "1.8.0"
+
+/* Define to l, ll, u, ul, ull, etc., as suitable for constants of type
+ 'wchar_t'. */
+/* #undef WCHAR_T_SUFFIX */
+
+/* Define to l, ll, u, ul, ull, etc., as suitable for constants of type
+ 'wint_t'. */
+/* #undef WINT_T_SUFFIX */
+
+/* Define WORDS_BIGENDIAN to 1 if your processor stores words with the most
+ significant byte first (like Motorola and SPARC, unlike Intel). */
+#if defined AC_APPLE_UNIVERSAL_BUILD
+# if defined __BIG_ENDIAN__
+# define WORDS_BIGENDIAN 1
+# endif
+#else
+# ifndef WORDS_BIGENDIAN
+/* # undef WORDS_BIGENDIAN */
+# endif
+#endif
+
+/* Define to 1 if on MINIX. */
+/* #undef _MINIX */
+
+/* The _Noreturn keyword of C11. */
+#if ! (defined _Noreturn \
+ || (defined __STDC_VERSION__ && 201112 <= __STDC_VERSION__))
+# if (3 <= __GNUC__ || (__GNUC__ == 2 && 8 <= __GNUC_MINOR__) \
+ || 0x5110 <= __SUNPRO_C)
+# define _Noreturn __attribute__ ((__noreturn__))
+# elif defined _MSC_VER && 1200 <= _MSC_VER
+# define _Noreturn __declspec (noreturn)
+# else
+# define _Noreturn
+# endif
+#endif
+
+
+/* Define to 2 if the system does not provide POSIX.1 features except with
+ this defined. */
+/* #undef _POSIX_1_SOURCE */
+
+/* Define to 1 if you need to in order for 'stat' and other things to work. */
+/* #undef _POSIX_SOURCE */
+
+/* Define to 500 only on HP-UX. */
+/* #undef _XOPEN_SOURCE */
+
+/* Enable extensions on AIX 3, Interix. */
+#ifndef _ALL_SOURCE
+# define _ALL_SOURCE 1
+#endif
+/* Enable general extensions on MacOS X. */
+#ifndef _DARWIN_C_SOURCE
+# define _DARWIN_C_SOURCE 1
+#endif
+/* Enable GNU extensions on systems that have them. */
+#ifndef _GNU_SOURCE
+# define _GNU_SOURCE 1
+#endif
+/* Enable threading extensions on Solaris. */
+#ifndef _POSIX_PTHREAD_SEMANTICS
+# define _POSIX_PTHREAD_SEMANTICS 1
+#endif
+/* Enable extensions on HP NonStop. */
+#ifndef _TANDEM_SOURCE
+# define _TANDEM_SOURCE 1
+#endif
+/* Enable general extensions on Solaris. */
+#ifndef __EXTENSIONS__
+# define __EXTENSIONS__ 1
+#endif
+
+
+/* Define to `__inline__' or `__inline' if that's what the C compiler
+ calls it, or to nothing if 'inline' is not supported under any name. */
+#ifndef __cplusplus
+/* #undef inline */
+#endif
+
+/* Define to long or long long if <stdint.h> and <inttypes.h> don't define. */
+/* #undef intmax_t */
+
+/* Work around a bug in Apple GCC 4.0.1 build 5465: In C99 mode, it supports
+ the ISO C 99 semantics of 'extern inline' (unlike the GNU C semantics of
+ earlier versions), but does not display it by setting __GNUC_STDC_INLINE__.
+ __APPLE__ && __MACH__ test for MacOS X.
+ __APPLE_CC__ tests for the Apple compiler and its version.
+ __STDC_VERSION__ tests for the C99 mode. */
+#if defined __APPLE__ && defined __MACH__ && __APPLE_CC__ >= 5465 && !defined __cplusplus && __STDC_VERSION__ >= 199901L && !defined __GNUC_STDC_INLINE__
+# define __GNUC_STDC_INLINE__ 1
+#endif
+
+/* Define to `int' if <sys/types.h> does not define. */
+/* #undef mode_t */
+
+/* Define to `int' if <sys/types.h> does not define. */
+/* #undef pid_t */
+
+/* Define as the type of the result of subtracting two pointers, if the system
+ doesn't define it. */
+/* #undef ptrdiff_t */
+
+/* Define to the equivalent of the C99 'restrict' keyword, or to
+ nothing if this is not supported. Do not define if restrict is
+ supported directly. */
+#define restrict __restrict
+/* Work around a bug in Sun C++: it does not support _Restrict or
+ __restrict__, even though the corresponding Sun C compiler ends up with
+ "#define restrict _Restrict" or "#define restrict __restrict__" in the
+ previous line. Perhaps some future version of Sun C++ will work with
+ restrict; if so, hopefully it defines __RESTRICT like Sun C does. */
+#if defined __SUNPRO_CC && !defined __RESTRICT
+# define _Restrict
+# define __restrict__
+#endif
+
+/* Define to `unsigned int' if <sys/types.h> does not define. */
+/* #undef size_t */
+
+/* Define as a signed type of the same size as size_t. */
+/* #undef ssize_t */
+
+/* Define as a marker that can be attached to declarations that might not
+ be used. This helps to reduce warnings, such as from
+ GCC -Wunused-parameter. */
+#if __GNUC__ >= 3 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 7)
+# define _GL_UNUSED __attribute__ ((__unused__))
+#else
+# define _GL_UNUSED
+#endif
+/* The name _UNUSED_PARAMETER_ is an earlier spelling, although the name
+ is a misnomer outside of parameter lists. */
+#define _UNUSED_PARAMETER_ _GL_UNUSED
+
+/* The __pure__ attribute was added in gcc 2.96. */
+#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 96)
+# define _GL_ATTRIBUTE_PURE __attribute__ ((__pure__))
+#else
+# define _GL_ATTRIBUTE_PURE /* empty */
+#endif
+
+/* The __const__ attribute was added in gcc 2.95. */
+#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 95)
+# define _GL_ATTRIBUTE_CONST __attribute__ ((__const__))
+#else
+# define _GL_ATTRIBUTE_CONST /* empty */
+#endif
diff --git a/contrib/libhdfs3 b/contrib/libhdfs3
index 082e55f17d1..9194af44588 160000
--- a/contrib/libhdfs3
+++ b/contrib/libhdfs3
@@ -1 +1 @@
-Subproject commit 082e55f17d1c58bf124290fb044fea40e985ec11
+Subproject commit 9194af44588633c1b2dae44bf945804401ff883e
diff --git a/contrib/libhdfs3-cmake/CMakeLists.txt b/contrib/libhdfs3-cmake/CMakeLists.txt
index c9b9179d5e6..fcc4a15666c 100644
--- a/contrib/libhdfs3-cmake/CMakeLists.txt
+++ b/contrib/libhdfs3-cmake/CMakeLists.txt
@@ -1,23 +1,4 @@
-if (ENABLE_PROTOBUF AND NOT USE_INTERNAL_PROTOBUF_LIBRARY)
- option(PROTOBUF_OLD_ABI_COMPAT "Set to ON for compatiability with external protobuf which was compiled old C++ ABI" OFF)
-endif()
-
-if (PROTOBUF_OLD_ABI_COMPAT)
- if (NOT ENABLE_PROTOBUF OR USE_INTERNAL_PROTOBUF_LIBRARY)
- message (${RECONFIGURE_MESSAGE_LEVEL} "PROTOBUF_OLD_ABI_COMPAT option is ignored")
- endif()
-endif()
-
-if (NOT USE_INTERNAL_PROTOBUF_LIBRARY AND PROTOBUF_OLD_ABI_COMPAT)
- # compatiable with protobuf which was compiled old C++ ABI
- set(CMAKE_CXX_FLAGS "-D_GLIBCXX_USE_CXX11_ABI=0")
- set(CMAKE_C_FLAGS "")
- if (NOT (CMAKE_VERSION VERSION_LESS "3.8.0"))
- unset(CMAKE_CXX_STANDARD)
- endif ()
-endif()
-
-if (${ENABLE_LIBRARIES} AND ${ENABLE_KRB5})
+if (${ENABLE_KRB5})
SET(WITH_KERBEROS 1)
else()
SET(WITH_KERBEROS 0)
@@ -46,9 +27,7 @@ set(PROTO_FILES
"${HDFS3_SOURCE_DIR}/proto/datatransfer.proto"
)
-if(USE_PROTOBUF)
- PROTOBUF_GENERATE_CPP(PROTO_SOURCES PROTO_HEADERS ${PROTO_FILES})
-endif()
+PROTOBUF_GENERATE_CPP(PROTO_SOURCES PROTO_HEADERS ${PROTO_FILES})
configure_file("${HDFS3_SOURCE_DIR}/platform.h.in" "${CMAKE_CURRENT_BINARY_DIR}/platform.h")
@@ -108,95 +87,14 @@ set(SRCS
"${HDFS3_SOURCE_DIR}/common/Hash.cpp"
"${HDFS3_SOURCE_DIR}/common/SWCrc32c.cpp"
"${HDFS3_SOURCE_DIR}/common/Thread.cpp"
-
- "${HDFS3_SOURCE_DIR}/network/TcpSocket.h"
- "${HDFS3_SOURCE_DIR}/network/BufferedSocketReader.h"
- "${HDFS3_SOURCE_DIR}/network/Socket.h"
- "${HDFS3_SOURCE_DIR}/network/DomainSocket.h"
- "${HDFS3_SOURCE_DIR}/network/Syscall.h"
- "${HDFS3_SOURCE_DIR}/client/InputStreamImpl.h"
- "${HDFS3_SOURCE_DIR}/client/FileSystem.h"
- "${HDFS3_SOURCE_DIR}/client/ReadShortCircuitInfo.h"
- "${HDFS3_SOURCE_DIR}/client/InputStreamInter.h"
- "${HDFS3_SOURCE_DIR}/client/FileSystemImpl.h"
- "${HDFS3_SOURCE_DIR}/client/PacketPool.h"
- "${HDFS3_SOURCE_DIR}/client/Pipeline.h"
- "${HDFS3_SOURCE_DIR}/client/OutputStreamInter.h"
- "${HDFS3_SOURCE_DIR}/client/RemoteBlockReader.h"
- "${HDFS3_SOURCE_DIR}/client/Token.h"
- "${HDFS3_SOURCE_DIR}/client/KerberosName.h"
- "${HDFS3_SOURCE_DIR}/client/DirectoryIterator.h"
- "${HDFS3_SOURCE_DIR}/client/hdfs.h"
- "${HDFS3_SOURCE_DIR}/client/FileSystemStats.h"
- "${HDFS3_SOURCE_DIR}/client/FileSystemKey.h"
- "${HDFS3_SOURCE_DIR}/client/DataTransferProtocolSender.h"
- "${HDFS3_SOURCE_DIR}/client/Packet.h"
- "${HDFS3_SOURCE_DIR}/client/PacketHeader.h"
- "${HDFS3_SOURCE_DIR}/client/FileSystemInter.h"
- "${HDFS3_SOURCE_DIR}/client/LocalBlockReader.h"
- "${HDFS3_SOURCE_DIR}/client/TokenInternal.h"
- "${HDFS3_SOURCE_DIR}/client/InputStream.h"
- "${HDFS3_SOURCE_DIR}/client/PipelineAck.h"
- "${HDFS3_SOURCE_DIR}/client/BlockReader.h"
- "${HDFS3_SOURCE_DIR}/client/Permission.h"
- "${HDFS3_SOURCE_DIR}/client/OutputStreamImpl.h"
- "${HDFS3_SOURCE_DIR}/client/LeaseRenewer.h"
- "${HDFS3_SOURCE_DIR}/client/UserInfo.h"
- "${HDFS3_SOURCE_DIR}/client/PeerCache.h"
- "${HDFS3_SOURCE_DIR}/client/OutputStream.h"
- "${HDFS3_SOURCE_DIR}/client/FileStatus.h"
- "${HDFS3_SOURCE_DIR}/client/DataTransferProtocol.h"
- "${HDFS3_SOURCE_DIR}/client/BlockLocation.h"
- "${HDFS3_SOURCE_DIR}/rpc/RpcConfig.h"
- "${HDFS3_SOURCE_DIR}/rpc/SaslClient.h"
- "${HDFS3_SOURCE_DIR}/rpc/RpcAuth.h"
- "${HDFS3_SOURCE_DIR}/rpc/RpcClient.h"
- "${HDFS3_SOURCE_DIR}/rpc/RpcCall.h"
- "${HDFS3_SOURCE_DIR}/rpc/RpcContentWrapper.h"
- "${HDFS3_SOURCE_DIR}/rpc/RpcProtocolInfo.h"
- "${HDFS3_SOURCE_DIR}/rpc/RpcRemoteCall.h"
- "${HDFS3_SOURCE_DIR}/rpc/RpcServerInfo.h"
- "${HDFS3_SOURCE_DIR}/rpc/RpcChannel.h"
- "${HDFS3_SOURCE_DIR}/rpc/RpcChannelKey.h"
- "${HDFS3_SOURCE_DIR}/server/BlockLocalPathInfo.h"
- "${HDFS3_SOURCE_DIR}/server/LocatedBlocks.h"
- "${HDFS3_SOURCE_DIR}/server/DatanodeInfo.h"
- "${HDFS3_SOURCE_DIR}/server/RpcHelper.h"
- "${HDFS3_SOURCE_DIR}/server/ExtendedBlock.h"
- "${HDFS3_SOURCE_DIR}/server/NamenodeInfo.h"
- "${HDFS3_SOURCE_DIR}/server/NamenodeImpl.h"
- "${HDFS3_SOURCE_DIR}/server/LocatedBlock.h"
- "${HDFS3_SOURCE_DIR}/server/NamenodeProxy.h"
- "${HDFS3_SOURCE_DIR}/server/Datanode.h"
- "${HDFS3_SOURCE_DIR}/server/Namenode.h"
- "${HDFS3_SOURCE_DIR}/common/XmlConfig.h"
- "${HDFS3_SOURCE_DIR}/common/Logger.h"
- "${HDFS3_SOURCE_DIR}/common/WriteBuffer.h"
- "${HDFS3_SOURCE_DIR}/common/HWCrc32c.h"
- "${HDFS3_SOURCE_DIR}/common/Checksum.h"
- "${HDFS3_SOURCE_DIR}/common/SessionConfig.h"
- "${HDFS3_SOURCE_DIR}/common/Unordered.h"
- "${HDFS3_SOURCE_DIR}/common/BigEndian.h"
- "${HDFS3_SOURCE_DIR}/common/Thread.h"
- "${HDFS3_SOURCE_DIR}/common/StackPrinter.h"
- "${HDFS3_SOURCE_DIR}/common/Exception.h"
- "${HDFS3_SOURCE_DIR}/common/WritableUtils.h"
- "${HDFS3_SOURCE_DIR}/common/StringUtil.h"
- "${HDFS3_SOURCE_DIR}/common/LruMap.h"
- "${HDFS3_SOURCE_DIR}/common/Function.h"
- "${HDFS3_SOURCE_DIR}/common/DateTime.h"
- "${HDFS3_SOURCE_DIR}/common/Hash.h"
- "${HDFS3_SOURCE_DIR}/common/SWCrc32c.h"
- "${HDFS3_SOURCE_DIR}/common/ExceptionInternal.h"
- "${HDFS3_SOURCE_DIR}/common/Memory.h"
- "${HDFS3_SOURCE_DIR}/common/FileWrapper.h"
- )
+ ${PROTO_SOURCES}
+)
# Old kernels (< 3.17) don't have SYS_getrandom. Always use the POSIX implementation for better compatibility
set_source_files_properties("${HDFS3_SOURCE_DIR}/rpc/RpcClient.cpp" PROPERTIES COMPILE_FLAGS "-DBOOST_UUID_RANDOM_PROVIDER_FORCE_POSIX=1")
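The property above scopes the define to a single translation unit rather than the whole hdfs3 target. A minimal sketch of this per-file pattern (file name and flag hypothetical):

    set_source_files_properties(src/uses_random.cpp PROPERTIES
        COMPILE_FLAGS "-DUSE_POSIX_FALLBACK=1")

Only the file that pulls in boost.uuid's random provider gets the extra flag; the rest of the sources keep the target-wide flags.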
# target
-add_library(hdfs3 ${SRCS} ${PROTO_SOURCES} ${PROTO_HEADERS})
+add_library(hdfs3 ${SRCS})
if(USE_INTERNAL_PROTOBUF_LIBRARY)
add_dependencies(hdfs3 protoc)
@@ -218,6 +116,7 @@ target_link_libraries(hdfs3 PRIVATE ${LIBXML2_LIBRARIES})
# inherit from parent cmake
target_include_directories(hdfs3 PRIVATE ${Protobuf_INCLUDE_DIR})
target_link_libraries(hdfs3 PRIVATE ${Protobuf_LIBRARY} boost::headers_only)
+
if(OPENSSL_INCLUDE_DIR AND OPENSSL_LIBRARIES)
target_include_directories(hdfs3 PRIVATE ${OPENSSL_INCLUDE_DIR})
target_link_libraries(hdfs3 PRIVATE ${OPENSSL_LIBRARIES})
diff --git a/contrib/librdkafka-cmake/config.h.in b/contrib/librdkafka-cmake/config.h.in
index 9fecb45e42d..b6d5fdf046f 100644
--- a/contrib/librdkafka-cmake/config.h.in
+++ b/contrib/librdkafka-cmake/config.h.in
@@ -66,7 +66,7 @@
#cmakedefine WITH_SASL_OAUTHBEARER 1
#cmakedefine WITH_SASL_CYRUS 1
// crc32chw
-#if !defined(__PPC__) && (!defined(__aarch64__) || defined(__ARM_FEATURE_CRC32)) && !(defined(__aarch64__) && defined(__APPLE__))
+#if !defined(__PPC__) && !defined(__riscv) && !defined(__aarch64__)
#define WITH_CRC32C_HW 1
#endif
// regex
diff --git a/contrib/lz4-cmake/CMakeLists.txt b/contrib/lz4-cmake/CMakeLists.txt
index 77e00d4295b..2c412d6e36a 100644
--- a/contrib/lz4-cmake/CMakeLists.txt
+++ b/contrib/lz4-cmake/CMakeLists.txt
@@ -1,4 +1,4 @@
-option (USE_INTERNAL_LZ4_LIBRARY "Use internal lz4 library" ${NOT_UNBUNDLED})
+option (USE_INTERNAL_LZ4_LIBRARY "Use internal lz4 library" ON)
if (NOT USE_INTERNAL_LZ4_LIBRARY)
find_library (LIBRARY_LZ4 lz4)
diff --git a/contrib/mariadb-connector-c-cmake/CMakeLists.txt b/contrib/mariadb-connector-c-cmake/CMakeLists.txt
new file mode 100644
index 00000000000..ea74e13b7f0
--- /dev/null
+++ b/contrib/mariadb-connector-c-cmake/CMakeLists.txt
@@ -0,0 +1,243 @@
+if (GLIBC_COMPATIBILITY)
+ set(LIBM glibc-compatibility)
+endif()
+
+# This is the LGPL libmariadb project.
+
+set(CC_SOURCE_DIR ${CMAKE_SOURCE_DIR}/contrib/mariadb-connector-c)
+set(CC_BINARY_DIR ${CMAKE_CURRENT_BINARY_DIR})
+
+set(WITH_SSL ON)
+
+set(MARIADB_CONNECTOR_C_COPYRIGHT "2013-2017 MariaDB Corporation Ab")
+
+set(PROTOCOL_VERSION 10) # we adapted the new password option from PHP's mysqlnd
+
+# If C/C (Connector/C) is built as a subproject inside the MariaDB server tree, we will
+# use the version defined by the server
+if(MAJOR_VERSION)
+ set(MARIADB_CLIENT_VERSION_MAJOR ${MAJOR_VERSION})
+ set(MARIADB_CLIENT_VERSION_MINOR ${MINOR_VERSION})
+ set(MARIADB_CLIENT_VERSION_PATCH ${PATCH_VERSION})
+ set(MARIADB_CLIENT_VERSION_EXTRA ${EXTRA_VERSION})
+else()
+ set(MARIADB_CLIENT_VERSION_MAJOR "10")
+ set(MARIADB_CLIENT_VERSION_MINOR "4")
+ set(MARIADB_CLIENT_VERSION_PATCH "3")
+ set(MARIADB_CLIENT_VERSION_EXTRA "")
+endif()
+set(MARIADB_CLIENT_VERSION "${MARIADB_CLIENT_VERSION_MAJOR}.${MARIADB_CLIENT_VERSION_MINOR}.${MARIADB_CLIENT_VERSION_PATCH}${MARIADB_CLIENT_VERSION_EXTRA}")
+set(MARIADB_BASE_VERSION "mariadb-${MARIADB_CLIENT_VERSION_MAJOR}.${MARIADB_CLIENT_VERSION_MINOR}")
+MATH(EXPR MARIADB_VERSION_ID "${MARIADB_CLIENT_VERSION_MAJOR} * 10000 +
+ ${MARIADB_CLIENT_VERSION_MINOR} * 100 +
+ ${MARIADB_CLIENT_VERSION_PATCH}")
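MATH(EXPR) packs the three version components into one integer; with the default 10.4.3 this evaluates to 10 * 10000 + 4 * 100 + 3 = 100403. A standalone sketch to verify:

    math(EXPR _id "10 * 10000 + 4 * 100 + 3")
    message(STATUS "MARIADB_VERSION_ID = ${_id}")  # -- MARIADB_VERSION_ID = 100403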
+
+IF (NOT MARIADB_PORT)
+ set(MARIADB_PORT 3306)
+ENDIF ()
+if(NOT MARIADB_UNIX_ADDR)
+ set(MARIADB_UNIX_ADDR "/tmp/mysql.sock")
+endif()
+
+set(HAVE_ALLOCA_H 1)
+set(HAVE_ARPA_INET_H 1)
+set(HAVE_DLFCN_H 1)
+set(HAVE_FCNTL_H 1)
+set(HAVE_FLOAT_H 1)
+set(HAVE_LIMITS_H 1)
+set(HAVE_PWD_H 1)
+set(HAVE_SCHED_H 1)
+set(HAVE_SELECT_H 0)
+set(INCLUDE_SIGNAL 1)
+set(HAVE_SIGNAL 1)
+set(HAVE_STDDEF_H 1)
+set(HAVE_STDINT_H 1)
+set(HAVE_STDLIB_H 1)
+set(HAVE_STRING_H 1)
+set(HAVE_STRINGS_H 1)
+set(HAVE_SYS_IOCTL_H 1)
+set(HAVE_SYS_SELECT_H 1)
+set(HAVE_SYS_SOCKET_H 1)
+set(HAVE_SYS_TYPES_H 1)
+set(HAVE_SYS_UN_H 1)
+set(HAVE_UNISTD_H 1)
+set(HAVE_UTIME_H 1)
+set(HAVE_UCONTEXT_H 1)
+set(HAVE_ALLOCA 1)
+set(HAVE_DLERROR 0)
+set(HAVE_DLOPEN 0)
+set(HAVE_FCNTL 1)
+set(HAVE_MEMCPY 1)
+set(HAVE_NL_LANGINFO 0)
+set(HAVE_SETLOCALE 0)
+set(HAVE_POLL 1)
+
+set(SIZEOF_CHARP 8)
+set(SIZEOF_INT 4)
+set(SIZEOF_LONG 8)
+set(SIZEOF_LONG_LONG 8)
+set(SIZEOF_SIZE_T 8)
+set(SOCKET_SIZE_TYPE socklen_t)
+
+
+set(SYSTEM_LIBS ${SYSTEM_LIBS} zlib)
+
+if(CMAKE_HAVE_PTHREAD_H)
+ set(CMAKE_REQUIRED_INCLUDES pthread.h)
+endif()
+
+add_definitions(-DMARIADB_SYSTEM_TYPE="${CMAKE_SYSTEM_NAME}")
+add_definitions(-DMARIADB_MACHINE_TYPE="${CMAKE_SYSTEM_PROCESSOR}")
+
+set(HAVE_THREADS 1)
+set(DEFAULT_CHARSET "utf8mb4")
+
+add_definitions(-DHAVE_OPENSSL -DHAVE_TLS)
+set(SSL_LIBRARIES ${OPENSSL_SSL_LIBRARY} ${OPENSSL_CRYPTO_LIBRARY})
+include_directories(BEFORE ${OPENSSL_INCLUDE_DIR})
+set(TLS_LIBRARY_VERSION "OpenSSL ${OPENSSL_VERSION}")
+
+set(ENABLED_LOCAL_INFILE OFF)
+
+CONFIGURE_FILE(${CC_SOURCE_DIR}/include/ma_config.h.in
+ ${CC_BINARY_DIR}/include-private/ma_config.h)
+CONFIGURE_FILE(${CC_SOURCE_DIR}/include/ma_config.h.in
+ ${CC_BINARY_DIR}/include-private/config.h)
+CONFIGURE_FILE(${CC_SOURCE_DIR}/include/mariadb_version.h.in
+ ${CC_BINARY_DIR}/include-public/mariadb_version.h)
+
+if(WITH_SSL)
+ set(SYSTEM_LIBS ${SYSTEM_LIBS} ${SSL_LIBRARIES})
+endif()
+
+
+function(REGISTER_PLUGIN)
+
+ SET(one_value_keywords TARGET TYPE)
+ SET(multi_value_keywords SOURCES)
+
+ cmake_parse_arguments(CC_PLUGIN
+ "${options}"
+ "${one_value_keywords}"
+ "${multi_value_keywords}"
+ ${ARGN})
+
+ # overwrite the default if it was specified with a cmake option
+ string(TOUPPER ${CC_PLUGIN_TARGET} cc_plugin)
+ if(NOT "${CLIENT_PLUGIN_${cc_plugin}}" STREQUAL "")
+ SET(CC_PLUGIN_DEFAULT ${CLIENT_PLUGIN_${cc_plugin}})
+ endif()
+
+ # use uppercase
+ string(TOUPPER ${CC_PLUGIN_TARGET} target_name)
+ string(TOUPPER "${CC_PLUGIN_CONFIGURATIONS}" CC_PLUGIN_CONFIGURATIONS)
+
+ if(NOT ${PLUGIN_${target_name}} STREQUAL "")
+ string(TOUPPER ${PLUGIN_${target_name}} PLUGIN_${target_name})
+ set(CC_PLUGIN_DEFAULT ${PLUGIN_${target_name}})
+ endif()
+
+ set(PLUGINS_STATIC ${PLUGINS_STATIC} ${CC_PLUGIN_TARGET} PARENT_SCOPE)
+ set(LIBMARIADB_PLUGIN_CFLAGS ${LIBMARIADB_PLUGIN_CFLAGS} ${CC_PLUGIN_COMPILE_OPTIONS} PARENT_SCOPE)
+ set(LIBMARIADB_PLUGIN_INCLUDES ${LIBMARIADB_PLUGIN_INCLUDES} ${CC_PLUGIN_INCLUDES} PARENT_SCOPE)
+ set(LIBMARIADB_PLUGIN_SOURCES ${LIBMARIADB_PLUGIN_SOURCES} ${CC_PLUGIN_SOURCES} PARENT_SCOPE)
+ set(LIBMARIADB_PLUGIN_LIBS ${LIBMARIADB_PLUGIN_LIBS} ${CC_PLUGIN_LIBRARIES} PARENT_SCOPE)
+endfunction()
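REGISTER_PLUGIN relies on cmake_parse_arguments to split each call into CC_PLUGIN_TARGET, CC_PLUGIN_TYPE and CC_PLUGIN_SOURCES, then appends the pieces to parent-scope lists. A minimal standalone sketch of that mechanism (the demo_register name is hypothetical):

    function(demo_register)
        cmake_parse_arguments(DEMO "" "TARGET;TYPE" "SOURCES" ${ARGN})
        message(STATUS "target=${DEMO_TARGET} sources=${DEMO_SOURCES}")
    endfunction()

    demo_register(TARGET pvio_socket TYPE PVIO SOURCES a.c b.c)
    # -- target=pvio_socket sources=a.c;b.c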
+
+SET(PLUGIN_EXTRA_FILES ${CC_SOURCE_DIR}/libmariadb/ma_errmsg.c)
+
+# socket pvio plugin
+REGISTER_PLUGIN(TARGET pvio_socket
+ TYPE MARIADB_CLIENT_PLUGIN_PVIO
+ SOURCES "${CC_SOURCE_DIR}/plugins/pvio/pvio_socket.c")
+
+# SHA256 caching plugin for MySQL 8.0 connections
+REGISTER_PLUGIN(TARGET caching_sha2_password
+ TYPE MARIADB_CLIENT_PLUGIN_AUTH
+ SOURCES "${CC_SOURCE_DIR}/plugins/auth/caching_sha2_pw.c")
+
+REGISTER_PLUGIN(TARGET sha256_password
+ TYPE MARIADB_CLIENT_PLUGIN_AUTH
+ SOURCES "${CC_SOURCE_DIR}/plugins/auth/sha256_pw.c")
+
+# native password plugin
+REGISTER_PLUGIN(TARGET mysql_native_password
+ TYPE MARIADB_CLIENT_PLUGIN_AUTH
+ SOURCES "${CC_SOURCE_DIR}/plugins/auth/my_auth.c")
+
+REGISTER_PLUGIN(TARGET aurora
+ TYPE MARIADB_CLIENT_PLUGIN_CONNECTION
+ SOURCES "${CC_SOURCE_DIR}/plugins/connection/aurora.c")
+
+
+add_definitions(-D HAVE_COMPRESS)
+add_definitions(-D LIBMARIADB)
+add_definitions(-D THREAD)
+
+# handle static plugins
+set(LIBMARIADB_SOURCES ${LIBMARIADB_PLUGIN_SOURCES})
+set(SYSTEM_LIBS ${SYSTEM_LIBS} ${LIBMARIADB_PLUGIN_LIBS})
+add_definitions(${LIBMARIADB_PLUGIN_DEFS})
+FOREACH(plugin ${PLUGINS_STATIC})
+ set(EXTERNAL_PLUGINS "${EXTERNAL_PLUGINS} extern struct st_mysql_client_plugin ${plugin}_client_plugin;\n")
+ set(BUILTIN_PLUGINS "${BUILTIN_PLUGINS} (struct st_mysql_client_plugin *)&${plugin}_client_plugin,\n")
+ENDFOREACH()
+CONFIGURE_FILE(${CC_SOURCE_DIR}/libmariadb/ma_client_plugin.c.in
+ ${CC_BINARY_DIR}/libmariadb/ma_client_plugin.c)
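For every plugin in PLUGINS_STATIC, the loop above appends one extern declaration and one builtin-table entry, and CONFIGURE_FILE substitutes both strings into ma_client_plugin.c. A standalone sketch of one iteration's output:

    set(plugin pvio_socket)
    set(_decl "extern struct st_mysql_client_plugin ${plugin}_client_plugin;")
    message(STATUS "${_decl}")
    # -- extern struct st_mysql_client_plugin pvio_socket_client_plugin;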
+
+set(LIBMARIADB_SOURCES ${LIBMARIADB_SOURCES}
+${CC_SOURCE_DIR}/plugins/auth/my_auth.c
+${CC_SOURCE_DIR}/libmariadb/ma_array.c
+${CC_SOURCE_DIR}/libmariadb/ma_charset.c
+${CC_SOURCE_DIR}/libmariadb/ma_hash.c
+${CC_SOURCE_DIR}/libmariadb/ma_net.c
+${CC_SOURCE_DIR}/libmariadb/mariadb_charset.c
+${CC_SOURCE_DIR}/libmariadb/ma_time.c
+${CC_SOURCE_DIR}/libmariadb/ma_default.c
+${CC_SOURCE_DIR}/libmariadb/ma_errmsg.c
+${CC_SOURCE_DIR}/libmariadb/mariadb_lib.c
+${CC_SOURCE_DIR}/libmariadb/ma_list.c
+${CC_SOURCE_DIR}/libmariadb/ma_pvio.c
+${CC_SOURCE_DIR}/libmariadb/ma_tls.c
+${CC_SOURCE_DIR}/libmariadb/ma_alloc.c
+${CC_SOURCE_DIR}/libmariadb/ma_compress.c
+${CC_SOURCE_DIR}/libmariadb/ma_init.c
+${CC_SOURCE_DIR}/libmariadb/ma_password.c
+${CC_SOURCE_DIR}/libmariadb/ma_ll2str.c
+${CC_SOURCE_DIR}/libmariadb/ma_sha1.c
+${CC_SOURCE_DIR}/libmariadb/mariadb_stmt.c
+${CC_SOURCE_DIR}/libmariadb/ma_loaddata.c
+${CC_SOURCE_DIR}/libmariadb/ma_stmt_codec.c
+${CC_SOURCE_DIR}/libmariadb/ma_string.c
+${CC_SOURCE_DIR}/libmariadb/ma_dtoa.c
+${CC_SOURCE_DIR}/libmariadb/mariadb_rpl.c
+${CC_SOURCE_DIR}/libmariadb/ma_io.c
+${CC_SOURCE_DIR}/libmariadb/secure/openssl.c
+${CC_SOURCE_DIR}/libmariadb/secure/openssl_crypt.c
+${CC_BINARY_DIR}/libmariadb/ma_client_plugin.c
+)
+
+if(ICONV_INCLUDE_DIR)
+ include_directories(BEFORE ${ICONV_INCLUDE_DIR})
+endif()
+add_definitions(-DLIBICONV_PLUG)
+
+if(ZLIB_FOUND AND WITH_EXTERNAL_ZLIB)
+ include_directories(${ZLIB_INCLUDE_DIR})
+endif()
+
+if(WITH_DYNCOL)
+ set(LIBMARIADB_SOURCES ${LIBMARIADB_SOURCES} ${CC_SOURCE_DIR}/libmariadb/mariadb_dyncol.c)
+endif()
+
+set(LIBMARIADB_SOURCES ${LIBMARIADB_SOURCES} ${CC_SOURCE_DIR}/libmariadb/mariadb_async.c ${CC_SOURCE_DIR}/libmariadb/ma_context.c)
+
+
+add_library(mariadbclient STATIC ${LIBMARIADB_SOURCES})
+target_link_libraries(mariadbclient ${SYSTEM_LIBS})
+
+target_include_directories(mariadbclient
+ PRIVATE ${CC_BINARY_DIR}/include-private
+ PUBLIC ${CC_BINARY_DIR}/include-public ${CC_SOURCE_DIR}/include ${CC_SOURCE_DIR}/libmariadb)
+
+set_target_properties(mariadbclient PROPERTIES IMPORTED_INTERFACE_LINK_LIBRARIES "${SYSTEM_LIBS}")
diff --git a/contrib/poco b/contrib/poco
index 39fd359765a..258b9ba6cd2 160000
--- a/contrib/poco
+++ b/contrib/poco
@@ -1 +1 @@
-Subproject commit 39fd359765a3a77b46d94ec3c5def3c7802a920f
+Subproject commit 258b9ba6cd245ff88e9346f75c43464c403f329d
diff --git a/contrib/poco-cmake/Foundation/CMakeLists.txt b/contrib/poco-cmake/Foundation/CMakeLists.txt
index a9a4933873c..0c13d109344 100644
--- a/contrib/poco-cmake/Foundation/CMakeLists.txt
+++ b/contrib/poco-cmake/Foundation/CMakeLists.txt
@@ -51,6 +51,7 @@ if (USE_INTERNAL_POCO_LIBRARY)
"${LIBRARY_DIR}/Foundation/src/Channel.cpp"
"${LIBRARY_DIR}/Foundation/src/Checksum.cpp"
"${LIBRARY_DIR}/Foundation/src/Clock.cpp"
+ "${LIBRARY_DIR}/Foundation/src/CompressedLogFile.cpp"
"${LIBRARY_DIR}/Foundation/src/Condition.cpp"
"${LIBRARY_DIR}/Foundation/src/Configurable.cpp"
"${LIBRARY_DIR}/Foundation/src/ConsoleChannel.cpp"
@@ -222,7 +223,7 @@ if (USE_INTERNAL_POCO_LIBRARY)
POCO_OS_FAMILY_UNIX
)
target_include_directories (_poco_foundation SYSTEM PUBLIC "${LIBRARY_DIR}/Foundation/include")
- target_link_libraries (_poco_foundation PRIVATE Poco::Foundation::PCRE ${ZLIB_LIBRARIES})
+ target_link_libraries (_poco_foundation PRIVATE Poco::Foundation::PCRE ${ZLIB_LIBRARIES} lz4)
else ()
add_library (Poco::Foundation UNKNOWN IMPORTED GLOBAL)
diff --git a/contrib/protobuf-cmake/CMakeLists.txt b/contrib/protobuf-cmake/CMakeLists.txt
index 37215b91dbb..92eec444e44 100644
--- a/contrib/protobuf-cmake/CMakeLists.txt
+++ b/contrib/protobuf-cmake/CMakeLists.txt
@@ -1,39 +1,238 @@
-set(protobuf_SOURCE_DIR "${ClickHouse_SOURCE_DIR}/contrib/protobuf")
-set(protobuf_BINARY_DIR "${ClickHouse_BINARY_DIR}/contrib/protobuf")
+set(protobuf_source_dir "${ClickHouse_SOURCE_DIR}/contrib/protobuf")
+set(protobuf_binary_dir "${ClickHouse_BINARY_DIR}/contrib/protobuf")
-set(protobuf_WITH_ZLIB 0 CACHE INTERNAL "" FORCE) # actually will use zlib, but skip find
-set(protobuf_BUILD_TESTS OFF CACHE INTERNAL "" FORCE)
-if (MAKE_STATIC_LIBRARIES)
- set(protobuf_BUILD_SHARED_LIBS OFF CACHE INTERNAL "" FORCE)
-else ()
- set(protobuf_BUILD_SHARED_LIBS ON CACHE INTERNAL "" FORCE)
-endif ()
+add_definitions(-DGOOGLE_PROTOBUF_CMAKE_BUILD)
-if (CMAKE_CROSSCOMPILING)
- # Will build 'protoc' for host arch instead of cross-compiling
- set(protobuf_BUILD_PROTOC_BINARIES OFF CACHE INTERNAL "" FORCE)
-endif ()
+add_definitions(-DHAVE_PTHREAD)
+add_definitions(-DHAVE_ZLIB)
-add_subdirectory("${protobuf_SOURCE_DIR}/cmake" "${protobuf_BINARY_DIR}")
+include_directories(
+ ${ZLIB_INCLUDE_DIRECTORIES}
+ ${protobuf_binary_dir}
+ ${protobuf_source_dir}/src)
-if (ENABLE_FUZZING)
- # `protoc` will be built with sanitizer and it could fail during ClickHouse build
- # It easily reproduces in oss-fuzz building pipeline
- # To avoid this we can try to build `protoc` without any sanitizer with option `-fno-sanitize=all`, but
- # it this case we will face with linker errors, because libcxx still will be built with sanitizer
- # So, we can simply suppress all of these failures with a combination this flag and an environment variable
- # export MSAN_OPTIONS=exit_code=0
- target_compile_options(protoc PRIVATE "-fsanitize-recover=all")
+set(libprotobuf_lite_files
+ ${protobuf_source_dir}/src/google/protobuf/any_lite.cc
+ ${protobuf_source_dir}/src/google/protobuf/arena.cc
+ ${protobuf_source_dir}/src/google/protobuf/arenastring.cc
+ ${protobuf_source_dir}/src/google/protobuf/extension_set.cc
+ ${protobuf_source_dir}/src/google/protobuf/field_access_listener.cc
+ ${protobuf_source_dir}/src/google/protobuf/generated_enum_util.cc
+ ${protobuf_source_dir}/src/google/protobuf/generated_message_table_driven_lite.cc
+ ${protobuf_source_dir}/src/google/protobuf/generated_message_util.cc
+ ${protobuf_source_dir}/src/google/protobuf/implicit_weak_message.cc
+ ${protobuf_source_dir}/src/google/protobuf/io/coded_stream.cc
+ ${protobuf_source_dir}/src/google/protobuf/io/io_win32.cc
+ ${protobuf_source_dir}/src/google/protobuf/io/strtod.cc
+ ${protobuf_source_dir}/src/google/protobuf/io/zero_copy_stream.cc
+ ${protobuf_source_dir}/src/google/protobuf/io/zero_copy_stream_impl.cc
+ ${protobuf_source_dir}/src/google/protobuf/io/zero_copy_stream_impl_lite.cc
+ ${protobuf_source_dir}/src/google/protobuf/map.cc
+ ${protobuf_source_dir}/src/google/protobuf/message_lite.cc
+ ${protobuf_source_dir}/src/google/protobuf/parse_context.cc
+ ${protobuf_source_dir}/src/google/protobuf/repeated_field.cc
+ ${protobuf_source_dir}/src/google/protobuf/stubs/bytestream.cc
+ ${protobuf_source_dir}/src/google/protobuf/stubs/common.cc
+ ${protobuf_source_dir}/src/google/protobuf/stubs/int128.cc
+ ${protobuf_source_dir}/src/google/protobuf/stubs/status.cc
+ ${protobuf_source_dir}/src/google/protobuf/stubs/statusor.cc
+ ${protobuf_source_dir}/src/google/protobuf/stubs/stringpiece.cc
+ ${protobuf_source_dir}/src/google/protobuf/stubs/stringprintf.cc
+ ${protobuf_source_dir}/src/google/protobuf/stubs/structurally_valid.cc
+ ${protobuf_source_dir}/src/google/protobuf/stubs/strutil.cc
+ ${protobuf_source_dir}/src/google/protobuf/stubs/time.cc
+ ${protobuf_source_dir}/src/google/protobuf/wire_format_lite.cc
+)
+
+add_library(libprotobuf-lite ${libprotobuf_lite_files})
+target_link_libraries(libprotobuf-lite pthread)
+if(${CMAKE_SYSTEM_NAME} STREQUAL "Android")
+ target_link_libraries(libprotobuf-lite log)
endif()
+target_include_directories(libprotobuf-lite SYSTEM PUBLIC ${protobuf_source_dir}/src)
+add_library(protobuf::libprotobuf-lite ALIAS libprotobuf-lite)
-# We don't want to stop compilation on warnings in protobuf's headers.
-# The following line overrides the value assigned by the command target_include_directories() in libprotobuf.cmake
-set_property(TARGET libprotobuf PROPERTY INTERFACE_SYSTEM_INCLUDE_DIRECTORIES "${protobuf_SOURCE_DIR}/src")
-if (CMAKE_CROSSCOMPILING)
+set(libprotobuf_files
+ ${protobuf_source_dir}/src/google/protobuf/any.cc
+ ${protobuf_source_dir}/src/google/protobuf/any.pb.cc
+ ${protobuf_source_dir}/src/google/protobuf/api.pb.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/importer.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/parser.cc
+ ${protobuf_source_dir}/src/google/protobuf/descriptor.cc
+ ${protobuf_source_dir}/src/google/protobuf/descriptor.pb.cc
+ ${protobuf_source_dir}/src/google/protobuf/descriptor_database.cc
+ ${protobuf_source_dir}/src/google/protobuf/duration.pb.cc
+ ${protobuf_source_dir}/src/google/protobuf/dynamic_message.cc
+ ${protobuf_source_dir}/src/google/protobuf/empty.pb.cc
+ ${protobuf_source_dir}/src/google/protobuf/extension_set_heavy.cc
+ ${protobuf_source_dir}/src/google/protobuf/field_mask.pb.cc
+ ${protobuf_source_dir}/src/google/protobuf/generated_message_reflection.cc
+ ${protobuf_source_dir}/src/google/protobuf/generated_message_table_driven.cc
+ ${protobuf_source_dir}/src/google/protobuf/io/gzip_stream.cc
+ ${protobuf_source_dir}/src/google/protobuf/io/printer.cc
+ ${protobuf_source_dir}/src/google/protobuf/io/tokenizer.cc
+ ${protobuf_source_dir}/src/google/protobuf/map_field.cc
+ ${protobuf_source_dir}/src/google/protobuf/message.cc
+ ${protobuf_source_dir}/src/google/protobuf/reflection_ops.cc
+ ${protobuf_source_dir}/src/google/protobuf/service.cc
+ ${protobuf_source_dir}/src/google/protobuf/source_context.pb.cc
+ ${protobuf_source_dir}/src/google/protobuf/struct.pb.cc
+ ${protobuf_source_dir}/src/google/protobuf/stubs/substitute.cc
+ ${protobuf_source_dir}/src/google/protobuf/text_format.cc
+ ${protobuf_source_dir}/src/google/protobuf/timestamp.pb.cc
+ ${protobuf_source_dir}/src/google/protobuf/type.pb.cc
+ ${protobuf_source_dir}/src/google/protobuf/unknown_field_set.cc
+ ${protobuf_source_dir}/src/google/protobuf/util/delimited_message_util.cc
+ ${protobuf_source_dir}/src/google/protobuf/util/field_comparator.cc
+ ${protobuf_source_dir}/src/google/protobuf/util/field_mask_util.cc
+ ${protobuf_source_dir}/src/google/protobuf/util/internal/datapiece.cc
+ ${protobuf_source_dir}/src/google/protobuf/util/internal/default_value_objectwriter.cc
+ ${protobuf_source_dir}/src/google/protobuf/util/internal/error_listener.cc
+ ${protobuf_source_dir}/src/google/protobuf/util/internal/field_mask_utility.cc
+ ${protobuf_source_dir}/src/google/protobuf/util/internal/json_escaping.cc
+ ${protobuf_source_dir}/src/google/protobuf/util/internal/json_objectwriter.cc
+ ${protobuf_source_dir}/src/google/protobuf/util/internal/json_stream_parser.cc
+ ${protobuf_source_dir}/src/google/protobuf/util/internal/object_writer.cc
+ ${protobuf_source_dir}/src/google/protobuf/util/internal/proto_writer.cc
+ ${protobuf_source_dir}/src/google/protobuf/util/internal/protostream_objectsource.cc
+ ${protobuf_source_dir}/src/google/protobuf/util/internal/protostream_objectwriter.cc
+ ${protobuf_source_dir}/src/google/protobuf/util/internal/type_info.cc
+ ${protobuf_source_dir}/src/google/protobuf/util/internal/type_info_test_helper.cc
+ ${protobuf_source_dir}/src/google/protobuf/util/internal/utility.cc
+ ${protobuf_source_dir}/src/google/protobuf/util/json_util.cc
+ ${protobuf_source_dir}/src/google/protobuf/util/message_differencer.cc
+ ${protobuf_source_dir}/src/google/protobuf/util/time_util.cc
+ ${protobuf_source_dir}/src/google/protobuf/util/type_resolver_util.cc
+ ${protobuf_source_dir}/src/google/protobuf/wire_format.cc
+ ${protobuf_source_dir}/src/google/protobuf/wrappers.pb.cc
+)
+
+add_library(libprotobuf ${libprotobuf_lite_files} ${libprotobuf_files})
+if (ENABLE_FUZZING)
+ target_compile_options(libprotobuf PRIVATE "-fsanitize-recover=all")
+endif()
+target_link_libraries(libprotobuf pthread)
+target_link_libraries(libprotobuf ${ZLIB_LIBRARIES})
+if(${CMAKE_SYSTEM_NAME} STREQUAL "Android")
+ target_link_libraries(libprotobuf log)
+endif()
+target_include_directories(libprotobuf SYSTEM PUBLIC ${protobuf_source_dir}/src)
+add_library(protobuf::libprotobuf ALIAS libprotobuf)
+
+
+set(libprotoc_files
+ ${protobuf_source_dir}/src/google/protobuf/compiler/code_generator.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/command_line_interface.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_enum.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_enum_field.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_extension.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_field.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_file.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_generator.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_helpers.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_map_field.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_message.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_message_field.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_padding_optimizer.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_parse_function_generator.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_primitive_field.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_service.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_string_field.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_doc_comment.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_enum.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_enum_field.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_field_base.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_generator.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_helpers.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_map_field.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_message.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_message_field.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_primitive_field.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_reflection_class.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_repeated_enum_field.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_repeated_message_field.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_repeated_primitive_field.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_source_generator_base.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_wrapper_field.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_context.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_doc_comment.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_enum.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_enum_field.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_enum_field_lite.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_enum_lite.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_extension.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_extension_lite.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_field.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_file.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_generator.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_generator_factory.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_helpers.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_kotlin_generator.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_map_field.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_map_field_lite.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_message.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_message_builder.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_message_builder_lite.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_message_field.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_message_field_lite.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_message_lite.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_name_resolver.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_primitive_field.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_primitive_field_lite.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_service.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_shared_code_generator.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_string_field.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_string_field_lite.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/js/js_generator.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/js/well_known_types_embed.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_enum.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_enum_field.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_extension.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_field.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_file.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_generator.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_helpers.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_map_field.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_message.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_message_field.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_oneof.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_primitive_field.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/php/php_generator.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/plugin.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/plugin.pb.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/python/python_generator.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/ruby/ruby_generator.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/subprocess.cc
+ ${protobuf_source_dir}/src/google/protobuf/compiler/zip_writer.cc
+)
+
+add_library(libprotoc ${libprotoc_files})
+target_link_libraries(libprotoc libprotobuf)
+add_library(protobuf::libprotoc ALIAS libprotoc)
+
+set(protoc_files ${protobuf_source_dir}/src/google/protobuf/compiler/main.cc)
+
+if (CMAKE_HOST_SYSTEM_NAME STREQUAL CMAKE_SYSTEM_NAME
+ AND CMAKE_HOST_SYSTEM_PROCESSOR STREQUAL CMAKE_SYSTEM_PROCESSOR)
+
+ add_executable(protoc ${protoc_files})
+ target_link_libraries(protoc libprotoc libprotobuf pthread)
+ add_executable(protobuf::protoc ALIAS protoc)
+
+ if (ENABLE_FUZZING)
+ # `protoc` will be built with a sanitizer and could fail during the ClickHouse build.
+ # This easily reproduces in the oss-fuzz build pipeline.
+ # To avoid it we could try to build `protoc` without any sanitizer using `-fno-sanitize=all`, but
+ # in that case we would face linker errors, because libcxx will still be built with the sanitizer.
+ # So we simply suppress all of these failures with a combination of this flag and the environment variable
+ # export MSAN_OPTIONS=exit_code=0
+ target_compile_options(protoc PRIVATE "-fsanitize-recover=all")
+ endif()
+else ()
# Build 'protoc' for host arch
- set (PROTOC_BUILD_DIR "${protobuf_BINARY_DIR}/build")
+ set (PROTOC_BUILD_DIR "${protobuf_binary_dir}/build")
if (NOT EXISTS "${PROTOC_BUILD_DIR}/protoc")
@@ -53,7 +252,7 @@ if (CMAKE_CROSSCOMPILING)
"-Dprotobuf_BUILD_CONFORMANCE=0"
"-Dprotobuf_BUILD_EXAMPLES=0"
"-Dprotobuf_BUILD_PROTOC_BINARIES=1"
- "${protobuf_SOURCE_DIR}/cmake"
+ "${protobuf_source_dir}/cmake"
WORKING_DIRECTORY "${PROTOC_BUILD_DIR}"
COMMAND_ECHO STDOUT)
@@ -78,7 +277,7 @@ if (CMAKE_CROSSCOMPILING)
# -Dprotobuf_BUILD_CONFORMANCE=0
# -Dprotobuf_BUILD_EXAMPLES=0
# -Dprotobuf_BUILD_PROTOC_BINARIES=1
-# "${protobuf_SOURCE_DIR}/cmake"
+# "${protobuf_source_dir}/cmake"
#
# DEPENDS "${PROTOC_BUILD_DIR}"
# WORKING_DIRECTORY "${PROTOC_BUILD_DIR}"
@@ -97,5 +296,4 @@ if (CMAKE_CROSSCOMPILING)
add_executable(protoc IMPORTED GLOBAL)
set_target_properties (protoc PROPERTIES IMPORTED_LOCATION "${PROTOC_BUILD_DIR}/protoc")
add_dependencies(protoc "${PROTOC_BUILD_DIR}/protoc")
-
endif ()
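The branch exists because a cross-compiled protoc cannot run on the build machine, so a native copy is compiled into PROTOC_BUILD_DIR and exposed as an imported target. A minimal sketch of that imported-tool pattern (location hypothetical):

    add_executable(protoc IMPORTED GLOBAL)
    set_target_properties(protoc PROPERTIES IMPORTED_LOCATION "/path/to/host/protoc")

Either branch yields a target named protoc, so custom commands can invoke $<TARGET_FILE:protoc> without caring which one was taken.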
diff --git a/contrib/re2_st/CMakeLists.txt b/contrib/re2-cmake/CMakeLists.txt
similarity index 56%
rename from contrib/re2_st/CMakeLists.txt
rename to contrib/re2-cmake/CMakeLists.txt
index c7243346988..ff8b3c43472 100644
--- a/contrib/re2_st/CMakeLists.txt
+++ b/contrib/re2-cmake/CMakeLists.txt
@@ -1,24 +1,54 @@
+# Copyright 2015 The RE2 Authors. All Rights Reserved.
+# Use of this source code is governed by a BSD-style
+# license that can be found in the LICENSE file.
+
+# This file was edited for ClickHouse
+
+set(SRC_DIR "${ClickHouse_SOURCE_DIR}/contrib/re2")
+
+set(RE2_SOURCES
+ ${SRC_DIR}/re2/bitstate.cc
+ ${SRC_DIR}/re2/compile.cc
+ ${SRC_DIR}/re2/dfa.cc
+ ${SRC_DIR}/re2/filtered_re2.cc
+ ${SRC_DIR}/re2/mimics_pcre.cc
+ ${SRC_DIR}/re2/nfa.cc
+ ${SRC_DIR}/re2/onepass.cc
+ ${SRC_DIR}/re2/parse.cc
+ ${SRC_DIR}/re2/perl_groups.cc
+ ${SRC_DIR}/re2/prefilter.cc
+ ${SRC_DIR}/re2/prefilter_tree.cc
+ ${SRC_DIR}/re2/prog.cc
+ ${SRC_DIR}/re2/re2.cc
+ ${SRC_DIR}/re2/regexp.cc
+ ${SRC_DIR}/re2/set.cc
+ ${SRC_DIR}/re2/simplify.cc
+ ${SRC_DIR}/re2/stringpiece.cc
+ ${SRC_DIR}/re2/tostring.cc
+ ${SRC_DIR}/re2/unicode_casefold.cc
+ ${SRC_DIR}/re2/unicode_groups.cc
+ ${SRC_DIR}/util/rune.cc
+ ${SRC_DIR}/util/strutil.cc
+)
+
+add_library(re2 ${RE2_SOURCES})
+target_include_directories(re2 PUBLIC "${SRC_DIR}")
+
+
# Building re2 which is thread-safe and re2_st which is not.
# re2 changes its state while matching a regular expression, e.g. it creates a temporary DFA.
# It uses an RWLock to process the same regular expression object from different threads.
# In order to avoid redundant locks in some cases, we use the non-thread-safe version of the library (re2_st).
-set (RE2_SOURCE_DIR ${ClickHouse_SOURCE_DIR}/contrib/re2/)
-
-get_target_property (RE2_SOURCES_ re2 SOURCES)
-foreach (src ${RE2_SOURCES_})
- list(APPEND RE2_ST_SOURCES ${RE2_SOURCE_DIR}/${src})
-endforeach ()
-
-add_library(re2_st ${RE2_ST_SOURCES})
+add_library(re2_st ${RE2_SOURCES})
target_compile_definitions (re2_st PRIVATE NDEBUG NO_THREADS re2=re2_st)
target_include_directories (re2_st PRIVATE .)
-target_include_directories (re2_st SYSTEM PUBLIC ${CMAKE_CURRENT_BINARY_DIR} ${RE2_SOURCE_DIR})
+target_include_directories (re2_st SYSTEM PUBLIC ${CMAKE_CURRENT_BINARY_DIR} ${SRC_DIR})
file (MAKE_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/re2_st)
foreach (FILENAME filtered_re2.h re2.h set.h stringpiece.h)
add_custom_command (OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/re2_st/${FILENAME}"
- COMMAND ${CMAKE_COMMAND} -DSOURCE_FILENAME="${RE2_SOURCE_DIR}/re2/${FILENAME}"
+ COMMAND ${CMAKE_COMMAND} -DSOURCE_FILENAME="${SRC_DIR}/re2/${FILENAME}"
-DTARGET_FILENAME="${CMAKE_CURRENT_BINARY_DIR}/re2_st/${FILENAME}"
-P "${CMAKE_CURRENT_SOURCE_DIR}/re2_transform.cmake"
COMMENT "Creating ${FILENAME} for re2_st library.")
@@ -29,7 +59,7 @@ endforeach ()
file (MAKE_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/util)
foreach (FILENAME mutex.h)
add_custom_command (OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/util/${FILENAME}"
- COMMAND ${CMAKE_COMMAND} -DSOURCE_FILENAME="${RE2_SOURCE_DIR}/util/${FILENAME}"
+ COMMAND ${CMAKE_COMMAND} -DSOURCE_FILENAME="${SRC_DIR}/util/${FILENAME}"
-DTARGET_FILENAME="${CMAKE_CURRENT_BINARY_DIR}/util/${FILENAME}"
-P "${CMAKE_CURRENT_SOURCE_DIR}/re2_transform.cmake"
COMMENT "Creating ${FILENAME} for re2_st library.")
diff --git a/contrib/re2_st/re2_transform.cmake b/contrib/re2-cmake/re2_transform.cmake
similarity index 100%
rename from contrib/re2_st/re2_transform.cmake
rename to contrib/re2-cmake/re2_transform.cmake
diff --git a/contrib/replxx b/contrib/replxx
index b0c266c2d8a..f019cba7ea1 160000
--- a/contrib/replxx
+++ b/contrib/replxx
@@ -1 +1 @@
-Subproject commit b0c266c2d8a835784181e17292b421848c78c6b8
+Subproject commit f019cba7ea1bcd1b4feb7826f28ed57fb581b04c
diff --git a/contrib/rocksdb b/contrib/rocksdb
index 296c1b8b95f..e7c2b2f7bcf 160000
--- a/contrib/rocksdb
+++ b/contrib/rocksdb
@@ -1 +1 @@
-Subproject commit 296c1b8b95fd448b8097a1b2cc9f704ff4a73a2c
+Subproject commit e7c2b2f7bcf3b4b33892a1a6d25c32a93edfbdb9
diff --git a/contrib/rocksdb-cmake/CMakeLists.txt b/contrib/rocksdb-cmake/CMakeLists.txt
index 4c4cdf07544..db0b3942b79 100644
--- a/contrib/rocksdb-cmake/CMakeLists.txt
+++ b/contrib/rocksdb-cmake/CMakeLists.txt
@@ -190,7 +190,7 @@ if(HAVE_PTHREAD_MUTEX_ADAPTIVE_NP)
endif()
include(CheckCXXSymbolExists)
-if(CMAKE_SYSTEM_NAME MATCHES "^FreeBSD")
+if (OS_FREEBSD)
check_cxx_symbol_exists(malloc_usable_size "${ROCKSDB_SOURCE_DIR}/malloc_np.h" HAVE_MALLOC_USABLE_SIZE)
else()
check_cxx_symbol_exists(malloc_usable_size "${ROCKSDB_SOURCE_DIR}/malloc.h" HAVE_MALLOC_USABLE_SIZE)
@@ -199,20 +199,14 @@ if(HAVE_MALLOC_USABLE_SIZE)
add_definitions(-DROCKSDB_MALLOC_USABLE_SIZE)
endif()
-check_cxx_symbol_exists(sched_getcpu sched.h HAVE_SCHED_GETCPU)
-if(HAVE_SCHED_GETCPU)
+if (OS_LINUX)
add_definitions(-DROCKSDB_SCHED_GETCPU_PRESENT)
+ add_definitions(-DROCKSDB_AUXV_SYSAUXV_PRESENT)
+ add_definitions(-DROCKSDB_AUXV_GETAUXVAL_PRESENT)
+elseif (OS_FREEBSD)
+ add_definitions(-DROCKSDB_AUXV_SYSAUXV_PRESENT)
endif()
-check_cxx_symbol_exists(getauxval auvx.h HAVE_AUXV_GETAUXVAL)
-if(HAVE_AUXV_GETAUXVAL)
- add_definitions(-DROCKSDB_AUXV_GETAUXVAL_PRESENT)
-endif()
-
-check_cxx_symbol_exists(elf_aux_info sys/auxv.h HAVE_ELF_AUX_INFO)
-if(HAVE_ELF_AUX_INFO)
- add_definitions(-DROCKSDB_AUXV_GETAUXVAL_PRESENT)
-endif()
include_directories(${ROCKSDB_SOURCE_DIR})
include_directories("${ROCKSDB_SOURCE_DIR}/include")
diff --git a/contrib/sentry-native b/contrib/sentry-native
index 94644e92f0a..f431047ac8d 160000
--- a/contrib/sentry-native
+++ b/contrib/sentry-native
@@ -1 +1 @@
-Subproject commit 94644e92f0a3ff14bd35ed902a8622a2d15f7be4
+Subproject commit f431047ac8da13179c488018dddf1c0d0771a997
diff --git a/contrib/sentry-native-cmake/CMakeLists.txt b/contrib/sentry-native-cmake/CMakeLists.txt
new file mode 100644
index 00000000000..3b0057afe94
--- /dev/null
+++ b/contrib/sentry-native-cmake/CMakeLists.txt
@@ -0,0 +1,47 @@
+set (SRC_DIR "${ClickHouse_SOURCE_DIR}/contrib/sentry-native")
+
+set (SRCS
+ ${SRC_DIR}/vendor/mpack.c
+ ${SRC_DIR}/src/sentry_alloc.c
+ ${SRC_DIR}/src/sentry_backend.c
+ ${SRC_DIR}/src/sentry_core.c
+ ${SRC_DIR}/src/sentry_database.c
+ ${SRC_DIR}/src/sentry_envelope.c
+ ${SRC_DIR}/src/sentry_json.c
+ ${SRC_DIR}/src/sentry_logger.c
+ ${SRC_DIR}/src/sentry_options.c
+ ${SRC_DIR}/src/sentry_random.c
+ ${SRC_DIR}/src/sentry_ratelimiter.c
+ ${SRC_DIR}/src/sentry_scope.c
+ ${SRC_DIR}/src/sentry_session.c
+ ${SRC_DIR}/src/sentry_slice.c
+ ${SRC_DIR}/src/sentry_string.c
+ ${SRC_DIR}/src/sentry_sync.c
+ ${SRC_DIR}/src/sentry_transport.c
+ ${SRC_DIR}/src/sentry_utils.c
+ ${SRC_DIR}/src/sentry_uuid.c
+ ${SRC_DIR}/src/sentry_value.c
+ ${SRC_DIR}/src/path/sentry_path.c
+ ${SRC_DIR}/src/transports/sentry_disk_transport.c
+ ${SRC_DIR}/src/transports/sentry_function_transport.c
+ ${SRC_DIR}/src/unwinder/sentry_unwinder.c
+ ${SRC_DIR}/src/sentry_unix_pageallocator.c
+ ${SRC_DIR}/src/path/sentry_path_unix.c
+ ${SRC_DIR}/src/symbolizer/sentry_symbolizer_unix.c
+ ${SRC_DIR}/src/modulefinder/sentry_modulefinder_linux.c
+ ${SRC_DIR}/src/transports/sentry_transport_curl.c
+ ${SRC_DIR}/src/backends/sentry_backend_none.c
+)
+
+add_library(sentry ${SRCS})
+add_library(sentry::sentry ALIAS sentry)
+
+if(BUILD_SHARED_LIBS)
+ target_compile_definitions(sentry PRIVATE SENTRY_BUILD_SHARED)
+else()
+ target_compile_definitions(sentry PUBLIC SENTRY_BUILD_STATIC)
+endif()
+
+target_link_libraries(sentry PRIVATE curl pthread)
+target_include_directories(sentry PUBLIC "${SRC_DIR}/include" PRIVATE "${SRC_DIR}/src")
+target_compile_definitions(sentry PRIVATE SENTRY_WITH_INPROC_BACKEND SIZEOF_LONG=8)
diff --git a/contrib/snappy-cmake/CMakeLists.txt b/contrib/snappy-cmake/CMakeLists.txt
index 0407e8bb30d..750060db93f 100644
--- a/contrib/snappy-cmake/CMakeLists.txt
+++ b/contrib/snappy-cmake/CMakeLists.txt
@@ -1,16 +1,13 @@
set (SOURCE_DIR "${CMAKE_SOURCE_DIR}/contrib/snappy")
-set(SNAPPY_IS_BIG_ENDIAN 0)
+set (SNAPPY_IS_BIG_ENDIAN 0)
-include(CheckIncludeFile)
-check_include_file("byteswap.h" HAVE_BYTESWAP_H)
-check_include_file("sys/endian.h" HAVE_SYS_ENDIAN_H)
-check_include_file("sys/mman.h" HAVE_SYS_MMAN_H)
-check_include_file("sys/resource.h" HAVE_SYS_RESOURCE_H)
-check_include_file("sys/time.h" HAVE_SYS_TIME_H)
-check_include_file("sys/uio.h" HAVE_SYS_UIO_H)
-check_include_file("unistd.h" HAVE_UNISTD_H)
-check_include_file("windows.h" HAVE_WINDOWS_H)
+set (HAVE_BYTESWAP_H 1)
+set (HAVE_SYS_MMAN_H 1)
+set (HAVE_SYS_RESOURCE_H 1)
+set (HAVE_SYS_TIME_H 1)
+set (HAVE_SYS_UIO_H 1)
+set (HAVE_UNISTD_H 1)
set (HAVE_BUILTIN_EXPECT 1)
set (HAVE_BUILTIN_CTZ 1)
diff --git a/contrib/stats b/contrib/stats
deleted file mode 160000
index b6dd459c10a..00000000000
--- a/contrib/stats
+++ /dev/null
@@ -1 +0,0 @@
-Subproject commit b6dd459c10a88c7ea04693c007e9e35820c5d9ad
diff --git a/contrib/sysroot b/contrib/sysroot
index 6172893931e..4ef348b7f30 160000
--- a/contrib/sysroot
+++ b/contrib/sysroot
@@ -1 +1 @@
-Subproject commit 6172893931e19b028f9cabb7095a44361be863df
+Subproject commit 4ef348b7f30f2ad5b02b266268b3c948e51ad457
diff --git a/contrib/xz-cmake/CMakeLists.txt b/contrib/xz-cmake/CMakeLists.txt
new file mode 100644
index 00000000000..af8ebb0ebc1
--- /dev/null
+++ b/contrib/xz-cmake/CMakeLists.txt
@@ -0,0 +1,263 @@
+set (SRC_DIR "${ClickHouse_SOURCE_DIR}/contrib/xz")
+
+# Author: Lasse Collin
+#
+# This file has been put into the public domain.
+# You can do whatever you want with this file.
+#
+# The file was edited for ClickHouse
+
+# Get the package version from version.h into XZ_VERSION variable.
+file(READ ${SRC_DIR}/src/liblzma/api/lzma/version.h XZ_VERSION)
+string(REGEX REPLACE
+"^.*\n\
+#define LZMA_VERSION_MAJOR ([0-9]+)\n\
+#define LZMA_VERSION_MINOR ([0-9]+)\n\
+#define LZMA_VERSION_PATCH ([0-9]+)\n\
+.*$"
+ "\\1.\\2.\\3" XZ_VERSION "${XZ_VERSION}")
+
+# Definitions common to all targets:
+add_compile_definitions(
+ # Package info:
+ PACKAGE_NAME="XZ Utils"
+ PACKAGE_BUGREPORT="lasse.collin@tukaani.org"
+ PACKAGE_URL="https://tukaani.org/xz/"
+
+ # Features:
+ HAVE_CHECK_CRC32
+ HAVE_CHECK_CRC64
+ HAVE_CHECK_SHA256
+ HAVE_DECODERS
+ HAVE_DECODER_ARM
+ HAVE_DECODER_ARMTHUMB
+ HAVE_DECODER_DELTA
+ HAVE_DECODER_IA64
+ HAVE_DECODER_LZMA1
+ HAVE_DECODER_LZMA2
+ HAVE_DECODER_POWERPC
+ HAVE_DECODER_SPARC
+ HAVE_DECODER_X86
+ HAVE_ENCODERS
+ HAVE_ENCODER_ARM
+ HAVE_ENCODER_ARMTHUMB
+ HAVE_ENCODER_DELTA
+ HAVE_ENCODER_IA64
+ HAVE_ENCODER_LZMA1
+ HAVE_ENCODER_LZMA2
+ HAVE_ENCODER_POWERPC
+ HAVE_ENCODER_SPARC
+ HAVE_ENCODER_X86
+ HAVE_MF_BT2
+ HAVE_MF_BT3
+ HAVE_MF_BT4
+ HAVE_MF_HC3
+ HAVE_MF_HC4
+
+ # Standard headers and types are available:
+ HAVE_STDBOOL_H
+ HAVE__BOOL
+ HAVE_STDINT_H
+ HAVE_INTTYPES_H
+
+ HAVE___BUILTIN_BSWAPXX
+ HAVE___BUILTIN_ASSUME_ALIGNED
+
+ _GNU_SOURCE
+ __EXTENSIONS__
+ _POSIX_PTHREAD_SEMANTICS
+ _TANDEM_SOURCE
+ _ALL_SOURCE
+
+ HAVE_CLOCK_GETTIME=1
+ HAVE_DECL_CLOCK_MONOTONIC=1
+
+ HAVE_PTHREAD_CONDATTR_SETCLOCK
+ MYTHREAD_POSIX
+)
+
+if (OS_LINUX)
+ add_compile_definitions(
+ TUKLIB_CPUCORES_SCHED_GETAFFINITY
+ TUKLIB_PHYSMEM_SYSCONF)
+elseif (OS_FREEBSD)
+ add_compile_definitions(
+ TUKLIB_CPUCORES_CPUSET
+ TUKLIB_PHYSMEM_SYSCTL)
+elseif (OS_DARWIN)
+ add_compile_definitions(
+ TUKLIB_CPUCORES_SYSCTL
+ TUKLIB_PHYSMEM_SYSCTL)
+endif ()
+
+if (ARCH_AMD64 OR ARCH_AARCH64)
+ add_compile_definitions(TUKLIB_FAST_UNALIGNED_ACCESS=1)
+endif ()
+
+find_package(Threads REQUIRED)
+
+
+add_library(liblzma
+ ${SRC_DIR}/src/common/mythread.h
+ ${SRC_DIR}/src/common/sysdefs.h
+ ${SRC_DIR}/src/common/tuklib_common.h
+ ${SRC_DIR}/src/common/tuklib_config.h
+ ${SRC_DIR}/src/common/tuklib_cpucores.c
+ ${SRC_DIR}/src/common/tuklib_cpucores.h
+ ${SRC_DIR}/src/common/tuklib_integer.h
+ ${SRC_DIR}/src/common/tuklib_physmem.c
+ ${SRC_DIR}/src/common/tuklib_physmem.h
+ ${SRC_DIR}/src/liblzma/api/lzma.h
+ ${SRC_DIR}/src/liblzma/api/lzma/base.h
+ ${SRC_DIR}/src/liblzma/api/lzma/bcj.h
+ ${SRC_DIR}/src/liblzma/api/lzma/block.h
+ ${SRC_DIR}/src/liblzma/api/lzma/check.h
+ ${SRC_DIR}/src/liblzma/api/lzma/container.h
+ ${SRC_DIR}/src/liblzma/api/lzma/delta.h
+ ${SRC_DIR}/src/liblzma/api/lzma/filter.h
+ ${SRC_DIR}/src/liblzma/api/lzma/hardware.h
+ ${SRC_DIR}/src/liblzma/api/lzma/index.h
+ ${SRC_DIR}/src/liblzma/api/lzma/index_hash.h
+ ${SRC_DIR}/src/liblzma/api/lzma/lzma12.h
+ ${SRC_DIR}/src/liblzma/api/lzma/stream_flags.h
+ ${SRC_DIR}/src/liblzma/api/lzma/version.h
+ ${SRC_DIR}/src/liblzma/api/lzma/vli.h
+ ${SRC_DIR}/src/liblzma/check/check.c
+ ${SRC_DIR}/src/liblzma/check/check.h
+ ${SRC_DIR}/src/liblzma/check/crc32_fast.c
+ ${SRC_DIR}/src/liblzma/check/crc32_table.c
+ ${SRC_DIR}/src/liblzma/check/crc32_table_be.h
+ ${SRC_DIR}/src/liblzma/check/crc32_table_le.h
+ ${SRC_DIR}/src/liblzma/check/crc64_fast.c
+ ${SRC_DIR}/src/liblzma/check/crc64_table.c
+ ${SRC_DIR}/src/liblzma/check/crc64_table_be.h
+ ${SRC_DIR}/src/liblzma/check/crc64_table_le.h
+ ${SRC_DIR}/src/liblzma/check/crc_macros.h
+ ${SRC_DIR}/src/liblzma/check/sha256.c
+ ${SRC_DIR}/src/liblzma/common/alone_decoder.c
+ ${SRC_DIR}/src/liblzma/common/alone_decoder.h
+ ${SRC_DIR}/src/liblzma/common/alone_encoder.c
+ ${SRC_DIR}/src/liblzma/common/auto_decoder.c
+ ${SRC_DIR}/src/liblzma/common/block_buffer_decoder.c
+ ${SRC_DIR}/src/liblzma/common/block_buffer_encoder.c
+ ${SRC_DIR}/src/liblzma/common/block_buffer_encoder.h
+ ${SRC_DIR}/src/liblzma/common/block_decoder.c
+ ${SRC_DIR}/src/liblzma/common/block_decoder.h
+ ${SRC_DIR}/src/liblzma/common/block_encoder.c
+ ${SRC_DIR}/src/liblzma/common/block_encoder.h
+ ${SRC_DIR}/src/liblzma/common/block_header_decoder.c
+ ${SRC_DIR}/src/liblzma/common/block_header_encoder.c
+ ${SRC_DIR}/src/liblzma/common/block_util.c
+ ${SRC_DIR}/src/liblzma/common/common.c
+ ${SRC_DIR}/src/liblzma/common/common.h
+ ${SRC_DIR}/src/liblzma/common/easy_buffer_encoder.c
+ ${SRC_DIR}/src/liblzma/common/easy_decoder_memusage.c
+ ${SRC_DIR}/src/liblzma/common/easy_encoder.c
+ ${SRC_DIR}/src/liblzma/common/easy_encoder_memusage.c
+ ${SRC_DIR}/src/liblzma/common/easy_preset.c
+ ${SRC_DIR}/src/liblzma/common/easy_preset.h
+ ${SRC_DIR}/src/liblzma/common/file_info.c
+ ${SRC_DIR}/src/liblzma/common/filter_buffer_decoder.c
+ ${SRC_DIR}/src/liblzma/common/filter_buffer_encoder.c
+ ${SRC_DIR}/src/liblzma/common/filter_common.c
+ ${SRC_DIR}/src/liblzma/common/filter_common.h
+ ${SRC_DIR}/src/liblzma/common/filter_decoder.c
+ ${SRC_DIR}/src/liblzma/common/filter_decoder.h
+ ${SRC_DIR}/src/liblzma/common/filter_encoder.c
+ ${SRC_DIR}/src/liblzma/common/filter_encoder.h
+ ${SRC_DIR}/src/liblzma/common/filter_flags_decoder.c
+ ${SRC_DIR}/src/liblzma/common/filter_flags_encoder.c
+ ${SRC_DIR}/src/liblzma/common/hardware_cputhreads.c
+ ${SRC_DIR}/src/liblzma/common/hardware_physmem.c
+ ${SRC_DIR}/src/liblzma/common/index.c
+ ${SRC_DIR}/src/liblzma/common/index.h
+ ${SRC_DIR}/src/liblzma/common/index_decoder.c
+ ${SRC_DIR}/src/liblzma/common/index_decoder.h
+ ${SRC_DIR}/src/liblzma/common/index_encoder.c
+ ${SRC_DIR}/src/liblzma/common/index_encoder.h
+ ${SRC_DIR}/src/liblzma/common/index_hash.c
+ ${SRC_DIR}/src/liblzma/common/memcmplen.h
+ ${SRC_DIR}/src/liblzma/common/outqueue.c
+ ${SRC_DIR}/src/liblzma/common/outqueue.h
+ ${SRC_DIR}/src/liblzma/common/stream_buffer_decoder.c
+ ${SRC_DIR}/src/liblzma/common/stream_buffer_encoder.c
+ ${SRC_DIR}/src/liblzma/common/stream_decoder.c
+ ${SRC_DIR}/src/liblzma/common/stream_decoder.h
+ ${SRC_DIR}/src/liblzma/common/stream_encoder.c
+ ${SRC_DIR}/src/liblzma/common/stream_encoder_mt.c
+ ${SRC_DIR}/src/liblzma/common/stream_flags_common.c
+ ${SRC_DIR}/src/liblzma/common/stream_flags_common.h
+ ${SRC_DIR}/src/liblzma/common/stream_flags_decoder.c
+ ${SRC_DIR}/src/liblzma/common/stream_flags_encoder.c
+ ${SRC_DIR}/src/liblzma/common/vli_decoder.c
+ ${SRC_DIR}/src/liblzma/common/vli_encoder.c
+ ${SRC_DIR}/src/liblzma/common/vli_size.c
+ ${SRC_DIR}/src/liblzma/delta/delta_common.c
+ ${SRC_DIR}/src/liblzma/delta/delta_common.h
+ ${SRC_DIR}/src/liblzma/delta/delta_decoder.c
+ ${SRC_DIR}/src/liblzma/delta/delta_decoder.h
+ ${SRC_DIR}/src/liblzma/delta/delta_encoder.c
+ ${SRC_DIR}/src/liblzma/delta/delta_encoder.h
+ ${SRC_DIR}/src/liblzma/delta/delta_private.h
+ ${SRC_DIR}/src/liblzma/lz/lz_decoder.c
+ ${SRC_DIR}/src/liblzma/lz/lz_decoder.h
+ ${SRC_DIR}/src/liblzma/lz/lz_encoder.c
+ ${SRC_DIR}/src/liblzma/lz/lz_encoder.h
+ ${SRC_DIR}/src/liblzma/lz/lz_encoder_hash.h
+ ${SRC_DIR}/src/liblzma/lz/lz_encoder_hash_table.h
+ ${SRC_DIR}/src/liblzma/lz/lz_encoder_mf.c
+ ${SRC_DIR}/src/liblzma/lzma/fastpos.h
+ ${SRC_DIR}/src/liblzma/lzma/fastpos_table.c
+ ${SRC_DIR}/src/liblzma/lzma/lzma2_decoder.c
+ ${SRC_DIR}/src/liblzma/lzma/lzma2_decoder.h
+ ${SRC_DIR}/src/liblzma/lzma/lzma2_encoder.c
+ ${SRC_DIR}/src/liblzma/lzma/lzma2_encoder.h
+ ${SRC_DIR}/src/liblzma/lzma/lzma_common.h
+ ${SRC_DIR}/src/liblzma/lzma/lzma_decoder.c
+ ${SRC_DIR}/src/liblzma/lzma/lzma_decoder.h
+ ${SRC_DIR}/src/liblzma/lzma/lzma_encoder.c
+ ${SRC_DIR}/src/liblzma/lzma/lzma_encoder.h
+ ${SRC_DIR}/src/liblzma/lzma/lzma_encoder_optimum_fast.c
+ ${SRC_DIR}/src/liblzma/lzma/lzma_encoder_optimum_normal.c
+ ${SRC_DIR}/src/liblzma/lzma/lzma_encoder_presets.c
+ ${SRC_DIR}/src/liblzma/lzma/lzma_encoder_private.h
+ ${SRC_DIR}/src/liblzma/rangecoder/price.h
+ ${SRC_DIR}/src/liblzma/rangecoder/price_table.c
+ ${SRC_DIR}/src/liblzma/rangecoder/range_common.h
+ ${SRC_DIR}/src/liblzma/rangecoder/range_decoder.h
+ ${SRC_DIR}/src/liblzma/rangecoder/range_encoder.h
+ ${SRC_DIR}/src/liblzma/simple/arm.c
+ ${SRC_DIR}/src/liblzma/simple/armthumb.c
+ ${SRC_DIR}/src/liblzma/simple/ia64.c
+ ${SRC_DIR}/src/liblzma/simple/powerpc.c
+ ${SRC_DIR}/src/liblzma/simple/simple_coder.c
+ ${SRC_DIR}/src/liblzma/simple/simple_coder.h
+ ${SRC_DIR}/src/liblzma/simple/simple_decoder.c
+ ${SRC_DIR}/src/liblzma/simple/simple_decoder.h
+ ${SRC_DIR}/src/liblzma/simple/simple_encoder.c
+ ${SRC_DIR}/src/liblzma/simple/simple_encoder.h
+ ${SRC_DIR}/src/liblzma/simple/simple_private.h
+ ${SRC_DIR}/src/liblzma/simple/sparc.c
+ ${SRC_DIR}/src/liblzma/simple/x86.c
+)
+
+target_include_directories(liblzma PRIVATE
+ ${SRC_DIR}/src/liblzma/api
+ ${SRC_DIR}/src/liblzma/common
+ ${SRC_DIR}/src/liblzma/check
+ ${SRC_DIR}/src/liblzma/lz
+ ${SRC_DIR}/src/liblzma/rangecoder
+ ${SRC_DIR}/src/liblzma/lzma
+ ${SRC_DIR}/src/liblzma/delta
+ ${SRC_DIR}/src/liblzma/simple
+ ${SRC_DIR}/src/common
+)
+
+target_link_libraries(liblzma Threads::Threads)
+
+# Put the tuklib functions under the lzma_ namespace.
+target_compile_definitions(liblzma PRIVATE TUKLIB_SYMBOL_PREFIX=lzma_)
+
+if (ENABLE_SSE2)
+ target_compile_definitions(liblzma PRIVATE HAVE_IMMINTRIN_H HAVE__MM_MOVEMASK_EPI8)
+endif()
diff --git a/contrib/zlib-ng-cmake/CMakeLists.txt b/contrib/zlib-ng-cmake/CMakeLists.txt
new file mode 100644
index 00000000000..96c29a09a28
--- /dev/null
+++ b/contrib/zlib-ng-cmake/CMakeLists.txt
@@ -0,0 +1,161 @@
+set (SOURCE_DIR ${CMAKE_SOURCE_DIR}/contrib/zlib-ng)
+
+add_definitions(-DZLIB_COMPAT)
+add_definitions(-DWITH_GZFILEOP)
+add_definitions(-DUNALIGNED_OK)
+add_definitions(-DUNALIGNED64_OK)
+
+set (HAVE_UNISTD_H 1)
+add_definitions(-D_LARGEFILE64_SOURCE=1 -D__USE_LARGEFILE64)
+
+add_definitions(-DHAVE_VISIBILITY_HIDDEN)
+add_definitions(-DHAVE_VISIBILITY_INTERNAL)
+add_definitions(-DHAVE_BUILTIN_CTZ)
+add_definitions(-DHAVE_BUILTIN_CTZLL)
+
+set(ZLIB_ARCH_SRCS)
+set(ZLIB_ARCH_HDRS)
+
+set(ARCHDIR "arch/generic")
+
+if(ARCH_AARCH64)
+ set(ARCHDIR "${SOURCE_DIR}/arch/arm")
+
+ add_definitions(-DARM_FEATURES)
+ add_definitions(-DARM_AUXV_HAS_CRC32 -DARM_ASM_HWCAP)
+ add_definitions(-DARM_AUXV_HAS_NEON)
+ add_definitions(-DARM_ACLE_CRC_HASH)
+ add_definitions(-DARM_NEON_ADLER32 -DARM_NEON_CHUNKSET -DARM_NEON_SLIDEHASH)
+
+ list(APPEND ZLIB_ARCH_HDRS ${ARCHDIR}/arm.h)
+ list(APPEND ZLIB_ARCH_SRCS ${ARCHDIR}/armfeature.c)
+ set(ACLE_SRCS ${ARCHDIR}/crc32_acle.c ${ARCHDIR}/insert_string_acle.c)
+ list(APPEND ZLIB_ARCH_SRCS ${ACLE_SRCS})
+ set(NEON_SRCS ${ARCHDIR}/adler32_neon.c ${ARCHDIR}/chunkset_neon.c ${ARCHDIR}/slide_neon.c)
+ list(APPEND ZLIB_ARCH_SRCS ${NEON_SRCS})
+
+elseif(ARCH_PPC64LE)
+ set(ARCHDIR "${SOURCE_DIR}/arch/power")
+
+ add_definitions(-DPOWER8)
+ add_definitions(-DPOWER_FEATURES)
+ add_definitions(-DPOWER8_VSX_ADLER32)
+ add_definitions(-DPOWER8_VSX_SLIDEHASH)
+
+ list(APPEND ZLIB_ARCH_HDRS ${ARCHDIR}/power.h)
+ list(APPEND ZLIB_ARCH_SRCS ${ARCHDIR}/power.c)
+ set(POWER8_SRCS ${ARCHDIR}/adler32_power8.c ${ARCHDIR}/slide_hash_power8.c)
+ list(APPEND ZLIB_ARCH_SRCS ${POWER8_SRCS})
+
+elseif(ARCH_AMD64)
+ set(ARCHDIR "${SOURCE_DIR}/arch/x86")
+
+ add_definitions(-DX86_FEATURES)
+ list(APPEND ZLIB_ARCH_HDRS ${ARCHDIR}/x86.h)
+ list(APPEND ZLIB_ARCH_SRCS ${ARCHDIR}/x86.c)
+ if(ENABLE_AVX2)
+ add_definitions(-DX86_AVX2 -DX86_AVX2_ADLER32 -DX86_AVX_CHUNKSET)
+ set(AVX2_SRCS ${ARCHDIR}/slide_avx.c)
+ list(APPEND AVX2_SRCS ${ARCHDIR}/chunkset_avx.c)
+ list(APPEND AVX2_SRCS ${ARCHDIR}/compare258_avx.c)
+ list(APPEND AVX2_SRCS ${ARCHDIR}/adler32_avx.c)
+ list(APPEND ZLIB_ARCH_SRCS ${AVX2_SRCS})
+ endif()
+ if(ENABLE_SSE42)
+ add_definitions(-DX86_SSE42_CRC_HASH)
+ set(SSE42_SRCS ${ARCHDIR}/insert_string_sse.c)
+ list(APPEND ZLIB_ARCH_SRCS ${SSE42_SRCS})
+ add_definitions(-DX86_SSE42_CRC_INTRIN)
+ add_definitions(-DX86_SSE42_CMP_STR)
+ set(SSE42_SRCS ${ARCHDIR}/compare258_sse.c)
+ list(APPEND ZLIB_ARCH_SRCS ${SSE42_SRCS})
+ endif()
+ if(ENABLE_SSSE3)
+ add_definitions(-DX86_SSSE3 -DX86_SSSE3_ADLER32)
+ set(SSSE3_SRCS ${ARCHDIR}/adler32_ssse3.c)
+ list(APPEND ZLIB_ARCH_SRCS ${SSSE3_SRCS})
+ endif()
+ if(ENABLE_PCLMULQDQ)
+ add_definitions(-DX86_PCLMULQDQ_CRC)
+ set(PCLMULQDQ_SRCS ${ARCHDIR}/crc_folding.c)
+ list(APPEND ZLIB_ARCH_SRCS ${PCLMULQDQ_SRCS})
+ endif()
+
+ add_definitions(-DX86_SSE2 -DX86_SSE2_CHUNKSET -DX86_SSE2_SLIDEHASH)
+ set(SSE2_SRCS ${ARCHDIR}/chunkset_sse.c ${ARCHDIR}/slide_sse.c)
+ list(APPEND ZLIB_ARCH_SRCS ${SSE2_SRCS})
+ add_definitions(-DX86_NOCHECK_SSE2)
+endif ()
+
+macro(generate_cmakein input output)
+ file(REMOVE ${output})
+ file(STRINGS ${input} _lines)
+ foreach(_line IN LISTS _lines)
+ string(REGEX REPLACE "#ifdef HAVE_UNISTD_H.*" "@ZCONF_UNISTD_LINE@" _line "${_line}")
+ string(REGEX REPLACE "#ifdef NEED_PTRDIFF_T.*" "@ZCONF_PTRDIFF_LINE@" _line "${_line}")
+ if(NEED_PTRDIFF_T)
+ string(REGEX REPLACE "typedef PTRDIFF_TYPE" "typedef @PTRDIFF_TYPE@" _line "${_line}")
+ endif()
+ file(APPEND ${output} "${_line}\n")
+ endforeach()
+endmacro(generate_cmakein)
+
+generate_cmakein(${SOURCE_DIR}/zconf.h.in ${CMAKE_CURRENT_BINARY_DIR}/zconf.h.cmakein)
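The macro rewrites the two guard lines of zconf.h.in into @ZCONF_UNISTD_LINE@ and @ZCONF_PTRDIFF_LINE@ placeholders, which the configure_file(... @ONLY) call further down fills in with the "#if 1"/"#if 0" strings chosen below. A condensed standalone sketch (file names hypothetical):

    file(WRITE ${CMAKE_CURRENT_BINARY_DIR}/demo.cmakein "@ZCONF_UNISTD_LINE@\n")
    set(ZCONF_UNISTD_LINE "#if 1 /* was set to #if 1 by configure/cmake/etc */")
    configure_file(${CMAKE_CURRENT_BINARY_DIR}/demo.cmakein ${CMAKE_CURRENT_BINARY_DIR}/demo.h @ONLY)
    # demo.h now contains: #if 1 /* was set to #if 1 by configure/cmake/etc */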
+
+set(ZLIB_SRCS
+ ${SOURCE_DIR}/adler32.c
+ ${SOURCE_DIR}/chunkset.c
+ ${SOURCE_DIR}/compare258.c
+ ${SOURCE_DIR}/compress.c
+ ${SOURCE_DIR}/crc32.c
+ ${SOURCE_DIR}/crc32_comb.c
+ ${SOURCE_DIR}/deflate.c
+ ${SOURCE_DIR}/deflate_fast.c
+ ${SOURCE_DIR}/deflate_medium.c
+ ${SOURCE_DIR}/deflate_quick.c
+ ${SOURCE_DIR}/deflate_slow.c
+ ${SOURCE_DIR}/functable.c
+ ${SOURCE_DIR}/infback.c
+ ${SOURCE_DIR}/inffast.c
+ ${SOURCE_DIR}/inflate.c
+ ${SOURCE_DIR}/inftrees.c
+ ${SOURCE_DIR}/insert_string.c
+ ${SOURCE_DIR}/trees.c
+ ${SOURCE_DIR}/uncompr.c
+ ${SOURCE_DIR}/zutil.c
+ ${SOURCE_DIR}/gzlib.c
+ ${SOURCE_DIR}/gzread.c
+ ${SOURCE_DIR}/gzwrite.c
+)
+
+set(ZLIB_ALL_SRCS ${ZLIB_SRCS} ${ZLIB_ARCH_SRCS})
+
+add_library(zlib ${ZLIB_ALL_SRCS})
+add_library(zlibstatic ALIAS zlib)
+
+# https://github.com/zlib-ng/zlib-ng/pull/733
+# This is disabled by default
+add_compile_definitions(Z_TLS=__thread)
+
+if(HAVE_UNISTD_H)
+ SET(ZCONF_UNISTD_LINE "#if 1 /* was set to #if 1 by configure/cmake/etc */")
+else()
+ SET(ZCONF_UNISTD_LINE "#if 0 /* was set to #if 0 by configure/cmake/etc */")
+endif()
+if(NEED_PTRDIFF_T)
+ SET(ZCONF_PTRDIFF_LINE "#if 1 /* was set to #if 1 by configure/cmake/etc */")
+else()
+ SET(ZCONF_PTRDIFF_LINE "#ifdef NEED_PTRDIFF_T /* may be set to #if 1 by configure/cmake/etc */")
+endif()
+
+set(ZLIB_PC ${CMAKE_CURRENT_BINARY_DIR}/zlib.pc)
+configure_file(${SOURCE_DIR}/zlib.pc.cmakein ${ZLIB_PC} @ONLY)
+configure_file(${CMAKE_CURRENT_BINARY_DIR}/zconf.h.cmakein ${CMAKE_CURRENT_BINARY_DIR}/zconf.h @ONLY)
+
+# We should use the same defines when including zlib.h as were used when zlib was compiled
+target_compile_definitions (zlib PUBLIC ZLIB_COMPAT WITH_GZFILEOP)
+if (ARCH_AMD64 OR ARCH_AARCH64)
+ target_compile_definitions (zlib PUBLIC X86_64 UNALIGNED_OK)
+endif ()
+
+target_include_directories(zlib PUBLIC ${SOURCE_DIR} ${CMAKE_CURRENT_BINARY_DIR})
diff --git a/docker/builder/Dockerfile b/docker/builder/Dockerfile
index 5a998ffbe3e..49c40d576e7 100644
--- a/docker/builder/Dockerfile
+++ b/docker/builder/Dockerfile
@@ -1,8 +1,10 @@
FROM ubuntu:20.04
-ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=13
+# ARG for quickly switching to a given Ubuntu mirror
+ARG apt_archive="http://archive.ubuntu.com"
+RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list
-RUN sed -i 's|http://archive|http://ru.archive|g' /etc/apt/sources.list
+ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=13
RUN apt-get update \
&& apt-get install ca-certificates lsb-release wget gnupg apt-transport-https \
diff --git a/docker/client/Dockerfile b/docker/client/Dockerfile
index 6f9a957852e..e84cb601c0f 100644
--- a/docker/client/Dockerfile
+++ b/docker/client/Dockerfile
@@ -1,10 +1,12 @@
FROM ubuntu:18.04
+# ARG for quickly switching to a given Ubuntu mirror
+ARG apt_archive="http://archive.ubuntu.com"
+RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list
+
ARG repository="deb https://repo.clickhouse.com/deb/stable/ main/"
ARG version=21.12.1.*
-RUN sed -i 's|http://archive|http://ru.archive|g' /etc/apt/sources.list
-
RUN apt-get update \
&& apt-get install --yes --no-install-recommends \
apt-transport-https \
diff --git a/docker/docs/builder/Dockerfile b/docker/docs/builder/Dockerfile
index 8afddefa41a..50e3fadf9ac 100644
--- a/docker/docs/builder/Dockerfile
+++ b/docker/docs/builder/Dockerfile
@@ -1,9 +1,11 @@
# docker build -t clickhouse/docs-build .
FROM ubuntu:20.04
-ENV LANG=C.UTF-8
+# ARG for quickly switching to a given Ubuntu mirror
+ARG apt_archive="http://archive.ubuntu.com"
+RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list
-RUN sed -i 's|http://archive|http://ru.archive|g' /etc/apt/sources.list
+ENV LANG=C.UTF-8
RUN apt-get update \
&& DEBIAN_FRONTEND=noninteractive apt-get install --yes --no-install-recommends \
diff --git a/docker/images.json b/docker/images.json
index a6cc821108e..a696b0597df 100644
--- a/docker/images.json
+++ b/docker/images.json
@@ -1,9 +1,7 @@
{
"docker/packager/deb": {
"name": "clickhouse/deb-builder",
- "dependent": [
- "docker/packager/unbundled"
- ]
+ "dependent": []
},
"docker/packager/binary": {
"name": "clickhouse/binary-builder",
@@ -13,10 +11,6 @@
"docker/test/codebrowser"
]
},
- "docker/packager/unbundled": {
- "name": "clickhouse/unbundled-builder",
- "dependent": []
- },
"docker/test/compatibility/centos": {
"name": "clickhouse/test-old-centos",
"dependent": []
@@ -138,23 +132,11 @@
"name": "clickhouse/test-base",
"dependent": [
"docker/test/stateless",
- "docker/test/stateless_unbundled",
"docker/test/integration/base",
"docker/test/fuzzer",
"docker/test/keeper-jepsen"
]
},
- "docker/packager/unbundled": {
- "name": "clickhouse/unbundled-builder",
- "dependent": [
- "docker/test/stateless_unbundled"
- ]
- },
- "docker/test/stateless_unbundled": {
- "name": "clickhouse/stateless-unbundled-test",
- "dependent": [
- ]
- },
"docker/test/integration/kerberized_hadoop": {
"name": "clickhouse/kerberized-hadoop",
"dependent": []
diff --git a/docker/packager/binary/Dockerfile b/docker/packager/binary/Dockerfile
index 51d29b822e8..6a6d0e7212c 100644
--- a/docker/packager/binary/Dockerfile
+++ b/docker/packager/binary/Dockerfile
@@ -1,9 +1,11 @@
# docker build -t clickhouse/binary-builder .
FROM ubuntu:20.04
-ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=13
+# ARG for quickly switching to a given Ubuntu mirror
+ARG apt_archive="http://archive.ubuntu.com"
+RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list
-RUN sed -i 's|http://archive|http://ru.archive|g' /etc/apt/sources.list
+ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=13
RUN apt-get update \
&& apt-get install \
@@ -93,9 +95,6 @@ RUN git clone https://github.com/tpoechtrager/cctools-port.git \
# Download toolchain and SDK for Darwin
RUN wget -nv https://github.com/phracker/MacOSX-SDKs/releases/download/11.3/MacOSX11.0.sdk.tar.xz
-# Download toolchain for FreeBSD 11.3
-RUN wget -nv https://clickhouse-datasets.s3.yandex.net/toolchains/toolchains/freebsd-11.3-toolchain.tar.xz
-
# NOTE: Seems like gcc-11 is too new for ubuntu20 repository
RUN add-apt-repository ppa:ubuntu-toolchain-r/test --yes \
&& apt-get update \
diff --git a/docker/packager/binary/build.sh b/docker/packager/binary/build.sh
index f78af924b66..2f18b07ffe1 100755
--- a/docker/packager/binary/build.sh
+++ b/docker/packager/binary/build.sh
@@ -6,9 +6,6 @@ mkdir -p build/cmake/toolchain/darwin-x86_64
tar xJf MacOSX11.0.sdk.tar.xz -C build/cmake/toolchain/darwin-x86_64 --strip-components=1
ln -sf darwin-x86_64 build/cmake/toolchain/darwin-aarch64
-mkdir -p build/cmake/toolchain/freebsd-x86_64
-tar xJf freebsd-11.3-toolchain.tar.xz -C build/cmake/toolchain/freebsd-x86_64 --strip-components=1
-
# Uncomment to debug ccache. Don't put ccache log in /output right away, or it
# will be confusingly packed into the "performance" package.
# export CCACHE_LOGFILE=/build/ccache.log
@@ -81,11 +78,11 @@ then
fi
# Also build fuzzers if any sanitizer specified
-if [ -n "$SANITIZER" ]
-then
- # Currently we are in build/build_docker directory
- ../docker/packager/other/fuzzer.sh
-fi
+# if [ -n "$SANITIZER" ]
+# then
+# # Currently we are in build/build_docker directory
+# ../docker/packager/other/fuzzer.sh
+# fi
ccache --show-config ||:
ccache --show-stats ||:
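The `||:` suffix on the `ccache` calls is a shell idiom worth noting: `:` is the no-op builtin, so `cmd ||:` swallows a non-zero exit status and keeps the script alive even under `set -e`. A tiny illustration:

``` bash
set -e
false ||:        # without the ||: this line would abort the script
echo "still running"
```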
diff --git a/docker/packager/deb/Dockerfile b/docker/packager/deb/Dockerfile
index 318b960e0b4..873edfe4afc 100644
--- a/docker/packager/deb/Dockerfile
+++ b/docker/packager/deb/Dockerfile
@@ -1,9 +1,11 @@
# docker build -t clickhouse/deb-builder .
FROM ubuntu:20.04
-ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=13
+# ARG for quick switch to a given ubuntu mirror
+ARG apt_archive="http://archive.ubuntu.com"
+RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list
-RUN sed -i 's|http://archive|http://ru.archive|g' /etc/apt/sources.list
+ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=13
RUN apt-get update \
&& apt-get install ca-certificates lsb-release wget gnupg apt-transport-https \
@@ -33,7 +35,6 @@ RUN curl -O https://clickhouse-datasets.s3.yandex.net/utils/1/dpkg-deb \
&& chmod +x dpkg-deb \
&& cp dpkg-deb /usr/bin
-# Libraries from OS are only needed to test the "unbundled" build (this is not used in production).
RUN apt-get update \
&& apt-get install \
alien \
diff --git a/docker/packager/deb/build.sh b/docker/packager/deb/build.sh
index 46f6404363d..e1272317c8a 100755
--- a/docker/packager/deb/build.sh
+++ b/docker/packager/deb/build.sh
@@ -31,15 +31,15 @@ then
fi
# Also build fuzzers if any sanitizer specified
-if [ -n "$SANITIZER" ]
-then
- # Script is supposed that we are in build directory.
- mkdir -p build/build_docker
- cd build/build_docker
- # Launching build script
- ../docker/packager/other/fuzzer.sh
- cd
-fi
+# if [ -n "$SANITIZER" ]
+# then
+# # Script is supposed that we are in build directory.
+# mkdir -p build/build_docker
+# cd build/build_docker
+# # Launching build script
+# ../docker/packager/other/fuzzer.sh
+# cd
+# fi
ccache --show-config ||:
ccache --show-stats ||:
diff --git a/docker/packager/packager b/docker/packager/packager
index ae7b99200ee..9cce12be949 100755
--- a/docker/packager/packager
+++ b/docker/packager/packager
@@ -11,7 +11,6 @@ SCRIPT_PATH = os.path.realpath(__file__)
IMAGE_MAP = {
"deb": "clickhouse/deb-builder",
"binary": "clickhouse/binary-builder",
- "unbundled": "clickhouse/unbundled-builder"
}
def check_image_exists_locally(image_name):
@@ -55,7 +54,7 @@ def run_docker_image_with_env(image_name, output, env_variables, ch_root, ccache
subprocess.check_call(cmd, shell=True)
-def parse_env_variables(build_type, compiler, sanitizer, package_type, image_type, cache, distcc_hosts, unbundled, split_binary, clang_tidy, version, author, official, alien_pkgs, with_coverage, with_binaries):
+def parse_env_variables(build_type, compiler, sanitizer, package_type, image_type, cache, distcc_hosts, split_binary, clang_tidy, version, author, official, alien_pkgs, with_coverage, with_binaries):
CLANG_PREFIX = "clang"
DARWIN_SUFFIX = "-darwin"
DARWIN_ARM_SUFFIX = "-darwin-aarch64"
@@ -64,7 +63,7 @@ def parse_env_variables(build_type, compiler, sanitizer, package_type, image_typ
PPC_SUFFIX = '-ppc64le'
result = []
- cmake_flags = ['$CMAKE_FLAGS', '-DADD_GDB_INDEX_FOR_GOLD=1']
+ cmake_flags = ['$CMAKE_FLAGS']
is_clang = compiler.startswith(CLANG_PREFIX)
is_cross_darwin = compiler.endswith(DARWIN_SUFFIX)
@@ -74,11 +73,6 @@ def parse_env_variables(build_type, compiler, sanitizer, package_type, image_typ
is_cross_freebsd = compiler.endswith(FREEBSD_SUFFIX)
is_cross_compile = is_cross_darwin or is_cross_darwin_arm or is_cross_arm or is_cross_freebsd or is_cross_ppc
- # Explicitly use LLD with Clang by default.
- # Don't force linker for cross-compilation.
- if is_clang and not is_cross_compile:
- cmake_flags.append("-DLINKER_NAME=ld.lld")
-
if is_cross_darwin:
cc = compiler[:-len(DARWIN_SUFFIX)]
cmake_flags.append("-DCMAKE_AR:FILEPATH=/cctools/bin/x86_64-apple-darwin-ar")
@@ -107,7 +101,7 @@ def parse_env_variables(build_type, compiler, sanitizer, package_type, image_typ
cxx = cc.replace('gcc', 'g++').replace('clang', 'clang++')
- if image_type == "deb" or image_type == "unbundled":
+ if image_type == "deb":
result.append("DEB_CC={}".format(cc))
result.append("DEB_CXX={}".format(cxx))
# For building fuzzers
@@ -159,15 +153,6 @@ def parse_env_variables(build_type, compiler, sanitizer, package_type, image_typ
cmake_flags.append('-DENABLE_TESTS=1')
cmake_flags.append('-DUSE_GTEST=1')
- # "Unbundled" build is not suitable for any production usage.
- # But it is occasionally used by some developers.
- # The whole idea of using unknown version of libraries from the OS distribution is deeply flawed.
- # We wish these developers good luck.
- if unbundled:
- # We also disable all CPU features except basic x86_64.
- # It is only slightly related to "unbundled" build, but it is a good place to test if code compiles without these instruction sets.
- cmake_flags.append('-DUNBUNDLED=1 -DUSE_INTERNAL_RDKAFKA_LIBRARY=1 -DENABLE_ARROW=0 -DENABLE_AVRO=0 -DENABLE_ORC=0 -DENABLE_PARQUET=0 -DENABLE_SSSE3=0 -DENABLE_SSE41=0 -DENABLE_SSE42=0 -DENABLE_PCLMULQDQ=0 -DENABLE_POPCNT=0 -DENABLE_AVX=0 -DENABLE_AVX2=0')
-
if split_binary:
cmake_flags.append('-DUSE_STATIC_LIBRARIES=0 -DSPLIT_SHARED_LIBRARIES=1 -DCLICKHOUSE_SPLIT_BINARY=1')
# We can't always build utils because it requires too much space, but
@@ -213,7 +198,6 @@ if __name__ == "__main__":
"clang-13", "clang-13-darwin", "clang-13-darwin-aarch64", "clang-13-aarch64", "clang-13-ppc64le",
"clang-11-freebsd", "clang-12-freebsd", "clang-13-freebsd", "gcc-11"), default="clang-13")
parser.add_argument("--sanitizer", choices=("address", "thread", "memory", "undefined", ""), default="")
- parser.add_argument("--unbundled", action="store_true")
parser.add_argument("--split-binary", action="store_true")
parser.add_argument("--clang-tidy", action="store_true")
parser.add_argument("--cache", choices=("", "ccache", "distcc"), default="")
@@ -232,7 +216,7 @@ if __name__ == "__main__":
if not os.path.isabs(args.output_dir):
args.output_dir = os.path.abspath(os.path.join(os.getcwd(), args.output_dir))
- image_type = 'binary' if args.package_type == 'performance' else 'unbundled' if args.unbundled else args.package_type
+ image_type = 'binary' if args.package_type == 'performance' else args.package_type
image_name = IMAGE_MAP[image_type]
if not os.path.isabs(args.clickhouse_repo_path):
@@ -256,7 +240,7 @@ if __name__ == "__main__":
build_image(image_with_version, dockerfile)
env_prepared = parse_env_variables(
args.build_type, args.compiler, args.sanitizer, args.package_type, image_type,
- args.cache, args.distcc_hosts, args.unbundled, args.split_binary, args.clang_tidy,
+ args.cache, args.distcc_hosts, args.split_binary, args.clang_tidy,
args.version, args.author, args.official, args.alien_pkgs, args.with_coverage, args.with_binaries)
run_docker_image_with_env(image_name, args.output_dir, env_prepared, ch_root, args.ccache_dir, args.docker_image_version)
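After this change the `unbundled` image type can no longer be selected, so every invocation resolves to either `deb` or `binary`. A hypothetical invocation with the remaining options (flag spellings inferred from the argparse definitions above):

``` bash
# Sanitized deb build; --unbundled is gone, so the package type alone
# picks the image (clickhouse/deb-builder here).
./docker/packager/packager \
    --package-type deb \
    --compiler clang-13 \
    --sanitizer address \
    --output-dir ./output
```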
diff --git a/docker/packager/unbundled/Dockerfile b/docker/packager/unbundled/Dockerfile
deleted file mode 100644
index f3d8e1ea658..00000000000
--- a/docker/packager/unbundled/Dockerfile
+++ /dev/null
@@ -1,69 +0,0 @@
-# docker build -t clickhouse/unbundled-builder .
-FROM clickhouse/deb-builder
-
-RUN export CODENAME="$(lsb_release --codename --short | tr 'A-Z' 'a-z')" \
- && wget -nv -O /tmp/arrow-keyring.deb "https://apache.jfrog.io/artifactory/arrow/ubuntu/apache-arrow-apt-source-latest-${CODENAME}.deb" \
- && dpkg -i /tmp/arrow-keyring.deb
-
-RUN sed -i 's|http://archive|http://ru.archive|g' /etc/apt/sources.list
-
-# Libraries from OS are only needed to test the "unbundled" build (that is not used in production).
-RUN apt-get update \
- && apt-get install \
- libicu-dev \
- gperf \
- perl \
- pkg-config \
- devscripts \
- libc++-dev \
- libc++abi-dev \
- libboost-all-dev \
- zlib1g-dev \
- liblz4-dev \
- libdouble-conversion-dev \
- libxml2-dev \
- librdkafka-dev \
- libgoogle-perftools-dev \
- libzstd-dev \
- libltdl-dev \
- libre2-dev \
- libjemalloc-dev \
- libmsgpack-dev \
- libcurl4-openssl-dev \
- unixodbc-dev \
- odbcinst \
- tzdata \
- alien \
- libcapnp-dev \
- cmake \
- gdb \
- pigz \
- moreutils \
- libcctz-dev \
- libldap2-dev \
- libsasl2-dev \
- libgsasl7-dev \
- heimdal-multidev \
- libhyperscan-dev \
- libbrotli-dev \
- protobuf-compiler \
- libprotoc-dev \
- libgrpc++-dev \
- protobuf-compiler-grpc \
- libc-ares-dev \
- rapidjson-dev \
- libsnappy-dev \
- libparquet-dev \
- libthrift-dev \
- libutf8proc-dev \
- libbz2-dev \
- libavro-dev \
- libfarmhash-dev \
- librocksdb-dev \
- libgflags-dev \
- libmysqlclient-dev \
- --yes --no-install-recommends
-
-COPY build.sh /
-
-CMD ["/bin/bash", "/build.sh"]
diff --git a/docker/packager/unbundled/build.sh b/docker/packager/unbundled/build.sh
deleted file mode 100755
index c43c6b5071e..00000000000
--- a/docker/packager/unbundled/build.sh
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/usr/bin/env bash
-
-set -x -e
-
-ccache --show-stats ||:
-ccache --zero-stats ||:
-read -ra ALIEN_PKGS <<< "${ALIEN_PKGS:-}"
-build/release "${ALIEN_PKGS[@]}" | ts '%Y-%m-%d %H:%M:%S'
-mv /*.deb /output
-mv -- *.changes /output
-mv -- *.buildinfo /output
-mv /*.rpm /output ||: # if exists
-mv /*.tgz /output ||: # if exists
-
-ccache --show-stats ||:
diff --git a/docker/server/Dockerfile b/docker/server/Dockerfile
index 04842e7a3de..96e7e73af33 100644
--- a/docker/server/Dockerfile
+++ b/docker/server/Dockerfile
@@ -1,5 +1,9 @@
FROM ubuntu:20.04
+# ARG for quick switch to a given ubuntu mirror
+ARG apt_archive="http://archive.ubuntu.com"
+RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list
+
ARG repository="deb https://repo.clickhouse.com/deb/stable/ main/"
ARG version=21.12.1.*
ARG gosu_ver=1.10
@@ -26,8 +30,6 @@ ARG DEBIAN_FRONTEND=noninteractive
# installed to prevent picking those uid / gid by some unrelated software.
# The same uid / gid (101) is used both for alpine and ubuntu.
-RUN sed -i 's|http://archive|http://ru.archive|g' /etc/apt/sources.list
-
RUN groupadd -r clickhouse --gid=101 \
&& useradd -r -g clickhouse --uid=101 --home-dir=/var/lib/clickhouse --shell=/bin/bash clickhouse \
&& apt-get update \
diff --git a/docker/test/base/Dockerfile b/docker/test/base/Dockerfile
index caaeff912e7..a661f8875a2 100644
--- a/docker/test/base/Dockerfile
+++ b/docker/test/base/Dockerfile
@@ -1,9 +1,11 @@
# docker build -t clickhouse/test-base .
FROM clickhouse/test-util
-ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=13
+# ARG for quick switch to a given ubuntu mirror
+ARG apt_archive="http://archive.ubuntu.com"
+RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list
-RUN sed -i 's|http://archive|http://ru.archive|g' /etc/apt/sources.list
+ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=13
RUN apt-get update \
&& apt-get install ca-certificates lsb-release wget gnupg apt-transport-https \
@@ -52,14 +54,18 @@ RUN apt-get update \
--yes --no-install-recommends
# Sanitizer options for services (clickhouse-server)
-RUN echo "TSAN_OPTIONS='verbosity=1000 halt_on_error=1 history_size=7'" >> /etc/environment; \
+# Set resident memory limit for TSAN to 45GiB (46080MiB) to avoid OOMs in Stress tests
+# and MEMORY_LIMIT_EXCEEDED exceptions in Functional tests (total memory limit in Functional tests is ~55.24 GiB).
+# TSAN will flush shadow memory when reaching this limit.
+# It may cause false-negatives, but it's better than OOM.
+RUN echo "TSAN_OPTIONS='verbosity=1000 halt_on_error=1 history_size=7 memory_limit_mb=46080'" >> /etc/environment; \
echo "UBSAN_OPTIONS='print_stacktrace=1'" >> /etc/environment; \
echo "MSAN_OPTIONS='abort_on_error=1 poison_in_dtor=1'" >> /etc/environment; \
echo "LSAN_OPTIONS='suppressions=/usr/share/clickhouse-test/config/lsan_suppressions.txt'" >> /etc/environment; \
ln -s /usr/lib/llvm-${LLVM_VERSION}/bin/llvm-symbolizer /usr/bin/llvm-symbolizer;
# Sanitizer options for current shell (not current, but the one that will be spawned on "docker run")
# (but w/o verbosity for TSAN, otherwise test.reference will not match)
-ENV TSAN_OPTIONS='halt_on_error=1 history_size=7'
+ENV TSAN_OPTIONS='halt_on_error=1 history_size=7 memory_limit_mb=46080'
ENV UBSAN_OPTIONS='print_stacktrace=1'
ENV MSAN_OPTIONS='abort_on_error=1 poison_in_dtor=1'
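As a quick sanity check of the figure in the comment above, 45 GiB expressed in MiB is indeed the value passed to `memory_limit_mb`:

``` bash
echo $((45 * 1024))   # prints 46080
```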
diff --git a/docker/test/codebrowser/Dockerfile b/docker/test/codebrowser/Dockerfile
index 94aa321252b..25fabca67b5 100644
--- a/docker/test/codebrowser/Dockerfile
+++ b/docker/test/codebrowser/Dockerfile
@@ -2,7 +2,9 @@
# docker run --volume=path_to_repo:/repo_folder --volume=path_to_result:/test_output clickhouse/codebrowser
FROM clickhouse/binary-builder
-RUN sed -i 's|http://archive|http://ru.archive|g' /etc/apt/sources.list
+# ARG for quick switch to a given ubuntu mirror
+ARG apt_archive="http://archive.ubuntu.com"
+RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list
RUN apt-get update && apt-get --yes --allow-unauthenticated install clang-9 libllvm9 libclang-9-dev
diff --git a/docker/test/fasttest/Dockerfile b/docker/test/fasttest/Dockerfile
index 798910fb952..6fa5b0aa9db 100644
--- a/docker/test/fasttest/Dockerfile
+++ b/docker/test/fasttest/Dockerfile
@@ -1,9 +1,11 @@
# docker build -t clickhouse/fasttest .
FROM clickhouse/test-util
-ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=13
+# ARG for quick switch to a given ubuntu mirror
+ARG apt_archive="http://archive.ubuntu.com"
+RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list
-RUN sed -i 's|http://archive|http://ru.archive|g' /etc/apt/sources.list
+ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=13
RUN apt-get update \
&& apt-get install ca-certificates lsb-release wget gnupg apt-transport-https \
diff --git a/docker/test/fasttest/run.sh b/docker/test/fasttest/run.sh
index 925ac6f797c..10c896c15f2 100755
--- a/docker/test/fasttest/run.sh
+++ b/docker/test/fasttest/run.sh
@@ -159,6 +159,7 @@ function clone_submodules
cd "$FASTTEST_SOURCE"
SUBMODULES_TO_UPDATE=(
+ contrib/sysroot
contrib/magic_enum
contrib/abseil-cpp
contrib/boost
diff --git a/docker/test/fuzzer/Dockerfile b/docker/test/fuzzer/Dockerfile
index 6444e745c47..c602cba50aa 100644
--- a/docker/test/fuzzer/Dockerfile
+++ b/docker/test/fuzzer/Dockerfile
@@ -1,12 +1,14 @@
# docker build -t clickhouse/fuzzer .
FROM clickhouse/test-base
+# ARG for quick switch to a given ubuntu mirror
+ARG apt_archive="http://archive.ubuntu.com"
+RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list
+
ENV LANG=C.UTF-8
ENV TZ=Europe/Moscow
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
-RUN sed -i 's|http://archive|http://ru.archive|g' /etc/apt/sources.list
-
RUN apt-get update \
&& DEBIAN_FRONTEND=noninteractive apt-get install --yes --no-install-recommends \
ca-certificates \
diff --git a/docker/test/fuzzer/run-fuzzer.sh b/docker/test/fuzzer/run-fuzzer.sh
index 0c11e0a615d..351b4a3c541 100755
--- a/docker/test/fuzzer/run-fuzzer.sh
+++ b/docker/test/fuzzer/run-fuzzer.sh
@@ -77,7 +77,7 @@ function configure
function watchdog
{
- sleep 3600
+ sleep 1800
echo "Fuzzing run has timed out"
for _ in {1..10}
@@ -256,6 +256,12 @@ continue
task_exit_code=0
echo "success" > status.txt
echo "OK" > description.txt
+ elif [ "$fuzzer_exit_code" == "137" ]
+ then
+ # Killed.
+ task_exit_code=$fuzzer_exit_code
+ echo "failure" > status.txt
+ echo "Killed" > description.txt
else
# The server was alive, but the fuzzer returned some error. This might
# be some client-side error detected by fuzzing, or a problem in the
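Exit code 137 is the conventional 128 + signal-number encoding for SIGKILL (9), which is what both the kernel OOM killer and `docker kill` deliver. A small demonstration of where the number comes from:

``` bash
sleep 100 &
kill -9 $!
wait $!
echo $?   # prints 137 (128 + 9)
```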
diff --git a/docker/test/integration/base/Dockerfile b/docker/test/integration/base/Dockerfile
index 519c64297e5..add4dad0132 100644
--- a/docker/test/integration/base/Dockerfile
+++ b/docker/test/integration/base/Dockerfile
@@ -19,6 +19,7 @@ RUN apt-get update \
sqlite3 \
curl \
tar \
+ lz4 \
krb5-user \
iproute2 \
lsof \
diff --git a/docker/test/integration/kerberized_hadoop/Dockerfile b/docker/test/integration/kerberized_hadoop/Dockerfile
index 00944cbfc00..025f4b27fde 100644
--- a/docker/test/integration/kerberized_hadoop/Dockerfile
+++ b/docker/test/integration/kerberized_hadoop/Dockerfile
@@ -2,18 +2,17 @@
FROM sequenceiq/hadoop-docker:2.7.0
-RUN sed -i -e 's/^\#baseurl/baseurl/' /etc/yum.repos.d/CentOS-Base.repo && \
- sed -i -e 's/^mirrorlist/#mirrorlist/' /etc/yum.repos.d/CentOS-Base.repo && \
- sed -i -e 's#http://mirror.centos.org/#http://vault.centos.org/#' /etc/yum.repos.d/CentOS-Base.repo
-
# https://community.letsencrypt.org/t/rhel-centos-6-openssl-client-compatibility-after-dst-root-ca-x3-expiration/161032/81
RUN sed -i s/xMDkzMDE0MDExNVow/0MDkzMDE4MTQwM1ow/ /etc/pki/tls/certs/ca-bundle.crt
-RUN yum clean all && \
- rpm --rebuilddb && \
- yum -y update && \
- yum -y install yum-plugin-ovl && \
- yum --quiet -y install krb5-workstation.x86_64
+
+RUN curl -o krb5-libs-1.10.3-65.el6.x86_64.rpm ftp://ftp.pbone.net/mirror/vault.centos.org/6.10/os/x86_64/Packages/krb5-libs-1.10.3-65.el6.x86_64.rpm && \
+ curl -o krb5-workstation-1.10.3-65.el6.x86_64.rpm ftp://ftp.pbone.net/mirror/vault.centos.org/6.9/os/x86_64/Packages/krb5-workstation-1.10.3-65.el6.x86_64.rpm && \
+ curl -o libkadm5-1.10.3-65.el6.x86_64.rpm ftp://ftp.pbone.net/mirror/vault.centos.org/6.10/os/x86_64/Packages/libkadm5-1.10.3-65.el6.x86_64.rpm && \
+ curl -o libss-1.41.12-24.el6.x86_64.rpm ftp://ftp.pbone.net/mirror/vault.centos.org/6.9/cr/x86_64/Packages/libss-1.41.12-24.el6.x86_64.rpm && \
+ curl -o libcom_err-1.41.12-24.el6.x86_64.rpm ftp://ftp.pbone.net/mirror/vault.centos.org/6.9/cr/x86_64/Packages/libcom_err-1.41.12-24.el6.x86_64.rpm && \
+ rpm -Uvh libkadm5-1.10.3-65.el6.x86_64.rpm libss-1.41.12-24.el6.x86_64.rpm krb5-libs-1.10.3-65.el6.x86_64.rpm krb5-workstation-1.10.3-65.el6.x86_64.rpm libcom_err-1.41.12-24.el6.x86_64.rpm && \
+ rm -fr *.rpm
RUN cd /tmp && \
curl http://archive.apache.org/dist/commons/daemon/source/commons-daemon-1.0.15-src.tar.gz -o commons-daemon-1.0.15-src.tar.gz && \
diff --git a/docker/test/integration/runner/Dockerfile b/docker/test/integration/runner/Dockerfile
index 06e1f64ced2..5695be70b9a 100644
--- a/docker/test/integration/runner/Dockerfile
+++ b/docker/test/integration/runner/Dockerfile
@@ -1,7 +1,9 @@
# docker build -t clickhouse/integration-tests-runner .
FROM ubuntu:20.04
-RUN sed -i 's|http://archive|http://ru.archive|g' /etc/apt/sources.list
+# ARG for quick switch to a given ubuntu mirror
+ARG apt_archive="http://archive.ubuntu.com"
+RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list
RUN apt-get update \
&& env DEBIAN_FRONTEND=noninteractive apt-get install --yes \
@@ -60,7 +62,7 @@ RUN dockerd --version; docker --version
RUN python3 -m pip install \
PyMySQL \
aerospike==4.0.0 \
- avro \
+ avro==1.10.2 \
cassandra-driver \
confluent-kafka==1.5.0 \
dict2xml \
diff --git a/docker/test/integration/runner/compose/docker_compose_postgres.yml b/docker/test/integration/runner/compose/docker_compose_postgres.yml
index c444e71798e..15ea548e218 100644
--- a/docker/test/integration/runner/compose/docker_compose_postgres.yml
+++ b/docker/test/integration/runner/compose/docker_compose_postgres.yml
@@ -2,7 +2,7 @@ version: '2.3'
services:
postgres1:
image: postgres
- command: ["postgres", "-c", "logging_collector=on", "-c", "log_directory=/postgres/logs", "-c", "log_filename=postgresql.log", "-c", "log_statement=all", "-c", "max_connections=200"]
+ command: ["postgres", "-c", "wal_level=logical", "-c", "max_replication_slots=2", "-c", "logging_collector=on", "-c", "log_directory=/postgres/logs", "-c", "log_filename=postgresql.log", "-c", "log_statement=all", "-c", "max_connections=200"]
restart: always
expose:
- ${POSTGRES_PORT}
@@ -11,7 +11,6 @@ services:
interval: 10s
timeout: 5s
retries: 5
- command: [ "postgres", "-c", "wal_level=logical", "-c", "max_replication_slots=2"]
networks:
default:
aliases:
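The change above also fixes a real YAML pitfall: the file previously carried two `command:` keys, and since duplicate mapping keys are at best undefined (most parsers keep only the last one), the logging flags were silently dropped. One way to confirm the merged command took effect (a sketch; the `postgres` user is an assumption):

``` bash
# wal_level=logical must be active for the logical-replication tests.
docker-compose exec postgres1 psql -U postgres -c "SHOW wal_level;"   # expect: logical
```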
diff --git a/docker/test/performance-comparison/Dockerfile b/docker/test/performance-comparison/Dockerfile
index 73d9454ab7f..eddaf969f33 100644
--- a/docker/test/performance-comparison/Dockerfile
+++ b/docker/test/performance-comparison/Dockerfile
@@ -1,12 +1,14 @@
# docker build -t clickhouse/performance-comparison .
FROM ubuntu:18.04
+# ARG for quick switch to a given ubuntu mirror
+ARG apt_archive="http://archive.ubuntu.com"
+RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list
+
ENV LANG=C.UTF-8
ENV TZ=Europe/Moscow
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
-RUN sed -i 's|http://archive|http://ru.archive|g' /etc/apt/sources.list
-
RUN apt-get update \
&& DEBIAN_FRONTEND=noninteractive apt-get install --yes --no-install-recommends \
bash \
diff --git a/docker/test/performance-comparison/compare.sh b/docker/test/performance-comparison/compare.sh
index 2e8517d71c9..b6a06be2ac7 100755
--- a/docker/test/performance-comparison/compare.sh
+++ b/docker/test/performance-comparison/compare.sh
@@ -308,12 +308,7 @@ function get_profiles_watchdog
function get_profiles
{
# Collect the profiles
- clickhouse-client --port $LEFT_SERVER_PORT --query "set query_profiler_cpu_time_period_ns = 0"
- clickhouse-client --port $LEFT_SERVER_PORT --query "set query_profiler_real_time_period_ns = 0"
clickhouse-client --port $LEFT_SERVER_PORT --query "system flush logs" &
-
- clickhouse-client --port $RIGHT_SERVER_PORT --query "set query_profiler_cpu_time_period_ns = 0"
- clickhouse-client --port $RIGHT_SERVER_PORT --query "set query_profiler_real_time_period_ns = 0"
clickhouse-client --port $RIGHT_SERVER_PORT --query "system flush logs" &
wait
diff --git a/docker/test/performance-comparison/config/config.d/zzz-perf-comparison-tweaks-config.xml b/docker/test/performance-comparison/config/config.d/zzz-perf-comparison-tweaks-config.xml
index cc5dc3795bb..39c29bb61ca 100644
--- a/docker/test/performance-comparison/config/config.d/zzz-perf-comparison-tweaks-config.xml
+++ b/docker/test/performance-comparison/config/config.d/zzz-perf-comparison-tweaks-config.xml
@@ -5,22 +5,44 @@
+
     <listen_host>::</listen_host>
     <mlock_executable>true</mlock_executable>
 
-    <metric_log>
-        <database>system</database>
-        <table>metric_log</table>
-        <flush_interval_milliseconds>7500</flush_interval_milliseconds>
-        <collect_interval_milliseconds>1000</collect_interval_milliseconds>
-    </metric_log>
+    <query_log>
+        <engine>ENGINE = Memory</engine>
+    </query_log>
+    <query_thread_log>
+        <engine>ENGINE = Memory</engine>
+    </query_thread_log>
+    <trace_log>
+        <engine>ENGINE = Memory</engine>
+    </trace_log>
+    <metric_log>
+        <engine>ENGINE = Memory</engine>
+    </metric_log>
+    <asynchronous_metric_log>
+        <engine>ENGINE = Memory</engine>
+    </asynchronous_metric_log>
 
     <uncompressed_cache_size>1000000000</uncompressed_cache_size>
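The overrides above switch the system log tables to the `Memory` engine, so flushing them does not touch disk during measurements. A quick way to verify on a running server, once the log tables have been created (a sketch):

``` bash
clickhouse-client --query "
    SELECT name, engine
    FROM system.tables
    WHERE database = 'system' AND name LIKE '%_log'"   # expect: Memory
```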
diff --git a/docker/test/performance-comparison/report.py b/docker/test/performance-comparison/report.py
index 35e1008e0d7..4cff6b41949 100755
--- a/docker/test/performance-comparison/report.py
+++ b/docker/test/performance-comparison/report.py
@@ -227,10 +227,20 @@ def tableEnd():
    return '</table>'
def tsvRows(n):
- result = []
try:
with open(n, encoding='utf-8') as fd:
- return [row for row in csv.reader(fd, delimiter="\t", quotechar='"')]
+ result = []
+ for row in csv.reader(fd, delimiter="\t", quoting=csv.QUOTE_NONE):
+ new_row = []
+ for e in row:
+                # The first step, .encode('utf-8').decode('unicode-escape'), decodes escape sequences in the strings.
+                # The second step, .encode('latin1').decode('utf-8'), repairs mojibake (UTF-8 bytes that were
+                # misread as Latin-1), e.g. 'Ð§ÐµÐ¼' is transformed back into 'Чем'.
+
+ new_row.append(e.encode('utf-8').decode('unicode-escape').encode('latin1').decode('utf-8'))
+ result.append(new_row)
+ return result
+
except:
report_errors.append(
traceback.format_exception_only(
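The Latin-1 round trip in the comments above is the standard mojibake repair: text that was UTF-8 but got decoded as Latin-1 is re-encoded to Latin-1, recovering the original UTF-8 bytes. The same trick from the shell with `iconv` (a sketch):

``` bash
# 'Ð§ÐµÐ¼' is 'Чем' whose UTF-8 bytes were misread as Latin-1.
printf 'Ð§ÐµÐ¼\n' | iconv -f UTF-8 -t LATIN1   # prints: Чем
```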
diff --git a/docker/test/pvs/Dockerfile b/docker/test/pvs/Dockerfile
index 77cbd910922..5dc32ebcc22 100644
--- a/docker/test/pvs/Dockerfile
+++ b/docker/test/pvs/Dockerfile
@@ -40,7 +40,7 @@ RUN set -x \
ENV CCACHE_DIR=/test_output/ccache
CMD echo "Running PVS version $PKG_VERSION" && mkdir -p $CCACHE_DIR && cd /repo_folder && pvs-studio-analyzer credentials $LICENCE_NAME $LICENCE_KEY -o ./licence.lic \
- && cmake . -D"ENABLE_EMBEDDED_COMPILER"=OFF -D"USE_INTERNAL_PROTOBUF_LIBRARY"=OFF -D"USE_INTERNAL_GRPC_LIBRARY"=OFF -DCMAKE_C_COMPILER=clang-13 -DCMAKE_CXX_COMPILER=clang\+\+-13 \
+ && cmake . -D"ENABLE_EMBEDDED_COMPILER"=OFF -D"DISABLE_HERMETIC_BUILD"=ON -DCMAKE_C_COMPILER=clang-13 -DCMAKE_CXX_COMPILER=clang\+\+-13 \
&& ninja re2_st clickhouse_grpc_protos \
&& pvs-studio-analyzer analyze -o pvs-studio.log -e contrib -j 4 -l ./licence.lic; \
cp /repo_folder/pvs-studio.log /test_output; \
diff --git a/docker/test/sqlancer/Dockerfile b/docker/test/sqlancer/Dockerfile
index e73fd03fb6d..0821d516e23 100644
--- a/docker/test/sqlancer/Dockerfile
+++ b/docker/test/sqlancer/Dockerfile
@@ -1,7 +1,9 @@
# docker build -t clickhouse/sqlancer-test .
FROM ubuntu:20.04
-RUN sed -i 's|http://archive|http://ru.archive|g' /etc/apt/sources.list
+# ARG for quick switch to a given ubuntu mirror
+ARG apt_archive="http://archive.ubuntu.com"
+RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list
RUN apt-get update --yes && env DEBIAN_FRONTEND=noninteractive apt-get install wget unzip git default-jdk maven python3 --yes --no-install-recommends
RUN wget https://github.com/sqlancer/sqlancer/archive/master.zip -O /sqlancer.zip
diff --git a/docker/test/stateful/run.sh b/docker/test/stateful/run.sh
index a8cae760d55..680392df43e 100755
--- a/docker/test/stateful/run.sh
+++ b/docker/test/stateful/run.sh
@@ -120,8 +120,12 @@ timeout "$MAX_RUN_TIME" bash -c run_tests ||:
./process_functional_tests_result.py || echo -e "failure\tCannot parse results" > /test_output/check_status.tsv
grep -Fa "Fatal" /var/log/clickhouse-server/clickhouse-server.log ||:
+
pigz < /var/log/clickhouse-server/clickhouse-server.log > /test_output/clickhouse-server.log.gz ||:
+# FIXME: remove once only GitHub Actions remain
+rm /var/log/clickhouse-server/clickhouse-server.log
mv /var/log/clickhouse-server/stderr.log /test_output/ ||:
+
if [[ -n "$WITH_COVERAGE" ]] && [[ "$WITH_COVERAGE" -eq 1 ]]; then
tar -chf /test_output/clickhouse_coverage.tar.gz /profraw ||:
fi
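The explicit `rm` calls are needed because `pigz < log > log.gz` reads from a redirect and therefore leaves the original file in place, unlike `pigz log`, which replaces it. In short:

``` bash
pigz < clickhouse-server.log > clickhouse-server.log.gz   # original kept
rm clickhouse-server.log                                  # hence removed by hand
```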
@@ -130,6 +134,9 @@ if [[ -n "$USE_DATABASE_REPLICATED" ]] && [[ "$USE_DATABASE_REPLICATED" -eq 1 ]]
grep -Fa "Fatal" /var/log/clickhouse-server/clickhouse-server2.log ||:
pigz < /var/log/clickhouse-server/clickhouse-server1.log > /test_output/clickhouse-server1.log.gz ||:
pigz < /var/log/clickhouse-server/clickhouse-server2.log > /test_output/clickhouse-server2.log.gz ||:
+ # FIXME: remove once only GitHub Actions remain
+ rm /var/log/clickhouse-server/clickhouse-server1.log
+ rm /var/log/clickhouse-server/clickhouse-server2.log
mv /var/log/clickhouse-server/stderr1.log /test_output/ ||:
mv /var/log/clickhouse-server/stderr2.log /test_output/ ||:
fi
diff --git a/docker/test/stateless/run.sh b/docker/test/stateless/run.sh
index ad0a805488f..93f64fdec66 100755
--- a/docker/test/stateless/run.sh
+++ b/docker/test/stateless/run.sh
@@ -135,6 +135,8 @@ done
wait ||:
+# Already compressed above (FIXME: remove once only GitHub Actions remain)
+rm /var/log/clickhouse-server/clickhouse-server.log
mv /var/log/clickhouse-server/stderr.log /test_output/ ||:
if [[ -n "$WITH_COVERAGE" ]] && [[ "$WITH_COVERAGE" -eq 1 ]]; then
tar -chf /test_output/clickhouse_coverage.tar.gz /profraw ||:
@@ -155,6 +157,9 @@ if [[ -n "$USE_DATABASE_REPLICATED" ]] && [[ "$USE_DATABASE_REPLICATED" -eq 1 ]]
grep -Fa "Fatal" /var/log/clickhouse-server/clickhouse-server2.log ||:
pigz < /var/log/clickhouse-server/clickhouse-server1.log > /test_output/clickhouse-server1.log.gz ||:
pigz < /var/log/clickhouse-server/clickhouse-server2.log > /test_output/clickhouse-server2.log.gz ||:
+ # FIXME: remove once only GitHub Actions remain
+ rm /var/log/clickhouse-server/clickhouse-server1.log
+ rm /var/log/clickhouse-server/clickhouse-server2.log
mv /var/log/clickhouse-server/stderr1.log /test_output/ ||:
mv /var/log/clickhouse-server/stderr2.log /test_output/ ||:
tar -chf /test_output/zookeeper_log_dump1.tar /var/lib/clickhouse1/data/system/zookeeper_log ||:
diff --git a/docker/test/stateless_unbundled/Dockerfile b/docker/test/stateless_unbundled/Dockerfile
deleted file mode 100644
index dfe441e08a6..00000000000
--- a/docker/test/stateless_unbundled/Dockerfile
+++ /dev/null
@@ -1,82 +0,0 @@
-# docker build -t clickhouse/stateless-unbundled-test .
-FROM clickhouse/test-base
-
-ARG odbc_driver_url="https://github.com/ClickHouse/clickhouse-odbc/releases/download/v1.1.4.20200302/clickhouse-odbc-1.1.4-Linux.tar.gz"
-
-RUN apt-get --allow-unauthenticated update -y \
- && env DEBIAN_FRONTEND=noninteractive \
- apt-get --allow-unauthenticated install --yes --no-install-recommends \
- alien \
- brotli \
- zstd \
- cmake \
- devscripts \
- expect \
- gdb \
- gperf \
- heimdal-multidev \
- libboost-filesystem-dev \
- libboost-iostreams-dev \
- libboost-program-options-dev \
- libboost-regex-dev \
- libboost-system-dev \
- libboost-thread-dev \
- libc++-dev \
- libc++abi-dev \
- libcapnp-dev \
- libcctz-dev \
- libcurl4-openssl-dev \
- libdouble-conversion-dev \
- libgoogle-perftools-dev \
- libhyperscan-dev \
- libicu-dev \
- libjemalloc-dev \
- libldap2-dev \
- libltdl-dev \
- liblz4-dev \
- libmsgpack-dev \
- libpoco-dev \
- libpoconetssl62 \
- librdkafka-dev \
- libre2-dev \
- libreadline-dev \
- libsasl2-dev \
- libzstd-dev \
- librocksdb-dev \
- libgflags-dev \
- lsof \
- moreutils \
- ncdu \
- netcat-openbsd \
- odbcinst \
- openssl \
- perl \
- pigz \
- pkg-config \
- python3 \
- python3-lxml \
- python3-requests \
- python3-termcolor \
- python3-pip \
- qemu-user-static \
- sudo \
- telnet \
- tree \
- tzdata \
- unixodbc \
- unixodbc-dev \
- wget \
- zlib1g-dev
-
-RUN pip3 install numpy scipy pandas
-
-RUN mkdir -p /tmp/clickhouse-odbc-tmp \
- && wget -nv -O - ${odbc_driver_url} | tar --strip-components=1 -xz -C /tmp/clickhouse-odbc-tmp \
- && cp /tmp/clickhouse-odbc-tmp/lib64/*.so /usr/local/lib/ \
- && odbcinst -i -d -f /tmp/clickhouse-odbc-tmp/share/doc/clickhouse-odbc/config/odbcinst.ini.sample \
- && odbcinst -i -s -l -f /tmp/clickhouse-odbc-tmp/share/doc/clickhouse-odbc/config/odbc.ini.sample \
- && rm -rf /tmp/clickhouse-odbc-tmp
-
-COPY run.sh /
-CMD ["/bin/bash", "/run.sh"]
-
diff --git a/docker/test/stateless_unbundled/clickhouse-statelest-test-runner.Dockerfile b/docker/test/stateless_unbundled/clickhouse-statelest-test-runner.Dockerfile
deleted file mode 100644
index 562141ba147..00000000000
--- a/docker/test/stateless_unbundled/clickhouse-statelest-test-runner.Dockerfile
+++ /dev/null
@@ -1,15 +0,0 @@
-# Since right now we can't set volumes to the docker during build, we split building container in stages:
-# 1. build base container
-# 2. run base conatiner with mounted volumes
-# 3. commit container as image
-FROM ubuntu:18.10 as clickhouse-test-runner-base
-
-# A volume where directory with clickhouse packages to be mounted,
-# for later installing.
-VOLUME /packages
-
-CMD apt-get update ;\
- DEBIAN_FRONTEND=noninteractive \
- apt install -y /packages/clickhouse-common-static_*.deb \
- /packages/clickhouse-client_*.deb \
- /packages/clickhouse-test_*.deb
diff --git a/docker/test/stateless_unbundled/run.sh b/docker/test/stateless_unbundled/run.sh
deleted file mode 100755
index f8396706ed2..00000000000
--- a/docker/test/stateless_unbundled/run.sh
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/bin/bash
-
-set -e -x
-
-dpkg -i package_folder/clickhouse-common-static_*.deb
-dpkg -i package_folder/clickhouse-common-static-dbg_*.deb
-dpkg -i package_folder/clickhouse-server_*.deb
-dpkg -i package_folder/clickhouse-client_*.deb
-dpkg -i package_folder/clickhouse-test_*.deb
-
-# install test configs
-/usr/share/clickhouse-test/config/install.sh
-
-service clickhouse-server start && sleep 5
-
-clickhouse-test --testname --shard --zookeeper "$ADDITIONAL_OPTIONS" "$SKIP_TESTS_OPTION" 2>&1 | ts '%Y-%m-%d %H:%M:%S' | tee test_output/test_result.txt
diff --git a/docker/test/stress/run.sh b/docker/test/stress/run.sh
index 04845f2a4d1..a2810c7ce75 100755
--- a/docker/test/stress/run.sh
+++ b/docker/test/stress/run.sh
@@ -37,6 +37,12 @@ function configure()
# install test configs
/usr/share/clickhouse-test/config/install.sh
+ # avoid an overly slow startup
+ sudo cat /etc/clickhouse-server/config.d/keeper_port.xml | sed "s|100000|10000|" > /etc/clickhouse-server/config.d/keeper_port.xml.tmp
+ sudo mv /etc/clickhouse-server/config.d/keeper_port.xml.tmp /etc/clickhouse-server/config.d/keeper_port.xml
+ sudo chown clickhouse /etc/clickhouse-server/config.d/keeper_port.xml
+ sudo chgrp clickhouse /etc/clickhouse-server/config.d/keeper_port.xml
+
# for clickhouse-server (via service)
echo "ASAN_OPTIONS='malloc_context_size=10 verbosity=1 allocator_release_to_os_interval_ms=10000'" >> /etc/environment
# for clickhouse-client
@@ -179,6 +185,8 @@ zgrep -Fa "########################################" /test_output/* > /dev/null
for log_file in /var/log/clickhouse-server/clickhouse-server.log*
do
pigz < "${log_file}" > /test_output/"$(basename ${log_file})".gz
+ # FIXME: remove once only GitHub Actions remain
+ rm "${log_file}"
done
tar -chf /test_output/coordination.tar /var/lib/clickhouse/coordination ||:
diff --git a/docker/test/stress/stress b/docker/test/stress/stress
index 5e98c67d8e1..2202fed8a92 100755
--- a/docker/test/stress/stress
+++ b/docker/test/stress/stress
@@ -138,7 +138,7 @@ if __name__ == "__main__":
parser.add_argument("--client-cmd", default='clickhouse-client')
parser.add_argument("--server-log-folder", default='/var/log/clickhouse-server')
parser.add_argument("--output-folder")
- parser.add_argument("--global-time-limit", type=int, default=3600)
+ parser.add_argument("--global-time-limit", type=int, default=1800)
parser.add_argument("--num-parallel", type=int, default=cpu_count())
parser.add_argument('--hung-check', action='store_true', default=False)
# make sense only for hung check
diff --git a/docker/test/style/Dockerfile b/docker/test/style/Dockerfile
index a9c1eca8de4..824af0de022 100644
--- a/docker/test/style/Dockerfile
+++ b/docker/test/style/Dockerfile
@@ -1,7 +1,9 @@
# docker build -t clickhouse/style-test .
FROM ubuntu:20.04
-RUN sed -i 's|http://archive|http://ru.archive|g' /etc/apt/sources.list
+# ARG for quick switch to a given ubuntu mirror
+ARG apt_archive="http://archive.ubuntu.com"
+RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list
RUN apt-get update && env DEBIAN_FRONTEND=noninteractive apt-get install --yes \
shellcheck \
@@ -10,7 +12,7 @@ RUN apt-get update && env DEBIAN_FRONTEND=noninteractive apt-get install --yes \
python3-pip \
pylint \
yamllint \
- && pip3 install codespell PyGithub boto3 unidiff
+ && pip3 install codespell PyGithub boto3 unidiff dohq-artifactory
COPY run.sh /
COPY process_style_check_result.py /
diff --git a/docker/test/testflows/runner/Dockerfile b/docker/test/testflows/runner/Dockerfile
index 91d0eb844d9..8ea3cd46973 100644
--- a/docker/test/testflows/runner/Dockerfile
+++ b/docker/test/testflows/runner/Dockerfile
@@ -1,7 +1,9 @@
# docker build -t clickhouse/testflows-runner .
FROM ubuntu:20.04
-RUN sed -i 's|http://archive|http://ru.archive|g' /etc/apt/sources.list
+# ARG for quick switch to a given ubuntu mirror
+ARG apt_archive="http://archive.ubuntu.com"
+RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list
RUN apt-get update \
&& env DEBIAN_FRONTEND=noninteractive apt-get install --yes \
diff --git a/docs/_includes/install/universal.sh b/docs/_includes/install/universal.sh
index ad0eb4979a3..3861862eb55 100755
--- a/docs/_includes/install/universal.sh
+++ b/docs/_includes/install/universal.sh
@@ -34,7 +34,7 @@ then
if [ "${ARCH}" = "x86_64" ]
then
DIR="macos"
- elif [ "${ARCH}" = "aarch64" ]
+ elif [ "${ARCH}" = "aarch64" -o "${ARCH}" = "arm64" ]
then
DIR="macos-aarch64"
fi
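The extra branch is needed because `uname -m` reports the same CPUs differently across kernels: Linux says `aarch64`, while macOS on Apple silicon says `arm64`. The check therefore accepts both:

``` bash
ARCH=$(uname -m)
if [ "${ARCH}" = "aarch64" ] || [ "${ARCH}" = "arm64" ]; then
    echo "64-bit ARM"
fi
```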
diff --git a/docs/en/development/browse-code.md b/docs/en/development/browse-code.md
index 35555bbd79c..fa57d2289b3 100644
--- a/docs/en/development/browse-code.md
+++ b/docs/en/development/browse-code.md
@@ -1,5 +1,5 @@
---
-toc_priority: 71
+toc_priority: 72
toc_title: Source Code Browser
---
diff --git a/docs/en/development/build-cross-osx.md b/docs/en/development/build-cross-osx.md
index 6f3b3a717d0..c7e40013113 100644
--- a/docs/en/development/build-cross-osx.md
+++ b/docs/en/development/build-cross-osx.md
@@ -9,14 +9,14 @@ This is for the case when you have Linux machine and want to use it to build `cl
The cross-build for Mac OS X is based on the [Build instructions](../development/build.md), follow them first.
-## Install Clang-8 {#install-clang-8}
+## Install Clang-13
Follow the instructions from https://apt.llvm.org/ for your Ubuntu or Debian setup.
For example the commands for Bionic are like:
``` bash
-sudo echo "deb [trusted=yes] http://apt.llvm.org/bionic/ llvm-toolchain-bionic-8 main" >> /etc/apt/sources.list
-sudo apt-get install clang-8
+sudo echo "deb [trusted=yes] http://apt.llvm.org/bionic/ llvm-toolchain-bionic-13 main" >> /etc/apt/sources.list
+sudo apt-get install clang-13
```
## Install Cross-Compilation Toolset {#install-cross-compilation-toolset}
@@ -25,6 +25,7 @@ Let’s remember the path where we install `cctools` as ${CCTOOLS}
``` bash
mkdir ${CCTOOLS}
+cd ${CCTOOLS}
git clone https://github.com/tpoechtrager/apple-libtapi.git
cd apple-libtapi
@@ -34,7 +35,7 @@ cd ..
git clone https://github.com/tpoechtrager/cctools-port.git
cd cctools-port/cctools
-./configure --prefix=${CCTOOLS} --with-libtapi=${CCTOOLS} --target=x86_64-apple-darwin
+./configure --prefix=$(readlink -f ${CCTOOLS}) --with-libtapi=$(readlink -f ${CCTOOLS}) --target=x86_64-apple-darwin
make install
```
@@ -51,12 +52,10 @@ tar xJf MacOSX10.15.sdk.tar.xz -C build-darwin/cmake/toolchain/darwin-x86_64 --s
``` bash
cd ClickHouse
-mkdir build-osx
-CC=clang-8 CXX=clang++-8 cmake . -Bbuild-osx -DCMAKE_TOOLCHAIN_FILE=cmake/darwin/toolchain-x86_64.cmake \
- -DCMAKE_AR:FILEPATH=${CCTOOLS}/bin/x86_64-apple-darwin-ar \
- -DCMAKE_RANLIB:FILEPATH=${CCTOOLS}/bin/x86_64-apple-darwin-ranlib \
- -DLINKER_NAME=${CCTOOLS}/bin/x86_64-apple-darwin-ld
-ninja -C build-osx
+mkdir build-darwin
+cd build-darwin
+CC=clang-13 CXX=clang++-13 cmake \
+    -DCMAKE_AR:FILEPATH=${CCTOOLS}/bin/aarch64-apple-darwin-ar \
+    -DCMAKE_INSTALL_NAME_TOOL=${CCTOOLS}/bin/aarch64-apple-darwin-install_name_tool \
+    -DCMAKE_RANLIB:FILEPATH=${CCTOOLS}/bin/aarch64-apple-darwin-ranlib \
+    -DLINKER_NAME=${CCTOOLS}/bin/aarch64-apple-darwin-ld \
+    -DCMAKE_TOOLCHAIN_FILE=cmake/darwin/toolchain-x86_64.cmake ..
+ninja
```
The resulting binary will have a Mach-O executable format and can’t be run on Linux.
diff --git a/docs/en/development/build-cross-riscv.md b/docs/en/development/build-cross-riscv.md
new file mode 100644
index 00000000000..977387af207
--- /dev/null
+++ b/docs/en/development/build-cross-riscv.md
@@ -0,0 +1,30 @@
+---
+toc_priority: 68
+toc_title: Build on Linux for RISC-V 64
+---
+
+# How to Build ClickHouse on Linux for RISC-V 64 Architecture {#how-to-build-clickhouse-on-linux-for-risc-v-64-architecture}
+
+As of writing (11.11.2021), building for RISC-V is considered highly experimental, and not all features can be enabled.
+
+This is for the case when you have a Linux machine and want to use it to build the `clickhouse` binary that will run on another Linux machine with the RISC-V 64 CPU architecture. It is intended for continuous integration checks that run on Linux servers.
+
+The cross-build for RISC-V 64 is based on the [Build instructions](../development/build.md), follow them first.
+
+## Install Clang-13
+
+Follow the instructions from https://apt.llvm.org/ for your Ubuntu or Debian setup, or run:
+```
+sudo bash -c "$(wget -O - https://apt.llvm.org/llvm.sh)"
+```
+
+## Build ClickHouse {#build-clickhouse}
+
+``` bash
+cd ClickHouse
+mkdir build-riscv64
+CC=clang-13 CXX=clang++-13 cmake . -Bbuild-riscv64 -G Ninja \
+    -DCMAKE_TOOLCHAIN_FILE=cmake/linux/toolchain-riscv64.cmake \
+    -DGLIBC_COMPATIBILITY=OFF -DENABLE_LDAP=OFF -DOPENSSL_NO_ASM=ON -DENABLE_JEMALLOC=ON \
+    -DENABLE_PARQUET=OFF -DUSE_INTERNAL_PARQUET_LIBRARY=OFF -DENABLE_ORC=OFF -DUSE_INTERNAL_ORC_LIBRARY=OFF \
+    -DUSE_UNWIND=OFF -DUSE_INTERNAL_PROTOBUF_LIBRARY=ON -DENABLE_GRPC=OFF -DUSE_INTERNAL_GRPC_LIBRARY=OFF \
+    -DENABLE_HDFS=OFF -DUSE_INTERNAL_HDFS3_LIBRARY=OFF -DENABLE_MYSQL=OFF -DUSE_INTERNAL_MYSQL_LIBRARY=OFF
+ninja -C build-riscv64
+```
+
+The resulting binary will run only on Linux with the RISC-V 64 CPU architecture.
diff --git a/docs/en/development/build.md b/docs/en/development/build.md
index 633549c68f3..982ba0556a9 100644
--- a/docs/en/development/build.md
+++ b/docs/en/development/build.md
@@ -74,7 +74,7 @@ The build requires the following components:
- Git (is used only to checkout the sources, it’s not needed for the build)
- CMake 3.10 or newer
- Ninja
-- C++ compiler: clang-11 or newer
+- C++ compiler: clang-13 or newer
- Linker: lld
- Python (is only used inside LLVM build and it is optional)
diff --git a/docs/en/development/continuous-integration.md b/docs/en/development/continuous-integration.md
index 0176a2dcb76..69f01ee63b4 100644
--- a/docs/en/development/continuous-integration.md
+++ b/docs/en/development/continuous-integration.md
@@ -126,7 +126,6 @@ Builds ClickHouse in various configurations for use in further steps. You have t
- **Compiler**: `gcc-9` or `clang-10` (or `clang-10-xx` for other architectures e.g. `clang-10-freebsd`).
- **Build type**: `Debug` or `RelWithDebInfo` (cmake).
- **Sanitizer**: `none` (without sanitizers), `address` (ASan), `memory` (MSan), `undefined` (UBSan), or `thread` (TSan).
-- **Bundled**: `bundled` build uses libraries from `contrib` folder, and `unbundled` build uses system libraries.
- **Splitted** `splitted` is a [split build](../development/build.md#split-build)
- **Status**: `success` or `fail`
- **Build log**: link to the building and files copying log, useful when build failed.
diff --git a/docs/en/development/contrib.md b/docs/en/development/contrib.md
index a9b9a5d1e44..07969f8ef6a 100644
--- a/docs/en/development/contrib.md
+++ b/docs/en/development/contrib.md
@@ -1,5 +1,5 @@
---
-toc_priority: 70
+toc_priority: 71
toc_title: Third-Party Libraries Used
---
diff --git a/docs/en/development/developer-instruction.md b/docs/en/development/developer-instruction.md
index 024ce27d60d..52fa307333c 100644
--- a/docs/en/development/developer-instruction.md
+++ b/docs/en/development/developer-instruction.md
@@ -37,7 +37,7 @@ Next, you need to download the source files onto your working machine. This is c
In the command line terminal run:
- git clone git@github.com:your_github_username/ClickHouse.git
+ git clone --recursive git@github.com:your_github_username/ClickHouse.git
cd ClickHouse
Note: please, substitute *your_github_username* with what is appropriate!
@@ -65,7 +65,7 @@ It generally means that the SSH keys for connecting to GitHub are missing. These
You can also clone the repository via https protocol:
- git clone https://github.com/ClickHouse/ClickHouse.git
+ git clone --recursive https://github.com/ClickHouse/ClickHouse.git
This, however, will not let you send your changes to the server. You can still use it temporarily and add the SSH keys later replacing the remote address of the repository with `git remote` command.
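Cloning with `--recursive` matters because the build needs the `contrib` submodules. If the repository was already cloned without it, the submodules can still be fetched afterwards:

``` bash
git submodule update --init --recursive
```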
@@ -241,7 +241,7 @@ Adding third-party libraries: https://clickhouse.com/docs/en/development/contrib
Writing tests: https://clickhouse.com/docs/en/development/tests/
-List of tasks: https://github.com/ClickHouse/ClickHouse/issues?q=is%3Aopen+is%3Aissue+label%3A%22easy+task%22
+List of tasks: https://github.com/ClickHouse/ClickHouse/issues?q=is%3Aopen+is%3Aissue+label%3Ahacktoberfest
## Test Data {#test-data}
diff --git a/docs/en/development/style.md b/docs/en/development/style.md
index bc38f0711cf..49b2f68b9f3 100644
--- a/docs/en/development/style.md
+++ b/docs/en/development/style.md
@@ -1,5 +1,5 @@
---
-toc_priority: 68
+toc_priority: 69
toc_title: C++ Guide
---
diff --git a/docs/en/development/tests.md b/docs/en/development/tests.md
index 0e2aa348483..ea32f608124 100644
--- a/docs/en/development/tests.md
+++ b/docs/en/development/tests.md
@@ -1,5 +1,5 @@
---
-toc_priority: 69
+toc_priority: 70
toc_title: Testing
---
diff --git a/docs/en/engines/database-engines/materialized-postgresql.md b/docs/en/engines/database-engines/materialized-postgresql.md
index 0f0ffaca343..6f5ebafdb61 100644
--- a/docs/en/engines/database-engines/materialized-postgresql.md
+++ b/docs/en/engines/database-engines/materialized-postgresql.md
@@ -23,15 +23,15 @@ ENGINE = MaterializedPostgreSQL('host:port', ['database' | database], 'user', 'p
- `user` — PostgreSQL user.
- `password` — User password.
-## Dynamically adding new tables to replication
+## Dynamically adding new tables to replication {#dynamically-adding-table-to-replication}
``` sql
ATTACH TABLE postgres_database.new_table;
```
-It will work as well if there is a setting `materialized_postgresql_tables_list`.
+When a list of tables is specified via the [materialized_postgresql_tables_list](../../operations/settings/settings.md#materialized-postgresql-tables-list) setting, it is updated to reflect the tables added with the `ATTACH TABLE` query.
-## Dynamically removing tables from replication
+## Dynamically removing tables from replication {#dynamically-removing-table-from-replication}
``` sql
DETACH TABLE postgres_database.table_to_remove;
@@ -58,7 +58,7 @@ SETTINGS materialized_postgresql_max_block_size = 65536,
SELECT * FROM database1.table1;
```
-It is also possible to change settings at run time.
+The settings can be changed, if necessary, using a DDL query. The exception is `materialized_postgresql_tables_list`, which cannot be modified directly; to update the list of tables in this setting, use the `ATTACH TABLE` query.
``` sql
ALTER DATABASE postgres_database MODIFY SETTING materialized_postgresql_max_block_size = <new_size>;
diff --git a/docs/en/engines/table-engines/integrations/s3.md b/docs/en/engines/table-engines/integrations/s3.md
index e494e9aec6a..691666cffef 100644
--- a/docs/en/engines/table-engines/integrations/s3.md
+++ b/docs/en/engines/table-engines/integrations/s3.md
@@ -11,7 +11,8 @@ This engine provides integration with [Amazon S3](https://aws.amazon.com/s3/) ec
``` sql
CREATE TABLE s3_engine_table (name String, value UInt32)
-ENGINE = S3(path, [aws_access_key_id, aws_secret_access_key,] format, [compression])
+ ENGINE = S3(path, [aws_access_key_id, aws_secret_access_key,] format, [compression])
+ [SETTINGS ...]
```
**Engine parameters**
@@ -23,21 +24,13 @@ ENGINE = S3(path, [aws_access_key_id, aws_secret_access_key,] format, [compressi
**Example**
-1. Set up the `s3_engine_table` table:
-
``` sql
-CREATE TABLE s3_engine_table (name String, value UInt32) ENGINE=S3('https://storage.yandexcloud.net/my-test-bucket-768/test-data.csv.gz', 'CSV', 'gzip');
-```
+CREATE TABLE s3_engine_table (name String, value UInt32)
+ ENGINE=S3('https://storage.yandexcloud.net/my-test-bucket-768/test-data.csv.gz', 'CSV', 'gzip')
+ SETTINGS input_format_with_names_use_header = 0;
-2. Fill file:
-
-``` sql
INSERT INTO s3_engine_table VALUES ('one', 1), ('two', 2), ('three', 3);
-```
-3. Query the data:
-
-``` sql
SELECT * FROM s3_engine_table LIMIT 2;
```
@@ -73,57 +66,54 @@ For more information about virtual columns see [here](../../../engines/table-eng
Constructions with `{}` are similar to the [remote](../../../sql-reference/table-functions/remote.md) table function.
-**Example**
+!!! warning "Warning"
+ If the listing of files contains number ranges with leading zeros, use the construction with braces for each digit separately or use `?`.
-1. Suppose we have several files in CSV format with the following URIs on S3:
-
-- 'https://storage.yandexcloud.net/my-test-bucket-768/some_prefix/some_file_1.csv'
-- 'https://storage.yandexcloud.net/my-test-bucket-768/some_prefix/some_file_2.csv'
-- 'https://storage.yandexcloud.net/my-test-bucket-768/some_prefix/some_file_3.csv'
-- 'https://storage.yandexcloud.net/my-test-bucket-768/another_prefix/some_file_1.csv'
-- 'https://storage.yandexcloud.net/my-test-bucket-768/another_prefix/some_file_2.csv'
-- 'https://storage.yandexcloud.net/my-test-bucket-768/another_prefix/some_file_3.csv'
-
-There are several ways to make a table consisting of all six files:
-
-The first way:
-
-``` sql
-CREATE TABLE table_with_range (name String, value UInt32) ENGINE = S3('https://storage.yandexcloud.net/my-test-bucket-768/{some,another}_prefix/some_file_{1..3}', 'CSV');
-```
-
-Another way:
-
-``` sql
-CREATE TABLE table_with_question_mark (name String, value UInt32) ENGINE = S3('https://storage.yandexcloud.net/my-test-bucket-768/{some,another}_prefix/some_file_?', 'CSV');
-```
-
-Table consists of all the files in both directories (all files should satisfy format and schema described in query):
-
-``` sql
-CREATE TABLE table_with_asterisk (name String, value UInt32) ENGINE = S3('https://storage.yandexcloud.net/my-test-bucket-768/{some,another}_prefix/*', 'CSV');
-```
-
-If the listing of files contains number ranges with leading zeros, use the construction with braces for each digit separately or use `?`.
-
-**Example**
+**Example with wildcards 1**
Create table with files named `file-000.csv`, `file-001.csv`, … , `file-999.csv`:
``` sql
-CREATE TABLE big_table (name String, value UInt32) ENGINE = S3('https://storage.yandexcloud.net/my-test-bucket-768/big_prefix/file-{000..999}.csv', 'CSV');
+CREATE TABLE big_table (name String, value UInt32)
+ ENGINE = S3('https://storage.yandexcloud.net/my-bucket/my_folder/file-{000..999}.csv', 'CSV');
```
-## Virtual Columns {#virtual-columns}
+**Example with wildcards 2**
-- `_path` — Path to the file.
-- `_file` — Name of the file.
+Suppose we have several files in CSV format with the following URIs on S3:
-**See Also**
+- 'https://storage.yandexcloud.net/my-bucket/some_folder/some_file_1.csv'
+- 'https://storage.yandexcloud.net/my-bucket/some_folder/some_file_2.csv'
+- 'https://storage.yandexcloud.net/my-bucket/some_folder/some_file_3.csv'
+- 'https://storage.yandexcloud.net/my-bucket/another_folder/some_file_1.csv'
+- 'https://storage.yandexcloud.net/my-bucket/another_folder/some_file_2.csv'
+- 'https://storage.yandexcloud.net/my-bucket/another_folder/some_file_3.csv'
-- [Virtual columns](../../../engines/table-engines/index.md#table_engines-virtual_columns)
-## S3-related settings {#settings}
+There are several ways to make a table consisting of all six files:
+
+1. Specify the range of file postfixes:
+
+``` sql
+CREATE TABLE table_with_range (name String, value UInt32)
+ ENGINE = S3('https://storage.yandexcloud.net/my-bucket/{some,another}_folder/some_file_{1..3}', 'CSV');
+```
+
+2. Take all files with `some_file_` prefix (there should be no extra files with such prefix in both folders):
+
+``` sql
+CREATE TABLE table_with_question_mark (name String, value UInt32)
+ ENGINE = S3('https://storage.yandexcloud.net/my-bucket/{some,another}_folder/some_file_?', 'CSV');
+```
+
+3. Take all the files in both folders (all files should satisfy format and schema described in query):
+
+``` sql
+CREATE TABLE table_with_asterisk (name String, value UInt32)
+ ENGINE = S3('https://storage.yandexcloud.net/my-bucket/{some,another}_folder/*', 'CSV');
+```
+
+## S3-related Settings {#settings}
The following settings can be set before query execution or placed into configuration file.
@@ -165,49 +155,6 @@ The following settings can be specified in configuration file for given endpoint
```
-## Usage {#usage-examples}
-
-Suppose we have several files in CSV format with the following URIs on S3:
-
-- 'https://storage.yandexcloud.net/my-test-bucket-768/some_prefix/some_file_1.csv'
-- 'https://storage.yandexcloud.net/my-test-bucket-768/some_prefix/some_file_2.csv'
-- 'https://storage.yandexcloud.net/my-test-bucket-768/some_prefix/some_file_3.csv'
-- 'https://storage.yandexcloud.net/my-test-bucket-768/another_prefix/some_file_1.csv'
-- 'https://storage.yandexcloud.net/my-test-bucket-768/another_prefix/some_file_2.csv'
-- 'https://storage.yandexcloud.net/my-test-bucket-768/another_prefix/some_file_3.csv'
-
-
-1. There are several ways to make a table consisting of all six files:
-
-``` sql
-CREATE TABLE table_with_range (name String, value UInt32)
-ENGINE = S3('https://storage.yandexcloud.net/my-test-bucket-768/{some,another}_prefix/some_file_{1..3}', 'CSV');
-```
-
-2. Another way:
-
-``` sql
-CREATE TABLE table_with_question_mark (name String, value UInt32)
-ENGINE = S3('https://storage.yandexcloud.net/my-test-bucket-768/{some,another}_prefix/some_file_?', 'CSV');
-```
-
-3. Table consists of all the files in both directories (all files should satisfy format and schema described in query):
-
-``` sql
-CREATE TABLE table_with_asterisk (name String, value UInt32)
-ENGINE = S3('https://storage.yandexcloud.net/my-test-bucket-768/{some,another}_prefix/*', 'CSV');
-```
-
-!!! warning "Warning"
- If the listing of files contains number ranges with leading zeros, use the construction with braces for each digit separately or use `?`.
-
-4. Create table with files named `file-000.csv`, `file-001.csv`, … , `file-999.csv`:
-
-``` sql
-CREATE TABLE big_table (name String, value UInt32)
-ENGINE = S3('https://storage.yandexcloud.net/my-test-bucket-768/big_prefix/file-{000..999}.csv', 'CSV');
-```
-
## See also
- [s3 table function](../../../sql-reference/table-functions/s3.md)
diff --git a/docs/en/engines/table-engines/mergetree-family/mergetree.md b/docs/en/engines/table-engines/mergetree-family/mergetree.md
index aeaf39e28cb..d08de080e6b 100644
--- a/docs/en/engines/table-engines/mergetree-family/mergetree.md
+++ b/docs/en/engines/table-engines/mergetree-family/mergetree.md
@@ -787,6 +787,8 @@ Moving data does not interfere with data replication. Therefore, different stora
After the completion of background merges and mutations, old parts are removed only after a certain amount of time (`old_parts_lifetime`).
During this time, they are not moved to other volumes or disks. Therefore, until the parts are finally removed, they are still taken into account for evaluation of the occupied disk space.
+Users can assign new big parts to different disks of a [JBOD](https://en.wikipedia.org/wiki/Non-RAID_drive_architectures) volume in a balanced way using the [min_bytes_to_rebalance_partition_over_jbod](../../../operations/settings/merge-tree-settings.md#min-bytes-to-rebalance-partition-over-jbod) setting.
+
## Using S3 for Data Storage {#table_engine-mergetree-s3}
`MergeTree` family table engines can store data to [S3](https://aws.amazon.com/s3/) using a disk with type `s3`.
diff --git a/docs/en/engines/table-engines/special/merge.md b/docs/en/engines/table-engines/special/merge.md
index 19ce19fcc64..27f783a3cea 100644
--- a/docs/en/engines/table-engines/special/merge.md
+++ b/docs/en/engines/table-engines/special/merge.md
@@ -9,45 +9,57 @@ The `Merge` engine (not to be confused with `MergeTree`) does not store data its
Reading is automatically parallelized. Writing to a table is not supported. When reading, the indexes of tables that are actually being read are used, if they exist.
-The `Merge` engine accepts parameters: the database name and a regular expression for tables.
-
-## Examples {#examples}
-
-Example 1:
+## Creating a Table {#creating-a-table}
``` sql
-Merge(hits, '^WatchLog')
+ CREATE TABLE ... Engine=Merge(db_name, tables_regexp)
```
-Data will be read from the tables in the `hits` database that have names that match the regular expression ‘`^WatchLog`’.
+**Engine Parameters**
-Instead of the database name, you can use a constant expression that returns a string. For example, `currentDatabase()`.
+- `db_name` — Possible values:
+ - database name,
+ - constant expression that returns a string with a database name, for example, `currentDatabase()`,
+ - `REGEXP(expression)`, where `expression` is a regular expression to match the DB names.
+
+- `tables_regexp` — A regular expression to match the table names in the specified DB or DBs.
Regular expressions — [re2](https://github.com/google/re2) (supports a subset of PCRE), case-sensitive.
-See the notes about escaping symbols in regular expressions in the “match” section.
+See the notes about escaping symbols in regular expressions in the "match" section.
-When selecting tables to read, the `Merge` table itself will not be selected, even if it matches the regex. This is to avoid loops.
-It is possible to create two `Merge` tables that will endlessly try to read each others’ data, but this is not a good idea.
+## Usage {#usage}
+
+When selecting tables to read, the `Merge` table itself is not selected, even if it matches the regex. This is to avoid loops.
+It is possible to create two `Merge` tables that will endlessly try to read each others' data, but this is not a good idea.
The typical way to use the `Merge` engine is for working with a large number of `TinyLog` tables as if with a single table.
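+
+For instance, a hypothetical sketch of that pattern (the table names are illustrative):
+
+``` sql
+CREATE TABLE log_2021_01 (message String) ENGINE = TinyLog;
+CREATE TABLE log_2021_02 (message String) ENGINE = TinyLog;
+-- reads from every table in the current database whose name starts with log_2021_
+CREATE TABLE all_logs (message String) ENGINE = Merge(currentDatabase(), '^log_2021_');
+SELECT count() FROM all_logs;
+```
+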
-Example 2:
+## Examples {#examples}
-Let’s say you have a old table (WatchLog_old) and decided to change partitioning without moving data to a new table (WatchLog_new) and you need to see data from both tables.
+**Example 1**
+
+Consider two databases `ABC_corporate_site` and `ABC_store`. The `all_visitors` table will contain IDs from the `visitors` tables in both databases.
``` sql
-CREATE TABLE WatchLog_old(date Date, UserId Int64, EventType String, Cnt UInt64)
-ENGINE=MergeTree(date, (UserId, EventType), 8192);
+CREATE TABLE all_visitors (id UInt32) ENGINE=Merge(REGEXP('ABC_*'), 'visitors');
+```
+
+**Example 2**
+
+Let's say you have an old table `WatchLog_old` and decided to change partitioning without moving data to a new table `WatchLog_new`, and you need to see data from both tables.
+
+``` sql
+CREATE TABLE WatchLog_old(date Date, UserId Int64, EventType String, Cnt UInt64)
+ ENGINE=MergeTree(date, (UserId, EventType), 8192);
INSERT INTO WatchLog_old VALUES ('2018-01-01', 1, 'hit', 3);
-CREATE TABLE WatchLog_new(date Date, UserId Int64, EventType String, Cnt UInt64)
-ENGINE=MergeTree PARTITION BY date ORDER BY (UserId, EventType) SETTINGS index_granularity=8192;
+CREATE TABLE WatchLog_new(date Date, UserId Int64, EventType String, Cnt UInt64)
+ ENGINE=MergeTree PARTITION BY date ORDER BY (UserId, EventType) SETTINGS index_granularity=8192;
INSERT INTO WatchLog_new VALUES ('2018-01-02', 2, 'hit', 3);
CREATE TABLE WatchLog as WatchLog_old ENGINE=Merge(currentDatabase(), '^WatchLog');
-SELECT *
-FROM WatchLog
+SELECT * FROM WatchLog;
```
``` text
@@ -68,5 +80,4 @@ FROM WatchLog
**See Also**
- [Virtual columns](../../../engines/table-engines/special/index.md#table_engines-virtual_columns)
-
-[Original article](https://clickhouse.com/docs/en/operations/table_engines/merge/)
+- [merge](../../../sql-reference/table-functions/merge.md) table function
diff --git a/docs/en/getting-started/example-datasets/nyc-taxi.md b/docs/en/getting-started/example-datasets/nyc-taxi.md
index b3233315db3..64810d3fa37 100644
--- a/docs/en/getting-started/example-datasets/nyc-taxi.md
+++ b/docs/en/getting-started/example-datasets/nyc-taxi.md
@@ -332,7 +332,7 @@ ORDER BY year, count(*) DESC
The following server was used:
-Two Intel(R) Xeon(R) CPU E5-2650 v2 @ 2.60GHz, 16 physical kernels total,128 GiB RAM,8x6 TB HD on hardware RAID-5
+Two Intel(R) Xeon(R) CPU E5-2650 v2 @ 2.60GHz, 16 physical cores total, 128 GiB RAM, 8x6 TB HD on hardware RAID-5
Execution time is the best of three runs. But starting from the second run, queries read data from the file system cache. No further caching occurs: the data is read out and processed in each run.
@@ -381,8 +381,11 @@ We ran queries using a client located in a Yandex datacenter in Finland on a clu
| servers | Q1 | Q2 | Q3 | Q4 |
|---------|-------|-------|-------|-------|
-| 1 | 0.490 | 1.224 | 2.104 | 3.593 |
-| 3 | 0.212 | 0.438 | 0.733 | 1.241 |
-| 140 | 0.028 | 0.043 | 0.051 | 0.072 |
+| 1, E5-2650v2 | 0.490 | 1.224 | 2.104 | 3.593 |
+| 3, E5-2650v2 | 0.212 | 0.438 | 0.733 | 1.241 |
+| 1, AWS c5n.4xlarge | 0.249 | 1.279 | 1.738 | 3.527 |
+| 1, AWS c5n.9xlarge | 0.130 | 0.584 | 0.777 | 1.811 |
+| 3, AWS c5n.9xlarge | 0.057 | 0.231 | 0.285 | 0.641 |
+| 140, E5-2650v2 | 0.028 | 0.043 | 0.051 | 0.072 |
[Original article](https://clickhouse.com/docs/en/getting_started/example_datasets/nyc_taxi/)
diff --git a/docs/en/getting-started/example-datasets/ontime.md b/docs/en/getting-started/example-datasets/ontime.md
index bcba4cfb77d..efc807b75fa 100644
--- a/docs/en/getting-started/example-datasets/ontime.md
+++ b/docs/en/getting-started/example-datasets/ontime.md
@@ -15,7 +15,7 @@ This dataset can be obtained in two ways:
Downloading data:
``` bash
-echo https://transtats.bts.gov/PREZIP/On_Time_Reporting_Carrier_On_Time_Performance_1987_present_{1987..2021}_{1..12}.zip | xargs -P10 wget --no-check-certificate --continue
+wget --no-check-certificate --continue https://transtats.bts.gov/PREZIP/On_Time_Reporting_Carrier_On_Time_Performance_1987_present_{1987..2021}_{1..12}.zip
```
Creating a table:
diff --git a/docs/en/interfaces/formats.md b/docs/en/interfaces/formats.md
index 5b7213d0bfe..c16db5c3db2 100644
--- a/docs/en/interfaces/formats.md
+++ b/docs/en/interfaces/formats.md
@@ -61,7 +61,7 @@ The supported formats are:
| [Native](#native) | ✔ | ✔ |
| [Null](#null) | ✗ | ✔ |
| [XML](#xml) | ✗ | ✔ |
-| [CapnProto](#capnproto) | ✔ | ✗ |
+| [CapnProto](#capnproto) | ✔ | ✔ |
| [LineAsString](#lineasstring) | ✔ | ✗ |
| [Regexp](#data-format-regexp) | ✔ | ✗ |
| [RawBLOB](#rawblob) | ✔ | ✔ |
@@ -130,13 +130,17 @@ Only a small set of symbols are escaped. You can easily stumble onto a string va
Arrays are written as a list of comma-separated values in square brackets. Number items in the array are formatted as usual. `Date` and `DateTime` types are written in single quotes. Strings are written in single quotes with the same escaping rules as above.
-[NULL](../sql-reference/syntax.md) is formatted as `\N`.
+[NULL](../sql-reference/syntax.md) is formatted according to the setting [format_tsv_null_representation](../operations/settings/settings.md#settings-format_tsv_null_representation) (default value is `\N`).
+
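+For instance, a minimal sketch of overriding the representation per query:
+
+``` sql
+-- prints the literal NULL instead of the default \N
+SELECT NULL AS x SETTINGS format_tsv_null_representation = 'NULL' FORMAT TSV
+```
+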
If setting [input_format_tsv_empty_as_default](../operations/settings/settings.md#settings-input_format_tsv_empty_as_default) is enabled,
empty input fields are replaced with default values. For complex default expressions [input_format_defaults_for_omitted_fields](../operations/settings/settings.md#settings-input_format_defaults_for_omitted_fields) must be enabled too.
Each element of [Nested](../sql-reference/data-types/nested-data-structures/nested.md) structures is represented as array.
+In input data, ENUM values can be represented as names or as ids. First, we try to match the input value to an ENUM name. If that fails and the input value is a number, we try to match it to an ENUM id.
+If input data contains only ENUM ids, it's recommended to enable the setting [input_format_tsv_enum_as_number](../operations/settings/settings.md#settings-input_format_tsv_enum_as_number) to optimize ENUM parsing.
+
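+A minimal sketch of such an insert (the table and its `Enum` column are illustrative assumptions):
+
+``` sql
+INSERT INTO table_with_enum SETTINGS input_format_tsv_enum_as_number = 1 FORMAT TSV
+102	2
+```
+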
For example:
``` sql
@@ -173,6 +177,9 @@ This format is also available under the name `TSVRaw`.
## TabSeparatedWithNames {#tabseparatedwithnames}
Differs from the `TabSeparated` format in that the column names are written in the first row.
+
+During parsing, the first row is expected to contain the column names. You can use column names to determine their positions and to check their correctness.
+
If setting [input_format_with_names_use_header](../operations/settings/settings.md#settings-input_format_with_names_use_header) is set to 1,
the columns from input data will be mapped to the columns from the table by their names, columns with unknown names will be skipped if setting [input_format_skip_unknown_fields](../operations/settings/settings.md#settings-input_format_skip_unknown_fields) is set to 1.
Otherwise, the first row will be skipped.
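+
+For instance, a minimal sketch of inserting data with a header row (the table name is an illustrative assumption):
+
+``` bash
+$ echo -e "name\tvalue\nhello\t1" | clickhouse-client --query "INSERT INTO insert_test FORMAT TabSeparatedWithNames"
+```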
@@ -405,7 +412,10 @@ When parsing, all values can be parsed either with or without quotes. Both doubl
If setting [input_format_csv_empty_as_default](../operations/settings/settings.md#settings-input_format_csv_empty_as_default) is enabled,
empty unquoted input values are replaced with default values. For complex default expressions [input_format_defaults_for_omitted_fields](../operations/settings/settings.md#settings-input_format_defaults_for_omitted_fields) must be enabled too.
-`NULL` is formatted as `\N` or `NULL` or an empty unquoted string (see settings [input_format_csv_unquoted_null_literal_as_null](../operations/settings/settings.md#settings-input_format_csv_unquoted_null_literal_as_null) and [input_format_defaults_for_omitted_fields](../operations/settings/settings.md#session_settings-input_format_defaults_for_omitted_fields)).
+`NULL` is formatted according to the setting [format_csv_null_representation](../operations/settings/settings.md#settings-format_csv_null_representation) (default value is `\N`).
+
+In input data, ENUM values can be represented as names or as ids. First, we try to match the input value to an ENUM name. If that fails and the input value is a number, we try to match it to an ENUM id.
+If input data contains only ENUM ids, it's recommended to enable the setting [input_format_csv_enum_as_number](../operations/settings/settings.md#settings-input_format_csv_enum_as_number) to optimize ENUM parsing.
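+
+A minimal sketch of such an insert (the table and its `Enum` column are illustrative assumptions):
+
+``` sql
+INSERT INTO table_with_enum SETTINGS input_format_csv_enum_as_number = 1 FORMAT CSV
+102,2
+```
+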
The CSV format supports the output of totals and extremes the same way as `TabSeparated`.
@@ -1085,12 +1095,44 @@ Arrays are output as `HelloWorld...`,an
## CapnProto {#capnproto}
-Cap’n Proto is a binary message format similar to Protocol Buffers and Thrift, but not like JSON or MessagePack.
+CapnProto is a binary message format similar to [Protocol Buffers](https://developers.google.com/protocol-buffers/) and [Thrift](https://en.wikipedia.org/wiki/Apache_Thrift), but not like [JSON](#json) or [MessagePack](https://msgpack.org/).
-Cap’n Proto messages are strictly typed and not self-describing, meaning they need an external schema description. The schema is applied on the fly and cached for each query.
+CapnProto messages are strictly typed and not self-describing, meaning they need an external schema description. The schema is applied on the fly and cached for each query.
+
+See also [Format Schema](#formatschema).
+
+### Data Types Matching {#data_types-matching-capnproto}
+
+The table below shows supported data types and how they match ClickHouse [data types](../sql-reference/data-types/index.md) in `INSERT` and `SELECT` queries.
+
+| CapnProto data type (`INSERT`) | ClickHouse data type | CapnProto data type (`SELECT`) |
+|--------------------------------|-----------------------------------------------------------|--------------------------------|
+| `UINT8`, `BOOL` | [UInt8](../sql-reference/data-types/int-uint.md) | `UINT8` |
+| `INT8` | [Int8](../sql-reference/data-types/int-uint.md) | `INT8` |
+| `UINT16` | [UInt16](../sql-reference/data-types/int-uint.md), [Date](../sql-reference/data-types/date.md) | `UINT16` |
+| `INT16` | [Int16](../sql-reference/data-types/int-uint.md) | `INT16` |
+| `UINT32` | [UInt32](../sql-reference/data-types/int-uint.md), [DateTime](../sql-reference/data-types/datetime.md) | `UINT32` |
+| `INT32` | [Int32](../sql-reference/data-types/int-uint.md) | `INT32` |
+| `UINT64` | [UInt64](../sql-reference/data-types/int-uint.md) | `UINT64` |
+| `INT64` | [Int64](../sql-reference/data-types/int-uint.md), [DateTime64](../sql-reference/data-types/datetime.md) | `INT64` |
+| `FLOAT32` | [Float32](../sql-reference/data-types/float.md) | `FLOAT32` |
+| `FLOAT64` | [Float64](../sql-reference/data-types/float.md) | `FLOAT64` |
+| `TEXT, DATA` | [String](../sql-reference/data-types/string.md), [FixedString](../sql-reference/data-types/fixedstring.md) | `TEXT, DATA` |
+| `union(T, Void), union(Void, T)` | [Nullable(T)](../sql-reference/data-types/nullable.md) | `union(T, Void), union(Void, T)` |
+| `ENUM` | [Enum(8\|16)](../sql-reference/data-types/enum.md) | `ENUM` |
+| `LIST` | [Array](../sql-reference/data-types/array.md) | `LIST` |
+| `STRUCT` | [Tuple](../sql-reference/data-types/tuple.md) | `STRUCT` |
+
+For working with `Enum` in the CapnProto format, use the [format_capn_proto_enum_comparising_mode](../operations/settings/settings.md#format-capn-proto-enum-comparising-mode) setting.
+
+Arrays can be nested and can have a value of the `Nullable` type as an argument. The `Tuple` type can also be nested.
+
+### Inserting and Selecting Data {#inserting-and-selecting-data-capnproto}
+
+You can insert CapnProto data from a file into a ClickHouse table with the following command:
``` bash
-$ cat capnproto_messages.bin | clickhouse-client --query "INSERT INTO test.hits FORMAT CapnProto SETTINGS format_schema='schema:Message'"
+$ cat capnproto_messages.bin | clickhouse-client --query "INSERT INTO test.hits FORMAT CapnProto SETTINGS format_schema = 'schema:Message'"
```
Where `schema.capnp` looks like this:
@@ -1102,9 +1144,11 @@ struct Message {
}
```
-Deserialization is effective and usually does not increase the system load.
+You can select data from a ClickHouse table and save it to a file in the CapnProto format with the following command:
-See also [Format Schema](#formatschema).
+``` bash
+$ clickhouse-client --query "SELECT * FROM test.hits FORMAT CapnProto SETTINGS format_schema = 'schema:Message'"
+```
## Protobuf {#protobuf}
diff --git a/docs/en/interfaces/http.md b/docs/en/interfaces/http.md
index 6454262122f..a2f0944de47 100644
--- a/docs/en/interfaces/http.md
+++ b/docs/en/interfaces/http.md
@@ -432,7 +432,7 @@ Example:
<url><![CDATA[/query_param_with_url/\w+/(?P<name_1>[^/]+)(/(?P<name_2>[^/]+))?]]></url>
- <method>GET</method>
+ <methods>GET</methods>
<XXX>TEST_HEADER_VALUE</XXX>
<PARAMS_XXX><![CDATA[(?P<name_1>[^/]+)(/(?P<name_2>[^/]+))?]]></PARAMS_XXX>
@@ -639,4 +639,4 @@ $ curl -vv -H 'XXX:xxx' 'http://localhost:8123/get_relative_path_static_handler'
<
Relative Path File
* Connection #0 to host localhost left intact
-```
\ No newline at end of file
+```
diff --git a/docs/en/interfaces/third-party/client-libraries.md b/docs/en/interfaces/third-party/client-libraries.md
index cb8679e4bdd..342b1c9a496 100644
--- a/docs/en/interfaces/third-party/client-libraries.md
+++ b/docs/en/interfaces/third-party/client-libraries.md
@@ -35,6 +35,8 @@ toc_title: Client Libraries
- NodeJs
- [clickhouse (NodeJs)](https://github.com/TimonKK/clickhouse)
- [node-clickhouse](https://github.com/apla/node-clickhouse)
+ - [nestjs-clickhouse](https://github.com/depyronick/nestjs-clickhouse)
+ - [clickhouse-client](https://github.com/depyronick/clickhouse-client)
- Perl
- [perl-DBD-ClickHouse](https://github.com/elcamlost/perl-DBD-ClickHouse)
- [HTTP-ClickHouse](https://metacpan.org/release/HTTP-ClickHouse)
diff --git a/docs/en/introduction/adopters.md b/docs/en/introduction/adopters.md
index 3006c08fce6..325cd1ff825 100644
--- a/docs/en/introduction/adopters.md
+++ b/docs/en/introduction/adopters.md
@@ -11,6 +11,7 @@ toc_title: Adopters
| Company | Industry | Usecase | Cluster Size | (Un)Compressed Data Size\* | Reference |
|---------|----------|---------|--------------|------------------------------------------------------------------------------|-----------|
| 2gis | Maps | Monitoring | — | — | [Talk in Russian, July 2019](https://youtu.be/58sPkXfq6nw) |
+| Adapty | Subscription Analytics | Main product | — | — | [Tweet, November 2021](https://twitter.com/iwitaly/status/1462698148061659139) |
| Admiral | Martech | Engagement Management | — | — | [Webinar Slides, June 2020](https://altinity.com/presentations/2020/06/16/big-data-in-real-time-how-clickhouse-powers-admirals-visitor-relationships-for-publishers) |
| AdScribe | Ads | TV Analytics | — | — | [A quote from CTO](https://altinity.com/24x7-support/) |
| Ahrefs | SEO | Analytics | — | — | [Job listing](https://ahrefs.com/jobs/data-scientist-search) |
@@ -19,7 +20,7 @@ toc_title: Adopters
| Aloha Browser | Mobile App | Browser backend | — | — | [Slides in Russian, May 2019](https://presentations.clickhouse.com/meetup22/aloha.pdf) |
| Altinity | Cloud, SaaS | Main product | — | — | [Official Website](https://altinity.com/) |
| Amadeus | Travel | Analytics | — | — | [Press Release, April 2018](https://www.altinity.com/blog/2018/4/5/amadeus-technologies-launches-investment-and-insights-tool-based-on-machine-learning-and-strategy-algorithms) |
-| ApiRoad | API marketplace | Analytics | — | — | [Blog post, Nov 2018, Mar 2020](https://pixeljets.com/blog/clickhouse-vs-elasticsearch/) |
+| ApiRoad | API marketplace | Analytics | — | — | [Blog post, November 2018, March 2020](https://pixeljets.com/blog/clickhouse-vs-elasticsearch/) |
| Appsflyer | Mobile analytics | Main product | — | — | [Talk in Russian, July 2019](https://www.youtube.com/watch?v=M3wbRlcpBbY) |
| ArenaData | Data Platform | Main product | — | — | [Slides in Russian, December 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup38/indexes.pdf) |
| Argedor | ClickHouse support | — | — | — | [Official website](https://www.argedor.com/en/clickhouse/) |
@@ -50,6 +51,7 @@ toc_title: Adopters
| Cryptology | Digital Assets Trading Platform | — | — | — | [Job advertisement, March 2021](https://career.habr.com/companies/cryptology/vacancies) |
| Dataliance for China Telecom | Telecom | Analytics | — | — | [Slides in Chinese, January 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup12/telecom.pdf) |
| Deutsche Bank | Finance | BI Analytics | — | — | [Slides in English, October 2019](https://bigdatadays.ru/wp-content/uploads/2019/10/D2-H3-3_Yakunin-Goihburg.pdf) |
+| Deepl | Machine Learning | — | — | — | [Video, October 2021](https://www.youtube.com/watch?v=WIYJiPwxXdM&t=1182s) |
| Deeplay | Gaming Analytics | — | — | — | [Job advertisement, 2020](https://career.habr.com/vacancies/1000062568) |
| Diva-e | Digital consulting | Main Product | — | — | [Slides in English, September 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup29/ClickHouse-MeetUp-Unusual-Applications-sd-2019-09-17.pdf) |
| Ecommpay | Payment Processing | Logs | — | — | [Video, Nov 2019](https://www.youtube.com/watch?v=d3GdZTOWGLk) |
@@ -65,6 +67,7 @@ toc_title: Adopters
| Gigapipe | Managed ClickHouse | Main product | — | — | [Official website](https://gigapipe.com/) |
| Glaber | Monitoring | Main product | — | — | [Website](https://glaber.io/) |
| GraphCDN | CDN | Traffic Analytics | — | — | [Blog Post in English, August 2021](https://altinity.com/blog/delivering-insight-on-graphql-apis-with-clickhouse-at-graphcdn/) |
+| Grouparoo | Data Warehouse Integrations | Main product | — | — | [Official Website, November 2021](https://www.grouparoo.com/integrations) |
| HUYA | Video Streaming | Analytics | — | — | [Slides in Chinese, October 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup19/7.%20ClickHouse万亿数据分析实践%20李本旺(sundy-li)%20虎牙.pdf) |
| Hydrolix | Cloud data platform | Main product | — | — | [Documentation](https://docs.hydrolix.io/guide/query) |
| ICA | FinTech | Risk Management | — | — | [Blog Post in English, Sep 2020](https://altinity.com/blog/clickhouse-vs-redshift-performance-for-fintech-risk-management?utm_campaign=ClickHouse%20vs%20RedShift&utm_content=143520807&utm_medium=social&utm_source=twitter&hss_channel=tw-3894792263) |
@@ -78,7 +81,8 @@ toc_title: Adopters
| Ippon Technologies | Technology Consulting | — | — | — | [Talk in English, July 2020](https://youtu.be/GMiXCMFDMow?t=205) |
| Ivi | Online Cinema | Analytics, Monitoring | — | — | [Article in Russian, Jan 2018](https://habr.com/en/company/ivi/blog/347408/) |
| Jinshuju 金数据 | BI Analytics | Main product | — | — | [Slides in Chinese, October 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup24/3.%20金数据数据架构调整方案Public.pdf) |
-| kakaocorp | Internet company | — | — | — | [if(kakao)2020 conference](https://if.kakao.com/session/117) |
+| Jitsu | Cloud Software | Data Pipeline | — | — | [Documentation](https://jitsu.com/docs/destinations-configuration/clickhouse-destination), [Hacker News](https://news.ycombinator.com/item?id=29106082) |
+| kakaocorp | Internet company | — | — | — | [if(kakao)2020](https://tv.kakao.com/channel/3693125/cliplink/414129353), [if(kakao)2021](https://if.kakao.com/session/24) |
| Kodiak Data | Clouds | Main product | — | — | [Slides in Engish, April 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup13/kodiak_data.pdf) |
| Kontur | Software Development | Metrics | — | — | [Talk in Russian, November 2018](https://www.youtube.com/watch?v=U4u4Bd0FtrY) |
| Kuaishou | Video | — | — | — | [ClickHouse Meetup, October 2018](https://clickhouse.com/blog/en/2018/clickhouse-community-meetup-in-beijing-on-october-28-2018/) |
@@ -88,7 +92,7 @@ toc_title: Adopters
| Mail.ru Cloud Solutions | Cloud services | Main product | — | — | [Article in Russian](https://mcs.mail.ru/help/db-create/clickhouse#) |
| MAXILECT | Ad Tech, Blockchain, ML, AI | — | — | — | [Job advertisement, 2021](https://www.linkedin.com/feed/update/urn:li:activity:6780842017229430784/) |
| Marilyn | Advertising | Statistics | — | — | [Talk in Russian, June 2017](https://www.youtube.com/watch?v=iXlIgx2khwc) |
-| Mello | Marketing | Analytics | 1 server | — | [Article, Oct 2020](https://vc.ru/marketing/166180-razrabotka-tipovogo-otcheta-skvoznoy-analitiki) |
+| Mello | Marketing | Analytics | 1 server | — | [Article, October 2020](https://vc.ru/marketing/166180-razrabotka-tipovogo-otcheta-skvoznoy-analitiki) |
| MessageBird | Telecommunications | Statistics | — | — | [Slides in English, November 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup20/messagebird.pdf) |
| Microsoft | Web Analytics | Clarity (Main Product) | — | — | [A question on GitHub](https://github.com/ClickHouse/ClickHouse/issues/21556) |
| MindsDB | Machine Learning | Main Product | — | — | [Official Website](https://www.mindsdb.com/blog/machine-learning-models-as-tables-in-ch) |
@@ -99,15 +103,16 @@ toc_title: Adopters
| NOC Project | Network Monitoring | Analytics | Main Product | — | [Official Website](https://getnoc.com/features/big-data/) |
| Noction | Network Technology | Main Product | — | — | [Official Website](https://www.noction.com/news/irp-3-11-remote-triggered-blackholing-capability)
| Nuna Inc. | Health Data Analytics | — | — | — | [Talk in English, July 2020](https://youtu.be/GMiXCMFDMow?t=170) |
-| Ok.ru | Social Network | — | 72 servers | 810 TB compressed, 50bn rows/day, 1.5 TB/day | [SmartData conference, Oct 2021](https://assets.ctfassets.net/oxjq45e8ilak/4JPHkbJenLgZhBGGyyonFP/57472ec6987003ec4078d0941740703b/____________________ClickHouse_______________________.pdf) |
-| Omnicomm | Transportation Monitoring | — | — | — | [Facebook post, Oct 2021](https://www.facebook.com/OmnicommTeam/posts/2824479777774500) |
-| OneAPM | Monitorings and Data Analysis | Main product | — | — | [Slides in Chinese, October 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup19/8.%20clickhouse在OneAPM的应用%20杜龙.pdf) |
+| Ok.ru | Social Network | — | 72 servers | 810 TB compressed, 50bn rows/day, 1.5 TB/day | [SmartData conference, October 2021](https://assets.ctfassets.net/oxjq45e8ilak/4JPHkbJenLgZhBGGyyonFP/57472ec6987003ec4078d0941740703b/____________________ClickHouse_______________________.pdf) |
+| Omnicomm | Transportation Monitoring | — | — | — | [Facebook post, October 2021](https://www.facebook.com/OmnicommTeam/posts/2824479777774500) |
+| OneAPM | Monitoring and Data Analysis | Main product | — | — | [Slides in Chinese, October 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup19/8.%20clickhouse在OneAPM的应用%20杜龙.pdf) |
+| Open Targets | Genome Research | Genome Search | — | — | [Tweet, October 2021](https://twitter.com/OpenTargets/status/1452570865342758913?s=20), [Blog](https://blog.opentargets.org/graphql/) |
| OZON | E-commerce | — | — | — | [Official website](https://job.ozon.ru/vacancy/razrabotchik-clickhouse-ekspluatatsiya-40991870/) |
| Panelbear | Analytics | Monitoring and Analytics | — | — | [Tech Stack, November 2020](https://panelbear.com/blog/tech-stack/) |
| Percent 百分点 | Analytics | Main Product | — | — | [Slides in Chinese, June 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup24/4.%20ClickHouse万亿数据双中心的设计与实践%20.pdf) |
| Percona | Performance analysis | Percona Monitoring and Management | — | — | [Official website, Mar 2020](https://www.percona.com/blog/2020/03/30/advanced-query-analysis-in-percona-monitoring-and-management-with-direct-clickhouse-access/) |
| Plausible | Analytics | Main Product | — | — | [Blog post, June 2020](https://twitter.com/PlausibleHQ/status/1273889629087969280) |
-| PostHog | Product Analytics | Main Product | — | — | [Release Notes, Oct 2020](https://posthog.com/blog/the-posthog-array-1-15-0) |
+| PostHog | Product Analytics | Main Product | — | — | [Release Notes, October 2020](https://posthog.com/blog/the-posthog-array-1-15-0), [Blog, November 2021](https://posthog.com/blog/how-we-turned-clickhouse-into-our-eventmansion) |
| Postmates | Delivery | — | — | — | [Talk in English, July 2020](https://youtu.be/GMiXCMFDMow?t=188) |
| Pragma Innovation | Telemetry and Big Data Analysis | Main product | — | — | [Slides in English, October 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup18/4_pragma_innovation.pdf) |
| PRANA | Industrial predictive analytics | Main product | — | — | [News (russian), Feb 2021](https://habr.com/en/news/t/541392/) |
@@ -121,7 +126,7 @@ toc_title: Adopters
| Rspamd | Antispam | Analytics | — | — | [Official Website](https://rspamd.com/doc/modules/clickhouse.html) |
| RuSIEM | SIEM | Main Product | — | — | [Official Website](https://rusiem.com/en/products/architecture) |
| S7 Airlines | Airlines | Metrics, Logging | — | — | [Talk in Russian, March 2019](https://www.youtube.com/watch?v=nwG68klRpPg&t=15s) |
-| Sber | Banking, Fintech, Retail, Cloud, Media | — | — | — | [Job advertisement, March 2021](https://career.habr.com/vacancies/1000073536) |
+| Sber | Banking, Fintech, Retail, Cloud, Media | — | 128 servers | >1 PB | [Job advertisement, March 2021](https://career.habr.com/vacancies/1000073536) |
| scireum GmbH | e-Commerce | Main product | — | — | [Talk in German, February 2020](https://www.youtube.com/watch?v=7QWAn5RbyR4) |
| Segment | Data processing | Main product | 9 * i3en.3xlarge nodes 7.5TB NVME SSDs, 96GB Memory, 12 vCPUs | — | [Slides, 2019](https://slides.com/abraithwaite/segment-clickhouse) |
| sembot.io | Shopping Ads | — | — | — | A comment on LinkedIn, 2020 |
@@ -149,6 +154,7 @@ toc_title: Adopters
| Traffic Stars | AD network | — | 300 servers in Europe/US | 1.8 PiB, 700 000 insert rps (as of 2021) | [Slides in Russian, May 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup15/lightning/ninja.pdf) |
| Uber | Taxi | Logging | — | — | [Slides, February 2020](https://presentations.clickhouse.com/meetup40/uber.pdf) |
| UTMSTAT | Analytics | Main product | — | — | [Blog post, June 2020](https://vc.ru/tribuna/133956-striming-dannyh-iz-servisa-skvoznoy-analitiki-v-clickhouse) |
+| Vercel | Traffic and Performance Analytics | — | — | — | Direct reference, October 2021 |
| VKontakte | Social Network | Statistics, Logging | — | — | [Slides in Russian, August 2018](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup17/3_vk.pdf) |
| VMware | Cloud | VeloCloud, SDN | — | — | [Product documentation](https://docs.vmware.com/en/vRealize-Operations-Manager/8.3/com.vmware.vcom.metrics.doc/GUID-A9AD72E1-C948-4CA2-971B-919385AB3CA8.html) |
| Walmart Labs | Internet, Retail | — | — | — | [Talk in English, July 2020](https://youtu.be/GMiXCMFDMow?t=144) |
@@ -164,6 +170,7 @@ toc_title: Adopters
| Yandex Market | e-Commerce | Metrics, Logging | — | — | [Talk in Russian, January 2019](https://youtu.be/_l1qP0DyBcA?t=478) |
| Yandex Metrica | Web analytics | Main product | 630 servers in one cluster, 360 servers in another cluster, 1862 servers in one department | 133 PiB / 8.31 PiB / 120 trillion records | [Slides, February 2020](https://presentations.clickhouse.com/meetup40/introduction/#13) |
| Yotascale | Cloud | Data pipeline | — | 2 bn records/day | [LinkedIn (Accomplishments)](https://www.linkedin.com/in/adilsaleem/) |
+| Your Analytics | Product Analytics | Main Product | — | — | [Tweet, November 2021](https://twitter.com/mikenikles/status/1459737241165565953) |
| Zagrava Trading | — | — | — | — | [Job offer, May 2021](https://twitter.com/datastackjobs/status/1394707267082063874) |
| ЦВТ | Software Development | Metrics, Logging | — | — | [Blog Post, March 2019, in Russian](https://vc.ru/dev/62715-kak-my-stroili-monitoring-na-prometheus-clickhouse-i-elk) |
| МКБ | Bank | Web-system monitoring | — | — | [Slides in Russian, September 2019](https://github.com/ClickHouse/clickhouse-presentations/blob/master/meetup28/mkb.pdf) |
@@ -171,6 +178,5 @@ toc_title: Adopters
| Цифровой Рабочий | Industrial IoT, Analytics | — | — | — | [Blog post in Russian, March 2021](https://habr.com/en/company/croc/blog/548018/) |
| ООО «МПЗ Богородский» | Agriculture | — | — | — | [Article in Russian, November 2020](https://cloud.yandex.ru/cases/okraina) |
| ДомКлик | Real Estate | — | — | — | [Article in Russian, October 2021](https://habr.com/ru/company/domclick/blog/585936/) |
-| Deepl | Machine Learning | — | — | — | [Video, October 2021](https://www.youtube.com/watch?v=WIYJiPwxXdM&t=1182s) |
[Original article](https://clickhouse.com/docs/en/introduction/adopters/)
diff --git a/docs/en/operations/clickhouse-keeper.md b/docs/en/operations/clickhouse-keeper.md
index 58c59ce9f79..b1c53b61b12 100644
--- a/docs/en/operations/clickhouse-keeper.md
+++ b/docs/en/operations/clickhouse-keeper.md
@@ -21,7 +21,7 @@ By default, ClickHouse Keeper provides the same guarantees as ZooKeeper (lineari
ClickHouse Keeper can be used as a standalone replacement for ZooKeeper or as an internal part of the ClickHouse server, but in both cases configuration is almost the same `.xml` file. The main ClickHouse Keeper configuration tag is `<keeper_server>`. Keeper configuration has the following parameters:
- `tcp_port` — Port for a client to connect (default for ZooKeeper is `2181`).
-- `tcp_port_secure` — Secure port for a client to connect.
+- `tcp_port_secure` — Secure port for an SSL connection between a client and the keeper-server.
- `server_id` — Unique server id, each participant of the ClickHouse Keeper cluster must have a unique number (1, 2, 3, and so on).
- `log_storage_path` — Path to coordination logs; it is better to store logs on a non-busy device (same as for ZooKeeper).
- `snapshot_storage_path` — Path to coordination snapshots.
@@ -49,8 +49,13 @@ Internal coordination settings are located in `<keeper_server>.<coordination_settings>` section.
-Quorum configuration is located in `<keeper_server>.<raft_configuration>` section and contain servers description. The only parameter for the whole quorum is `secure`, which enables encrypted connection for communication between quorum participants. The main parameters for each `<server>` are:
+Quorum configuration is located in the `<keeper_server>.<raft_configuration>` section and contains a description of the servers.
+
+The only parameter for the whole quorum is `secure`, which enables encrypted connection for communication between quorum participants. The parameter can be set to `true` if an SSL connection is required for internal communication between nodes, or left unspecified otherwise.
+
+The main parameters for each `<server>` are:
- `id` — Server identifier in a quorum.
- `hostname` — Hostname where this server is placed.
@@ -100,6 +105,196 @@ ClickHouse Keeper is bundled into the ClickHouse server package, just add config
clickhouse-keeper --config /etc/your_path_to_config/config.xml --daemon
```
+## Four Letter Word Commands
+
+ClickHouse Keeper also provides 4lw commands that are almost the same as in ZooKeeper. Each command is composed of four letters, such as `mntr`, `stat`, etc. There are some more interesting commands: `stat` gives some general information about the server and connected clients, while `srvr` and `cons` give extended details on the server and connections respectively.
+
+The 4lw commands have a white list configuration, `four_letter_word_white_list`, whose default value is "conf,cons,crst,envi,ruok,srst,srvr,stat,wchc,wchs,dirs,mntr,isro".
+
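+A hypothetical snippet restricting the list to three commands (placement inside the `<keeper_server>` section is assumed):
+
+```xml
+<keeper_server>
+    <!-- only these 4lw commands will be served; all others are rejected -->
+    <four_letter_word_white_list>stat,ruok,mntr</four_letter_word_white_list>
+</keeper_server>
+```
+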
+You can issue the commands to ClickHouse Keeper via telnet or nc, at the client port.
+```
+echo mntr | nc localhost 9181
+```
+
+The 4lw commands are described in detail below:
+
+- ruok : Tests if server is running in a non-error state. The server will respond with `imok` if it is running. Otherwise it will not respond at all. A response of `imok` does not necessarily indicate that the server has joined the quorum, just that the server process is active and bound to the specified client port. Use `stat` for details on state with respect to quorum and client connection information.
+
+```
+imok
+```
+
+- mntr : Outputs a list of variables that could be used for monitoring the health of the cluster.
+
+```
+zk_version v21.11.1.1-prestable-7a4a0b0edef0ad6e0aa662cd3b90c3f4acf796e7
+zk_avg_latency 0
+zk_max_latency 0
+zk_min_latency 0
+zk_packets_received 68
+zk_packets_sent 68
+zk_num_alive_connections 1
+zk_outstanding_requests 0
+zk_server_state leader
+zk_znode_count 4
+zk_watch_count 1
+zk_ephemerals_count 0
+zk_approximate_data_size 723
+zk_open_file_descriptor_count 310
+zk_max_file_descriptor_count 10240
+zk_followers 0
+zk_synced_followers 0
+```
+
+- srvr : Lists full details for the server.
+
+```
+ClickHouse Keeper version: v21.11.1.1-prestable-7a4a0b0edef0ad6e0aa662cd3b90c3f4acf796e7
+Latency min/avg/max: 0/0/0
+
+Received: 2
+Sent : 2
+Connections: 1
+Outstanding: 0
+Zxid: 34
+Mode: leader
+Node count: 4
+```
+
+- stat : Lists brief details for the server and connected clients.
+
+```
+ClickHouse Keeper version: v21.11.1.1-prestable-7a4a0b0edef0ad6e0aa662cd3b90c3f4acf796e7
+Clients:
+ 192.168.1.1:52852(recved=0,sent=0)
+ 192.168.1.1:52042(recved=24,sent=48)
+
+Latency min/avg/max: 0/0/0
+
+Received: 4
+Sent : 4
+Connections: 1
+Outstanding: 0
+Zxid: 36
+Mode: leader
+Node count: 4
+
+```
+
+- srst : Reset server statistics. The command will affect the result of `srvr`, `mntr` and `stat`.
+
+```
+Server stats reset.
+```
+
+- conf : Print details about serving configuration.
+
+```
+server_id=1
+tcp_port=2181
+four_letter_word_white_list=*
+log_storage_path=./coordination/logs
+snapshot_storage_path=./coordination/snapshots
+max_requests_batch_size=100
+session_timeout_ms=30000
+operation_timeout_ms=10000
+dead_session_check_period_ms=500
+heart_beat_interval_ms=500
+election_timeout_lower_bound_ms=1000
+election_timeout_upper_bound_ms=2000
+reserved_log_items=1000000000000000
+snapshot_distance=10000
+auto_forwarding=true
+shutdown_timeout=5000
+startup_timeout=240000
+raft_logs_level=information
+snapshots_to_keep=3
+rotate_log_storage_interval=100000
+stale_log_gap=10000
+fresh_log_gap=200
+max_requests_batch_size=100
+quorum_reads=false
+force_sync=false
+compress_logs=true
+compress_snapshots_with_zstd_format=true
+configuration_change_tries_count=20
+```
+
+- cons : Lists full connection/session details for all clients connected to this server. Includes information on numbers of packets received/sent, session id, operation latencies, last operation performed, etc.
+
+```
+ 192.168.1.1:52163(recved=0,sent=0,sid=0xffffffffffffffff,lop=NA,est=1636454787393,to=30000,lzxid=0xffffffffffffffff,lresp=0,llat=0,minlat=0,avglat=0,maxlat=0)
+ 192.168.1.1:52042(recved=9,sent=18,sid=0x0000000000000001,lop=List,est=1636454739887,to=30000,lcxid=0x0000000000000005,lzxid=0x0000000000000005,lresp=1636454739892,llat=0,minlat=0,avglat=0,maxlat=0)
+```
+
+- crst : Reset connection/session statistics for all connections.
+
+```
+Connection stats reset.
+```
+
+- envi : Print details about the serving environment.
+
+```
+Environment:
+clickhouse.keeper.version=v21.11.1.1-prestable-7a4a0b0edef0ad6e0aa662cd3b90c3f4acf796e7
+host.name=ZBMAC-C02D4054M.local
+os.name=Darwin
+os.arch=x86_64
+os.version=19.6.0
+cpu.count=12
+user.name=root
+user.home=/Users/JackyWoo/
+user.dir=/Users/JackyWoo/project/jd/clickhouse/cmake-build-debug/programs/
+user.tmp=/var/folders/b4/smbq5mfj7578f2jzwn602tt40000gn/T/
+```
+
+
+- dirs : Shows the total size of snapshot and log files in bytes.
+
+```
+snapshot_dir_size: 0
+log_dir_size: 3875
+```
+
+- isro : Tests if server is running in read-only mode. The server will respond with "ro" if in read-only mode or "rw" if not in read-only mode.
+
+```
+rw
+```
+
+- wchs : Lists brief information on watches for the server.
+
+```
+1 connections watching 1 paths
+Total watches:1
+```
+
+- wchc : Lists detailed information on watches for the server, by session. This outputs a list of sessions (connections) with associated watches (paths). Note that, depending on the number of watches, this operation may be expensive (i.e., it may impact server performance); use it carefully.
+
+```
+0x0000000000000001
+ /clickhouse/task_queue/ddl
+```
+
+- wchp : Lists detailed information on watches for the server, by path. This outputs a list of paths (znodes) with associated sessions. Note that, depending on the number of watches, this operation may be expensive (i.e., it may impact server performance); use it carefully.
+
+```
+/clickhouse/task_queue/ddl
+ 0x0000000000000001
+```
+
+- dump : Lists the outstanding sessions and ephemeral nodes. This only works on the leader.
+
+```
+Sessions dump (2):
+0x0000000000000001
+0x0000000000000002
+Sessions with Ephemerals (1):
+0x0000000000000001
+ /clickhouse/task_queue/ddl
+```
+
## [experimental] Migration from ZooKeeper
Seamless migration from ZooKeeper to ClickHouse Keeper is impossible: you have to stop your ZooKeeper cluster, convert the data, and start ClickHouse Keeper. The `clickhouse-keeper-converter` tool allows converting ZooKeeper logs and snapshots to a ClickHouse Keeper snapshot. It works only with ZooKeeper > 3.4. Steps for migration:
diff --git a/docs/en/operations/server-configuration-parameters/settings.md b/docs/en/operations/server-configuration-parameters/settings.md
index 6ec0d122e6a..d60aac84673 100644
--- a/docs/en/operations/server-configuration-parameters/settings.md
+++ b/docs/en/operations/server-configuration-parameters/settings.md
@@ -107,7 +107,7 @@ Loading key from the environment variable:
```xml
-
+
```
@@ -120,7 +120,7 @@ Each of these methods can be applied for multiple keys:
00112233445566778899aabbccddeeff
-
+
1
@@ -370,7 +370,7 @@ Opens `https://tabix.io/` when accessing `http://localhost: http_port`.