Mirror of https://github.com/ClickHouse/ClickHouse.git (synced 2024-11-21 15:12:02 +00:00)

Commit 2b2ffcc8ea: Merge branch 'master' into classification
.github/workflows/backport_branches.yml (vendored, new file, 379 lines)
@@ -0,0 +1,379 @@
name: BackportPR
on: # yamllint disable-line rule:truthy
  push:
    branches:
      - 'backport/**'
jobs:
  DockerHubPush:
    runs-on: [self-hosted, style-checker]
    steps:
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Images check
        run: |
          cd $GITHUB_WORKSPACE/tests/ci
          python3 docker_images_check.py
      - name: Upload images files to artifacts
        uses: actions/upload-artifact@v2
        with:
          name: changed_images
          path: ${{ runner.temp }}/docker_images_check/changed_images.json
  CompatibilityCheck:
    needs: [BuilderDebRelease]
    runs-on: [self-hosted, style-checker]
    steps:
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Download json reports
        uses: actions/download-artifact@v2
        with:
          path: ${{runner.temp}}/reports_dir
      - name: CompatibilityCheck
        env:
          TEMP_PATH: ${{runner.temp}}/compatibility_check
          REPO_COPY: ${{runner.temp}}/compatibility_check/ClickHouse
          REPORTS_PATH: ${{runner.temp}}/reports_dir
        run: |
          sudo rm -fr $TEMP_PATH
          mkdir -p $TEMP_PATH
          cp -r $GITHUB_WORKSPACE $TEMP_PATH
          cd $REPO_COPY/tests/ci && python3 compatibility_check.py
      - name: Cleanup
        if: always()
        run: |
          docker kill $(docker ps -q) ||:
          docker rm -f $(docker ps -a -q) ||:
          sudo rm -fr $TEMP_PATH
#########################################################################################
#################################### ORDINARY BUILDS ####################################
#########################################################################################
  BuilderDebRelease:
    needs: [DockerHubPush]
    runs-on: [self-hosted, builder]
    steps:
      - name: Download changed images
        uses: actions/download-artifact@v2
        with:
          name: changed_images
          path: ${{ runner.temp }}/images_path
      - name: Check out repository code
        uses: actions/checkout@v2
        with:
          submodules: 'recursive'
          fetch-depth: 0 # otherwise we will have no info about contributors
      - name: Build
        env:
          TEMP_PATH: ${{runner.temp}}/build_check
          IMAGES_PATH: ${{runner.temp}}/images_path
          REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
          CACHES_PATH: ${{runner.temp}}/../ccaches
          CHECK_NAME: 'ClickHouse build check (actions)'
          BUILD_NAME: 'package_release'
        run: |
          sudo rm -fr $TEMP_PATH
          mkdir -p $TEMP_PATH
          cp -r $GITHUB_WORKSPACE $TEMP_PATH
          cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
      - name: Upload build URLs to artifacts
        uses: actions/upload-artifact@v2
        with:
          name: ${{ env.BUILD_NAME }}
          path: ${{ runner.temp }}/build_check/${{ env.BUILD_NAME }}.json
      - name: Cleanup
        if: always()
        run: |
          docker kill $(docker ps -q) ||:
          docker rm -f $(docker ps -a -q) ||:
          sudo rm -fr $TEMP_PATH
  BuilderDebAsan:
    needs: [DockerHubPush]
    runs-on: [self-hosted, builder]
    steps:
      - name: Download changed images
        uses: actions/download-artifact@v2
        with:
          name: changed_images
          path: ${{ runner.temp }}/images_path
      - name: Check out repository code
        uses: actions/checkout@v2
        with:
          submodules: 'recursive'
          fetch-depth: 0 # otherwise we will have no info about contributors
      - name: Build
        env:
          TEMP_PATH: ${{runner.temp}}/build_check
          IMAGES_PATH: ${{runner.temp}}/images_path
          REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
          CACHES_PATH: ${{runner.temp}}/../ccaches
          CHECK_NAME: 'ClickHouse build check (actions)'
          BUILD_NAME: 'package_asan'
        run: |
          sudo rm -fr $TEMP_PATH
          mkdir -p $TEMP_PATH
          cp -r $GITHUB_WORKSPACE $TEMP_PATH
          cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
      - name: Upload build URLs to artifacts
        uses: actions/upload-artifact@v2
        with:
          name: ${{ env.BUILD_NAME }}
          path: ${{ runner.temp }}/build_check/${{ env.BUILD_NAME }}.json
      - name: Cleanup
        if: always()
        run: |
          docker kill $(docker ps -q) ||:
          docker rm -f $(docker ps -a -q) ||:
          sudo rm -fr $TEMP_PATH
  BuilderDebTsan:
    needs: [DockerHubPush]
    runs-on: [self-hosted, builder]
    steps:
      - name: Download changed images
        uses: actions/download-artifact@v2
        with:
          name: changed_images
          path: ${{ runner.temp }}/images_path
      - name: Check out repository code
        uses: actions/checkout@v2
        with:
          submodules: 'recursive'
          fetch-depth: 0 # otherwise we will have no info about contributors
      - name: Build
        env:
          TEMP_PATH: ${{runner.temp}}/build_check
          IMAGES_PATH: ${{runner.temp}}/images_path
          REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
          CACHES_PATH: ${{runner.temp}}/../ccaches
          CHECK_NAME: 'ClickHouse build check (actions)'
          BUILD_NAME: 'package_tsan'
        run: |
          sudo rm -fr $TEMP_PATH
          mkdir -p $TEMP_PATH
          cp -r $GITHUB_WORKSPACE $TEMP_PATH
          cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
      - name: Upload build URLs to artifacts
        uses: actions/upload-artifact@v2
        with:
          name: ${{ env.BUILD_NAME }}
          path: ${{ runner.temp }}/build_check/${{ env.BUILD_NAME }}.json
      - name: Cleanup
        if: always()
        run: |
          docker kill $(docker ps -q) ||:
          docker rm -f $(docker ps -a -q) ||:
          sudo rm -fr $TEMP_PATH
  BuilderDebDebug:
    needs: [DockerHubPush]
    runs-on: [self-hosted, builder]
    steps:
      - name: Download changed images
        uses: actions/download-artifact@v2
        with:
          name: changed_images
          path: ${{ runner.temp }}/images_path
      - name: Check out repository code
        uses: actions/checkout@v2
        with:
          submodules: 'recursive'
          fetch-depth: 0 # otherwise we will have no info about contributors
      - name: Build
        env:
          TEMP_PATH: ${{runner.temp}}/build_check
          IMAGES_PATH: ${{runner.temp}}/images_path
          REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
          CACHES_PATH: ${{runner.temp}}/../ccaches
          CHECK_NAME: 'ClickHouse build check (actions)'
          BUILD_NAME: 'package_debug'
        run: |
          sudo rm -fr $TEMP_PATH
          mkdir -p $TEMP_PATH
          cp -r $GITHUB_WORKSPACE $TEMP_PATH
          cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
      - name: Upload build URLs to artifacts
        uses: actions/upload-artifact@v2
        with:
          name: ${{ env.BUILD_NAME }}
          path: ${{ runner.temp }}/build_check/${{ env.BUILD_NAME }}.json
      - name: Cleanup
        if: always()
        run: |
          docker kill $(docker ps -q) ||:
          docker rm -f $(docker ps -a -q) ||:
          sudo rm -fr $TEMP_PATH
############################################################################################
##################################### BUILD REPORTER #######################################
############################################################################################
  BuilderReport:
    needs:
      - BuilderDebRelease
      - BuilderDebAsan
      - BuilderDebTsan
      - BuilderDebUBsan
      - BuilderDebMsan
      - BuilderDebDebug
    runs-on: [self-hosted, style-checker]
    steps:
      - name: Download json reports
        uses: actions/download-artifact@v2
        with:
          path: ${{runner.temp}}/reports_dir
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Report Builder
        env:
          TEMP_PATH: ${{runner.temp}}/report_check
          REPORTS_PATH: ${{runner.temp}}/reports_dir
          CHECK_NAME: 'ClickHouse build check (actions)'
        run: |
          sudo rm -fr $TEMP_PATH
          mkdir -p $TEMP_PATH
          cd $GITHUB_WORKSPACE/tests/ci
          python3 build_report_check.py "$CHECK_NAME"
      - name: Cleanup
        if: always()
        run: |
          docker kill $(docker ps -q) ||:
          docker rm -f $(docker ps -a -q) ||:
          sudo rm -fr $TEMP_PATH
##############################################################################################
########################### FUNCTIONAl STATELESS TESTS #######################################
##############################################################################################
  FunctionalStatelessTestAsan:
    needs: [BuilderDebAsan]
    runs-on: [self-hosted, func-tester]
    steps:
      - name: Download json reports
        uses: actions/download-artifact@v2
        with:
          path: ${{runner.temp}}/reports_dir
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Functional test
        env:
          TEMP_PATH: ${{runner.temp}}/stateless_debug
          REPORTS_PATH: ${{runner.temp}}/reports_dir
          CHECK_NAME: 'Stateless tests (address, actions)'
          REPO_COPY: ${{runner.temp}}/stateless_debug/ClickHouse
          KILL_TIMEOUT: 10800
        run: |
          sudo rm -fr $TEMP_PATH
          mkdir -p $TEMP_PATH
          cp -r $GITHUB_WORKSPACE $TEMP_PATH
          cd $REPO_COPY/tests/ci
          python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
      - name: Cleanup
        if: always()
        run: |
          docker kill $(docker ps -q) ||:
          docker rm -f $(docker ps -a -q) ||:
          sudo rm -fr $TEMP_PATH
##############################################################################################
############################ FUNCTIONAl STATEFUL TESTS #######################################
##############################################################################################
  FunctionalStatefulTestDebug:
    needs: [BuilderDebDebug]
    runs-on: [self-hosted, func-tester]
    steps:
      - name: Download json reports
        uses: actions/download-artifact@v2
        with:
          path: ${{runner.temp}}/reports_dir
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Functional test
        env:
          TEMP_PATH: ${{runner.temp}}/stateful_debug
          REPORTS_PATH: ${{runner.temp}}/reports_dir
          CHECK_NAME: 'Stateful tests (debug, actions)'
          REPO_COPY: ${{runner.temp}}/stateful_debug/ClickHouse
          KILL_TIMEOUT: 3600
        run: |
          sudo rm -fr $TEMP_PATH
          mkdir -p $TEMP_PATH
          cp -r $GITHUB_WORKSPACE $TEMP_PATH
          cd $REPO_COPY/tests/ci
          python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
      - name: Cleanup
        if: always()
        run: |
          docker kill $(docker ps -q) ||:
          docker rm -f $(docker ps -a -q) ||:
          sudo rm -fr $TEMP_PATH
##############################################################################################
######################################### STRESS TESTS #######################################
##############################################################################################
  StressTestTsan:
    needs: [BuilderDebTsan]
    runs-on: [self-hosted, stress-tester]
    steps:
      - name: Download json reports
        uses: actions/download-artifact@v2
        with:
          path: ${{runner.temp}}/reports_dir
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Stress test
        env:
          TEMP_PATH: ${{runner.temp}}/stress_thread
          REPORTS_PATH: ${{runner.temp}}/reports_dir
          CHECK_NAME: 'Stress test (thread, actions)'
          REPO_COPY: ${{runner.temp}}/stress_thread/ClickHouse
        run: |
          sudo rm -fr $TEMP_PATH
          mkdir -p $TEMP_PATH
          cp -r $GITHUB_WORKSPACE $TEMP_PATH
          cd $REPO_COPY/tests/ci
          python3 stress_check.py "$CHECK_NAME"
      - name: Cleanup
        if: always()
        run: |
          docker kill $(docker ps -q) ||:
          docker rm -f $(docker ps -a -q) ||:
          sudo rm -fr $TEMP_PATH
#############################################################################################
############################# INTEGRATION TESTS #############################################
#############################################################################################
  IntegrationTestsRelease:
    needs: [BuilderDebRelease, FunctionalStatelessTestRelease]
    runs-on: [self-hosted, stress-tester]
    steps:
      - name: Download json reports
        uses: actions/download-artifact@v2
        with:
          path: ${{runner.temp}}/reports_dir
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Integration test
        env:
          TEMP_PATH: ${{runner.temp}}/integration_tests_release
          REPORTS_PATH: ${{runner.temp}}/reports_dir
          CHECK_NAME: 'Integration tests (release, actions)'
          REPO_COPY: ${{runner.temp}}/integration_tests_release/ClickHouse
        run: |
          sudo rm -fr $TEMP_PATH
          mkdir -p $TEMP_PATH
          cp -r $GITHUB_WORKSPACE $TEMP_PATH
          cd $REPO_COPY/tests/ci
          python3 integration_test_check.py "$CHECK_NAME"
      - name: Cleanup
        if: always()
        run: |
          docker kill $(docker ps -q) ||:
          docker rm -f $(docker ps -a -q) ||:
          sudo rm -fr $TEMP_PATH
  FinishCheck:
    needs:
      - DockerHubPush
      - BuilderReport
      - FunctionalStatelessTestAsan
      - FunctionalStatefulTestDebug
      - StressTestTsan
      - IntegrationTestsRelease
      - CompatibilityCheck
    runs-on: [self-hosted, style-checker]
    steps:
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Finish label
        run: |
          cd $GITHUB_WORKSPACE/tests/ci
          python3 finish_check.py
.github/workflows/cancel.yml (vendored, 3 lines changed)
@@ -1,7 +1,7 @@
name: Cancel
on: # yamllint disable-line rule:truthy
  workflow_run:
    workflows: ["CIGithubActions"]
    workflows: ["CIGithubActions", "ReleaseCI", "DocsCheck", "BackportPR"]
    types:
      - requested
jobs:
@@ -10,4 +10,5 @@ jobs:
    steps:
      - uses: styfle/cancel-workflow-action@0.9.1
        with:
          all_but_latest: true
          workflow_id: ${{ github.event.workflow.id }}
.github/workflows/docs_check.yml (vendored, new file, 62 lines)
@@ -0,0 +1,62 @@
name: DocsCheck
on: # yamllint disable-line rule:truthy
  pull_request:
    types:
      - synchronize
      - reopened
      - opened
    branches:
      - master
    paths:
      - 'docs/**'
      - 'website/**'
jobs:
  CheckLabels:
    runs-on: [self-hosted, style-checker]
    steps:
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Labels check
        run: |
          cd $GITHUB_WORKSPACE/tests/ci
          python3 run_check.py
  DockerHubPush:
    needs: CheckLabels
    runs-on: [self-hosted, style-checker]
    steps:
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Images check
        run: |
          cd $GITHUB_WORKSPACE/tests/ci
          python3 docker_images_check.py
      - name: Upload images files to artifacts
        uses: actions/upload-artifact@v2
        with:
          name: changed_images
          path: ${{ runner.temp }}/docker_images_check/changed_images.json
  DocsCheck:
    needs: DockerHubPush
    runs-on: [self-hosted, func-tester]
    steps:
      - name: Download changed images
        uses: actions/download-artifact@v2
        with:
          name: changed_images
          path: ${{ runner.temp }}/docs_check
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Docs Check
        env:
          TEMP_PATH: ${{runner.temp}}/docs_check
          REPO_COPY: ${{runner.temp}}/docs_check/ClickHouse
        run: |
          cp -r $GITHUB_WORKSPACE $TEMP_PATH
          cd $REPO_COPY/tests/ci
          python3 docs_check.py
      - name: Cleanup
        if: always()
        run: |
          docker kill $(docker ps -q) ||:
          docker rm -f $(docker ps -a -q) ||:
          sudo rm -fr $TEMP_PATH
.github/workflows/main.yml (vendored, 464 lines changed)
@@ -2,13 +2,14 @@ name: CIGithubActions
on: # yamllint disable-line rule:truthy
  pull_request:
    types:
      - labeled
      - unlabeled
      - synchronize
      - reopened
      - opened
    branches:
      - master
    paths-ignore:
      - 'docs/**'
      - 'website/**'
##########################################################################################
##################################### SMALL CHECKS #######################################
##########################################################################################
@@ -60,34 +61,8 @@ jobs:
          docker kill $(docker ps -q) ||:
          docker rm -f $(docker ps -a -q) ||:
          sudo rm -fr $TEMP_PATH
  DocsCheck:
    needs: DockerHubPush
    runs-on: [self-hosted, func-tester]
    steps:
      - name: Download changed images
        uses: actions/download-artifact@v2
        with:
          name: changed_images
          path: ${{ runner.temp }}/docs_check
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Docs Check
        env:
          TEMP_PATH: ${{runner.temp}}/docs_check
          REPO_COPY: ${{runner.temp}}/docs_check/ClickHouse
        run: |
          cp -r $GITHUB_WORKSPACE $TEMP_PATH
          cd $REPO_COPY/tests/ci
          python3 docs_check.py
      - name: Cleanup
        if: always()
        run: |
          docker kill $(docker ps -q) ||:
          docker rm -f $(docker ps -a -q) ||:
          sudo rm -fr $TEMP_PATH
  FastTest:
    needs: DockerHubPush
    if: ${{ !contains(github.event.pull_request.labels.*.name, 'pr-documentation') && !contains(github.event.pull_request.labels.*.name, 'pr-doc-fix') }}
    runs-on: [self-hosted, builder]
    steps:
      - name: Check out repository code
@@ -109,8 +84,7 @@ jobs:
          docker rm -f $(docker ps -a -q) ||:
          sudo rm -fr $TEMP_PATH
  PVSCheck:
    needs: DockerHubPush
    if: ${{ !contains(github.event.pull_request.labels.*.name, 'pr-documentation') && !contains(github.event.pull_request.labels.*.name, 'pr-doc-fix') }}
    needs: [DockerHubPush, FastTest]
    runs-on: [self-hosted, func-tester]
    steps:
      - name: Check out repository code
@@ -134,7 +108,6 @@ jobs:
          sudo rm -fr $TEMP_PATH
  CompatibilityCheck:
    needs: [BuilderDebRelease]
    if: ${{ !contains(github.event.pull_request.labels.*.name, 'pr-documentation') && !contains(github.event.pull_request.labels.*.name, 'pr-doc-fix') }}
    runs-on: [self-hosted, style-checker]
    steps:
      - name: Check out repository code
@@ -152,7 +125,7 @@ jobs:
          sudo rm -fr $TEMP_PATH
          mkdir -p $TEMP_PATH
          cp -r $GITHUB_WORKSPACE $TEMP_PATH
          cd $REPO_COPY/tests/ci && python3 compatibility_check.py 0
          cd $REPO_COPY/tests/ci && python3 compatibility_check.py
      - name: Cleanup
        if: always()
        run: |
@@ -161,7 +134,6 @@ jobs:
          sudo rm -fr $TEMP_PATH
  SplitBuildSmokeTest:
    needs: [BuilderDebSplitted]
    if: ${{ !contains(github.event.pull_request.labels.*.name, 'pr-documentation') && !contains(github.event.pull_request.labels.*.name, 'pr-doc-fix') }}
    runs-on: [self-hosted, style-checker]
    steps:
      - name: Check out repository code
@@ -191,7 +163,6 @@ jobs:
#########################################################################################
  BuilderDebRelease:
    needs: [DockerHubPush, FastTest]
    if: ${{ !contains(github.event.pull_request.labels.*.name, 'pr-documentation') && !contains(github.event.pull_request.labels.*.name, 'pr-doc-fix') }}
    runs-on: [self-hosted, builder]
    steps:
      - name: Download changed images
@@ -211,12 +182,12 @@ jobs:
          REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
          CACHES_PATH: ${{runner.temp}}/../ccaches
          CHECK_NAME: 'ClickHouse build check (actions)'
          BUILD_NUMBER: 0
          BUILD_NAME: 'package_release'
        run: |
          sudo rm -fr $TEMP_PATH
          mkdir -p $TEMP_PATH
          cp -r $GITHUB_WORKSPACE $TEMP_PATH
          cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER
          cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
      - name: Upload build URLs to artifacts
        uses: actions/upload-artifact@v2
        with:
@@ -230,7 +201,6 @@ jobs:
          sudo rm -fr $TEMP_PATH
  BuilderBinRelease:
    needs: [DockerHubPush, FastTest]
    if: ${{ !contains(github.event.pull_request.labels.*.name, 'pr-documentation') && !contains(github.event.pull_request.labels.*.name, 'pr-doc-fix') }}
    runs-on: [self-hosted, builder]
    steps:
      - name: Download changed images
@@ -250,12 +220,12 @@ jobs:
          REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
          CACHES_PATH: ${{runner.temp}}/../ccaches
          CHECK_NAME: 'ClickHouse build check (actions)'
          BUILD_NUMBER: 9
          BUILD_NAME: 'binary_release'
        run: |
          sudo rm -fr $TEMP_PATH
          mkdir -p $TEMP_PATH
          cp -r $GITHUB_WORKSPACE $TEMP_PATH
          cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER
          cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
      - name: Upload build URLs to artifacts
        uses: actions/upload-artifact@v2
        with:
@@ -269,7 +239,6 @@ jobs:
          sudo rm -fr $TEMP_PATH
  BuilderDebAsan:
    needs: [DockerHubPush, FastTest]
    if: ${{ !contains(github.event.pull_request.labels.*.name, 'pr-documentation') && !contains(github.event.pull_request.labels.*.name, 'pr-doc-fix') }}
    runs-on: [self-hosted, builder]
    steps:
      - name: Download changed images
@@ -289,12 +258,12 @@ jobs:
          REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
          CACHES_PATH: ${{runner.temp}}/../ccaches
          CHECK_NAME: 'ClickHouse build check (actions)'
          BUILD_NUMBER: 3
          BUILD_NAME: 'package_asan'
        run: |
          sudo rm -fr $TEMP_PATH
          mkdir -p $TEMP_PATH
          cp -r $GITHUB_WORKSPACE $TEMP_PATH
          cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER
          cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
      - name: Upload build URLs to artifacts
        uses: actions/upload-artifact@v2
        with:
@@ -308,7 +277,6 @@ jobs:
          sudo rm -fr $TEMP_PATH
  BuilderDebUBsan:
    needs: [DockerHubPush, FastTest]
    if: ${{ !contains(github.event.pull_request.labels.*.name, 'pr-documentation') && !contains(github.event.pull_request.labels.*.name, 'pr-doc-fix') }}
    runs-on: [self-hosted, builder]
    steps:
      - name: Download changed images
@@ -328,12 +296,12 @@ jobs:
          REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
          CACHES_PATH: ${{runner.temp}}/../ccaches
          CHECK_NAME: 'ClickHouse build check (actions)'
          BUILD_NUMBER: 4
          BUILD_NAME: 'package_ubsan'
        run: |
          sudo rm -fr $TEMP_PATH
          mkdir -p $TEMP_PATH
          cp -r $GITHUB_WORKSPACE $TEMP_PATH
          cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER
          cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
      - name: Upload build URLs to artifacts
        uses: actions/upload-artifact@v2
        with:
@@ -347,7 +315,6 @@ jobs:
          sudo rm -fr $TEMP_PATH
  BuilderDebTsan:
    needs: [DockerHubPush, FastTest]
    if: ${{ !contains(github.event.pull_request.labels.*.name, 'pr-documentation') && !contains(github.event.pull_request.labels.*.name, 'pr-doc-fix') }}
    runs-on: [self-hosted, builder]
    steps:
      - name: Download changed images
@@ -367,12 +334,12 @@ jobs:
          REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
          CACHES_PATH: ${{runner.temp}}/../ccaches
          CHECK_NAME: 'ClickHouse build check (actions)'
          BUILD_NUMBER: 5
          BUILD_NAME: 'package_tsan'
        run: |
          sudo rm -fr $TEMP_PATH
          mkdir -p $TEMP_PATH
          cp -r $GITHUB_WORKSPACE $TEMP_PATH
          cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER
          cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
      - name: Upload build URLs to artifacts
        uses: actions/upload-artifact@v2
        with:
@@ -386,7 +353,6 @@ jobs:
          sudo rm -fr $TEMP_PATH
  BuilderDebMsan:
    needs: [DockerHubPush, FastTest]
    if: ${{ !contains(github.event.pull_request.labels.*.name, 'pr-documentation') && !contains(github.event.pull_request.labels.*.name, 'pr-doc-fix') }}
    runs-on: [self-hosted, builder]
    steps:
      - name: Download changed images
@@ -406,12 +372,12 @@ jobs:
          REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
          CACHES_PATH: ${{runner.temp}}/../ccaches
          CHECK_NAME: 'ClickHouse build check (actions)'
          BUILD_NUMBER: 6
          BUILD_NAME: 'package_msan'
        run: |
          sudo rm -fr $TEMP_PATH
          mkdir -p $TEMP_PATH
          cp -r $GITHUB_WORKSPACE $TEMP_PATH
          cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER
          cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
      - name: Upload build URLs to artifacts
        uses: actions/upload-artifact@v2
        with:
@@ -425,7 +391,6 @@ jobs:
          sudo rm -fr $TEMP_PATH
  BuilderDebDebug:
    needs: [DockerHubPush, FastTest]
    if: ${{ !contains(github.event.pull_request.labels.*.name, 'pr-documentation') && !contains(github.event.pull_request.labels.*.name, 'pr-doc-fix') }}
    runs-on: [self-hosted, builder]
    steps:
      - name: Download changed images
@@ -445,12 +410,12 @@ jobs:
          REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
          CACHES_PATH: ${{runner.temp}}/../ccaches
          CHECK_NAME: 'ClickHouse build check (actions)'
          BUILD_NUMBER: 7
          BUILD_NAME: 'package_debug'
        run: |
          sudo rm -fr $TEMP_PATH
          mkdir -p $TEMP_PATH
          cp -r $GITHUB_WORKSPACE $TEMP_PATH
          cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER
          cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
      - name: Upload build URLs to artifacts
        uses: actions/upload-artifact@v2
        with:
@@ -467,7 +432,6 @@ jobs:
##########################################################################################
  BuilderDebSplitted:
    needs: [DockerHubPush, FastTest]
    if: ${{ !contains(github.event.pull_request.labels.*.name, 'pr-documentation') && !contains(github.event.pull_request.labels.*.name, 'pr-doc-fix') }}
    runs-on: [self-hosted, builder]
    steps:
      - name: Download changed images
@@ -486,13 +450,241 @@ jobs:
          IMAGES_PATH: ${{runner.temp}}/images_path
          REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
          CACHES_PATH: ${{runner.temp}}/../ccaches
          CHECK_NAME: 'ClickHouse special build check (actions)'
          BUILD_NUMBER: 1
          CHECK_NAME: 'ClickHouse build check (actions)'
          BUILD_NAME: 'binary_splitted'
        run: |
          sudo rm -fr $TEMP_PATH
          mkdir -p $TEMP_PATH
          cp -r $GITHUB_WORKSPACE $TEMP_PATH
          cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER
          cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
      - name: Upload build URLs to artifacts
        uses: actions/upload-artifact@v2
        with:
          name: ${{ env.BUILD_NAME }}
          path: ${{ runner.temp }}/build_check/${{ env.BUILD_NAME }}.json
      - name: Cleanup
        if: always()
        run: |
          docker kill $(docker ps -q) ||:
          docker rm -f $(docker ps -a -q) ||:
          sudo rm -fr $TEMP_PATH
  BuilderBinTidy:
    needs: [DockerHubPush, FastTest]
    runs-on: [self-hosted, builder]
    steps:
      - name: Download changed images
        uses: actions/download-artifact@v2
        with:
          name: changed_images
          path: ${{ runner.temp }}/images_path
      - name: Check out repository code
        uses: actions/checkout@v2
        with:
          submodules: 'recursive'
          fetch-depth: 0 # otherwise we will have no info about contributors
      - name: Build
        env:
          TEMP_PATH: ${{runner.temp}}/build_check
          IMAGES_PATH: ${{runner.temp}}/images_path
          REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
          CACHES_PATH: ${{runner.temp}}/../ccaches
          CHECK_NAME: 'ClickHouse build check (actions)'
          BUILD_NAME: 'binary_tidy'
        run: |
          sudo rm -fr $TEMP_PATH
          mkdir -p $TEMP_PATH
          cp -r $GITHUB_WORKSPACE $TEMP_PATH
          cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
      - name: Upload build URLs to artifacts
        uses: actions/upload-artifact@v2
        with:
          name: ${{ env.BUILD_NAME }}
          path: ${{ runner.temp }}/build_check/${{ env.BUILD_NAME }}.json
      - name: Cleanup
        if: always()
        run: |
          docker kill $(docker ps -q) ||:
          docker rm -f $(docker ps -a -q) ||:
          sudo rm -fr $TEMP_PATH
  BuilderBinDarwin:
    needs: [DockerHubPush, FastTest]
    runs-on: [self-hosted, builder]
    steps:
      - name: Download changed images
        uses: actions/download-artifact@v2
        with:
          name: changed_images
          path: ${{ runner.temp }}/images_path
      - name: Check out repository code
        uses: actions/checkout@v2
        with:
          submodules: 'recursive'
          fetch-depth: 0 # otherwise we will have no info about contributors
      - name: Build
        env:
          TEMP_PATH: ${{runner.temp}}/build_check
          IMAGES_PATH: ${{runner.temp}}/images_path
          REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
          CACHES_PATH: ${{runner.temp}}/../ccaches
          CHECK_NAME: 'ClickHouse build check (actions)'
          BUILD_NAME: 'binary_darwin'
        run: |
          sudo rm -fr $TEMP_PATH
          mkdir -p $TEMP_PATH
          cp -r $GITHUB_WORKSPACE $TEMP_PATH
          cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
      - name: Upload build URLs to artifacts
        uses: actions/upload-artifact@v2
        with:
          name: ${{ env.BUILD_NAME }}
          path: ${{ runner.temp }}/build_check/${{ env.BUILD_NAME }}.json
      - name: Cleanup
        if: always()
        run: |
          docker kill $(docker ps -q) ||:
          docker rm -f $(docker ps -a -q) ||:
          sudo rm -fr $TEMP_PATH
  BuilderBinAarch64:
    needs: [DockerHubPush, FastTest]
    runs-on: [self-hosted, builder]
    steps:
      - name: Download changed images
        uses: actions/download-artifact@v2
        with:
          name: changed_images
          path: ${{ runner.temp }}/images_path
      - name: Check out repository code
        uses: actions/checkout@v2
        with:
          submodules: 'recursive'
          fetch-depth: 0 # otherwise we will have no info about contributors
      - name: Build
        env:
          TEMP_PATH: ${{runner.temp}}/build_check
          IMAGES_PATH: ${{runner.temp}}/images_path
          REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
          CACHES_PATH: ${{runner.temp}}/../ccaches
          CHECK_NAME: 'ClickHouse build check (actions)'
          BUILD_NAME: 'binary_aarch64'
        run: |
          sudo rm -fr $TEMP_PATH
          mkdir -p $TEMP_PATH
          cp -r $GITHUB_WORKSPACE $TEMP_PATH
          cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
      - name: Upload build URLs to artifacts
        uses: actions/upload-artifact@v2
        with:
          name: ${{ env.BUILD_NAME }}
          path: ${{ runner.temp }}/build_check/${{ env.BUILD_NAME }}.json
      - name: Cleanup
        if: always()
        run: |
          docker kill $(docker ps -q) ||:
          docker rm -f $(docker ps -a -q) ||:
          sudo rm -fr $TEMP_PATH
  BuilderBinFreeBSD:
    needs: [DockerHubPush, FastTest]
    runs-on: [self-hosted, builder]
    steps:
      - name: Download changed images
        uses: actions/download-artifact@v2
        with:
          name: changed_images
          path: ${{ runner.temp }}/images_path
      - name: Check out repository code
        uses: actions/checkout@v2
        with:
          submodules: 'recursive'
          fetch-depth: 0 # otherwise we will have no info about contributors
      - name: Build
        env:
          TEMP_PATH: ${{runner.temp}}/build_check
          IMAGES_PATH: ${{runner.temp}}/images_path
          REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
          CACHES_PATH: ${{runner.temp}}/../ccaches
          CHECK_NAME: 'ClickHouse build check (actions)'
          BUILD_NAME: 'binary_freebsd'
        run: |
          sudo rm -fr $TEMP_PATH
          mkdir -p $TEMP_PATH
          cp -r $GITHUB_WORKSPACE $TEMP_PATH
          cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
      - name: Upload build URLs to artifacts
        uses: actions/upload-artifact@v2
        with:
          name: ${{ env.BUILD_NAME }}
          path: ${{ runner.temp }}/build_check/${{ env.BUILD_NAME }}.json
      - name: Cleanup
        if: always()
        run: |
          docker kill $(docker ps -q) ||:
          docker rm -f $(docker ps -a -q) ||:
          sudo rm -fr $TEMP_PATH
  BuilderBinDarwinAarch64:
    needs: [DockerHubPush, FastTest]
    runs-on: [self-hosted, builder]
    steps:
      - name: Download changed images
        uses: actions/download-artifact@v2
        with:
          name: changed_images
          path: ${{ runner.temp }}/images_path
      - name: Check out repository code
        uses: actions/checkout@v2
        with:
          submodules: 'recursive'
          fetch-depth: 0 # otherwise we will have no info about contributors
      - name: Build
        env:
          TEMP_PATH: ${{runner.temp}}/build_check
          IMAGES_PATH: ${{runner.temp}}/images_path
          REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
          CACHES_PATH: ${{runner.temp}}/../ccaches
          CHECK_NAME: 'ClickHouse build check (actions)'
          BUILD_NAME: 'binary_darwin_aarch64'
        run: |
          sudo rm -fr $TEMP_PATH
          mkdir -p $TEMP_PATH
          cp -r $GITHUB_WORKSPACE $TEMP_PATH
          cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
      - name: Upload build URLs to artifacts
        uses: actions/upload-artifact@v2
        with:
          name: ${{ env.BUILD_NAME }}
          path: ${{ runner.temp }}/build_check/${{ env.BUILD_NAME }}.json
      - name: Cleanup
        if: always()
        run: |
          docker kill $(docker ps -q) ||:
          docker rm -f $(docker ps -a -q) ||:
          sudo rm -fr $TEMP_PATH
  BuilderBinPPC64:
    needs: [DockerHubPush, FastTest]
    runs-on: [self-hosted, builder]
    steps:
      - name: Download changed images
        uses: actions/download-artifact@v2
        with:
          name: changed_images
          path: ${{ runner.temp }}/images_path
      - name: Check out repository code
        uses: actions/checkout@v2
        with:
          submodules: 'recursive'
          fetch-depth: 0 # otherwise we will have no info about contributors
      - name: Build
        env:
          TEMP_PATH: ${{runner.temp}}/build_check
          IMAGES_PATH: ${{runner.temp}}/images_path
          REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
          CACHES_PATH: ${{runner.temp}}/../ccaches
          CHECK_NAME: 'ClickHouse build check (actions)'
          BUILD_NAME: 'binary_ppc64le'
        run: |
          sudo rm -fr $TEMP_PATH
          mkdir -p $TEMP_PATH
          cp -r $GITHUB_WORKSPACE $TEMP_PATH
          cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
      - name: Upload build URLs to artifacts
        uses: actions/upload-artifact@v2
        with:
@@ -516,8 +708,8 @@ jobs:
      - BuilderDebUBsan
      - BuilderDebMsan
      - BuilderDebDebug
      - BuilderDebSplitted
    runs-on: [self-hosted, style-checker]
    if: always()
    steps:
      - name: Download json reports
        uses: actions/download-artifact@v2
@@ -541,6 +733,40 @@ jobs:
          docker kill $(docker ps -q) ||:
          docker rm -f $(docker ps -a -q) ||:
          sudo rm -fr $TEMP_PATH
  BuilderSpecialReport:
    needs:
      - BuilderDebSplitted
      - BuilderBinTidy
      - BuilderBinDarwin
      - BuilderBinAarch64
      - BuilderBinFreeBSD
      - BuilderBinDarwinAarch64
      - BuilderBinPPC64
    runs-on: [self-hosted, style-checker]
    if: always()
    steps:
      - name: Download json reports
        uses: actions/download-artifact@v2
        with:
          path: ${{runner.temp}}/reports_dir
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Report Builder
        env:
          TEMP_PATH: ${{runner.temp}}/report_check
          REPORTS_PATH: ${{runner.temp}}/reports_dir
          CHECK_NAME: 'ClickHouse special build check (actions)'
        run: |
          sudo rm -fr $TEMP_PATH
          mkdir -p $TEMP_PATH
          cd $GITHUB_WORKSPACE/tests/ci
          python3 build_report_check.py "$CHECK_NAME"
      - name: Cleanup
        if: always()
        run: |
          docker kill $(docker ps -q) ||:
          docker rm -f $(docker ps -a -q) ||:
          sudo rm -fr $TEMP_PATH
##############################################################################################
########################### FUNCTIONAl STATELESS TESTS #######################################
##############################################################################################
@@ -556,10 +782,68 @@ jobs:
        uses: actions/checkout@v2
      - name: Functional test
        env:
          TEMP_PATH: ${{runner.temp}}/stateless_debug
          TEMP_PATH: ${{runner.temp}}/stateless_release
          REPORTS_PATH: ${{runner.temp}}/reports_dir
          CHECK_NAME: 'Stateless tests (release, actions)'
          REPO_COPY: ${{runner.temp}}/stateless_debug/ClickHouse
          REPO_COPY: ${{runner.temp}}/stateless_release/ClickHouse
          KILL_TIMEOUT: 10800
        run: |
          sudo rm -fr $TEMP_PATH
          mkdir -p $TEMP_PATH
          cp -r $GITHUB_WORKSPACE $TEMP_PATH
          cd $REPO_COPY/tests/ci
          python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
      - name: Cleanup
        if: always()
        run: |
          docker kill $(docker ps -q) ||:
          docker rm -f $(docker ps -a -q) ||:
          sudo rm -fr $TEMP_PATH
  FunctionalStatelessTestReleaseDatabaseReplicated:
    needs: [BuilderDebRelease]
    runs-on: [self-hosted, func-tester]
    steps:
      - name: Download json reports
        uses: actions/download-artifact@v2
        with:
          path: ${{runner.temp}}/reports_dir
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Functional test
        env:
          TEMP_PATH: ${{runner.temp}}/stateless_database_replicated
          REPORTS_PATH: ${{runner.temp}}/reports_dir
          CHECK_NAME: 'Stateless tests (release, DatabaseReplicated, actions)'
          REPO_COPY: ${{runner.temp}}/stateless_database_replicated/ClickHouse
          KILL_TIMEOUT: 10800
        run: |
          sudo rm -fr $TEMP_PATH
          mkdir -p $TEMP_PATH
          cp -r $GITHUB_WORKSPACE $TEMP_PATH
          cd $REPO_COPY/tests/ci
          python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
      - name: Cleanup
        if: always()
        run: |
          docker kill $(docker ps -q) ||:
          docker rm -f $(docker ps -a -q) ||:
          sudo rm -fr $TEMP_PATH
  FunctionalStatelessTestReleaseWideParts:
    needs: [BuilderDebRelease]
    runs-on: [self-hosted, func-tester]
    steps:
      - name: Download json reports
        uses: actions/download-artifact@v2
        with:
          path: ${{runner.temp}}/reports_dir
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Functional test
        env:
          TEMP_PATH: ${{runner.temp}}/stateless_wide_parts
          REPORTS_PATH: ${{runner.temp}}/reports_dir
          CHECK_NAME: 'Stateless tests (release, wide parts enabled, actions)'
          REPO_COPY: ${{runner.temp}}/stateless_wide_parts/ClickHouse
          KILL_TIMEOUT: 10800
        run: |
          sudo rm -fr $TEMP_PATH
@@ -762,10 +1046,10 @@ jobs:
        uses: actions/checkout@v2
      - name: Functional test
        env:
          TEMP_PATH: ${{runner.temp}}/stateful_debug
          TEMP_PATH: ${{runner.temp}}/stateful_release
          REPORTS_PATH: ${{runner.temp}}/reports_dir
          CHECK_NAME: 'Stateful tests (release, actions)'
          REPO_COPY: ${{runner.temp}}/stateful_debug/ClickHouse
          REPO_COPY: ${{runner.temp}}/stateful_release/ClickHouse
          KILL_TIMEOUT: 3600
        run: |
          sudo rm -fr $TEMP_PATH
@@ -1072,7 +1356,7 @@ jobs:
##############################################################################################
  ASTFuzzerTestAsan:
    needs: [BuilderDebAsan]
    runs-on: [self-hosted, func-tester]
    runs-on: [self-hosted, fuzzer-unit-tester]
    steps:
      - name: Download json reports
        uses: actions/download-artifact@v2
@@ -1100,7 +1384,7 @@ jobs:
          sudo rm -fr $TEMP_PATH
  ASTFuzzerTestTsan:
    needs: [BuilderDebTsan]
    runs-on: [self-hosted, func-tester]
    runs-on: [self-hosted, fuzzer-unit-tester]
    steps:
      - name: Download json reports
        uses: actions/download-artifact@v2
@@ -1128,7 +1412,7 @@ jobs:
          sudo rm -fr $TEMP_PATH
  ASTFuzzerTestUBSan:
    needs: [BuilderDebUBsan]
    runs-on: [self-hosted, func-tester]
    runs-on: [self-hosted, fuzzer-unit-tester]
    steps:
      - name: Download json reports
        uses: actions/download-artifact@v2
@@ -1156,7 +1440,7 @@ jobs:
          sudo rm -fr $TEMP_PATH
  ASTFuzzerTestMSan:
    needs: [BuilderDebMsan]
    runs-on: [self-hosted, func-tester]
    runs-on: [self-hosted, fuzzer-unit-tester]
    steps:
      - name: Download json reports
        uses: actions/download-artifact@v2
@@ -1184,7 +1468,7 @@ jobs:
          sudo rm -fr $TEMP_PATH
  ASTFuzzerTestDebug:
    needs: [BuilderDebDebug]
    runs-on: [self-hosted, func-tester]
    runs-on: [self-hosted, fuzzer-unit-tester]
    steps:
      - name: Download json reports
        uses: actions/download-artifact@v2
@@ -1297,12 +1581,40 @@ jobs:
          docker kill $(docker ps -q) ||:
          docker rm -f $(docker ps -a -q) ||:
          sudo rm -fr $TEMP_PATH
  IntegrationTestsFlakyCheck:
    needs: [BuilderDebAsan]
    runs-on: [self-hosted, stress-tester]
    steps:
      - name: Download json reports
        uses: actions/download-artifact@v2
        with:
          path: ${{runner.temp}}/reports_dir
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Integration test
        env:
          TEMP_PATH: ${{runner.temp}}/integration_tests_asan_flaky_check
          REPORTS_PATH: ${{runner.temp}}/reports_dir
          CHECK_NAME: 'Integration tests flaky check (asan, actions)'
          REPO_COPY: ${{runner.temp}}/integration_tests_asan_flaky_check/ClickHouse
        run: |
          sudo rm -fr $TEMP_PATH
          mkdir -p $TEMP_PATH
          cp -r $GITHUB_WORKSPACE $TEMP_PATH
          cd $REPO_COPY/tests/ci
          python3 integration_test_check.py "$CHECK_NAME"
      - name: Cleanup
        if: always()
        run: |
          docker kill $(docker ps -q) ||:
          docker rm -f $(docker ps -a -q) ||:
          sudo rm -fr $TEMP_PATH
#############################################################################################
#################################### UNIT TESTS #############################################
#############################################################################################
  UnitTestsAsan:
    needs: [BuilderDebAsan]
    runs-on: [self-hosted, func-tester]
    runs-on: [self-hosted, fuzzer-unit-tester]
    steps:
      - name: Download json reports
        uses: actions/download-artifact@v2
@@ -1330,7 +1642,7 @@ jobs:
          sudo rm -fr $TEMP_PATH
  UnitTestsReleaseClang:
    needs: [BuilderBinRelease]
    runs-on: [self-hosted, func-tester]
    runs-on: [self-hosted, fuzzer-unit-tester]
    steps:
      - name: Download json reports
        uses: actions/download-artifact@v2
@@ -1358,7 +1670,7 @@ jobs:
          sudo rm -fr $TEMP_PATH
  UnitTestsTsan:
    needs: [BuilderDebTsan]
    runs-on: [self-hosted, func-tester]
    runs-on: [self-hosted, fuzzer-unit-tester]
    steps:
      - name: Download json reports
        uses: actions/download-artifact@v2
@@ -1386,7 +1698,7 @@ jobs:
          sudo rm -fr $TEMP_PATH
  UnitTestsMsan:
    needs: [BuilderDebMsan]
    runs-on: [self-hosted, func-tester]
    runs-on: [self-hosted, fuzzer-unit-tester]
    steps:
      - name: Download json reports
        uses: actions/download-artifact@v2
@@ -1414,7 +1726,7 @@ jobs:
          sudo rm -fr $TEMP_PATH
  UnitTestsUBsan:
    needs: [BuilderDebUBsan]
    runs-on: [self-hosted, func-tester]
    runs-on: [self-hosted, fuzzer-unit-tester]
    steps:
      - name: Download json reports
        uses: actions/download-artifact@v2
@@ -1426,7 +1738,7 @@ jobs:
        env:
          TEMP_PATH: ${{runner.temp}}/unit_tests_ubsan
          REPORTS_PATH: ${{runner.temp}}/reports_dir
          CHECK_NAME: 'Unit tests (msan, actions)'
          CHECK_NAME: 'Unit tests (ubsan, actions)'
          REPO_COPY: ${{runner.temp}}/unit_tests_ubsan/ClickHouse
        run: |
          sudo rm -fr $TEMP_PATH
@@ -1449,6 +1761,8 @@ jobs:
      - FastTest
      - FunctionalStatelessTestDebug
      - FunctionalStatelessTestRelease
      - FunctionalStatelessTestReleaseDatabaseReplicated
      - FunctionalStatelessTestReleaseWideParts
      - FunctionalStatelessTestAsan
      - FunctionalStatelessTestTsan
      - FunctionalStatelessTestMsan
@@ -1459,7 +1773,6 @@ jobs:
      - FunctionalStatefulTestTsan
      - FunctionalStatefulTestMsan
      - FunctionalStatefulTestUBsan
      - DocsCheck
      - StressTestDebug
      - StressTestAsan
      - StressTestTsan
@@ -1481,6 +1794,7 @@ jobs:
      - UnitTestsReleaseClang
      - SplitBuildSmokeTest
      - CompatibilityCheck
      - IntegrationTestsFlakyCheck
    runs-on: [self-hosted, style-checker]
    steps:
      - name: Check out repository code
.github/workflows/master.yml (vendored, new file, 1685 lines)
File diff suppressed because it is too large.
.github/workflows/release_branches.yml (vendored, new file, 932 lines)
@@ -0,0 +1,932 @@
|
||||
name: ReleaseCI
|
||||
on: # yamllint disable-line rule:truthy
|
||||
push:
|
||||
branches:
|
||||
- '21.**'
|
||||
- '22.**'
|
||||
- '23.**'
|
||||
- '24.**'
|
||||
jobs:
|
||||
DockerHubPush:
|
||||
runs-on: [self-hosted, style-checker]
|
||||
steps:
|
||||
- name: Check out repository code
|
||||
uses: actions/checkout@v2
|
||||
- name: Images check
|
||||
run: |
|
||||
cd $GITHUB_WORKSPACE/tests/ci
|
||||
python3 docker_images_check.py
|
||||
- name: Upload images files to artifacts
|
||||
uses: actions/upload-artifact@v2
|
||||
with:
|
||||
name: changed_images
|
||||
path: ${{ runner.temp }}/docker_images_check/changed_images.json
|
||||
CompatibilityCheck:
|
||||
needs: [BuilderDebRelease]
|
||||
runs-on: [self-hosted, style-checker]
|
||||
steps:
|
||||
- name: Check out repository code
|
||||
uses: actions/checkout@v2
|
||||
- name: Download json reports
|
||||
uses: actions/download-artifact@v2
|
||||
with:
|
||||
path: ${{runner.temp}}/reports_dir
|
||||
- name: CompatibilityCheck
|
||||
env:
|
||||
TEMP_PATH: ${{runner.temp}}/compatibility_check
|
||||
REPO_COPY: ${{runner.temp}}/compatibility_check/ClickHouse
|
||||
REPORTS_PATH: ${{runner.temp}}/reports_dir
|
||||
run: |
|
||||
sudo rm -fr $TEMP_PATH
|
||||
mkdir -p $TEMP_PATH
|
||||
cp -r $GITHUB_WORKSPACE $TEMP_PATH
|
||||
cd $REPO_COPY/tests/ci && python3 compatibility_check.py
|
||||
- name: Cleanup
|
||||
if: always()
|
||||
run: |
|
||||
docker kill $(docker ps -q) ||:
|
||||
docker rm -f $(docker ps -a -q) ||:
|
||||
sudo rm -fr $TEMP_PATH
|
||||
#########################################################################################
|
||||
#################################### ORDINARY BUILDS ####################################
|
||||
#########################################################################################
|
||||
BuilderDebRelease:
|
||||
needs: [DockerHubPush]
|
||||
runs-on: [self-hosted, builder]
|
||||
steps:
|
||||
- name: Download changed images
|
||||
uses: actions/download-artifact@v2
|
||||
with:
|
||||
name: changed_images
|
||||
path: ${{ runner.temp }}/images_path
|
||||
- name: Check out repository code
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
submodules: 'recursive'
|
||||
fetch-depth: 0 # otherwise we will have no info about contributors
|
||||
- name: Build
|
||||
env:
|
||||
TEMP_PATH: ${{runner.temp}}/build_check
|
||||
IMAGES_PATH: ${{runner.temp}}/images_path
|
||||
REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
|
||||
CACHES_PATH: ${{runner.temp}}/../ccaches
|
||||
CHECK_NAME: 'ClickHouse build check (actions)'
|
||||
BUILD_NAME: 'package_release'
|
||||
run: |
|
||||
sudo rm -fr $TEMP_PATH
|
||||
mkdir -p $TEMP_PATH
|
||||
cp -r $GITHUB_WORKSPACE $TEMP_PATH
|
||||
cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
|
||||
- name: Upload build URLs to artifacts
|
||||
uses: actions/upload-artifact@v2
|
||||
with:
|
||||
name: ${{ env.BUILD_NAME }}
|
||||
path: ${{ runner.temp }}/build_check/${{ env.BUILD_NAME }}.json
|
||||
- name: Cleanup
|
||||
if: always()
|
||||
run: |
|
||||
docker kill $(docker ps -q) ||:
|
||||
docker rm -f $(docker ps -a -q) ||:
|
||||
sudo rm -fr $TEMP_PATH
|
||||
BuilderDebAsan:
|
||||
needs: [DockerHubPush]
|
||||
runs-on: [self-hosted, builder]
|
||||
steps:
|
||||
- name: Download changed images
|
||||
uses: actions/download-artifact@v2
|
||||
with:
|
||||
name: changed_images
|
||||
path: ${{ runner.temp }}/images_path
|
||||
- name: Check out repository code
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
submodules: 'recursive'
|
||||
fetch-depth: 0 # otherwise we will have no info about contributors
|
||||
- name: Build
|
||||
env:
|
||||
TEMP_PATH: ${{runner.temp}}/build_check
|
||||
IMAGES_PATH: ${{runner.temp}}/images_path
|
||||
REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
|
||||
CACHES_PATH: ${{runner.temp}}/../ccaches
|
||||
CHECK_NAME: 'ClickHouse build check (actions)'
|
||||
BUILD_NAME: 'package_asan'
|
||||
run: |
|
||||
sudo rm -fr $TEMP_PATH
|
||||
mkdir -p $TEMP_PATH
|
||||
cp -r $GITHUB_WORKSPACE $TEMP_PATH
|
||||
cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
|
||||
- name: Upload build URLs to artifacts
|
||||
uses: actions/upload-artifact@v2
|
||||
with:
|
||||
name: ${{ env.BUILD_NAME }}
|
||||
path: ${{ runner.temp }}/build_check/${{ env.BUILD_NAME }}.json
|
||||
- name: Cleanup
|
||||
if: always()
|
||||
run: |
|
||||
docker kill $(docker ps -q) ||:
|
||||
docker rm -f $(docker ps -a -q) ||:
|
||||
sudo rm -fr $TEMP_PATH
|
||||
BuilderDebUBsan:
|
||||
needs: [DockerHubPush]
|
||||
runs-on: [self-hosted, builder]
|
||||
steps:
|
||||
- name: Download changed images
|
||||
uses: actions/download-artifact@v2
|
||||
with:
|
||||
name: changed_images
|
||||
path: ${{ runner.temp }}/images_path
|
||||
- name: Check out repository code
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
submodules: 'recursive'
|
||||
fetch-depth: 0 # otherwise we will have no info about contributors
|
||||
- name: Build
|
||||
env:
|
||||
TEMP_PATH: ${{runner.temp}}/build_check
|
||||
IMAGES_PATH: ${{runner.temp}}/images_path
|
||||
REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
|
||||
CACHES_PATH: ${{runner.temp}}/../ccaches
|
||||
CHECK_NAME: 'ClickHouse build check (actions)'
|
||||
BUILD_NAME: 'package_ubsan'
|
||||
run: |
|
||||
sudo rm -fr $TEMP_PATH
|
||||
mkdir -p $TEMP_PATH
|
||||
cp -r $GITHUB_WORKSPACE $TEMP_PATH
|
||||
cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
- name: Upload build URLs to artifacts
uses: actions/upload-artifact@v2
with:
name: ${{ env.BUILD_NAME }}
path: ${{ runner.temp }}/build_check/${{ env.BUILD_NAME }}.json
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
BuilderDebTsan:
needs: [DockerHubPush]
runs-on: [self-hosted, builder]
steps:
- name: Download changed images
uses: actions/download-artifact@v2
with:
name: changed_images
path: ${{ runner.temp }}/images_path
- name: Check out repository code
uses: actions/checkout@v2
with:
submodules: 'recursive'
fetch-depth: 0 # otherwise we will have no info about contributors
- name: Build
env:
TEMP_PATH: ${{runner.temp}}/build_check
IMAGES_PATH: ${{runner.temp}}/images_path
REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
CACHES_PATH: ${{runner.temp}}/../ccaches
CHECK_NAME: 'ClickHouse build check (actions)'
BUILD_NAME: 'package_tsan'
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
- name: Upload build URLs to artifacts
uses: actions/upload-artifact@v2
with:
name: ${{ env.BUILD_NAME }}
path: ${{ runner.temp }}/build_check/${{ env.BUILD_NAME }}.json
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
BuilderDebMsan:
needs: [DockerHubPush]
runs-on: [self-hosted, builder]
steps:
- name: Download changed images
uses: actions/download-artifact@v2
with:
name: changed_images
path: ${{ runner.temp }}/images_path
- name: Check out repository code
uses: actions/checkout@v2
with:
submodules: 'recursive'
fetch-depth: 0 # otherwise we will have no info about contributors
- name: Build
env:
TEMP_PATH: ${{runner.temp}}/build_check
IMAGES_PATH: ${{runner.temp}}/images_path
REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
CACHES_PATH: ${{runner.temp}}/../ccaches
CHECK_NAME: 'ClickHouse build check (actions)'
BUILD_NAME: 'package_msan'
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
- name: Upload build URLs to artifacts
uses: actions/upload-artifact@v2
with:
name: ${{ env.BUILD_NAME }}
path: ${{ runner.temp }}/build_check/${{ env.BUILD_NAME }}.json
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
BuilderDebDebug:
needs: [DockerHubPush]
runs-on: [self-hosted, builder]
steps:
- name: Download changed images
uses: actions/download-artifact@v2
with:
name: changed_images
path: ${{ runner.temp }}/images_path
- name: Check out repository code
uses: actions/checkout@v2
with:
submodules: 'recursive'
fetch-depth: 0 # otherwise we will have no info about contributors
- name: Build
env:
TEMP_PATH: ${{runner.temp}}/build_check
IMAGES_PATH: ${{runner.temp}}/images_path
REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
CACHES_PATH: ${{runner.temp}}/../ccaches
CHECK_NAME: 'ClickHouse build check (actions)'
BUILD_NAME: 'package_debug'
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
- name: Upload build URLs to artifacts
uses: actions/upload-artifact@v2
with:
name: ${{ env.BUILD_NAME }}
path: ${{ runner.temp }}/build_check/${{ env.BUILD_NAME }}.json
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
############################################################################################
##################################### BUILD REPORTER #######################################
############################################################################################
BuilderReport:
needs:
- BuilderDebRelease
- BuilderDebAsan
- BuilderDebTsan
- BuilderDebUBsan
- BuilderDebMsan
- BuilderDebDebug
runs-on: [self-hosted, style-checker]
steps:
- name: Download json reports
uses: actions/download-artifact@v2
with:
path: ${{runner.temp}}/reports_dir
- name: Check out repository code
uses: actions/checkout@v2
- name: Report Builder
env:
TEMP_PATH: ${{runner.temp}}/report_check
REPORTS_PATH: ${{runner.temp}}/reports_dir
CHECK_NAME: 'ClickHouse build check (actions)'
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cd $GITHUB_WORKSPACE/tests/ci
python3 build_report_check.py "$CHECK_NAME"
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
##############################################################################################
########################### FUNCTIONAl STATELESS TESTS #######################################
##############################################################################################
FunctionalStatelessTestRelease:
needs: [BuilderDebRelease]
runs-on: [self-hosted, func-tester]
steps:
- name: Download json reports
uses: actions/download-artifact@v2
with:
path: ${{runner.temp}}/reports_dir
- name: Check out repository code
uses: actions/checkout@v2
- name: Functional test
env:
TEMP_PATH: ${{runner.temp}}/stateless_debug
REPORTS_PATH: ${{runner.temp}}/reports_dir
CHECK_NAME: 'Stateless tests (release, actions)'
REPO_COPY: ${{runner.temp}}/stateless_debug/ClickHouse
KILL_TIMEOUT: 10800
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci
python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
FunctionalStatelessTestAsan:
needs: [BuilderDebAsan]
runs-on: [self-hosted, func-tester]
steps:
- name: Download json reports
uses: actions/download-artifact@v2
with:
path: ${{runner.temp}}/reports_dir
- name: Check out repository code
uses: actions/checkout@v2
- name: Functional test
env:
TEMP_PATH: ${{runner.temp}}/stateless_debug
REPORTS_PATH: ${{runner.temp}}/reports_dir
CHECK_NAME: 'Stateless tests (address, actions)'
REPO_COPY: ${{runner.temp}}/stateless_debug/ClickHouse
KILL_TIMEOUT: 10800
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci
python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
FunctionalStatelessTestTsan:
needs: [BuilderDebTsan]
runs-on: [self-hosted, func-tester]
steps:
- name: Download json reports
uses: actions/download-artifact@v2
with:
path: ${{runner.temp}}/reports_dir
- name: Check out repository code
uses: actions/checkout@v2
- name: Functional test
env:
TEMP_PATH: ${{runner.temp}}/stateless_tsan
REPORTS_PATH: ${{runner.temp}}/reports_dir
CHECK_NAME: 'Stateless tests (thread, actions)'
REPO_COPY: ${{runner.temp}}/stateless_tsan/ClickHouse
KILL_TIMEOUT: 10800
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci
python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
FunctionalStatelessTestUBsan:
needs: [BuilderDebUBsan]
runs-on: [self-hosted, func-tester]
steps:
- name: Download json reports
uses: actions/download-artifact@v2
with:
path: ${{runner.temp}}/reports_dir
- name: Check out repository code
uses: actions/checkout@v2
- name: Functional test
env:
TEMP_PATH: ${{runner.temp}}/stateless_ubsan
REPORTS_PATH: ${{runner.temp}}/reports_dir
CHECK_NAME: 'Stateless tests (ubsan, actions)'
REPO_COPY: ${{runner.temp}}/stateless_ubsan/ClickHouse
KILL_TIMEOUT: 10800
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci
python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
FunctionalStatelessTestMsan:
needs: [BuilderDebMsan]
runs-on: [self-hosted, func-tester]
steps:
- name: Download json reports
uses: actions/download-artifact@v2
with:
path: ${{runner.temp}}/reports_dir
- name: Check out repository code
uses: actions/checkout@v2
- name: Functional test
env:
TEMP_PATH: ${{runner.temp}}/stateless_memory
REPORTS_PATH: ${{runner.temp}}/reports_dir
CHECK_NAME: 'Stateless tests (memory, actions)'
REPO_COPY: ${{runner.temp}}/stateless_memory/ClickHouse
KILL_TIMEOUT: 10800
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci
python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
FunctionalStatelessTestDebug:
needs: [BuilderDebDebug]
runs-on: [self-hosted, func-tester]
steps:
- name: Download json reports
uses: actions/download-artifact@v2
with:
path: ${{runner.temp}}/reports_dir
- name: Check out repository code
uses: actions/checkout@v2
- name: Functional test
env:
TEMP_PATH: ${{runner.temp}}/stateless_debug
REPORTS_PATH: ${{runner.temp}}/reports_dir
CHECK_NAME: 'Stateless tests (debug, actions)'
REPO_COPY: ${{runner.temp}}/stateless_debug/ClickHouse
KILL_TIMEOUT: 10800
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci
python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
##############################################################################################
############################ FUNCTIONAl STATEFUL TESTS #######################################
##############################################################################################
FunctionalStatefulTestRelease:
needs: [BuilderDebRelease]
runs-on: [self-hosted, func-tester]
steps:
- name: Download json reports
uses: actions/download-artifact@v2
with:
path: ${{runner.temp}}/reports_dir
- name: Check out repository code
uses: actions/checkout@v2
- name: Functional test
env:
TEMP_PATH: ${{runner.temp}}/stateful_debug
REPORTS_PATH: ${{runner.temp}}/reports_dir
CHECK_NAME: 'Stateful tests (release, actions)'
REPO_COPY: ${{runner.temp}}/stateful_debug/ClickHouse
KILL_TIMEOUT: 3600
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci
python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
FunctionalStatefulTestAsan:
needs: [BuilderDebAsan]
runs-on: [self-hosted, func-tester]
steps:
- name: Download json reports
uses: actions/download-artifact@v2
with:
path: ${{runner.temp}}/reports_dir
- name: Check out repository code
uses: actions/checkout@v2
- name: Functional test
env:
TEMP_PATH: ${{runner.temp}}/stateful_debug
REPORTS_PATH: ${{runner.temp}}/reports_dir
CHECK_NAME: 'Stateful tests (address, actions)'
REPO_COPY: ${{runner.temp}}/stateful_debug/ClickHouse
KILL_TIMEOUT: 3600
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci
python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
FunctionalStatefulTestTsan:
needs: [BuilderDebTsan]
runs-on: [self-hosted, func-tester]
steps:
- name: Download json reports
uses: actions/download-artifact@v2
with:
path: ${{runner.temp}}/reports_dir
- name: Check out repository code
uses: actions/checkout@v2
- name: Functional test
env:
TEMP_PATH: ${{runner.temp}}/stateful_tsan
REPORTS_PATH: ${{runner.temp}}/reports_dir
CHECK_NAME: 'Stateful tests (thread, actions)'
REPO_COPY: ${{runner.temp}}/stateful_tsan/ClickHouse
KILL_TIMEOUT: 3600
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci
python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
FunctionalStatefulTestMsan:
needs: [BuilderDebMsan]
runs-on: [self-hosted, func-tester]
steps:
- name: Download json reports
uses: actions/download-artifact@v2
with:
path: ${{runner.temp}}/reports_dir
- name: Check out repository code
uses: actions/checkout@v2
- name: Functional test
env:
TEMP_PATH: ${{runner.temp}}/stateful_msan
REPORTS_PATH: ${{runner.temp}}/reports_dir
CHECK_NAME: 'Stateful tests (memory, actions)'
REPO_COPY: ${{runner.temp}}/stateful_msan/ClickHouse
KILL_TIMEOUT: 3600
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci
python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
FunctionalStatefulTestUBsan:
needs: [BuilderDebUBsan]
runs-on: [self-hosted, func-tester]
steps:
- name: Download json reports
uses: actions/download-artifact@v2
with:
path: ${{runner.temp}}/reports_dir
- name: Check out repository code
uses: actions/checkout@v2
- name: Functional test
env:
TEMP_PATH: ${{runner.temp}}/stateful_ubsan
REPORTS_PATH: ${{runner.temp}}/reports_dir
CHECK_NAME: 'Stateful tests (ubsan, actions)'
REPO_COPY: ${{runner.temp}}/stateful_ubsan/ClickHouse
KILL_TIMEOUT: 3600
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci
python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
FunctionalStatefulTestDebug:
needs: [BuilderDebDebug]
runs-on: [self-hosted, func-tester]
steps:
- name: Download json reports
uses: actions/download-artifact@v2
with:
path: ${{runner.temp}}/reports_dir
- name: Check out repository code
uses: actions/checkout@v2
- name: Functional test
env:
TEMP_PATH: ${{runner.temp}}/stateful_debug
REPORTS_PATH: ${{runner.temp}}/reports_dir
CHECK_NAME: 'Stateful tests (debug, actions)'
REPO_COPY: ${{runner.temp}}/stateful_debug/ClickHouse
KILL_TIMEOUT: 3600
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci
python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
##############################################################################################
######################################### STRESS TESTS #######################################
##############################################################################################
StressTestAsan:
needs: [BuilderDebAsan]
runs-on: [self-hosted, stress-tester]
steps:
- name: Download json reports
uses: actions/download-artifact@v2
with:
path: ${{runner.temp}}/reports_dir
- name: Check out repository code
uses: actions/checkout@v2
- name: Stress test
env:
TEMP_PATH: ${{runner.temp}}/stress_thread
REPORTS_PATH: ${{runner.temp}}/reports_dir
CHECK_NAME: 'Stress test (address, actions)'
REPO_COPY: ${{runner.temp}}/stress_thread/ClickHouse
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci
python3 stress_check.py "$CHECK_NAME"
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
StressTestTsan:
needs: [BuilderDebTsan]
runs-on: [self-hosted, stress-tester]
steps:
- name: Download json reports
uses: actions/download-artifact@v2
with:
path: ${{runner.temp}}/reports_dir
- name: Check out repository code
uses: actions/checkout@v2
- name: Stress test
env:
TEMP_PATH: ${{runner.temp}}/stress_thread
REPORTS_PATH: ${{runner.temp}}/reports_dir
CHECK_NAME: 'Stress test (thread, actions)'
REPO_COPY: ${{runner.temp}}/stress_thread/ClickHouse
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci
python3 stress_check.py "$CHECK_NAME"
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
StressTestMsan:
needs: [BuilderDebMsan]
runs-on: [self-hosted, stress-tester]
steps:
- name: Download json reports
uses: actions/download-artifact@v2
with:
path: ${{runner.temp}}/reports_dir
- name: Check out repository code
uses: actions/checkout@v2
- name: Stress test
env:
TEMP_PATH: ${{runner.temp}}/stress_memory
REPORTS_PATH: ${{runner.temp}}/reports_dir
CHECK_NAME: 'Stress test (memory, actions)'
REPO_COPY: ${{runner.temp}}/stress_memory/ClickHouse
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci
python3 stress_check.py "$CHECK_NAME"
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
StressTestUBsan:
needs: [BuilderDebUBsan]
runs-on: [self-hosted, stress-tester]
steps:
- name: Download json reports
uses: actions/download-artifact@v2
with:
path: ${{runner.temp}}/reports_dir
- name: Check out repository code
uses: actions/checkout@v2
- name: Stress test
env:
TEMP_PATH: ${{runner.temp}}/stress_undefined
REPORTS_PATH: ${{runner.temp}}/reports_dir
CHECK_NAME: 'Stress test (undefined, actions)'
REPO_COPY: ${{runner.temp}}/stress_undefined/ClickHouse
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci
python3 stress_check.py "$CHECK_NAME"
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
StressTestDebug:
needs: [BuilderDebDebug]
runs-on: [self-hosted, stress-tester]
steps:
- name: Download json reports
uses: actions/download-artifact@v2
with:
path: ${{runner.temp}}/reports_dir
- name: Check out repository code
uses: actions/checkout@v2
- name: Stress test
env:
TEMP_PATH: ${{runner.temp}}/stress_debug
REPORTS_PATH: ${{runner.temp}}/reports_dir
CHECK_NAME: 'Stress test (debug, actions)'
REPO_COPY: ${{runner.temp}}/stress_debug/ClickHouse
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci
python3 stress_check.py "$CHECK_NAME"
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
#############################################################################################
############################# INTEGRATION TESTS #############################################
#############################################################################################
IntegrationTestsAsan:
needs: [BuilderDebAsan, FunctionalStatelessTestAsan]
runs-on: [self-hosted, stress-tester]
steps:
- name: Download json reports
uses: actions/download-artifact@v2
with:
path: ${{runner.temp}}/reports_dir
- name: Check out repository code
uses: actions/checkout@v2
- name: Integration test
env:
TEMP_PATH: ${{runner.temp}}/integration_tests_asan
REPORTS_PATH: ${{runner.temp}}/reports_dir
CHECK_NAME: 'Integration tests (asan, actions)'
REPO_COPY: ${{runner.temp}}/integration_tests_asan/ClickHouse
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci
python3 integration_test_check.py "$CHECK_NAME"
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
IntegrationTestsTsan:
needs: [BuilderDebTsan, FunctionalStatelessTestTsan]
runs-on: [self-hosted, stress-tester]
steps:
- name: Download json reports
uses: actions/download-artifact@v2
with:
path: ${{runner.temp}}/reports_dir
- name: Check out repository code
uses: actions/checkout@v2
- name: Integration test
env:
TEMP_PATH: ${{runner.temp}}/integration_tests_tsan
REPORTS_PATH: ${{runner.temp}}/reports_dir
CHECK_NAME: 'Integration tests (thread, actions)'
REPO_COPY: ${{runner.temp}}/integration_tests_tsan/ClickHouse
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci
python3 integration_test_check.py "$CHECK_NAME"
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
IntegrationTestsRelease:
needs: [BuilderDebRelease, FunctionalStatelessTestRelease]
runs-on: [self-hosted, stress-tester]
steps:
- name: Download json reports
uses: actions/download-artifact@v2
with:
path: ${{runner.temp}}/reports_dir
- name: Check out repository code
uses: actions/checkout@v2
- name: Integration test
env:
TEMP_PATH: ${{runner.temp}}/integration_tests_release
REPORTS_PATH: ${{runner.temp}}/reports_dir
CHECK_NAME: 'Integration tests (release, actions)'
REPO_COPY: ${{runner.temp}}/integration_tests_release/ClickHouse
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci
python3 integration_test_check.py "$CHECK_NAME"
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
FinishCheck:
needs:
- DockerHubPush
- BuilderReport
- FunctionalStatelessTestDebug
- FunctionalStatelessTestRelease
- FunctionalStatelessTestAsan
- FunctionalStatelessTestTsan
- FunctionalStatelessTestMsan
- FunctionalStatelessTestUBsan
- FunctionalStatefulTestDebug
- FunctionalStatefulTestRelease
- FunctionalStatefulTestAsan
- FunctionalStatefulTestTsan
- FunctionalStatefulTestMsan
- FunctionalStatefulTestUBsan
- StressTestDebug
- StressTestAsan
- StressTestTsan
- StressTestMsan
- StressTestUBsan
- IntegrationTestsAsan
- IntegrationTestsRelease
- IntegrationTestsTsan
- CompatibilityCheck
runs-on: [self-hosted, style-checker]
steps:
- name: Check out repository code
uses: actions/checkout@v2
- name: Finish label
run: |
cd $GITHUB_WORKSPACE/tests/ci
python3 finish_check.py
13
.gitmodules
vendored
@@ -17,7 +17,7 @@
[submodule "contrib/zlib-ng"]
path = contrib/zlib-ng
url = https://github.com/ClickHouse-Extras/zlib-ng.git
branch = clickhouse-new
branch = clickhouse-2.0.x
[submodule "contrib/googletest"]
path = contrib/googletest
url = https://github.com/google/googletest.git
@@ -135,12 +135,9 @@
[submodule "contrib/flatbuffers"]
path = contrib/flatbuffers
url = https://github.com/ClickHouse-Extras/flatbuffers.git
[submodule "contrib/libc-headers"]
path = contrib/libc-headers
url = https://github.com/ClickHouse-Extras/libc-headers.git
[submodule "contrib/replxx"]
path = contrib/replxx
url = https://github.com/AmokHuginnsson/replxx.git
url = https://github.com/ClickHouse-Extras/replxx.git
[submodule "contrib/avro"]
path = contrib/avro
url = https://github.com/ClickHouse-Extras/avro.git
@@ -171,12 +168,6 @@
[submodule "contrib/sentry-native"]
path = contrib/sentry-native
url = https://github.com/ClickHouse-Extras/sentry-native.git
[submodule "contrib/gcem"]
path = contrib/gcem
url = https://github.com/kthohr/gcem.git
[submodule "contrib/stats"]
path = contrib/stats
url = https://github.com/kthohr/stats.git
[submodule "contrib/krb5"]
path = contrib/krb5
url = https://github.com/ClickHouse-Extras/krb5
@@ -17,7 +17,7 @@
* Support `EXISTS (subquery)`. Closes [#6852](https://github.com/ClickHouse/ClickHouse/issues/6852). [#29731](https://github.com/ClickHouse/ClickHouse/pull/29731) ([Kseniia Sumarokova](https://github.com/kssenii)).
* Session logging for audit. Logging all successful and failed login and logout events to a new `system.session_log` table. [#22415](https://github.com/ClickHouse/ClickHouse/pull/22415) ([Vasily Nemkov](https://github.com/Enmk)) ([Vitaly Baranov](https://github.com/vitlibar)).
* Support multidimensional cosine distance and euclidean distance functions; L1, L2, Lp, Linf distances and norms. Scalar product on tuples and various arithmetic operators on tuples. This fully closes [#4509](https://github.com/ClickHouse/ClickHouse/issues/4509) and even more. [#27933](https://github.com/ClickHouse/ClickHouse/pull/27933) ([Alexey Boykov](https://github.com/mathalex)).
* Add support for compression and decompression for `INTO OUTPUT` and `FROM INFILE` (with autodetect or with additional optional parameter). [#27135](https://github.com/ClickHouse/ClickHouse/pull/27135) ([Filatenkov Artur](https://github.com/FArthur-cmd)).
* Add support for compression and decompression for `INTO OUTFILE` and `FROM INFILE` (with autodetect or with additional optional parameter). [#27135](https://github.com/ClickHouse/ClickHouse/pull/27135) ([Filatenkov Artur](https://github.com/FArthur-cmd)).
* Add CORS (Cross Origin Resource Sharing) support with HTTP `OPTIONS` request. It means, now Grafana will work with serverless requests without a kludges. Closes [#18693](https://github.com/ClickHouse/ClickHouse/issues/18693). [#29155](https://github.com/ClickHouse/ClickHouse/pull/29155) ([Filatenkov Artur](https://github.com/FArthur-cmd)).
* Queries with JOIN ON now supports disjunctions (OR). [#21320](https://github.com/ClickHouse/ClickHouse/pull/21320) ([Ilya Golshtein](https://github.com/ilejn)).
* Added function `tokens`. That allow to split string into tokens using non-alpha numeric ASCII characters as separators. [#29981](https://github.com/ClickHouse/ClickHouse/pull/29981) ([Maksim Kita](https://github.com/kitaisreal)). Added function `ngrams` to extract ngrams from text. Closes [#29699](https://github.com/ClickHouse/ClickHouse/issues/29699). [#29738](https://github.com/ClickHouse/ClickHouse/pull/29738) ([Maksim Kita](https://github.com/kitaisreal)).
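The changelog entries above can be exercised directly from `clickhouse-client`; the snippet below is only a minimal illustrative sketch and is not part of the commit. The table-free queries, the sample strings, and the output file name are made up, and the exact behaviour of compressed `INTO OUTFILE` should be checked against the documentation of the corresponding release.

```sql
-- EXISTS (subquery), as referenced in #29731
SELECT 'has rows' WHERE EXISTS (SELECT 1 FROM system.numbers LIMIT 1);

-- tokens() and ngrams(), as referenced in #29981 / #29738
SELECT tokens('ClickHouse 21.12, backport CI');  -- ['ClickHouse', '21', '12', 'backport', 'CI']
SELECT ngrams('backport', 3);                    -- ['bac', 'ack', 'ckp', 'kpo', 'por', 'ort']

-- Compressed export via INTO OUTFILE (#27135); here the codec would be
-- autodetected from the .gz extension (assumed file name).
SELECT number FROM numbers(10) INTO OUTFILE 'numbers.tsv.gz';
```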
107
CMakeLists.txt
@@ -149,6 +149,10 @@ if (ENABLE_FUZZING)
set (ENABLE_JEMALLOC 0)
set (ENABLE_CHECK_HEAVY_BUILDS 1)
set (GLIBC_COMPATIBILITY OFF)

# For codegen_select_fuzzer
set (ENABLE_PROTOBUF 1)
set (USE_INTERNAL_PROTOBUF_LIBRARY 1)
endif()

# Global libraries
@@ -169,9 +173,7 @@ endif ()
include (cmake/check_flags.cmake)
include (cmake/add_warning.cmake)

if (NOT MSVC)
set (COMMON_WARNING_FLAGS "${COMMON_WARNING_FLAGS} -Wall") # -Werror and many more is also added inside cmake/warnings.cmake
endif ()
set (COMMON_WARNING_FLAGS "${COMMON_WARNING_FLAGS} -Wall") # -Werror and many more is also added inside cmake/warnings.cmake

if (COMPILER_CLANG)
# clang: warning: argument unused during compilation: '-specs=/usr/share/dpkg/no-pie-compile.specs' [-Wunused-command-line-argument]
@@ -201,7 +203,7 @@ endif ()
option(ENABLE_TESTS "Provide unit_test_dbms target with Google.Test unit tests" ON)
option(ENABLE_EXAMPLES "Build all example programs in 'examples' subdirectories" OFF)

if (OS_LINUX AND (ARCH_AMD64 OR ARCH_AARCH64) AND NOT UNBUNDLED AND MAKE_STATIC_LIBRARIES AND NOT SPLIT_SHARED_LIBRARIES AND NOT USE_MUSL)
if (OS_LINUX AND (ARCH_AMD64 OR ARCH_AARCH64) AND MAKE_STATIC_LIBRARIES AND NOT SPLIT_SHARED_LIBRARIES AND NOT USE_MUSL)
# Only for Linux, x86_64 or aarch64.
option(GLIBC_COMPATIBILITY "Enable compatibility with older glibc libraries." ON)
elseif(GLIBC_COMPATIBILITY)
@@ -219,37 +221,13 @@ endif()
# Make sure the final executable has symbols exported
set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -rdynamic")

find_program (OBJCOPY_PATH NAMES "llvm-objcopy" "llvm-objcopy-13" "llvm-objcopy-12" "llvm-objcopy-11" "llvm-objcopy-10" "llvm-objcopy-9" "llvm-objcopy-8" "objcopy")

if (NOT OBJCOPY_PATH AND OS_DARWIN)
find_program (BREW_PATH NAMES "brew")
if (BREW_PATH)
execute_process (COMMAND ${BREW_PATH} --prefix llvm ERROR_QUIET OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE LLVM_PREFIX)
if (LLVM_PREFIX)
find_program (OBJCOPY_PATH NAMES "llvm-objcopy" PATHS "${LLVM_PREFIX}/bin" NO_DEFAULT_PATH)
endif ()
if (NOT OBJCOPY_PATH)
execute_process (COMMAND ${BREW_PATH} --prefix binutils ERROR_QUIET OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE BINUTILS_PREFIX)
if (BINUTILS_PREFIX)
find_program (OBJCOPY_PATH NAMES "objcopy" PATHS "${BINUTILS_PREFIX}/bin" NO_DEFAULT_PATH)
endif ()
endif ()
endif ()
endif ()

if (OBJCOPY_PATH)
message (STATUS "Using objcopy: ${OBJCOPY_PATH}")
else ()
message (FATAL_ERROR "Cannot find objcopy.")
endif ()

if (OS_DARWIN)
# The `-all_load` flag forces loading of all symbols from all libraries,
# and leads to multiply-defined symbols. This flag allows force loading
# from a _specific_ library, which is what we need.
set(WHOLE_ARCHIVE -force_load)
# The `-noall_load` flag is the default and now obsolete.
set(NO_WHOLE_ARCHIVE "")
set(NO_WHOLE_ARCHIVE "-undefined,error") # Effectively, a no-op. Here to avoid empty "-Wl, " sequence to be generated in the command line.
else ()
set(WHOLE_ARCHIVE --whole-archive)
set(NO_WHOLE_ARCHIVE --no-whole-archive)
@@ -278,6 +256,13 @@ if (NOT CMAKE_BUILD_TYPE_UC STREQUAL "RELEASE")
endif ()
endif()

if (CMAKE_BUILD_TYPE_UC STREQUAL "DEBUG")
set(USE_DEBUG_HELPERS ON)
else ()
set(USE_DEBUG_HELPERS ON)
endif()
option(USE_DEBUG_HELPERS "Enable debug helpers" ${USE_DEBUG_HELPERS})

# Create BuildID when using lld. For other linkers it is created by default.
if (LINKER_NAME MATCHES "lld$")
# SHA1 is not cryptographically secure but it is the best what lld is offering.
@@ -312,6 +297,10 @@ include(cmake/cpu_features.cmake)
# Enable it explicitly.
set (COMPILER_FLAGS "${COMPILER_FLAGS} -fasynchronous-unwind-tables")

# Reproducible builds
set (COMPILER_FLAGS "${COMPILER_FLAGS} -ffile-prefix-map=${CMAKE_SOURCE_DIR}=.")
set (CMAKE_ASM_FLAGS "${CMAKE_ASM_FLAGS} -ffile-prefix-map=${CMAKE_SOURCE_DIR}=.")

if (${CMAKE_VERSION} VERSION_LESS "3.12.4")
# CMake < 3.12 doesn't support setting 20 as a C++ standard version.
# We will add C++ standard controlling flag in CMAKE_CXX_FLAGS manually for now.
@@ -392,6 +381,8 @@ if (COMPILER_CLANG)
option(ENABLE_THINLTO "Clang-specific link time optimization" ON)
endif()

set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fstrict-vtable-pointers")

# Set new experimental pass manager, it's a performance, build time and binary size win.
# Can be removed after https://reviews.llvm.org/D66490 merged and released to at least two versions of clang.
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fexperimental-new-pass-manager")
@@ -402,32 +393,13 @@ if (COMPILER_CLANG)
# completely.
if (ENABLE_THINLTO AND NOT ENABLE_TESTS AND NOT SANITIZE)
# Link time optimization
set (CMAKE_C_FLAGS_RELWITHDEBINFO "${CMAKE_C_FLAGS_RELWITHDEBINFO} -flto=thin")
set (CMAKE_CXX_FLAGS_RELWITHDEBINFO "${CMAKE_CXX_FLAGS_RELWITHDEBINFO} -flto=thin")
set (CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO "${CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO} -flto=thin")
set (CMAKE_C_FLAGS_RELWITHDEBINFO "${CMAKE_C_FLAGS_RELWITHDEBINFO} -flto=thin -fwhole-program-vtables")
set (CMAKE_CXX_FLAGS_RELWITHDEBINFO "${CMAKE_CXX_FLAGS_RELWITHDEBINFO} -flto=thin -fwhole-program-vtables")
set (CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO "${CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO} -flto=thin -fwhole-program-vtables")
elseif (ENABLE_THINLTO)
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot enable ThinLTO")
endif ()

# Always prefer llvm tools when using clang. For instance, we cannot use GNU ar when llvm LTO is enabled
find_program (LLVM_AR_PATH NAMES "llvm-ar" "llvm-ar-13" "llvm-ar-12" "llvm-ar-11" "llvm-ar-10" "llvm-ar-9" "llvm-ar-8")

if (LLVM_AR_PATH)
message(STATUS "Using llvm-ar: ${LLVM_AR_PATH}.")
set (CMAKE_AR ${LLVM_AR_PATH})
else ()
message(WARNING "Cannot find llvm-ar. System ar will be used instead. It does not work with ThinLTO.")
endif ()

find_program (LLVM_RANLIB_PATH NAMES "llvm-ranlib" "llvm-ranlib-13" "llvm-ranlib-12" "llvm-ranlib-11" "llvm-ranlib-10" "llvm-ranlib-9" "llvm-ranlib-8")

if (LLVM_RANLIB_PATH)
message(STATUS "Using llvm-ranlib: ${LLVM_RANLIB_PATH}.")
set (CMAKE_RANLIB ${LLVM_RANLIB_PATH})
else ()
message(WARNING "Cannot find llvm-ranlib. System ranlib will be used instead. It does not work with ThinLTO.")
endif ()

elseif (ENABLE_THINLTO)
message (${RECONFIGURE_MESSAGE_LEVEL} "ThinLTO is only available with CLang")
endif ()
@@ -435,20 +407,7 @@ endif ()
# Turns on all external libs like s3, kafka, ODBC, ...
option(ENABLE_LIBRARIES "Enable all external libraries by default" ON)

# We recommend avoiding this mode for production builds because we can't guarantee
# all needed libraries exist in your system.
# This mode exists for enthusiastic developers who are searching for trouble.
# The whole idea of using unknown version of libraries from the OS distribution is deeply flawed.
# Useful for maintainers of OS packages.
option (UNBUNDLED "Use system libraries instead of ones in contrib/" OFF)

if (UNBUNDLED)
set(NOT_UNBUNDLED OFF)
else ()
set(NOT_UNBUNDLED ON)
endif ()

if (UNBUNDLED OR NOT (OS_LINUX OR OS_DARWIN))
if (NOT (OS_LINUX OR OS_DARWIN))
# Using system libs can cause a lot of warnings in includes (on macro expansion).
option(WERROR "Enable -Werror compiler option" OFF)
else ()
@@ -494,19 +453,6 @@ else ()
set (CMAKE_POSITION_INDEPENDENT_CODE ON)
endif ()

# https://github.com/include-what-you-use/include-what-you-use
option (USE_INCLUDE_WHAT_YOU_USE "Automatically reduce unneeded includes in source code (external tool)" OFF)

if (USE_INCLUDE_WHAT_YOU_USE)
find_program(IWYU_PATH NAMES include-what-you-use iwyu)
if (NOT IWYU_PATH)
message(FATAL_ERROR "Could not find the program include-what-you-use")
endif()
if (${CMAKE_VERSION} VERSION_LESS "3.3.0")
message(FATAL_ERROR "include-what-you-use requires CMake version at least 3.3.")
endif()
endif ()

if (ENABLE_TESTS)
message (STATUS "Unit tests are enabled")
else()
@@ -527,7 +473,6 @@ message (STATUS
USE_STATIC_LIBRARIES=${USE_STATIC_LIBRARIES}
MAKE_STATIC_LIBRARIES=${MAKE_STATIC_LIBRARIES}
SPLIT_SHARED=${SPLIT_SHARED_LIBRARIES}
UNBUNDLED=${UNBUNDLED}
CCACHE=${CCACHE_FOUND} ${CCACHE_VERSION}")

include (GNUInstallDirs)
@@ -591,7 +536,6 @@ include (cmake/find/avro.cmake)
include (cmake/find/msgpack.cmake)
include (cmake/find/cassandra.cmake)
include (cmake/find/sentry.cmake)
include (cmake/find/stats.cmake)
include (cmake/find/datasketches.cmake)
include (cmake/find/libprotobuf-mutator.cmake)

@@ -609,8 +553,6 @@ include (cmake/find/mysqlclient.cmake)

# When testing for memory leaks with Valgrind, don't link tcmalloc or jemalloc.

include (cmake/print_flags.cmake)

if (TARGET global-group)
install (EXPORT global DESTINATION cmake)
endif ()
@@ -662,6 +604,7 @@ include_directories(${ConfigIncludePath})

# Add as many warnings as possible for our own code.
include (cmake/warnings.cmake)
include (cmake/print_flags.cmake)

add_subdirectory (base)
add_subdirectory (src)
@@ -18,3 +18,26 @@ if (NOT DEFINED ENV{CLION_IDE} AND NOT DEFINED ENV{XCODE_IDE})
set(CMAKE_GENERATOR "Ninja" CACHE INTERNAL "" FORCE)
endif ()
endif()


# Default toolchain - this is needed to avoid dependency on OS files.
execute_process(COMMAND uname -s OUTPUT_VARIABLE OS)
execute_process(COMMAND uname -m OUTPUT_VARIABLE ARCH)

if (OS MATCHES "Linux"
AND NOT DEFINED CMAKE_TOOLCHAIN_FILE
AND NOT DISABLE_HERMETIC_BUILD
AND ($ENV{CC} MATCHES ".*clang.*" OR CMAKE_C_COMPILER MATCHES ".*clang.*")
AND (USE_STATIC_LIBRARIES OR NOT DEFINED USE_STATIC_LIBRARIES))

if (ARCH MATCHES "amd64|x86_64")
set (CMAKE_TOOLCHAIN_FILE "cmake/linux/toolchain-x86_64.cmake" CACHE INTERNAL "" FORCE)
elseif (ARCH MATCHES "^(aarch64.*|AARCH64.*|arm64.*|ARM64.*)")
set (CMAKE_TOOLCHAIN_FILE "cmake/linux/toolchain-aarch64.cmake" CACHE INTERNAL "" FORCE)
elseif (ARCH MATCHES "^(ppc64le.*|PPC64LE.*)")
set (CMAKE_TOOLCHAIN_FILE "cmake/linux/toolchain-ppc64le.cmake" CACHE INTERNAL "" FORCE)
else ()
message (FATAL_ERROR "Unsupported architecture: ${ARCH}")
endif ()

endif()
@@ -19,9 +19,11 @@ The following versions of ClickHouse server are currently being supported with s
| 21.4 | :x: |
| 21.5 | :x: |
| 21.6 | :x: |
| 21.7 | ✅ |
| 21.7 | :x: |
| 21.8 | ✅ |
| 21.9 | ✅ |
| 21.10 | ✅ |
| 21.11 | ✅ |

## Reporting a Vulnerability

@@ -29,7 +29,8 @@ elseif (ENABLE_READLINE)
endif ()

if (USE_DEBUG_HELPERS)
set (INCLUDE_DEBUG_HELPERS "-include \"${ClickHouse_SOURCE_DIR}/base/base/iostream_debug_helpers.h\"")
get_target_property(MAGIC_ENUM_INCLUDE_DIR magic_enum INTERFACE_INCLUDE_DIRECTORIES)
set (INCLUDE_DEBUG_HELPERS "-I\"${MAGIC_ENUM_INCLUDE_DIR}\" -include \"${ClickHouse_SOURCE_DIR}/base/base/iostream_debug_helpers.h\"")
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${INCLUDE_DEBUG_HELPERS}")
endif ()

@@ -18,8 +18,8 @@ struct CachedFn
{
private:
using Traits = FnTraits<decltype(Func)>;
using DecayedArgs = TLMap<std::decay_t, typename Traits::Args>;
using Key = TLChangeRoot<std::tuple, DecayedArgs>;
using DecayedArgs = TypeListMap<std::decay_t, typename Traits::Args>;
using Key = TypeListChangeRoot<std::tuple, DecayedArgs>;
using Result = typename Traits::Ret;

std::map<Key, Result> cache; // Can't use hashmap as tuples are unhashable by default
@@ -1,6 +1,6 @@
#pragma once

#include "Typelist.h"
#include "TypeList.h"

namespace detail
{
@@ -14,7 +14,7 @@ struct FnTraits<R(A...)>
static constexpr bool value = std::is_invocable_r_v<R, F, A...>;

using Ret = R;
using Args = Typelist<A...>;
using Args = TypeList<A...>;
};

template <class R, class ...A>
@@ -53,7 +53,6 @@ protected:

String input;

private:
bool multiline;

Patterns extenders;
@@ -22,7 +22,14 @@ namespace
/// Trim ending whitespace inplace
void trim(String & s)
{
s.erase(std::find_if(s.rbegin(), s.rend(), [](int ch) { return !std::isspace(ch); }).base(), s.end());
s.erase(std::find_if(s.rbegin(), s.rend(), [](unsigned char ch) { return !std::isspace(ch); }).base(), s.end());
}

/// Check if string ends with given character after skipping whitespaces.
bool ends_with(const std::string_view & s, const std::string_view & p)
{
auto ss = std::string_view(s.data(), s.rend() - std::find_if(s.rbegin(), s.rend(), [](unsigned char ch) { return !std::isspace(ch); }));
return ss.ends_with(p);
}

std::string getEditor()
@@ -189,8 +196,28 @@ ReplxxLineReader::ReplxxLineReader(
rx.bind_key(Replxx::KEY::control('N'), [this](char32_t code) { return rx.invoke(Replxx::ACTION::HISTORY_NEXT, code); });
rx.bind_key(Replxx::KEY::control('P'), [this](char32_t code) { return rx.invoke(Replxx::ACTION::HISTORY_PREVIOUS, code); });

auto commit_action = [this](char32_t code)
{
std::string_view str = rx.get_state().text();

/// Always commit line when we see extender at the end. It will start a new prompt.
for (const auto * extender : extenders)
if (ends_with(str, extender))
return rx.invoke(Replxx::ACTION::COMMIT_LINE, code);

/// If we see an delimiter at the end, commit right away.
for (const auto * delimiter : delimiters)
if (ends_with(str, delimiter))
return rx.invoke(Replxx::ACTION::COMMIT_LINE, code);

/// If we allow multiline and there is already something in the input, start a newline.
if (multiline && !input.empty())
return rx.invoke(Replxx::ACTION::NEW_LINE, code);
return rx.invoke(Replxx::ACTION::COMMIT_LINE, code);
};
/// bind C-j to ENTER action.
rx.bind_key(Replxx::KEY::control('J'), [this](char32_t code) { return rx.invoke(Replxx::ACTION::COMMIT_LINE, code); });
rx.bind_key(Replxx::KEY::control('J'), commit_action);
rx.bind_key(Replxx::KEY::ENTER, commit_action);

/// By default COMPLETE_NEXT/COMPLETE_PREV was binded to C-p/C-n, re-bind
/// to M-P/M-N (that was used for HISTORY_COMMON_PREFIX_SEARCH before, but
44
base/base/TypeList.h
Normal file
@@ -0,0 +1,44 @@
#pragma once

#include <cstddef>
#include <type_traits>
#include <utility>
#include "defines.h"
#include "TypePair.h"

/// General-purpose typelist. Easy on compilation times as it does not use recursion.
template <typename ...Args>
struct TypeList { static constexpr size_t size = sizeof...(Args); };

namespace TypeListUtils /// In some contexts it's more handy to use functions instead of aliases
{
template <typename ...LArgs, typename ...RArgs>
constexpr TypeList<LArgs..., RArgs...> concat(TypeList<LArgs...>, TypeList<RArgs...>) { return {}; }

template <typename T, typename ...Args>
constexpr TypeList<T, Args...> prepend(TypeList<Args...>) { return {}; }

template <typename T, typename ...Args>
constexpr TypeList<Args..., T> append(TypeList<Args...>) { return {}; }

template <template <typename> typename F, typename ...Args>
constexpr TypeList<F<Args>...> map(TypeList<Args...>) { return {}; }

template <template <typename...> typename Root, typename ...Args>
constexpr Root<Args...> changeRoot(TypeList<Args...>) { return {}; }

template <typename F, typename ...Args>
constexpr void forEach(TypeList<Args...>, F && f) { (std::forward<F>(f)(Id<Args>{}), ...); }
}

template <typename TypeListLeft, typename TypeListRight>
using TypeListConcat = decltype(TypeListUtils::concat(TypeListLeft{}, TypeListRight{}));

template <typename T, typename List> using TypeListPrepend = decltype(TypeListUtils::prepend<T>(List{}));
template <typename T, typename List> using TypeListAppend = decltype(TypeListUtils::append<T>(List{}));

template <template <typename> typename F, typename List>
using TypeListMap = decltype(TypeListUtils::map<F>(List{}));

template <template <typename...> typename Root, typename List>
using TypeListChangeRoot = decltype(TypeListUtils::changeRoot<Root>(List{}));
21
base/base/TypeLists.h
Normal file
@@ -0,0 +1,21 @@
#pragma once

#include "TypeList.h"
#include "extended_types.h"
#include "Decimal.h"
#include "UUID.h"

namespace DB
{

using TypeListNativeInt = TypeList<UInt8, UInt16, UInt32, UInt64, Int8, Int16, Int32, Int64>;
using TypeListFloat = TypeList<Float32, Float64>;
using TypeListNativeNumber = TypeListConcat<TypeListNativeInt, TypeListFloat>;
using TypeListWideInt = TypeList<UInt128, Int128, UInt256, Int256>;
using TypeListInt = TypeListConcat<TypeListNativeInt, TypeListWideInt>;
using TypeListIntAndFloat = TypeListConcat<TypeListInt, TypeListFloat>;
using TypeListDecimal = TypeList<Decimal32, Decimal64, Decimal128, Decimal256>;
using TypeListNumber = TypeListConcat<TypeListIntAndFloat, TypeListDecimal>;
using TypeListNumberWithUUID = TypeListAppend<UUID, TypeListNumber>;

}
@@ -1,4 +1,4 @@
#pragma once

template <class T, class V> struct TypePair { };
template <class T> struct Id { };
template <typename T, typename V> struct TypePair {};
template <typename T> struct Id {};
@@ -1,44 +0,0 @@
#pragma once

#include <cstddef>
#include <type_traits>
#include <utility>
#include "defines.h"
#include "TypePair.h"

/// General-purpose typelist. Easy on compilation times as it does not use recursion.
template <class ...Args>
struct Typelist { static constexpr size_t size = sizeof...(Args); };

namespace TLUtils /// In some contexts it's more handy to use functions instead of aliases
{
template <class ...LArgs, class ...RArgs>
constexpr Typelist<LArgs..., RArgs...> concat(Typelist<LArgs...>, Typelist<RArgs...>) { return {}; }

template <class T, class ...Args>
constexpr Typelist<T, Args...> prepend(Typelist<Args...>) { return {}; }

template <class T, class ...Args>
constexpr Typelist<Args..., T> append(Typelist<Args...>) { return {}; }

template <template<class> class F, class ...Args>
constexpr Typelist<F<Args>...> map(Typelist<Args...>) { return {}; }

template <template<class...> class Root, class ...Args>
constexpr Root<Args...> changeRoot(Typelist<Args...>) { return {}; }

template <class F, class ...Args>
constexpr void forEach(Typelist<Args...>, F && f) { (std::forward<F>(f)(Id<Args>{}), ...); }
}

template <class TLLeft, class TLRight>
using TLConcat = decltype(TLUtils::concat(TLLeft{}, TLRight{}));

template <class T, class Typelist> using TLPrepend = decltype(TLUtils::prepend<T>(Typelist{}));
template <class T, class Typelist> using TLAppend = decltype(TLUtils::append<T>(Typelist{}));

template <template<class> class F, class Typelist>
using TLMap = decltype(TLUtils::map<F>(Typelist{}));

template <template<class...> class Root, class Typelist>
using TLChangeRoot = decltype(TLUtils::changeRoot<Root>(Typelist{}));
@@ -1,18 +0,0 @@
#pragma once

#include "Typelist.h"
#include "extended_types.h"
#include "Decimal.h"
#include "UUID.h"

namespace DB
{
using TLIntegral = Typelist<UInt8, UInt16, UInt32, UInt64, Int8, Int16, Int32, Int64, Float32, Float64>;
using TLExtendedIntegral = Typelist<UInt128, Int128, UInt256, Int256>;
using TLDecimals = Typelist<Decimal32, Decimal64, Decimal128, Decimal256>;

using TLIntegralWithExtended = TLConcat<TLIntegral, TLExtendedIntegral>;

using TLNumbers = TLConcat<TLIntegralWithExtended, TLDecimals>;
using TLNumbersWithUUID = TLAppend<UUID, TLNumbers>;
}
@@ -5,5 +5,5 @@

namespace DB
{
using UUID = StrongTypedef<UInt128, struct UUIDTag>;
using UUID = StrongTypedef<UInt128, struct UUIDTag>;
}
@@ -6,6 +6,7 @@
#include <tuple>
#include <iomanip>
#include <iostream>
#include <magic_enum.hpp>

/** Usage:
*
@@ -61,6 +62,13 @@ std::enable_if_t<priority == 2, Out> & dumpImpl(Out & out, T && x, std::decay_t<
}


template <int priority, typename Out, typename T>
std::enable_if_t<priority == 3 && std::is_enum_v<std::decay_t<T>>, Out> &
dumpImpl(Out & out, T && x)
{
return out << magic_enum::enum_name(x);
}

/// string and const char * - output not as container or pointer.

template <int priority, typename Out, typename T>
@@ -131,15 +139,26 @@ Out & dumpValue(Out & out, T && x)
template <typename Out, typename T>
Out & dump(Out & out, const char * name, T && x)
{
// Dumping string literal, printing name and demangled type is irrelevant.
if constexpr (std::is_same_v<const char *, std::decay_t<std::remove_reference_t<T>>>)
{
const auto name_len = strlen(name);
const auto value_len = strlen(x);
// `name` is the same as quoted `x`
if (name_len > 2 && value_len > 0 && name[0] == '"' && name[name_len - 1] == '"'
&& strncmp(name + 1, x, std::min(value_len, name_len) - 1) == 0)
return out << x;
}

out << demangle(typeid(x).name()) << " " << name << " = ";
return dumpValue(out, x);
return dumpValue(out, x) << "; ";
}

#ifdef __clang__
#pragma clang diagnostic ignored "-Wgnu-zero-variadic-macro-arguments"
#endif

#define DUMPVAR(VAR) ::dump(std::cerr, #VAR, (VAR)); std::cerr << "; ";
#define DUMPVAR(VAR) ::dump(std::cerr, #VAR, (VAR));
#define DUMPHEAD std::cerr << __FILE__ << ':' << __LINE__ << " [ " << getThreadId() << " ] ";
#define DUMPTAIL std::cerr << '\n';

@@ -13,30 +13,21 @@
#if defined(__linux__)
#include <sys/prctl.h>
#endif
#include <fcntl.h>
#include <errno.h>
#include <string.h>
#include <signal.h>
#include <cxxabi.h>
#include <unistd.h>

#include <typeinfo>
#include <iostream>
#include <fstream>
#include <sstream>
#include <memory>
#include <base/scope_guard.h>

#include <Poco/Observer.h>
#include <Poco/AutoPtr.h>
#include <Poco/PatternFormatter.h>
#include <Poco/Message.h>
#include <Poco/Util/Application.h>
#include <Poco/Exception.h>
#include <Poco/ErrorHandler.h>
#include <Poco/Condition.h>
#include <Poco/SyslogChannel.h>
#include <Poco/DirectoryIterator.h>

#include <base/logger_useful.h>
#include <base/ErrorHandlers.h>
@@ -56,7 +47,6 @@
#include <Common/getMultipleKeysFromConfig.h>
#include <Common/ClickHouseRevision.h>
#include <Common/Config/ConfigProcessor.h>
#include <Common/MemorySanitizer.h>
#include <Common/SymbolIndex.h>
#include <Common/getExecutablePath.h>
#include <Common/getHashOfLoadedBinary.h>
@@ -114,7 +104,7 @@ static void writeSignalIDtoSignalPipe(int sig)
errno = saved_errno;
}

/** Signal handler for HUP / USR1 */
/** Signal handler for HUP */
static void closeLogsSignalHandler(int sig, siginfo_t *, void *)
{
DENY_ALLOCATIONS_IN_SCOPE;
@@ -171,7 +161,7 @@ __attribute__((__weak__)) void collectCrashLog(


/** The thread that read info about signal or std::terminate from pipe.
* On HUP / USR1, close log files (for new files to be opened later).
* On HUP, close log files (for new files to be opened later).
* On information about std::terminate, write it to log.
* On other signals, write info to log.
*/
@@ -211,7 +201,7 @@ public:
LOG_INFO(log, "Stop SignalListener thread");
break;
}
else if (sig == SIGHUP || sig == SIGUSR1)
else if (sig == SIGHUP)
{
LOG_DEBUG(log, "Received signal to close logs.");
BaseDaemon::instance().closeLogs(BaseDaemon::instance().logger());
@@ -842,7 +832,7 @@ void BaseDaemon::initializeTerminationAndSignalProcessing()
/// SIGTSTP is added for debugging purposes. To output a stack trace of any running thread at anytime.

addSignalHandler({SIGABRT, SIGSEGV, SIGILL, SIGBUS, SIGSYS, SIGFPE, SIGPIPE, SIGTSTP, SIGTRAP}, signalHandler, &handled_signals);
addSignalHandler({SIGHUP, SIGUSR1}, closeLogsSignalHandler, &handled_signals);
addSignalHandler({SIGHUP}, closeLogsSignalHandler, &handled_signals);
addSignalHandler({SIGINT, SIGQUIT, SIGTERM}, terminateRequestedSignalHandler, &handled_signals);

#if defined(SANITIZER)
@@ -1016,7 +1006,7 @@ void BaseDaemon::setupWatchdog()

/// Forward signals to the child process.
addSignalHandler(
{SIGHUP, SIGUSR1, SIGINT, SIGQUIT, SIGTERM},
{SIGHUP, SIGINT, SIGQUIT, SIGTERM},
[](int sig, siginfo_t *, void *)
{
/// Forward all signals except INT as it can be send by terminal to the process group when user press Ctrl+C,
@@ -37,11 +37,3 @@ GraphiteWriter::GraphiteWriter(const std::string & config_name, const std::strin
root_path += sub_path;
}
}


std::string GraphiteWriter::getPerServerPath(const std::string & server_name, const std::string & root_path)
{
std::string path = root_path + "." + server_name;
std::replace(path.begin() + root_path.size() + 1, path.end(), '.', '_');
return path;
}
@@ -8,10 +8,10 @@
#include <base/logger_useful.h>


/// пишет в Graphite данные в формате
/// Writes to Graphite in the following format
/// path value timestamp\n
/// path может иметь любую вложенность. Директории разделяются с помощью "."
/// у нас принят следующий формат path - root_path.server_name.sub_path.key
/// path can be arbitrary nested. Elements are separated by '.'
/// Example: root_path.server_name.sub_path.key
class GraphiteWriter
{
public:
@@ -32,8 +32,6 @@ public:
writeImpl(key_val_vec, timestamp, custom_root_path);
}

/// возвращает путь root_path.server_name
static std::string getPerServerPath(const std::string & server_name, const std::string & root_path = "one_min");
private:
template <typename T>
void writeImpl(const T & data, time_t timestamp, const std::string & custom_root_path)
@ -1,16 +1,12 @@
|
||||
#include "OwnPatternFormatter.h"
|
||||
|
||||
#include <functional>
|
||||
#include <optional>
|
||||
#include <sys/time.h>
|
||||
#include <IO/WriteBufferFromString.h>
|
||||
#include <IO/WriteHelpers.h>
|
||||
#include <Common/HashTable/Hash.h>
|
||||
#include <Interpreters/InternalTextLogsQueue.h>
|
||||
#include <Common/CurrentThread.h>
|
||||
#include <base/getThreadId.h>
|
||||
#include <base/terminalColors.h>
|
||||
#include "Loggers.h"
|
||||
|
||||
|
||||
OwnPatternFormatter::OwnPatternFormatter(bool color_)
|
||||
|
@ -13,10 +13,7 @@ add_library (mysqlxx

target_include_directories (mysqlxx PUBLIC ..)

if (USE_INTERNAL_MYSQL_LIBRARY)
target_include_directories (mysqlxx PUBLIC "${ClickHouse_SOURCE_DIR}/contrib/mariadb-connector-c/include")
target_include_directories (mysqlxx PUBLIC "${ClickHouse_BINARY_DIR}/contrib/mariadb-connector-c/include")
else ()
if (NOT USE_INTERNAL_MYSQL_LIBRARY)
set(PLATFORM_LIBRARIES ${CMAKE_DL_LIBS})

if (USE_MYSQL)

@ -12,13 +12,13 @@ macro (add_warning flag)
if (SUPPORTS_CXXFLAG_${underscored_flag})
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -W${flag}")
else ()
message (WARNING "Flag -W${flag} is unsupported")
message (STATUS "Flag -W${flag} is unsupported")
endif ()

if (SUPPORTS_CFLAG_${underscored_flag})
set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -W${flag}")
else ()
message (WARNING "Flag -W${flag} is unsupported")
message (STATUS "Flag -W${flag} is unsupported")
endif ()

endmacro ()
@ -39,7 +39,7 @@ macro (target_add_warning target flag)
if (SUPPORTS_CXXFLAG_${underscored_flag})
target_compile_options (${target} PRIVATE "-W${flag}")
else ()
message (WARNING "Flag -W${flag} is unsupported")
message (STATUS "Flag -W${flag} is unsupported")
endif ()
endmacro ()

@ -2,11 +2,11 @@

# NOTE: has nothing common with DBMS_TCP_PROTOCOL_VERSION,
# only DBMS_TCP_PROTOCOL_VERSION should be incremented on protocol changes.
SET(VERSION_REVISION 54457)
SET(VERSION_REVISION 54458)
SET(VERSION_MAJOR 21)
SET(VERSION_MINOR 12)
SET(VERSION_MINOR 13)
SET(VERSION_PATCH 1)
SET(VERSION_GITHASH 503a418dedf0011e9040c3a1b6913e0b5488be4c)
SET(VERSION_DESCRIBE v21.12.1.1-prestable)
SET(VERSION_STRING 21.12.1.1)
SET(VERSION_GITHASH 4cc45c1e15912ee300bca7cc8b8da2b888a70e2a)
SET(VERSION_DESCRIBE v21.13.1.1-prestable)
SET(VERSION_STRING 21.13.1.1)
# end of autochange

@ -4,7 +4,7 @@ macro(find_contrib_lib LIB_NAME)
string(TOUPPER ${LIB_NAME} LIB_NAME_UC)
string(REPLACE "-" "_" LIB_NAME_UC ${LIB_NAME_UC})

option (USE_INTERNAL_${LIB_NAME_UC}_LIBRARY "Use bundled library ${LIB_NAME} instead of system" ${NOT_UNBUNDLED})
option (USE_INTERNAL_${LIB_NAME_UC}_LIBRARY "Use bundled library ${LIB_NAME} instead of system" ON)

if (NOT USE_INTERNAL_${LIB_NAME_UC}_LIBRARY)
find_package ("${LIB_NAME}")
@ -28,6 +28,9 @@ option (ARCH_NATIVE "Add -march=native compiler flag. This makes your binaries n
if (ARCH_NATIVE)
set (COMPILER_FLAGS "${COMPILER_FLAGS} -march=native")

elseif (ARCH_AARCH64)
set (COMPILER_FLAGS "${COMPILER_FLAGS} -march=armv8-a+crc")

else ()
set (TEST_FLAG "-mssse3")
set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0")
@ -43,7 +46,6 @@ else ()
set (COMPILER_FLAGS "${COMPILER_FLAGS} ${TEST_FLAG}")
endif ()

set (TEST_FLAG "-msse4.1")
set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0")
check_cxx_source_compiles("
@ -138,7 +140,7 @@ else ()
#include <immintrin.h>
int main() {
auto a = _mm512_setzero_epi32();
(void)a;
(void)a;
auto b = _mm512_add_epi16(__m512i(), __m512i());
(void)b;
return 0;
@ -160,9 +162,9 @@ else ()
" HAVE_BMI)
if (HAVE_BMI AND ENABLE_BMI)
set (COMPILER_FLAGS "${COMPILER_FLAGS} ${TEST_FLAG}")
endif ()
endif ()

#Limit avx2/avx512 flag for specific source build
# Limit avx2/avx512 flag for specific source build
set (X86_INTRINSICS_FLAGS "")
if (ENABLE_AVX2_FOR_SPEC_OP)
if (HAVE_BMI)

@ -9,7 +9,7 @@ if (NOT ENABLE_AMQPCPP)
return()
endif()

if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/AMQP-CPP/CMakeLists.txt")
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/AMQP-CPP/src")
message (WARNING "submodule contrib/AMQP-CPP is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal AMQP-CPP library")
set (USE_AMQPCPP 0)

@ -8,10 +8,9 @@ if (NOT ENABLE_AVRO)
return()
endif()

option (USE_INTERNAL_AVRO_LIBRARY
"Set to FALSE to use system avro library instead of bundled" ON) # TODO: provide unbundled support
option (USE_INTERNAL_AVRO_LIBRARY "Set to FALSE to use system avro library instead of bundled" ON)

if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/avro/lang/c++/CMakeLists.txt")
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/avro/lang")
if (USE_INTERNAL_AVRO_LIBRARY)
message(WARNING "submodule contrib/avro is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot find internal avro")

@ -7,12 +7,7 @@ if (NOT ENABLE_BROTLI)
return()
endif()

if (UNBUNDLED)
# Many system ship only dynamic brotly libraries, so we back off to bundled by default
option (USE_INTERNAL_BROTLI_LIBRARY "Set to FALSE to use system libbrotli library instead of bundled" ${USE_STATIC_LIBRARIES})
else()
option (USE_INTERNAL_BROTLI_LIBRARY "Set to FALSE to use system libbrotli library instead of bundled" ON)
endif()
option (USE_INTERNAL_BROTLI_LIBRARY "Set to FALSE to use system libbrotli library instead of bundled" ON)

if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/brotli/c/include/brotli/decode.h")
if (USE_INTERNAL_BROTLI_LIBRARY)

@ -7,9 +7,9 @@ if (NOT ENABLE_CAPNP)
return()
endif()

option (USE_INTERNAL_CAPNP_LIBRARY "Set to FALSE to use system capnproto library instead of bundled" ${NOT_UNBUNDLED})
option (USE_INTERNAL_CAPNP_LIBRARY "Set to FALSE to use system capnproto library instead of bundled" ON)

if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/capnproto/CMakeLists.txt")
if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/capnproto/c++")
if(USE_INTERNAL_CAPNP_LIBRARY)
message(WARNING "submodule contrib/capnproto is missing. to fix try run: \n git submodule update --init")
message(${RECONFIGURE_MESSAGE_LEVEL} "cannot find internal capnproto")

@ -7,7 +7,7 @@ if (NOT ENABLE_CURL)
return()
endif()

option (USE_INTERNAL_CURL "Use internal curl library" ${NOT_UNBUNDLED})
option (USE_INTERNAL_CURL "Use internal curl library" ON)

if (NOT USE_INTERNAL_CURL)
find_package (CURL)
@ -22,8 +22,6 @@ if (NOT CURL_FOUND)

# find_package(CURL) compatibility for the following packages that uses
# find_package(CURL)/include(FindCURL):
# - mariadb-connector-c
# - aws-s3-cmake
# - sentry-native
set (CURL_FOUND ON CACHE BOOL "")
set (CURL_ROOT_DIR ${CURL_LIBRARY_DIR} CACHE PATH "")
@ -1,4 +1,4 @@
option (USE_LIBCXX "Use libc++ and libc++abi instead of libstdc++" ${NOT_UNBUNDLED})
option (USE_LIBCXX "Use libc++ and libc++abi instead of libstdc++" ON)

if (NOT USE_LIBCXX)
if (USE_INTERNAL_LIBCXX_LIBRARY)
@ -10,12 +10,12 @@ if (NOT USE_LIBCXX)
return()
endif()

set(USE_INTERNAL_LIBCXX_LIBRARY_DEFAULT ${NOT_UNBUNDLED})
set(USE_INTERNAL_LIBCXX_LIBRARY_DEFAULT ON)

option (USE_INTERNAL_LIBCXX_LIBRARY "Disable to use system libcxx and libcxxabi libraries instead of bundled"
${USE_INTERNAL_LIBCXX_LIBRARY_DEFAULT})

if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libcxx/CMakeLists.txt")
if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libcxx/src")
if (USE_INTERNAL_LIBCXX_LIBRARY)
message(WARNING "submodule contrib/libcxx is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal libcxx")

@ -2,7 +2,7 @@ option (ENABLE_DATASKETCHES "Enable DataSketches" ${ENABLE_LIBRARIES})

if (ENABLE_DATASKETCHES)

option (USE_INTERNAL_DATASKETCHES_LIBRARY "Set to FALSE to use system DataSketches library instead of bundled" ${NOT_UNBUNDLED})
option (USE_INTERNAL_DATASKETCHES_LIBRARY "Set to FALSE to use system DataSketches library instead of bundled" ON)

if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/datasketches-cpp/theta/CMakeLists.txt")
if (USE_INTERNAL_DATASKETCHES_LIBRARY)

@ -22,7 +22,7 @@ endif()
# You can set USE_INTERNAL_GRPC_LIBRARY to OFF to force using the external gRPC framework, which should be installed in the system in this case.
# The external gRPC framework can be installed in the system by running
# sudo apt-get install libgrpc++-dev protobuf-compiler-grpc
option(USE_INTERNAL_GRPC_LIBRARY "Set to FALSE to use system gRPC library instead of bundled. (Experimental. Set to OFF on your own risk)" ${NOT_UNBUNDLED})
option(USE_INTERNAL_GRPC_LIBRARY "Set to FALSE to use system gRPC library instead of bundled. (Experimental. Set to OFF on your own risk)" ON)

if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/grpc/CMakeLists.txt")
if(USE_INTERNAL_GRPC_LIBRARY)

@ -1,6 +1,6 @@
# included only if ENABLE_TESTS=1

option (USE_INTERNAL_GTEST_LIBRARY "Set to FALSE to use system Google Test instead of bundled" ${NOT_UNBUNDLED})
option (USE_INTERNAL_GTEST_LIBRARY "Set to FALSE to use system Google Test instead of bundled" ON)

if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/googletest/googletest/CMakeLists.txt")
if (USE_INTERNAL_GTEST_LIBRARY)

@ -12,7 +12,7 @@ if (NOT ENABLE_ICU)
return()
endif()

option (USE_INTERNAL_ICU_LIBRARY "Set to FALSE to use system ICU library instead of bundled" ${NOT_UNBUNDLED})
option (USE_INTERNAL_ICU_LIBRARY "Set to FALSE to use system ICU library instead of bundled" ON)

if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/icu/icu4c/LICENSE")
if (USE_INTERNAL_ICU_LIBRARY)

@ -1,7 +1,3 @@
if (UNBUNDLED AND USE_STATIC_LIBRARIES)
set (ENABLE_LDAP OFF CACHE INTERNAL "")
endif()

option (ENABLE_LDAP "Enable LDAP" ${ENABLE_LIBRARIES})

if (NOT ENABLE_LDAP)
@ -11,7 +7,7 @@ if (NOT ENABLE_LDAP)
return()
endif()

option (USE_INTERNAL_LDAP_LIBRARY "Set to FALSE to use system *LDAP library instead of bundled" ${NOT_UNBUNDLED})
option (USE_INTERNAL_LDAP_LIBRARY "Set to FALSE to use system *LDAP library instead of bundled" ON)

if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/openldap/README")
if (USE_INTERNAL_LDAP_LIBRARY)

@ -7,12 +7,7 @@ if (NOT ENABLE_GSASL_LIBRARY)
return()
endif()

if (UNBUNDLED)
# when USE_STATIC_LIBRARIES we usually need to pick up hell a lot of dependencies for libgsasl
option (USE_INTERNAL_LIBGSASL_LIBRARY "Set to FALSE to use system libgsasl library instead of bundled" ${USE_STATIC_LIBRARIES})
else()
option (USE_INTERNAL_LIBGSASL_LIBRARY "Set to FALSE to use system libgsasl library instead of bundled" ON)
endif()
option (USE_INTERNAL_LIBGSASL_LIBRARY "Set to FALSE to use system libgsasl library instead of bundled" ON)

if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libgsasl/src/gsasl.h")
if (USE_INTERNAL_LIBGSASL_LIBRARY)
@ -35,7 +30,7 @@ if (LIBGSASL_LIBRARY AND LIBGSASL_INCLUDE_DIR)
elseif (NOT MISSING_INTERNAL_LIBGSASL_LIBRARY)
set (LIBGSASL_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/libgsasl/src" "${ClickHouse_SOURCE_DIR}/contrib/libgsasl/linux_x86_64/include")
set (USE_INTERNAL_LIBGSASL_LIBRARY 1)
set (LIBGSASL_LIBRARY libgsasl)
set (LIBGSASL_LIBRARY gsasl)
endif ()

if(LIBGSASL_LIBRARY AND LIBGSASL_INCLUDE_DIR)

@ -4,7 +4,7 @@ if (NOT ENABLE_LIBPQXX)
return()
endif()

if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libpqxx/CMakeLists.txt")
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libpqxx/src")
message (WARNING "submodule contrib/libpqxx is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal libpqxx library")
set (USE_LIBPQXX 0)

@ -1,4 +1,4 @@
option (USE_INTERNAL_LIBXML2_LIBRARY "Set to FALSE to use system libxml2 library instead of bundled" ${NOT_UNBUNDLED})
option (USE_INTERNAL_LIBXML2_LIBRARY "Set to FALSE to use system libxml2 library instead of bundled" ON)

if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libxml2/libxml.h")
if (USE_INTERNAL_LIBXML2_LIBRARY)
@ -7,7 +7,7 @@ if(NOT ENABLE_MSGPACK)
return()
endif()

option (USE_INTERNAL_MSGPACK_LIBRARY "Set to FALSE to use system msgpack library instead of bundled" ${NOT_UNBUNDLED})
option (USE_INTERNAL_MSGPACK_LIBRARY "Set to FALSE to use system msgpack library instead of bundled" ON)

if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/msgpack-c/include/msgpack.hpp")
if(USE_INTERNAL_MSGPACK_LIBRARY)

@ -12,7 +12,7 @@ if(NOT ENABLE_MYSQL)
return()
endif()

option(USE_INTERNAL_MYSQL_LIBRARY "Set to FALSE to use system mysqlclient library instead of bundled" ${NOT_UNBUNDLED})
option(USE_INTERNAL_MYSQL_LIBRARY "Set to FALSE to use system mysqlclient library instead of bundled" ON)

if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/mariadb-connector-c/README")
if(USE_INTERNAL_MYSQL_LIBRARY)

@ -6,7 +6,7 @@ if (NOT USE_INTERNAL_NANODBC_LIBRARY)
message (FATAL_ERROR "Only the bundled nanodbc library can be used")
endif ()

if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/nanodbc/CMakeLists.txt")
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/nanodbc/nanodbc")
message (FATAL_ERROR "submodule contrib/nanodbc is missing. to fix try run: \n git submodule update --init")
endif()

@ -13,7 +13,7 @@ if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libstemmer_c/Makefile")
return()
endif ()

if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/wordnet-blast/CMakeLists.txt")
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/wordnet-blast/wnb")
message (WARNING "submodule contrib/wordnet-blast is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal wordnet-blast library, NLP functions will be disabled")
set (USE_NLP 0)

@ -4,7 +4,7 @@ if (NOT ENABLE_NURAFT)
return()
endif()

if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/NuRaft/CMakeLists.txt")
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/NuRaft/src")
message (WARNING "submodule contrib/NuRaft is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal NuRaft library")
set (USE_NURAFT 0)

@ -19,7 +19,7 @@ if (NOT ENABLE_ODBC)
return()
endif()

option (USE_INTERNAL_ODBC_LIBRARY "Use internal ODBC library" ${NOT_UNBUNDLED})
option (USE_INTERNAL_ODBC_LIBRARY "Use internal ODBC library" ON)

if (NOT USE_INTERNAL_ODBC_LIBRARY)
find_library (LIBRARY_ODBC NAMES unixodbc odbc)

@ -13,7 +13,7 @@ if (NOT ENABLE_PARQUET)
endif()

if (NOT OS_FREEBSD) # Freebsd: ../contrib/arrow/cpp/src/arrow/util/bit-util.h:27:10: fatal error: endian.h: No such file or directory
option(USE_INTERNAL_PARQUET_LIBRARY "Set to FALSE to use system parquet library instead of bundled" ${NOT_UNBUNDLED})
option(USE_INTERNAL_PARQUET_LIBRARY "Set to FALSE to use system parquet library instead of bundled" ON)
elseif(USE_INTERNAL_PARQUET_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Using internal parquet is not supported on freebsd")
endif()

@ -11,7 +11,7 @@ endif()
# You can set USE_INTERNAL_PROTOBUF_LIBRARY to OFF to force using the external protobuf library, which should be installed in the system in this case.
# The external protobuf library can be installed in the system by running
# sudo apt-get install libprotobuf-dev protobuf-compiler libprotoc-dev
option(USE_INTERNAL_PROTOBUF_LIBRARY "Set to FALSE to use system protobuf instead of bundled. (Experimental. Set to OFF on your own risk)" ${NOT_UNBUNDLED})
option(USE_INTERNAL_PROTOBUF_LIBRARY "Set to FALSE to use system protobuf instead of bundled. (Experimental. Set to OFF on your own risk)" ON)

if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/protobuf/cmake/CMakeLists.txt")
if(USE_INTERNAL_PROTOBUF_LIBRARY)

@ -6,7 +6,7 @@ if(NOT ENABLE_RAPIDJSON)
return()
endif()

option(USE_INTERNAL_RAPIDJSON_LIBRARY "Set to FALSE to use system rapidjson library instead of bundled" ${NOT_UNBUNDLED})
option(USE_INTERNAL_RAPIDJSON_LIBRARY "Set to FALSE to use system rapidjson library instead of bundled" ON)

if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/rapidjson/include/rapidjson/rapidjson.h")
if(USE_INTERNAL_RAPIDJSON_LIBRARY)

@ -7,9 +7,9 @@ if (NOT ENABLE_RDKAFKA)
return()
endif()

option (USE_INTERNAL_RDKAFKA_LIBRARY "Set to FALSE to use system librdkafka instead of the bundled" ${NOT_UNBUNDLED})
option (USE_INTERNAL_RDKAFKA_LIBRARY "Set to FALSE to use system librdkafka instead of the bundled" ON)

if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/cppkafka/CMakeLists.txt")
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/cppkafka/src")
if(USE_INTERNAL_RDKAFKA_LIBRARY)
message (WARNING "submodule contrib/cppkafka is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal cppkafka")
@ -18,7 +18,7 @@ if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/cppkafka/CMakeLists.txt")
set (MISSING_INTERNAL_CPPKAFKA_LIBRARY 1)
endif ()

if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/librdkafka/CMakeLists.txt")
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/librdkafka/src")
if(USE_INTERNAL_RDKAFKA_LIBRARY OR MISSING_INTERNAL_CPPKAFKA_LIBRARY)
message (WARNING "submodule contrib/librdkafka is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal rdkafka")
@ -40,7 +40,7 @@ if (NOT USE_INTERNAL_RDKAFKA_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system sasl2 library needed for static librdkafka")
endif()
endif ()
set (CPPKAFKA_LIBRARY cppkafka) # TODO: try to use unbundled version.
set (CPPKAFKA_LIBRARY cppkafka)
endif ()

if (RDKAFKA_LIB AND RDKAFKA_INCLUDE_DIR)
@ -1,6 +1,6 @@
option (USE_INTERNAL_RE2_LIBRARY "Set to FALSE to use system re2 library instead of bundled [slower]" ${NOT_UNBUNDLED})
option (USE_INTERNAL_RE2_LIBRARY "Set to FALSE to use system re2 library instead of bundled [slower]" ON)

if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/re2/CMakeLists.txt")
if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/re2/re2")
if(USE_INTERNAL_RE2_LIBRARY)
message(WARNING "submodule contrib/re2 is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal re2 library")

@ -11,9 +11,9 @@ if (NOT ENABLE_ROCKSDB)
return()
endif()

option(USE_INTERNAL_ROCKSDB_LIBRARY "Set to FALSE to use system ROCKSDB library instead of bundled" ${NOT_UNBUNDLED})
option(USE_INTERNAL_ROCKSDB_LIBRARY "Set to FALSE to use system ROCKSDB library instead of bundled" ON)

if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/rocksdb/CMakeLists.txt")
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/rocksdb/include")
if (USE_INTERNAL_ROCKSDB_LIBRARY)
message (WARNING "submodule contrib is missing. to fix try run: \n git submodule update --init")
message(${RECONFIGURE_MESSAGE_LEVEL} "cannot find internal rocksdb")

@ -9,7 +9,7 @@ if (NOT EXISTS "${SENTRY_INCLUDE_DIR}/sentry.h")
return()
endif ()

if (NOT OS_FREEBSD AND NOT SPLIT_SHARED_LIBRARIES AND NOT_UNBUNDLED AND NOT (OS_DARWIN AND COMPILER_CLANG))
if (NOT OS_FREEBSD AND NOT SPLIT_SHARED_LIBRARIES AND NOT (OS_DARWIN AND COMPILER_CLANG))
option (USE_SENTRY "Use Sentry" ${ENABLE_LIBRARIES})
set (SENTRY_TRANSPORT "curl" CACHE STRING "")
set (SENTRY_BACKEND "none" CACHE STRING "")
@ -18,8 +18,6 @@ if (NOT OS_FREEBSD AND NOT SPLIT_SHARED_LIBRARIES AND NOT_UNBUNDLED AND NOT (OS_
set (SENTRY_PIC OFF CACHE BOOL "")
set (BUILD_SHARED_LIBS OFF)
message (STATUS "Using sentry=${USE_SENTRY}: ${SENTRY_LIBRARY}")

include_directories("${SENTRY_INCLUDE_DIR}")
elseif (USE_SENTRY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Sentry is not supported in current configuration")
endif ()

@ -1,4 +1,4 @@
option(USE_SNAPPY "Enable snappy library" ${ENABLE_LIBRARIES})
option(USE_SNAPPY "Enable snappy library" ON)

if(NOT USE_SNAPPY)
if (USE_INTERNAL_SNAPPY_LIBRARY)
@ -7,7 +7,7 @@ if(NOT USE_SNAPPY)
return()
endif()

option (USE_INTERNAL_SNAPPY_LIBRARY "Set to FALSE to use system snappy library instead of bundled" ${NOT_UNBUNDLED})
option (USE_INTERNAL_SNAPPY_LIBRARY "Set to FALSE to use system snappy library instead of bundled" ON)

if(NOT USE_INTERNAL_SNAPPY_LIBRARY)
find_library(SNAPPY_LIBRARY snappy)

@ -9,7 +9,7 @@ if(NOT ENABLE_SSL)
return()
endif()

option(USE_INTERNAL_SSL_LIBRARY "Set to FALSE to use system *ssl library instead of bundled" ${NOT_UNBUNDLED})
option(USE_INTERNAL_SSL_LIBRARY "Set to FALSE to use system *ssl library instead of bundled" ON)

if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/boringssl/README.md")
if(USE_INTERNAL_SSL_LIBRARY)

@ -1,24 +0,0 @@
option(ENABLE_STATS "Enable StatsLib library" ${ENABLE_LIBRARIES})

if (ENABLE_STATS)
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/stats")
message (WARNING "submodule contrib/stats is missing. to fix try run: \n git submodule update --init")
set (ENABLE_STATS 0)
set (USE_STATS 0)
elseif (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/gcem")
message (WARNING "submodule contrib/gcem is missing. to fix try run: \n git submodule update --init")
set (ENABLE_STATS 0)
set (USE_STATS 0)
else()
set(STATS_INCLUDE_DIR ${ClickHouse_SOURCE_DIR}/contrib/stats/include)
set(GCEM_INCLUDE_DIR ${ClickHouse_SOURCE_DIR}/contrib/gcem/include)
set (USE_STATS 1)
endif()

if (NOT USE_STATS)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't enable stats library")
endif()
endif()

message (STATUS "Using stats=${USE_STATS} : ${STATS_INCLUDE_DIR}")
message (STATUS "Using gcem=${USE_STATS}: ${GCEM_INCLUDE_DIR}")

@ -1,4 +1,4 @@
option (USE_INTERNAL_XZ_LIBRARY "Set to OFF to use system xz (lzma) library instead of bundled" ${NOT_UNBUNDLED})
option (USE_INTERNAL_XZ_LIBRARY "Set to OFF to use system xz (lzma) library instead of bundled" ON)

if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/xz/src/liblzma/api/lzma.h")
if(USE_INTERNAL_XZ_LIBRARY)

@ -1,4 +1,4 @@
option (USE_INTERNAL_ZLIB_LIBRARY "Set to FALSE to use system zlib library instead of bundled" ${NOT_UNBUNDLED})
option (USE_INTERNAL_ZLIB_LIBRARY "Set to FALSE to use system zlib library instead of bundled" ON)

if (NOT MSVC)
set (INTERNAL_ZLIB_NAME "zlib-ng" CACHE INTERNAL "")
@ -29,9 +29,6 @@ if (NOT USE_INTERNAL_ZLIB_LIBRARY)
endif ()

if (NOT ZLIB_FOUND AND NOT MISSING_INTERNAL_ZLIB_LIBRARY)
# https://github.com/zlib-ng/zlib-ng/pull/733
# This is disabed by default
add_compile_definitions(Z_TLS=__thread)
set (USE_INTERNAL_ZLIB_LIBRARY 1)
set (ZLIB_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/${INTERNAL_ZLIB_NAME}" "${ClickHouse_BINARY_DIR}/contrib/${INTERNAL_ZLIB_NAME}" CACHE INTERNAL "") # generated zconf.h
set (ZLIB_INCLUDE_DIRS ${ZLIB_INCLUDE_DIR}) # for poco

@ -1,4 +1,4 @@
option (USE_INTERNAL_ZSTD_LIBRARY "Set to FALSE to use system zstd library instead of bundled" ${NOT_UNBUNDLED})
option (USE_INTERNAL_ZSTD_LIBRARY "Set to FALSE to use system zstd library instead of bundled" ON)

if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/zstd/lib/zstd.h")
if(USE_INTERNAL_ZSTD_LIBRARY)
@ -3,17 +3,13 @@ set (CMAKE_SYSTEM_PROCESSOR "aarch64")
set (CMAKE_C_COMPILER_TARGET "aarch64-unknown-freebsd12")
set (CMAKE_CXX_COMPILER_TARGET "aarch64-unknown-freebsd12")
set (CMAKE_ASM_COMPILER_TARGET "aarch64-unknown-freebsd12")
set (CMAKE_SYSROOT "${CMAKE_CURRENT_LIST_DIR}/../toolchain/freebsd-aarch64")
set (CMAKE_SYSROOT "${CMAKE_CURRENT_LIST_DIR}/../../contrib/sysroot/freebsd-aarch64")

set (CMAKE_TRY_COMPILE_TARGET_TYPE STATIC_LIBRARY) # disable linkage check - it doesn't work in CMake

set (CMAKE_AR "/usr/bin/ar" CACHE FILEPATH "" FORCE)
set (CMAKE_RANLIB "/usr/bin/ranlib" CACHE FILEPATH "" FORCE)

set (LINKER_NAME "ld.lld" CACHE STRING "" FORCE)

set (CMAKE_EXE_LINKER_FLAGS_INIT "-fuse-ld=lld")
set (CMAKE_SHARED_LINKER_FLAGS_INIT "-fuse-ld=lld")
# Will be changed later, but somehow needed to be set here.
set (CMAKE_AR "ar")
set (CMAKE_RANLIB "ranlib")

set (HAS_PRE_1970_EXITCODE "0" CACHE STRING "Result from TRY_RUN" FORCE)
set (HAS_PRE_1970_EXITCODE__TRYRUN_OUTPUT "" CACHE STRING "Output from TRY_RUN" FORCE)

@ -3,17 +3,13 @@ set (CMAKE_SYSTEM_PROCESSOR "x86_64")
set (CMAKE_C_COMPILER_TARGET "x86_64-pc-freebsd11")
set (CMAKE_CXX_COMPILER_TARGET "x86_64-pc-freebsd11")
set (CMAKE_ASM_COMPILER_TARGET "x86_64-pc-freebsd11")
set (CMAKE_SYSROOT "${CMAKE_CURRENT_LIST_DIR}/../toolchain/freebsd-x86_64")
set (CMAKE_SYSROOT "${CMAKE_CURRENT_LIST_DIR}/../../contrib/sysroot/freebsd-x86_64")

set (CMAKE_TRY_COMPILE_TARGET_TYPE STATIC_LIBRARY) # disable linkage check - it doesn't work in CMake

set (CMAKE_AR "/usr/bin/ar" CACHE FILEPATH "" FORCE)
set (CMAKE_RANLIB "/usr/bin/ranlib" CACHE FILEPATH "" FORCE)

set (LINKER_NAME "ld.lld" CACHE STRING "" FORCE)

set (CMAKE_EXE_LINKER_FLAGS_INIT "-fuse-ld=lld")
set (CMAKE_SHARED_LINKER_FLAGS_INIT "-fuse-ld=lld")
# Will be changed later, but somehow needed to be set here.
set (CMAKE_AR "ar")
set (CMAKE_RANLIB "ranlib")

set (HAS_PRE_1970_EXITCODE "0" CACHE STRING "Result from TRY_RUN" FORCE)
set (HAS_PRE_1970_EXITCODE__TRYRUN_OUTPUT "" CACHE STRING "Output from TRY_RUN" FORCE)

@ -5,8 +5,12 @@ set (DEFAULT_LIBS "-nodefaultlibs")

# We need builtins from Clang's RT even without libcxx - for ubsan+int128.
# See https://bugs.llvm.org/show_bug.cgi?id=16404
if (COMPILER_CLANG AND NOT CMAKE_CROSSCOMPILING)
execute_process (COMMAND ${CMAKE_CXX_COMPILER} --print-libgcc-file-name --rtlib=compiler-rt OUTPUT_VARIABLE BUILTINS_LIBRARY OUTPUT_STRIP_TRAILING_WHITESPACE)
if (COMPILER_CLANG)
execute_process (COMMAND ${CMAKE_CXX_COMPILER} --target=${CMAKE_CXX_COMPILER_TARGET} --print-libgcc-file-name --rtlib=compiler-rt OUTPUT_VARIABLE BUILTINS_LIBRARY OUTPUT_STRIP_TRAILING_WHITESPACE)

if (NOT EXISTS "${BUILTINS_LIBRARY}")
set (BUILTINS_LIBRARY "-lgcc")
endif ()
else ()
set (BUILTINS_LIBRARY "-lgcc")
endif ()
@ -25,14 +29,6 @@ message(STATUS "Default libraries: ${DEFAULT_LIBS}")
set(CMAKE_CXX_STANDARD_LIBRARIES ${DEFAULT_LIBS})
set(CMAKE_C_STANDARD_LIBRARIES ${DEFAULT_LIBS})

# glibc-compatibility library relies to constant version of libc headers
# (because minor changes in function attributes between different glibc versions will introduce incompatibilities)
# This is for x86_64. For other architectures we have separate toolchains.
if (ARCH_AMD64 AND NOT_UNBUNDLED AND NOT CMAKE_CROSSCOMPILING)
set(CMAKE_C_STANDARD_INCLUDE_DIRECTORIES ${ClickHouse_SOURCE_DIR}/contrib/libc-headers/x86_64-linux-gnu ${ClickHouse_SOURCE_DIR}/contrib/libc-headers)
set(CMAKE_CXX_STANDARD_INCLUDE_DIRECTORIES ${ClickHouse_SOURCE_DIR}/contrib/libc-headers/x86_64-linux-gnu ${ClickHouse_SOURCE_DIR}/contrib/libc-headers)
endif ()

# Unfortunately '-pthread' doesn't work with '-nodefaultlibs'.
# Just make sure we have pthreads at all.
set(THREADS_PREFER_PTHREAD_FLAG ON)

@ -6,25 +6,18 @@ set (CMAKE_C_COMPILER_TARGET "aarch64-linux-gnu")
set (CMAKE_CXX_COMPILER_TARGET "aarch64-linux-gnu")
set (CMAKE_ASM_COMPILER_TARGET "aarch64-linux-gnu")

# Will be changed later, but somehow needed to be set here.
set (CMAKE_AR "ar")
set (CMAKE_RANLIB "ranlib")

set (TOOLCHAIN_PATH "${CMAKE_CURRENT_LIST_DIR}/../../contrib/sysroot/linux-aarch64")

set (CMAKE_SYSROOT "${TOOLCHAIN_PATH}/aarch64-linux-gnu/libc")

find_program (LLVM_AR_PATH NAMES "llvm-ar" "llvm-ar-13" "llvm-ar-12" "llvm-ar-11" "llvm-ar-10" "llvm-ar-9" "llvm-ar-8")
find_program (LLVM_RANLIB_PATH NAMES "llvm-ranlib" "llvm-ranlib-13" "llvm-ranlib-12" "llvm-ranlib-11" "llvm-ranlib-10" "llvm-ranlib-9")

set (CMAKE_AR "${LLVM_AR_PATH}" CACHE FILEPATH "" FORCE)
set (CMAKE_RANLIB "${LLVM_RANLIB_PATH}" CACHE FILEPATH "" FORCE)

set (CMAKE_C_FLAGS_INIT "${CMAKE_C_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}")
set (CMAKE_CXX_FLAGS_INIT "${CMAKE_CXX_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}")
set (CMAKE_ASM_FLAGS_INIT "${CMAKE_ASM_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}")

set (LINKER_NAME "ld.lld" CACHE STRING "" FORCE)

set (CMAKE_EXE_LINKER_FLAGS_INIT "-fuse-ld=lld")
set (CMAKE_SHARED_LINKER_FLAGS_INIT "-fuse-ld=lld")

set (HAS_PRE_1970_EXITCODE "0" CACHE STRING "Result from TRY_RUN" FORCE)
set (HAS_PRE_1970_EXITCODE__TRYRUN_OUTPUT "" CACHE STRING "Output from TRY_RUN" FORCE)

@ -6,25 +6,18 @@ set (CMAKE_C_COMPILER_TARGET "ppc64le-linux-gnu")
set (CMAKE_CXX_COMPILER_TARGET "ppc64le-linux-gnu")
set (CMAKE_ASM_COMPILER_TARGET "ppc64le-linux-gnu")

# Will be changed later, but somehow needed to be set here.
set (CMAKE_AR "ar")
set (CMAKE_RANLIB "ranlib")

set (TOOLCHAIN_PATH "${CMAKE_CURRENT_LIST_DIR}/../../contrib/sysroot/linux-powerpc64le")

set (CMAKE_SYSROOT "${TOOLCHAIN_PATH}/powerpc64le-linux-gnu/libc")

find_program (LLVM_AR_PATH NAMES "llvm-ar" "llvm-ar-13" "llvm-ar-12" "llvm-ar-11" "llvm-ar-10" "llvm-ar-9" "llvm-ar-8")
find_program (LLVM_RANLIB_PATH NAMES "llvm-ranlib" "llvm-ranlib-13" "llvm-ranlib-12" "llvm-ranlib-11" "llvm-ranlib-10" "llvm-ranlib-9")

set (CMAKE_AR "${LLVM_AR_PATH}" CACHE FILEPATH "" FORCE)
set (CMAKE_RANLIB "${LLVM_RANLIB_PATH}" CACHE FILEPATH "" FORCE)

set (CMAKE_C_FLAGS_INIT "${CMAKE_C_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}")
set (CMAKE_CXX_FLAGS_INIT "${CMAKE_CXX_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}")
set (CMAKE_ASM_FLAGS_INIT "${CMAKE_ASM_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}")

set (LINKER_NAME "ld.lld" CACHE STRING "" FORCE)

set (CMAKE_EXE_LINKER_FLAGS_INIT "-fuse-ld=lld")
set (CMAKE_SHARED_LINKER_FLAGS_INIT "-fuse-ld=lld")

set (HAS_PRE_1970_EXITCODE "0" CACHE STRING "Result from TRY_RUN" FORCE)
set (HAS_PRE_1970_EXITCODE__TRYRUN_OUTPUT "" CACHE STRING "Output from TRY_RUN" FORCE)
@ -6,22 +6,18 @@ set (CMAKE_C_COMPILER_TARGET "riscv64-linux-gnu")
set (CMAKE_CXX_COMPILER_TARGET "riscv64-linux-gnu")
set (CMAKE_ASM_COMPILER_TARGET "riscv64-linux-gnu")

# Will be changed later, but somehow needed to be set here.
set (CMAKE_AR "ar")
set (CMAKE_RANLIB "ranlib")

set (TOOLCHAIN_PATH "${CMAKE_CURRENT_LIST_DIR}/../../contrib/sysroot/linux-riscv64")

set (CMAKE_SYSROOT "${TOOLCHAIN_PATH}")

find_program (LLVM_AR_PATH NAMES "llvm-ar" "llvm-ar-13" "llvm-ar-12" "llvm-ar-11" "llvm-ar-10" "llvm-ar-9" "llvm-ar-8")
find_program (LLVM_RANLIB_PATH NAMES "llvm-ranlib" "llvm-ranlib-13" "llvm-ranlib-12" "llvm-ranlib-11" "llvm-ranlib-10" "llvm-ranlib-9")

set (CMAKE_AR "${LLVM_AR_PATH}" CACHE FILEPATH "" FORCE)
set (CMAKE_RANLIB "${LLVM_RANLIB_PATH}" CACHE FILEPATH "" FORCE)

set (CMAKE_C_FLAGS_INIT "${CMAKE_C_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}")
set (CMAKE_CXX_FLAGS_INIT "${CMAKE_CXX_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}")
set (CMAKE_ASM_FLAGS_INIT "${CMAKE_ASM_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}")

set (LINKER_NAME "riscv64-linux-gnu-ld.bfd" CACHE STRING "" FORCE)

set (CMAKE_EXE_LINKER_FLAGS_INIT "-fuse-ld=bfd")
set (CMAKE_SHARED_LINKER_FLAGS_INIT "-fuse-ld=bfd")

@ -6,25 +6,18 @@ set (CMAKE_C_COMPILER_TARGET "x86_64-linux-musl")
set (CMAKE_CXX_COMPILER_TARGET "x86_64-linux-musl")
set (CMAKE_ASM_COMPILER_TARGET "x86_64-linux-musl")

# Will be changed later, but somehow needed to be set here.
set (CMAKE_AR "ar")
set (CMAKE_RANLIB "ranlib")

set (TOOLCHAIN_PATH "${CMAKE_CURRENT_LIST_DIR}/../../contrib/sysroot/linux-x86_64-musl")

set (CMAKE_SYSROOT "${TOOLCHAIN_PATH}")

find_program (LLVM_AR_PATH NAMES "llvm-ar" "llvm-ar-13" "llvm-ar-12" "llvm-ar-11" "llvm-ar-10" "llvm-ar-9" "llvm-ar-8")
find_program (LLVM_RANLIB_PATH NAMES "llvm-ranlib" "llvm-ranlib-13" "llvm-ranlib-12" "llvm-ranlib-11" "llvm-ranlib-10" "llvm-ranlib-9")

set (CMAKE_AR "${LLVM_AR_PATH}" CACHE FILEPATH "" FORCE)
set (CMAKE_RANLIB "${LLVM_RANLIB_PATH}" CACHE FILEPATH "" FORCE)

set (CMAKE_C_FLAGS_INIT "${CMAKE_C_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}")
set (CMAKE_CXX_FLAGS_INIT "${CMAKE_CXX_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}")
set (CMAKE_ASM_FLAGS_INIT "${CMAKE_ASM_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}")

set (LINKER_NAME "ld.lld" CACHE STRING "" FORCE)

set (CMAKE_EXE_LINKER_FLAGS_INIT "-fuse-ld=lld")
set (CMAKE_SHARED_LINKER_FLAGS_INIT "-fuse-ld=lld")

set (HAS_PRE_1970_EXITCODE "0" CACHE STRING "Result from TRY_RUN" FORCE)
set (HAS_PRE_1970_EXITCODE__TRYRUN_OUTPUT "" CACHE STRING "Output from TRY_RUN" FORCE)

@ -6,25 +6,18 @@ set (CMAKE_C_COMPILER_TARGET "x86_64-linux-gnu")
set (CMAKE_CXX_COMPILER_TARGET "x86_64-linux-gnu")
set (CMAKE_ASM_COMPILER_TARGET "x86_64-linux-gnu")

# Will be changed later, but somehow needed to be set here.
set (CMAKE_AR "ar")
set (CMAKE_RANLIB "ranlib")

set (TOOLCHAIN_PATH "${CMAKE_CURRENT_LIST_DIR}/../../contrib/sysroot/linux-x86_64")

set (CMAKE_SYSROOT "${TOOLCHAIN_PATH}/x86_64-linux-gnu/libc")

find_program (LLVM_AR_PATH NAMES "llvm-ar" "llvm-ar-13" "llvm-ar-12" "llvm-ar-11" "llvm-ar-10" "llvm-ar-9" "llvm-ar-8")
find_program (LLVM_RANLIB_PATH NAMES "llvm-ranlib" "llvm-ranlib-13" "llvm-ranlib-12" "llvm-ranlib-11" "llvm-ranlib-10" "llvm-ranlib-9")

set (CMAKE_AR "${LLVM_AR_PATH}" CACHE FILEPATH "" FORCE)
set (CMAKE_RANLIB "${LLVM_RANLIB_PATH}" CACHE FILEPATH "" FORCE)

set (CMAKE_C_FLAGS_INIT "${CMAKE_C_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}")
set (CMAKE_CXX_FLAGS_INIT "${CMAKE_CXX_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}")
set (CMAKE_ASM_FLAGS_INIT "${CMAKE_ASM_FLAGS} --gcc-toolchain=${TOOLCHAIN_PATH}")

set (LINKER_NAME "ld.lld" CACHE STRING "" FORCE)

set (CMAKE_EXE_LINKER_FLAGS_INIT "-fuse-ld=lld")
set (CMAKE_SHARED_LINKER_FLAGS_INIT "-fuse-ld=lld")

set (HAS_PRE_1970_EXITCODE "0" CACHE STRING "Result from TRY_RUN" FORCE)
set (HAS_PRE_1970_EXITCODE__TRYRUN_OUTPUT "" CACHE STRING "Output from TRY_RUN" FORCE)

@ -1,6 +1,12 @@
set (FULL_C_FLAGS "${CMAKE_C_FLAGS} ${CMAKE_C_FLAGS_${CMAKE_BUILD_TYPE_UC}}")
set (FULL_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${CMAKE_CXX_FLAGS_${CMAKE_BUILD_TYPE_UC}}")
set (FULL_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${CMAKE_EXE_LINKER_FLAGS_${CMAKE_BUILD_TYPE_UC}}")

message (STATUS "compiler C = ${CMAKE_C_COMPILER} ${FULL_C_FLAGS}")
message (STATUS "compiler CXX = ${CMAKE_CXX_COMPILER} ${FULL_CXX_FLAGS}")
message (STATUS "LINKER_FLAGS = ${FULL_EXE_LINKER_FLAGS}")

# Reproducible builds
string (REPLACE "${CMAKE_SOURCE_DIR}" "." FULL_C_FLAGS_NORMALIZED "${FULL_C_FLAGS}")
string (REPLACE "${CMAKE_SOURCE_DIR}" "." FULL_CXX_FLAGS_NORMALIZED "${FULL_CXX_FLAGS}")
string (REPLACE "${CMAKE_SOURCE_DIR}" "." FULL_EXE_LINKER_FLAGS_NORMALIZED "${FULL_EXE_LINKER_FLAGS}")

@ -78,6 +78,15 @@ if (SANITIZE)

elseif (SANITIZE STREQUAL "undefined")
set (UBSAN_FLAGS "-fsanitize=undefined -fno-sanitize-recover=all -fno-sanitize=float-divide-by-zero")
if (ENABLE_FUZZING)
# Unsigned integer overflow is well defined behaviour from a perspective of C++ standard,
# compilers or CPU. We use in hash functions like SipHash and many other places in our codebase.
# This flag is needed only because fuzzers are run inside oss-fuzz infrastructure
# and they have a bunch of flags not halt the program if UIO happend and even to silence that warnings.
# But for unknown reason that flags don't work with ClickHouse or we don't understand how to properly use them,
# that's why we often receive reports about UIO. The simplest way to avoid this is just set this flag here.
set(UBSAN_FLAGS "${SAN_FLAGS} -fno-sanitize=unsigned-integer-overflow")
endif()
if (COMPILER_CLANG)
set (UBSAN_FLAGS "${UBSAN_FLAGS} -fsanitize-blacklist=${CMAKE_SOURCE_DIR}/tests/ubsan_suppressions.txt")
else()
@ -1,3 +1,5 @@
# Compiler

if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
set (COMPILER_GCC 1)
elseif (CMAKE_CXX_COMPILER_ID MATCHES "AppleClang")
@ -6,6 +8,8 @@ elseif (CMAKE_CXX_COMPILER_ID MATCHES "Clang")
set (COMPILER_CLANG 1)
endif ()

execute_process(COMMAND ${CMAKE_CXX_COMPILER} --version)

if (COMPILER_GCC)
# Require minimum version of gcc
set (GCC_MINIMUM_VERSION 11)
@ -18,9 +22,10 @@ if (COMPILER_GCC)
elseif (COMPILER_CLANG)
# Require minimum version of clang/apple-clang
if (CMAKE_CXX_COMPILER_ID MATCHES "AppleClang")
# If you are developer you can figure out what exact versions of AppleClang are Ok,
# simply remove the following line.
message (FATAL_ERROR "AppleClang is not supported, you should install clang from brew. See the instruction: https://clickhouse.com/docs/en/development/build-osx/")
# (Experimental!) Specify "-DALLOW_APPLECLANG=ON" when running CMake configuration step, if you want to experiment with using it.
if (NOT ALLOW_APPLECLANG AND NOT DEFINED ENV{ALLOW_APPLECLANG})
message (FATAL_ERROR "AppleClang is not supported, you should install clang from brew. See the instruction: https://clickhouse.com/docs/en/development/build-osx/")
endif ()

# AppleClang 10.0.1 (Xcode 10.2) corresponds to LLVM/Clang upstream version 7.0.0
# AppleClang 11.0.0 (Xcode 11.0) corresponds to LLVM/Clang upstream version 8.0.0
@ -44,8 +49,10 @@ else ()
message (WARNING "You are using an unsupported compiler. Compilation has only been tested with Clang and GCC.")
endif ()

STRING(REGEX MATCHALL "[0-9]+" COMPILER_VERSION_LIST ${CMAKE_CXX_COMPILER_VERSION})
LIST(GET COMPILER_VERSION_LIST 0 COMPILER_VERSION_MAJOR)
string (REGEX MATCHALL "[0-9]+" COMPILER_VERSION_LIST ${CMAKE_CXX_COMPILER_VERSION})
list (GET COMPILER_VERSION_LIST 0 COMPILER_VERSION_MAJOR)

# Linker

# Example values: `lld-10`, `gold`.
option (LINKER_NAME "Linker name or full path")
@ -93,3 +100,69 @@ if (LINKER_NAME)

message(STATUS "Using custom linker by name: ${LINKER_NAME}")
endif ()

# Archiver

if (COMPILER_GCC)
find_program (LLVM_AR_PATH NAMES "llvm-ar" "llvm-ar-13" "llvm-ar-12" "llvm-ar-11")
else ()
find_program (LLVM_AR_PATH NAMES "llvm-ar-${COMPILER_VERSION_MAJOR}" "llvm-ar")
endif ()

if (LLVM_AR_PATH)
set (CMAKE_AR "${LLVM_AR_PATH}")
endif ()

# Ranlib

if (COMPILER_GCC)
find_program (LLVM_RANLIB_PATH NAMES "llvm-ranlib" "llvm-ranlib-13" "llvm-ranlib-12" "llvm-ranlib-11")
else ()
find_program (LLVM_RANLIB_PATH NAMES "llvm-ranlib-${COMPILER_VERSION_MAJOR}" "llvm-ranlib")
endif ()

if (LLVM_RANLIB_PATH)
set (CMAKE_RANLIB "${LLVM_RANLIB_PATH}")
endif ()

# Install Name Tool

if (COMPILER_GCC)
find_program (LLVM_INSTALL_NAME_TOOL_PATH NAMES "llvm-install-name-tool" "llvm-install-name-tool-13" "llvm-install-name-tool-12" "llvm-install-name-tool-11")
else ()
find_program (LLVM_INSTALL_NAME_TOOL_PATH NAMES "llvm-install-name-tool-${COMPILER_VERSION_MAJOR}" "llvm-install-name-tool")
endif ()

if (LLVM_INSTALL_NAME_TOOL_PATH)
set (CMAKE_INSTALL_NAME_TOOL "${LLVM_INSTALL_NAME_TOOL_PATH}")
endif ()

# Objcopy

if (COMPILER_GCC)
find_program (OBJCOPY_PATH NAMES "llvm-objcopy" "llvm-objcopy-13" "llvm-objcopy-12" "llvm-objcopy-11" "objcopy")
else ()
find_program (OBJCOPY_PATH NAMES "llvm-objcopy-${COMPILER_VERSION_MAJOR}" "llvm-objcopy" "objcopy")
endif ()

if (NOT OBJCOPY_PATH AND OS_DARWIN)
find_program (BREW_PATH NAMES "brew")
if (BREW_PATH)
execute_process (COMMAND ${BREW_PATH} --prefix llvm ERROR_QUIET OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE LLVM_PREFIX)
if (LLVM_PREFIX)
find_program (OBJCOPY_PATH NAMES "llvm-objcopy" PATHS "${LLVM_PREFIX}/bin" NO_DEFAULT_PATH)
endif ()
if (NOT OBJCOPY_PATH)
execute_process (COMMAND ${BREW_PATH} --prefix binutils ERROR_QUIET OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE BINUTILS_PREFIX)
if (BINUTILS_PREFIX)
find_program (OBJCOPY_PATH NAMES "objcopy" PATHS "${BINUTILS_PREFIX}/bin" NO_DEFAULT_PATH)
endif ()
endif ()
endif ()
endif ()

if (OBJCOPY_PATH)
message (STATUS "Using objcopy: ${OBJCOPY_PATH}")
else ()
message (FATAL_ERROR "Cannot find objcopy.")
endif ()

@ -7,9 +7,7 @@
# - sometimes warnings from 3rd party libraries may come from macro substitutions in our code
# and we have to wrap them with #pragma GCC/clang diagnostic ignored

if (NOT MSVC)
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wextra")
endif ()
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wextra")

# Add some warnings that are not available even with -Wall -Wextra -Wpedantic.
# Intended for exploration of new compiler warnings that may be found useful.
@ -28,46 +26,6 @@ if (COMPILER_CLANG)
no_warning(zero-length-array)
no_warning(c11-extensions)

add_warning(comma)
add_warning(conditional-uninitialized)
add_warning(covered-switch-default)
add_warning(deprecated)
add_warning(embedded-directive)
add_warning(empty-init-stmt) # linux-only
add_warning(extra-semi-stmt) # linux-only
add_warning(extra-semi)
add_warning(gnu-case-range)
add_warning(inconsistent-missing-destructor-override)
add_warning(newline-eof)
add_warning(old-style-cast)
add_warning(range-loop-analysis)
add_warning(redundant-parens)
add_warning(reserved-id-macro)
add_warning(shadow-field)
add_warning(shadow-uncaptured-local)
add_warning(shadow)
add_warning(string-plus-int)
add_warning(undef)
add_warning(unreachable-code-return)
add_warning(unreachable-code)
add_warning(unused-exception-parameter)
add_warning(unused-macros)
add_warning(unused-member-function)
add_warning(unneeded-internal-declaration)
add_warning(implicit-int-float-conversion)
add_warning(no-delete-null-pointer-checks)
add_warning(anon-enum-enum-conversion)
add_warning(assign-enum)
add_warning(bitwise-op-parentheses)
add_warning(int-in-bool-context)
add_warning(sometimes-uninitialized)
add_warning(tautological-bitwise-compare)

# XXX: libstdc++ has some of these for 3way compare
if (USE_LIBCXX)
add_warning(zero-as-null-pointer-constant)
endif()

if (WEVERYTHING)
add_warning(everything)
no_warning(c++98-compat-pedantic)
@ -103,6 +61,47 @@ if (COMPILER_CLANG)
endif()

# TODO Enable conversion, sign-conversion, double-promotion warnings.
else ()
add_warning(comma)
add_warning(conditional-uninitialized)
add_warning(covered-switch-default)
add_warning(deprecated)
add_warning(embedded-directive)
add_warning(empty-init-stmt) # linux-only
add_warning(extra-semi-stmt) # linux-only
add_warning(extra-semi)
add_warning(gnu-case-range)
add_warning(inconsistent-missing-destructor-override)
add_warning(newline-eof)
add_warning(old-style-cast)
add_warning(range-loop-analysis)
add_warning(redundant-parens)
add_warning(reserved-id-macro)
add_warning(shadow-field)
add_warning(shadow-uncaptured-local)
add_warning(shadow)
add_warning(string-plus-int)
add_warning(undef)
add_warning(unreachable-code-return)
add_warning(unreachable-code)
add_warning(unused-exception-parameter)
add_warning(unused-macros)
add_warning(unused-member-function)
add_warning(unneeded-internal-declaration)
add_warning(implicit-int-float-conversion)
add_warning(no-delete-null-pointer-checks)
add_warning(anon-enum-enum-conversion)
add_warning(assign-enum)
add_warning(bitwise-op-parentheses)
add_warning(int-in-bool-context)
add_warning(sometimes-uninitialized)
add_warning(tautological-bitwise-compare)

# XXX: libstdc++ has some of these for 3way compare
if (USE_LIBCXX)
add_warning(zero-as-null-pointer-constant)
endif()

endif ()
elseif (COMPILER_GCC)
# Add compiler options only to c++ compiler
82
contrib/CMakeLists.txt
vendored
@ -53,7 +53,7 @@ if (USE_YAML_CPP)
endif()

if (USE_INTERNAL_XZ_LIBRARY)
add_subdirectory (xz)
add_subdirectory (xz-cmake)
endif()

add_subdirectory (poco-cmake)
@ -66,9 +66,7 @@ if (USE_INTERNAL_ZSTD_LIBRARY)
endif ()

if (USE_INTERNAL_RE2_LIBRARY)
set(RE2_BUILD_TESTING 0 CACHE INTERNAL "")
add_subdirectory (re2)
add_subdirectory (re2_st)
add_subdirectory (re2-cmake)
endif ()

if (USE_INTERNAL_DOUBLE_CONVERSION_LIBRARY)
@ -84,23 +82,10 @@ if (USE_INTERNAL_FARMHASH_LIBRARY)
endif ()

if (USE_INTERNAL_ZLIB_LIBRARY)
set (ZLIB_ENABLE_TESTS 0 CACHE INTERNAL "")
set (SKIP_INSTALL_ALL 1 CACHE INTERNAL "")
set (ZLIB_COMPAT 1 CACHE INTERNAL "") # also enables WITH_GZFILEOP
set (WITH_NATIVE_INSTRUCTIONS ${ARCH_NATIVE} CACHE INTERNAL "")
if (OS_FREEBSD OR ARCH_I386)
set (WITH_OPTIM 0 CACHE INTERNAL "") # Bug in assembler
endif ()
if (ARCH_AARCH64)
set(WITH_NEON 1 CACHE INTERNAL "")
set(WITH_ACLE 1 CACHE INTERNAL "")
endif ()

add_subdirectory (${INTERNAL_ZLIB_NAME})
# We should use same defines when including zlib.h as used when zlib compiled
target_compile_definitions (zlib PUBLIC ZLIB_COMPAT WITH_GZFILEOP)
if (ARCH_AMD64 OR ARCH_AARCH64)
target_compile_definitions (zlib PUBLIC X86_64 UNALIGNED_OK)
if (INTERNAL_ZLIB_NAME STREQUAL "zlib-ng")
add_subdirectory (zlib-ng-cmake)
else ()
add_subdirectory (${INTERNAL_ZLIB_NAME})
endif ()
endif ()

@ -119,28 +104,8 @@ if (USE_INTERNAL_LDAP_LIBRARY)
add_subdirectory (openldap-cmake)
endif ()

function(mysql_support)
set(CLIENT_PLUGIN_CACHING_SHA2_PASSWORD STATIC)
set(CLIENT_PLUGIN_SHA256_PASSWORD STATIC)
set(CLIENT_PLUGIN_REMOTE_IO OFF)
set(CLIENT_PLUGIN_DIALOG OFF)
set(CLIENT_PLUGIN_AUTH_GSSAPI_CLIENT OFF)
set(CLIENT_PLUGIN_CLIENT_ED25519 OFF)
set(CLIENT_PLUGIN_MYSQL_CLEAR_PASSWORD OFF)
set(SKIP_TESTS 1)
if (GLIBC_COMPATIBILITY)
set(LIBM glibc-compatibility)
endif()
if (USE_INTERNAL_ZLIB_LIBRARY)
set(ZLIB_FOUND ON)
set(ZLIB_LIBRARY ${ZLIB_LIBRARIES})
set(WITH_EXTERNAL_ZLIB ON)
endif()
set(WITH_CURL OFF)
add_subdirectory (mariadb-connector-c)
endfunction()
if (ENABLE_MYSQL AND USE_INTERNAL_MYSQL_LIBRARY)
mysql_support()
if (USE_INTERNAL_MYSQL_LIBRARY)
add_subdirectory (mariadb-connector-c-cmake)
endif ()

if (USE_INTERNAL_RDKAFKA_LIBRARY)
@ -196,11 +161,7 @@ if (USE_INTERNAL_AVRO_LIBRARY)
endif()

if(USE_INTERNAL_GTEST_LIBRARY)
set(GOOGLETEST_VERSION 1.10.0) # master
# Google Test from sources
add_subdirectory(${ClickHouse_SOURCE_DIR}/contrib/googletest/googletest ${CMAKE_CURRENT_BINARY_DIR}/googletest)
# avoid problems with <regexp.h>
target_compile_definitions (gtest INTERFACE GTEST_HAS_POSIX_RE=0)
add_subdirectory(googletest-cmake)
elseif(GTEST_SRC_DIR)
add_subdirectory(${GTEST_SRC_DIR}/googletest ${CMAKE_CURRENT_BINARY_DIR}/googletest)
target_compile_definitions(gtest INTERFACE GTEST_HAS_POSIX_RE=0)
@ -231,7 +192,7 @@ if (USE_EMBEDDED_COMPILER)
endif ()

if (USE_INTERNAL_LIBGSASL_LIBRARY)
add_subdirectory(libgsasl)
add_subdirectory(libgsasl-cmake)
endif()

if (USE_INTERNAL_LIBXML2_LIBRARY)
@ -283,14 +244,7 @@ if (USE_AMQPCPP)
add_subdirectory (amqpcpp-cmake)
endif()
if (USE_CASSANDRA)
# Need to use C++17 since the compilation is not possible with C++20 currently.
set (CMAKE_CXX_STANDARD_bak ${CMAKE_CXX_STANDARD})
set (CMAKE_CXX_STANDARD 17)

add_subdirectory (cassandra)

set (CMAKE_CXX_STANDARD ${CMAKE_CXX_STANDARD_bak})
unset (CMAKE_CXX_STANDARD_bak)
add_subdirectory (cassandra-cmake)
endif()

# Should go before:
@ -298,16 +252,11 @@ endif()
add_subdirectory (curl-cmake)

if (USE_SENTRY)
add_subdirectory (sentry-native)
add_subdirectory (sentry-native-cmake)
endif()

add_subdirectory (fmtlib-cmake)

if (USE_STATS)
add_subdirectory (stats-cmake)
add_subdirectory (gcem)
endif()

if (USE_KRB5)
add_subdirectory (krb5-cmake)
if (USE_CYRUS_SASL)
@ -328,7 +277,7 @@ if (USE_NURAFT)
add_subdirectory(nuraft-cmake)
endif()

add_subdirectory(fast_float)
add_subdirectory(fast_float-cmake)

if (USE_NLP)
add_subdirectory(libstemmer-c-cmake)
@ -354,9 +303,10 @@ endif()
# instead of controlling it via CMAKE_FOLDER.

function (ensure_target_rooted_in _target _folder)
# Skip INTERFACE library targets, since FOLDER property is not available for them.
# Skip aliases and INTERFACE library targets, since FOLDER property is not available/writable for them.
get_target_property (_target_aliased "${_target}" ALIASED_TARGET)
get_target_property (_target_type "${_target}" TYPE)
if (_target_type STREQUAL "INTERFACE_LIBRARY")
if (_target_aliased OR _target_type STREQUAL "INTERFACE_LIBRARY")
return ()
endif ()
@ -417,7 +417,49 @@ set(PARQUET_SRCS
#list(TRANSFORM PARQUET_SRCS PREPEND "${LIBRARY_DIR}/") # cmake 3.12
add_library(${PARQUET_LIBRARY} ${PARQUET_SRCS})
target_include_directories(${PARQUET_LIBRARY} SYSTEM PUBLIC "${ClickHouse_SOURCE_DIR}/contrib/arrow/cpp/src" "${CMAKE_CURRENT_SOURCE_DIR}/cpp/src" PRIVATE ${OPENSSL_INCLUDE_DIR})
include("${ClickHouse_SOURCE_DIR}/contrib/thrift/build/cmake/ConfigureChecks.cmake") # makes config.h

set (HAVE_ARPA_INET_H 1)
set (HAVE_FCNTL_H 1)
set (HAVE_GETOPT_H 1)
set (HAVE_INTTYPES_H 1)
set (HAVE_NETDB_H 1)
set (HAVE_NETINET_IN_H 1)
set (HAVE_SIGNAL_H 1)
set (HAVE_STDINT_H 1)
set (HAVE_UNISTD_H 1)
set (HAVE_PTHREAD_H 1)
set (HAVE_SYS_IOCTL_H 1)
set (HAVE_SYS_PARAM_H 1)
set (HAVE_SYS_RESOURCE_H 1)
set (HAVE_SYS_SOCKET_H 1)
set (HAVE_SYS_STAT_H 1)
set (HAVE_SYS_TIME_H 1)
set (HAVE_SYS_UN_H 1)
set (HAVE_POLL_H 1)
set (HAVE_SYS_POLL_H 1)
set (HAVE_SYS_SELECT_H 1)
set (HAVE_SCHED_H 1)
set (HAVE_STRING_H 1)
set (HAVE_STRINGS_H 1)
set (HAVE_GETHOSTBYNAME 1)
set (HAVE_STRERROR_R 1)
set (HAVE_SCHED_GET_PRIORITY_MAX 1)
set (HAVE_SCHED_GET_PRIORITY_MIN 1)

if (OS_LINUX)
set (STRERROR_R_CHAR_P 1)
endif ()

#set(PACKAGE ${PACKAGE_NAME})
#set(PACKAGE_STRING "${PACKAGE_NAME} ${PACKAGE_VERSION}")
#set(VERSION ${thrift_VERSION})

# generate a config.h file
configure_file("${CMAKE_CURRENT_SOURCE_DIR}/build/cmake/config.h.in" "${CMAKE_CURRENT_BINARY_DIR}/thrift/config.h")

include_directories("${CMAKE_CURRENT_BINARY_DIR}")

target_link_libraries(${PARQUET_LIBRARY} PUBLIC ${ARROW_LIBRARY} PRIVATE ${THRIFT_LIBRARY} boost::headers_only boost::regex ${OPENSSL_LIBRARIES})

if (SANITIZE STREQUAL "undefined")

2
contrib/base64
vendored
@ -1 +1 @@
Subproject commit af9b331f2b4f30b41c70f3a571ff904a8251c1d3
Subproject commit 9499e0c4945589973b9ea1bc927377cfbc84aa46
@ -1,4 +1,4 @@
option (USE_INTERNAL_BOOST_LIBRARY "Use internal Boost library" ${NOT_UNBUNDLED})
option (USE_INTERNAL_BOOST_LIBRARY "Use internal Boost library" ON)

if (NOT USE_INTERNAL_BOOST_LIBRARY)
# 1.70 like in contrib/boost
127
contrib/cassandra-cmake/CMakeLists.txt
Normal file
@@ -0,0 +1,127 @@
# Need to use C++17 since the compilation is not possible with C++20 currently.
set (CMAKE_CXX_STANDARD 17)

set(CASS_ROOT_DIR ${CMAKE_SOURCE_DIR}/contrib/cassandra)
set(CASS_SRC_DIR "${CASS_ROOT_DIR}/src")
set(CASS_INCLUDE_DIR "${CASS_ROOT_DIR}/include")

# Ensure functions/modules are available
list(APPEND CMAKE_MODULE_PATH ${CASS_ROOT_DIR}/cmake)

set(CASS_BUILD_SHARED 1)
set(CASS_BUILD_STATIC 1)
set(CASS_USE_KERBEROS 0)
set(CASS_USE_LIBSSH2 0)
set(CASS_USE_OPENSSL 1)
set(CASS_USE_STD_ATOMIC 1)
set(CASS_USE_ZLIB 1)

file(GLOB SOURCES ${CASS_SRC_DIR}/*.cpp)

if(APPLE)
list(REMOVE_ITEM SOURCES ${CASS_SRC_DIR}/get_time-unix.cpp ${CASS_SRC_DIR}/get_time-win.cpp)
elseif(UNIX)
list(REMOVE_ITEM SOURCES ${CASS_SRC_DIR}/get_time-mac.cpp ${CASS_SRC_DIR}/get_time-win.cpp)
elseif(WIN32)
list(REMOVE_ITEM SOURCES ${CASS_SRC_DIR}/get_time-mac.cpp ${CASS_SRC_DIR}/get_time-unix.cpp)
endif()

if(CASS_USE_OPENSSL)
list(APPEND INCLUDE_DIRS ${CASS_SRC_DIR}/ssl)
list(APPEND SOURCES ${CASS_SRC_DIR}/ssl/ssl_openssl_impl.cpp ${CASS_SRC_DIR}/ssl/ring_buffer_bio.cpp)
else()
list(APPEND SOURCES ${CASS_SRC_DIR}/ssl/ssl_no_impl.cpp)
endif()

if(CASS_USE_KERBEROS)
list(APPEND INCLUDE_DIRS ${CASS_SRC_DIR}/gssapi)
list(APPEND SOURCES ${CASS_SRC_DIR}/gssapi/dse_auth_gssapi.cpp ${CASS_SRC_DIR}/gssapi/dse_auth_gssapi.hpp)
endif()

list(APPEND SOURCES ${CASS_SRC_DIR}/atomic/atomic_std.hpp)

add_library(curl_hostcheck OBJECT ${CASS_SRC_DIR}/third_party/curl/hostcheck.cpp)
add_library(hdr_histogram OBJECT ${CASS_SRC_DIR}/third_party/hdr_histogram/hdr_histogram.cpp)
add_library(http-parser OBJECT ${CASS_SRC_DIR}/third_party/http-parser/http_parser.c)
add_library(minizip OBJECT
${CASS_SRC_DIR}/third_party/minizip/ioapi.c
${CASS_SRC_DIR}/third_party/minizip/zip.c
${CASS_SRC_DIR}/third_party/minizip/unzip.c)

target_link_libraries(minizip zlib)
target_compile_definitions(minizip PRIVATE "-Dz_crc_t=unsigned long")

list(APPEND INCLUDE_DIRS
${CASS_SRC_DIR}/third_party/curl
${CASS_SRC_DIR}/third_party/hdr_histogram
${CASS_SRC_DIR}/third_party/http-parser
${CASS_SRC_DIR}/third_party/minizip
${CASS_SRC_DIR}/third_party/mt19937_64
${CASS_SRC_DIR}/third_party/rapidjson/rapidjson
${CASS_SRC_DIR}/third_party/sparsehash/src)

list(APPEND INCLUDE_DIRS ${CASS_INCLUDE_DIR} ${CASS_SRC_DIR})

set(HASH_FUN_H "functional")
set(HASH_NAME hash)
set(HASH_NAMESPACE "std")
set(HAVE_INTTYPES_H 1)
set(HAVE_STDINT_H 1)
set(HAVE_SYS_TYPES_H 1)
set(HAVE_MEMCPY 1)
set(HAVE_LONG_LONG 1)
set(HAVE_UINT16_T 1)

configure_file("${CASS_SRC_DIR}/third_party/sparsehash/config.h.cmake" "${CMAKE_CURRENT_BINARY_DIR}/sparsehash/internal/sparseconfig.h")

# Determine random availability
if (OS_LINUX)
#set (HAVE_GETRANDOM 1) - not on every Linux kernel
elseif (OS_FREEBSD OR OS_DARWIN)
set (HAVE_ARC4RANDOM 1)
endif ()

# Determine if sigpipe is available
if (OS_LINUX)
set (HAVE_SIGTIMEDWAIT 1)
else (OS_FREEBSD OR OS_DARWIN)
set (HAVE_NOSIGPIPE 1)
endif()

set (HAVE_BUILTIN_BSWAP32 1)
set (HAVE_BUILTIN_BSWAP64 1)

set(HAVE_BOOST_ATOMIC 0)
set(HAVE_STD_ATOMIC 1)

set(HAVE_KERBEROS ${CASS_USE_KERBEROS})
set(HAVE_OPENSSL ${CASS_USE_OPENSSL})
set(HAVE_ZLIB ${CASS_USE_ZLIB})

# Generate the driver_config.hpp file
configure_file(
${CASS_ROOT_DIR}/driver_config.hpp.in
${CMAKE_CURRENT_BINARY_DIR}/driver_config.hpp)

add_library(cassandra
${SOURCES}
$<TARGET_OBJECTS:curl_hostcheck>
$<TARGET_OBJECTS:hdr_histogram>
$<TARGET_OBJECTS:http-parser>
$<TARGET_OBJECTS:minizip>)

target_link_libraries(cassandra zlib)
add_library(cassandra_static ALIAS cassandra)
target_include_directories(cassandra PRIVATE ${CMAKE_CURRENT_BINARY_DIR} ${INCLUDE_DIRS})
target_compile_definitions(cassandra PRIVATE CASS_BUILDING)

target_link_libraries(cassandra uv)

if(CASS_USE_OPENSSL)
target_link_libraries(cassandra ssl)
endif()
@@ -1,4 +1,4 @@
option (USE_INTERNAL_CCTZ_LIBRARY "Use internal cctz library" ${NOT_UNBUNDLED})
option (USE_INTERNAL_CCTZ_LIBRARY "Use internal cctz library" ON)

if (NOT USE_INTERNAL_CCTZ_LIBRARY)
find_library (LIBRARY_CCTZ cctz)
@@ -82,7 +82,7 @@ if (NOT EXTERNAL_CCTZ_LIBRARY_FOUND OR NOT EXTERNAL_CCTZ_LIBRARY_WORKS)

# each file in that dir (except of tab and localtime) store the info about timezone
execute_process(COMMAND
bash -c "cd ${TZDIR} && find * -type f -and ! -name '*.tab' -and ! -name 'localtime' | sort | paste -sd ';' -"
bash -c "cd ${TZDIR} && find * -type f -and ! -name '*.tab' -and ! -name 'localtime' | LC_ALL=C sort | paste -sd ';' -"
OUTPUT_STRIP_TRAILING_WHITESPACE
OUTPUT_VARIABLE TIMEZONES)
2
contrib/fast_float-cmake/CMakeLists.txt
Normal file
@@ -0,0 +1,2 @@
add_library(fast_float INTERFACE)
target_include_directories(fast_float INTERFACE "${ClickHouse_SOURCE_DIR}/contrib/fast_float/include/")
1
contrib/gcem
vendored
@@ -1 +0,0 @@
Subproject commit 8d4f1b5d76ea8f6ff12f3f4f34cda45424556b00
11
contrib/googletest-cmake/CMakeLists.txt
Normal file
@@ -0,0 +1,11 @@
set (SRC_DIR "${ClickHouse_SOURCE_DIR}/contrib/googletest/googletest")

add_library(gtest "${SRC_DIR}/src/gtest-all.cc")
set_target_properties(gtest PROPERTIES VERSION "1.0.0")
target_compile_definitions (gtest INTERFACE GTEST_HAS_POSIX_RE=0)
target_include_directories(gtest SYSTEM PUBLIC "${SRC_DIR}/include")
target_include_directories(gtest PRIVATE "${SRC_DIR}")

add_library(gtest_main "${SRC_DIR}/src/gtest_main.cc")
set_target_properties(gtest_main PROPERTIES VERSION "1.0.0")
target_link_libraries(gtest_main PUBLIC gtest)
@@ -17,7 +17,7 @@ if (NOT ENABLE_HYPERSCAN)
return()
endif()

option (USE_INTERNAL_HYPERSCAN_LIBRARY "Use internal hyperscan library" ${NOT_UNBUNDLED})
option (USE_INTERNAL_HYPERSCAN_LIBRARY "Use internal hyperscan library" ON)

if (NOT USE_INTERNAL_HYPERSCAN_LIBRARY)
find_library (LIBRARY_HYPERSCAN hs)
@@ -1,6 +1,6 @@
if (SANITIZE OR NOT (
((OS_LINUX OR OS_FREEBSD) AND (ARCH_AMD64 OR ARCH_ARM OR ARCH_PPC64LE OR ARCH_RISCV64)) OR
(OS_DARWIN AND (CMAKE_BUILD_TYPE STREQUAL "RelWithDebInfo" OR CMAKE_BUILD_TYPE STREQUAL "Debug"))
(OS_DARWIN AND (CMAKE_BUILD_TYPE_UC STREQUAL "RELWITHDEBINFO" OR CMAKE_BUILD_TYPE_UC STREQUAL "DEBUG"))
))
if (ENABLE_JEMALLOC)
message (${RECONFIGURE_MESSAGE_LEVEL}
@@ -500,7 +500,6 @@ function(preprocess_et out_var)
COMMAND perl "${KRB5_SOURCE_DIR}/util/et/compile_et" -d "${KRB5_SOURCE_DIR}/util/et" ${in_f}
DEPENDS ${in_f} "${KRB5_SOURCE_DIR}/util/et/compile_et"
WORKING_DIRECTORY ${ET_PATH}
COMMENT "Creating preprocessed file ${F_C}"
VERBATIM
)
list(APPEND result ${F_C})
@@ -526,7 +525,6 @@ add_custom_command(
add_custom_target(
ERROR_MAP_H
DEPENDS "${KRB5_SOURCE_DIR}/lib/gssapi/krb5/error_map.h"
COMMENT "generating error_map.h"
VERBATIM
)

@@ -539,14 +537,12 @@ add_custom_command(
add_custom_target(
ERRMAP_H
DEPENDS "${KRB5_SOURCE_DIR}/lib/gssapi/generic/errmap.h"
COMMENT "generating errmap.h"
VERBATIM
)

add_custom_target(
KRB_5_H
DEPENDS "${CMAKE_CURRENT_BINARY_DIR}/include/krb5/krb5.h"
COMMENT "generating krb5.h"
VERBATIM
)
1
contrib/libc-headers
vendored
@@ -1 +0,0 @@
Subproject commit aa5429bf67a346e48ad60efd88bcefc286644bf3
@@ -73,6 +73,11 @@ target_compile_options(cxx PRIVATE -w)

target_link_libraries(cxx PUBLIC cxxabi)

# For __udivmodti4, __divmodti4.
if (OS_DARWIN AND COMPILER_GCC)
target_link_libraries(cxx PRIVATE gcc)
endif ()

install(
TARGETS cxx
EXPORT global
107
contrib/libgsasl-cmake/CMakeLists.txt
Normal file
@@ -0,0 +1,107 @@
set (SRC_DIR "${ClickHouse_SOURCE_DIR}/contrib/libgsasl")

set(SRCS
${SRC_DIR}/gl/gc-gnulib.c
${SRC_DIR}/gl/printf-parse.c
${SRC_DIR}/gl/c-ctype.c
${SRC_DIR}/gl/float.c
${SRC_DIR}/gl/printf-args.c
${SRC_DIR}/gl/hmac-sha1.c
${SRC_DIR}/gl/itold.c
${SRC_DIR}/gl/hmac-md5.c
${SRC_DIR}/gl/gc-pbkdf2-sha1.c
${SRC_DIR}/gl/md5.c
${SRC_DIR}/gl/base64.c
${SRC_DIR}/gl/memxor.c
${SRC_DIR}/gl/sha1.c
${SRC_DIR}/openid20/client.c
${SRC_DIR}/openid20/mechinfo.c
${SRC_DIR}/openid20/server.c
${SRC_DIR}/anonymous/client.c
${SRC_DIR}/anonymous/mechinfo.c
${SRC_DIR}/anonymous/server.c
${SRC_DIR}/saml20/client.c
${SRC_DIR}/saml20/mechinfo.c
${SRC_DIR}/saml20/server.c
${SRC_DIR}/scram/parser.c
${SRC_DIR}/scram/printer.c
${SRC_DIR}/scram/tokens.c
${SRC_DIR}/scram/client.c
${SRC_DIR}/scram/mechinfo.c
${SRC_DIR}/scram/server.c
${SRC_DIR}/scram/validate.c
${SRC_DIR}/src/free.c
${SRC_DIR}/src/supportp.c
${SRC_DIR}/src/init.c
${SRC_DIR}/src/mechtools.c
${SRC_DIR}/src/error.c
${SRC_DIR}/src/property.c
${SRC_DIR}/src/done.c
${SRC_DIR}/src/callback.c
${SRC_DIR}/src/xstart.c
${SRC_DIR}/src/xfinish.c
${SRC_DIR}/src/version.c
${SRC_DIR}/src/xstep.c
${SRC_DIR}/src/mechname.c
${SRC_DIR}/src/xcode.c
${SRC_DIR}/src/crypto.c
${SRC_DIR}/src/doxygen.c
${SRC_DIR}/src/suggest.c
${SRC_DIR}/src/saslprep.c
${SRC_DIR}/src/listmech.c
${SRC_DIR}/src/register.c
${SRC_DIR}/src/base64.c
${SRC_DIR}/src/md5pwd.c
${SRC_DIR}/external/client.c
${SRC_DIR}/external/mechinfo.c
${SRC_DIR}/external/server.c
${SRC_DIR}/securid/client.c
${SRC_DIR}/securid/mechinfo.c
${SRC_DIR}/securid/server.c
${SRC_DIR}/plain/client.c
${SRC_DIR}/plain/mechinfo.c
${SRC_DIR}/plain/server.c
${SRC_DIR}/cram-md5/client.c
${SRC_DIR}/cram-md5/challenge.c
${SRC_DIR}/cram-md5/mechinfo.c
${SRC_DIR}/cram-md5/server.c
${SRC_DIR}/cram-md5/digest.c
${SRC_DIR}/digest-md5/client.c
${SRC_DIR}/digest-md5/digesthmac.c
${SRC_DIR}/digest-md5/free.c
${SRC_DIR}/digest-md5/getsubopt.c
${SRC_DIR}/digest-md5/mechinfo.c
${SRC_DIR}/digest-md5/nonascii.c
${SRC_DIR}/digest-md5/parser.c
${SRC_DIR}/digest-md5/printer.c
${SRC_DIR}/digest-md5/qop.c
${SRC_DIR}/digest-md5/server.c
${SRC_DIR}/digest-md5/session.c
${SRC_DIR}/digest-md5/test-parser.c
${SRC_DIR}/digest-md5/validate.c
${SRC_DIR}/login/client.c
${SRC_DIR}/login/mechinfo.c
${SRC_DIR}/login/server.c
)

if (USE_KRB5)
set(SRCS ${SRCS}
${SRC_DIR}/gssapi/client.c
${SRC_DIR}/gssapi/mechinfo.c
${SRC_DIR}/gssapi/server.c)
endif()

add_library(gsasl ${SRCS})

target_include_directories(gsasl PUBLIC ${SRC_DIR})
target_include_directories(gsasl PUBLIC ${SRC_DIR}/gl)
target_include_directories(gsasl PUBLIC ${SRC_DIR}/src)
target_include_directories(gsasl PUBLIC ${SRC_DIR}/digest-md5)
target_include_directories(gsasl PRIVATE "${ClickHouse_SOURCE_DIR}/contrib/libgsasl-cmake/linux_x86_64/include")

target_compile_definitions (gsasl PRIVATE HAVE_CONFIG_H=1)

if (USE_KRB5)
target_link_libraries(gsasl PUBLIC ${KRB5_LIBRARY})
target_compile_definitions (gsasl PRIVATE HAVE_GSSAPI_H=1 USE_GSSAPI=1)
endif()
1091
contrib/libgsasl-cmake/linux_x86_64/include/config.h
Normal file
File diff suppressed because it is too large
2
contrib/libhdfs3
vendored
@@ -1 +1 @@
Subproject commit a8c37ee001af1ae88e5dfa637ae5b31b087c96d3
Subproject commit 9194af44588633c1b2dae44bf945804401ff883e
@@ -1,23 +1,4 @@
if (ENABLE_PROTOBUF AND NOT USE_INTERNAL_PROTOBUF_LIBRARY)
option(PROTOBUF_OLD_ABI_COMPAT "Set to ON for compatiability with external protobuf which was compiled old C++ ABI" OFF)
endif()

if (PROTOBUF_OLD_ABI_COMPAT)
if (NOT ENABLE_PROTOBUF OR USE_INTERNAL_PROTOBUF_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "PROTOBUF_OLD_ABI_COMPAT option is ignored")
endif()
endif()

if (NOT USE_INTERNAL_PROTOBUF_LIBRARY AND PROTOBUF_OLD_ABI_COMPAT)
# compatiable with protobuf which was compiled old C++ ABI
set(CMAKE_CXX_FLAGS "-D_GLIBCXX_USE_CXX11_ABI=0")
set(CMAKE_C_FLAGS "")
if (NOT (CMAKE_VERSION VERSION_LESS "3.8.0"))
unset(CMAKE_CXX_STANDARD)
endif ()
endif()

if (${ENABLE_LIBRARIES} AND ${ENABLE_KRB5})
if (${ENABLE_KRB5})
SET(WITH_KERBEROS 1)
else()
SET(WITH_KERBEROS 0)
@@ -46,9 +27,7 @@ set(PROTO_FILES
"${HDFS3_SOURCE_DIR}/proto/datatransfer.proto"
)

if(USE_PROTOBUF)
PROTOBUF_GENERATE_CPP(PROTO_SOURCES PROTO_HEADERS ${PROTO_FILES})
endif()
PROTOBUF_GENERATE_CPP(PROTO_SOURCES PROTO_HEADERS ${PROTO_FILES})

configure_file("${HDFS3_SOURCE_DIR}/platform.h.in" "${CMAKE_CURRENT_BINARY_DIR}/platform.h")

@@ -108,95 +87,14 @@ set(SRCS
"${HDFS3_SOURCE_DIR}/common/Hash.cpp"
"${HDFS3_SOURCE_DIR}/common/SWCrc32c.cpp"
"${HDFS3_SOURCE_DIR}/common/Thread.cpp"

"${HDFS3_SOURCE_DIR}/network/TcpSocket.h"
"${HDFS3_SOURCE_DIR}/network/BufferedSocketReader.h"
"${HDFS3_SOURCE_DIR}/network/Socket.h"
"${HDFS3_SOURCE_DIR}/network/DomainSocket.h"
"${HDFS3_SOURCE_DIR}/network/Syscall.h"
"${HDFS3_SOURCE_DIR}/client/InputStreamImpl.h"
"${HDFS3_SOURCE_DIR}/client/FileSystem.h"
"${HDFS3_SOURCE_DIR}/client/ReadShortCircuitInfo.h"
"${HDFS3_SOURCE_DIR}/client/InputStreamInter.h"
"${HDFS3_SOURCE_DIR}/client/FileSystemImpl.h"
"${HDFS3_SOURCE_DIR}/client/PacketPool.h"
"${HDFS3_SOURCE_DIR}/client/Pipeline.h"
"${HDFS3_SOURCE_DIR}/client/OutputStreamInter.h"
"${HDFS3_SOURCE_DIR}/client/RemoteBlockReader.h"
"${HDFS3_SOURCE_DIR}/client/Token.h"
"${HDFS3_SOURCE_DIR}/client/KerberosName.h"
"${HDFS3_SOURCE_DIR}/client/DirectoryIterator.h"
"${HDFS3_SOURCE_DIR}/client/hdfs.h"
"${HDFS3_SOURCE_DIR}/client/FileSystemStats.h"
"${HDFS3_SOURCE_DIR}/client/FileSystemKey.h"
"${HDFS3_SOURCE_DIR}/client/DataTransferProtocolSender.h"
"${HDFS3_SOURCE_DIR}/client/Packet.h"
"${HDFS3_SOURCE_DIR}/client/PacketHeader.h"
"${HDFS3_SOURCE_DIR}/client/FileSystemInter.h"
"${HDFS3_SOURCE_DIR}/client/LocalBlockReader.h"
"${HDFS3_SOURCE_DIR}/client/TokenInternal.h"
"${HDFS3_SOURCE_DIR}/client/InputStream.h"
"${HDFS3_SOURCE_DIR}/client/PipelineAck.h"
"${HDFS3_SOURCE_DIR}/client/BlockReader.h"
"${HDFS3_SOURCE_DIR}/client/Permission.h"
"${HDFS3_SOURCE_DIR}/client/OutputStreamImpl.h"
"${HDFS3_SOURCE_DIR}/client/LeaseRenewer.h"
"${HDFS3_SOURCE_DIR}/client/UserInfo.h"
"${HDFS3_SOURCE_DIR}/client/PeerCache.h"
"${HDFS3_SOURCE_DIR}/client/OutputStream.h"
"${HDFS3_SOURCE_DIR}/client/FileStatus.h"
"${HDFS3_SOURCE_DIR}/client/DataTransferProtocol.h"
"${HDFS3_SOURCE_DIR}/client/BlockLocation.h"
"${HDFS3_SOURCE_DIR}/rpc/RpcConfig.h"
"${HDFS3_SOURCE_DIR}/rpc/SaslClient.h"
"${HDFS3_SOURCE_DIR}/rpc/RpcAuth.h"
"${HDFS3_SOURCE_DIR}/rpc/RpcClient.h"
"${HDFS3_SOURCE_DIR}/rpc/RpcCall.h"
"${HDFS3_SOURCE_DIR}/rpc/RpcContentWrapper.h"
"${HDFS3_SOURCE_DIR}/rpc/RpcProtocolInfo.h"
"${HDFS3_SOURCE_DIR}/rpc/RpcRemoteCall.h"
"${HDFS3_SOURCE_DIR}/rpc/RpcServerInfo.h"
"${HDFS3_SOURCE_DIR}/rpc/RpcChannel.h"
"${HDFS3_SOURCE_DIR}/rpc/RpcChannelKey.h"
"${HDFS3_SOURCE_DIR}/server/BlockLocalPathInfo.h"
"${HDFS3_SOURCE_DIR}/server/LocatedBlocks.h"
"${HDFS3_SOURCE_DIR}/server/DatanodeInfo.h"
"${HDFS3_SOURCE_DIR}/server/RpcHelper.h"
"${HDFS3_SOURCE_DIR}/server/ExtendedBlock.h"
"${HDFS3_SOURCE_DIR}/server/NamenodeInfo.h"
"${HDFS3_SOURCE_DIR}/server/NamenodeImpl.h"
"${HDFS3_SOURCE_DIR}/server/LocatedBlock.h"
"${HDFS3_SOURCE_DIR}/server/NamenodeProxy.h"
"${HDFS3_SOURCE_DIR}/server/Datanode.h"
"${HDFS3_SOURCE_DIR}/server/Namenode.h"
"${HDFS3_SOURCE_DIR}/common/XmlConfig.h"
"${HDFS3_SOURCE_DIR}/common/Logger.h"
"${HDFS3_SOURCE_DIR}/common/WriteBuffer.h"
"${HDFS3_SOURCE_DIR}/common/HWCrc32c.h"
"${HDFS3_SOURCE_DIR}/common/Checksum.h"
"${HDFS3_SOURCE_DIR}/common/SessionConfig.h"
"${HDFS3_SOURCE_DIR}/common/Unordered.h"
"${HDFS3_SOURCE_DIR}/common/BigEndian.h"
"${HDFS3_SOURCE_DIR}/common/Thread.h"
"${HDFS3_SOURCE_DIR}/common/StackPrinter.h"
"${HDFS3_SOURCE_DIR}/common/Exception.h"
"${HDFS3_SOURCE_DIR}/common/WritableUtils.h"
"${HDFS3_SOURCE_DIR}/common/StringUtil.h"
"${HDFS3_SOURCE_DIR}/common/LruMap.h"
"${HDFS3_SOURCE_DIR}/common/Function.h"
"${HDFS3_SOURCE_DIR}/common/DateTime.h"
"${HDFS3_SOURCE_DIR}/common/Hash.h"
"${HDFS3_SOURCE_DIR}/common/SWCrc32c.h"
"${HDFS3_SOURCE_DIR}/common/ExceptionInternal.h"
"${HDFS3_SOURCE_DIR}/common/Memory.h"
"${HDFS3_SOURCE_DIR}/common/FileWrapper.h"
)
${PROTO_SOURCES}
)

# old kernels (< 3.17) doesn't have SYS_getrandom. Always use POSIX implementation to have better compatibility
set_source_files_properties("${HDFS3_SOURCE_DIR}/rpc/RpcClient.cpp" PROPERTIES COMPILE_FLAGS "-DBOOST_UUID_RANDOM_PROVIDER_FORCE_POSIX=1")

# target
add_library(hdfs3 ${SRCS} ${PROTO_SOURCES} ${PROTO_HEADERS})
add_library(hdfs3 ${SRCS})

if(USE_INTERNAL_PROTOBUF_LIBRARY)
add_dependencies(hdfs3 protoc)
@@ -218,6 +116,7 @@ target_link_libraries(hdfs3 PRIVATE ${LIBXML2_LIBRARIES})
# inherit from parent cmake
target_include_directories(hdfs3 PRIVATE ${Protobuf_INCLUDE_DIR})
target_link_libraries(hdfs3 PRIVATE ${Protobuf_LIBRARY} boost::headers_only)

if(OPENSSL_INCLUDE_DIR AND OPENSSL_LIBRARIES)
target_include_directories(hdfs3 PRIVATE ${OPENSSL_INCLUDE_DIR})
target_link_libraries(hdfs3 PRIVATE ${OPENSSL_LIBRARIES})
2
contrib/libpqxx
vendored
@@ -1 +1 @@
Subproject commit 357608d11b7a1961c3fb7db2ef9a5dbb2e87da77
Subproject commit 63e20f9485b8cbeabf99008123248fc9f033e766
@@ -66,7 +66,7 @@
#cmakedefine WITH_SASL_OAUTHBEARER 1
#cmakedefine WITH_SASL_CYRUS 1
// crc32chw
#if !defined(__PPC__) && !defined(__riscv) && (!defined(__aarch64__) || defined(__ARM_FEATURE_CRC32)) && !(defined(__aarch64__) && defined(__APPLE__))
#if !defined(__PPC__) && !defined(__riscv) && !defined(__aarch64__)
#define WITH_CRC32C_HW 1
#endif
// regex
@@ -1,4 +1,4 @@
option (USE_INTERNAL_LZ4_LIBRARY "Use internal lz4 library" ${NOT_UNBUNDLED})
option (USE_INTERNAL_LZ4_LIBRARY "Use internal lz4 library" ON)

if (NOT USE_INTERNAL_LZ4_LIBRARY)
find_library (LIBRARY_LZ4 lz4)
243
contrib/mariadb-connector-c-cmake/CMakeLists.txt
Normal file
@@ -0,0 +1,243 @@
if (GLIBC_COMPATIBILITY)
set(LIBM glibc-compatibility)
endif()

# This is the LGPL libmariadb project.

set(CC_SOURCE_DIR ${CMAKE_SOURCE_DIR}/contrib/mariadb-connector-c)
set(CC_BINARY_DIR ${CMAKE_CURRENT_BINARY_DIR})

set(WITH_SSL ON)

set(MARIADB_CONNECTOR_C_COPYRIGHT "2013-2017 MariaDB Corporation Ab")

set(PROTOCOL_VERSION 10) # we adapted new password option from PHP's mysqlnd !

# if C/C is build as subproject inside MariaDB server tree we will
# use the version defined by server
if(MAJOR_VERSION)
set(MARIADB_CLIENT_VERSION_MAJOR ${MAJOR_VERSION})
set(MARIADB_CLIENT_VERSION_MINOR ${MINOR_VERSION})
set(MARIADB_CLIENT_VERSION_PATCH ${PATCH_VERSION})
set(MARIADB_CLIENT_VERSION_EXTRA ${EXTRA_VERSION})
else()
set(MARIADB_CLIENT_VERSION_MAJOR "10")
set(MARIADB_CLIENT_VERSION_MINOR "4")
set(MARIADB_CLIENT_VERSION_PATCH "3")
set(MARIADB_CLIENT_VERSION_EXTRA "")
endif()
set(MARIADB_CLIENT_VERSION "${MARIADB_CLIENT_VERSION_MAJOR}.${MARIADB_CLIENT_VERSION_MINOR}.${MARIADB_CLIENT_VERSION_PATCH}${MARIADB_CLIENT_VERSION_EXTRA}")
set(MARIADB_BASE_VERSION "mariadb-${MARIADB_CLIENT_VERSION_MAJOR}.${MARIADB_CLIENT_VERSION_MINOR}")
MATH(EXPR MARIADB_VERSION_ID "${MARIADB_CLIENT_VERSION_MAJOR} * 10000 +
${MARIADB_CLIENT_VERSION_MINOR} * 100 +
${MARIADB_CLIENT_VERSION_PATCH}")

IF (NOT MARIADB_PORT)
set(MARIADB_PORT 3306)
ENDIF ()
if(NOT MARIADB_UNIX_ADDR)
set(MARIADB_UNIX_ADDR "/tmp/mysql.sock")
endif()

set(HAVE_ALLOCA_H 1)
set(HAVE_ARPA_INET_H 1)
set(HAVE_DLFCN_H 1)
set(HAVE_FCNTL_H 1)
set(HAVE_FLOAT_H 1)
set(HAVE_LIMITS_H 1)
set(HAVE_PWD_H 1)
set(HAVE_SCHED_H 1)
set(HAVE_SELECT_H 0)
set(INCLUDE_SIGNAL 1)
set(HAVE_SIGNAL 1)
set(HAVE_STDDEF_H 1)
set(HAVE_STDINT_H 1)
set(HAVE_STDLIB_H 1)
set(HAVE_STRING_H 1)
set(HAVE_STRINGS_H 1)
set(HAVE_SYS_IOCTL_H 1)
set(HAVE_SYS_SELECT_H 1)
set(HAVE_SYS_SOCKET_H 1)
set(HAVE_SYS_TYPES_H 1)
set(HAVE_SYS_UN_H 1)
set(HAVE_UNISTD_H 1)
set(HAVE_UTIME_H 1)
set(HAVE_UCONTEXT_H 1)
set(HAVE_ALLOCA 1)
set(HAVE_DLERROR 0)
set(HAVE_DLOPEN 0)
set(HAVE_FCNTL 1)
set(HAVE_MEMCPY 1)
set(HAVE_NL_LANGINFO 0)
set(HAVE_SETLOCALE 0)
set(HAVE_POLL 1)

set(SIZEOF_CHARP 8)
set(SIZEOF_INT 4)
set(SIZEOF_LONG 8)
set(SIZEOF_LONG_LONG 8)
set(SIZEOF_SIZE_T 8)
set(SOCKET_SIZE_TYPE socklen_t)

set(SYSTEM_LIBS ${SYSTEM_LIBS} zlib)

if(CMAKE_HAVE_PTHREAD_H)
set(CMAKE_REQUIRED_INCLUDES pthread.h)
endif()

add_definitions(-DMARIADB_SYSTEM_TYPE="${CMAKE_SYSTEM_NAME}")
add_definitions(-DMARIADB_MACHINE_TYPE="${CMAKE_SYSTEM_PROCESSOR}")

set(HAVE_THREADS 1)
set(DEFAULT_CHARSET "utf8mb4")

add_definitions(-DHAVE_OPENSSL -DHAVE_TLS)
set(SSL_LIBRARIES ${OPENSSL_SSL_LIBRARY} ${OPENSSL_CRYPTO_LIBRARY})
include_directories(BEFORE ${OPENSSL_INCLUDE_DIR})
set(TLS_LIBRARY_VERSION "OpenSSL ${OPENSSL_VERSION}")

set(ENABLED_LOCAL_INFILE OFF)

CONFIGURE_FILE(${CC_SOURCE_DIR}/include/ma_config.h.in
${CC_BINARY_DIR}/include-private/ma_config.h)
CONFIGURE_FILE(${CC_SOURCE_DIR}/include/ma_config.h.in
${CC_BINARY_DIR}/include-private/config.h)
CONFIGURE_FILE(${CC_SOURCE_DIR}/include/mariadb_version.h.in
${CC_BINARY_DIR}/include-public/mariadb_version.h)

if(WITH_SSL)
set(SYSTEM_LIBS ${SYSTEM_LIBS} ${SSL_LIBRARIES})
endif()

function(REGISTER_PLUGIN)

SET(one_value_keywords TARGET TYPE)
SET(multi_value_keywords SOURCES)

cmake_parse_arguments(CC_PLUGIN
"${options}"
"${one_value_keywords}"
"${multi_value_keywords}"
${ARGN})

# overwrite default if it was specified with cmake option
string(TOUPPER ${CC_PLUGIN_TARGET} cc_plugin)
if(NOT "${CLIENT_PLUGIN_${cc_plugin}}" STREQUAL "")
SET(CC_PLUGIN_DEFAULT ${CLIENT_PLUGIN_${cc_plugin}})
endif()

# use uppercase
string(TOUPPER ${CC_PLUGIN_TARGET} target_name)
string(TOUPPER "${CC_PLUGIN_CONFIGURATIONS}" CC_PLUGIN_CONFIGURATIONS)

if(NOT ${PLUGIN_${target_name}} STREQUAL "")
string(TOUPPER ${PLUGIN_${target_name}} PLUGIN_${target_name})
set(CC_PLUGIN_DEFAULT ${PLUGIN_${target_name}})
endif()

set(PLUGINS_STATIC ${PLUGINS_STATIC} ${CC_PLUGIN_TARGET} PARENT_SCOPE)
set(LIBMARIADB_PLUGIN_CFLAGS ${LIBMARIADB_PLUGIN_CFLAGS} ${CC_PLUGIN_COMPILE_OPTIONS} PARENT_SCOPE)
set(LIBMARIADB_PLUGIN_INCLUDES ${LIBMARIADB_PLUGIN_INCLUDES} ${CC_PLUGIN_INCLUDES} PARENT_SCOPE)
set(LIBMARIADB_PLUGIN_SOURCES ${LIBMARIADB_PLUGIN_SOURCES} ${CC_PLUGIN_SOURCES} PARENT_SCOPE)
set(LIBMARIADB_PLUGIN_LIBS ${LIBMARIADB_PLUGIN_LIBS} ${CC_PLUGIN_LIBRARIES} PARENT_SCOPE)
endfunction()

SET(PLUGIN_EXTRA_FILES ${CC_SOURCE_DIR}/libmariadb/ma_errmsg.c)

#native password
REGISTER_PLUGIN(TARGET pvio_socket
TYPE MARIADB_CLIENT_PLUGIN_PVIO
SOURCES "${CC_SOURCE_DIR}/plugins/pvio/pvio_socket.c")

# SHA256 caching plugin for MySQL 8.0 connection
REGISTER_PLUGIN(TARGET caching_sha2_password
TYPE MARIADB_CLIENT_PLUGIN_AUTH
SOURCES "${CC_SOURCE_DIR}/plugins/auth/caching_sha2_pw.c")

REGISTER_PLUGIN(TARGET sha256_password
TYPE MARIADB_CLIENT_PLUGIN_AUTH
SOURCES "${CC_SOURCE_DIR}/plugins/auth/sha256_pw.c")

#native password
REGISTER_PLUGIN(TARGET mysql_native_password
TYPE MARIADB_CLIENT_PLUGIN_AUTH
SOURCES "${CC_SOURCE_DIR}/plugins/auth/my_auth.c")

REGISTER_PLUGIN(TARGET aurora
TYPE MARIADB_CLIENT_PLUGIN_CONNECTION
SOURCES "${CC_SOURCE_DIR}/plugins/connection/aurora.c")

add_definitions(-D HAVE_COMPRESS)
add_definitions(-D LIBMARIADB)
add_definitions(-D THREAD)

# handle static plugins
set(LIBMARIADB_SOURCES ${LIBMARIADB_PLUGIN_SOURCES})
set(SYSTEM_LIBS ${SYSTEM_LIBS} ${LIBMARIADB_PLUGIN_LIBS})
add_definitions(${LIBMARIADB_PLUGIN_DEFS})
FOREACH(plugin ${PLUGINS_STATIC})
set(EXTERNAL_PLUGINS "${EXTERNAL_PLUGINS} extern struct st_mysql_client_plugin ${plugin}_client_plugin;\n")
set(BUILTIN_PLUGINS "${BUILTIN_PLUGINS} (struct st_mysql_client_plugin *)&${plugin}_client_plugin,\n")
ENDFOREACH()
CONFIGURE_FILE(${CC_SOURCE_DIR}/libmariadb/ma_client_plugin.c.in
${CC_BINARY_DIR}/libmariadb/ma_client_plugin.c)

set(LIBMARIADB_SOURCES ${LIBMARIADB_SOURCES}
${CC_SOURCE_DIR}/plugins/auth/my_auth.c
${CC_SOURCE_DIR}/libmariadb/ma_array.c
${CC_SOURCE_DIR}/libmariadb/ma_charset.c
${CC_SOURCE_DIR}/libmariadb/ma_hash.c
${CC_SOURCE_DIR}/libmariadb/ma_net.c
${CC_SOURCE_DIR}/libmariadb/mariadb_charset.c
${CC_SOURCE_DIR}/libmariadb/ma_time.c
${CC_SOURCE_DIR}/libmariadb/ma_default.c
${CC_SOURCE_DIR}/libmariadb/ma_errmsg.c
${CC_SOURCE_DIR}/libmariadb/mariadb_lib.c
${CC_SOURCE_DIR}/libmariadb/ma_list.c
${CC_SOURCE_DIR}/libmariadb/ma_pvio.c
${CC_SOURCE_DIR}/libmariadb/ma_tls.c
${CC_SOURCE_DIR}/libmariadb/ma_alloc.c
${CC_SOURCE_DIR}/libmariadb/ma_compress.c
${CC_SOURCE_DIR}/libmariadb/ma_init.c
${CC_SOURCE_DIR}/libmariadb/ma_password.c
${CC_SOURCE_DIR}/libmariadb/ma_ll2str.c
${CC_SOURCE_DIR}/libmariadb/ma_sha1.c
${CC_SOURCE_DIR}/libmariadb/mariadb_stmt.c
${CC_SOURCE_DIR}/libmariadb/ma_loaddata.c
${CC_SOURCE_DIR}/libmariadb/ma_stmt_codec.c
${CC_SOURCE_DIR}/libmariadb/ma_string.c
${CC_SOURCE_DIR}/libmariadb/ma_dtoa.c
${CC_SOURCE_DIR}/libmariadb/mariadb_rpl.c
${CC_SOURCE_DIR}/libmariadb/ma_io.c
${CC_SOURCE_DIR}/libmariadb/secure/openssl.c
${CC_SOURCE_DIR}/libmariadb/secure/openssl_crypt.c
${CC_BINARY_DIR}/libmariadb/ma_client_plugin.c
)

if(ICONV_INCLUDE_DIR)
include_directories(BEFORE ${ICONV_INCLUDE_DIR})
endif()
add_definitions(-DLIBICONV_PLUG)

if(ZLIB_FOUND AND WITH_EXTERNAL_ZLIB)
include_directories(${ZLIB_INCLUDE_DIR})
endif()

if(WITH_DYNCOL)
set(LIBMARIADB_SOURCES ${LIBMARIADB_SOURCES} ${CC_SOURCE_DIR}/libmariadb/mariadb_dyncol.c)
endif()

set(LIBMARIADB_SOURCES ${LIBMARIADB_SOURCES} ${CC_SOURCE_DIR}/libmariadb/mariadb_async.c ${CC_SOURCE_DIR}/libmariadb/ma_context.c)

add_library(mariadbclient STATIC ${LIBMARIADB_SOURCES})
target_link_libraries(mariadbclient ${SYSTEM_LIBS})

target_include_directories(mariadbclient
PRIVATE ${CC_BINARY_DIR}/include-private
PUBLIC ${CC_BINARY_DIR}/include-public ${CC_SOURCE_DIR}/include ${CC_SOURCE_DIR}/libmariadb)

set_target_properties(mariadbclient PROPERTIES IMPORTED_INTERFACE_LINK_LIBRARIES "${SYSTEM_LIBS}")
@@ -1,39 +1,238 @@
set(protobuf_SOURCE_DIR "${ClickHouse_SOURCE_DIR}/contrib/protobuf")
set(protobuf_BINARY_DIR "${ClickHouse_BINARY_DIR}/contrib/protobuf")
set(protobuf_source_dir "${ClickHouse_SOURCE_DIR}/contrib/protobuf")
set(protobuf_binary_dir "${ClickHouse_BINARY_DIR}/contrib/protobuf")

set(protobuf_WITH_ZLIB 0 CACHE INTERNAL "" FORCE) # actually will use zlib, but skip find
set(protobuf_BUILD_TESTS OFF CACHE INTERNAL "" FORCE)

if (MAKE_STATIC_LIBRARIES)
set(protobuf_BUILD_SHARED_LIBS OFF CACHE INTERNAL "" FORCE)
else ()
set(protobuf_BUILD_SHARED_LIBS ON CACHE INTERNAL "" FORCE)
endif ()
add_definitions(-DGOOGLE_PROTOBUF_CMAKE_BUILD)

if (CMAKE_CROSSCOMPILING)
# Will build 'protoc' for host arch instead of cross-compiling
set(protobuf_BUILD_PROTOC_BINARIES OFF CACHE INTERNAL "" FORCE)
endif ()
add_definitions(-DHAVE_PTHREAD)
add_definitions(-DHAVE_ZLIB)

add_subdirectory("${protobuf_SOURCE_DIR}/cmake" "${protobuf_BINARY_DIR}")
include_directories(
${ZLIB_INCLUDE_DIRECTORIES}
${protobuf_binary_dir}
${protobuf_source_dir}/src)

if (ENABLE_FUZZING)
# `protoc` will be built with sanitizer and it could fail during ClickHouse build
# It easily reproduces in oss-fuzz building pipeline
# To avoid this we can try to build `protoc` without any sanitizer with option `-fno-sanitize=all`, but
# it this case we will face with linker errors, because libcxx still will be built with sanitizer
# So, we can simply suppress all of these failures with a combination this flag and an environment variable
# export MSAN_OPTIONS=exit_code=0
target_compile_options(protoc PRIVATE "-fsanitize-recover=all")
set(libprotobuf_lite_files
${protobuf_source_dir}/src/google/protobuf/any_lite.cc
${protobuf_source_dir}/src/google/protobuf/arena.cc
${protobuf_source_dir}/src/google/protobuf/arenastring.cc
${protobuf_source_dir}/src/google/protobuf/extension_set.cc
${protobuf_source_dir}/src/google/protobuf/field_access_listener.cc
${protobuf_source_dir}/src/google/protobuf/generated_enum_util.cc
${protobuf_source_dir}/src/google/protobuf/generated_message_table_driven_lite.cc
${protobuf_source_dir}/src/google/protobuf/generated_message_util.cc
${protobuf_source_dir}/src/google/protobuf/implicit_weak_message.cc
${protobuf_source_dir}/src/google/protobuf/io/coded_stream.cc
${protobuf_source_dir}/src/google/protobuf/io/io_win32.cc
${protobuf_source_dir}/src/google/protobuf/io/strtod.cc
${protobuf_source_dir}/src/google/protobuf/io/zero_copy_stream.cc
${protobuf_source_dir}/src/google/protobuf/io/zero_copy_stream_impl.cc
${protobuf_source_dir}/src/google/protobuf/io/zero_copy_stream_impl_lite.cc
${protobuf_source_dir}/src/google/protobuf/map.cc
${protobuf_source_dir}/src/google/protobuf/message_lite.cc
${protobuf_source_dir}/src/google/protobuf/parse_context.cc
${protobuf_source_dir}/src/google/protobuf/repeated_field.cc
${protobuf_source_dir}/src/google/protobuf/stubs/bytestream.cc
${protobuf_source_dir}/src/google/protobuf/stubs/common.cc
${protobuf_source_dir}/src/google/protobuf/stubs/int128.cc
${protobuf_source_dir}/src/google/protobuf/stubs/status.cc
${protobuf_source_dir}/src/google/protobuf/stubs/statusor.cc
${protobuf_source_dir}/src/google/protobuf/stubs/stringpiece.cc
${protobuf_source_dir}/src/google/protobuf/stubs/stringprintf.cc
${protobuf_source_dir}/src/google/protobuf/stubs/structurally_valid.cc
${protobuf_source_dir}/src/google/protobuf/stubs/strutil.cc
${protobuf_source_dir}/src/google/protobuf/stubs/time.cc
${protobuf_source_dir}/src/google/protobuf/wire_format_lite.cc
)

add_library(libprotobuf-lite ${libprotobuf_lite_files})
target_link_libraries(libprotobuf-lite pthread)
if(${CMAKE_SYSTEM_NAME} STREQUAL "Android")
target_link_libraries(libprotobuf-lite log)
endif()
target_include_directories(libprotobuf-lite SYSTEM PUBLIC ${protobuf_source_dir}/src)
add_library(protobuf::libprotobuf-lite ALIAS libprotobuf-lite)

# We don't want to stop compilation on warnings in protobuf's headers.
# The following line overrides the value assigned by the command target_include_directories() in libprotobuf.cmake
set_property(TARGET libprotobuf PROPERTY INTERFACE_SYSTEM_INCLUDE_DIRECTORIES "${protobuf_SOURCE_DIR}/src")

if (CMAKE_CROSSCOMPILING)
set(libprotobuf_files
${protobuf_source_dir}/src/google/protobuf/any.cc
${protobuf_source_dir}/src/google/protobuf/any.pb.cc
${protobuf_source_dir}/src/google/protobuf/api.pb.cc
${protobuf_source_dir}/src/google/protobuf/compiler/importer.cc
${protobuf_source_dir}/src/google/protobuf/compiler/parser.cc
${protobuf_source_dir}/src/google/protobuf/descriptor.cc
${protobuf_source_dir}/src/google/protobuf/descriptor.pb.cc
${protobuf_source_dir}/src/google/protobuf/descriptor_database.cc
${protobuf_source_dir}/src/google/protobuf/duration.pb.cc
${protobuf_source_dir}/src/google/protobuf/dynamic_message.cc
${protobuf_source_dir}/src/google/protobuf/empty.pb.cc
${protobuf_source_dir}/src/google/protobuf/extension_set_heavy.cc
${protobuf_source_dir}/src/google/protobuf/field_mask.pb.cc
${protobuf_source_dir}/src/google/protobuf/generated_message_reflection.cc
${protobuf_source_dir}/src/google/protobuf/generated_message_table_driven.cc
${protobuf_source_dir}/src/google/protobuf/io/gzip_stream.cc
${protobuf_source_dir}/src/google/protobuf/io/printer.cc
${protobuf_source_dir}/src/google/protobuf/io/tokenizer.cc
${protobuf_source_dir}/src/google/protobuf/map_field.cc
${protobuf_source_dir}/src/google/protobuf/message.cc
${protobuf_source_dir}/src/google/protobuf/reflection_ops.cc
${protobuf_source_dir}/src/google/protobuf/service.cc
${protobuf_source_dir}/src/google/protobuf/source_context.pb.cc
${protobuf_source_dir}/src/google/protobuf/struct.pb.cc
${protobuf_source_dir}/src/google/protobuf/stubs/substitute.cc
${protobuf_source_dir}/src/google/protobuf/text_format.cc
${protobuf_source_dir}/src/google/protobuf/timestamp.pb.cc
${protobuf_source_dir}/src/google/protobuf/type.pb.cc
${protobuf_source_dir}/src/google/protobuf/unknown_field_set.cc
${protobuf_source_dir}/src/google/protobuf/util/delimited_message_util.cc
${protobuf_source_dir}/src/google/protobuf/util/field_comparator.cc
${protobuf_source_dir}/src/google/protobuf/util/field_mask_util.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/datapiece.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/default_value_objectwriter.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/error_listener.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/field_mask_utility.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/json_escaping.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/json_objectwriter.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/json_stream_parser.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/object_writer.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/proto_writer.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/protostream_objectsource.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/protostream_objectwriter.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/type_info.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/type_info_test_helper.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/utility.cc
${protobuf_source_dir}/src/google/protobuf/util/json_util.cc
${protobuf_source_dir}/src/google/protobuf/util/message_differencer.cc
${protobuf_source_dir}/src/google/protobuf/util/time_util.cc
${protobuf_source_dir}/src/google/protobuf/util/type_resolver_util.cc
${protobuf_source_dir}/src/google/protobuf/wire_format.cc
${protobuf_source_dir}/src/google/protobuf/wrappers.pb.cc
)

add_library(libprotobuf ${libprotobuf_lite_files} ${libprotobuf_files})
if (ENABLE_FUZZING)
target_compile_options(libprotobuf PRIVATE "-fsanitize-recover=all")
endif()
target_link_libraries(libprotobuf pthread)
target_link_libraries(libprotobuf ${ZLIB_LIBRARIES})
if(${CMAKE_SYSTEM_NAME} STREQUAL "Android")
target_link_libraries(libprotobuf log)
endif()
target_include_directories(libprotobuf SYSTEM PUBLIC ${protobuf_source_dir}/src)
add_library(protobuf::libprotobuf ALIAS libprotobuf)

set(libprotoc_files
${protobuf_source_dir}/src/google/protobuf/compiler/code_generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/command_line_interface.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_enum.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_enum_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_extension.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_file.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_helpers.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_map_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_message.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_message_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_padding_optimizer.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_parse_function_generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_primitive_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_service.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_string_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_doc_comment.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_enum.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_enum_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_field_base.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_helpers.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_map_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_message.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_message_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_primitive_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_reflection_class.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_repeated_enum_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_repeated_message_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_repeated_primitive_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_source_generator_base.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_wrapper_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_context.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_doc_comment.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_enum.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_enum_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_enum_field_lite.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_enum_lite.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_extension.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_extension_lite.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_file.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_generator_factory.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_helpers.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_kotlin_generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_map_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_map_field_lite.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_message.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_message_builder.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_message_builder_lite.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_message_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_message_field_lite.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_message_lite.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_name_resolver.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_primitive_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_primitive_field_lite.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_service.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_shared_code_generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_string_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_string_field_lite.cc
${protobuf_source_dir}/src/google/protobuf/compiler/js/js_generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/js/well_known_types_embed.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_enum.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_enum_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_extension.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_file.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_helpers.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_map_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_message.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_message_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_oneof.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_primitive_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/php/php_generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/plugin.cc
${protobuf_source_dir}/src/google/protobuf/compiler/plugin.pb.cc
${protobuf_source_dir}/src/google/protobuf/compiler/python/python_generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/ruby/ruby_generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/subprocess.cc
${protobuf_source_dir}/src/google/protobuf/compiler/zip_writer.cc
)

add_library(libprotoc ${libprotoc_files})
target_link_libraries(libprotoc libprotobuf)
add_library(protobuf::libprotoc ALIAS libprotoc)

set(protoc_files ${protobuf_source_dir}/src/google/protobuf/compiler/main.cc)

if (CMAKE_HOST_SYSTEM_NAME STREQUAL CMAKE_SYSTEM_NAME
AND CMAKE_HOST_SYSTEM_PROCESSOR STREQUAL CMAKE_SYSTEM_PROCESSOR)

add_executable(protoc ${protoc_files})
target_link_libraries(protoc libprotoc libprotobuf pthread)
add_executable(protobuf::protoc ALIAS protoc)

if (ENABLE_FUZZING)
# `protoc` will be built with sanitizer and it could fail during ClickHouse build
# It easily reproduces in oss-fuzz building pipeline
# To avoid this we can try to build `protoc` without any sanitizer with option `-fno-sanitize=all`, but
# it this case we will face with linker errors, because libcxx still will be built with sanitizer
# So, we can simply suppress all of these failures with a combination this flag and an environment variable
# export MSAN_OPTIONS=exit_code=0
target_compile_options(protoc PRIVATE "-fsanitize-recover=all")
endif()
else ()
# Build 'protoc' for host arch
set (PROTOC_BUILD_DIR "${protobuf_BINARY_DIR}/build")
set (PROTOC_BUILD_DIR "${protobuf_binary_dir}/build")

if (NOT EXISTS "${PROTOC_BUILD_DIR}/protoc")

@@ -53,7 +252,7 @@ if (CMAKE_CROSSCOMPILING)
"-Dprotobuf_BUILD_CONFORMANCE=0"
"-Dprotobuf_BUILD_EXAMPLES=0"
"-Dprotobuf_BUILD_PROTOC_BINARIES=1"
"${protobuf_SOURCE_DIR}/cmake"
"${protobuf_source_dir}/cmake"
WORKING_DIRECTORY "${PROTOC_BUILD_DIR}"
COMMAND_ECHO STDOUT)

@@ -78,7 +277,7 @@ if (CMAKE_CROSSCOMPILING)
# -Dprotobuf_BUILD_CONFORMANCE=0
# -Dprotobuf_BUILD_EXAMPLES=0
# -Dprotobuf_BUILD_PROTOC_BINARIES=1
# "${protobuf_SOURCE_DIR}/cmake"
# "${protobuf_source_dir}/cmake"
#
# DEPENDS "${PROTOC_BUILD_DIR}"
# WORKING_DIRECTORY "${PROTOC_BUILD_DIR}"
@@ -97,5 +296,4 @@ if (CMAKE_CROSSCOMPILING)
add_executable(protoc IMPORTED GLOBAL)
set_target_properties (protoc PROPERTIES IMPORTED_LOCATION "${PROTOC_BUILD_DIR}/protoc")
add_dependencies(protoc "${PROTOC_BUILD_DIR}/protoc")

endif ()
Some files were not shown because too many files have changed in this diff.