Merge branch 'master' into stress-test

mergify[bot] 2021-11-25 11:09:26 +00:00 committed by GitHub
commit ce0c112586
910 changed files with 21169 additions and 5652 deletions


@@ -1,7 +1,7 @@
name: Cancel
on: # yamllint disable-line rule:truthy
workflow_run:
workflows: ["CIGithubActions"]
workflows: ["CIGithubActions", "ReleaseCI"]
types:
- requested
jobs:
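
The hunk above only widens the trigger of the Cancel workflow: runs of the new ReleaseCI workflow (added later in this commit) can now be cancelled as well. The job body is outside this hunk; the following is a minimal sketch of how such a workflow_run-driven cancel job is commonly wired, and the action name, version, and inputs are assumptions rather than content of this commit.

# Hypothetical sketch only, not taken from this diff.
name: Cancel
on:  # yamllint disable-line rule:truthy
  workflow_run:
    workflows: ["CIGithubActions", "ReleaseCI"]
    types:
      - requested
jobs:
  cancel:
    runs-on: ubuntu-latest
    steps:
      - uses: styfle/cancel-workflow-action@0.9.1  # assumed cancel action
        with:
          all_but_latest: true
          workflow_id: ${{ github.event.workflow.id }}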


@@ -152,7 +152,7 @@ jobs:
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci && python3 compatibility_check.py 0
cd $REPO_COPY/tests/ci && python3 compatibility_check.py
- name: Cleanup
if: always()
run: |
@@ -186,11 +186,11 @@ jobs:
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
##########################################################################################
##################################### ORDINARY BUILDS ####################################
##########################################################################################
#########################################################################################
#################################### ORDINARY BUILDS ####################################
#########################################################################################
BuilderDebRelease:
needs: DockerHubPush
needs: [DockerHubPush, FastTest]
if: ${{ !contains(github.event.pull_request.labels.*.name, 'pr-documentation') && !contains(github.event.pull_request.labels.*.name, 'pr-doc-fix') }}
runs-on: [self-hosted, builder]
steps:
@@ -229,7 +229,7 @@ jobs:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
BuilderBinRelease:
needs: DockerHubPush
needs: [DockerHubPush, FastTest]
if: ${{ !contains(github.event.pull_request.labels.*.name, 'pr-documentation') && !contains(github.event.pull_request.labels.*.name, 'pr-doc-fix') }}
runs-on: [self-hosted, builder]
steps:
@@ -250,7 +250,7 @@ jobs:
REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
CACHES_PATH: ${{runner.temp}}/../ccaches
CHECK_NAME: 'ClickHouse build check (actions)'
BUILD_NUMBER: 9
BUILD_NUMBER: 8
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
@@ -268,7 +268,7 @@ jobs:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
BuilderDebAsan:
needs: DockerHubPush
needs: [DockerHubPush, FastTest]
if: ${{ !contains(github.event.pull_request.labels.*.name, 'pr-documentation') && !contains(github.event.pull_request.labels.*.name, 'pr-doc-fix') }}
runs-on: [self-hosted, builder]
steps:
@@ -307,7 +307,7 @@ jobs:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
BuilderDebUBsan:
needs: DockerHubPush
needs: [DockerHubPush, FastTest]
if: ${{ !contains(github.event.pull_request.labels.*.name, 'pr-documentation') && !contains(github.event.pull_request.labels.*.name, 'pr-doc-fix') }}
runs-on: [self-hosted, builder]
steps:
@@ -346,7 +346,7 @@ jobs:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
BuilderDebTsan:
needs: DockerHubPush
needs: [DockerHubPush, FastTest]
if: ${{ !contains(github.event.pull_request.labels.*.name, 'pr-documentation') && !contains(github.event.pull_request.labels.*.name, 'pr-doc-fix') }}
runs-on: [self-hosted, builder]
steps:
@@ -385,7 +385,7 @@ jobs:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
BuilderDebMsan:
needs: DockerHubPush
needs: [DockerHubPush, FastTest]
if: ${{ !contains(github.event.pull_request.labels.*.name, 'pr-documentation') && !contains(github.event.pull_request.labels.*.name, 'pr-doc-fix') }}
runs-on: [self-hosted, builder]
steps:
@@ -424,7 +424,7 @@ jobs:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
BuilderDebDebug:
needs: DockerHubPush
needs: [DockerHubPush, FastTest]
if: ${{ !contains(github.event.pull_request.labels.*.name, 'pr-documentation') && !contains(github.event.pull_request.labels.*.name, 'pr-doc-fix') }}
runs-on: [self-hosted, builder]
steps:
@@ -466,7 +466,7 @@ jobs:
##################################### SPECIAL BUILDS #####################################
##########################################################################################
BuilderDebSplitted:
needs: DockerHubPush
needs: [DockerHubPush, FastTest]
if: ${{ !contains(github.event.pull_request.labels.*.name, 'pr-documentation') && !contains(github.event.pull_request.labels.*.name, 'pr-doc-fix') }}
runs-on: [self-hosted, builder]
steps:
@@ -604,7 +604,9 @@ jobs:
sudo rm -fr $TEMP_PATH
FunctionalStatelessTestTsan:
needs: [BuilderDebTsan]
runs-on: [self-hosted, func-tester]
# tests can consume more than 60GB of memory,
# so use bigger server
runs-on: [self-hosted, stress-tester]
steps:
- name: Download json reports
uses: actions/download-artifact@v2
@@ -1210,38 +1212,124 @@ jobs:
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
# IntegrationTestsAsan:
# needs: [BuilderDebAsan]
# runs-on: [self-hosted, stress-tester]
# steps:
# - name: Download json reports
# uses: actions/download-artifact@v2
# with:
# path: ${{runner.temp}}/reports_dir
# - name: Check out repository code
# uses: actions/checkout@v2
# - name: Integration test
# env:
# TEMP_PATH: ${{runner.temp}}/integration_tests_asan
# REPORTS_PATH: ${{runner.temp}}/reports_dir
# CHECK_NAME: 'Integration tests (asan, actions)'
# REPO_COPY: ${{runner.temp}}/integration_tests_asan/ClickHouse
# run: |
# sudo rm -fr $TEMP_PATH
# mkdir -p $TEMP_PATH
# cp -r $GITHUB_WORKSPACE $TEMP_PATH
# cd $REPO_COPY/tests/ci
# python3 integration_test_check.py "$CHECK_NAME"
# - name: Cleanup
# if: always()
# run: |
# docker kill $(docker ps -q) ||:
# docker rm -f $(docker ps -a -q) ||:
# sudo rm -fr $TEMP_PATH
##############################################################################################
##################################### UNIT TESTS #############################################
##############################################################################################
#############################################################################################
############################# INTEGRATION TESTS #############################################
#############################################################################################
IntegrationTestsAsan:
needs: [BuilderDebAsan, FunctionalStatelessTestAsan]
runs-on: [self-hosted, stress-tester]
steps:
- name: Download json reports
uses: actions/download-artifact@v2
with:
path: ${{runner.temp}}/reports_dir
- name: Check out repository code
uses: actions/checkout@v2
- name: Integration test
env:
TEMP_PATH: ${{runner.temp}}/integration_tests_asan
REPORTS_PATH: ${{runner.temp}}/reports_dir
CHECK_NAME: 'Integration tests (asan, actions)'
REPO_COPY: ${{runner.temp}}/integration_tests_asan/ClickHouse
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci
python3 integration_test_check.py "$CHECK_NAME"
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
IntegrationTestsTsan:
needs: [BuilderDebTsan, FunctionalStatelessTestTsan]
runs-on: [self-hosted, stress-tester]
steps:
- name: Download json reports
uses: actions/download-artifact@v2
with:
path: ${{runner.temp}}/reports_dir
- name: Check out repository code
uses: actions/checkout@v2
- name: Integration test
env:
TEMP_PATH: ${{runner.temp}}/integration_tests_tsan
REPORTS_PATH: ${{runner.temp}}/reports_dir
CHECK_NAME: 'Integration tests (thread, actions)'
REPO_COPY: ${{runner.temp}}/integration_tests_tsan/ClickHouse
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci
python3 integration_test_check.py "$CHECK_NAME"
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
IntegrationTestsRelease:
needs: [BuilderDebRelease, FunctionalStatelessTestRelease]
runs-on: [self-hosted, stress-tester]
steps:
- name: Download json reports
uses: actions/download-artifact@v2
with:
path: ${{runner.temp}}/reports_dir
- name: Check out repository code
uses: actions/checkout@v2
- name: Integration test
env:
TEMP_PATH: ${{runner.temp}}/integration_tests_release
REPORTS_PATH: ${{runner.temp}}/reports_dir
CHECK_NAME: 'Integration tests (release, actions)'
REPO_COPY: ${{runner.temp}}/integration_tests_release/ClickHouse
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci
python3 integration_test_check.py "$CHECK_NAME"
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
IntegrationTestsFlakyCheck:
needs: [BuilderDebAsan]
runs-on: [self-hosted, stress-tester]
steps:
- name: Download json reports
uses: actions/download-artifact@v2
with:
path: ${{runner.temp}}/reports_dir
- name: Check out repository code
uses: actions/checkout@v2
- name: Integration test
env:
TEMP_PATH: ${{runner.temp}}/integration_tests_asan_flaky_check
REPORTS_PATH: ${{runner.temp}}/reports_dir
CHECK_NAME: 'Integration tests flaky check (asan, actions)'
REPO_COPY: ${{runner.temp}}/integration_tests_asan_flaky_check/ClickHouse
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci
python3 integration_test_check.py "$CHECK_NAME"
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
#############################################################################################
#################################### UNIT TESTS #############################################
#############################################################################################
UnitTestsAsan:
needs: [BuilderDebAsan]
runs-on: [self-hosted, func-tester]
@@ -1368,7 +1456,7 @@ jobs:
env:
TEMP_PATH: ${{runner.temp}}/unit_tests_ubsan
REPORTS_PATH: ${{runner.temp}}/reports_dir
CHECK_NAME: 'Unit tests (msan, actions)'
CHECK_NAME: 'Unit tests (ubsan, actions)'
REPO_COPY: ${{runner.temp}}/unit_tests_ubsan/ClickHouse
run: |
sudo rm -fr $TEMP_PATH
@@ -1412,6 +1500,9 @@ jobs:
- ASTFuzzerTestTsan
- ASTFuzzerTestMSan
- ASTFuzzerTestUBSan
- IntegrationTestsAsan
- IntegrationTestsRelease
- IntegrationTestsTsan
- PVSCheck
- UnitTestsAsan
- UnitTestsTsan
@@ -1420,6 +1511,7 @@ jobs:
- UnitTestsReleaseClang
- SplitBuildSmokeTest
- CompatibilityCheck
- IntegrationTestsFlakyCheck
runs-on: [self-hosted, style-checker]
steps:
- name: Check out repository code
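
A change that repeats across the builder jobs above is `needs: DockerHubPush` becoming `needs: [DockerHubPush, FastTest]`, so builds are skipped when the fast test has already failed for the pull request. A minimal sketch of the resulting gating, with placeholder step bodies instead of the real check scripts:

# Illustration only; the echo steps stand in for the real CI scripts.
jobs:
  DockerHubPush:
    runs-on: [self-hosted, style-checker]
    steps:
      - run: echo "check and push docker images"
  FastTest:
    runs-on: [self-hosted, builder]
    steps:
      - run: echo "run the fast test"
  BuilderDebRelease:
    needs: [DockerHubPush, FastTest]  # previously: needs: DockerHubPush
    runs-on: [self-hosted, builder]
    steps:
      - run: echo "runs only after both jobs above have succeeded"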

.github/workflows/master.yml (new file, 1384 lines)
File diff suppressed because it is too large

.github/workflows/release_branches.yml (new file, 933 lines)

@@ -0,0 +1,933 @@
name: ReleaseCI
on: # yamllint disable-line rule:truthy
push:
branches:
- '21.**'
- '22.**'
- '23.**'
- '24.**'
- 'backport/**'
jobs:
DockerHubPush:
runs-on: [self-hosted, style-checker]
steps:
- name: Check out repository code
uses: actions/checkout@v2
- name: Images check
run: |
cd $GITHUB_WORKSPACE/tests/ci
python3 docker_images_check.py
- name: Upload images files to artifacts
uses: actions/upload-artifact@v2
with:
name: changed_images
path: ${{ runner.temp }}/docker_images_check/changed_images.json
CompatibilityCheck:
needs: [BuilderDebRelease]
runs-on: [self-hosted, style-checker]
steps:
- name: Check out repository code
uses: actions/checkout@v2
- name: Download json reports
uses: actions/download-artifact@v2
with:
path: ${{runner.temp}}/reports_dir
- name: CompatibilityCheck
env:
TEMP_PATH: ${{runner.temp}}/compatibility_check
REPO_COPY: ${{runner.temp}}/compatibility_check/ClickHouse
REPORTS_PATH: ${{runner.temp}}/reports_dir
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci && python3 compatibility_check.py 0
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
#########################################################################################
#################################### ORDINARY BUILDS ####################################
#########################################################################################
BuilderDebRelease:
needs: [DockerHubPush]
runs-on: [self-hosted, builder]
steps:
- name: Download changed images
uses: actions/download-artifact@v2
with:
name: changed_images
path: ${{ runner.temp }}/images_path
- name: Check out repository code
uses: actions/checkout@v2
with:
submodules: 'recursive'
fetch-depth: 0 # otherwise we will have no info about contributors
- name: Build
env:
TEMP_PATH: ${{runner.temp}}/build_check
IMAGES_PATH: ${{runner.temp}}/images_path
REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
CACHES_PATH: ${{runner.temp}}/../ccaches
CHECK_NAME: 'ClickHouse build check (actions)'
BUILD_NUMBER: 0
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER
- name: Upload build URLs to artifacts
uses: actions/upload-artifact@v2
with:
name: ${{ env.BUILD_NAME }}
path: ${{ runner.temp }}/build_check/${{ env.BUILD_NAME }}.json
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
BuilderDebAsan:
needs: [DockerHubPush]
runs-on: [self-hosted, builder]
steps:
- name: Download changed images
uses: actions/download-artifact@v2
with:
name: changed_images
path: ${{ runner.temp }}/images_path
- name: Check out repository code
uses: actions/checkout@v2
with:
submodules: 'recursive'
fetch-depth: 0 # otherwise we will have no info about contributors
- name: Build
env:
TEMP_PATH: ${{runner.temp}}/build_check
IMAGES_PATH: ${{runner.temp}}/images_path
REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
CACHES_PATH: ${{runner.temp}}/../ccaches
CHECK_NAME: 'ClickHouse build check (actions)'
BUILD_NUMBER: 3
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER
- name: Upload build URLs to artifacts
uses: actions/upload-artifact@v2
with:
name: ${{ env.BUILD_NAME }}
path: ${{ runner.temp }}/build_check/${{ env.BUILD_NAME }}.json
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
BuilderDebUBsan:
needs: [DockerHubPush]
runs-on: [self-hosted, builder]
steps:
- name: Download changed images
uses: actions/download-artifact@v2
with:
name: changed_images
path: ${{ runner.temp }}/images_path
- name: Check out repository code
uses: actions/checkout@v2
with:
submodules: 'recursive'
fetch-depth: 0 # otherwise we will have no info about contributors
- name: Build
env:
TEMP_PATH: ${{runner.temp}}/build_check
IMAGES_PATH: ${{runner.temp}}/images_path
REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
CACHES_PATH: ${{runner.temp}}/../ccaches
CHECK_NAME: 'ClickHouse build check (actions)'
BUILD_NUMBER: 4
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER
- name: Upload build URLs to artifacts
uses: actions/upload-artifact@v2
with:
name: ${{ env.BUILD_NAME }}
path: ${{ runner.temp }}/build_check/${{ env.BUILD_NAME }}.json
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
BuilderDebTsan:
needs: [DockerHubPush]
runs-on: [self-hosted, builder]
steps:
- name: Download changed images
uses: actions/download-artifact@v2
with:
name: changed_images
path: ${{ runner.temp }}/images_path
- name: Check out repository code
uses: actions/checkout@v2
with:
submodules: 'recursive'
fetch-depth: 0 # otherwise we will have no info about contributors
- name: Build
env:
TEMP_PATH: ${{runner.temp}}/build_check
IMAGES_PATH: ${{runner.temp}}/images_path
REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
CACHES_PATH: ${{runner.temp}}/../ccaches
CHECK_NAME: 'ClickHouse build check (actions)'
BUILD_NUMBER: 5
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER
- name: Upload build URLs to artifacts
uses: actions/upload-artifact@v2
with:
name: ${{ env.BUILD_NAME }}
path: ${{ runner.temp }}/build_check/${{ env.BUILD_NAME }}.json
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
BuilderDebMsan:
needs: [DockerHubPush]
runs-on: [self-hosted, builder]
steps:
- name: Download changed images
uses: actions/download-artifact@v2
with:
name: changed_images
path: ${{ runner.temp }}/images_path
- name: Check out repository code
uses: actions/checkout@v2
with:
submodules: 'recursive'
fetch-depth: 0 # otherwise we will have no info about contributors
- name: Build
env:
TEMP_PATH: ${{runner.temp}}/build_check
IMAGES_PATH: ${{runner.temp}}/images_path
REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
CACHES_PATH: ${{runner.temp}}/../ccaches
CHECK_NAME: 'ClickHouse build check (actions)'
BUILD_NUMBER: 6
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER
- name: Upload build URLs to artifacts
uses: actions/upload-artifact@v2
with:
name: ${{ env.BUILD_NAME }}
path: ${{ runner.temp }}/build_check/${{ env.BUILD_NAME }}.json
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
BuilderDebDebug:
needs: [DockerHubPush]
runs-on: [self-hosted, builder]
steps:
- name: Download changed images
uses: actions/download-artifact@v2
with:
name: changed_images
path: ${{ runner.temp }}/images_path
- name: Check out repository code
uses: actions/checkout@v2
with:
submodules: 'recursive'
fetch-depth: 0 # otherwise we will have no info about contributors
- name: Build
env:
TEMP_PATH: ${{runner.temp}}/build_check
IMAGES_PATH: ${{runner.temp}}/images_path
REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
CACHES_PATH: ${{runner.temp}}/../ccaches
CHECK_NAME: 'ClickHouse build check (actions)'
BUILD_NUMBER: 7
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER
- name: Upload build URLs to artifacts
uses: actions/upload-artifact@v2
with:
name: ${{ env.BUILD_NAME }}
path: ${{ runner.temp }}/build_check/${{ env.BUILD_NAME }}.json
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
############################################################################################
##################################### BUILD REPORTER #######################################
############################################################################################
BuilderReport:
needs:
- BuilderDebRelease
- BuilderDebAsan
- BuilderDebTsan
- BuilderDebUBsan
- BuilderDebMsan
- BuilderDebDebug
runs-on: [self-hosted, style-checker]
steps:
- name: Download json reports
uses: actions/download-artifact@v2
with:
path: ${{runner.temp}}/reports_dir
- name: Check out repository code
uses: actions/checkout@v2
- name: Report Builder
env:
TEMP_PATH: ${{runner.temp}}/report_check
REPORTS_PATH: ${{runner.temp}}/reports_dir
CHECK_NAME: 'ClickHouse build check (actions)'
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cd $GITHUB_WORKSPACE/tests/ci
python3 build_report_check.py "$CHECK_NAME"
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
##############################################################################################
########################### FUNCTIONAl STATELESS TESTS #######################################
##############################################################################################
FunctionalStatelessTestRelease:
needs: [BuilderDebRelease]
runs-on: [self-hosted, func-tester]
steps:
- name: Download json reports
uses: actions/download-artifact@v2
with:
path: ${{runner.temp}}/reports_dir
- name: Check out repository code
uses: actions/checkout@v2
- name: Functional test
env:
TEMP_PATH: ${{runner.temp}}/stateless_debug
REPORTS_PATH: ${{runner.temp}}/reports_dir
CHECK_NAME: 'Stateless tests (release, actions)'
REPO_COPY: ${{runner.temp}}/stateless_debug/ClickHouse
KILL_TIMEOUT: 10800
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci
python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
FunctionalStatelessTestAsan:
needs: [BuilderDebAsan]
runs-on: [self-hosted, func-tester]
steps:
- name: Download json reports
uses: actions/download-artifact@v2
with:
path: ${{runner.temp}}/reports_dir
- name: Check out repository code
uses: actions/checkout@v2
- name: Functional test
env:
TEMP_PATH: ${{runner.temp}}/stateless_debug
REPORTS_PATH: ${{runner.temp}}/reports_dir
CHECK_NAME: 'Stateless tests (address, actions)'
REPO_COPY: ${{runner.temp}}/stateless_debug/ClickHouse
KILL_TIMEOUT: 10800
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci
python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
FunctionalStatelessTestTsan:
needs: [BuilderDebTsan]
runs-on: [self-hosted, func-tester]
steps:
- name: Download json reports
uses: actions/download-artifact@v2
with:
path: ${{runner.temp}}/reports_dir
- name: Check out repository code
uses: actions/checkout@v2
- name: Functional test
env:
TEMP_PATH: ${{runner.temp}}/stateless_tsan
REPORTS_PATH: ${{runner.temp}}/reports_dir
CHECK_NAME: 'Stateless tests (thread, actions)'
REPO_COPY: ${{runner.temp}}/stateless_tsan/ClickHouse
KILL_TIMEOUT: 10800
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci
python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
FunctionalStatelessTestUBsan:
needs: [BuilderDebUBsan]
runs-on: [self-hosted, func-tester]
steps:
- name: Download json reports
uses: actions/download-artifact@v2
with:
path: ${{runner.temp}}/reports_dir
- name: Check out repository code
uses: actions/checkout@v2
- name: Functional test
env:
TEMP_PATH: ${{runner.temp}}/stateless_ubsan
REPORTS_PATH: ${{runner.temp}}/reports_dir
CHECK_NAME: 'Stateless tests (ubsan, actions)'
REPO_COPY: ${{runner.temp}}/stateless_ubsan/ClickHouse
KILL_TIMEOUT: 10800
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci
python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
FunctionalStatelessTestMsan:
needs: [BuilderDebMsan]
runs-on: [self-hosted, func-tester]
steps:
- name: Download json reports
uses: actions/download-artifact@v2
with:
path: ${{runner.temp}}/reports_dir
- name: Check out repository code
uses: actions/checkout@v2
- name: Functional test
env:
TEMP_PATH: ${{runner.temp}}/stateless_memory
REPORTS_PATH: ${{runner.temp}}/reports_dir
CHECK_NAME: 'Stateless tests (memory, actions)'
REPO_COPY: ${{runner.temp}}/stateless_memory/ClickHouse
KILL_TIMEOUT: 10800
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci
python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
FunctionalStatelessTestDebug:
needs: [BuilderDebDebug]
runs-on: [self-hosted, func-tester]
steps:
- name: Download json reports
uses: actions/download-artifact@v2
with:
path: ${{runner.temp}}/reports_dir
- name: Check out repository code
uses: actions/checkout@v2
- name: Functional test
env:
TEMP_PATH: ${{runner.temp}}/stateless_debug
REPORTS_PATH: ${{runner.temp}}/reports_dir
CHECK_NAME: 'Stateless tests (debug, actions)'
REPO_COPY: ${{runner.temp}}/stateless_debug/ClickHouse
KILL_TIMEOUT: 10800
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci
python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
##############################################################################################
############################ FUNCTIONAl STATEFUL TESTS #######################################
##############################################################################################
FunctionalStatefulTestRelease:
needs: [BuilderDebRelease]
runs-on: [self-hosted, func-tester]
steps:
- name: Download json reports
uses: actions/download-artifact@v2
with:
path: ${{runner.temp}}/reports_dir
- name: Check out repository code
uses: actions/checkout@v2
- name: Functional test
env:
TEMP_PATH: ${{runner.temp}}/stateful_debug
REPORTS_PATH: ${{runner.temp}}/reports_dir
CHECK_NAME: 'Stateful tests (release, actions)'
REPO_COPY: ${{runner.temp}}/stateful_debug/ClickHouse
KILL_TIMEOUT: 3600
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci
python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
FunctionalStatefulTestAsan:
needs: [BuilderDebAsan]
runs-on: [self-hosted, func-tester]
steps:
- name: Download json reports
uses: actions/download-artifact@v2
with:
path: ${{runner.temp}}/reports_dir
- name: Check out repository code
uses: actions/checkout@v2
- name: Functional test
env:
TEMP_PATH: ${{runner.temp}}/stateful_debug
REPORTS_PATH: ${{runner.temp}}/reports_dir
CHECK_NAME: 'Stateful tests (address, actions)'
REPO_COPY: ${{runner.temp}}/stateful_debug/ClickHouse
KILL_TIMEOUT: 3600
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci
python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
FunctionalStatefulTestTsan:
needs: [BuilderDebTsan]
runs-on: [self-hosted, func-tester]
steps:
- name: Download json reports
uses: actions/download-artifact@v2
with:
path: ${{runner.temp}}/reports_dir
- name: Check out repository code
uses: actions/checkout@v2
- name: Functional test
env:
TEMP_PATH: ${{runner.temp}}/stateful_tsan
REPORTS_PATH: ${{runner.temp}}/reports_dir
CHECK_NAME: 'Stateful tests (thread, actions)'
REPO_COPY: ${{runner.temp}}/stateful_tsan/ClickHouse
KILL_TIMEOUT: 3600
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci
python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
FunctionalStatefulTestMsan:
needs: [BuilderDebMsan]
runs-on: [self-hosted, func-tester]
steps:
- name: Download json reports
uses: actions/download-artifact@v2
with:
path: ${{runner.temp}}/reports_dir
- name: Check out repository code
uses: actions/checkout@v2
- name: Functional test
env:
TEMP_PATH: ${{runner.temp}}/stateful_msan
REPORTS_PATH: ${{runner.temp}}/reports_dir
CHECK_NAME: 'Stateful tests (memory, actions)'
REPO_COPY: ${{runner.temp}}/stateful_msan/ClickHouse
KILL_TIMEOUT: 3600
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci
python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
FunctionalStatefulTestUBsan:
needs: [BuilderDebUBsan]
runs-on: [self-hosted, func-tester]
steps:
- name: Download json reports
uses: actions/download-artifact@v2
with:
path: ${{runner.temp}}/reports_dir
- name: Check out repository code
uses: actions/checkout@v2
- name: Functional test
env:
TEMP_PATH: ${{runner.temp}}/stateful_ubsan
REPORTS_PATH: ${{runner.temp}}/reports_dir
CHECK_NAME: 'Stateful tests (ubsan, actions)'
REPO_COPY: ${{runner.temp}}/stateful_ubsan/ClickHouse
KILL_TIMEOUT: 3600
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci
python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
FunctionalStatefulTestDebug:
needs: [BuilderDebDebug]
runs-on: [self-hosted, func-tester]
steps:
- name: Download json reports
uses: actions/download-artifact@v2
with:
path: ${{runner.temp}}/reports_dir
- name: Check out repository code
uses: actions/checkout@v2
- name: Functional test
env:
TEMP_PATH: ${{runner.temp}}/stateful_debug
REPORTS_PATH: ${{runner.temp}}/reports_dir
CHECK_NAME: 'Stateful tests (debug, actions)'
REPO_COPY: ${{runner.temp}}/stateful_debug/ClickHouse
KILL_TIMEOUT: 3600
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci
python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
##############################################################################################
######################################### STRESS TESTS #######################################
##############################################################################################
StressTestAsan:
needs: [BuilderDebAsan]
runs-on: [self-hosted, stress-tester]
steps:
- name: Download json reports
uses: actions/download-artifact@v2
with:
path: ${{runner.temp}}/reports_dir
- name: Check out repository code
uses: actions/checkout@v2
- name: Stress test
env:
TEMP_PATH: ${{runner.temp}}/stress_thread
REPORTS_PATH: ${{runner.temp}}/reports_dir
CHECK_NAME: 'Stress test (address, actions)'
REPO_COPY: ${{runner.temp}}/stress_thread/ClickHouse
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci
python3 stress_check.py "$CHECK_NAME"
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
StressTestTsan:
needs: [BuilderDebTsan]
runs-on: [self-hosted, stress-tester]
steps:
- name: Download json reports
uses: actions/download-artifact@v2
with:
path: ${{runner.temp}}/reports_dir
- name: Check out repository code
uses: actions/checkout@v2
- name: Stress test
env:
TEMP_PATH: ${{runner.temp}}/stress_thread
REPORTS_PATH: ${{runner.temp}}/reports_dir
CHECK_NAME: 'Stress test (thread, actions)'
REPO_COPY: ${{runner.temp}}/stress_thread/ClickHouse
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci
python3 stress_check.py "$CHECK_NAME"
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
StressTestMsan:
needs: [BuilderDebMsan]
runs-on: [self-hosted, stress-tester]
steps:
- name: Download json reports
uses: actions/download-artifact@v2
with:
path: ${{runner.temp}}/reports_dir
- name: Check out repository code
uses: actions/checkout@v2
- name: Stress test
env:
TEMP_PATH: ${{runner.temp}}/stress_memory
REPORTS_PATH: ${{runner.temp}}/reports_dir
CHECK_NAME: 'Stress test (memory, actions)'
REPO_COPY: ${{runner.temp}}/stress_memory/ClickHouse
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci
python3 stress_check.py "$CHECK_NAME"
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
StressTestUBsan:
needs: [BuilderDebUBsan]
runs-on: [self-hosted, stress-tester]
steps:
- name: Download json reports
uses: actions/download-artifact@v2
with:
path: ${{runner.temp}}/reports_dir
- name: Check out repository code
uses: actions/checkout@v2
- name: Stress test
env:
TEMP_PATH: ${{runner.temp}}/stress_undefined
REPORTS_PATH: ${{runner.temp}}/reports_dir
CHECK_NAME: 'Stress test (undefined, actions)'
REPO_COPY: ${{runner.temp}}/stress_undefined/ClickHouse
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci
python3 stress_check.py "$CHECK_NAME"
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
StressTestDebug:
needs: [BuilderDebDebug]
runs-on: [self-hosted, stress-tester]
steps:
- name: Download json reports
uses: actions/download-artifact@v2
with:
path: ${{runner.temp}}/reports_dir
- name: Check out repository code
uses: actions/checkout@v2
- name: Stress test
env:
TEMP_PATH: ${{runner.temp}}/stress_debug
REPORTS_PATH: ${{runner.temp}}/reports_dir
CHECK_NAME: 'Stress test (debug, actions)'
REPO_COPY: ${{runner.temp}}/stress_debug/ClickHouse
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci
python3 stress_check.py "$CHECK_NAME"
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
#############################################################################################
############################# INTEGRATION TESTS #############################################
#############################################################################################
IntegrationTestsAsan:
needs: [BuilderDebAsan, FunctionalStatelessTestAsan]
runs-on: [self-hosted, stress-tester]
steps:
- name: Download json reports
uses: actions/download-artifact@v2
with:
path: ${{runner.temp}}/reports_dir
- name: Check out repository code
uses: actions/checkout@v2
- name: Integration test
env:
TEMP_PATH: ${{runner.temp}}/integration_tests_asan
REPORTS_PATH: ${{runner.temp}}/reports_dir
CHECK_NAME: 'Integration tests (asan, actions)'
REPO_COPY: ${{runner.temp}}/integration_tests_asan/ClickHouse
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci
python3 integration_test_check.py "$CHECK_NAME"
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
IntegrationTestsTsan:
needs: [BuilderDebTsan, FunctionalStatelessTestTsan]
runs-on: [self-hosted, stress-tester]
steps:
- name: Download json reports
uses: actions/download-artifact@v2
with:
path: ${{runner.temp}}/reports_dir
- name: Check out repository code
uses: actions/checkout@v2
- name: Integration test
env:
TEMP_PATH: ${{runner.temp}}/integration_tests_tsan
REPORTS_PATH: ${{runner.temp}}/reports_dir
CHECK_NAME: 'Integration tests (thread, actions)'
REPO_COPY: ${{runner.temp}}/integration_tests_tsan/ClickHouse
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci
python3 integration_test_check.py "$CHECK_NAME"
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
IntegrationTestsRelease:
needs: [BuilderDebRelease, FunctionalStatelessTestRelease]
runs-on: [self-hosted, stress-tester]
steps:
- name: Download json reports
uses: actions/download-artifact@v2
with:
path: ${{runner.temp}}/reports_dir
- name: Check out repository code
uses: actions/checkout@v2
- name: Integration test
env:
TEMP_PATH: ${{runner.temp}}/integration_tests_release
REPORTS_PATH: ${{runner.temp}}/reports_dir
CHECK_NAME: 'Integration tests (release, actions)'
REPO_COPY: ${{runner.temp}}/integration_tests_release/ClickHouse
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci
python3 integration_test_check.py "$CHECK_NAME"
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
FinishCheck:
needs:
- DockerHubPush
- BuilderReport
- FunctionalStatelessTestDebug
- FunctionalStatelessTestRelease
- FunctionalStatelessTestAsan
- FunctionalStatelessTestTsan
- FunctionalStatelessTestMsan
- FunctionalStatelessTestUBsan
- FunctionalStatefulTestDebug
- FunctionalStatefulTestRelease
- FunctionalStatefulTestAsan
- FunctionalStatefulTestTsan
- FunctionalStatefulTestMsan
- FunctionalStatefulTestUBsan
- StressTestDebug
- StressTestAsan
- StressTestTsan
- StressTestMsan
- StressTestUBsan
- IntegrationTestsAsan
- IntegrationTestsRelease
- IntegrationTestsTsan
- CompatibilityCheck
runs-on: [self-hosted, style-checker]
steps:
- name: Check out repository code
uses: actions/checkout@v2
- name: Finish label
run: |
cd $GITHUB_WORKSPACE/tests/ci
python3 finish_check.py
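
The new release_branches.yml above mirrors the pull-request pipeline (builders, functional, stress and integration tests, and a final FinishCheck fan-in), but it is triggered only by pushes to release branches, and it is named ReleaseCI, the same name that was added to the Cancel workflow at the top of this diff. A short sketch of that trigger, with invented example branch names:

# Example only; the branch names in the comments are invented for illustration.
name: ReleaseCI
on:  # yamllint disable-line rule:truthy
  push:
    branches:
      - '21.**'         # e.g. 21.11 or 21.12
      - 'backport/**'   # e.g. backport/some-fix
# Pushes to master or to ordinary feature branches do not match, so ReleaseCI
# does not run for them; when it does run, the Cancel workflow now reacts to
# its runs through the workflow_run trigger shown earlier.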

.gitmodules (8 lines changed)

@@ -140,7 +140,7 @@
url = https://github.com/ClickHouse-Extras/libc-headers.git
[submodule "contrib/replxx"]
path = contrib/replxx
url = https://github.com/AmokHuginnsson/replxx.git
url = https://github.com/ClickHouse-Extras/replxx.git
[submodule "contrib/avro"]
path = contrib/avro
url = https://github.com/ClickHouse-Extras/avro.git
@@ -171,12 +171,6 @@
[submodule "contrib/sentry-native"]
path = contrib/sentry-native
url = https://github.com/ClickHouse-Extras/sentry-native.git
[submodule "contrib/gcem"]
path = contrib/gcem
url = https://github.com/kthohr/gcem.git
[submodule "contrib/stats"]
path = contrib/stats
url = https://github.com/kthohr/stats.git
[submodule "contrib/krb5"]
path = contrib/krb5
url = https://github.com/ClickHouse-Extras/krb5


@@ -17,7 +17,7 @@
* Support `EXISTS (subquery)`. Closes [#6852](https://github.com/ClickHouse/ClickHouse/issues/6852). [#29731](https://github.com/ClickHouse/ClickHouse/pull/29731) ([Kseniia Sumarokova](https://github.com/kssenii)).
* Session logging for audit. Logging all successful and failed login and logout events to a new `system.session_log` table. [#22415](https://github.com/ClickHouse/ClickHouse/pull/22415) ([Vasily Nemkov](https://github.com/Enmk)) ([Vitaly Baranov](https://github.com/vitlibar)).
* Support multidimensional cosine distance and euclidean distance functions; L1, L2, Lp, Linf distances and norms. Scalar product on tuples and various arithmetic operators on tuples. This fully closes [#4509](https://github.com/ClickHouse/ClickHouse/issues/4509) and even more. [#27933](https://github.com/ClickHouse/ClickHouse/pull/27933) ([Alexey Boykov](https://github.com/mathalex)).
* Add support for compression and decompression for `INTO OUTPUT` and `FROM INFILE` (with autodetect or with additional optional parameter). [#27135](https://github.com/ClickHouse/ClickHouse/pull/27135) ([Filatenkov Artur](https://github.com/FArthur-cmd)).
* Add support for compression and decompression for `INTO OUTFILE` and `FROM INFILE` (with autodetect or with additional optional parameter). [#27135](https://github.com/ClickHouse/ClickHouse/pull/27135) ([Filatenkov Artur](https://github.com/FArthur-cmd)).
* Add CORS (Cross Origin Resource Sharing) support with HTTP `OPTIONS` request. It means, now Grafana will work with serverless requests without a kludges. Closes [#18693](https://github.com/ClickHouse/ClickHouse/issues/18693). [#29155](https://github.com/ClickHouse/ClickHouse/pull/29155) ([Filatenkov Artur](https://github.com/FArthur-cmd)).
* Queries with JOIN ON now supports disjunctions (OR). [#21320](https://github.com/ClickHouse/ClickHouse/pull/21320) ([Ilya Golshtein](https://github.com/ilejn)).
* Added function `tokens`. That allow to split string into tokens using non-alpha numeric ASCII characters as separators. [#29981](https://github.com/ClickHouse/ClickHouse/pull/29981) ([Maksim Kita](https://github.com/kitaisreal)). Added function `ngrams` to extract ngrams from text. Closes [#29699](https://github.com/ClickHouse/ClickHouse/issues/29699). [#29738](https://github.com/ClickHouse/ClickHouse/pull/29738) ([Maksim Kita](https://github.com/kitaisreal)).


@@ -201,7 +201,7 @@ endif ()
option(ENABLE_TESTS "Provide unit_test_dbms target with Google.Test unit tests" ON)
option(ENABLE_EXAMPLES "Build all example programs in 'examples' subdirectories" OFF)
if (OS_LINUX AND (ARCH_AMD64 OR ARCH_AARCH64) AND NOT UNBUNDLED AND MAKE_STATIC_LIBRARIES AND NOT SPLIT_SHARED_LIBRARIES AND NOT USE_MUSL)
if (OS_LINUX AND (ARCH_AMD64 OR ARCH_AARCH64) AND MAKE_STATIC_LIBRARIES AND NOT SPLIT_SHARED_LIBRARIES AND NOT USE_MUSL)
# Only for Linux, x86_64 or aarch64.
option(GLIBC_COMPATIBILITY "Enable compatibility with older glibc libraries." ON)
elseif(GLIBC_COMPATIBILITY)
@@ -392,6 +392,8 @@ if (COMPILER_CLANG)
option(ENABLE_THINLTO "Clang-specific link time optimization" ON)
endif()
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fstrict-vtable-pointers")
# Set new experimental pass manager, it's a performance, build time and binary size win.
# Can be removed after https://reviews.llvm.org/D66490 merged and released to at least two versions of clang.
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fexperimental-new-pass-manager")
@@ -402,9 +404,9 @@ if (COMPILER_CLANG)
# completely.
if (ENABLE_THINLTO AND NOT ENABLE_TESTS AND NOT SANITIZE)
# Link time optimization
set (CMAKE_C_FLAGS_RELWITHDEBINFO "${CMAKE_C_FLAGS_RELWITHDEBINFO} -flto=thin")
set (CMAKE_CXX_FLAGS_RELWITHDEBINFO "${CMAKE_CXX_FLAGS_RELWITHDEBINFO} -flto=thin")
set (CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO "${CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO} -flto=thin")
set (CMAKE_C_FLAGS_RELWITHDEBINFO "${CMAKE_C_FLAGS_RELWITHDEBINFO} -flto=thin -fwhole-program-vtables")
set (CMAKE_CXX_FLAGS_RELWITHDEBINFO "${CMAKE_CXX_FLAGS_RELWITHDEBINFO} -flto=thin -fwhole-program-vtables")
set (CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO "${CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO} -flto=thin -fwhole-program-vtables")
elseif (ENABLE_THINLTO)
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot enable ThinLTO")
endif ()
@@ -435,20 +437,7 @@ endif ()
# Turns on all external libs like s3, kafka, ODBC, ...
option(ENABLE_LIBRARIES "Enable all external libraries by default" ON)
# We recommend avoiding this mode for production builds because we can't guarantee
# all needed libraries exist in your system.
# This mode exists for enthusiastic developers who are searching for trouble.
# The whole idea of using unknown version of libraries from the OS distribution is deeply flawed.
# Useful for maintainers of OS packages.
option (UNBUNDLED "Use system libraries instead of ones in contrib/" OFF)
if (UNBUNDLED)
set(NOT_UNBUNDLED OFF)
else ()
set(NOT_UNBUNDLED ON)
endif ()
if (UNBUNDLED OR NOT (OS_LINUX OR OS_DARWIN))
if (NOT (OS_LINUX OR OS_DARWIN))
# Using system libs can cause a lot of warnings in includes (on macro expansion).
option(WERROR "Enable -Werror compiler option" OFF)
else ()
@@ -527,7 +516,6 @@ message (STATUS
USE_STATIC_LIBRARIES=${USE_STATIC_LIBRARIES}
MAKE_STATIC_LIBRARIES=${MAKE_STATIC_LIBRARIES}
SPLIT_SHARED=${SPLIT_SHARED_LIBRARIES}
UNBUNDLED=${UNBUNDLED}
CCACHE=${CCACHE_FOUND} ${CCACHE_VERSION}")
include (GNUInstallDirs)
@@ -590,7 +578,6 @@ include (cmake/find/avro.cmake)
include (cmake/find/msgpack.cmake)
include (cmake/find/cassandra.cmake)
include (cmake/find/sentry.cmake)
include (cmake/find/stats.cmake)
include (cmake/find/datasketches.cmake)
include (cmake/find/libprotobuf-mutator.cmake)


@@ -18,3 +18,27 @@ if (NOT DEFINED ENV{CLION_IDE} AND NOT DEFINED ENV{XCODE_IDE})
set(CMAKE_GENERATOR "Ninja" CACHE INTERNAL "" FORCE)
endif ()
endif()
# Default toolchain - this is needed to avoid dependency on OS files.
execute_process(COMMAND uname -s OUTPUT_VARIABLE OS)
execute_process(COMMAND uname -m OUTPUT_VARIABLE ARCH)
if (OS MATCHES "Linux"
AND NOT DEFINED CMAKE_TOOLCHAIN_FILE
AND NOT UNBUNDLED
AND NOT DISABLE_HERMETIC_BUILD
AND ($ENV{CC} MATCHES ".*clang.*" OR CMAKE_C_COMPILER MATCHES ".*clang.*")
AND (USE_STATIC_LIBRARIES OR NOT DEFINED USE_STATIC_LIBRARIES))
if (ARCH MATCHES "amd64|x86_64")
set (CMAKE_TOOLCHAIN_FILE "cmake/linux/toolchain-x86_64.cmake" CACHE INTERNAL "" FORCE)
elseif (ARCH MATCHES "^(aarch64.*|AARCH64.*|arm64.*|ARM64.*)")
set (CMAKE_TOOLCHAIN_FILE "cmake/linux/toolchain-aarch64.cmake" CACHE INTERNAL "" FORCE)
elseif (ARCH MATCHES "^(ppc64le.*|PPC64LE.*)")
set (CMAKE_TOOLCHAIN_FILE "cmake/linux/toolchain-ppc64le.cmake" CACHE INTERNAL "" FORCE)
else ()
message (FATAL_ERROR "Unsupported architecture: ${ARCH}")
endif ()
endif()


@@ -19,9 +19,11 @@ The following versions of ClickHouse server are currently being supported with s
| 21.4 | :x: |
| 21.5 | :x: |
| 21.6 | :x: |
| 21.7 | |
| 21.7 | :x: |
| 21.8 | ✅ |
| 21.9 | ✅ |
| 21.10 | ✅ |
| 21.11 | ✅ |
## Reporting a Vulnerability


@@ -25,6 +25,16 @@ void trim(String & s)
s.erase(std::find_if(s.rbegin(), s.rend(), [](int ch) { return !std::isspace(ch); }).base(), s.end());
}
std::string getEditor()
{
const char * editor = std::getenv("EDITOR");
if (!editor || !*editor)
editor = "vim";
return editor;
}
/// Copied from replxx::src/util.cxx::now_ms_str() under the terms of 3-clause BSD license of Replxx.
/// Copyright (c) 2017-2018, Marcin Konarski (amok at codestation.org)
/// Copyright (c) 2010, Salvatore Sanfilippo (antirez at gmail dot com)
@@ -123,6 +133,7 @@ ReplxxLineReader::ReplxxLineReader(
Patterns delimiters_,
replxx::Replxx::highlighter_callback_t highlighter_)
: LineReader(history_file_path_, multiline_, std::move(extenders_), std::move(delimiters_)), highlighter(std::move(highlighter_))
, editor(getEditor())
{
using namespace std::placeholders;
using Replxx = replxx::Replxx;
@@ -236,14 +247,13 @@ void ReplxxLineReader::addToHistory(const String & line)
rx.print("Unlock of history file failed: %s\n", errnoToString(errno).c_str());
}
int ReplxxLineReader::execute(const std::string & command)
/// See comments in ShellCommand::executeImpl()
/// (for the vfork via dlsym())
int ReplxxLineReader::executeEditor(const std::string & path)
{
std::vector<char> argv0("sh", &("sh"[3]));
std::vector<char> argv1("-c", &("-c"[3]));
std::vector<char> argv2(command.data(), command.data() + command.size() + 1);
const char * filename = "/bin/sh";
char * const argv[] = {argv0.data(), argv1.data(), argv2.data(), nullptr};
std::vector<char> argv0(editor.data(), editor.data() + editor.size() + 1);
std::vector<char> argv1(path.data(), path.data() + path.size() + 1);
char * const argv[] = {argv0.data(), argv1.data(), nullptr};
static void * real_vfork = dlsym(RTLD_DEFAULT, "vfork");
if (!real_vfork)
@@ -260,6 +270,7 @@ int ReplxxLineReader::execute(const std::string & command)
return -1;
}
/// Child
if (0 == pid)
{
sigset_t mask;
@@ -267,16 +278,26 @@ int ReplxxLineReader::execute(const std::string & command)
sigprocmask(0, nullptr, &mask);
sigprocmask(SIG_UNBLOCK, &mask, nullptr);
execv(filename, argv);
execvp(editor.c_str(), argv);
rx.print("Cannot execute %s: %s\n", editor.c_str(), errnoToString(errno).c_str());
_exit(-1);
}
int status = 0;
if (-1 == waitpid(pid, &status, 0))
do
{
rx.print("Cannot waitpid: %s\n", errnoToString(errno).c_str());
return -1;
}
int exited_pid = waitpid(pid, &status, 0);
if (exited_pid == -1)
{
if (errno == EINTR)
continue;
rx.print("Cannot waitpid: %s\n", errnoToString(errno).c_str());
return -1;
}
else
break;
} while (true);
return status;
}
@@ -290,10 +311,6 @@ void ReplxxLineReader::openEditor()
return;
}
const char * editor = std::getenv("EDITOR");
if (!editor || !*editor)
editor = "vim";
replxx::Replxx::State state(rx.get_state());
size_t bytes_written = 0;
@@ -316,7 +333,7 @@ void ReplxxLineReader::openEditor()
return;
}
if (0 == execute(fmt::format("{} {}", editor, filename)))
if (0 == executeEditor(filename))
{
try
{


@@ -22,7 +22,7 @@ public:
private:
InputStatus readOneLine(const String & prompt) override;
void addToHistory(const String & line) override;
int execute(const std::string & command);
int executeEditor(const std::string & path);
void openEditor();
replxx::Replxx rx;
@@ -31,4 +31,6 @@ private:
// used to call flock() to synchronize multiple clients using same history file
int history_file_fd = -1;
bool bracketed_paste_enabled = false;
std::string editor;
};


@@ -13,30 +13,21 @@
#if defined(__linux__)
#include <sys/prctl.h>
#endif
#include <fcntl.h>
#include <errno.h>
#include <string.h>
#include <signal.h>
#include <cxxabi.h>
#include <unistd.h>
#include <typeinfo>
#include <iostream>
#include <fstream>
#include <sstream>
#include <memory>
#include <base/scope_guard.h>
#include <Poco/Observer.h>
#include <Poco/AutoPtr.h>
#include <Poco/PatternFormatter.h>
#include <Poco/Message.h>
#include <Poco/Util/Application.h>
#include <Poco/Exception.h>
#include <Poco/ErrorHandler.h>
#include <Poco/Condition.h>
#include <Poco/SyslogChannel.h>
#include <Poco/DirectoryIterator.h>
#include <base/logger_useful.h>
#include <base/ErrorHandlers.h>
@@ -56,13 +47,15 @@
#include <Common/getMultipleKeysFromConfig.h>
#include <Common/ClickHouseRevision.h>
#include <Common/Config/ConfigProcessor.h>
#include <Common/MemorySanitizer.h>
#include <Common/SymbolIndex.h>
#include <Common/getExecutablePath.h>
#include <Common/getHashOfLoadedBinary.h>
#include <Common/Elf.h>
#include <filesystem>
#include <loggers/OwnFormattingChannel.h>
#include <loggers/OwnPatternFormatter.h>
#include <Common/config_version.h>
#if defined(OS_DARWIN)
@@ -1001,6 +994,14 @@ void BaseDaemon::setupWatchdog()
memcpy(argv0, new_process_name, std::min(strlen(new_process_name), original_process_name.size()));
}
/// If streaming compression of logs is used then we write watchdog logs to cerr
if (config().getRawString("logger.stream_compress", "false") == "true")
{
Poco::AutoPtr<OwnPatternFormatter> pf = new OwnPatternFormatter;
Poco::AutoPtr<DB::OwnFormattingChannel> log = new DB::OwnFormattingChannel(pf, new Poco::ConsoleChannel(std::cerr));
logger().setChannel(log);
}
logger().information(fmt::format("Will watch for the process with pid {}", pid));
/// Forward signals to the child process.


@@ -37,11 +37,3 @@ GraphiteWriter::GraphiteWriter(const std::string & config_name, const std::strin
root_path += sub_path;
}
}
std::string GraphiteWriter::getPerServerPath(const std::string & server_name, const std::string & root_path)
{
std::string path = root_path + "." + server_name;
std::replace(path.begin() + root_path.size() + 1, path.end(), '.', '_');
return path;
}


@@ -8,10 +8,10 @@
#include <base/logger_useful.h>
/// пишет в Graphite данные в формате
/// Writes to Graphite in the following format
/// path value timestamp\n
/// path может иметь любую вложенность. Директории разделяются с помощью "."
/// у нас принят следующий формат path - root_path.server_name.sub_path.key
/// path can be arbitrary nested. Elements are separated by '.'
/// Example: root_path.server_name.sub_path.key
class GraphiteWriter
{
public:
@@ -32,8 +32,6 @@ public:
writeImpl(key_val_vec, timestamp, custom_root_path);
}
/// возвращает путь root_path.server_name
static std::string getPerServerPath(const std::string & server_name, const std::string & root_path = "one_min");
private:
template <typename T>
void writeImpl(const T & data, time_t timestamp, const std::string & custom_root_path)


@@ -62,7 +62,13 @@ void Loggers::buildLoggers(Poco::Util::AbstractConfiguration & config, Poco::Log
if (!log_path.empty())
{
createDirectory(log_path);
std::cerr << "Logging " << log_level_string << " to " << log_path << std::endl;
std::string ext;
if (config.getRawString("logger.stream_compress", "false") == "true")
ext = ".lz4";
std::cerr << "Logging " << log_level_string << " to " << log_path << ext << std::endl;
auto log_level = Poco::Logger::parseLevel(log_level_string);
if (log_level > max_log_level)
{
@@ -75,6 +81,7 @@ void Loggers::buildLoggers(Poco::Util::AbstractConfiguration & config, Poco::Log
log_file->setProperty(Poco::FileChannel::PROP_ROTATION, config.getRawString("logger.size", "100M"));
log_file->setProperty(Poco::FileChannel::PROP_ARCHIVE, "number");
log_file->setProperty(Poco::FileChannel::PROP_COMPRESS, config.getRawString("logger.compress", "true"));
log_file->setProperty(Poco::FileChannel::PROP_STREAMCOMPRESS, config.getRawString("logger.stream_compress", "false"));
log_file->setProperty(Poco::FileChannel::PROP_PURGECOUNT, config.getRawString("logger.count", "1"));
log_file->setProperty(Poco::FileChannel::PROP_FLUSH, config.getRawString("logger.flush", "true"));
log_file->setProperty(Poco::FileChannel::PROP_ROTATEONOPEN, config.getRawString("logger.rotateOnOpen", "false"));
@@ -100,13 +107,18 @@ void Loggers::buildLoggers(Poco::Util::AbstractConfiguration & config, Poco::Log
max_log_level = errorlog_level;
}
std::cerr << "Logging errors to " << errorlog_path << std::endl;
std::string ext;
if (config.getRawString("logger.stream_compress", "false") == "true")
ext = ".lz4";
std::cerr << "Logging errors to " << errorlog_path << ext << std::endl;
error_log_file = new Poco::FileChannel;
error_log_file->setProperty(Poco::FileChannel::PROP_PATH, fs::weakly_canonical(errorlog_path));
error_log_file->setProperty(Poco::FileChannel::PROP_ROTATION, config.getRawString("logger.size", "100M"));
error_log_file->setProperty(Poco::FileChannel::PROP_ARCHIVE, "number");
error_log_file->setProperty(Poco::FileChannel::PROP_COMPRESS, config.getRawString("logger.compress", "true"));
error_log_file->setProperty(Poco::FileChannel::PROP_STREAMCOMPRESS, config.getRawString("logger.stream_compress", "false"));
error_log_file->setProperty(Poco::FileChannel::PROP_PURGECOUNT, config.getRawString("logger.count", "1"));
error_log_file->setProperty(Poco::FileChannel::PROP_FLUSH, config.getRawString("logger.flush", "true"));
error_log_file->setProperty(Poco::FileChannel::PROP_ROTATEONOPEN, config.getRawString("logger.rotateOnOpen", "false"));
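
The Loggers changes above thread a new logger.stream_compress setting through to the file channel's PROP_STREAMCOMPRESS property, so the active log file is stream-compressed with LZ4 (the startup message appends an .lz4 suffix to the printed path) instead of being compressed only on rotation, and the watchdog in BaseDaemon falls back to writing its logs to stderr in that mode. A minimal sketch of enabling it, assuming the YAML flavour of the server configuration is in use; everything except stream_compress is a placeholder shown for context, and the XML form uses the same keys:

# config.yaml fragment (sketch); only stream_compress relates to this commit.
logger:
    level: information
    log: /var/log/clickhouse-server/clickhouse-server.log
    errorlog: /var/log/clickhouse-server/clickhouse-server.err.log
    stream_compress: true   # read as logger.stream_compress in the code above
    size: 100M
    count: 1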


@@ -1,16 +1,12 @@
#include "OwnPatternFormatter.h"
#include <functional>
#include <optional>
#include <sys/time.h>
#include <IO/WriteBufferFromString.h>
#include <IO/WriteHelpers.h>
#include <Common/HashTable/Hash.h>
#include <Interpreters/InternalTextLogsQueue.h>
#include <Common/CurrentThread.h>
#include <base/getThreadId.h>
#include <base/terminalColors.h>
#include "Loggers.h"
OwnPatternFormatter::OwnPatternFormatter(bool color_)


@@ -13,10 +13,7 @@ add_library (mysqlxx
target_include_directories (mysqlxx PUBLIC ..)
if (USE_INTERNAL_MYSQL_LIBRARY)
target_include_directories (mysqlxx PUBLIC "${ClickHouse_SOURCE_DIR}/contrib/mariadb-connector-c/include")
target_include_directories (mysqlxx PUBLIC "${ClickHouse_BINARY_DIR}/contrib/mariadb-connector-c/include")
else ()
if (NOT USE_INTERNAL_MYSQL_LIBRARY)
set(PLATFORM_LIBRARIES ${CMAKE_DL_LIBS})
if (USE_MYSQL)


@@ -12,13 +12,13 @@ macro (add_warning flag)
if (SUPPORTS_CXXFLAG_${underscored_flag})
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -W${flag}")
else ()
message (WARNING "Flag -W${flag} is unsupported")
message (STATUS "Flag -W${flag} is unsupported")
endif ()
if (SUPPORTS_CFLAG_${underscored_flag})
set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -W${flag}")
else ()
message (WARNING "Flag -W${flag} is unsupported")
message (STATUS "Flag -W${flag} is unsupported")
endif ()
endmacro ()
@@ -39,7 +39,7 @@ macro (target_add_warning target flag)
if (SUPPORTS_CXXFLAG_${underscored_flag})
target_compile_options (${target} PRIVATE "-W${flag}")
else ()
message (WARNING "Flag -W${flag} is unsupported")
message (STATUS "Flag -W${flag} is unsupported")
endif ()
endmacro ()


@@ -4,7 +4,7 @@ macro(find_contrib_lib LIB_NAME)
string(TOUPPER ${LIB_NAME} LIB_NAME_UC)
string(REPLACE "-" "_" LIB_NAME_UC ${LIB_NAME_UC})
option (USE_INTERNAL_${LIB_NAME_UC}_LIBRARY "Use bundled library ${LIB_NAME} instead of system" ${NOT_UNBUNDLED})
option (USE_INTERNAL_${LIB_NAME_UC}_LIBRARY "Use bundled library ${LIB_NAME} instead of system" ON)
if (NOT USE_INTERNAL_${LIB_NAME_UC}_LIBRARY)
find_package ("${LIB_NAME}")


@@ -28,6 +28,9 @@ option (ARCH_NATIVE "Add -march=native compiler flag. This makes your binaries n
if (ARCH_NATIVE)
set (COMPILER_FLAGS "${COMPILER_FLAGS} -march=native")
elseif (ARCH_AARCH64)
set (COMPILER_FLAGS "${COMPILER_FLAGS} -march=armv8-a+crc")
else ()
set (TEST_FLAG "-mssse3")
set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0")
@@ -43,7 +46,6 @@ else ()
set (COMPILER_FLAGS "${COMPILER_FLAGS} ${TEST_FLAG}")
endif ()
set (TEST_FLAG "-msse4.1")
set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0")
check_cxx_source_compiles("
@@ -138,7 +140,7 @@ else ()
#include <immintrin.h>
int main() {
auto a = _mm512_setzero_epi32();
(void)a;
(void)a;
auto b = _mm512_add_epi16(__m512i(), __m512i());
(void)b;
return 0;
@ -160,9 +162,9 @@ else ()
" HAVE_BMI)
if (HAVE_BMI AND ENABLE_BMI)
set (COMPILER_FLAGS "${COMPILER_FLAGS} ${TEST_FLAG}")
endif ()
endif ()
#Limit avx2/avx512 flag for specific source build
# Limit avx2/avx512 flag for specific source build
set (X86_INTRINSICS_FLAGS "")
if (ENABLE_AVX2_FOR_SPEC_OP)
if (HAVE_BMI)

View File

@ -9,7 +9,7 @@ if (NOT ENABLE_AMQPCPP)
return()
endif()
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/AMQP-CPP/CMakeLists.txt")
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/AMQP-CPP/src")
message (WARNING "submodule contrib/AMQP-CPP is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal AMQP-CPP library")
set (USE_AMQPCPP 0)

View File

@ -8,10 +8,9 @@ if (NOT ENABLE_AVRO)
return()
endif()
option (USE_INTERNAL_AVRO_LIBRARY
"Set to FALSE to use system avro library instead of bundled" ON) # TODO: provide unbundled support
option (USE_INTERNAL_AVRO_LIBRARY "Set to FALSE to use system avro library instead of bundled" ON)
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/avro/lang/c++/CMakeLists.txt")
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/avro/lang")
if (USE_INTERNAL_AVRO_LIBRARY)
message(WARNING "submodule contrib/avro is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot find internal avro")

View File

@ -7,12 +7,7 @@ if (NOT ENABLE_BROTLI)
return()
endif()
if (UNBUNDLED)
# Many systems ship only dynamic brotli libraries, so we fall back to the bundled build by default
option (USE_INTERNAL_BROTLI_LIBRARY "Set to FALSE to use system libbrotli library instead of bundled" ${USE_STATIC_LIBRARIES})
else()
option (USE_INTERNAL_BROTLI_LIBRARY "Set to FALSE to use system libbrotli library instead of bundled" ON)
endif()
option (USE_INTERNAL_BROTLI_LIBRARY "Set to FALSE to use system libbrotli library instead of bundled" ON)
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/brotli/c/include/brotli/decode.h")
if (USE_INTERNAL_BROTLI_LIBRARY)

View File

@ -7,9 +7,9 @@ if (NOT ENABLE_CAPNP)
return()
endif()
option (USE_INTERNAL_CAPNP_LIBRARY "Set to FALSE to use system capnproto library instead of bundled" ${NOT_UNBUNDLED})
option (USE_INTERNAL_CAPNP_LIBRARY "Set to FALSE to use system capnproto library instead of bundled" ON)
if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/capnproto/CMakeLists.txt")
if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/capnproto/c++")
if(USE_INTERNAL_CAPNP_LIBRARY)
message(WARNING "submodule contrib/capnproto is missing. to fix try run: \n git submodule update --init")
message(${RECONFIGURE_MESSAGE_LEVEL} "cannot find internal capnproto")

View File

@ -7,7 +7,7 @@ if (NOT ENABLE_CURL)
return()
endif()
option (USE_INTERNAL_CURL "Use internal curl library" ${NOT_UNBUNDLED})
option (USE_INTERNAL_CURL "Use internal curl library" ON)
if (NOT USE_INTERNAL_CURL)
find_package (CURL)
@ -22,8 +22,6 @@ if (NOT CURL_FOUND)
# find_package(CURL) compatibility for the following packages that uses
# find_package(CURL)/include(FindCURL):
# - mariadb-connector-c
# - aws-s3-cmake
# - sentry-native
set (CURL_FOUND ON CACHE BOOL "")
set (CURL_ROOT_DIR ${CURL_LIBRARY_DIR} CACHE PATH "")

View File

@ -1,4 +1,4 @@
option (USE_LIBCXX "Use libc++ and libc++abi instead of libstdc++" ${NOT_UNBUNDLED})
option (USE_LIBCXX "Use libc++ and libc++abi instead of libstdc++" ON)
if (NOT USE_LIBCXX)
if (USE_INTERNAL_LIBCXX_LIBRARY)
@ -10,12 +10,12 @@ if (NOT USE_LIBCXX)
return()
endif()
set(USE_INTERNAL_LIBCXX_LIBRARY_DEFAULT ${NOT_UNBUNDLED})
set(USE_INTERNAL_LIBCXX_LIBRARY_DEFAULT ON)
option (USE_INTERNAL_LIBCXX_LIBRARY "Disable to use system libcxx and libcxxabi libraries instead of bundled"
${USE_INTERNAL_LIBCXX_LIBRARY_DEFAULT})
if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libcxx/CMakeLists.txt")
if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libcxx/src")
if (USE_INTERNAL_LIBCXX_LIBRARY)
message(WARNING "submodule contrib/libcxx is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal libcxx")
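
The same pattern (a ${NOT_UNBUNDLED} default replaced by a hard ON) repeats throughout the following hunks. As a hedged sketch, a build that still wants a system library can pre-seed the cache, either with `-D` on the command line or an initial-cache file passed via `cmake -C`, since option() keeps an already-set cache value:

# Hypothetical initial-cache file; the option name is one of those shown in this diff.
set(USE_INTERNAL_LIBCXX_LIBRARY OFF CACHE BOOL "use the system libc++/libc++abi instead of the bundled ones")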

View File

@ -2,7 +2,7 @@ option (ENABLE_DATASKETCHES "Enable DataSketches" ${ENABLE_LIBRARIES})
if (ENABLE_DATASKETCHES)
option (USE_INTERNAL_DATASKETCHES_LIBRARY "Set to FALSE to use system DataSketches library instead of bundled" ${NOT_UNBUNDLED})
option (USE_INTERNAL_DATASKETCHES_LIBRARY "Set to FALSE to use system DataSketches library instead of bundled" ON)
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/datasketches-cpp/theta/CMakeLists.txt")
if (USE_INTERNAL_DATASKETCHES_LIBRARY)

View File

@ -22,7 +22,7 @@ endif()
# You can set USE_INTERNAL_GRPC_LIBRARY to OFF to force using the external gRPC framework, which should be installed in the system in this case.
# The external gRPC framework can be installed in the system by running
# sudo apt-get install libgrpc++-dev protobuf-compiler-grpc
option(USE_INTERNAL_GRPC_LIBRARY "Set to FALSE to use system gRPC library instead of bundled. (Experimental. Set to OFF on your own risk)" ${NOT_UNBUNDLED})
option(USE_INTERNAL_GRPC_LIBRARY "Set to FALSE to use system gRPC library instead of bundled. (Experimental. Set to OFF on your own risk)" ON)
if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/grpc/CMakeLists.txt")
if(USE_INTERNAL_GRPC_LIBRARY)

View File

@ -1,6 +1,6 @@
# included only if ENABLE_TESTS=1
option (USE_INTERNAL_GTEST_LIBRARY "Set to FALSE to use system Google Test instead of bundled" ${NOT_UNBUNDLED})
option (USE_INTERNAL_GTEST_LIBRARY "Set to FALSE to use system Google Test instead of bundled" ON)
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/googletest/googletest/CMakeLists.txt")
if (USE_INTERNAL_GTEST_LIBRARY)

View File

@ -12,7 +12,7 @@ if (NOT ENABLE_ICU)
return()
endif()
option (USE_INTERNAL_ICU_LIBRARY "Set to FALSE to use system ICU library instead of bundled" ${NOT_UNBUNDLED})
option (USE_INTERNAL_ICU_LIBRARY "Set to FALSE to use system ICU library instead of bundled" ON)
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/icu/icu4c/LICENSE")
if (USE_INTERNAL_ICU_LIBRARY)

View File

@ -1,7 +1,3 @@
if (UNBUNDLED AND USE_STATIC_LIBRARIES)
set (ENABLE_LDAP OFF CACHE INTERNAL "")
endif()
option (ENABLE_LDAP "Enable LDAP" ${ENABLE_LIBRARIES})
if (NOT ENABLE_LDAP)
@ -11,7 +7,7 @@ if (NOT ENABLE_LDAP)
return()
endif()
option (USE_INTERNAL_LDAP_LIBRARY "Set to FALSE to use system *LDAP library instead of bundled" ${NOT_UNBUNDLED})
option (USE_INTERNAL_LDAP_LIBRARY "Set to FALSE to use system *LDAP library instead of bundled" ON)
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/openldap/README")
if (USE_INTERNAL_LDAP_LIBRARY)

View File

@ -7,12 +7,7 @@ if (NOT ENABLE_GSASL_LIBRARY)
return()
endif()
if (UNBUNDLED)
# when USE_STATIC_LIBRARIES is set we usually need to pull in a whole lot of dependencies for libgsasl
option (USE_INTERNAL_LIBGSASL_LIBRARY "Set to FALSE to use system libgsasl library instead of bundled" ${USE_STATIC_LIBRARIES})
else()
option (USE_INTERNAL_LIBGSASL_LIBRARY "Set to FALSE to use system libgsasl library instead of bundled" ON)
endif()
option (USE_INTERNAL_LIBGSASL_LIBRARY "Set to FALSE to use system libgsasl library instead of bundled" ON)
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libgsasl/src/gsasl.h")
if (USE_INTERNAL_LIBGSASL_LIBRARY)
@ -35,7 +30,7 @@ if (LIBGSASL_LIBRARY AND LIBGSASL_INCLUDE_DIR)
elseif (NOT MISSING_INTERNAL_LIBGSASL_LIBRARY)
set (LIBGSASL_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/libgsasl/src" "${ClickHouse_SOURCE_DIR}/contrib/libgsasl/linux_x86_64/include")
set (USE_INTERNAL_LIBGSASL_LIBRARY 1)
set (LIBGSASL_LIBRARY libgsasl)
set (LIBGSASL_LIBRARY gsasl)
endif ()
if(LIBGSASL_LIBRARY AND LIBGSASL_INCLUDE_DIR)

View File

@ -4,7 +4,7 @@ if (NOT ENABLE_LIBPQXX)
return()
endif()
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libpqxx/CMakeLists.txt")
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libpqxx/src")
message (WARNING "submodule contrib/libpqxx is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal libpqxx library")
set (USE_LIBPQXX 0)

View File

@ -1,4 +1,4 @@
option (USE_INTERNAL_LIBXML2_LIBRARY "Set to FALSE to use system libxml2 library instead of bundled" ${NOT_UNBUNDLED})
option (USE_INTERNAL_LIBXML2_LIBRARY "Set to FALSE to use system libxml2 library instead of bundled" ON)
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libxml2/libxml.h")
if (USE_INTERNAL_LIBXML2_LIBRARY)

View File

@ -7,7 +7,7 @@ if(NOT ENABLE_MSGPACK)
return()
endif()
option (USE_INTERNAL_MSGPACK_LIBRARY "Set to FALSE to use system msgpack library instead of bundled" ${NOT_UNBUNDLED})
option (USE_INTERNAL_MSGPACK_LIBRARY "Set to FALSE to use system msgpack library instead of bundled" ON)
if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/msgpack-c/include/msgpack.hpp")
if(USE_INTERNAL_MSGPACK_LIBRARY)

View File

@ -12,7 +12,7 @@ if(NOT ENABLE_MYSQL)
return()
endif()
option(USE_INTERNAL_MYSQL_LIBRARY "Set to FALSE to use system mysqlclient library instead of bundled" ${NOT_UNBUNDLED})
option(USE_INTERNAL_MYSQL_LIBRARY "Set to FALSE to use system mysqlclient library instead of bundled" ON)
if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/mariadb-connector-c/README")
if(USE_INTERNAL_MYSQL_LIBRARY)

View File

@ -6,7 +6,7 @@ if (NOT USE_INTERNAL_NANODBC_LIBRARY)
message (FATAL_ERROR "Only the bundled nanodbc library can be used")
endif ()
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/nanodbc/CMakeLists.txt")
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/nanodbc/nanodbc")
message (FATAL_ERROR "submodule contrib/nanodbc is missing. to fix try run: \n git submodule update --init")
endif()

View File

@ -13,7 +13,7 @@ if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libstemmer_c/Makefile")
return()
endif ()
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/wordnet-blast/CMakeLists.txt")
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/wordnet-blast/wnb")
message (WARNING "submodule contrib/wordnet-blast is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal wordnet-blast library, NLP functions will be disabled")
set (USE_NLP 0)

View File

@ -4,7 +4,7 @@ if (NOT ENABLE_NURAFT)
return()
endif()
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/NuRaft/CMakeLists.txt")
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/NuRaft/src")
message (WARNING "submodule contrib/NuRaft is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal NuRaft library")
set (USE_NURAFT 0)

View File

@ -19,7 +19,7 @@ if (NOT ENABLE_ODBC)
return()
endif()
option (USE_INTERNAL_ODBC_LIBRARY "Use internal ODBC library" ${NOT_UNBUNDLED})
option (USE_INTERNAL_ODBC_LIBRARY "Use internal ODBC library" ON)
if (NOT USE_INTERNAL_ODBC_LIBRARY)
find_library (LIBRARY_ODBC NAMES unixodbc odbc)

View File

@ -1,5 +1,5 @@
if (Protobuf_PROTOC_EXECUTABLE)
option (ENABLE_PARQUET "Enable parquet" ${ENABLE_LIBRARIES})
option (ENABLE_PARQUET "Enable parquet" ON)
elseif(ENABLE_PARQUET OR USE_INTERNAL_PARQUET_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use parquet without protoc executable")
endif()
@ -13,7 +13,7 @@ if (NOT ENABLE_PARQUET)
endif()
if (NOT OS_FREEBSD) # Freebsd: ../contrib/arrow/cpp/src/arrow/util/bit-util.h:27:10: fatal error: endian.h: No such file or directory
option(USE_INTERNAL_PARQUET_LIBRARY "Set to FALSE to use system parquet library instead of bundled" ${NOT_UNBUNDLED})
option(USE_INTERNAL_PARQUET_LIBRARY "Set to FALSE to use system parquet library instead of bundled" ON)
elseif(USE_INTERNAL_PARQUET_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Using internal parquet is not supported on freebsd")
endif()

View File

@ -11,7 +11,7 @@ endif()
# You can set USE_INTERNAL_PROTOBUF_LIBRARY to OFF to force using the external protobuf library, which should be installed in the system in this case.
# The external protobuf library can be installed in the system by running
# sudo apt-get install libprotobuf-dev protobuf-compiler libprotoc-dev
option(USE_INTERNAL_PROTOBUF_LIBRARY "Set to FALSE to use system protobuf instead of bundled. (Experimental. Set to OFF on your own risk)" ${NOT_UNBUNDLED})
option(USE_INTERNAL_PROTOBUF_LIBRARY "Set to FALSE to use system protobuf instead of bundled. (Experimental. Set to OFF on your own risk)" ON)
if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/protobuf/cmake/CMakeLists.txt")
if(USE_INTERNAL_PROTOBUF_LIBRARY)

View File

@ -6,7 +6,7 @@ if(NOT ENABLE_RAPIDJSON)
return()
endif()
option(USE_INTERNAL_RAPIDJSON_LIBRARY "Set to FALSE to use system rapidjson library instead of bundled" ${NOT_UNBUNDLED})
option(USE_INTERNAL_RAPIDJSON_LIBRARY "Set to FALSE to use system rapidjson library instead of bundled" ON)
if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/rapidjson/include/rapidjson/rapidjson.h")
if(USE_INTERNAL_RAPIDJSON_LIBRARY)

View File

@ -7,9 +7,9 @@ if (NOT ENABLE_RDKAFKA)
return()
endif()
option (USE_INTERNAL_RDKAFKA_LIBRARY "Set to FALSE to use system librdkafka instead of the bundled" ${NOT_UNBUNDLED})
option (USE_INTERNAL_RDKAFKA_LIBRARY "Set to FALSE to use system librdkafka instead of the bundled" ON)
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/cppkafka/CMakeLists.txt")
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/cppkafka/src")
if(USE_INTERNAL_RDKAFKA_LIBRARY)
message (WARNING "submodule contrib/cppkafka is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal cppkafka")
@ -18,7 +18,7 @@ if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/cppkafka/CMakeLists.txt")
set (MISSING_INTERNAL_CPPKAFKA_LIBRARY 1)
endif ()
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/librdkafka/CMakeLists.txt")
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/librdkafka/src")
if(USE_INTERNAL_RDKAFKA_LIBRARY OR MISSING_INTERNAL_CPPKAFKA_LIBRARY)
message (WARNING "submodule contrib/librdkafka is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal rdkafka")
@ -40,7 +40,7 @@ if (NOT USE_INTERNAL_RDKAFKA_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system sasl2 library needed for static librdkafka")
endif()
endif ()
set (CPPKAFKA_LIBRARY cppkafka) # TODO: try to use unbundled version.
set (CPPKAFKA_LIBRARY cppkafka)
endif ()
if (RDKAFKA_LIB AND RDKAFKA_INCLUDE_DIR)

View File

@ -1,6 +1,6 @@
option (USE_INTERNAL_RE2_LIBRARY "Set to FALSE to use system re2 library instead of bundled [slower]" ${NOT_UNBUNDLED})
option (USE_INTERNAL_RE2_LIBRARY "Set to FALSE to use system re2 library instead of bundled [slower]" ON)
if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/re2/CMakeLists.txt")
if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/re2/re2")
if(USE_INTERNAL_RE2_LIBRARY)
message(WARNING "submodule contrib/re2 is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal re2 library")

View File

@ -11,9 +11,9 @@ if (NOT ENABLE_ROCKSDB)
return()
endif()
option(USE_INTERNAL_ROCKSDB_LIBRARY "Set to FALSE to use system ROCKSDB library instead of bundled" ${NOT_UNBUNDLED})
option(USE_INTERNAL_ROCKSDB_LIBRARY "Set to FALSE to use system ROCKSDB library instead of bundled" ON)
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/rocksdb/CMakeLists.txt")
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/rocksdb/include")
if (USE_INTERNAL_ROCKSDB_LIBRARY)
message (WARNING "submodule contrib is missing. to fix try run: \n git submodule update --init")
message(${RECONFIGURE_MESSAGE_LEVEL} "cannot find internal rocksdb")

View File

@ -9,7 +9,7 @@ if (NOT EXISTS "${SENTRY_INCLUDE_DIR}/sentry.h")
return()
endif ()
if (NOT OS_FREEBSD AND NOT SPLIT_SHARED_LIBRARIES AND NOT_UNBUNDLED AND NOT (OS_DARWIN AND COMPILER_CLANG))
if (NOT OS_FREEBSD AND NOT SPLIT_SHARED_LIBRARIES AND NOT (OS_DARWIN AND COMPILER_CLANG))
option (USE_SENTRY "Use Sentry" ${ENABLE_LIBRARIES})
set (SENTRY_TRANSPORT "curl" CACHE STRING "")
set (SENTRY_BACKEND "none" CACHE STRING "")
@ -18,8 +18,6 @@ if (NOT OS_FREEBSD AND NOT SPLIT_SHARED_LIBRARIES AND NOT_UNBUNDLED AND NOT (OS_
set (SENTRY_PIC OFF CACHE BOOL "")
set (BUILD_SHARED_LIBS OFF)
message (STATUS "Using sentry=${USE_SENTRY}: ${SENTRY_LIBRARY}")
include_directories("${SENTRY_INCLUDE_DIR}")
elseif (USE_SENTRY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Sentry is not supported in current configuration")
endif ()

View File

@ -1,4 +1,4 @@
option(USE_SNAPPY "Enable snappy library" ${ENABLE_LIBRARIES})
option(USE_SNAPPY "Enable snappy library" ON)
if(NOT USE_SNAPPY)
if (USE_INTERNAL_SNAPPY_LIBRARY)
@ -7,7 +7,7 @@ if(NOT USE_SNAPPY)
return()
endif()
option (USE_INTERNAL_SNAPPY_LIBRARY "Set to FALSE to use system snappy library instead of bundled" ${NOT_UNBUNDLED})
option (USE_INTERNAL_SNAPPY_LIBRARY "Set to FALSE to use system snappy library instead of bundled" ON)
if(NOT USE_INTERNAL_SNAPPY_LIBRARY)
find_library(SNAPPY_LIBRARY snappy)

View File

@ -9,7 +9,7 @@ if(NOT ENABLE_SSL)
return()
endif()
option(USE_INTERNAL_SSL_LIBRARY "Set to FALSE to use system *ssl library instead of bundled" ${NOT_UNBUNDLED})
option(USE_INTERNAL_SSL_LIBRARY "Set to FALSE to use system *ssl library instead of bundled" ON)
if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/boringssl/README.md")
if(USE_INTERNAL_SSL_LIBRARY)

View File

@ -1,24 +0,0 @@
option(ENABLE_STATS "Enable StatsLib library" ${ENABLE_LIBRARIES})
if (ENABLE_STATS)
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/stats")
message (WARNING "submodule contrib/stats is missing. to fix try run: \n git submodule update --init")
set (ENABLE_STATS 0)
set (USE_STATS 0)
elseif (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/gcem")
message (WARNING "submodule contrib/gcem is missing. to fix try run: \n git submodule update --init")
set (ENABLE_STATS 0)
set (USE_STATS 0)
else()
set(STATS_INCLUDE_DIR ${ClickHouse_SOURCE_DIR}/contrib/stats/include)
set(GCEM_INCLUDE_DIR ${ClickHouse_SOURCE_DIR}/contrib/gcem/include)
set (USE_STATS 1)
endif()
if (NOT USE_STATS)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't enable stats library")
endif()
endif()
message (STATUS "Using stats=${USE_STATS} : ${STATS_INCLUDE_DIR}")
message (STATUS "Using gcem=${USE_STATS}: ${GCEM_INCLUDE_DIR}")

View File

@ -1,4 +1,4 @@
option (USE_INTERNAL_XZ_LIBRARY "Set to OFF to use system xz (lzma) library instead of bundled" ${NOT_UNBUNDLED})
option (USE_INTERNAL_XZ_LIBRARY "Set to OFF to use system xz (lzma) library instead of bundled" ON)
if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/xz/src/liblzma/api/lzma.h")
if(USE_INTERNAL_XZ_LIBRARY)

View File

@ -1,4 +1,4 @@
option (USE_INTERNAL_ZLIB_LIBRARY "Set to FALSE to use system zlib library instead of bundled" ${NOT_UNBUNDLED})
option (USE_INTERNAL_ZLIB_LIBRARY "Set to FALSE to use system zlib library instead of bundled" ON)
if (NOT MSVC)
set (INTERNAL_ZLIB_NAME "zlib-ng" CACHE INTERNAL "")
@ -29,9 +29,6 @@ if (NOT USE_INTERNAL_ZLIB_LIBRARY)
endif ()
if (NOT ZLIB_FOUND AND NOT MISSING_INTERNAL_ZLIB_LIBRARY)
# https://github.com/zlib-ng/zlib-ng/pull/733
# This is disabled by default
add_compile_definitions(Z_TLS=__thread)
set (USE_INTERNAL_ZLIB_LIBRARY 1)
set (ZLIB_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/${INTERNAL_ZLIB_NAME}" "${ClickHouse_BINARY_DIR}/contrib/${INTERNAL_ZLIB_NAME}" CACHE INTERNAL "") # generated zconf.h
set (ZLIB_INCLUDE_DIRS ${ZLIB_INCLUDE_DIR}) # for poco

View File

@ -1,4 +1,4 @@
option (USE_INTERNAL_ZSTD_LIBRARY "Set to FALSE to use system zstd library instead of bundled" ${NOT_UNBUNDLED})
option (USE_INTERNAL_ZSTD_LIBRARY "Set to FALSE to use system zstd library instead of bundled" ON)
if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/zstd/lib/zstd.h")
if(USE_INTERNAL_ZSTD_LIBRARY)

View File

@ -3,7 +3,7 @@ set (CMAKE_SYSTEM_PROCESSOR "aarch64")
set (CMAKE_C_COMPILER_TARGET "aarch64-unknown-freebsd12")
set (CMAKE_CXX_COMPILER_TARGET "aarch64-unknown-freebsd12")
set (CMAKE_ASM_COMPILER_TARGET "aarch64-unknown-freebsd12")
set (CMAKE_SYSROOT "${CMAKE_CURRENT_LIST_DIR}/../toolchain/freebsd-aarch64")
set (CMAKE_SYSROOT "${CMAKE_CURRENT_LIST_DIR}/../../contrib/sysroot/freebsd-aarch64")
set (CMAKE_TRY_COMPILE_TARGET_TYPE STATIC_LIBRARY) # disable linkage check - it doesn't work in CMake

View File

@ -3,7 +3,7 @@ set (CMAKE_SYSTEM_PROCESSOR "x86_64")
set (CMAKE_C_COMPILER_TARGET "x86_64-pc-freebsd11")
set (CMAKE_CXX_COMPILER_TARGET "x86_64-pc-freebsd11")
set (CMAKE_ASM_COMPILER_TARGET "x86_64-pc-freebsd11")
set (CMAKE_SYSROOT "${CMAKE_CURRENT_LIST_DIR}/../toolchain/freebsd-x86_64")
set (CMAKE_SYSROOT "${CMAKE_CURRENT_LIST_DIR}/../../contrib/sysroot/freebsd-x86_64")
set (CMAKE_TRY_COMPILE_TARGET_TYPE STATIC_LIBRARY) # disable linkage check - it doesn't work in CMake

View File

@ -5,8 +5,12 @@ set (DEFAULT_LIBS "-nodefaultlibs")
# We need builtins from Clang's RT even without libcxx - for ubsan+int128.
# See https://bugs.llvm.org/show_bug.cgi?id=16404
if (COMPILER_CLANG AND NOT CMAKE_CROSSCOMPILING)
execute_process (COMMAND ${CMAKE_CXX_COMPILER} --print-libgcc-file-name --rtlib=compiler-rt OUTPUT_VARIABLE BUILTINS_LIBRARY OUTPUT_STRIP_TRAILING_WHITESPACE)
if (COMPILER_CLANG)
execute_process (COMMAND ${CMAKE_CXX_COMPILER} --target=${CMAKE_CXX_COMPILER_TARGET} --print-libgcc-file-name --rtlib=compiler-rt OUTPUT_VARIABLE BUILTINS_LIBRARY OUTPUT_STRIP_TRAILING_WHITESPACE)
if (NOT EXISTS "${BUILTINS_LIBRARY}")
set (BUILTINS_LIBRARY "-lgcc")
endif ()
else ()
set (BUILTINS_LIBRARY "-lgcc")
endif ()
@ -28,7 +32,7 @@ set(CMAKE_C_STANDARD_LIBRARIES ${DEFAULT_LIBS})
# glibc-compatibility library relies on a constant version of libc headers
# (because minor changes in function attributes between different glibc versions will introduce incompatibilities)
# This is for x86_64. For other architectures we have separate toolchains.
if (ARCH_AMD64 AND NOT_UNBUNDLED AND NOT CMAKE_CROSSCOMPILING)
if (ARCH_AMD64 AND NOT CMAKE_CROSSCOMPILING)
set(CMAKE_C_STANDARD_INCLUDE_DIRECTORIES ${ClickHouse_SOURCE_DIR}/contrib/libc-headers/x86_64-linux-gnu ${ClickHouse_SOURCE_DIR}/contrib/libc-headers)
set(CMAKE_CXX_STANDARD_INCLUDE_DIRECTORIES ${ClickHouse_SOURCE_DIR}/contrib/libc-headers/x86_64-linux-gnu ${ClickHouse_SOURCE_DIR}/contrib/libc-headers)
endif ()

View File

@ -7,9 +7,7 @@
# - sometimes warnings from 3rd party libraries may come from macro substitutions in our code
# and we have to wrap them with #pragma GCC/clang diagnostic ignored
if (NOT MSVC)
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wextra")
endif ()
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wextra")
# Add some warnings that are not available even with -Wall -Wextra -Wpedantic.
# Intended for exploration of new compiler warnings that may be found useful.

View File

@ -51,7 +51,7 @@ if (USE_YAML_CPP)
endif()
if (USE_INTERNAL_XZ_LIBRARY)
add_subdirectory (xz)
add_subdirectory (xz-cmake)
endif()
add_subdirectory (poco-cmake)
@ -64,9 +64,7 @@ if (USE_INTERNAL_ZSTD_LIBRARY)
endif ()
if (USE_INTERNAL_RE2_LIBRARY)
set(RE2_BUILD_TESTING 0 CACHE INTERNAL "")
add_subdirectory (re2)
add_subdirectory (re2_st)
add_subdirectory (re2-cmake)
endif ()
if (USE_INTERNAL_DOUBLE_CONVERSION_LIBRARY)
@ -82,23 +80,10 @@ if (USE_INTERNAL_FARMHASH_LIBRARY)
endif ()
if (USE_INTERNAL_ZLIB_LIBRARY)
set (ZLIB_ENABLE_TESTS 0 CACHE INTERNAL "")
set (SKIP_INSTALL_ALL 1 CACHE INTERNAL "")
set (ZLIB_COMPAT 1 CACHE INTERNAL "") # also enables WITH_GZFILEOP
set (WITH_NATIVE_INSTRUCTIONS ${ARCH_NATIVE} CACHE INTERNAL "")
if (OS_FREEBSD OR ARCH_I386)
set (WITH_OPTIM 0 CACHE INTERNAL "") # Bug in assembler
endif ()
if (ARCH_AARCH64)
set(WITH_NEON 1 CACHE INTERNAL "")
set(WITH_ACLE 1 CACHE INTERNAL "")
endif ()
add_subdirectory (${INTERNAL_ZLIB_NAME})
# We should use same defines when including zlib.h as used when zlib compiled
target_compile_definitions (zlib PUBLIC ZLIB_COMPAT WITH_GZFILEOP)
if (ARCH_AMD64 OR ARCH_AARCH64)
target_compile_definitions (zlib PUBLIC X86_64 UNALIGNED_OK)
if (INTERNAL_ZLIB_NAME STREQUAL "zlib-ng")
add_subdirectory (zlib-ng-cmake)
else ()
add_subdirectory (${INTERNAL_ZLIB_NAME})
endif ()
endif ()
@ -117,28 +102,8 @@ if (USE_INTERNAL_LDAP_LIBRARY)
add_subdirectory (openldap-cmake)
endif ()
function(mysql_support)
set(CLIENT_PLUGIN_CACHING_SHA2_PASSWORD STATIC)
set(CLIENT_PLUGIN_SHA256_PASSWORD STATIC)
set(CLIENT_PLUGIN_REMOTE_IO OFF)
set(CLIENT_PLUGIN_DIALOG OFF)
set(CLIENT_PLUGIN_AUTH_GSSAPI_CLIENT OFF)
set(CLIENT_PLUGIN_CLIENT_ED25519 OFF)
set(CLIENT_PLUGIN_MYSQL_CLEAR_PASSWORD OFF)
set(SKIP_TESTS 1)
if (GLIBC_COMPATIBILITY)
set(LIBM glibc-compatibility)
endif()
if (USE_INTERNAL_ZLIB_LIBRARY)
set(ZLIB_FOUND ON)
set(ZLIB_LIBRARY ${ZLIB_LIBRARIES})
set(WITH_EXTERNAL_ZLIB ON)
endif()
set(WITH_CURL OFF)
add_subdirectory (mariadb-connector-c)
endfunction()
if (ENABLE_MYSQL AND USE_INTERNAL_MYSQL_LIBRARY)
mysql_support()
if (USE_INTERNAL_MYSQL_LIBRARY)
add_subdirectory (mariadb-connector-c-cmake)
endif ()
if (USE_INTERNAL_RDKAFKA_LIBRARY)
@ -194,11 +159,7 @@ if (USE_INTERNAL_AVRO_LIBRARY)
endif()
if(USE_INTERNAL_GTEST_LIBRARY)
set(GOOGLETEST_VERSION 1.10.0) # master
# Google Test from sources
add_subdirectory(${ClickHouse_SOURCE_DIR}/contrib/googletest/googletest ${CMAKE_CURRENT_BINARY_DIR}/googletest)
# avoid problems with <regexp.h>
target_compile_definitions (gtest INTERFACE GTEST_HAS_POSIX_RE=0)
add_subdirectory(googletest-cmake)
elseif(GTEST_SRC_DIR)
add_subdirectory(${GTEST_SRC_DIR}/googletest ${CMAKE_CURRENT_BINARY_DIR}/googletest)
target_compile_definitions(gtest INTERFACE GTEST_HAS_POSIX_RE=0)
@ -229,7 +190,7 @@ if (USE_EMBEDDED_COMPILER)
endif ()
if (USE_INTERNAL_LIBGSASL_LIBRARY)
add_subdirectory(libgsasl)
add_subdirectory(libgsasl-cmake)
endif()
if (USE_INTERNAL_LIBXML2_LIBRARY)
@ -281,14 +242,7 @@ if (USE_AMQPCPP)
add_subdirectory (amqpcpp-cmake)
endif()
if (USE_CASSANDRA)
# Need to use C++17 since the compilation is not possible with C++20 currently.
set (CMAKE_CXX_STANDARD_bak ${CMAKE_CXX_STANDARD})
set (CMAKE_CXX_STANDARD 17)
add_subdirectory (cassandra)
set (CMAKE_CXX_STANDARD ${CMAKE_CXX_STANDARD_bak})
unset (CMAKE_CXX_STANDARD_bak)
add_subdirectory (cassandra-cmake)
endif()
# Should go before:
@ -296,16 +250,11 @@ endif()
add_subdirectory (curl-cmake)
if (USE_SENTRY)
add_subdirectory (sentry-native)
add_subdirectory (sentry-native-cmake)
endif()
add_subdirectory (fmtlib-cmake)
if (USE_STATS)
add_subdirectory (stats-cmake)
add_subdirectory (gcem)
endif()
if (USE_KRB5)
add_subdirectory (krb5-cmake)
if (USE_CYRUS_SASL)
@ -326,7 +275,7 @@ if (USE_NURAFT)
add_subdirectory(nuraft-cmake)
endif()
add_subdirectory(fast_float)
add_subdirectory(fast_float-cmake)
if (USE_NLP)
add_subdirectory(libstemmer-c-cmake)

View File

@ -417,7 +417,49 @@ set(PARQUET_SRCS
#list(TRANSFORM PARQUET_SRCS PREPEND "${LIBRARY_DIR}/") # cmake 3.12
add_library(${PARQUET_LIBRARY} ${PARQUET_SRCS})
target_include_directories(${PARQUET_LIBRARY} SYSTEM PUBLIC "${ClickHouse_SOURCE_DIR}/contrib/arrow/cpp/src" "${CMAKE_CURRENT_SOURCE_DIR}/cpp/src" PRIVATE ${OPENSSL_INCLUDE_DIR})
include("${ClickHouse_SOURCE_DIR}/contrib/thrift/build/cmake/ConfigureChecks.cmake") # makes config.h
set (HAVE_ARPA_INET_H 1)
set (HAVE_FCNTL_H 1)
set (HAVE_GETOPT_H 1)
set (HAVE_INTTYPES_H 1)
set (HAVE_NETDB_H 1)
set (HAVE_NETINET_IN_H 1)
set (HAVE_SIGNAL_H 1)
set (HAVE_STDINT_H 1)
set (HAVE_UNISTD_H 1)
set (HAVE_PTHREAD_H 1)
set (HAVE_SYS_IOCTL_H 1)
set (HAVE_SYS_PARAM_H 1)
set (HAVE_SYS_RESOURCE_H 1)
set (HAVE_SYS_SOCKET_H 1)
set (HAVE_SYS_STAT_H 1)
set (HAVE_SYS_TIME_H 1)
set (HAVE_SYS_UN_H 1)
set (HAVE_POLL_H 1)
set (HAVE_SYS_POLL_H 1)
set (HAVE_SYS_SELECT_H 1)
set (HAVE_SCHED_H 1)
set (HAVE_STRING_H 1)
set (HAVE_STRINGS_H 1)
set (HAVE_GETHOSTBYNAME 1)
set (HAVE_STRERROR_R 1)
set (HAVE_SCHED_GET_PRIORITY_MAX 1)
set (HAVE_SCHED_GET_PRIORITY_MIN 1)
if (OS_LINUX)
set (STRERROR_R_CHAR_P 1)
endif ()
#set(PACKAGE ${PACKAGE_NAME})
#set(PACKAGE_STRING "${PACKAGE_NAME} ${PACKAGE_VERSION}")
#set(VERSION ${thrift_VERSION})
# generate a config.h file
configure_file("${CMAKE_CURRENT_SOURCE_DIR}/build/cmake/config.h.in" "${CMAKE_CURRENT_BINARY_DIR}/thrift/config.h")
include_directories("${CMAKE_CURRENT_BINARY_DIR}")
target_link_libraries(${PARQUET_LIBRARY} PUBLIC ${ARROW_LIBRARY} PRIVATE ${THRIFT_LIBRARY} boost::headers_only boost::regex ${OPENSSL_LIBRARIES})
if (SANITIZE STREQUAL "undefined")

contrib/base64 vendored

@ -1 +1 @@
Subproject commit af9b331f2b4f30b41c70f3a571ff904a8251c1d3
Subproject commit 9499e0c4945589973b9ea1bc927377cfbc84aa46

View File

@ -1,4 +1,4 @@
option (USE_INTERNAL_BOOST_LIBRARY "Use internal Boost library" ${NOT_UNBUNDLED})
option (USE_INTERNAL_BOOST_LIBRARY "Use internal Boost library" ON)
if (NOT USE_INTERNAL_BOOST_LIBRARY)
# 1.70 like in contrib/boost

View File

@ -0,0 +1,127 @@
# Need to use C++17 since the compilation is not possible with C++20 currently.
set (CMAKE_CXX_STANDARD 17)
set(CASS_ROOT_DIR ${CMAKE_SOURCE_DIR}/contrib/cassandra)
set(CASS_SRC_DIR "${CASS_ROOT_DIR}/src")
set(CASS_INCLUDE_DIR "${CASS_ROOT_DIR}/include")
# Ensure functions/modules are available
list(APPEND CMAKE_MODULE_PATH ${CASS_ROOT_DIR}/cmake)
set(CASS_BUILD_SHARED 1)
set(CASS_BUILD_STATIC 1)
set(CASS_USE_KERBEROS 0)
set(CASS_USE_LIBSSH2 0)
set(CASS_USE_OPENSSL 1)
set(CASS_USE_STD_ATOMIC 1)
set(CASS_USE_ZLIB 1)
file(GLOB SOURCES ${CASS_SRC_DIR}/*.cpp)
if(APPLE)
list(REMOVE_ITEM SOURCES ${CASS_SRC_DIR}/get_time-unix.cpp ${CASS_SRC_DIR}/get_time-win.cpp)
elseif(UNIX)
list(REMOVE_ITEM SOURCES ${CASS_SRC_DIR}/get_time-mac.cpp ${CASS_SRC_DIR}/get_time-win.cpp)
elseif(WIN32)
list(REMOVE_ITEM SOURCES ${CASS_SRC_DIR}/get_time-mac.cpp ${CASS_SRC_DIR}/get_time-unix.cpp)
endif()
if(CASS_USE_OPENSSL)
list(APPEND INCLUDE_DIRS ${CASS_SRC_DIR}/ssl)
list(APPEND SOURCES ${CASS_SRC_DIR}/ssl/ssl_openssl_impl.cpp ${CASS_SRC_DIR}/ssl/ring_buffer_bio.cpp)
else()
list(APPEND SOURCES ${CASS_SRC_DIR}/ssl/ssl_no_impl.cpp)
endif()
if(CASS_USE_KERBEROS)
list(APPEND INCLUDE_DIRS ${CASS_SRC_DIR}/gssapi)
list(APPEND SOURCES ${CASS_SRC_DIR}/gssapi/dse_auth_gssapi.cpp ${CASS_SRC_DIR}/gssapi/dse_auth_gssapi.hpp)
endif()
list(APPEND SOURCES ${CASS_SRC_DIR}/atomic/atomic_std.hpp)
add_library(curl_hostcheck OBJECT ${CASS_SRC_DIR}/third_party/curl/hostcheck.cpp)
add_library(hdr_histogram OBJECT ${CASS_SRC_DIR}/third_party/hdr_histogram/hdr_histogram.cpp)
add_library(http-parser OBJECT ${CASS_SRC_DIR}/third_party/http-parser/http_parser.c)
add_library(minizip OBJECT
${CASS_SRC_DIR}/third_party/minizip/ioapi.c
${CASS_SRC_DIR}/third_party/minizip/zip.c
${CASS_SRC_DIR}/third_party/minizip/unzip.c)
target_link_libraries(minizip zlib)
target_compile_definitions(minizip PRIVATE "-Dz_crc_t=unsigned long")
list(APPEND INCLUDE_DIRS
${CASS_SRC_DIR}/third_party/curl
${CASS_SRC_DIR}/third_party/hdr_histogram
${CASS_SRC_DIR}/third_party/http-parser
${CASS_SRC_DIR}/third_party/minizip
${CASS_SRC_DIR}/third_party/mt19937_64
${CASS_SRC_DIR}/third_party/rapidjson/rapidjson
${CASS_SRC_DIR}/third_party/sparsehash/src)
list(APPEND INCLUDE_DIRS ${CASS_INCLUDE_DIR} ${CASS_SRC_DIR})
set(HASH_FUN_H "functional")
set(HASH_NAME hash)
set(HASH_NAMESPACE "std")
set(HAVE_INTTYPES_H 1)
set(HAVE_STDINT_H 1)
set(HAVE_SYS_TYPES_H 1)
set(HAVE_MEMCPY 1)
set(HAVE_LONG_LONG 1)
set(HAVE_UINT16_T 1)
configure_file("${CASS_SRC_DIR}/third_party/sparsehash/config.h.cmake" "${CMAKE_CURRENT_BINARY_DIR}/sparsehash/internal/sparseconfig.h")
# Determine random availability
if (OS_LINUX)
#set (HAVE_GETRANDOM 1) - not on every Linux kernel
elseif (OS_FREEBSD OR OS_DARWIN)
set (HAVE_ARC4RANDOM 1)
endif ()
# Determine if sigpipe is available
if (OS_LINUX)
set (HAVE_SIGTIMEDWAIT 1)
else (OS_FREEBSD OR OS_DARWIN)
set (HAVE_NOSIGPIPE 1)
endif()
set (HAVE_BUILTIN_BSWAP32 1)
set (HAVE_BUILTIN_BSWAP64 1)
set(HAVE_BOOST_ATOMIC 0)
set(HAVE_STD_ATOMIC 1)
set(HAVE_KERBEROS ${CASS_USE_KERBEROS})
set(HAVE_OPENSSL ${CASS_USE_OPENSSL})
set(HAVE_ZLIB ${CASS_USE_ZLIB})
# Generate the driver_config.hpp file
configure_file(
${CASS_ROOT_DIR}/driver_config.hpp.in
${CMAKE_CURRENT_BINARY_DIR}/driver_config.hpp)
add_library(cassandra
${SOURCES}
$<TARGET_OBJECTS:curl_hostcheck>
$<TARGET_OBJECTS:hdr_histogram>
$<TARGET_OBJECTS:http-parser>
$<TARGET_OBJECTS:minizip>)
target_link_libraries(cassandra zlib)
add_library(cassandra_static ALIAS cassandra)
target_include_directories(cassandra PRIVATE ${CMAKE_CURRENT_BINARY_DIR} ${INCLUDE_DIRS})
target_compile_definitions(cassandra PRIVATE CASS_BUILDING)
target_link_libraries(cassandra uv)
if(CASS_USE_OPENSSL)
target_link_libraries(cassandra ssl)
endif()

View File

@ -1,4 +1,4 @@
option (USE_INTERNAL_CCTZ_LIBRARY "Use internal cctz library" ${NOT_UNBUNDLED})
option (USE_INTERNAL_CCTZ_LIBRARY "Use internal cctz library" ON)
if (NOT USE_INTERNAL_CCTZ_LIBRARY)
find_library (LIBRARY_CCTZ cctz)

View File

@ -0,0 +1,2 @@
add_library(fast_float INTERFACE)
target_include_directories(fast_float INTERFACE "${ClickHouse_SOURCE_DIR}/contrib/fast_float/include/")
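
A brief consumer sketch for the new interface target (the consuming target and source file are hypothetical): fast_float is header-only, so linking the INTERFACE library only propagates its include directory.

# Hypothetical consumer, not part of this diff.
add_library(example_parser_lib example_parser.cpp)
target_link_libraries(example_parser_lib PRIVATE fast_float)   # inherits the fast_float include path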

contrib/gcem vendored

@ -1 +0,0 @@
Subproject commit 8d4f1b5d76ea8f6ff12f3f4f34cda45424556b00

View File

@ -0,0 +1,11 @@
set (SRC_DIR "${ClickHouse_SOURCE_DIR}/contrib/googletest/googletest")
add_library(gtest "${SRC_DIR}/src/gtest-all.cc")
set_target_properties(gtest PROPERTIES VERSION "1.0.0")
target_compile_definitions (gtest INTERFACE GTEST_HAS_POSIX_RE=0)
target_include_directories(gtest SYSTEM PUBLIC "${SRC_DIR}/include")
target_include_directories(gtest PRIVATE "${SRC_DIR}")
add_library(gtest_main "${SRC_DIR}/src/gtest_main.cc")
set_target_properties(gtest_main PROPERTIES VERSION "1.0.0")
target_link_libraries(gtest_main PUBLIC gtest)
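
A short usage sketch for the bundled gtest targets defined above (the test target and source file are hypothetical): linking gtest_main provides main() and pulls in gtest transitively, and GTEST_HAS_POSIX_RE=0 propagates through gtest's INTERFACE definitions.

# Hypothetical test target, not part of this diff.
add_executable(example_unit_test example_unit_test.cpp)
target_link_libraries(example_unit_test PRIVATE gtest_main)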

View File

@ -17,7 +17,7 @@ if (NOT ENABLE_HYPERSCAN)
return()
endif()
option (USE_INTERNAL_HYPERSCAN_LIBRARY "Use internal hyperscan library" ${NOT_UNBUNDLED})
option (USE_INTERNAL_HYPERSCAN_LIBRARY "Use internal hyperscan library" ON)
if (NOT USE_INTERNAL_HYPERSCAN_LIBRARY)
find_library (LIBRARY_HYPERSCAN hs)

View File

@ -500,7 +500,6 @@ function(preprocess_et out_var)
COMMAND perl "${KRB5_SOURCE_DIR}/util/et/compile_et" -d "${KRB5_SOURCE_DIR}/util/et" ${in_f}
DEPENDS ${in_f} "${KRB5_SOURCE_DIR}/util/et/compile_et"
WORKING_DIRECTORY ${ET_PATH}
COMMENT "Creating preprocessed file ${F_C}"
VERBATIM
)
list(APPEND result ${F_C})
@ -526,7 +525,6 @@ add_custom_command(
add_custom_target(
ERROR_MAP_H
DEPENDS "${KRB5_SOURCE_DIR}/lib/gssapi/krb5/error_map.h"
COMMENT "generating error_map.h"
VERBATIM
)
@ -539,14 +537,12 @@ add_custom_command(
add_custom_target(
ERRMAP_H
DEPENDS "${KRB5_SOURCE_DIR}/lib/gssapi/generic/errmap.h"
COMMENT "generating errmap.h"
VERBATIM
)
add_custom_target(
KRB_5_H
DEPENDS "${CMAKE_CURRENT_BINARY_DIR}/include/krb5/krb5.h"
COMMENT "generating krb5.h"
VERBATIM
)

View File

@ -0,0 +1,107 @@
set (SRC_DIR "${ClickHouse_SOURCE_DIR}/contrib/libgsasl")
set(SRCS
${SRC_DIR}/gl/gc-gnulib.c
${SRC_DIR}/gl/printf-parse.c
${SRC_DIR}/gl/c-ctype.c
${SRC_DIR}/gl/float.c
${SRC_DIR}/gl/printf-args.c
${SRC_DIR}/gl/hmac-sha1.c
${SRC_DIR}/gl/itold.c
${SRC_DIR}/gl/hmac-md5.c
${SRC_DIR}/gl/gc-pbkdf2-sha1.c
${SRC_DIR}/gl/md5.c
${SRC_DIR}/gl/base64.c
${SRC_DIR}/gl/memxor.c
${SRC_DIR}/gl/sha1.c
${SRC_DIR}/openid20/client.c
${SRC_DIR}/openid20/mechinfo.c
${SRC_DIR}/openid20/server.c
${SRC_DIR}/anonymous/client.c
${SRC_DIR}/anonymous/mechinfo.c
${SRC_DIR}/anonymous/server.c
${SRC_DIR}/saml20/client.c
${SRC_DIR}/saml20/mechinfo.c
${SRC_DIR}/saml20/server.c
${SRC_DIR}/scram/parser.c
${SRC_DIR}/scram/printer.c
${SRC_DIR}/scram/tokens.c
${SRC_DIR}/scram/client.c
${SRC_DIR}/scram/mechinfo.c
${SRC_DIR}/scram/server.c
${SRC_DIR}/scram/validate.c
${SRC_DIR}/src/free.c
${SRC_DIR}/src/supportp.c
${SRC_DIR}/src/init.c
${SRC_DIR}/src/mechtools.c
${SRC_DIR}/src/error.c
${SRC_DIR}/src/property.c
${SRC_DIR}/src/done.c
${SRC_DIR}/src/callback.c
${SRC_DIR}/src/xstart.c
${SRC_DIR}/src/xfinish.c
${SRC_DIR}/src/version.c
${SRC_DIR}/src/xstep.c
${SRC_DIR}/src/mechname.c
${SRC_DIR}/src/xcode.c
${SRC_DIR}/src/crypto.c
${SRC_DIR}/src/doxygen.c
${SRC_DIR}/src/suggest.c
${SRC_DIR}/src/saslprep.c
${SRC_DIR}/src/listmech.c
${SRC_DIR}/src/register.c
${SRC_DIR}/src/base64.c
${SRC_DIR}/src/md5pwd.c
${SRC_DIR}/external/client.c
${SRC_DIR}/external/mechinfo.c
${SRC_DIR}/external/server.c
${SRC_DIR}/securid/client.c
${SRC_DIR}/securid/mechinfo.c
${SRC_DIR}/securid/server.c
${SRC_DIR}/plain/client.c
${SRC_DIR}/plain/mechinfo.c
${SRC_DIR}/plain/server.c
${SRC_DIR}/cram-md5/client.c
${SRC_DIR}/cram-md5/challenge.c
${SRC_DIR}/cram-md5/mechinfo.c
${SRC_DIR}/cram-md5/server.c
${SRC_DIR}/cram-md5/digest.c
${SRC_DIR}/digest-md5/client.c
${SRC_DIR}/digest-md5/digesthmac.c
${SRC_DIR}/digest-md5/free.c
${SRC_DIR}/digest-md5/getsubopt.c
${SRC_DIR}/digest-md5/mechinfo.c
${SRC_DIR}/digest-md5/nonascii.c
${SRC_DIR}/digest-md5/parser.c
${SRC_DIR}/digest-md5/printer.c
${SRC_DIR}/digest-md5/qop.c
${SRC_DIR}/digest-md5/server.c
${SRC_DIR}/digest-md5/session.c
${SRC_DIR}/digest-md5/test-parser.c
${SRC_DIR}/digest-md5/validate.c
${SRC_DIR}/login/client.c
${SRC_DIR}/login/mechinfo.c
${SRC_DIR}/login/server.c
)
if (USE_KRB5)
set(SRCS ${SRCS}
${SRC_DIR}/gssapi/client.c
${SRC_DIR}/gssapi/mechinfo.c
${SRC_DIR}/gssapi/server.c)
endif()
add_library(gsasl ${SRCS})
target_include_directories(gsasl PUBLIC ${SRC_DIR})
target_include_directories(gsasl PUBLIC ${SRC_DIR}/gl)
target_include_directories(gsasl PUBLIC ${SRC_DIR}/src)
target_include_directories(gsasl PUBLIC ${SRC_DIR}/digest-md5)
target_include_directories(gsasl PRIVATE "${ClickHouse_SOURCE_DIR}/contrib/libgsasl-cmake/linux_x86_64/include")
target_compile_definitions (gsasl PRIVATE HAVE_CONFIG_H=1)
if (USE_KRB5)
target_link_libraries(gsasl PUBLIC ${KRB5_LIBRARY})
target_compile_definitions (gsasl PRIVATE HAVE_GSSAPI_H=1 USE_GSSAPI=1)
endif()

File diff suppressed because it is too large

contrib/libhdfs3 vendored

@ -1 +1 @@
Subproject commit a8c37ee001af1ae88e5dfa637ae5b31b087c96d3
Subproject commit 9194af44588633c1b2dae44bf945804401ff883e

View File

@ -1,23 +1,4 @@
if (ENABLE_PROTOBUF AND NOT USE_INTERNAL_PROTOBUF_LIBRARY)
option(PROTOBUF_OLD_ABI_COMPAT "Set to ON for compatibility with an external protobuf which was compiled with the old C++ ABI" OFF)
endif()
if (PROTOBUF_OLD_ABI_COMPAT)
if (NOT ENABLE_PROTOBUF OR USE_INTERNAL_PROTOBUF_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "PROTOBUF_OLD_ABI_COMPAT option is ignored")
endif()
endif()
if (NOT USE_INTERNAL_PROTOBUF_LIBRARY AND PROTOBUF_OLD_ABI_COMPAT)
# compatible with protobuf which was compiled with the old C++ ABI
set(CMAKE_CXX_FLAGS "-D_GLIBCXX_USE_CXX11_ABI=0")
set(CMAKE_C_FLAGS "")
if (NOT (CMAKE_VERSION VERSION_LESS "3.8.0"))
unset(CMAKE_CXX_STANDARD)
endif ()
endif()
if (${ENABLE_LIBRARIES} AND ${ENABLE_KRB5})
if (${ENABLE_KRB5})
SET(WITH_KERBEROS 1)
else()
SET(WITH_KERBEROS 0)
@ -46,9 +27,7 @@ set(PROTO_FILES
"${HDFS3_SOURCE_DIR}/proto/datatransfer.proto"
)
if(USE_PROTOBUF)
PROTOBUF_GENERATE_CPP(PROTO_SOURCES PROTO_HEADERS ${PROTO_FILES})
endif()
PROTOBUF_GENERATE_CPP(PROTO_SOURCES PROTO_HEADERS ${PROTO_FILES})
configure_file("${HDFS3_SOURCE_DIR}/platform.h.in" "${CMAKE_CURRENT_BINARY_DIR}/platform.h")
@ -108,95 +87,14 @@ set(SRCS
"${HDFS3_SOURCE_DIR}/common/Hash.cpp"
"${HDFS3_SOURCE_DIR}/common/SWCrc32c.cpp"
"${HDFS3_SOURCE_DIR}/common/Thread.cpp"
"${HDFS3_SOURCE_DIR}/network/TcpSocket.h"
"${HDFS3_SOURCE_DIR}/network/BufferedSocketReader.h"
"${HDFS3_SOURCE_DIR}/network/Socket.h"
"${HDFS3_SOURCE_DIR}/network/DomainSocket.h"
"${HDFS3_SOURCE_DIR}/network/Syscall.h"
"${HDFS3_SOURCE_DIR}/client/InputStreamImpl.h"
"${HDFS3_SOURCE_DIR}/client/FileSystem.h"
"${HDFS3_SOURCE_DIR}/client/ReadShortCircuitInfo.h"
"${HDFS3_SOURCE_DIR}/client/InputStreamInter.h"
"${HDFS3_SOURCE_DIR}/client/FileSystemImpl.h"
"${HDFS3_SOURCE_DIR}/client/PacketPool.h"
"${HDFS3_SOURCE_DIR}/client/Pipeline.h"
"${HDFS3_SOURCE_DIR}/client/OutputStreamInter.h"
"${HDFS3_SOURCE_DIR}/client/RemoteBlockReader.h"
"${HDFS3_SOURCE_DIR}/client/Token.h"
"${HDFS3_SOURCE_DIR}/client/KerberosName.h"
"${HDFS3_SOURCE_DIR}/client/DirectoryIterator.h"
"${HDFS3_SOURCE_DIR}/client/hdfs.h"
"${HDFS3_SOURCE_DIR}/client/FileSystemStats.h"
"${HDFS3_SOURCE_DIR}/client/FileSystemKey.h"
"${HDFS3_SOURCE_DIR}/client/DataTransferProtocolSender.h"
"${HDFS3_SOURCE_DIR}/client/Packet.h"
"${HDFS3_SOURCE_DIR}/client/PacketHeader.h"
"${HDFS3_SOURCE_DIR}/client/FileSystemInter.h"
"${HDFS3_SOURCE_DIR}/client/LocalBlockReader.h"
"${HDFS3_SOURCE_DIR}/client/TokenInternal.h"
"${HDFS3_SOURCE_DIR}/client/InputStream.h"
"${HDFS3_SOURCE_DIR}/client/PipelineAck.h"
"${HDFS3_SOURCE_DIR}/client/BlockReader.h"
"${HDFS3_SOURCE_DIR}/client/Permission.h"
"${HDFS3_SOURCE_DIR}/client/OutputStreamImpl.h"
"${HDFS3_SOURCE_DIR}/client/LeaseRenewer.h"
"${HDFS3_SOURCE_DIR}/client/UserInfo.h"
"${HDFS3_SOURCE_DIR}/client/PeerCache.h"
"${HDFS3_SOURCE_DIR}/client/OutputStream.h"
"${HDFS3_SOURCE_DIR}/client/FileStatus.h"
"${HDFS3_SOURCE_DIR}/client/DataTransferProtocol.h"
"${HDFS3_SOURCE_DIR}/client/BlockLocation.h"
"${HDFS3_SOURCE_DIR}/rpc/RpcConfig.h"
"${HDFS3_SOURCE_DIR}/rpc/SaslClient.h"
"${HDFS3_SOURCE_DIR}/rpc/RpcAuth.h"
"${HDFS3_SOURCE_DIR}/rpc/RpcClient.h"
"${HDFS3_SOURCE_DIR}/rpc/RpcCall.h"
"${HDFS3_SOURCE_DIR}/rpc/RpcContentWrapper.h"
"${HDFS3_SOURCE_DIR}/rpc/RpcProtocolInfo.h"
"${HDFS3_SOURCE_DIR}/rpc/RpcRemoteCall.h"
"${HDFS3_SOURCE_DIR}/rpc/RpcServerInfo.h"
"${HDFS3_SOURCE_DIR}/rpc/RpcChannel.h"
"${HDFS3_SOURCE_DIR}/rpc/RpcChannelKey.h"
"${HDFS3_SOURCE_DIR}/server/BlockLocalPathInfo.h"
"${HDFS3_SOURCE_DIR}/server/LocatedBlocks.h"
"${HDFS3_SOURCE_DIR}/server/DatanodeInfo.h"
"${HDFS3_SOURCE_DIR}/server/RpcHelper.h"
"${HDFS3_SOURCE_DIR}/server/ExtendedBlock.h"
"${HDFS3_SOURCE_DIR}/server/NamenodeInfo.h"
"${HDFS3_SOURCE_DIR}/server/NamenodeImpl.h"
"${HDFS3_SOURCE_DIR}/server/LocatedBlock.h"
"${HDFS3_SOURCE_DIR}/server/NamenodeProxy.h"
"${HDFS3_SOURCE_DIR}/server/Datanode.h"
"${HDFS3_SOURCE_DIR}/server/Namenode.h"
"${HDFS3_SOURCE_DIR}/common/XmlConfig.h"
"${HDFS3_SOURCE_DIR}/common/Logger.h"
"${HDFS3_SOURCE_DIR}/common/WriteBuffer.h"
"${HDFS3_SOURCE_DIR}/common/HWCrc32c.h"
"${HDFS3_SOURCE_DIR}/common/Checksum.h"
"${HDFS3_SOURCE_DIR}/common/SessionConfig.h"
"${HDFS3_SOURCE_DIR}/common/Unordered.h"
"${HDFS3_SOURCE_DIR}/common/BigEndian.h"
"${HDFS3_SOURCE_DIR}/common/Thread.h"
"${HDFS3_SOURCE_DIR}/common/StackPrinter.h"
"${HDFS3_SOURCE_DIR}/common/Exception.h"
"${HDFS3_SOURCE_DIR}/common/WritableUtils.h"
"${HDFS3_SOURCE_DIR}/common/StringUtil.h"
"${HDFS3_SOURCE_DIR}/common/LruMap.h"
"${HDFS3_SOURCE_DIR}/common/Function.h"
"${HDFS3_SOURCE_DIR}/common/DateTime.h"
"${HDFS3_SOURCE_DIR}/common/Hash.h"
"${HDFS3_SOURCE_DIR}/common/SWCrc32c.h"
"${HDFS3_SOURCE_DIR}/common/ExceptionInternal.h"
"${HDFS3_SOURCE_DIR}/common/Memory.h"
"${HDFS3_SOURCE_DIR}/common/FileWrapper.h"
)
${PROTO_SOURCES}
)
# old kernels (< 3.17) don't have SYS_getrandom. Always use the POSIX implementation to have better compatibility
set_source_files_properties("${HDFS3_SOURCE_DIR}/rpc/RpcClient.cpp" PROPERTIES COMPILE_FLAGS "-DBOOST_UUID_RANDOM_PROVIDER_FORCE_POSIX=1")
# target
add_library(hdfs3 ${SRCS} ${PROTO_SOURCES} ${PROTO_HEADERS})
add_library(hdfs3 ${SRCS})
if(USE_INTERNAL_PROTOBUF_LIBRARY)
add_dependencies(hdfs3 protoc)
@ -218,6 +116,7 @@ target_link_libraries(hdfs3 PRIVATE ${LIBXML2_LIBRARIES})
# inherit from parent cmake
target_include_directories(hdfs3 PRIVATE ${Protobuf_INCLUDE_DIR})
target_link_libraries(hdfs3 PRIVATE ${Protobuf_LIBRARY} boost::headers_only)
if(OPENSSL_INCLUDE_DIR AND OPENSSL_LIBRARIES)
target_include_directories(hdfs3 PRIVATE ${OPENSSL_INCLUDE_DIR})
target_link_libraries(hdfs3 PRIVATE ${OPENSSL_LIBRARIES})

View File

@ -66,7 +66,7 @@
#cmakedefine WITH_SASL_OAUTHBEARER 1
#cmakedefine WITH_SASL_CYRUS 1
// crc32chw
#if !defined(__PPC__) && !defined(__riscv) && (!defined(__aarch64__) || defined(__ARM_FEATURE_CRC32)) && !(defined(__aarch64__) && defined(__APPLE__))
#if !defined(__PPC__) && !defined(__riscv) && !defined(__aarch64__)
#define WITH_CRC32C_HW 1
#endif
// regex

View File

@ -1,4 +1,4 @@
option (USE_INTERNAL_LZ4_LIBRARY "Use internal lz4 library" ${NOT_UNBUNDLED})
option (USE_INTERNAL_LZ4_LIBRARY "Use internal lz4 library" ON)
if (NOT USE_INTERNAL_LZ4_LIBRARY)
find_library (LIBRARY_LZ4 lz4)

View File

@ -0,0 +1,243 @@
if (GLIBC_COMPATIBILITY)
set(LIBM glibc-compatibility)
endif()
# This is the LGPL libmariadb project.
set(CC_SOURCE_DIR ${CMAKE_SOURCE_DIR}/contrib/mariadb-connector-c)
set(CC_BINARY_DIR ${CMAKE_CURRENT_BINARY_DIR})
set(WITH_SSL ON)
set(MARIADB_CONNECTOR_C_COPYRIGHT "2013-2017 MariaDB Corporation Ab")
set(PROTOCOL_VERSION 10) # we adapted new password option from PHP's mysqlnd !
# if C/C is built as a subproject inside the MariaDB server tree we will
# use the version defined by the server
if(MAJOR_VERSION)
set(MARIADB_CLIENT_VERSION_MAJOR ${MAJOR_VERSION})
set(MARIADB_CLIENT_VERSION_MINOR ${MINOR_VERSION})
set(MARIADB_CLIENT_VERSION_PATCH ${PATCH_VERSION})
set(MARIADB_CLIENT_VERSION_EXTRA ${EXTRA_VERSION})
else()
set(MARIADB_CLIENT_VERSION_MAJOR "10")
set(MARIADB_CLIENT_VERSION_MINOR "4")
set(MARIADB_CLIENT_VERSION_PATCH "3")
set(MARIADB_CLIENT_VERSION_EXTRA "")
endif()
set(MARIADB_CLIENT_VERSION "${MARIADB_CLIENT_VERSION_MAJOR}.${MARIADB_CLIENT_VERSION_MINOR}.${MARIADB_CLIENT_VERSION_PATCH}${MARIADB_CLIENT_VERSION_EXTRA}")
set(MARIADB_BASE_VERSION "mariadb-${MARIADB_CLIENT_VERSION_MAJOR}.${MARIADB_CLIENT_VERSION_MINOR}")
MATH(EXPR MARIADB_VERSION_ID "${MARIADB_CLIENT_VERSION_MAJOR} * 10000 +
${MARIADB_CLIENT_VERSION_MINOR} * 100 +
${MARIADB_CLIENT_VERSION_PATCH}")
IF (NOT MARIADB_PORT)
set(MARIADB_PORT 3306)
ENDIF ()
if(NOT MARIADB_UNIX_ADDR)
set(MARIADB_UNIX_ADDR "/tmp/mysql.sock")
endif()
set(HAVE_ALLOCA_H 1)
set(HAVE_ARPA_INET_H 1)
set(HAVE_DLFCN_H 1)
set(HAVE_FCNTL_H 1)
set(HAVE_FLOAT_H 1)
set(HAVE_LIMITS_H 1)
set(HAVE_PWD_H 1)
set(HAVE_SCHED_H 1)
set(HAVE_SELECT_H 0)
set(INCLUDE_SIGNAL 1)
set(HAVE_SIGNAL 1)
set(HAVE_STDDEF_H 1)
set(HAVE_STDINT_H 1)
set(HAVE_STDLIB_H 1)
set(HAVE_STRING_H 1)
set(HAVE_STRINGS_H 1)
set(HAVE_SYS_IOCTL_H 1)
set(HAVE_SYS_SELECT_H 1)
set(HAVE_SYS_SOCKET_H 1)
set(HAVE_SYS_TYPES_H 1)
set(HAVE_SYS_UN_H 1)
set(HAVE_UNISTD_H 1)
set(HAVE_UTIME_H 1)
set(HAVE_UCONTEXT_H 1)
set(HAVE_ALLOCA 1)
set(HAVE_DLERROR 0)
set(HAVE_DLOPEN 0)
set(HAVE_FCNTL 1)
set(HAVE_MEMCPY 1)
set(HAVE_NL_LANGINFO 0)
set(HAVE_SETLOCALE 0)
set(HAVE_POLL 1)
set(SIZEOF_CHARP 8)
set(SIZEOF_INT 4)
set(SIZEOF_LONG 8)
set(SIZEOF_LONG_LONG 8)
set(SIZEOF_SIZE_T 8)
set(SOCKET_SIZE_TYPE socklen_t)
set(SYSTEM_LIBS ${SYSTEM_LIBS} zlib)
if(CMAKE_HAVE_PTHREAD_H)
set(CMAKE_REQUIRED_INCLUDES pthread.h)
endif()
add_definitions(-DMARIADB_SYSTEM_TYPE="${CMAKE_SYSTEM_NAME}")
add_definitions(-DMARIADB_MACHINE_TYPE="${CMAKE_SYSTEM_PROCESSOR}")
set(HAVE_THREADS 1)
set(DEFAULT_CHARSET "utf8mb4")
add_definitions(-DHAVE_OPENSSL -DHAVE_TLS)
set(SSL_LIBRARIES ${OPENSSL_SSL_LIBRARY} ${OPENSSL_CRYPTO_LIBRARY})
include_directories(BEFORE ${OPENSSL_INCLUDE_DIR})
set(TLS_LIBRARY_VERSION "OpenSSL ${OPENSSL_VERSION}")
set(ENABLED_LOCAL_INFILE OFF)
CONFIGURE_FILE(${CC_SOURCE_DIR}/include/ma_config.h.in
${CC_BINARY_DIR}/include-private/ma_config.h)
CONFIGURE_FILE(${CC_SOURCE_DIR}/include/ma_config.h.in
${CC_BINARY_DIR}/include-private/config.h)
CONFIGURE_FILE(${CC_SOURCE_DIR}/include/mariadb_version.h.in
${CC_BINARY_DIR}/include-public/mariadb_version.h)
if(WITH_SSL)
set(SYSTEM_LIBS ${SYSTEM_LIBS} ${SSL_LIBRARIES})
endif()
function(REGISTER_PLUGIN)
SET(one_value_keywords TARGET TYPE)
SET(multi_value_keywords SOURCES)
cmake_parse_arguments(CC_PLUGIN
"${options}"
"${one_value_keywords}"
"${multi_value_keywords}"
${ARGN})
# overwrite default if it was specified with cmake option
string(TOUPPER ${CC_PLUGIN_TARGET} cc_plugin)
if(NOT "${CLIENT_PLUGIN_${cc_plugin}}" STREQUAL "")
SET(CC_PLUGIN_DEFAULT ${CLIENT_PLUGIN_${cc_plugin}})
endif()
# use uppercase
string(TOUPPER ${CC_PLUGIN_TARGET} target_name)
string(TOUPPER "${CC_PLUGIN_CONFIGURATIONS}" CC_PLUGIN_CONFIGURATIONS)
if(NOT ${PLUGIN_${target_name}} STREQUAL "")
string(TOUPPER ${PLUGIN_${target_name}} PLUGIN_${target_name})
set(CC_PLUGIN_DEFAULT ${PLUGIN_${target_name}})
endif()
set(PLUGINS_STATIC ${PLUGINS_STATIC} ${CC_PLUGIN_TARGET} PARENT_SCOPE)
set(LIBMARIADB_PLUGIN_CFLAGS ${LIBMARIADB_PLUGIN_CFLAGS} ${CC_PLUGIN_COMPILE_OPTIONS} PARENT_SCOPE)
set(LIBMARIADB_PLUGIN_INCLUDES ${LIBMARIADB_PLUGIN_INCLUDES} ${CC_PLUGIN_INCLUDES} PARENT_SCOPE)
set(LIBMARIADB_PLUGIN_SOURCES ${LIBMARIADB_PLUGIN_SOURCES} ${CC_PLUGIN_SOURCES} PARENT_SCOPE)
set(LIBMARIADB_PLUGIN_LIBS ${LIBMARIADB_PLUGIN_LIBS} ${CC_PLUGIN_LIBRARIES} PARENT_SCOPE)
endfunction()
SET(PLUGIN_EXTRA_FILES ${CC_SOURCE_DIR}/libmariadb/ma_errmsg.c)
# socket-based pvio plugin
REGISTER_PLUGIN(TARGET pvio_socket
TYPE MARIADB_CLIENT_PLUGIN_PVIO
SOURCES "${CC_SOURCE_DIR}/plugins/pvio/pvio_socket.c")
# SHA256 caching plugin for MySQL 8.0 connection
REGISTER_PLUGIN(TARGET caching_sha2_password
TYPE MARIADB_CLIENT_PLUGIN_AUTH
SOURCES "${CC_SOURCE_DIR}/plugins/auth/caching_sha2_pw.c")
REGISTER_PLUGIN(TARGET sha256_password
TYPE MARIADB_CLIENT_PLUGIN_AUTH
SOURCES "${CC_SOURCE_DIR}/plugins/auth/sha256_pw.c")
#native password
REGISTER_PLUGIN(TARGET mysql_native_password
TYPE MARIADB_CLIENT_PLUGIN_AUTH
SOURCES "${CC_SOURCE_DIR}/plugins/auth/my_auth.c")
REGISTER_PLUGIN(TARGET aurora
TYPE MARIADB_CLIENT_PLUGIN_CONNECTION
SOURCES "${CC_SOURCE_DIR}/plugins/connection/aurora.c")
add_definitions(-D HAVE_COMPRESS)
add_definitions(-D LIBMARIADB)
add_definitions(-D THREAD)
# handle static plugins
set(LIBMARIADB_SOURCES ${LIBMARIADB_PLUGIN_SOURCES})
set(SYSTEM_LIBS ${SYSTEM_LIBS} ${LIBMARIADB_PLUGIN_LIBS})
add_definitions(${LIBMARIADB_PLUGIN_DEFS})
FOREACH(plugin ${PLUGINS_STATIC})
set(EXTERNAL_PLUGINS "${EXTERNAL_PLUGINS} extern struct st_mysql_client_plugin ${plugin}_client_plugin;\n")
set(BUILTIN_PLUGINS "${BUILTIN_PLUGINS} (struct st_mysql_client_plugin *)&${plugin}_client_plugin,\n")
ENDFOREACH()
CONFIGURE_FILE(${CC_SOURCE_DIR}/libmariadb/ma_client_plugin.c.in
${CC_BINARY_DIR}/libmariadb/ma_client_plugin.c)
set(LIBMARIADB_SOURCES ${LIBMARIADB_SOURCES}
${CC_SOURCE_DIR}/plugins/auth/my_auth.c
${CC_SOURCE_DIR}/libmariadb/ma_array.c
${CC_SOURCE_DIR}/libmariadb/ma_charset.c
${CC_SOURCE_DIR}/libmariadb/ma_hash.c
${CC_SOURCE_DIR}/libmariadb/ma_net.c
${CC_SOURCE_DIR}/libmariadb/mariadb_charset.c
${CC_SOURCE_DIR}/libmariadb/ma_time.c
${CC_SOURCE_DIR}/libmariadb/ma_default.c
${CC_SOURCE_DIR}/libmariadb/ma_errmsg.c
${CC_SOURCE_DIR}/libmariadb/mariadb_lib.c
${CC_SOURCE_DIR}/libmariadb/ma_list.c
${CC_SOURCE_DIR}/libmariadb/ma_pvio.c
${CC_SOURCE_DIR}/libmariadb/ma_tls.c
${CC_SOURCE_DIR}/libmariadb/ma_alloc.c
${CC_SOURCE_DIR}/libmariadb/ma_compress.c
${CC_SOURCE_DIR}/libmariadb/ma_init.c
${CC_SOURCE_DIR}/libmariadb/ma_password.c
${CC_SOURCE_DIR}/libmariadb/ma_ll2str.c
${CC_SOURCE_DIR}/libmariadb/ma_sha1.c
${CC_SOURCE_DIR}/libmariadb/mariadb_stmt.c
${CC_SOURCE_DIR}/libmariadb/ma_loaddata.c
${CC_SOURCE_DIR}/libmariadb/ma_stmt_codec.c
${CC_SOURCE_DIR}/libmariadb/ma_string.c
${CC_SOURCE_DIR}/libmariadb/ma_dtoa.c
${CC_SOURCE_DIR}/libmariadb/mariadb_rpl.c
${CC_SOURCE_DIR}/libmariadb/ma_io.c
${CC_SOURCE_DIR}/libmariadb/secure/openssl.c
${CC_SOURCE_DIR}/libmariadb/secure/openssl_crypt.c
${CC_BINARY_DIR}/libmariadb/ma_client_plugin.c
)
if(ICONV_INCLUDE_DIR)
include_directories(BEFORE ${ICONV_INCLUDE_DIR})
endif()
add_definitions(-DLIBICONV_PLUG)
if(ZLIB_FOUND AND WITH_EXTERNAL_ZLIB)
include_directories(${ZLIB_INCLUDE_DIR})
endif()
if(WITH_DYNCOL)
set(LIBMARIADB_SOURCES ${LIBMARIADB_SOURCES} ${CC_SOURCE_DIR}/libmariadb/mariadb_dyncol.c)
endif()
set(LIBMARIADB_SOURCES ${LIBMARIADB_SOURCES} ${CC_SOURCE_DIR}/libmariadb/mariadb_async.c ${CC_SOURCE_DIR}/libmariadb/ma_context.c)
add_library(mariadbclient STATIC ${LIBMARIADB_SOURCES})
target_link_libraries(mariadbclient ${SYSTEM_LIBS})
target_include_directories(mariadbclient
PRIVATE ${CC_BINARY_DIR}/include-private
PUBLIC ${CC_BINARY_DIR}/include-public ${CC_SOURCE_DIR}/include ${CC_SOURCE_DIR}/libmariadb)
set_target_properties(mariadbclient PROPERTIES IMPORTED_INTERFACE_LINK_LIBRARIES "${SYSTEM_LIBS}")
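
A hedged caller-side sketch (not part of this diff): REGISTER_PLUGIN() above reads CLIENT_PLUGIN_<UPPERCASED TARGET> and uses it as the plugin's default, so a caller can still override individual plugins before adding the subdirectory, much as the removed mysql_support() helper used to do.

# Hypothetical caller-side snippet:
set(CLIENT_PLUGIN_CACHING_SHA2_PASSWORD STATIC)   # picked up via CLIENT_PLUGIN_${cc_plugin} in REGISTER_PLUGIN()
set(CLIENT_PLUGIN_SHA256_PASSWORD STATIC)
add_subdirectory(mariadb-connector-c-cmake)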

contrib/poco vendored

@ -1 +1 @@
Subproject commit 173fb31717837d366152c508619b09dcf11786da
Subproject commit 258b9ba6cd245ff88e9346f75c43464c403f329d

View File

@ -51,6 +51,7 @@ if (USE_INTERNAL_POCO_LIBRARY)
"${LIBRARY_DIR}/Foundation/src/Channel.cpp"
"${LIBRARY_DIR}/Foundation/src/Checksum.cpp"
"${LIBRARY_DIR}/Foundation/src/Clock.cpp"
"${LIBRARY_DIR}/Foundation/src/CompressedLogFile.cpp"
"${LIBRARY_DIR}/Foundation/src/Condition.cpp"
"${LIBRARY_DIR}/Foundation/src/Configurable.cpp"
"${LIBRARY_DIR}/Foundation/src/ConsoleChannel.cpp"
@ -222,7 +223,7 @@ if (USE_INTERNAL_POCO_LIBRARY)
POCO_OS_FAMILY_UNIX
)
target_include_directories (_poco_foundation SYSTEM PUBLIC "${LIBRARY_DIR}/Foundation/include")
target_link_libraries (_poco_foundation PRIVATE Poco::Foundation::PCRE ${ZLIB_LIBRARIES})
target_link_libraries (_poco_foundation PRIVATE Poco::Foundation::PCRE ${ZLIB_LIBRARIES} lz4)
else ()
add_library (Poco::Foundation UNKNOWN IMPORTED GLOBAL)

View File

@ -1,39 +1,235 @@
set(protobuf_SOURCE_DIR "${ClickHouse_SOURCE_DIR}/contrib/protobuf")
set(protobuf_BINARY_DIR "${ClickHouse_BINARY_DIR}/contrib/protobuf")
set(protobuf_source_dir "${ClickHouse_SOURCE_DIR}/contrib/protobuf")
set(protobuf_binary_dir "${ClickHouse_BINARY_DIR}/contrib/protobuf")
set(protobuf_WITH_ZLIB 0 CACHE INTERNAL "" FORCE) # actually will use zlib, but skip find
set(protobuf_BUILD_TESTS OFF CACHE INTERNAL "" FORCE)
if (MAKE_STATIC_LIBRARIES)
set(protobuf_BUILD_SHARED_LIBS OFF CACHE INTERNAL "" FORCE)
else ()
set(protobuf_BUILD_SHARED_LIBS ON CACHE INTERNAL "" FORCE)
endif ()
add_definitions(-DGOOGLE_PROTOBUF_CMAKE_BUILD)
if (CMAKE_CROSSCOMPILING)
# Will build 'protoc' for host arch instead of cross-compiling
set(protobuf_BUILD_PROTOC_BINARIES OFF CACHE INTERNAL "" FORCE)
endif ()
add_definitions(-DHAVE_PTHREAD)
add_definitions(-DHAVE_ZLIB)
add_subdirectory("${protobuf_SOURCE_DIR}/cmake" "${protobuf_BINARY_DIR}")
include_directories(
${ZLIB_INCLUDE_DIRECTORIES}
${protobuf_binary_dir}
${protobuf_source_dir}/src)
if (ENABLE_FUZZING)
# `protoc` will be built with the sanitizer and it could fail during the ClickHouse build
# This easily reproduces in the oss-fuzz build pipeline
# To avoid this we could try to build `protoc` without any sanitizer with the option `-fno-sanitize=all`, but
# in this case we would face linker errors, because libcxx will still be built with the sanitizer
# So we simply suppress all of these failures with a combination of this flag and an environment variable
# export MSAN_OPTIONS=exit_code=0
target_compile_options(protoc PRIVATE "-fsanitize-recover=all")
set(libprotobuf_lite_files
${protobuf_source_dir}/src/google/protobuf/any_lite.cc
${protobuf_source_dir}/src/google/protobuf/arena.cc
${protobuf_source_dir}/src/google/protobuf/arenastring.cc
${protobuf_source_dir}/src/google/protobuf/extension_set.cc
${protobuf_source_dir}/src/google/protobuf/field_access_listener.cc
${protobuf_source_dir}/src/google/protobuf/generated_enum_util.cc
${protobuf_source_dir}/src/google/protobuf/generated_message_table_driven_lite.cc
${protobuf_source_dir}/src/google/protobuf/generated_message_util.cc
${protobuf_source_dir}/src/google/protobuf/implicit_weak_message.cc
${protobuf_source_dir}/src/google/protobuf/io/coded_stream.cc
${protobuf_source_dir}/src/google/protobuf/io/io_win32.cc
${protobuf_source_dir}/src/google/protobuf/io/strtod.cc
${protobuf_source_dir}/src/google/protobuf/io/zero_copy_stream.cc
${protobuf_source_dir}/src/google/protobuf/io/zero_copy_stream_impl.cc
${protobuf_source_dir}/src/google/protobuf/io/zero_copy_stream_impl_lite.cc
${protobuf_source_dir}/src/google/protobuf/map.cc
${protobuf_source_dir}/src/google/protobuf/message_lite.cc
${protobuf_source_dir}/src/google/protobuf/parse_context.cc
${protobuf_source_dir}/src/google/protobuf/repeated_field.cc
${protobuf_source_dir}/src/google/protobuf/stubs/bytestream.cc
${protobuf_source_dir}/src/google/protobuf/stubs/common.cc
${protobuf_source_dir}/src/google/protobuf/stubs/int128.cc
${protobuf_source_dir}/src/google/protobuf/stubs/status.cc
${protobuf_source_dir}/src/google/protobuf/stubs/statusor.cc
${protobuf_source_dir}/src/google/protobuf/stubs/stringpiece.cc
${protobuf_source_dir}/src/google/protobuf/stubs/stringprintf.cc
${protobuf_source_dir}/src/google/protobuf/stubs/structurally_valid.cc
${protobuf_source_dir}/src/google/protobuf/stubs/strutil.cc
${protobuf_source_dir}/src/google/protobuf/stubs/time.cc
${protobuf_source_dir}/src/google/protobuf/wire_format_lite.cc
)
add_library(libprotobuf-lite ${libprotobuf_lite_files})
target_link_libraries(libprotobuf-lite pthread)
if(${CMAKE_SYSTEM_NAME} STREQUAL "Android")
target_link_libraries(libprotobuf-lite log)
endif()
target_include_directories(libprotobuf-lite SYSTEM PUBLIC ${protobuf_source_dir}/src)
add_library(protobuf::libprotobuf-lite ALIAS libprotobuf-lite)
# We don't want to stop compilation on warnings in protobuf's headers.
# The following line overrides the value assigned by the command target_include_directories() in libprotobuf.cmake
set_property(TARGET libprotobuf PROPERTY INTERFACE_SYSTEM_INCLUDE_DIRECTORIES "${protobuf_SOURCE_DIR}/src")
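# A minimal sketch of the same pattern for any other bundled library (the target and path names
# here are hypothetical): declaring the interface include directories as SYSTEM makes consumers
# treat the third-party headers as system headers, so warnings in them do not stop compilation.
#   set_property(TARGET some_contrib_lib PROPERTY INTERFACE_SYSTEM_INCLUDE_DIRECTORIES "${some_contrib_source_dir}/include")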
if (CMAKE_CROSSCOMPILING)
set(libprotobuf_files
${protobuf_source_dir}/src/google/protobuf/any.cc
${protobuf_source_dir}/src/google/protobuf/any.pb.cc
${protobuf_source_dir}/src/google/protobuf/api.pb.cc
${protobuf_source_dir}/src/google/protobuf/compiler/importer.cc
${protobuf_source_dir}/src/google/protobuf/compiler/parser.cc
${protobuf_source_dir}/src/google/protobuf/descriptor.cc
${protobuf_source_dir}/src/google/protobuf/descriptor.pb.cc
${protobuf_source_dir}/src/google/protobuf/descriptor_database.cc
${protobuf_source_dir}/src/google/protobuf/duration.pb.cc
${protobuf_source_dir}/src/google/protobuf/dynamic_message.cc
${protobuf_source_dir}/src/google/protobuf/empty.pb.cc
${protobuf_source_dir}/src/google/protobuf/extension_set_heavy.cc
${protobuf_source_dir}/src/google/protobuf/field_mask.pb.cc
${protobuf_source_dir}/src/google/protobuf/generated_message_reflection.cc
${protobuf_source_dir}/src/google/protobuf/generated_message_table_driven.cc
${protobuf_source_dir}/src/google/protobuf/io/gzip_stream.cc
${protobuf_source_dir}/src/google/protobuf/io/printer.cc
${protobuf_source_dir}/src/google/protobuf/io/tokenizer.cc
${protobuf_source_dir}/src/google/protobuf/map_field.cc
${protobuf_source_dir}/src/google/protobuf/message.cc
${protobuf_source_dir}/src/google/protobuf/reflection_ops.cc
${protobuf_source_dir}/src/google/protobuf/service.cc
${protobuf_source_dir}/src/google/protobuf/source_context.pb.cc
${protobuf_source_dir}/src/google/protobuf/struct.pb.cc
${protobuf_source_dir}/src/google/protobuf/stubs/substitute.cc
${protobuf_source_dir}/src/google/protobuf/text_format.cc
${protobuf_source_dir}/src/google/protobuf/timestamp.pb.cc
${protobuf_source_dir}/src/google/protobuf/type.pb.cc
${protobuf_source_dir}/src/google/protobuf/unknown_field_set.cc
${protobuf_source_dir}/src/google/protobuf/util/delimited_message_util.cc
${protobuf_source_dir}/src/google/protobuf/util/field_comparator.cc
${protobuf_source_dir}/src/google/protobuf/util/field_mask_util.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/datapiece.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/default_value_objectwriter.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/error_listener.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/field_mask_utility.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/json_escaping.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/json_objectwriter.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/json_stream_parser.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/object_writer.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/proto_writer.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/protostream_objectsource.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/protostream_objectwriter.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/type_info.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/type_info_test_helper.cc
${protobuf_source_dir}/src/google/protobuf/util/internal/utility.cc
${protobuf_source_dir}/src/google/protobuf/util/json_util.cc
${protobuf_source_dir}/src/google/protobuf/util/message_differencer.cc
${protobuf_source_dir}/src/google/protobuf/util/time_util.cc
${protobuf_source_dir}/src/google/protobuf/util/type_resolver_util.cc
${protobuf_source_dir}/src/google/protobuf/wire_format.cc
${protobuf_source_dir}/src/google/protobuf/wrappers.pb.cc
)
add_library(libprotobuf ${libprotobuf_lite_files} ${libprotobuf_files})
target_link_libraries(libprotobuf pthread)
target_link_libraries(libprotobuf ${ZLIB_LIBRARIES})
if(${CMAKE_SYSTEM_NAME} STREQUAL "Android")
target_link_libraries(libprotobuf log)
endif()
target_include_directories(libprotobuf SYSTEM PUBLIC ${protobuf_source_dir}/src)
add_library(protobuf::libprotobuf ALIAS libprotobuf)
set(libprotoc_files
${protobuf_source_dir}/src/google/protobuf/compiler/code_generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/command_line_interface.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_enum.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_enum_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_extension.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_file.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_helpers.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_map_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_message.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_message_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_padding_optimizer.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_parse_function_generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_primitive_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_service.cc
${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_string_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_doc_comment.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_enum.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_enum_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_field_base.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_helpers.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_map_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_message.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_message_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_primitive_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_reflection_class.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_repeated_enum_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_repeated_message_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_repeated_primitive_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_source_generator_base.cc
${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_wrapper_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_context.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_doc_comment.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_enum.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_enum_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_enum_field_lite.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_enum_lite.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_extension.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_extension_lite.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_file.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_generator_factory.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_helpers.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_kotlin_generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_map_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_map_field_lite.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_message.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_message_builder.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_message_builder_lite.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_message_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_message_field_lite.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_message_lite.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_name_resolver.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_primitive_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_primitive_field_lite.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_service.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_shared_code_generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_string_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/java/java_string_field_lite.cc
${protobuf_source_dir}/src/google/protobuf/compiler/js/js_generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/js/well_known_types_embed.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_enum.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_enum_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_extension.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_file.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_helpers.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_map_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_message.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_message_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_oneof.cc
${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_primitive_field.cc
${protobuf_source_dir}/src/google/protobuf/compiler/php/php_generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/plugin.cc
${protobuf_source_dir}/src/google/protobuf/compiler/plugin.pb.cc
${protobuf_source_dir}/src/google/protobuf/compiler/python/python_generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/ruby/ruby_generator.cc
${protobuf_source_dir}/src/google/protobuf/compiler/subprocess.cc
${protobuf_source_dir}/src/google/protobuf/compiler/zip_writer.cc
)
add_library(libprotoc ${libprotoc_files})
target_link_libraries(libprotoc libprotobuf)
add_library(protobuf::libprotoc ALIAS libprotoc)
set(protoc_files ${protobuf_source_dir}/src/google/protobuf/compiler/main.cc)
if (CMAKE_HOST_SYSTEM_NAME STREQUAL CMAKE_SYSTEM_NAME
AND CMAKE_HOST_SYSTEM_PROCESSOR STREQUAL CMAKE_SYSTEM_PROCESSOR)
add_executable(protoc ${protoc_files})
target_link_libraries(protoc libprotoc libprotobuf pthread)
add_executable(protobuf::protoc ALIAS protoc)
if (ENABLE_FUZZING)
# `protoc` will be built with the sanitizer enabled and may fail during the ClickHouse build.
# This is easily reproduced in the oss-fuzz build pipeline.
# To avoid it we could try to build `protoc` without any sanitizer using `-fno-sanitize=all`, but
# in that case we would hit linker errors, because libcxx is still built with the sanitizer.
# So we simply suppress all of these failures with a combination of this flag and an environment variable:
# export MSAN_OPTIONS=exit_code=0
target_compile_options(protoc PRIVATE "-fsanitize-recover=all")
endif()
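# A minimal sketch of how the two pieces fit together (an assumption, not part of this change):
# `-fsanitize-recover=all` keeps sanitizer reports inside `protoc` non-fatal at runtime, and the
# CI job pairs it with the matching environment override so recovered runs still exit successfully:
#   export MSAN_OPTIONS=exit_code=0
#   cmake --build . --target protoc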
else ()
# Build 'protoc' for host arch
set (PROTOC_BUILD_DIR "${protobuf_BINARY_DIR}/build")
set (PROTOC_BUILD_DIR "${protobuf_binary_dir}/build")
if (NOT EXISTS "${PROTOC_BUILD_DIR}/protoc")
@@ -53,7 +249,7 @@ if (CMAKE_CROSSCOMPILING)
"-Dprotobuf_BUILD_CONFORMANCE=0"
"-Dprotobuf_BUILD_EXAMPLES=0"
"-Dprotobuf_BUILD_PROTOC_BINARIES=1"
"${protobuf_SOURCE_DIR}/cmake"
"${protobuf_source_dir}/cmake"
WORKING_DIRECTORY "${PROTOC_BUILD_DIR}"
COMMAND_ECHO STDOUT)
@@ -78,7 +274,7 @@ if (CMAKE_CROSSCOMPILING)
# -Dprotobuf_BUILD_CONFORMANCE=0
# -Dprotobuf_BUILD_EXAMPLES=0
# -Dprotobuf_BUILD_PROTOC_BINARIES=1
# "${protobuf_SOURCE_DIR}/cmake"
# "${protobuf_source_dir}/cmake"
#
# DEPENDS "${PROTOC_BUILD_DIR}"
# WORKING_DIRECTORY "${PROTOC_BUILD_DIR}"
@@ -97,5 +293,4 @@ if (CMAKE_CROSSCOMPILING)
add_executable(protoc IMPORTED GLOBAL)
set_target_properties (protoc PROPERTIES IMPORTED_LOCATION "${PROTOC_BUILD_DIR}/protoc")
add_dependencies(protoc "${PROTOC_BUILD_DIR}/protoc")
endif ()

@@ -1,24 +1,54 @@
# Copyright 2015 The RE2 Authors. All Rights Reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
# This file was edited for ClickHouse
set(SRC_DIR "${ClickHouse_SOURCE_DIR}/contrib/re2")
set(RE2_SOURCES
${SRC_DIR}/re2/bitstate.cc
${SRC_DIR}/re2/compile.cc
${SRC_DIR}/re2/dfa.cc
${SRC_DIR}/re2/filtered_re2.cc
${SRC_DIR}/re2/mimics_pcre.cc
${SRC_DIR}/re2/nfa.cc
${SRC_DIR}/re2/onepass.cc
${SRC_DIR}/re2/parse.cc
${SRC_DIR}/re2/perl_groups.cc
${SRC_DIR}/re2/prefilter.cc
${SRC_DIR}/re2/prefilter_tree.cc
${SRC_DIR}/re2/prog.cc
${SRC_DIR}/re2/re2.cc
${SRC_DIR}/re2/regexp.cc
${SRC_DIR}/re2/set.cc
${SRC_DIR}/re2/simplify.cc
${SRC_DIR}/re2/stringpiece.cc
${SRC_DIR}/re2/tostring.cc
${SRC_DIR}/re2/unicode_casefold.cc
${SRC_DIR}/re2/unicode_groups.cc
${SRC_DIR}/util/rune.cc
${SRC_DIR}/util/strutil.cc
)
add_library(re2 ${RE2_SOURCES})
target_include_directories(re2 PUBLIC "${SRC_DIR}")
# Build re2, which is thread-safe, and re2_st, which is not.
# re2 changes its state while matching a regular expression, e.g. it creates a temporary DFA.
# It uses an RWLock to allow the same regular expression object to be used from different threads.
# To avoid redundant locks in some cases, we use the non-thread-safe version of the library (re2_st).
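# Hypothetical consumer sketch (not part of this change): since the include directories below are
# PUBLIC, a target that wants the lock-free variant only needs to link it, e.g.
#   target_link_libraries(some_target PRIVATE re2_st)
# and then include <re2_st/re2.h> instead of <re2/re2.h>.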
set (RE2_SOURCE_DIR ${ClickHouse_SOURCE_DIR}/contrib/re2/)
get_target_property (RE2_SOURCES_ re2 SOURCES)
foreach (src ${RE2_SOURCES_})
list(APPEND RE2_ST_SOURCES ${RE2_SOURCE_DIR}/${src})
endforeach ()
add_library(re2_st ${RE2_ST_SOURCES})
add_library(re2_st ${RE2_SOURCES})
target_compile_definitions (re2_st PRIVATE NDEBUG NO_THREADS re2=re2_st)
target_include_directories (re2_st PRIVATE .)
target_include_directories (re2_st SYSTEM PUBLIC ${CMAKE_CURRENT_BINARY_DIR} ${RE2_SOURCE_DIR})
target_include_directories (re2_st SYSTEM PUBLIC ${CMAKE_CURRENT_BINARY_DIR} ${SRC_DIR})
file (MAKE_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/re2_st)
foreach (FILENAME filtered_re2.h re2.h set.h stringpiece.h)
add_custom_command (OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/re2_st/${FILENAME}"
COMMAND ${CMAKE_COMMAND} -DSOURCE_FILENAME="${RE2_SOURCE_DIR}/re2/${FILENAME}"
COMMAND ${CMAKE_COMMAND} -DSOURCE_FILENAME="${SRC_DIR}/re2/${FILENAME}"
-DTARGET_FILENAME="${CMAKE_CURRENT_BINARY_DIR}/re2_st/${FILENAME}"
-P "${CMAKE_CURRENT_SOURCE_DIR}/re2_transform.cmake"
COMMENT "Creating ${FILENAME} for re2_st library.")
@@ -29,7 +59,7 @@ endforeach ()
file (MAKE_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/util)
foreach (FILENAME mutex.h)
add_custom_command (OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/util/${FILENAME}"
COMMAND ${CMAKE_COMMAND} -DSOURCE_FILENAME="${RE2_SOURCE_DIR}/util/${FILENAME}"
COMMAND ${CMAKE_COMMAND} -DSOURCE_FILENAME="${SRC_DIR}/util/${FILENAME}"
-DTARGET_FILENAME="${CMAKE_CURRENT_BINARY_DIR}/util/${FILENAME}"
-P "${CMAKE_CURRENT_SOURCE_DIR}/re2_transform.cmake"
COMMENT "Creating ${FILENAME} for re2_st library.")

contrib/replxx vendored (2 changes)
@@ -1 +1 @@
Subproject commit b0c266c2d8a835784181e17292b421848c78c6b8
Subproject commit f019cba7ea1bcd1b4feb7826f28ed57fb581b04c

@@ -190,7 +190,7 @@ if(HAVE_PTHREAD_MUTEX_ADAPTIVE_NP)
endif()
include(CheckCXXSymbolExists)
if(CMAKE_SYSTEM_NAME MATCHES "^FreeBSD")
if (OS_FREEBSD)
check_cxx_symbol_exists(malloc_usable_size "${ROCKSDB_SOURCE_DIR}/malloc_np.h" HAVE_MALLOC_USABLE_SIZE)
else()
check_cxx_symbol_exists(malloc_usable_size "${ROCKSDB_SOURCE_DIR}/malloc.h" HAVE_MALLOC_USABLE_SIZE)
@@ -199,20 +199,14 @@ if(HAVE_MALLOC_USABLE_SIZE)
add_definitions(-DROCKSDB_MALLOC_USABLE_SIZE)
endif()
check_cxx_symbol_exists(sched_getcpu sched.h HAVE_SCHED_GETCPU)
if(HAVE_SCHED_GETCPU)
if (OS_LINUX)
add_definitions(-DROCKSDB_SCHED_GETCPU_PRESENT)
add_definitions(-DROCKSDB_AUXV_SYSAUXV_PRESENT)
add_definitions(-DROCKSDB_AUXV_GETAUXVAL_PRESENT)
elseif (OS_FREEBSD)
add_definitions(-DROCKSDB_AUXV_SYSAUXV_PRESENT)
endif()
check_cxx_symbol_exists(getauxval auvx.h HAVE_AUXV_GETAUXVAL)
if(HAVE_AUXV_GETAUXVAL)
add_definitions(-DROCKSDB_AUXV_GETAUXVAL_PRESENT)
endif()
check_cxx_symbol_exists(elf_aux_info sys/auxv.h HAVE_ELF_AUX_INFO)
if(HAVE_ELF_AUX_INFO)
add_definitions(-DROCKSDB_AUXV_GETAUXVAL_PRESENT)
endif()
include_directories(${ROCKSDB_SOURCE_DIR})
include_directories("${ROCKSDB_SOURCE_DIR}/include")

@@ -0,0 +1,47 @@
set (SRC_DIR "${ClickHouse_SOURCE_DIR}/contrib/sentry-native")
set (SRCS
${SRC_DIR}/vendor/mpack.c
${SRC_DIR}/src/sentry_alloc.c
${SRC_DIR}/src/sentry_backend.c
${SRC_DIR}/src/sentry_core.c
${SRC_DIR}/src/sentry_database.c
${SRC_DIR}/src/sentry_envelope.c
${SRC_DIR}/src/sentry_json.c
${SRC_DIR}/src/sentry_logger.c
${SRC_DIR}/src/sentry_options.c
${SRC_DIR}/src/sentry_random.c
${SRC_DIR}/src/sentry_ratelimiter.c
${SRC_DIR}/src/sentry_scope.c
${SRC_DIR}/src/sentry_session.c
${SRC_DIR}/src/sentry_slice.c
${SRC_DIR}/src/sentry_string.c
${SRC_DIR}/src/sentry_sync.c
${SRC_DIR}/src/sentry_transport.c
${SRC_DIR}/src/sentry_utils.c
${SRC_DIR}/src/sentry_uuid.c
${SRC_DIR}/src/sentry_value.c
${SRC_DIR}/src/path/sentry_path.c
${SRC_DIR}/src/transports/sentry_disk_transport.c
${SRC_DIR}/src/transports/sentry_function_transport.c
${SRC_DIR}/src/unwinder/sentry_unwinder.c
${SRC_DIR}/src/sentry_unix_pageallocator.c
${SRC_DIR}/src/path/sentry_path_unix.c
${SRC_DIR}/src/symbolizer/sentry_symbolizer_unix.c
${SRC_DIR}/src/modulefinder/sentry_modulefinder_linux.c
${SRC_DIR}/src/transports/sentry_transport_curl.c
${SRC_DIR}/src/backends/sentry_backend_none.c
)
add_library(sentry ${SRCS})
add_library(sentry::sentry ALIAS sentry)
if(BUILD_SHARED_LIBS)
target_compile_definitions(sentry PRIVATE SENTRY_BUILD_SHARED)
else()
target_compile_definitions(sentry PUBLIC SENTRY_BUILD_STATIC)
endif()
target_link_libraries(sentry PRIVATE curl pthread)
target_include_directories(sentry PUBLIC "${SRC_DIR}/include" PRIVATE "${SRC_DIR}/src")
target_compile_definitions(sentry PRIVATE SENTRY_WITH_INPROC_BACKEND SIZEOF_LONG=8)

@@ -1,16 +1,13 @@
set (SOURCE_DIR "${CMAKE_SOURCE_DIR}/contrib/snappy")
set(SNAPPY_IS_BIG_ENDIAN 0)
set (SNAPPY_IS_BIG_ENDIAN 0)
include(CheckIncludeFile)
check_include_file("byteswap.h" HAVE_BYTESWAP_H)
check_include_file("sys/endian.h" HAVE_SYS_ENDIAN_H)
check_include_file("sys/mman.h" HAVE_SYS_MMAN_H)
check_include_file("sys/resource.h" HAVE_SYS_RESOURCE_H)
check_include_file("sys/time.h" HAVE_SYS_TIME_H)
check_include_file("sys/uio.h" HAVE_SYS_UIO_H)
check_include_file("unistd.h" HAVE_UNISTD_H)
check_include_file("windows.h" HAVE_WINDOWS_H)
set (HAVE_BYTESWAP_H 1)
set (HAVE_SYS_MMAN_H 1)
set (HAVE_SYS_RESOURCE_H 1)
set (HAVE_SYS_TIME_H 1)
set (HAVE_SYS_UIO_H 1)
set (HAVE_UNISTD_H 1)
set (HAVE_BUILTIN_EXPECT 1)
set (HAVE_BUILTIN_CTZ 1)

contrib/stats vendored (1 change)
@@ -1 +0,0 @@
Subproject commit b6dd459c10a88c7ea04693c007e9e35820c5d9ad

contrib/sysroot vendored (2 changes)
@@ -1 +1 @@
Subproject commit 1a64956aa7c280448be6526251bb2b8e6d380ab1
Subproject commit 4ef348b7f30f2ad5b02b266268b3c948e51ad457

@@ -0,0 +1,263 @@
set (SRC_DIR "${ClickHouse_SOURCE_DIR}/contrib/xz")
# Author: Lasse Collin
#
# This file has been put into the public domain.
# You can do whatever you want with this file.
#
# The file was edited for ClickHouse
# Get the package version from version.h into XZ_VERSION variable.
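# For illustration only: if version.h contains the consecutive lines
#   #define LZMA_VERSION_MAJOR 5
#   #define LZMA_VERSION_MINOR 2
#   #define LZMA_VERSION_PATCH 5
# the regex below reduces them to XZ_VERSION "5.2.5".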
file(READ ${SRC_DIR}/src/liblzma/api/lzma/version.h XZ_VERSION)
string(REGEX REPLACE
"^.*\n\
#define LZMA_VERSION_MAJOR ([0-9]+)\n\
#define LZMA_VERSION_MINOR ([0-9]+)\n\
#define LZMA_VERSION_PATCH ([0-9]+)\n\
.*$"
"\\1.\\2.\\3" XZ_VERSION "${XZ_VERSION}")
# Definitions common to all targets:
add_compile_definitions(
# Package info:
PACKAGE_NAME="XZ Utils"
PACKAGE_BUGREPORT="lasse.collin@tukaani.org"
PACKAGE_URL="https://tukaani.org/xz/"
# Features:
HAVE_CHECK_CRC32
HAVE_CHECK_CRC64
HAVE_CHECK_SHA256
HAVE_DECODERS
HAVE_DECODER_ARM
HAVE_DECODER_ARMTHUMB
HAVE_DECODER_DELTA
HAVE_DECODER_IA64
HAVE_DECODER_LZMA1
HAVE_DECODER_LZMA2
HAVE_DECODER_POWERPC
HAVE_DECODER_SPARC
HAVE_DECODER_X86
HAVE_ENCODERS
HAVE_ENCODER_ARM
HAVE_ENCODER_ARMTHUMB
HAVE_ENCODER_DELTA
HAVE_ENCODER_IA64
HAVE_ENCODER_LZMA1
HAVE_ENCODER_LZMA2
HAVE_ENCODER_POWERPC
HAVE_ENCODER_SPARC
HAVE_ENCODER_X86
HAVE_MF_BT2
HAVE_MF_BT3
HAVE_MF_BT4
HAVE_MF_HC3
HAVE_MF_HC4
# Standard headers and types are available:
HAVE_STDBOOL_H
HAVE__BOOL
HAVE_STDINT_H
HAVE_INTTYPES_H
HAVE___BUILTIN_BSWAPXX
HAVE___BUILTIN_ASSUME_ALIGNED
_GNU_SOURCE
__EXTENSIONS__
_POSIX_PTHREAD_SEMANTICS
_TANDEM_SOURCE
_ALL_SOURCE
HAVE_CLOCK_GETTIME=1
HAVE_DECL_CLOCK_MONOTONIC=1
HAVE_PTHREAD_CONDATTR_SETCLOCK
MYTHREAD_POSIX
)
if (OS_LINUX)
add_compile_definitions(
TUKLIB_CPUCORES_SCHED_GETAFFINITY
TUKLIB_PHYSMEM_SYSCONF)
elseif (OS_FREEBSD)
add_compile_definitions(
TUKLIB_CPUCORES_CPUSET
TUKLIB_PHYSMEM_SYSCTL)
elseif (OS_DARWIN)
add_compile_definitions(
TUKLIB_CPUCORES_SYSCTL
TUKLIB_PHYSMEM_SYSCTL)
endif ()
if (ARCH_AMD64 OR ARCH_AARCH64)
add_compile_definitions(TUKLIB_FAST_UNALIGNED_ACCESS=1)
endif ()
find_package(Threads REQUIRED)
add_library(liblzma
${SRC_DIR}/src/common/mythread.h
${SRC_DIR}/src/common/sysdefs.h
${SRC_DIR}/src/common/tuklib_common.h
${SRC_DIR}/src/common/tuklib_config.h
${SRC_DIR}/src/common/tuklib_cpucores.c
${SRC_DIR}/src/common/tuklib_cpucores.h
${SRC_DIR}/src/common/tuklib_integer.h
${SRC_DIR}/src/common/tuklib_physmem.c
${SRC_DIR}/src/common/tuklib_physmem.h
${SRC_DIR}/src/liblzma/api/lzma.h
${SRC_DIR}/src/liblzma/api/lzma/base.h
${SRC_DIR}/src/liblzma/api/lzma/bcj.h
${SRC_DIR}/src/liblzma/api/lzma/block.h
${SRC_DIR}/src/liblzma/api/lzma/check.h
${SRC_DIR}/src/liblzma/api/lzma/container.h
${SRC_DIR}/src/liblzma/api/lzma/delta.h
${SRC_DIR}/src/liblzma/api/lzma/filter.h
${SRC_DIR}/src/liblzma/api/lzma/hardware.h
${SRC_DIR}/src/liblzma/api/lzma/index.h
${SRC_DIR}/src/liblzma/api/lzma/index_hash.h
${SRC_DIR}/src/liblzma/api/lzma/lzma12.h
${SRC_DIR}/src/liblzma/api/lzma/stream_flags.h
${SRC_DIR}/src/liblzma/api/lzma/version.h
${SRC_DIR}/src/liblzma/api/lzma/vli.h
${SRC_DIR}/src/liblzma/check/check.c
${SRC_DIR}/src/liblzma/check/check.h
${SRC_DIR}/src/liblzma/check/crc32_fast.c
${SRC_DIR}/src/liblzma/check/crc32_table.c
${SRC_DIR}/src/liblzma/check/crc32_table_be.h
${SRC_DIR}/src/liblzma/check/crc32_table_le.h
${SRC_DIR}/src/liblzma/check/crc64_fast.c
${SRC_DIR}/src/liblzma/check/crc64_table.c
${SRC_DIR}/src/liblzma/check/crc64_table_be.h
${SRC_DIR}/src/liblzma/check/crc64_table_le.h
${SRC_DIR}/src/liblzma/check/crc_macros.h
${SRC_DIR}/src/liblzma/check/sha256.c
${SRC_DIR}/src/liblzma/common/alone_decoder.c
${SRC_DIR}/src/liblzma/common/alone_decoder.h
${SRC_DIR}/src/liblzma/common/alone_encoder.c
${SRC_DIR}/src/liblzma/common/auto_decoder.c
${SRC_DIR}/src/liblzma/common/block_buffer_decoder.c
${SRC_DIR}/src/liblzma/common/block_buffer_encoder.c
${SRC_DIR}/src/liblzma/common/block_buffer_encoder.h
${SRC_DIR}/src/liblzma/common/block_decoder.c
${SRC_DIR}/src/liblzma/common/block_decoder.h
${SRC_DIR}/src/liblzma/common/block_encoder.c
${SRC_DIR}/src/liblzma/common/block_encoder.h
${SRC_DIR}/src/liblzma/common/block_header_decoder.c
${SRC_DIR}/src/liblzma/common/block_header_encoder.c
${SRC_DIR}/src/liblzma/common/block_util.c
${SRC_DIR}/src/liblzma/common/common.c
${SRC_DIR}/src/liblzma/common/common.h
${SRC_DIR}/src/liblzma/common/easy_buffer_encoder.c
${SRC_DIR}/src/liblzma/common/easy_decoder_memusage.c
${SRC_DIR}/src/liblzma/common/easy_encoder.c
${SRC_DIR}/src/liblzma/common/easy_encoder_memusage.c
${SRC_DIR}/src/liblzma/common/easy_preset.c
${SRC_DIR}/src/liblzma/common/easy_preset.h
${SRC_DIR}/src/liblzma/common/file_info.c
${SRC_DIR}/src/liblzma/common/filter_buffer_decoder.c
${SRC_DIR}/src/liblzma/common/filter_buffer_encoder.c
${SRC_DIR}/src/liblzma/common/filter_common.c
${SRC_DIR}/src/liblzma/common/filter_common.h
${SRC_DIR}/src/liblzma/common/filter_decoder.c
${SRC_DIR}/src/liblzma/common/filter_decoder.h
${SRC_DIR}/src/liblzma/common/filter_encoder.c
${SRC_DIR}/src/liblzma/common/filter_encoder.h
${SRC_DIR}/src/liblzma/common/filter_flags_decoder.c
${SRC_DIR}/src/liblzma/common/filter_flags_encoder.c
${SRC_DIR}/src/liblzma/common/hardware_cputhreads.c
${SRC_DIR}/src/liblzma/common/hardware_physmem.c
${SRC_DIR}/src/liblzma/common/index.c
${SRC_DIR}/src/liblzma/common/index.h
${SRC_DIR}/src/liblzma/common/index_decoder.c
${SRC_DIR}/src/liblzma/common/index_decoder.h
${SRC_DIR}/src/liblzma/common/index_encoder.c
${SRC_DIR}/src/liblzma/common/index_encoder.h
${SRC_DIR}/src/liblzma/common/index_hash.c
${SRC_DIR}/src/liblzma/common/memcmplen.h
${SRC_DIR}/src/liblzma/common/outqueue.c
${SRC_DIR}/src/liblzma/common/outqueue.h
${SRC_DIR}/src/liblzma/common/stream_buffer_decoder.c
${SRC_DIR}/src/liblzma/common/stream_buffer_encoder.c
${SRC_DIR}/src/liblzma/common/stream_decoder.c
${SRC_DIR}/src/liblzma/common/stream_decoder.h
${SRC_DIR}/src/liblzma/common/stream_encoder.c
${SRC_DIR}/src/liblzma/common/stream_encoder_mt.c
${SRC_DIR}/src/liblzma/common/stream_flags_common.c
${SRC_DIR}/src/liblzma/common/stream_flags_common.h
${SRC_DIR}/src/liblzma/common/stream_flags_decoder.c
${SRC_DIR}/src/liblzma/common/stream_flags_encoder.c
${SRC_DIR}/src/liblzma/common/vli_decoder.c
${SRC_DIR}/src/liblzma/common/vli_encoder.c
${SRC_DIR}/src/liblzma/common/vli_size.c
${SRC_DIR}/src/liblzma/delta/delta_common.c
${SRC_DIR}/src/liblzma/delta/delta_common.h
${SRC_DIR}/src/liblzma/delta/delta_decoder.c
${SRC_DIR}/src/liblzma/delta/delta_decoder.h
${SRC_DIR}/src/liblzma/delta/delta_encoder.c
${SRC_DIR}/src/liblzma/delta/delta_encoder.h
${SRC_DIR}/src/liblzma/delta/delta_private.h
${SRC_DIR}/src/liblzma/lz/lz_decoder.c
${SRC_DIR}/src/liblzma/lz/lz_decoder.h
${SRC_DIR}/src/liblzma/lz/lz_encoder.c
${SRC_DIR}/src/liblzma/lz/lz_encoder.h
${SRC_DIR}/src/liblzma/lz/lz_encoder_hash.h
${SRC_DIR}/src/liblzma/lz/lz_encoder_hash_table.h
${SRC_DIR}/src/liblzma/lz/lz_encoder_mf.c
${SRC_DIR}/src/liblzma/lzma/fastpos.h
${SRC_DIR}/src/liblzma/lzma/fastpos_table.c
${SRC_DIR}/src/liblzma/lzma/lzma2_decoder.c
${SRC_DIR}/src/liblzma/lzma/lzma2_decoder.h
${SRC_DIR}/src/liblzma/lzma/lzma2_encoder.c
${SRC_DIR}/src/liblzma/lzma/lzma2_encoder.h
${SRC_DIR}/src/liblzma/lzma/lzma_common.h
${SRC_DIR}/src/liblzma/lzma/lzma_decoder.c
${SRC_DIR}/src/liblzma/lzma/lzma_decoder.h
${SRC_DIR}/src/liblzma/lzma/lzma_encoder.c
${SRC_DIR}/src/liblzma/lzma/lzma_encoder.h
${SRC_DIR}/src/liblzma/lzma/lzma_encoder_optimum_fast.c
${SRC_DIR}/src/liblzma/lzma/lzma_encoder_optimum_normal.c
${SRC_DIR}/src/liblzma/lzma/lzma_encoder_presets.c
${SRC_DIR}/src/liblzma/lzma/lzma_encoder_private.h
${SRC_DIR}/src/liblzma/rangecoder/price.h
${SRC_DIR}/src/liblzma/rangecoder/price_table.c
${SRC_DIR}/src/liblzma/rangecoder/range_common.h
${SRC_DIR}/src/liblzma/rangecoder/range_decoder.h
${SRC_DIR}/src/liblzma/rangecoder/range_encoder.h
${SRC_DIR}/src/liblzma/simple/arm.c
${SRC_DIR}/src/liblzma/simple/armthumb.c
${SRC_DIR}/src/liblzma/simple/ia64.c
${SRC_DIR}/src/liblzma/simple/powerpc.c
${SRC_DIR}/src/liblzma/simple/simple_coder.c
${SRC_DIR}/src/liblzma/simple/simple_coder.h
${SRC_DIR}/src/liblzma/simple/simple_decoder.c
${SRC_DIR}/src/liblzma/simple/simple_decoder.h
${SRC_DIR}/src/liblzma/simple/simple_encoder.c
${SRC_DIR}/src/liblzma/simple/simple_encoder.h
${SRC_DIR}/src/liblzma/simple/simple_private.h
${SRC_DIR}/src/liblzma/simple/sparc.c
${SRC_DIR}/src/liblzma/simple/x86.c
)
target_include_directories(liblzma PRIVATE
${SRC_DIR}/src/liblzma/api
${SRC_DIR}/src/liblzma/common
${SRC_DIR}/src/liblzma/check
${SRC_DIR}/src/liblzma/lz
${SRC_DIR}/src/liblzma/rangecoder
${SRC_DIR}/src/liblzma/lzma
${SRC_DIR}/src/liblzma/delta
${SRC_DIR}/src/liblzma/simple
${SRC_DIR}/src/common
)
target_link_libraries(liblzma Threads::Threads)
# Put the tuklib functions under the lzma_ namespace.
target_compile_definitions(liblzma PRIVATE TUKLIB_SYMBOL_PREFIX=lzma_)
if (ENABLE_SSE2)
target_compile_definitions(liblzma PRIVATE HAVE_IMMINTRIN_H HAVE__MM_MOVEMASK_EPI8)
endif()

@@ -0,0 +1,161 @@
set (SOURCE_DIR ${CMAKE_SOURCE_DIR}/contrib/zlib-ng)
add_definitions(-DZLIB_COMPAT)
add_definitions(-DWITH_GZFILEOP)
add_definitions(-DUNALIGNED_OK)
add_definitions(-DUNALIGNED64_OK)
set (HAVE_UNISTD_H 1)
add_definitions(-D_LARGEFILE64_SOURCE=1 -D__USE_LARGEFILE64)
add_definitions(-DHAVE_VISIBILITY_HIDDEN)
add_definitions(-DHAVE_VISIBILITY_INTERNAL)
add_definitions(-DHAVE_BUILTIN_CTZ)
add_definitions(-DHAVE_BUILTIN_CTZLL)
set(ZLIB_ARCH_SRCS)
set(ZLIB_ARCH_HDRS)
set(ARCHDIR "arch/generic")
if(ARCH_AARCH64)
set(ARCHDIR "${SOURCE_DIR}/arch/arm")
add_definitions(-DARM_FEATURES)
add_definitions(-DARM_AUXV_HAS_CRC32 -DARM_ASM_HWCAP)
add_definitions(-DARM_AUXV_HAS_NEON)
add_definitions(-DARM_ACLE_CRC_HASH)
add_definitions(-DARM_NEON_ADLER32 -DARM_NEON_CHUNKSET -DARM_NEON_SLIDEHASH)
list(APPEND ZLIB_ARCH_HDRS ${ARCHDIR}/arm.h)
list(APPEND ZLIB_ARCH_SRCS ${ARCHDIR}/armfeature.c)
set(ACLE_SRCS ${ARCHDIR}/crc32_acle.c ${ARCHDIR}/insert_string_acle.c)
list(APPEND ZLIB_ARCH_SRCS ${ACLE_SRCS})
set(NEON_SRCS ${ARCHDIR}/adler32_neon.c ${ARCHDIR}/chunkset_neon.c ${ARCHDIR}/slide_neon.c)
list(APPEND ZLIB_ARCH_SRCS ${NEON_SRCS})
elseif(ARCH_PPC64LE)
set(ARCHDIR "${SOURCE_DIR}/arch/power")
add_definitions(-DPOWER8)
add_definitions(-DPOWER_FEATURES)
add_definitions(-DPOWER8_VSX_ADLER32)
add_definitions(-DPOWER8_VSX_SLIDEHASH)
list(APPEND ZLIB_ARCH_HDRS ${ARCHDIR}/power.h)
list(APPEND ZLIB_ARCH_SRCS ${ARCHDIR}/power.c)
set(POWER8_SRCS ${ARCHDIR}/adler32_power8.c ${ARCHDIR}/slide_hash_power8.c)
list(APPEND ZLIB_ARCH_SRCS ${POWER8_SRCS})
elseif(ARCH_AMD64)
set(ARCHDIR "${SOURCE_DIR}/arch/x86")
add_definitions(-DX86_FEATURES)
list(APPEND ZLIB_ARCH_HDRS ${ARCHDIR}/x86.h)
list(APPEND ZLIB_ARCH_SRCS ${ARCHDIR}/x86.c)
if(ENABLE_AVX2)
add_definitions(-DX86_AVX2 -DX86_AVX2_ADLER32 -DX86_AVX_CHUNKSET)
set(AVX2_SRCS ${ARCHDIR}/slide_avx.c)
list(APPEND AVX2_SRCS ${ARCHDIR}/chunkset_avx.c)
list(APPEND AVX2_SRCS ${ARCHDIR}/compare258_avx.c)
list(APPEND AVX2_SRCS ${ARCHDIR}/adler32_avx.c)
list(APPEND ZLIB_ARCH_SRCS ${AVX2_SRCS})
endif()
if(ENABLE_SSE42)
add_definitions(-DX86_SSE42_CRC_HASH)
set(SSE42_SRCS ${ARCHDIR}/insert_string_sse.c)
list(APPEND ZLIB_ARCH_SRCS ${SSE42_SRCS})
add_definitions(-DX86_SSE42_CRC_INTRIN)
add_definitions(-DX86_SSE42_CMP_STR)
set(SSE42_SRCS ${ARCHDIR}/compare258_sse.c)
list(APPEND ZLIB_ARCH_SRCS ${SSE42_SRCS})
endif()
if(ENABLE_SSSE3)
add_definitions(-DX86_SSSE3 -DX86_SSSE3_ADLER32)
set(SSSE3_SRCS ${ARCHDIR}/adler32_ssse3.c)
list(APPEND ZLIB_ARCH_SRCS ${SSSE3_SRCS})
endif()
if(ENABLE_PCLMULQDQ)
add_definitions(-DX86_PCLMULQDQ_CRC)
set(PCLMULQDQ_SRCS ${ARCHDIR}/crc_folding.c)
list(APPEND ZLIB_ARCH_SRCS ${PCLMULQDQ_SRCS})
endif()
add_definitions(-DX86_SSE2 -DX86_SSE2_CHUNKSET -DX86_SSE2_SLIDEHASH)
set(SSE2_SRCS ${ARCHDIR}/chunkset_sse.c ${ARCHDIR}/slide_sse.c)
list(APPEND ZLIB_ARCH_SRCS ${SSE2_SRCS})
add_definitions(-DX86_NOCHECK_SSE2)
endif ()
macro(generate_cmakein input output)
file(REMOVE ${output})
file(STRINGS ${input} _lines)
foreach(_line IN LISTS _lines)
string(REGEX REPLACE "#ifdef HAVE_UNISTD_H.*" "@ZCONF_UNISTD_LINE@" _line "${_line}")
string(REGEX REPLACE "#ifdef NEED_PTRDIFF_T.*" "@ZCONF_PTRDIFF_LINE@" _line "${_line}")
if(NEED_PTRDIFF_T)
string(REGEX REPLACE "typedef PTRDIFF_TYPE" "typedef @PTRDIFF_TYPE@" _line "${_line}")
endif()
file(APPEND ${output} "${_line}\n")
endforeach()
endmacro(generate_cmakein)
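# Illustrative effect of the macro above (example lines, not quoted verbatim from zconf.h.in):
#   a line starting with "#ifdef HAVE_UNISTD_H"  becomes  "@ZCONF_UNISTD_LINE@"
#   a line starting with "#ifdef NEED_PTRDIFF_T" becomes  "@ZCONF_PTRDIFF_LINE@"
# and the placeholders are later filled in by the configure_file() call below.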
generate_cmakein(${SOURCE_DIR}/zconf.h.in ${CMAKE_CURRENT_BINARY_DIR}/zconf.h.cmakein)
set(ZLIB_SRCS
${SOURCE_DIR}/adler32.c
${SOURCE_DIR}/chunkset.c
${SOURCE_DIR}/compare258.c
${SOURCE_DIR}/compress.c
${SOURCE_DIR}/crc32.c
${SOURCE_DIR}/crc32_comb.c
${SOURCE_DIR}/deflate.c
${SOURCE_DIR}/deflate_fast.c
${SOURCE_DIR}/deflate_medium.c
${SOURCE_DIR}/deflate_quick.c
${SOURCE_DIR}/deflate_slow.c
${SOURCE_DIR}/functable.c
${SOURCE_DIR}/infback.c
${SOURCE_DIR}/inffast.c
${SOURCE_DIR}/inflate.c
${SOURCE_DIR}/inftrees.c
${SOURCE_DIR}/insert_string.c
${SOURCE_DIR}/trees.c
${SOURCE_DIR}/uncompr.c
${SOURCE_DIR}/zutil.c
${SOURCE_DIR}/gzlib.c
${SOURCE_DIR}/gzread.c
${SOURCE_DIR}/gzwrite.c
)
set(ZLIB_ALL_SRCS ${ZLIB_SRCS} ${ZLIB_ARCH_SRCS})
add_library(zlib ${ZLIB_ALL_SRCS})
add_library(zlibstatic ALIAS zlib)
# https://github.com/zlib-ng/zlib-ng/pull/733
# This is disabled by default
add_compile_definitions(Z_TLS=__thread)
if(HAVE_UNISTD_H)
SET(ZCONF_UNISTD_LINE "#if 1 /* was set to #if 1 by configure/cmake/etc */")
else()
SET(ZCONF_UNISTD_LINE "#if 0 /* was set to #if 0 by configure/cmake/etc */")
endif()
if(NEED_PTRDIFF_T)
SET(ZCONF_PTRDIFF_LINE "#if 1 /* was set to #if 1 by configure/cmake/etc */")
else()
SET(ZCONF_PTRDIFF_LINE "#ifdef NEED_PTRDIFF_T /* may be set to #if 1 by configure/cmake/etc */")
endif()
set(ZLIB_PC ${CMAKE_CURRENT_BINARY_DIR}/zlib.pc)
configure_file(${SOURCE_DIR}/zlib.pc.cmakein ${ZLIB_PC} @ONLY)
configure_file(${CMAKE_CURRENT_BINARY_DIR}/zconf.h.cmakein ${CMAKE_CURRENT_BINARY_DIR}/zconf.h @ONLY)
# We should use the same defines when including zlib.h as were used when zlib was compiled
target_compile_definitions (zlib PUBLIC ZLIB_COMPAT WITH_GZFILEOP)
if (ARCH_AMD64 OR ARCH_AARCH64)
target_compile_definitions (zlib PUBLIC X86_64 UNALIGNED_OK)
endif ()
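# Hypothetical consumer sketch (not part of this change): because these definitions are PUBLIC,
# any target that links zlib compiles against zlib.h with the same ZLIB_COMPAT/WITH_GZFILEOP
# settings automatically, e.g.
#   target_link_libraries(some_target PRIVATE zlib)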
target_include_directories(zlib PUBLIC ${SOURCE_DIR} ${CMAKE_CURRENT_BINARY_DIR})

@@ -1,9 +1,7 @@
{
"docker/packager/deb": {
"name": "clickhouse/deb-builder",
"dependent": [
"docker/packager/unbundled"
]
"dependent": []
},
"docker/packager/binary": {
"name": "clickhouse/binary-builder",
@@ -13,10 +11,6 @@
"docker/test/codebrowser"
]
},
"docker/packager/unbundled": {
"name": "clickhouse/unbundled-builder",
"dependent": []
},
"docker/test/compatibility/centos": {
"name": "clickhouse/test-old-centos",
"dependent": []
@@ -138,23 +132,11 @@
"name": "clickhouse/test-base",
"dependent": [
"docker/test/stateless",
"docker/test/stateless_unbundled",
"docker/test/integration/base",
"docker/test/fuzzer",
"docker/test/keeper-jepsen"
]
},
"docker/packager/unbundled": {
"name": "clickhouse/unbundled-builder",
"dependent": [
"docker/test/stateless_unbundled"
]
},
"docker/test/stateless_unbundled": {
"name": "clickhouse/stateless-unbundled-test",
"dependent": [
]
},
"docker/test/integration/kerberized_hadoop": {
"name": "clickhouse/kerberized-hadoop",
"dependent": []

@@ -93,9 +93,6 @@ RUN git clone https://github.com/tpoechtrager/cctools-port.git \
# Download toolchain and SDK for Darwin
RUN wget -nv https://github.com/phracker/MacOSX-SDKs/releases/download/11.3/MacOSX11.0.sdk.tar.xz
# Download toolchain for FreeBSD 11.3
RUN wget -nv https://clickhouse-datasets.s3.yandex.net/toolchains/toolchains/freebsd-11.3-toolchain.tar.xz
# NOTE: It seems gcc-11 is too new for the ubuntu20 repository
RUN add-apt-repository ppa:ubuntu-toolchain-r/test --yes \
&& apt-get update \

@@ -6,9 +6,6 @@ mkdir -p build/cmake/toolchain/darwin-x86_64
tar xJf MacOSX11.0.sdk.tar.xz -C build/cmake/toolchain/darwin-x86_64 --strip-components=1
ln -sf darwin-x86_64 build/cmake/toolchain/darwin-aarch64
mkdir -p build/cmake/toolchain/freebsd-x86_64
tar xJf freebsd-11.3-toolchain.tar.xz -C build/cmake/toolchain/freebsd-x86_64 --strip-components=1
# Uncomment to debug ccache. Don't put ccache log in /output right away, or it
# will be confusingly packed into the "performance" package.
# export CCACHE_LOGFILE=/build/ccache.log

@@ -33,7 +33,6 @@ RUN curl -O https://clickhouse-datasets.s3.yandex.net/utils/1/dpkg-deb \
&& chmod +x dpkg-deb \
&& cp dpkg-deb /usr/bin
# Libraries from the OS are only needed to test the "unbundled" build (this is not used in production).
RUN apt-get update \
&& apt-get install \
alien \

@@ -11,7 +11,6 @@ SCRIPT_PATH = os.path.realpath(__file__)
IMAGE_MAP = {
"deb": "clickhouse/deb-builder",
"binary": "clickhouse/binary-builder",
"unbundled": "clickhouse/unbundled-builder"
}
def check_image_exists_locally(image_name):
@@ -55,7 +54,7 @@ def run_docker_image_with_env(image_name, output, env_variables, ch_root, ccache
subprocess.check_call(cmd, shell=True)
def parse_env_variables(build_type, compiler, sanitizer, package_type, image_type, cache, distcc_hosts, unbundled, split_binary, clang_tidy, version, author, official, alien_pkgs, with_coverage, with_binaries):
def parse_env_variables(build_type, compiler, sanitizer, package_type, image_type, cache, distcc_hosts, split_binary, clang_tidy, version, author, official, alien_pkgs, with_coverage, with_binaries):
CLANG_PREFIX = "clang"
DARWIN_SUFFIX = "-darwin"
DARWIN_ARM_SUFFIX = "-darwin-aarch64"
@@ -107,7 +106,7 @@ def parse_env_variables(build_type, compiler, sanitizer, package_type, image_typ
cxx = cc.replace('gcc', 'g++').replace('clang', 'clang++')
if image_type == "deb" or image_type == "unbundled":
if image_type == "deb":
result.append("DEB_CC={}".format(cc))
result.append("DEB_CXX={}".format(cxx))
# For building fuzzers
@@ -159,15 +158,6 @@ def parse_env_variables(build_type, compiler, sanitizer, package_type, image_typ
cmake_flags.append('-DENABLE_TESTS=1')
cmake_flags.append('-DUSE_GTEST=1')
# "Unbundled" build is not suitable for any production usage.
# But it is occasionally used by some developers.
# The whole idea of using unknown versions of libraries from the OS distribution is deeply flawed.
# We wish these developers good luck.
if unbundled:
# We also disable all CPU features except basic x86_64.
# It is only slightly related to the "unbundled" build, but it is a good place to test whether the code compiles without these instruction sets.
cmake_flags.append('-DUNBUNDLED=1 -DUSE_INTERNAL_RDKAFKA_LIBRARY=1 -DENABLE_ARROW=0 -DENABLE_AVRO=0 -DENABLE_ORC=0 -DENABLE_PARQUET=0 -DENABLE_SSSE3=0 -DENABLE_SSE41=0 -DENABLE_SSE42=0 -DENABLE_PCLMULQDQ=0 -DENABLE_POPCNT=0 -DENABLE_AVX=0 -DENABLE_AVX2=0')
if split_binary:
cmake_flags.append('-DUSE_STATIC_LIBRARIES=0 -DSPLIT_SHARED_LIBRARIES=1 -DCLICKHOUSE_SPLIT_BINARY=1')
# We can't always build utils because it requires too much space, but
@@ -213,7 +203,6 @@ if __name__ == "__main__":
"clang-13", "clang-13-darwin", "clang-13-darwin-aarch64", "clang-13-aarch64", "clang-13-ppc64le",
"clang-11-freebsd", "clang-12-freebsd", "clang-13-freebsd", "gcc-11"), default="clang-13")
parser.add_argument("--sanitizer", choices=("address", "thread", "memory", "undefined", ""), default="")
parser.add_argument("--unbundled", action="store_true")
parser.add_argument("--split-binary", action="store_true")
parser.add_argument("--clang-tidy", action="store_true")
parser.add_argument("--cache", choices=("", "ccache", "distcc"), default="")
@@ -232,7 +221,7 @@ if __name__ == "__main__":
if not os.path.isabs(args.output_dir):
args.output_dir = os.path.abspath(os.path.join(os.getcwd(), args.output_dir))
image_type = 'binary' if args.package_type == 'performance' else 'unbundled' if args.unbundled else args.package_type
image_type = 'binary' if args.package_type == 'performance' else args.package_type
image_name = IMAGE_MAP[image_type]
if not os.path.isabs(args.clickhouse_repo_path):
@@ -256,7 +245,7 @@ if __name__ == "__main__":
build_image(image_with_version, dockerfile)
env_prepared = parse_env_variables(
args.build_type, args.compiler, args.sanitizer, args.package_type, image_type,
args.cache, args.distcc_hosts, args.unbundled, args.split_binary, args.clang_tidy,
args.cache, args.distcc_hosts, args.split_binary, args.clang_tidy,
args.version, args.author, args.official, args.alien_pkgs, args.with_coverage, args.with_binaries)
run_docker_image_with_env(image_name, args.output_dir, env_prepared, ch_root, args.ccache_dir, args.docker_image_version)

@@ -1,69 +0,0 @@
# docker build -t clickhouse/unbundled-builder .
FROM clickhouse/deb-builder
RUN export CODENAME="$(lsb_release --codename --short | tr 'A-Z' 'a-z')" \
&& wget -nv -O /tmp/arrow-keyring.deb "https://apache.jfrog.io/artifactory/arrow/ubuntu/apache-arrow-apt-source-latest-${CODENAME}.deb" \
&& dpkg -i /tmp/arrow-keyring.deb
RUN sed -i 's|http://archive|http://ru.archive|g' /etc/apt/sources.list
# Libraries from the OS are only needed to test the "unbundled" build (it is not used in production).
RUN apt-get update \
&& apt-get install \
libicu-dev \
gperf \
perl \
pkg-config \
devscripts \
libc++-dev \
libc++abi-dev \
libboost-all-dev \
zlib1g-dev \
liblz4-dev \
libdouble-conversion-dev \
libxml2-dev \
librdkafka-dev \
libgoogle-perftools-dev \
libzstd-dev \
libltdl-dev \
libre2-dev \
libjemalloc-dev \
libmsgpack-dev \
libcurl4-openssl-dev \
unixodbc-dev \
odbcinst \
tzdata \
alien \
libcapnp-dev \
cmake \
gdb \
pigz \
moreutils \
libcctz-dev \
libldap2-dev \
libsasl2-dev \
libgsasl7-dev \
heimdal-multidev \
libhyperscan-dev \
libbrotli-dev \
protobuf-compiler \
libprotoc-dev \
libgrpc++-dev \
protobuf-compiler-grpc \
libc-ares-dev \
rapidjson-dev \
libsnappy-dev \
libparquet-dev \
libthrift-dev \
libutf8proc-dev \
libbz2-dev \
libavro-dev \
libfarmhash-dev \
librocksdb-dev \
libgflags-dev \
libmysqlclient-dev \
--yes --no-install-recommends
COPY build.sh /
CMD ["/bin/bash", "/build.sh"]

@@ -1,15 +0,0 @@
#!/usr/bin/env bash
set -x -e
ccache --show-stats ||:
ccache --zero-stats ||:
read -ra ALIEN_PKGS <<< "${ALIEN_PKGS:-}"
build/release "${ALIEN_PKGS[@]}" | ts '%Y-%m-%d %H:%M:%S'
mv /*.deb /output
mv -- *.changes /output
mv -- *.buildinfo /output
mv /*.rpm /output ||: # if exists
mv /*.tgz /output ||: # if exists
ccache --show-stats ||:

@@ -52,14 +52,18 @@ RUN apt-get update \
--yes --no-install-recommends
# Sanitizer options for services (clickhouse-server)
RUN echo "TSAN_OPTIONS='verbosity=1000 halt_on_error=1 history_size=7'" >> /etc/environment; \
# Set resident memory limit for TSAN to 45GiB (46080MiB) to avoid OOMs in Stress tests
# and MEMORY_LIMIT_EXCEEDED exceptions in Functional tests (total memory limit in Functional tests is ~55.24 GiB).
# TSAN will flush shadow memory when reaching this limit.
# It may cause false-negatives, but it's better than OOM.
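# (For reference: 45 GiB * 1024 = 46080 MiB, hence memory_limit_mb=46080.)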
RUN echo "TSAN_OPTIONS='verbosity=1000 halt_on_error=1 history_size=7 memory_limit_mb=46080'" >> /etc/environment; \
echo "UBSAN_OPTIONS='print_stacktrace=1'" >> /etc/environment; \
echo "MSAN_OPTIONS='abort_on_error=1 poison_in_dtor=1'" >> /etc/environment; \
echo "LSAN_OPTIONS='suppressions=/usr/share/clickhouse-test/config/lsan_suppressions.txt'" >> /etc/environment; \
ln -s /usr/lib/llvm-${LLVM_VERSION}/bin/llvm-symbolizer /usr/bin/llvm-symbolizer;
# Sanitizer options for the shell that will be spawned on "docker run" (not the current one),
# but without verbosity for TSAN, otherwise test.reference will not match.
ENV TSAN_OPTIONS='halt_on_error=1 history_size=7'
ENV TSAN_OPTIONS='halt_on_error=1 history_size=7 memory_limit_mb=46080'
ENV UBSAN_OPTIONS='print_stacktrace=1'
ENV MSAN_OPTIONS='abort_on_error=1 poison_in_dtor=1'

@@ -159,6 +159,7 @@ function clone_submodules
cd "$FASTTEST_SOURCE"
SUBMODULES_TO_UPDATE=(
contrib/sysroot
contrib/magic_enum
contrib/abseil-cpp
contrib/boost

@@ -19,6 +19,7 @@ RUN apt-get update \
sqlite3 \
curl \
tar \
lz4 \
krb5-user \
iproute2 \
lsof \

@@ -2,18 +2,17 @@
FROM sequenceiq/hadoop-docker:2.7.0
RUN sed -i -e 's/^\#baseurl/baseurl/' /etc/yum.repos.d/CentOS-Base.repo && \
sed -i -e 's/^mirrorlist/#mirrorlist/' /etc/yum.repos.d/CentOS-Base.repo && \
sed -i -e 's#http://mirror.centos.org/#http://vault.centos.org/#' /etc/yum.repos.d/CentOS-Base.repo
# https://community.letsencrypt.org/t/rhel-centos-6-openssl-client-compatibility-after-dst-root-ca-x3-expiration/161032/81
RUN sed -i s/xMDkzMDE0MDExNVow/0MDkzMDE4MTQwM1ow/ /etc/pki/tls/certs/ca-bundle.crt
RUN yum clean all && \
rpm --rebuilddb && \
yum -y update && \
yum -y install yum-plugin-ovl && \
yum --quiet -y install krb5-workstation.x86_64
RUN curl -o krb5-libs-1.10.3-65.el6.x86_64.rpm ftp://ftp.pbone.net/mirror/vault.centos.org/6.10/os/x86_64/Packages/krb5-libs-1.10.3-65.el6.x86_64.rpm && \
curl -o krb5-workstation-1.10.3-65.el6.x86_64.rpm ftp://ftp.pbone.net/mirror/vault.centos.org/6.9/os/x86_64/Packages/krb5-workstation-1.10.3-65.el6.x86_64.rpm && \
curl -o libkadm5-1.10.3-65.el6.x86_64.rpm ftp://ftp.pbone.net/mirror/vault.centos.org/6.10/os/x86_64/Packages/libkadm5-1.10.3-65.el6.x86_64.rpm && \
curl -o libss-1.41.12-24.el6.x86_64.rpm ftp://ftp.pbone.net/mirror/vault.centos.org/6.9/cr/x86_64/Packages/libss-1.41.12-24.el6.x86_64.rpm && \
curl -o libcom_err-1.41.12-24.el6.x86_64.rpm ftp://ftp.pbone.net/mirror/vault.centos.org/6.9/cr/x86_64/Packages/libcom_err-1.41.12-24.el6.x86_64.rpm && \
rpm -Uvh libkadm5-1.10.3-65.el6.x86_64.rpm libss-1.41.12-24.el6.x86_64.rpm krb5-libs-1.10.3-65.el6.x86_64.rpm krb5-workstation-1.10.3-65.el6.x86_64.rpm libcom_err-1.41.12-24.el6.x86_64.rpm && \
rm -fr *.rpm
RUN cd /tmp && \
curl http://archive.apache.org/dist/commons/daemon/source/commons-daemon-1.0.15-src.tar.gz -o commons-daemon-1.0.15-src.tar.gz && \

@@ -40,7 +40,7 @@ RUN set -x \
ENV CCACHE_DIR=/test_output/ccache
CMD echo "Running PVS version $PKG_VERSION" && mkdir -p $CCACHE_DIR && cd /repo_folder && pvs-studio-analyzer credentials $LICENCE_NAME $LICENCE_KEY -o ./licence.lic \
&& cmake . -D"ENABLE_EMBEDDED_COMPILER"=OFF -D"USE_INTERNAL_PROTOBUF_LIBRARY"=OFF -D"USE_INTERNAL_GRPC_LIBRARY"=OFF -DCMAKE_C_COMPILER=clang-13 -DCMAKE_CXX_COMPILER=clang\+\+-13 \
&& cmake . -D"ENABLE_EMBEDDED_COMPILER"=OFF -D"DISABLE_HERMETIC_BUILD"=ON -DCMAKE_C_COMPILER=clang-13 -DCMAKE_CXX_COMPILER=clang\+\+-13 \
&& ninja re2_st clickhouse_grpc_protos \
&& pvs-studio-analyzer analyze -o pvs-studio.log -e contrib -j 4 -l ./licence.lic; \
cp /repo_folder/pvs-studio.log /test_output; \

Some files were not shown because too many files have changed in this diff Show More