Merge branch 'master' into fix-lz4

mergify[bot] 2022-01-20 11:53:51 +00:00 committed by GitHub
commit 9326f99d78
1626 changed files with 52429 additions and 25352 deletions

4
.github/CODEOWNERS vendored
View File

@ -1,3 +1 @@
docs/* @ClickHouse/docs
docs/zh/* @ClickHouse/docs-zh
website/* @ClickHouse/docs

View File

@ -13,13 +13,4 @@ Changelog entry (a user-readable short description of the changes that goes to C
...
Detailed description / Documentation draft:
...
> By adding documentation, you allow users to try your new feature immediately, rather than waiting for someone else to find time to document it later. Documentation is necessary for all features that affect user experience in any way. You can add a brief documentation draft above, or add documentation right into your patch as Markdown files in the [docs](https://github.com/ClickHouse/ClickHouse/tree/master/docs) folder.
> If you are doing this for the first time, it's recommended to read the lightweight [Contributing to ClickHouse Documentation](https://github.com/ClickHouse/ClickHouse/tree/master/docs/README.md) guide first.
> Information about CI checks: https://clickhouse.tech/docs/en/development/continuous-integration/

8
.github/actionlint.yml vendored Normal file
View File

@ -0,0 +1,8 @@
self-hosted-runner:
labels:
- builder
- fuzzer-unit-tester
- stress-tester
- style-checker
- func-tester-aarch64
- func-tester

View File

@ -33,11 +33,11 @@ jobs:
- name: Cherry pick
run: |
sudo pip install GitPython
cd $GITHUB_WORKSPACE/tests/ci
cd "$GITHUB_WORKSPACE/tests/ci"
python3 cherry_pick.py
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
docker kill "$(docker ps -q)" ||:
docker rm -f "$(docker ps -a -q)" ||:
sudo rm -fr "$TEMP_PATH"

View File

@ -9,23 +9,68 @@ on: # yamllint disable-line rule:truthy
branches:
- 'backport/**'
jobs:
DockerHubPush:
runs-on: [self-hosted, style-checker]
DockerHubPushAarch64:
runs-on: [self-hosted, func-tester-aarch64]
steps:
- name: Clear repository
run: |
sudo rm -fr $GITHUB_WORKSPACE && mkdir $GITHUB_WORKSPACE
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
- name: Images check
run: |
cd $GITHUB_WORKSPACE/tests/ci
python3 docker_images_check.py
cd "$GITHUB_WORKSPACE/tests/ci"
python3 docker_images_check.py --suffix aarch64
- name: Upload images files to artifacts
uses: actions/upload-artifact@v2
with:
name: changed_images_aarch64
path: ${{ runner.temp }}/docker_images_check/changed_images_aarch64.json
DockerHubPushAmd64:
runs-on: [self-hosted, style-checker]
steps:
- name: Clear repository
run: |
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
- name: Images check
run: |
cd "$GITHUB_WORKSPACE/tests/ci"
python3 docker_images_check.py --suffix amd64
- name: Upload images files to artifacts
uses: actions/upload-artifact@v2
with:
name: changed_images_amd64
path: ${{ runner.temp }}/docker_images_check/changed_images_amd64.json
DockerHubPush:
needs: [DockerHubPushAmd64, DockerHubPushAarch64]
runs-on: [self-hosted, style-checker]
steps:
- name: Clear repository
run: |
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
- name: Download changed aarch64 images
uses: actions/download-artifact@v2
with:
name: changed_images_aarch64
path: ${{ runner.temp }}
- name: Download changed amd64 images
uses: actions/download-artifact@v2
with:
name: changed_images_amd64
path: ${{ runner.temp }}
- name: Images check
run: |
cd "$GITHUB_WORKSPACE/tests/ci"
python3 docker_manifests_merge.py --suffix amd64 --suffix aarch64
- name: Upload images files to artifacts
uses: actions/upload-artifact@v2
with:
name: changed_images
path: ${{ runner.temp }}/docker_images_check/changed_images.json
path: ${{ runner.temp }}/changed_images.json
CompatibilityCheck:
needs: [BuilderDebRelease]
runs-on: [self-hosted, style-checker]
@ -39,7 +84,7 @@ jobs:
EOF
- name: Clear repository
run: |
sudo rm -fr $GITHUB_WORKSPACE && mkdir $GITHUB_WORKSPACE
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
- name: Download json reports
@ -48,16 +93,16 @@ jobs:
path: ${{ env.REPORTS_PATH }}
- name: CompatibilityCheck
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci && python3 compatibility_check.py
sudo rm -fr "$TEMP_PATH"
mkdir -p "$TEMP_PATH"
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
cd "$REPO_COPY/tests/ci" && python3 compatibility_check.py
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
docker kill "$(docker ps -q)" ||:
docker rm -f "$(docker ps -a -q)" ||:
sudo rm -fr "$TEMP_PATH"
#########################################################################################
#################################### ORDINARY BUILDS ####################################
#########################################################################################
@ -82,7 +127,7 @@ jobs:
path: ${{ env.IMAGES_PATH }}
- name: Clear repository
run: |
sudo rm -fr $GITHUB_WORKSPACE && mkdir $GITHUB_WORKSPACE
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
with:
@ -90,11 +135,12 @@ jobs:
fetch-depth: 0 # otherwise we will have no info about contributors
- name: Build
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
sudo rm -fr "$TEMP_PATH"
mkdir -p "$TEMP_PATH"
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
cd "$REPO_COPY/tests/ci" && python3 build_check.py "$CHECK_NAME" "$BUILD_NAME"
- name: Upload build URLs to artifacts
if: ${{ success() || failure() }}
uses: actions/upload-artifact@v2
with:
name: ${{ env.BUILD_NAME }}
@ -102,9 +148,50 @@ jobs:
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH $CACHES_PATH
docker kill "$(docker ps -q)" ||:
docker rm -f "$(docker ps -a -q)" ||:
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
BuilderDebAarch64:
needs: [DockerHubPush]
runs-on: [self-hosted, builder]
steps:
- name: Set envs
run: |
cat >> "$GITHUB_ENV" << 'EOF'
TEMP_PATH=${{runner.temp}}/build_check
IMAGES_PATH=${{runner.temp}}/images_path
REPO_COPY=${{runner.temp}}/build_check/ClickHouse
CACHES_PATH=${{runner.temp}}/../ccaches
CHECK_NAME=ClickHouse build check (actions)
BUILD_NAME=package_aarch64
EOF
- name: Download changed images
uses: actions/download-artifact@v2
with:
name: changed_images
path: ${{ runner.temp }}/images_path
- name: Check out repository code
uses: actions/checkout@v2
with:
submodules: 'true'
fetch-depth: 0 # otherwise we will have no info about contributors
- name: Build
run: |
sudo rm -fr "$TEMP_PATH"
mkdir -p "$TEMP_PATH"
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
cd "$REPO_COPY/tests/ci" && python3 build_check.py "$CHECK_NAME" "$BUILD_NAME"
- name: Upload build URLs to artifacts
uses: actions/upload-artifact@v2
with:
name: ${{ env.BUILD_NAME }}
path: ${{ runner.temp }}/build_check/${{ env.BUILD_NAME }}.json
- name: Cleanup
if: always()
run: |
docker kill "$(docker ps -q)" ||:
docker rm -f "$(docker ps -a -q)" ||:
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
BuilderDebAsan:
needs: [DockerHubPush]
runs-on: [self-hosted, builder]
@ -126,7 +213,7 @@ jobs:
path: ${{ env.IMAGES_PATH }}
- name: Clear repository
run: |
sudo rm -fr $GITHUB_WORKSPACE && mkdir $GITHUB_WORKSPACE
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
with:
@ -134,11 +221,12 @@ jobs:
fetch-depth: 0 # otherwise we will have no info about contributors
- name: Build
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
sudo rm -fr "$TEMP_PATH"
mkdir -p "$TEMP_PATH"
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
cd "$REPO_COPY/tests/ci" && python3 build_check.py "$CHECK_NAME" "$BUILD_NAME"
- name: Upload build URLs to artifacts
if: ${{ success() || failure() }}
uses: actions/upload-artifact@v2
with:
name: ${{ env.BUILD_NAME }}
@ -146,9 +234,9 @@ jobs:
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH $CACHES_PATH
docker kill "$(docker ps -q)" ||:
docker rm -f "$(docker ps -a -q)" ||:
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
BuilderDebTsan:
needs: [DockerHubPush]
runs-on: [self-hosted, builder]
@ -170,7 +258,7 @@ jobs:
path: ${{ env.IMAGES_PATH }}
- name: Clear repository
run: |
sudo rm -fr $GITHUB_WORKSPACE && mkdir $GITHUB_WORKSPACE
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
with:
@ -178,11 +266,12 @@ jobs:
fetch-depth: 0 # otherwise we will have no info about contributors
- name: Build
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
sudo rm -fr "$TEMP_PATH"
mkdir -p "$TEMP_PATH"
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
cd "$REPO_COPY/tests/ci" && python3 build_check.py "$CHECK_NAME" "$BUILD_NAME"
- name: Upload build URLs to artifacts
if: ${{ success() || failure() }}
uses: actions/upload-artifact@v2
with:
name: ${{ env.BUILD_NAME }}
@ -190,9 +279,9 @@ jobs:
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH $CACHES_PATH
docker kill "$(docker ps -q)" ||:
docker rm -f "$(docker ps -a -q)" ||:
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
BuilderDebDebug:
needs: [DockerHubPush]
runs-on: [self-hosted, builder]
@ -214,7 +303,7 @@ jobs:
path: ${{ env.IMAGES_PATH }}
- name: Clear repository
run: |
sudo rm -fr $GITHUB_WORKSPACE && mkdir $GITHUB_WORKSPACE
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
with:
@ -222,11 +311,12 @@ jobs:
fetch-depth: 0 # otherwise we will have no info about contributors
- name: Build
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NAME
sudo rm -fr "$TEMP_PATH"
mkdir -p "$TEMP_PATH"
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
cd "$REPO_COPY/tests/ci" && python3 build_check.py "$CHECK_NAME" "$BUILD_NAME"
- name: Upload build URLs to artifacts
if: ${{ success() || failure() }}
uses: actions/upload-artifact@v2
with:
name: ${{ env.BUILD_NAME }}
@ -234,15 +324,16 @@ jobs:
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH $CACHES_PATH
docker kill "$(docker ps -q)" ||:
docker rm -f "$(docker ps -a -q)" ||:
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
############################################################################################
##################################### BUILD REPORTER #######################################
############################################################################################
BuilderReport:
needs:
- BuilderDebRelease
- BuilderDebAarch64
- BuilderDebAsan
- BuilderDebTsan
- BuilderDebDebug
@ -261,21 +352,21 @@ jobs:
path: ${{ env.REPORTS_PATH }}
- name: Clear repository
run: |
sudo rm -fr $GITHUB_WORKSPACE && mkdir $GITHUB_WORKSPACE
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
- name: Report Builder
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cd $GITHUB_WORKSPACE/tests/ci
sudo rm -fr "$TEMP_PATH"
mkdir -p "$TEMP_PATH"
cd "$GITHUB_WORKSPACE/tests/ci"
python3 build_report_check.py "$CHECK_NAME"
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
docker kill "$(docker ps -q)" ||:
docker rm -f "$(docker ps -a -q)" ||:
sudo rm -fr "$TEMP_PATH"
##############################################################################################
########################### FUNCTIONAL STATELESS TESTS #######################################
##############################################################################################
@ -298,22 +389,22 @@ jobs:
path: ${{ env.REPORTS_PATH }}
- name: Clear repository
run: |
sudo rm -fr $GITHUB_WORKSPACE && mkdir $GITHUB_WORKSPACE
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
- name: Functional test
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci
python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
sudo rm -fr "$TEMP_PATH"
mkdir -p "$TEMP_PATH"
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
cd "$REPO_COPY/tests/ci"
python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT"
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
docker kill "$(docker ps -q)" ||:
docker rm -f "$(docker ps -a -q)" ||:
sudo rm -fr "$TEMP_PATH"
##############################################################################################
############################ FUNCTIONAL STATEFUL TESTS #######################################
##############################################################################################
@ -336,22 +427,22 @@ jobs:
path: ${{ env.REPORTS_PATH }}
- name: Clear repository
run: |
sudo rm -fr $GITHUB_WORKSPACE && mkdir $GITHUB_WORKSPACE
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
- name: Functional test
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci
python3 functional_test_check.py "$CHECK_NAME" $KILL_TIMEOUT
sudo rm -fr "$TEMP_PATH"
mkdir -p "$TEMP_PATH"
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
cd "$REPO_COPY/tests/ci"
python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT"
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
docker kill "$(docker ps -q)" ||:
docker rm -f "$(docker ps -a -q)" ||:
sudo rm -fr "$TEMP_PATH"
##############################################################################################
######################################### STRESS TESTS #######################################
##############################################################################################
@ -377,22 +468,22 @@ jobs:
path: ${{ env.REPORTS_PATH }}
- name: Clear repository
run: |
sudo rm -fr $GITHUB_WORKSPACE && mkdir $GITHUB_WORKSPACE
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
- name: Stress test
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci
sudo rm -fr "$TEMP_PATH"
mkdir -p "$TEMP_PATH"
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
cd "$REPO_COPY/tests/ci"
python3 stress_check.py "$CHECK_NAME"
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
docker kill "$(docker ps -q)" ||:
docker rm -f "$(docker ps -a -q)" ||:
sudo rm -fr "$TEMP_PATH"
#############################################################################################
############################# INTEGRATION TESTS #############################################
#############################################################################################
@ -414,22 +505,22 @@ jobs:
path: ${{ env.REPORTS_PATH }}
- name: Clear repository
run: |
sudo rm -fr $GITHUB_WORKSPACE && mkdir $GITHUB_WORKSPACE
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
- name: Integration test
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci
sudo rm -fr "$TEMP_PATH"
mkdir -p "$TEMP_PATH"
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
cd "$REPO_COPY/tests/ci"
python3 integration_test_check.py "$CHECK_NAME"
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
docker kill "$(docker ps -q)" ||:
docker rm -f "$(docker ps -a -q)" ||:
sudo rm -fr "$TEMP_PATH"
FinishCheck:
needs:
- DockerHubPush
@ -443,10 +534,10 @@ jobs:
steps:
- name: Clear repository
run: |
sudo rm -fr $GITHUB_WORKSPACE && mkdir $GITHUB_WORKSPACE
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
- name: Finish label
run: |
cd $GITHUB_WORKSPACE/tests/ci
cd "$GITHUB_WORKSPACE/tests/ci"
python3 finish_check.py
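The DockerHubPush job is now split by architecture: DockerHubPushAmd64 and DockerHubPushAarch64 build and push per-architecture images on matching runners, and the final DockerHubPush job downloads both changed_images_* artifacts and runs docker_manifests_merge.py. That script is not part of this hunk; conceptually the merge step publishes a multi-arch manifest list, roughly like the following hypothetical docker CLI sketch (image names and tags are placeholders, not the real CI images):

# Placeholder names -- the real tags come from changed_images_amd64.json / changed_images_aarch64.json.
docker manifest create example/clickhouse-image:latest \
    example/clickhouse-image:latest-amd64 \
    example/clickhouse-image:latest-aarch64
docker manifest push example/clickhouse-image:latest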

View File

@ -6,7 +6,7 @@ env:
on: # yamllint disable-line rule:truthy
workflow_run:
workflows: ["CIGithubActions", "ReleaseCI", "DocsCheck", "BackportPR"]
workflows: ["PullRequestCI", "ReleaseCI", "DocsCheck", "BackportPR"]
types:
- requested
jobs:

View File

@ -21,31 +21,77 @@ jobs:
steps:
- name: Clear repository
run: |
sudo rm -rf $GITHUB_WORKSPACE && mkdir $GITHUB_WORKSPACE
sudo rm -rf "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
- name: Labels check
run: |
cd $GITHUB_WORKSPACE/tests/ci
cd "$GITHUB_WORKSPACE/tests/ci"
python3 run_check.py
DockerHubPush:
DockerHubPushAarch64:
needs: CheckLabels
runs-on: [self-hosted, func-tester-aarch64]
steps:
- name: Clear repository
run: |
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
- name: Images check
run: |
cd "$GITHUB_WORKSPACE/tests/ci"
python3 docker_images_check.py --suffix aarch64
- name: Upload images files to artifacts
uses: actions/upload-artifact@v2
with:
name: changed_images_aarch64
path: ${{ runner.temp }}/docker_images_check/changed_images_aarch64.json
DockerHubPushAmd64:
needs: CheckLabels
runs-on: [self-hosted, style-checker]
steps:
- name: Clear repository
run: |
sudo rm -rf $GITHUB_WORKSPACE && mkdir $GITHUB_WORKSPACE
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
- name: Images check
run: |
cd $GITHUB_WORKSPACE/tests/ci
python3 docker_images_check.py
cd "$GITHUB_WORKSPACE/tests/ci"
python3 docker_images_check.py --suffix amd64
- name: Upload images files to artifacts
uses: actions/upload-artifact@v2
with:
name: changed_images_amd64
path: ${{ runner.temp }}/docker_images_check/changed_images_amd64.json
DockerHubPush:
needs: [DockerHubPushAmd64, DockerHubPushAarch64]
runs-on: [self-hosted, style-checker]
steps:
- name: Clear repository
run: |
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
- name: Download changed aarch64 images
uses: actions/download-artifact@v2
with:
name: changed_images_aarch64
path: ${{ runner.temp }}
- name: Download changed amd64 images
uses: actions/download-artifact@v2
with:
name: changed_images_amd64
path: ${{ runner.temp }}
- name: Images check
run: |
cd "$GITHUB_WORKSPACE/tests/ci"
python3 docker_manifests_merge.py --suffix amd64 --suffix aarch64
- name: Upload images files to artifacts
uses: actions/upload-artifact@v2
with:
name: changed_images
path: ${{ runner.temp }}/docker_images_check/changed_images.json
path: ${{ runner.temp }}/changed_images.json
DocsCheck:
needs: DockerHubPush
runs-on: [self-hosted, func-tester]
@ -63,17 +109,17 @@ jobs:
path: ${{ env.TEMP_PATH }}
- name: Clear repository
run: |
sudo rm -rf $GITHUB_WORKSPACE && mkdir $GITHUB_WORKSPACE
sudo rm -rf "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
- name: Docs Check
run: |
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
cd "$REPO_COPY/tests/ci"
python3 docs_check.py
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
docker kill "$(docker ps -q)" ||:
docker rm -f "$(docker ps -a -q)" ||:
sudo rm -fr "$TEMP_PATH"

121
.github/workflows/docs_release.yml vendored Normal file
View File

@ -0,0 +1,121 @@
name: DocsReleaseChecks
env:
# Force the stdout and stderr streams to be unbuffered
PYTHONUNBUFFERED: 1
concurrency:
group: master-release
cancel-in-progress: true
on: # yamllint disable-line rule:truthy
push:
branches:
- master
paths:
- 'docs/**'
- 'website/**'
- 'benchmark/**'
- 'docker/**'
- '.github/**'
workflow_dispatch:
jobs:
DockerHubPushAarch64:
runs-on: [self-hosted, func-tester-aarch64]
steps:
- name: Clear repository
run: |
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
- name: Images check
run: |
cd "$GITHUB_WORKSPACE/tests/ci"
python3 docker_images_check.py --suffix aarch64
- name: Upload images files to artifacts
uses: actions/upload-artifact@v2
with:
name: changed_images_aarch64
path: ${{ runner.temp }}/docker_images_check/changed_images_aarch64.json
DockerHubPushAmd64:
runs-on: [self-hosted, style-checker]
steps:
- name: Clear repository
run: |
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
- name: Images check
run: |
cd "$GITHUB_WORKSPACE/tests/ci"
python3 docker_images_check.py --suffix amd64
- name: Upload images files to artifacts
uses: actions/upload-artifact@v2
with:
name: changed_images_amd64
path: ${{ runner.temp }}/docker_images_check/changed_images_amd64.json
DockerHubPush:
needs: [DockerHubPushAmd64, DockerHubPushAarch64]
runs-on: [self-hosted, style-checker]
steps:
- name: Clear repository
run: |
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
- name: Download changed aarch64 images
uses: actions/download-artifact@v2
with:
name: changed_images_aarch64
path: ${{ runner.temp }}
- name: Download changed amd64 images
uses: actions/download-artifact@v2
with:
name: changed_images_amd64
path: ${{ runner.temp }}
- name: Images check
run: |
cd "$GITHUB_WORKSPACE/tests/ci"
python3 docker_manifests_merge.py --suffix amd64 --suffix aarch64
- name: Upload images files to artifacts
uses: actions/upload-artifact@v2
with:
name: changed_images
path: ${{ runner.temp }}/changed_images.json
DocsRelease:
needs: DockerHubPush
runs-on: [self-hosted, func-tester]
steps:
- name: Set envs
# https://docs.github.com/en/actions/learn-github-actions/workflow-commands-for-github-actions#multiline-strings
run: |
cat >> "$GITHUB_ENV" << 'EOF'
TEMP_PATH=${{runner.temp}}/docs_release
REPO_COPY=${{runner.temp}}/docs_release/ClickHouse
CLOUDFLARE_TOKEN=${{secrets.CLOUDFLARE}}
ROBOT_CLICKHOUSE_SSH_KEY<<RCSK
${{secrets.ROBOT_CLICKHOUSE_SSH_KEY}}
RCSK
EOF
- name: Clear repository
run: |
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
- name: Download changed images
uses: actions/download-artifact@v2
with:
name: changed_images
path: ${{ env.TEMP_PATH }}
- name: Docs Release
run: |
sudo rm -fr "$TEMP_PATH"
mkdir -p "$TEMP_PATH"
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
cd "$REPO_COPY/tests/ci"
python3 docs_release.py
- name: Cleanup
if: always()
run: |
docker kill "$(docker ps -q)" ||:
docker rm -f "$(docker ps -a -q)" ||:
sudo rm -fr "$TEMP_PATH"

44
.github/workflows/jepsen.yml vendored Normal file
View File

@ -0,0 +1,44 @@
name: JepsenWorkflow
env:
# Force the stdout and stderr streams to be unbuffered
PYTHONUNBUFFERED: 1
concurrency:
group: jepsen
on: # yamllint disable-line rule:truthy
schedule:
- cron: '0 */6 * * *'
workflow_run:
workflows: ["PullRequestCI"]
types:
- completed
workflow_dispatch:
jobs:
KeeperJepsenRelease:
runs-on: [self-hosted, style-checker]
steps:
- name: Set envs
run: |
cat >> "$GITHUB_ENV" << 'EOF'
TEMP_PATH=${{runner.temp}}/keeper_jepsen
REPO_COPY=${{runner.temp}}/keeper_jepsen/ClickHouse
EOF
- name: Clear repository
run: |
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
with:
fetch-depth: 0
- name: Jepsen Test
run: |
sudo rm -fr "$TEMP_PATH"
mkdir -p "$TEMP_PATH"
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
cd "$REPO_COPY/tests/ci"
python3 keeper_jepsen_check.py
- name: Cleanup
if: always()
run: |
docker kill "$(docker ps -q)" ||:
docker rm -f "$(docker ps -a -q)" ||:
sudo rm -fr "$TEMP_PATH"

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -1,76 +1,39 @@
name: DocsReleaseChecks
name: ReleaseWorkflow
# - Gets artifacts from S3
# - Sends them to JFROG Artifactory
# - Adds them to the release assets
env:
# Force the stdout and stderr streams to be unbuffered
PYTHONUNBUFFERED: 1
concurrency:
group: master-release
cancel-in-progress: true
on: # yamllint disable-line rule:truthy
push:
branches:
- master
paths:
- 'docs/**'
- 'website/**'
- 'benchmark/**'
- 'docker/**'
- '.github/**'
workflow_dispatch:
release:
types:
- published
jobs:
DockerHubPush:
ReleasePublish:
runs-on: [self-hosted, style-checker]
steps:
- name: Clear repository
run: |
sudo rm -fr $GITHUB_WORKSPACE && mkdir $GITHUB_WORKSPACE
- name: Check out repository code
uses: actions/checkout@v2
- name: Images check
run: |
cd $GITHUB_WORKSPACE/tests/ci
python3 docker_images_check.py
- name: Upload images files to artifacts
uses: actions/upload-artifact@v2
with:
name: changed_images
path: ${{ runner.temp }}/docker_images_check/changed_images.json
DocsRelease:
needs: DockerHubPush
runs-on: [self-hosted, func-tester]
steps:
- name: Set envs
# https://docs.github.com/en/actions/learn-github-actions/workflow-commands-for-github-actions#multiline-strings
run: |
cat >> "$GITHUB_ENV" << 'EOF'
TEMP_PATH=${{runner.temp}}/docs_release
REPO_COPY=${{runner.temp}}/docs_release/ClickHouse
CLOUDFLARE_TOKEN=${{secrets.CLOUDFLARE}}
ROBOT_CLICKHOUSE_SSH_KEY<<RCSK
${{secrets.ROBOT_CLICKHOUSE_SSH_KEY}}
RCSK
EOF
- name: Clear repository
run: |
sudo rm -fr $GITHUB_WORKSPACE && mkdir $GITHUB_WORKSPACE
- name: Check out repository code
uses: actions/checkout@v2
- name: Download changed images
uses: actions/download-artifact@v2
with:
name: changed_images
path: ${{ env.TEMP_PATH }}
- name: Docs Release
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci
python3 docs_release.py
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
- name: Set envs
run: |
cat >> "$GITHUB_ENV" << 'EOF'
JFROG_API_KEY=${{ secrets.JFROG_KEY_API_PACKAGES }}
TEMP_PATH=${{runner.temp}}/release_packages
REPO_COPY=${{runner.temp}}/release_packages/ClickHouse
EOF
- name: Check out repository code
uses: actions/checkout@v2
- name: Download packages and push to Artifactory
env:
run: |
rm -rf "$TEMP_PATH" && mkdir -p "$REPO_COPY"
cp -r "$GITHUB_WORKSPACE" "$REPO_COPY"
cd "$REPO_COPY"
python3 ./tests/ci/push_to_artifactory.py --release "${{ github.ref }}" \
--commit '${{ github.sha }}' --all
- name: Upload packages to release assets
uses: svenstaro/upload-release-action@v2
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
file: ${{runner.temp}}/release_packages/*
overwrite: true
tag: ${{ github.ref }}
file_glob: true

File diff suppressed because it is too large

View File

@ -23,20 +23,20 @@ jobs:
EOF
- name: Clear repository
run: |
sudo rm -fr $GITHUB_WORKSPACE && mkdir $GITHUB_WORKSPACE
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
with:
submodules: 'true'
- name: Codebrowser
run: |
sudo rm -fr $TEMP_PATH
mkdir -p $TEMP_PATH
cp -r $GITHUB_WORKSPACE $TEMP_PATH
cd $REPO_COPY/tests/ci && python3 codebrowser_check.py
sudo rm -fr "$TEMP_PATH"
mkdir -p "$TEMP_PATH"
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
cd "$REPO_COPY/tests/ci" && python3 codebrowser_check.py
- name: Cleanup
if: always()
run: |
docker kill $(docker ps -q) ||:
docker rm -f $(docker ps -a -q) ||:
sudo rm -fr $TEMP_PATH
docker kill "$(docker ps -q)" ||:
docker rm -f "$(docker ps -a -q)" ||:
sudo rm -fr "$TEMP_PATH"

1
.gitignore vendored
View File

@ -13,6 +13,7 @@
/build_*
/build-*
/tests/venv
/obj-x86_64-linux-gnu/
# logs
*.log

11
.gitmodules vendored
View File

@ -54,8 +54,8 @@
url = https://github.com/ClickHouse-Extras/Turbo-Base64.git
[submodule "contrib/arrow"]
path = contrib/arrow
url = https://github.com/ClickHouse-Extras/arrow
branch = clickhouse-arrow-2.0.0
url = https://github.com/ClickHouse-Extras/arrow.git
branch = blessed/release-6.0.1
[submodule "contrib/thrift"]
path = contrib/thrift
url = https://github.com/apache/thrift.git
@ -190,8 +190,8 @@
url = https://github.com/xz-mirror/xz
[submodule "contrib/abseil-cpp"]
path = contrib/abseil-cpp
url = https://github.com/ClickHouse-Extras/abseil-cpp.git
branch = lts_2020_02_25
url = https://github.com/abseil/abseil-cpp.git
branch = lts_2021_11_02
[submodule "contrib/dragonbox"]
path = contrib/dragonbox
url = https://github.com/ClickHouse-Extras/dragonbox.git
@ -247,6 +247,9 @@
[submodule "contrib/sysroot"]
path = contrib/sysroot
url = https://github.com/ClickHouse-Extras/sysroot.git
[submodule "contrib/hive-metastore"]
path = contrib/hive-metastore
url = https://github.com/ClickHouse-Extras/hive-metastore
[submodule "contrib/azure"]
path = contrib/azure
url = https://github.com/ClickHouse-Extras/azure-sdk-for-cpp.git
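A .gitmodules edit like the arrow and abseil-cpp changes above only records the new URL and branch; an existing checkout still has to re-sync the submodule configuration and check out the commit pinned by this merge. A typical sequence, shown here for the contrib/arrow path purely as an illustration:

# Re-read .gitmodules into .git/config, then check out the commit recorded by the superproject.
git submodule sync contrib/arrow
git submodule update --init contrib/arrow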

File diff suppressed because it is too large

View File

@ -139,7 +139,6 @@ if (ENABLE_FUZZING)
set (ENABLE_CLICKHOUSE_ODBC_BRIDGE OFF)
set (ENABLE_LIBRARIES 0)
set (ENABLE_SSL 1)
set (USE_INTERNAL_SSL_LIBRARY 1)
set (USE_UNWIND ON)
set (ENABLE_EMBEDDED_COMPILER 0)
set (ENABLE_EXAMPLES 0)
@ -152,7 +151,6 @@ if (ENABLE_FUZZING)
# For codegen_select_fuzzer
set (ENABLE_PROTOBUF 1)
set (USE_INTERNAL_PROTOBUF_LIBRARY 1)
endif()
# Global libraries
@ -415,13 +413,19 @@ else ()
endif ()
if (WERROR)
add_warning(error)
# Don't pollute CMAKE_CXX_FLAGS with -Werror as it will break some CMake checks.
# Instead, adopt modern cmake usage requirement.
target_compile_options(global-libs INTERFACE "-Werror")
endif ()
# Make this extra-checks for correct library dependencies.
if (OS_LINUX AND NOT SANITIZE)
set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -Wl,--no-undefined")
set (CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -Wl,--no-undefined")
target_link_options(global-libs INTERFACE "-Wl,--no-undefined")
endif ()
# Increase stack size on Musl. We need big stack for our recursive-descend parser.
if (USE_MUSL)
set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -Wl,-z,stack-size=2097152")
endif ()
include(cmake/dbms_glob_sources.cmake)
@ -451,6 +455,11 @@ if (MAKE_STATIC_LIBRARIES)
endif ()
else ()
set (CMAKE_POSITION_INDEPENDENT_CODE ON)
# This is required for clang on Arch Linux, which uses PIE by default.
# See enable-SSP-and-PIE-by-default.patch [1].
#
# [1]: https://github.com/archlinux/svntogit-packages/blob/6e681aa860e65ad46a1387081482eb875c2200f2/trunk/enable-SSP-and-PIE-by-default.patch
set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -no-pie")
endif ()
if (ENABLE_TESTS)
@ -476,80 +485,6 @@ message (STATUS
CCACHE=${CCACHE_FOUND} ${CCACHE_VERSION}")
include (GNUInstallDirs)
include (cmake/contrib_finder.cmake)
find_contrib_lib(double-conversion) # Must be before parquet
include (cmake/find/ssl.cmake)
include (cmake/find/ldap.cmake) # after ssl
include (cmake/find/icu.cmake)
include (cmake/find/xz.cmake)
include (cmake/find/zlib.cmake)
include (cmake/find/zstd.cmake)
include (cmake/find/ltdl.cmake) # for odbc
# openssl, zlib before poco
include (cmake/find/sparsehash.cmake)
include (cmake/find/re2.cmake)
include (cmake/find/krb5.cmake)
include (cmake/find/libgsasl.cmake)
include (cmake/find/cyrus-sasl.cmake)
include (cmake/find/rdkafka.cmake)
include (cmake/find/libuv.cmake) # for amqpcpp and cassandra
include (cmake/find/amqpcpp.cmake)
include (cmake/find/capnp.cmake)
include (cmake/find/llvm.cmake)
include (cmake/find/h3.cmake)
include (cmake/find/libxml2.cmake)
include (cmake/find/brotli.cmake)
include (cmake/find/protobuf.cmake)
include (cmake/find/grpc.cmake)
include (cmake/find/pdqsort.cmake)
include (cmake/find/miniselect.cmake)
include (cmake/find/hdfs3.cmake) # uses protobuf
include (cmake/find/poco.cmake)
include (cmake/find/curl.cmake)
include (cmake/find/s3.cmake)
include (cmake/find/blob_storage.cmake)
include (cmake/find/base64.cmake)
include (cmake/find/parquet.cmake)
include (cmake/find/simdjson.cmake)
include (cmake/find/fast_float.cmake)
include (cmake/find/rapidjson.cmake)
include (cmake/find/fastops.cmake)
include (cmake/find/odbc.cmake)
include (cmake/find/nanodbc.cmake)
include (cmake/find/sqlite.cmake)
include (cmake/find/rocksdb.cmake)
include (cmake/find/libpqxx.cmake)
include (cmake/find/nuraft.cmake)
include (cmake/find/yaml-cpp.cmake)
include (cmake/find/s2geometry.cmake)
include (cmake/find/nlp.cmake)
include (cmake/find/bzip2.cmake)
include (cmake/find/filelog.cmake)
if(NOT USE_INTERNAL_PARQUET_LIBRARY)
set (ENABLE_ORC OFF CACHE INTERNAL "")
endif()
include (cmake/find/orc.cmake)
include (cmake/find/avro.cmake)
include (cmake/find/msgpack.cmake)
include (cmake/find/cassandra.cmake)
include (cmake/find/sentry.cmake)
include (cmake/find/datasketches.cmake)
include (cmake/find/libprotobuf-mutator.cmake)
set (USE_INTERNAL_CITYHASH_LIBRARY ON CACHE INTERNAL "")
find_contrib_lib(cityhash)
find_contrib_lib(farmhash)
if (ENABLE_TESTS)
include (cmake/find/gtest.cmake)
endif ()
# Need to process before "contrib" dir:
include (cmake/find/mysqlclient.cmake)
# When testing for memory leaks with Valgrind, don't link tcmalloc or jemalloc.

View File

@ -21,9 +21,10 @@ The following versions of ClickHouse server are currently being supported with s
| 21.6 | :x: |
| 21.7 | :x: |
| 21.8 | ✅ |
| 21.9 | ✅ |
| 21.9 | :x: |
| 21.10 | ✅ |
| 21.11 | ✅ |
| 21.12 | ✅ |
## Reporting a Vulnerability

View File

@ -9,7 +9,3 @@ add_subdirectory (pcg-random)
add_subdirectory (widechar_width)
add_subdirectory (readpassphrase)
add_subdirectory (bridge)
if (USE_MYSQL)
add_subdirectory (mysqlxx)
endif ()

View File

@ -1,8 +1,6 @@
set (SRCS
argsToConfig.cpp
coverage.cpp
DateLUT.cpp
DateLUTImpl.cpp
demangle.cpp
getFQDNOrHostName.cpp
getMemoryAmount.cpp
@ -18,7 +16,6 @@ set (SRCS
sleep.cpp
terminalColors.cpp
errnoToString.cpp
getResource.cpp
StringRef.cpp
)
@ -27,9 +24,12 @@ if (ENABLE_REPLXX)
endif ()
if (USE_DEBUG_HELPERS)
get_target_property(MAGIC_ENUM_INCLUDE_DIR magic_enum INTERFACE_INCLUDE_DIRECTORIES)
set (INCLUDE_DEBUG_HELPERS "-I\"${MAGIC_ENUM_INCLUDE_DIR}\" -include \"${ClickHouse_SOURCE_DIR}/base/base/iostream_debug_helpers.h\"")
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${INCLUDE_DEBUG_HELPERS}")
get_target_property(MAGIC_ENUM_INCLUDE_DIR ch_contrib::magic_enum INTERFACE_INCLUDE_DIRECTORIES)
# A CMake generator expression will do insane quoting when it encounters special characters like quotes, spaces, etc.
# Prefixing "SHELL:" will force it to use the original text.
set (INCLUDE_DEBUG_HELPERS "SHELL:-I\"${MAGIC_ENUM_INCLUDE_DIR}\" -include \"${ClickHouse_SOURCE_DIR}/base/base/iostream_debug_helpers.h\"")
# Use a generator expression as we don't want to pollute CMAKE_CXX_FLAGS, which would interfere with the CMake check system.
add_compile_options($<$<COMPILE_LANGUAGE:CXX>:${INCLUDE_DEBUG_HELPERS}>)
endif ()
add_library (common ${SRCS})
@ -40,10 +40,6 @@ else ()
target_compile_definitions(common PUBLIC WITH_COVERAGE=0)
endif ()
if (USE_INTERNAL_CCTZ)
set_source_files_properties(DateLUTImpl.cpp PROPERTIES COMPILE_DEFINITIONS USE_INTERNAL_CCTZ)
endif()
target_include_directories(common PUBLIC .. "${CMAKE_CURRENT_BINARY_DIR}/..")
if (OS_DARWIN AND NOT MAKE_STATIC_LIBRARIES)
@ -52,17 +48,17 @@ endif()
target_link_libraries (common
PUBLIC
${CITYHASH_LIBRARIES}
ch_contrib::cityhash
boost::headers_only
boost::system
Poco::Net
Poco::Net::SSL
Poco::Util
Poco::Foundation
replxx
cctz
fmt
magic_enum
ch_contrib::replxx
ch_contrib::cctz
ch_contrib::fmt
ch_contrib::magic_enum
)
if (ENABLE_TESTS)

View File

@ -48,7 +48,9 @@ struct StringRef
std::string toString() const { return std::string(data, size); }
explicit operator std::string() const { return toString(); }
constexpr explicit operator std::string_view() const { return {data, size}; }
std::string_view toView() const { return std::string_view(data, size); }
constexpr explicit operator std::string_view() const { return std::string_view(data, size); }
};
/// Here constexpr doesn't implicate inline, see https://www.viva64.com/en/w/v1043/

View File

@ -1,7 +1,7 @@
#pragma once
#include "strong_typedef.h"
#include "extended_types.h"
#include <base/strong_typedef.h>
#include <base/extended_types.h>
namespace DB
{

View File

@ -123,6 +123,12 @@ bool hasPHDRCache()
#else
void updatePHDRCache() {}
bool hasPHDRCache() { return false; }
#if defined(USE_MUSL)
/// When statically linked with musl, dl_iterate_phdr is immutable.
bool hasPHDRCache() { return true; }
#else
bool hasPHDRCache() { return false; }
#endif
#endif

View File

@ -2,7 +2,7 @@
#include <base/scope_guard.h>
#include <base/logger_useful.h>
#include <Common/MemoryTracker.h>
#include <Common/LockMemoryExceptionInThread.h>
/// Same as SCOPE_EXIT() but blocks the MEMORY_LIMIT_EXCEEDED errors.
///
@ -12,8 +12,7 @@
///
/// NOTE: it should be used with caution.
#define SCOPE_EXIT_MEMORY(...) SCOPE_EXIT( \
MemoryTracker::LockExceptionInThread \
lock_memory_tracker(VariableContext::Global); \
LockMemoryExceptionInThread lock_memory_tracker(VariableContext::Global); \
__VA_ARGS__; \
)
@ -57,8 +56,7 @@
#define SCOPE_EXIT_MEMORY_SAFE(...) SCOPE_EXIT( \
try \
{ \
MemoryTracker::LockExceptionInThread \
lock_memory_tracker(VariableContext::Global); \
LockMemoryExceptionInThread lock_memory_tracker(VariableContext::Global); \
__VA_ARGS__; \
} \
catch (...) \

View File

@ -125,11 +125,11 @@ public:
template <size_t Bits, typename Signed, size_t Bits2, typename Signed2>
struct common_type<wide::integer<Bits, Signed>, wide::integer<Bits2, Signed2>>
{
using type = std::conditional_t < Bits == Bits2,
wide::integer<
Bits,
std::conditional_t<(std::is_same_v<Signed, Signed2> && std::is_same_v<Signed2, signed>), signed, unsigned>>,
std::conditional_t<Bits2<Bits, wide::integer<Bits, Signed>, wide::integer<Bits2, Signed2>>>;
using type = std::conditional_t<Bits == Bits2,
wide::integer<
Bits,
std::conditional_t<(std::is_same_v<Signed, Signed2> && std::is_same_v<Signed2, signed>), signed, unsigned>>,
std::conditional_t<Bits2<Bits, wide::integer<Bits, Signed>, wide::integer<Bits2, Signed2>>>;
};
template <size_t Bits, typename Signed, typename Arithmetic>
@ -827,7 +827,7 @@ public:
CompilerUInt128 a = (CompilerUInt128(numerator.items[1]) << 64) + numerator.items[0];
CompilerUInt128 b = (CompilerUInt128(denominator.items[1]) << 64) + denominator.items[0];
CompilerUInt128 c = a / b;
CompilerUInt128 c = a / b; // NOLINT
integer<Bits, Signed> res;
res.items[0] = c;
@ -1020,8 +1020,15 @@ constexpr integer<Bits, Signed>::integer(std::initializer_list<T> il) noexcept
{
auto it = il.begin();
for (size_t i = 0; i < _impl::item_count; ++i)
{
if (it < il.end())
{
items[i] = *it;
++it;
}
else
items[i] = 0;
}
}
}

View File

@ -9,6 +9,7 @@
#include <Common/StringUtils/StringUtils.h>
#include <Common/SensitiveDataMasker.h>
#include <Common/config.h>
#include <base/errnoToString.h>
#include <IO/ReadHelpers.h>
#include <Formats/registerFormats.h>

View File

@ -51,6 +51,7 @@
#include <Common/getExecutablePath.h>
#include <Common/getHashOfLoadedBinary.h>
#include <Common/Elf.h>
#include <Common/setThreadName.h>
#include <filesystem>
#include <loggers/OwnFormattingChannel.h>

View File

@ -12,6 +12,6 @@ endif()
target_link_libraries (daemon PUBLIC loggers PRIVATE clickhouse_common_io clickhouse_common_config common ${EXECINFO_LIBRARIES})
if (USE_SENTRY)
target_link_libraries (daemon PRIVATE ${SENTRY_LIBRARY})
if (TARGET ch_contrib::sentry)
target_link_libraries (daemon PRIVATE ch_contrib::sentry)
endif ()

View File

@ -5,9 +5,11 @@
#include <Poco/Util/AbstractConfiguration.h>
#include "OwnFormattingChannel.h"
#include "OwnPatternFormatter.h"
#include "OwnSplitChannel.h"
#include <Poco/ConsoleChannel.h>
#include <Poco/Logger.h>
#include <Poco/Net/RemoteSyslogChannel.h>
#include <Interpreters/TextLog.h>
#include <filesystem>
namespace fs = std::filesystem;

View File

@ -5,9 +5,12 @@
#include <Poco/AutoPtr.h>
#include <Poco/FileChannel.h>
#include <Poco/Util/Application.h>
#include <Interpreters/TextLog.h>
#include "OwnSplitChannel.h"
namespace DB
{
class TextLog;
}
namespace Poco::Util
{

View File

@ -10,6 +10,8 @@
#include <Poco/Message.h>
#include <Common/CurrentThread.h>
#include <Common/DNSResolver.h>
#include <Common/setThreadName.h>
#include <Common/LockMemoryExceptionInThread.h>
#include <base/getThreadId.h>
#include <Common/SensitiveDataMasker.h>
#include <Common/IO.h>
@ -57,7 +59,7 @@ void OwnSplitChannel::tryLogSplit(const Poco::Message & msg)
/// but let's log it into the stderr at least.
catch (...)
{
MemoryTracker::LockExceptionInThread lock_memory_tracker(VariableContext::Global);
LockMemoryExceptionInThread lock_memory_tracker(VariableContext::Global);
const std::string & exception_message = getCurrentExceptionMessage(true);
const std::string & message = msg.getText();

View File

@ -1,11 +1,16 @@
#pragma once
#include <atomic>
#include <vector>
#include <map>
#include <mutex>
#include <Poco/AutoPtr.h>
#include <Poco/Channel.h>
#include "ExtendedLogChannel.h"
#include <Interpreters/TextLog.h>
namespace DB
{
class TextLog;
}
namespace DB
{

View File

@ -1,61 +0,0 @@
add_library (mysqlxx
Connection.cpp
Exception.cpp
Query.cpp
ResultBase.cpp
UseQueryResult.cpp
Row.cpp
Value.cpp
Pool.cpp
PoolFactory.cpp
PoolWithFailover.cpp
)
target_include_directories (mysqlxx PUBLIC ..)
if (NOT USE_INTERNAL_MYSQL_LIBRARY)
set(PLATFORM_LIBRARIES ${CMAKE_DL_LIBS})
if (USE_MYSQL)
target_include_directories (mysqlxx SYSTEM PRIVATE ${MYSQL_INCLUDE_DIR})
endif ()
if (APPLE)
find_library (ICONV_LIBRARY iconv)
set (MYSQLCLIENT_LIBRARIES ${MYSQLCLIENT_LIBRARIES} ${STATIC_MYSQLCLIENT_LIB} ${ICONV_LIBRARY})
elseif (USE_STATIC_LIBRARIES AND STATIC_MYSQLCLIENT_LIB)
set (MYSQLCLIENT_LIBRARIES ${STATIC_MYSQLCLIENT_LIB})
endif ()
endif ()
target_link_libraries (mysqlxx
PUBLIC
common
PRIVATE
${MYSQLCLIENT_LIBRARIES}
${ZLIB_LIBRARIES}
)
if(OPENSSL_LIBRARIES)
target_link_libraries(mysqlxx PRIVATE ${OPENSSL_LIBRARIES})
endif()
target_link_libraries(mysqlxx PRIVATE ${PLATFORM_LIBRARIES})
if (NOT USE_INTERNAL_MYSQL_LIBRARY AND OPENSSL_INCLUDE_DIR)
target_include_directories (mysqlxx SYSTEM PRIVATE ${OPENSSL_INCLUDE_DIR})
endif ()
target_no_warning(mysqlxx reserved-macro-identifier)
if (NOT USE_INTERNAL_MYSQL_LIBRARY AND USE_STATIC_LIBRARIES)
message(WARNING "Statically linking with system mysql/mariadb only works "
"if mysql client libraries are built with same openssl version as "
"we are going to use now. It wouldn't work if GnuTLS is used. "
"Try -D\"USE_INTERNAL_MYSQL_LIBRARY\"=ON or -D\"ENABLE_MYSQL\"=OFF or "
"-D\"USE_STATIC_LIBRARIES\"=OFF")
endif ()
if (ENABLE_TESTS)
add_subdirectory (tests)
endif ()

View File

@ -1,433 +0,0 @@
# https://github.com/apache/arrow/blob/master/cpp/cmake_modules/FindArrow.cmake
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# - Find Arrow (arrow/api.h, libarrow.a, libarrow.so)
# This module defines
# ARROW_FOUND, whether Arrow has been found
# ARROW_FULL_SO_VERSION, full shared object version of found Arrow "100.0.0"
# ARROW_IMPORT_LIB, path to libarrow's import library (Windows only)
# ARROW_INCLUDE_DIR, directory containing headers
# ARROW_LIBS, deprecated. Use ARROW_LIB_DIR instead
# ARROW_LIB_DIR, directory containing Arrow libraries
# ARROW_SHARED_IMP_LIB, deprecated. Use ARROW_IMPORT_LIB instead
# ARROW_SHARED_LIB, path to libarrow's shared library
# ARROW_SO_VERSION, shared object version of found Arrow such as "100"
# ARROW_STATIC_LIB, path to libarrow.a
# ARROW_VERSION, version of found Arrow
# ARROW_VERSION_MAJOR, major version of found Arrow
# ARROW_VERSION_MINOR, minor version of found Arrow
# ARROW_VERSION_PATCH, patch version of found Arrow
if(DEFINED ARROW_FOUND)
return()
endif()
include(FindPkgConfig)
include(FindPackageHandleStandardArgs)
set(ARROW_SEARCH_LIB_PATH_SUFFIXES)
if(CMAKE_LIBRARY_ARCHITECTURE)
list(APPEND ARROW_SEARCH_LIB_PATH_SUFFIXES "lib/${CMAKE_LIBRARY_ARCHITECTURE}")
endif()
list(APPEND ARROW_SEARCH_LIB_PATH_SUFFIXES
"lib64"
"lib32"
"lib"
"bin")
set(ARROW_CONFIG_SUFFIXES
"_RELEASE"
"_RELWITHDEBINFO"
"_MINSIZEREL"
"_DEBUG"
"")
if(CMAKE_BUILD_TYPE)
string(TOUPPER ${CMAKE_BUILD_TYPE} ARROW_CONFIG_SUFFIX_PREFERRED)
set(ARROW_CONFIG_SUFFIX_PREFERRED "_${ARROW_CONFIG_SUFFIX_PREFERRED}")
list(INSERT ARROW_CONFIG_SUFFIXES 0 "${ARROW_CONFIG_SUFFIX_PREFERRED}")
endif()
if(NOT DEFINED ARROW_MSVC_STATIC_LIB_SUFFIX)
if(MSVC)
set(ARROW_MSVC_STATIC_LIB_SUFFIX "_static")
else()
set(ARROW_MSVC_STATIC_LIB_SUFFIX "")
endif()
endif()
# Internal function.
#
# Set shared library name for ${base_name} to ${output_variable}.
#
# Example:
# arrow_build_shared_library_name(ARROW_SHARED_LIBRARY_NAME arrow)
# # -> ARROW_SHARED_LIBRARY_NAME=libarrow.so on Linux
# # -> ARROW_SHARED_LIBRARY_NAME=libarrow.dylib on macOS
# # -> ARROW_SHARED_LIBRARY_NAME=arrow.dll with MSVC on Windows
# # -> ARROW_SHARED_LIBRARY_NAME=libarrow.dll with MinGW on Windows
function(arrow_build_shared_library_name output_variable base_name)
set(${output_variable}
"${CMAKE_SHARED_LIBRARY_PREFIX}${base_name}${CMAKE_SHARED_LIBRARY_SUFFIX}"
PARENT_SCOPE)
endfunction()
# Internal function.
#
# Set import library name for ${base_name} to ${output_variable}.
# This is useful only for MSVC build. Import library is used only
# with MSVC build.
#
# Example:
# arrow_build_import_library_name(ARROW_IMPORT_LIBRARY_NAME arrow)
# # -> ARROW_IMPORT_LIBRARY_NAME=arrow on Linux (meaningless)
# # -> ARROW_IMPORT_LIBRARY_NAME=arrow on macOS (meaningless)
# # -> ARROW_IMPORT_LIBRARY_NAME=arrow.lib with MSVC on Windows
# # -> ARROW_IMPORT_LIBRARY_NAME=libarrow.dll.a with MinGW on Windows
function(arrow_build_import_library_name output_variable base_name)
set(${output_variable}
"${CMAKE_IMPORT_LIBRARY_PREFIX}${base_name}${CMAKE_IMPORT_LIBRARY_SUFFIX}"
PARENT_SCOPE)
endfunction()
# Internal function.
#
# Set static library name for ${base_name} to ${output_variable}.
#
# Example:
# arrow_build_static_library_name(ARROW_STATIC_LIBRARY_NAME arrow)
# # -> ARROW_STATIC_LIBRARY_NAME=libarrow.a on Linux
# # -> ARROW_STATIC_LIBRARY_NAME=libarrow.a on macOS
# # -> ARROW_STATIC_LIBRARY_NAME=arrow.lib with MSVC on Windows
# # -> ARROW_STATIC_LIBRARY_NAME=libarrow.dll.a with MinGW on Windows
function(arrow_build_static_library_name output_variable base_name)
set(
${output_variable}
"${CMAKE_STATIC_LIBRARY_PREFIX}${base_name}${ARROW_MSVC_STATIC_LIB_SUFFIX}${CMAKE_STATIC_LIBRARY_SUFFIX}"
PARENT_SCOPE)
endfunction()
# Internal function.
#
# Set macro value for ${macro_name} in ${header_content} to ${output_variable}.
#
# Example:
# arrow_extract_macro_value(version_major
# "ARROW_VERSION_MAJOR"
# "#define ARROW_VERSION_MAJOR 1.0.0")
# # -> version_major=1.0.0
function(arrow_extract_macro_value output_variable macro_name header_content)
string(REGEX MATCH "#define +${macro_name} +[^\r\n]+" macro_definition
"${header_content}")
string(REGEX
REPLACE "^#define +${macro_name} +(.+)$" "\\1" macro_value "${macro_definition}")
set(${output_variable} "${macro_value}" PARENT_SCOPE)
endfunction()
# Internal macro only for arrow_find_package.
#
# Find package in HOME.
macro(arrow_find_package_home)
find_path(${prefix}_include_dir "${header_path}"
PATHS "${home}"
PATH_SUFFIXES "include"
NO_DEFAULT_PATH)
set(include_dir "${${prefix}_include_dir}")
set(${prefix}_INCLUDE_DIR "${include_dir}" PARENT_SCOPE)
if(MSVC)
set(CMAKE_SHARED_LIBRARY_SUFFIXES_ORIGINAL ${CMAKE_FIND_LIBRARY_SUFFIXES})
# .dll isn't found by find_library with MSVC because .dll isn't included in
# CMAKE_FIND_LIBRARY_SUFFIXES.
list(APPEND CMAKE_FIND_LIBRARY_SUFFIXES "${CMAKE_SHARED_LIBRARY_SUFFIX}")
endif()
find_library(${prefix}_shared_lib
NAMES "${shared_lib_name}"
PATHS "${home}"
PATH_SUFFIXES ${ARROW_SEARCH_LIB_PATH_SUFFIXES}
NO_DEFAULT_PATH)
if(MSVC)
set(CMAKE_SHARED_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES_ORIGINAL})
endif()
set(shared_lib "${${prefix}_shared_lib}")
set(${prefix}_SHARED_LIB "${shared_lib}" PARENT_SCOPE)
if(shared_lib)
add_library(${target_shared} SHARED IMPORTED)
set_target_properties(${target_shared} PROPERTIES IMPORTED_LOCATION "${shared_lib}")
if(include_dir)
set_target_properties(${target_shared}
PROPERTIES INTERFACE_INCLUDE_DIRECTORIES "${include_dir}")
endif()
find_library(${prefix}_import_lib
NAMES "${import_lib_name}"
PATHS "${home}"
PATH_SUFFIXES ${ARROW_SEARCH_LIB_PATH_SUFFIXES}
NO_DEFAULT_PATH)
set(import_lib "${${prefix}_import_lib}")
set(${prefix}_IMPORT_LIB "${import_lib}" PARENT_SCOPE)
if(import_lib)
set_target_properties(${target_shared} PROPERTIES IMPORTED_IMPLIB "${import_lib}")
endif()
endif()
find_library(${prefix}_static_lib
NAMES "${static_lib_name}"
PATHS "${home}"
PATH_SUFFIXES ${ARROW_SEARCH_LIB_PATH_SUFFIXES}
NO_DEFAULT_PATH)
set(static_lib "${${prefix}_static_lib}")
set(${prefix}_STATIC_LIB "${static_lib}" PARENT_SCOPE)
if(static_lib)
add_library(${target_static} STATIC IMPORTED)
set_target_properties(${target_static} PROPERTIES IMPORTED_LOCATION "${static_lib}")
if(include_dir)
set_target_properties(${target_static}
PROPERTIES INTERFACE_INCLUDE_DIRECTORIES "${include_dir}")
endif()
endif()
endmacro()
# Internal macro only for arrow_find_package.
#
# Find package by CMake package configuration.
macro(arrow_find_package_cmake_package_configuration)
find_package(${cmake_package_name} CONFIG)
if(${cmake_package_name}_FOUND)
set(${prefix}_USE_CMAKE_PACKAGE_CONFIG TRUE PARENT_SCOPE)
if(TARGET ${target_shared})
foreach(suffix ${ARROW_CONFIG_SUFFIXES})
get_target_property(shared_lib ${target_shared} IMPORTED_LOCATION${suffix})
if(shared_lib)
# Remove shared library version:
# libarrow.so.100.0.0 -> libarrow.so
# Because ARROW_HOME and pkg-config approaches don't add
# shared library version.
string(REGEX
REPLACE "(${CMAKE_SHARED_LIBRARY_SUFFIX})[.0-9]+$" "\\1" shared_lib
"${shared_lib}")
set(${prefix}_SHARED_LIB "${shared_lib}" PARENT_SCOPE)
break()
endif()
endforeach()
endif()
if(TARGET ${target_static})
foreach(suffix ${ARROW_CONFIG_SUFFIXES})
get_target_property(static_lib ${target_static} IMPORTED_LOCATION${suffix})
if(static_lib)
set(${prefix}_STATIC_LIB "${static_lib}" PARENT_SCOPE)
break()
endif()
endforeach()
endif()
endif()
endmacro()
# Internal macro only for arrow_find_package.
#
# Find package by pkg-config.
macro(arrow_find_package_pkg_config)
pkg_check_modules(${prefix}_PC ${pkg_config_name})
if(${prefix}_PC_FOUND)
set(${prefix}_USE_PKG_CONFIG TRUE PARENT_SCOPE)
set(include_dir "${${prefix}_PC_INCLUDEDIR}")
set(lib_dir "${${prefix}_PC_LIBDIR}")
set(shared_lib_paths "${${prefix}_PC_LINK_LIBRARIES}")
# Use the first shared library path as the IMPORTED_LOCATION
# for ${target_shared}. This assumes that the first shared library
# path is the shared library path for this module.
list(GET shared_lib_paths 0 first_shared_lib_path)
# Use the remaining shared library paths as the INTERFACE_LINK_LIBRARIES
# for ${target_shared}. This assumes that the remaining shared library
# paths are dependency library paths for this module.
list(LENGTH shared_lib_paths n_shared_lib_paths)
if(n_shared_lib_paths LESS_EQUAL 1)
set(rest_shared_lib_paths)
else()
list(SUBLIST
shared_lib_paths
1
-1
rest_shared_lib_paths)
endif()
set(${prefix}_VERSION "${${prefix}_PC_VERSION}" PARENT_SCOPE)
set(${prefix}_INCLUDE_DIR "${include_dir}" PARENT_SCOPE)
set(${prefix}_SHARED_LIB "${first_shared_lib_path}" PARENT_SCOPE)
add_library(${target_shared} SHARED IMPORTED)
set_target_properties(${target_shared}
PROPERTIES INTERFACE_INCLUDE_DIRECTORIES
"${include_dir}"
INTERFACE_LINK_LIBRARIES
"${rest_shared_lib_paths}"
IMPORTED_LOCATION
"${first_shared_lib_path}")
get_target_property(shared_lib ${target_shared} IMPORTED_LOCATION)
find_library(${prefix}_static_lib
NAMES "${static_lib_name}"
PATHS "${lib_dir}"
NO_DEFAULT_PATH)
set(static_lib "${${prefix}_static_lib}")
set(${prefix}_STATIC_LIB "${static_lib}" PARENT_SCOPE)
if(static_lib)
add_library(${target_static} STATIC IMPORTED)
set_target_properties(${target_static}
PROPERTIES INTERFACE_INCLUDE_DIRECTORIES "${include_dir}"
IMPORTED_LOCATION "${static_lib}")
endif()
endif()
endmacro()
function(arrow_find_package
prefix
home
base_name
header_path
cmake_package_name
pkg_config_name)
arrow_build_shared_library_name(shared_lib_name ${base_name})
arrow_build_import_library_name(import_lib_name ${base_name})
arrow_build_static_library_name(static_lib_name ${base_name})
set(target_shared ${base_name}_shared)
set(target_static ${base_name}_static)
if(home)
arrow_find_package_home()
set(${prefix}_FIND_APPROACH "HOME: ${home}" PARENT_SCOPE)
else()
arrow_find_package_cmake_package_configuration()
if(${cmake_package_name}_FOUND)
set(${prefix}_FIND_APPROACH
"CMake package configuration: ${cmake_package_name}"
PARENT_SCOPE)
else()
arrow_find_package_pkg_config()
set(${prefix}_FIND_APPROACH "pkg-config: ${pkg_config_name}" PARENT_SCOPE)
endif()
endif()
if(NOT include_dir)
if(TARGET ${target_shared})
get_target_property(include_dir ${target_shared} INTERFACE_INCLUDE_DIRECTORIES)
elseif(TARGET ${target_static})
get_target_property(include_dir ${target_static} INTERFACE_INCLUDE_DIRECTORIES)
endif()
endif()
if(include_dir)
set(${prefix}_INCLUDE_DIR "${include_dir}" PARENT_SCOPE)
endif()
if(shared_lib)
get_filename_component(lib_dir "${shared_lib}" DIRECTORY)
elseif(static_lib)
get_filename_component(lib_dir "${static_lib}" DIRECTORY)
else()
set(lib_dir NOTFOUND)
endif()
set(${prefix}_LIB_DIR "${lib_dir}" PARENT_SCOPE)
# For backward compatibility
set(${prefix}_LIBS "${lib_dir}" PARENT_SCOPE)
endfunction()
if(NOT "$ENV{ARROW_HOME}" STREQUAL "")
file(TO_CMAKE_PATH "$ENV{ARROW_HOME}" ARROW_HOME)
endif()
arrow_find_package(ARROW
"${ARROW_HOME}"
arrow
arrow/api.h
Arrow
arrow)
if(ARROW_HOME)
if(ARROW_INCLUDE_DIR)
file(READ "${ARROW_INCLUDE_DIR}/arrow/util/config.h" ARROW_CONFIG_H_CONTENT)
arrow_extract_macro_value(ARROW_VERSION_MAJOR "ARROW_VERSION_MAJOR"
"${ARROW_CONFIG_H_CONTENT}")
arrow_extract_macro_value(ARROW_VERSION_MINOR "ARROW_VERSION_MINOR"
"${ARROW_CONFIG_H_CONTENT}")
arrow_extract_macro_value(ARROW_VERSION_PATCH "ARROW_VERSION_PATCH"
"${ARROW_CONFIG_H_CONTENT}")
if("${ARROW_VERSION_MAJOR}" STREQUAL ""
OR "${ARROW_VERSION_MINOR}" STREQUAL ""
OR "${ARROW_VERSION_PATCH}" STREQUAL "")
set(ARROW_VERSION "0.0.0")
else()
set(ARROW_VERSION
"${ARROW_VERSION_MAJOR}.${ARROW_VERSION_MINOR}.${ARROW_VERSION_PATCH}")
endif()
arrow_extract_macro_value(ARROW_SO_VERSION_QUOTED "ARROW_SO_VERSION"
"${ARROW_CONFIG_H_CONTENT}")
string(REGEX REPLACE "^\"(.+)\"$" "\\1" ARROW_SO_VERSION "${ARROW_SO_VERSION_QUOTED}")
arrow_extract_macro_value(ARROW_FULL_SO_VERSION_QUOTED "ARROW_FULL_SO_VERSION"
"${ARROW_CONFIG_H_CONTENT}")
string(REGEX
REPLACE "^\"(.+)\"$" "\\1" ARROW_FULL_SO_VERSION
"${ARROW_FULL_SO_VERSION_QUOTED}")
endif()
else()
if(ARROW_USE_CMAKE_PACKAGE_CONFIG)
find_package(Arrow CONFIG)
elseif(ARROW_USE_PKG_CONFIG)
pkg_get_variable(ARROW_SO_VERSION arrow so_version)
pkg_get_variable(ARROW_FULL_SO_VERSION arrow full_so_version)
endif()
endif()
set(ARROW_ABI_VERSION ${ARROW_SO_VERSION})
mark_as_advanced(ARROW_ABI_VERSION
ARROW_CONFIG_SUFFIXES
ARROW_FULL_SO_VERSION
ARROW_IMPORT_LIB
ARROW_INCLUDE_DIR
ARROW_LIBS
ARROW_LIB_DIR
ARROW_SEARCH_LIB_PATH_SUFFIXES
ARROW_SHARED_IMP_LIB
ARROW_SHARED_LIB
ARROW_SO_VERSION
ARROW_STATIC_LIB
ARROW_VERSION
ARROW_VERSION_MAJOR
ARROW_VERSION_MINOR
ARROW_VERSION_PATCH)
find_package_handle_standard_args(Arrow REQUIRED_VARS
# The first required variable is shown
# in the found message. So this list is
# not sorted alphabetically.
ARROW_INCLUDE_DIR
ARROW_LIB_DIR
ARROW_FULL_SO_VERSION
ARROW_SO_VERSION
VERSION_VAR
ARROW_VERSION)
set(ARROW_FOUND ${Arrow_FOUND})
if(Arrow_FOUND AND NOT Arrow_FIND_QUIETLY)
message(STATUS "Arrow version: ${ARROW_VERSION} (${ARROW_FIND_APPROACH})")
message(STATUS "Arrow SO and ABI version: ${ARROW_SO_VERSION}")
message(STATUS "Arrow full SO version: ${ARROW_FULL_SO_VERSION}")
message(STATUS "Found the Arrow core shared library: ${ARROW_SHARED_LIB}")
message(STATUS "Found the Arrow core import library: ${ARROW_IMPORT_LIB}")
message(STATUS "Found the Arrow core static library: ${ARROW_STATIC_LIB}")
endif()
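# A minimal, hypothetical consumer sketch for this module. The "arrow_reader" target,
# main.cpp and the module path are placeholders; ARROW_INCLUDE_DIR and the arrow_shared
# imported target are the names the module above defines when it locates Arrow.
#
#   list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/cmake/Modules")
#   set(ARROW_HOME "/opt/arrow")    # optional hint; the ARROW_HOME environment variable is also honoured
#   find_package(Arrow REQUIRED)
#   add_executable(arrow_reader main.cpp)
#   target_include_directories(arrow_reader PRIVATE ${ARROW_INCLUDE_DIR})
#   target_link_libraries(arrow_reader PRIVATE arrow_shared)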

View File

@ -1,394 +0,0 @@
# Distributed under the OSI-approved BSD 3-Clause License. See accompanying
# file Copyright.txt or https://cmake.org/licensing for details.
#.rst:
# FindICU
# -------
#
# Find the International Components for Unicode (ICU) libraries and
# programs.
#
# This module supports multiple components.
# Components can include any of: ``data``, ``i18n``, ``io``, ``le``,
# ``lx``, ``test``, ``tu`` and ``uc``.
#
# Note that on Windows ``data`` is named ``dt`` and ``i18n`` is named
# ``in``; any of the names may be used, and the appropriate
# platform-specific library name will be automatically selected.
#
# This module reports information about the ICU installation in
# several variables. General variables::
#
# ICU_VERSION - ICU release version
# ICU_FOUND - true if the main programs and libraries were found
# ICU_LIBRARIES - component libraries to be linked
# ICU_INCLUDE_DIRS - the directories containing the ICU headers
#
# Imported targets::
#
# ICU::<C>
#
# Where ``<C>`` is the name of an ICU component, for example
# ``ICU::i18n``.
#
# ICU programs are reported in::
#
# ICU_GENCNVAL_EXECUTABLE - path to gencnval executable
# ICU_ICUINFO_EXECUTABLE - path to icuinfo executable
# ICU_GENBRK_EXECUTABLE - path to genbrk executable
# ICU_ICU-CONFIG_EXECUTABLE - path to icu-config executable
# ICU_GENRB_EXECUTABLE - path to genrb executable
# ICU_GENDICT_EXECUTABLE - path to gendict executable
# ICU_DERB_EXECUTABLE - path to derb executable
# ICU_PKGDATA_EXECUTABLE - path to pkgdata executable
# ICU_UCONV_EXECUTABLE - path to uconv executable
# ICU_GENCFU_EXECUTABLE - path to gencfu executable
# ICU_MAKECONV_EXECUTABLE - path to makeconv executable
# ICU_GENNORM2_EXECUTABLE - path to gennorm2 executable
# ICU_GENCCODE_EXECUTABLE - path to genccode executable
# ICU_GENSPREP_EXECUTABLE - path to gensprep executable
# ICU_ICUPKG_EXECUTABLE - path to icupkg executable
# ICU_GENCMN_EXECUTABLE - path to gencmn executable
#
# ICU component libraries are reported in::
#
# ICU_<C>_FOUND - ON if component was found
# ICU_<C>_LIBRARIES - libraries for component
#
# ICU datafiles are reported in::
#
# ICU_MAKEFILE_INC - Makefile.inc
# ICU_PKGDATA_INC - pkgdata.inc
#
# Note that ``<C>`` is the uppercased name of the component.
#
# This module reads hints about search results from::
#
# ICU_ROOT - the root of the ICU installation
#
# The environment variable ``ICU_ROOT`` may also be used; the
# ICU_ROOT variable takes precedence.
#
# The following cache variables may also be set::
#
# ICU_<P>_EXECUTABLE - the path to executable <P>
# ICU_INCLUDE_DIR - the directory containing the ICU headers
# ICU_<C>_LIBRARY - the library for component <C>
#
# .. note::
#
# In most cases none of the above variables will require setting,
# unless multiple ICU versions are available and a specific version
# is required.
#
# Other variables one may set to control this module are::
#
# ICU_DEBUG - Set to ON to enable debug output from FindICU.
# Written by Roger Leigh <rleigh@codelibre.net>
set(icu_programs
gencnval
icuinfo
genbrk
icu-config
genrb
gendict
derb
pkgdata
uconv
gencfu
makeconv
gennorm2
genccode
gensprep
icupkg
gencmn)
set(icu_data
Makefile.inc
pkgdata.inc)
# The ICU checks are contained in a function due to the large number
# of temporary variables needed.
function(_ICU_FIND)
# Set up search paths, taking compiler into account. Search ICU_ROOT,
# with ICU_ROOT in the environment as a fallback if unset.
if(ICU_ROOT)
list(APPEND icu_roots "${ICU_ROOT}")
else()
if(NOT "$ENV{ICU_ROOT}" STREQUAL "")
file(TO_CMAKE_PATH "$ENV{ICU_ROOT}" NATIVE_PATH)
list(APPEND icu_roots "${NATIVE_PATH}")
set(ICU_ROOT "${NATIVE_PATH}"
CACHE PATH "Location of the ICU installation" FORCE)
endif()
endif()
# Find include directory
list(APPEND icu_include_suffixes "include")
find_path(ICU_INCLUDE_DIR
NAMES "unicode/utypes.h"
HINTS ${icu_roots}
PATH_SUFFIXES ${icu_include_suffixes}
DOC "ICU include directory")
set(ICU_INCLUDE_DIR "${ICU_INCLUDE_DIR}" PARENT_SCOPE)
# Get version
if(ICU_INCLUDE_DIR AND EXISTS "${ICU_INCLUDE_DIR}/unicode/uvernum.h")
file(STRINGS "${ICU_INCLUDE_DIR}/unicode/uvernum.h" icu_header_str
REGEX "^#define[\t ]+U_ICU_VERSION[\t ]+\".*\".*")
string(REGEX REPLACE "^#define[\t ]+U_ICU_VERSION[\t ]+\"([^ \\n]*)\".*"
"\\1" icu_version_string "${icu_header_str}")
set(ICU_VERSION "${icu_version_string}")
set(ICU_VERSION "${icu_version_string}" PARENT_SCOPE)
unset(icu_header_str)
unset(icu_version_string)
endif()
if(CMAKE_SIZEOF_VOID_P EQUAL 8)
# 64-bit binary directory
set(_bin64 "bin64")
# 64-bit library directory
set(_lib64 "lib64")
endif()
# Find all ICU programs
list(APPEND icu_binary_suffixes "${_bin64}" "bin")
foreach(program ${icu_programs})
string(TOUPPER "${program}" program_upcase)
set(cache_var "ICU_${program_upcase}_EXECUTABLE")
set(program_var "ICU_${program_upcase}_EXECUTABLE")
find_program("${cache_var}" "${program}"
HINTS ${icu_roots}
PATH_SUFFIXES ${icu_binary_suffixes}
DOC "ICU ${program} executable")
mark_as_advanced(cache_var)
set("${program_var}" "${${cache_var}}" PARENT_SCOPE)
endforeach()
# Find all ICU libraries
list(APPEND icu_library_suffixes "${_lib64}" "lib")
set(ICU_REQUIRED_LIBS_FOUND ON)
foreach(component ${ICU_FIND_COMPONENTS})
string(TOUPPER "${component}" component_upcase)
set(component_cache "ICU_${component_upcase}_LIBRARY")
set(component_cache_release "${component_cache}_RELEASE")
set(component_cache_debug "${component_cache}_DEBUG")
set(component_found "${component_upcase}_FOUND")
set(component_libnames "icu${component}")
set(component_debug_libnames "icu${component}d")
# Special case deliberate library naming mismatches between Unix
# and Windows builds
unset(component_libnames)
unset(component_debug_libnames)
list(APPEND component_libnames "icu${component}")
list(APPEND component_debug_libnames "icu${component}d")
if(component STREQUAL "data")
list(APPEND component_libnames "icudt")
# Note there is no debug variant at present
list(APPEND component_debug_libnames "icudtd")
endif()
if(component STREQUAL "dt")
list(APPEND component_libnames "icudata")
# Note there is no debug variant at present
list(APPEND component_debug_libnames "icudatad")
endif()
if(component STREQUAL "i18n")
list(APPEND component_libnames "icuin")
list(APPEND component_debug_libnames "icuind")
endif()
if(component STREQUAL "in")
list(APPEND component_libnames "icui18n")
list(APPEND component_debug_libnames "icui18nd")
endif()
find_library("${component_cache_release}" ${component_libnames}
HINTS ${icu_roots}
PATH_SUFFIXES ${icu_library_suffixes}
DOC "ICU ${component} library (release)")
find_library("${component_cache_debug}" ${component_debug_libnames}
HINTS ${icu_roots}
PATH_SUFFIXES ${icu_library_suffixes}
DOC "ICU ${component} library (debug)")
include(SelectLibraryConfigurations)
select_library_configurations(ICU_${component_upcase})
mark_as_advanced("${component_cache_release}" "${component_cache_debug}")
if(${component_cache})
set("${component_found}" ON)
list(APPEND ICU_LIBRARY "${${component_cache}}")
endif()
mark_as_advanced("${component_found}")
set("${component_cache}" "${${component_cache}}" PARENT_SCOPE)
set("${component_found}" "${${component_found}}" PARENT_SCOPE)
if(${component_found})
if (ICU_FIND_REQUIRED_${component})
list(APPEND ICU_LIBS_FOUND "${component} (required)")
else()
list(APPEND ICU_LIBS_FOUND "${component} (optional)")
endif()
else()
if (ICU_FIND_REQUIRED_${component})
set(ICU_REQUIRED_LIBS_FOUND OFF)
list(APPEND ICU_LIBS_NOTFOUND "${component} (required)")
else()
list(APPEND ICU_LIBS_NOTFOUND "${component} (optional)")
endif()
endif()
endforeach()
set(_ICU_REQUIRED_LIBS_FOUND "${ICU_REQUIRED_LIBS_FOUND}" PARENT_SCOPE)
set(ICU_LIBRARY "${ICU_LIBRARY}" PARENT_SCOPE)
# Find all ICU data files
if(CMAKE_LIBRARY_ARCHITECTURE)
list(APPEND icu_data_suffixes
"${_lib64}/${CMAKE_LIBRARY_ARCHITECTURE}/icu/${ICU_VERSION}"
"lib/${CMAKE_LIBRARY_ARCHITECTURE}/icu/${ICU_VERSION}"
"${_lib64}/${CMAKE_LIBRARY_ARCHITECTURE}/icu"
"lib/${CMAKE_LIBRARY_ARCHITECTURE}/icu")
endif()
list(APPEND icu_data_suffixes
"${_lib64}/icu/${ICU_VERSION}"
"lib/icu/${ICU_VERSION}"
"${_lib64}/icu"
"lib/icu")
foreach(data ${icu_data})
string(TOUPPER "${data}" data_upcase)
string(REPLACE "." "_" data_upcase "${data_upcase}")
set(cache_var "ICU_${data_upcase}")
set(data_var "ICU_${data_upcase}")
find_file("${cache_var}" "${data}"
HINTS ${icu_roots}
PATH_SUFFIXES ${icu_data_suffixes}
DOC "ICU ${data} data file")
mark_as_advanced(cache_var)
set("${data_var}" "${${cache_var}}" PARENT_SCOPE)
endforeach()
if(NOT ICU_FIND_QUIETLY)
if(ICU_LIBS_FOUND)
message(STATUS "Found the following ICU libraries:")
foreach(found ${ICU_LIBS_FOUND})
message(STATUS " ${found}")
endforeach()
endif()
if(ICU_LIBS_NOTFOUND)
message(STATUS "The following ICU libraries were not found:")
foreach(notfound ${ICU_LIBS_NOTFOUND})
message(STATUS " ${notfound}")
endforeach()
endif()
endif()
if(ICU_DEBUG)
message(STATUS "--------FindICU.cmake search debug--------")
message(STATUS "ICU binary path search order: ${icu_roots}")
message(STATUS "ICU include path search order: ${icu_roots}")
message(STATUS "ICU library path search order: ${icu_roots}")
message(STATUS "----------------")
endif()
endfunction()
_ICU_FIND()
include(FindPackageHandleStandardArgs)
FIND_PACKAGE_HANDLE_STANDARD_ARGS(ICU
FOUND_VAR ICU_FOUND
REQUIRED_VARS ICU_INCLUDE_DIR
ICU_LIBRARY
_ICU_REQUIRED_LIBS_FOUND
VERSION_VAR ICU_VERSION
FAIL_MESSAGE "Failed to find all ICU components")
unset(_ICU_REQUIRED_LIBS_FOUND)
if(ICU_FOUND)
set(ICU_INCLUDE_DIRS "${ICU_INCLUDE_DIR}")
set(ICU_LIBRARIES "${ICU_LIBRARY}")
foreach(_ICU_component ${ICU_FIND_COMPONENTS})
string(TOUPPER "${_ICU_component}" _ICU_component_upcase)
set(_ICU_component_cache "ICU_${_ICU_component_upcase}_LIBRARY")
set(_ICU_component_cache_release "ICU_${_ICU_component_upcase}_LIBRARY_RELEASE")
set(_ICU_component_cache_debug "ICU_${_ICU_component_upcase}_LIBRARY_DEBUG")
set(_ICU_component_lib "ICU_${_ICU_component_upcase}_LIBRARIES")
set(_ICU_component_found "${_ICU_component_upcase}_FOUND")
set(_ICU_imported_target "ICU::${_ICU_component}")
if(${_ICU_component_found})
set("${_ICU_component_lib}" "${${_ICU_component_cache}}")
if(NOT TARGET ${_ICU_imported_target})
add_library(${_ICU_imported_target} UNKNOWN IMPORTED)
if(ICU_INCLUDE_DIR)
set_target_properties(${_ICU_imported_target} PROPERTIES
INTERFACE_INCLUDE_DIRECTORIES "${ICU_INCLUDE_DIR}")
endif()
if(EXISTS "${${_ICU_component_cache}}")
set_target_properties(${_ICU_imported_target} PROPERTIES
IMPORTED_LINK_INTERFACE_LANGUAGES "CXX"
IMPORTED_LOCATION "${${_ICU_component_cache}}")
endif()
if(EXISTS "${${_ICU_component_cache_release}}")
set_property(TARGET ${_ICU_imported_target} APPEND PROPERTY
IMPORTED_CONFIGURATIONS RELEASE)
set_target_properties(${_ICU_imported_target} PROPERTIES
IMPORTED_LINK_INTERFACE_LANGUAGES_RELEASE "CXX"
IMPORTED_LOCATION_RELEASE "${${_ICU_component_cache_release}}")
endif()
if(EXISTS "${${_ICU_component_cache_debug}}")
set_property(TARGET ${_ICU_imported_target} APPEND PROPERTY
IMPORTED_CONFIGURATIONS DEBUG)
set_target_properties(${_ICU_imported_target} PROPERTIES
IMPORTED_LINK_INTERFACE_LANGUAGES_DEBUG "CXX"
IMPORTED_LOCATION_DEBUG "${${_ICU_component_cache_debug}}")
endif()
endif()
endif()
unset(_ICU_component_upcase)
unset(_ICU_component_cache)
unset(_ICU_component_lib)
unset(_ICU_component_found)
unset(_ICU_imported_target)
endforeach()
endif()
if(ICU_DEBUG)
message(STATUS "--------FindICU.cmake results debug--------")
message(STATUS "ICU found: ${ICU_FOUND}")
message(STATUS "ICU_VERSION number: ${ICU_VERSION}")
message(STATUS "ICU_ROOT directory: ${ICU_ROOT}")
message(STATUS "ICU_INCLUDE_DIR directory: ${ICU_INCLUDE_DIR}")
message(STATUS "ICU_LIBRARIES: ${ICU_LIBRARIES}")
foreach(program IN LISTS icu_programs)
string(TOUPPER "${program}" program_upcase)
set(program_lib "ICU_${program_upcase}_EXECUTABLE")
message(STATUS "${program} program: ${${program_lib}}")
unset(program_upcase)
unset(program_lib)
endforeach()
foreach(data IN LISTS icu_data)
string(TOUPPER "${data}" data_upcase)
string(REPLACE "." "_" data_upcase "${data_upcase}")
set(data_lib "ICU_${data_upcase}")
message(STATUS "${data} data: ${${data_lib}}")
unset(data_upcase)
unset(data_lib)
endforeach()
foreach(component IN LISTS ICU_FIND_COMPONENTS)
string(TOUPPER "${component}" component_upcase)
set(component_lib "ICU_${component_upcase}_LIBRARIES")
set(component_found "${component_upcase}_FOUND")
message(STATUS "${component} library found: ${${component_found}}")
message(STATUS "${component} library: ${${component_lib}}")
unset(component_upcase)
unset(component_lib)
unset(component_found)
endforeach()
message(STATUS "----------------")
endif()
unset(icu_programs)
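# A short usage sketch, assuming an ICU installation is discoverable (or ICU_ROOT is set);
# "my_app" and the chosen components are placeholders, while the ICU::<C> imported targets
# and ICU_VERSION come from the module above.
#
#   find_package(ICU COMPONENTS uc i18n data REQUIRED)
#   add_executable(my_app main.cpp)
#   target_link_libraries(my_app PRIVATE ICU::uc ICU::i18n ICU::data)
#   message(STATUS "Building against ICU ${ICU_VERSION}")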

View File

@ -1,55 +0,0 @@
# Find OpenLDAP libraries.
#
# Can be configured with:
# OPENLDAP_ROOT_DIR - path to the OpenLDAP installation prefix
# OPENLDAP_USE_STATIC_LIBS - look for static version of the libraries
# OPENLDAP_USE_REENTRANT_LIBS - look for thread-safe version of the libraries
#
# Sets values of:
# OPENLDAP_FOUND - TRUE if found
# OPENLDAP_INCLUDE_DIRS - paths to the include directories
# OPENLDAP_LIBRARIES - paths to the libldap and liblber libraries
# OPENLDAP_LDAP_LIBRARY - paths to the libldap library
# OPENLDAP_LBER_LIBRARY - paths to the liblber library
#
if(OPENLDAP_USE_STATIC_LIBS)
set(_orig_CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES})
if(WIN32)
set(CMAKE_FIND_LIBRARY_SUFFIXES ".lib" ".a" ${CMAKE_FIND_LIBRARY_SUFFIXES})
else()
set(CMAKE_FIND_LIBRARY_SUFFIXES ".a")
endif()
endif()
set(_r_suffix)
if(OPENLDAP_USE_REENTRANT_LIBS)
set(_r_suffix "_r")
endif()
if(OPENLDAP_ROOT_DIR)
find_path(OPENLDAP_INCLUDE_DIRS NAMES "ldap.h" "lber.h" PATHS "${OPENLDAP_ROOT_DIR}" PATH_SUFFIXES "include" NO_DEFAULT_PATH)
find_library(OPENLDAP_LDAP_LIBRARY NAMES "ldap${_r_suffix}" PATHS "${OPENLDAP_ROOT_DIR}" PATH_SUFFIXES "lib" NO_DEFAULT_PATH)
find_library(OPENLDAP_LBER_LIBRARY NAMES "lber" PATHS "${OPENLDAP_ROOT_DIR}" PATH_SUFFIXES "lib" NO_DEFAULT_PATH)
else()
find_path(OPENLDAP_INCLUDE_DIRS NAMES "ldap.h" "lber.h")
find_library(OPENLDAP_LDAP_LIBRARY NAMES "ldap${_r_suffix}")
find_library(OPENLDAP_LBER_LIBRARY NAMES "lber")
endif()
unset(_r_suffix)
set(OPENLDAP_LIBRARIES ${OPENLDAP_LDAP_LIBRARY} ${OPENLDAP_LBER_LIBRARY})
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(
OpenLDAP DEFAULT_MSG
OPENLDAP_INCLUDE_DIRS OPENLDAP_LDAP_LIBRARY OPENLDAP_LBER_LIBRARY
)
mark_as_advanced(OPENLDAP_INCLUDE_DIRS OPENLDAP_LIBRARIES OPENLDAP_LDAP_LIBRARY OPENLDAP_LBER_LIBRARY)
if(OPENLDAP_USE_STATIC_LIBS)
set(CMAKE_FIND_LIBRARY_SUFFIXES ${_orig_CMAKE_FIND_LIBRARY_SUFFIXES})
unset(_orig_CMAKE_FIND_LIBRARY_SUFFIXES)
endif()
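# A usage sketch under the assumption that OpenLDAP lives under a custom prefix;
# "ldap_client" and the prefix are placeholders, the OPENLDAP_* variables are set above.
#
#   set(OPENLDAP_ROOT_DIR "/usr/local/openldap")
#   set(OPENLDAP_USE_REENTRANT_LIBS 1)
#   find_package(OpenLDAP REQUIRED)
#   add_executable(ldap_client main.cpp)
#   target_include_directories(ldap_client PRIVATE ${OPENLDAP_INCLUDE_DIRS})
#   target_link_libraries(ldap_client PRIVATE ${OPENLDAP_LIBRARIES})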

View File

@ -1,132 +0,0 @@
# https://github.com/apache/arrow/blob/master/cpp/cmake_modules/FindParquet.cmake
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# - Find Parquet (parquet/api/reader.h, libparquet.a, libparquet.so)
#
# This module requires Arrow from which it uses
# arrow_find_package()
#
# This module defines
# PARQUET_FOUND, whether Parquet has been found
# PARQUET_IMPORT_LIB, path to libparquet's import library (Windows only)
# PARQUET_INCLUDE_DIR, directory containing headers
# PARQUET_LIBS, deprecated. Use PARQUET_LIB_DIR instead
# PARQUET_LIB_DIR, directory containing Parquet libraries
# PARQUET_SHARED_IMP_LIB, deprecated. Use PARQUET_IMPORT_LIB instead
# PARQUET_SHARED_LIB, path to libparquet's shared library
# PARQUET_SO_VERSION, shared object version of found Parquet such as "100"
# PARQUET_STATIC_LIB, path to libparquet.a
if(DEFINED PARQUET_FOUND)
return()
endif()
set(find_package_arguments)
if(${CMAKE_FIND_PACKAGE_NAME}_FIND_VERSION)
list(APPEND find_package_arguments "${${CMAKE_FIND_PACKAGE_NAME}_FIND_VERSION}")
endif()
if(${CMAKE_FIND_PACKAGE_NAME}_FIND_REQUIRED)
list(APPEND find_package_arguments REQUIRED)
endif()
if(${CMAKE_FIND_PACKAGE_NAME}_FIND_QUIETLY)
list(APPEND find_package_arguments QUIET)
endif()
find_package(Arrow ${find_package_arguments})
if(NOT "$ENV{PARQUET_HOME}" STREQUAL "")
file(TO_CMAKE_PATH "$ENV{PARQUET_HOME}" PARQUET_HOME)
endif()
if((NOT PARQUET_HOME) AND ARROW_HOME)
set(PARQUET_HOME ${ARROW_HOME})
endif()
if(ARROW_FOUND)
arrow_find_package(PARQUET
"${PARQUET_HOME}"
parquet
parquet/api/reader.h
Parquet
parquet)
if(PARQUET_HOME)
if(PARQUET_INCLUDE_DIR)
file(READ "${PARQUET_INCLUDE_DIR}/parquet/parquet_version.h"
PARQUET_VERSION_H_CONTENT)
arrow_extract_macro_value(PARQUET_VERSION_MAJOR "PARQUET_VERSION_MAJOR"
"${PARQUET_VERSION_H_CONTENT}")
arrow_extract_macro_value(PARQUET_VERSION_MINOR "PARQUET_VERSION_MINOR"
"${PARQUET_VERSION_H_CONTENT}")
arrow_extract_macro_value(PARQUET_VERSION_PATCH "PARQUET_VERSION_PATCH"
"${PARQUET_VERSION_H_CONTENT}")
if("${PARQUET_VERSION_MAJOR}" STREQUAL ""
OR "${PARQUET_VERSION_MINOR}" STREQUAL ""
OR "${PARQUET_VERSION_PATCH}" STREQUAL "")
set(PARQUET_VERSION "0.0.0")
else()
set(PARQUET_VERSION
"${PARQUET_VERSION_MAJOR}.${PARQUET_VERSION_MINOR}.${PARQUET_VERSION_PATCH}")
endif()
arrow_extract_macro_value(PARQUET_SO_VERSION_QUOTED "PARQUET_SO_VERSION"
"${PARQUET_VERSION_H_CONTENT}")
string(REGEX
REPLACE "^\"(.+)\"$" "\\1" PARQUET_SO_VERSION "${PARQUET_SO_VERSION_QUOTED}")
arrow_extract_macro_value(PARQUET_FULL_SO_VERSION_QUOTED "PARQUET_FULL_SO_VERSION"
"${PARQUET_VERSION_H_CONTENT}")
string(REGEX
REPLACE "^\"(.+)\"$" "\\1" PARQUET_FULL_SO_VERSION
"${PARQUET_FULL_SO_VERSION_QUOTED}")
endif()
else()
if(PARQUET_USE_CMAKE_PACKAGE_CONFIG)
find_package(Parquet CONFIG)
elseif(PARQUET_USE_PKG_CONFIG)
pkg_get_variable(PARQUET_SO_VERSION parquet so_version)
pkg_get_variable(PARQUET_FULL_SO_VERSION parquet full_so_version)
endif()
endif()
set(PARQUET_ABI_VERSION "${PARQUET_SO_VERSION}")
endif()
mark_as_advanced(PARQUET_ABI_VERSION
PARQUET_IMPORT_LIB
PARQUET_INCLUDE_DIR
PARQUET_LIBS
PARQUET_LIB_DIR
PARQUET_SHARED_IMP_LIB
PARQUET_SHARED_LIB
PARQUET_SO_VERSION
PARQUET_STATIC_LIB
PARQUET_VERSION)
find_package_handle_standard_args(Parquet
REQUIRED_VARS
PARQUET_INCLUDE_DIR
PARQUET_LIB_DIR
PARQUET_SO_VERSION
VERSION_VAR
PARQUET_VERSION)
set(PARQUET_FOUND ${Parquet_FOUND})
if(Parquet_FOUND AND NOT Parquet_FIND_QUIETLY)
message(STATUS "Parquet version: ${PARQUET_VERSION} (${PARQUET_FIND_APPROACH})")
message(STATUS "Found the Parquet shared library: ${PARQUET_SHARED_LIB}")
message(STATUS "Found the Parquet import library: ${PARQUET_IMPORT_LIB}")
message(STATUS "Found the Parquet static library: ${PARQUET_STATIC_LIB}")
endif()
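# A hypothetical consumer sketch: find_package(Parquet) resolves Arrow internally through
# arrow_find_package(), so a single call is enough. "parquet_tool" and main.cpp are
# placeholders; parquet_shared follows the <base_name>_shared naming used by FindArrow.
#
#   find_package(Parquet REQUIRED)
#   add_executable(parquet_tool main.cpp)
#   target_include_directories(parquet_tool PRIVATE ${PARQUET_INCLUDE_DIR})
#   target_link_libraries(parquet_tool PRIVATE parquet_shared)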

View File

@ -1,44 +0,0 @@
# - Try to find cityhash headers and libraries.
#
# Usage of this module as follows:
#
# find_package(cityhash)
#
# Variables used by this module; they can change the default behaviour and need
# to be set before calling find_package:
#
# CITYHASH_ROOT_DIR Set this variable to the root installation of
# cityhash if the module has problems finding
# the proper installation path.
#
# Variables defined by this module:
#
# CITYHASH_FOUND System has cityhash libs/headers
# CITYHASH_LIBRARIES The cityhash library/libraries
# CITYHASH_INCLUDE_DIR The location of cityhash headers
find_path(CITYHASH_ROOT_DIR
NAMES include/city.h
)
find_library(CITYHASH_LIBRARIES
NAMES cityhash
PATHS ${CITYHASH_ROOT_DIR}/lib ${CITYHASH_LIBRARIES_PATHS}
)
find_path(CITYHASH_INCLUDE_DIR
NAMES city.h
PATHS ${CITYHASH_ROOT_DIR}/include ${CITYHASH_INCLUDE_PATHS}
)
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(cityhash DEFAULT_MSG
CITYHASH_LIBRARIES
CITYHASH_INCLUDE_DIR
)
mark_as_advanced(
CITYHASH_ROOT_DIR
CITYHASH_LIBRARIES
CITYHASH_INCLUDE_DIR
)
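# A usage sketch for this module (the nearly identical double-conversion and farmhash
# modules below follow the same pattern); "my_target" and the prefix are placeholders.
#
#   set(CITYHASH_ROOT_DIR "/opt/cityhash")   # only needed for a non-standard prefix
#   find_package(cityhash REQUIRED)
#   target_include_directories(my_target PRIVATE ${CITYHASH_INCLUDE_DIR})
#   target_link_libraries(my_target PRIVATE ${CITYHASH_LIBRARIES})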

View File

@ -1,44 +0,0 @@
# - Try to find double-conversion headers and libraries.
#
# Usage of this module as follows:
#
# find_package(double-conversion)
#
# Variables used by this module; they can change the default behaviour and need
# to be set before calling find_package:
#
# DOUBLE_CONVERSION_ROOT_DIR Set this variable to the root installation of
# double-conversion if the module has problems finding
# the proper installation path.
#
# Variables defined by this module:
#
# DOUBLE_CONVERSION_FOUND System has double-conversion libs/headers
# DOUBLE_CONVERSION_LIBRARIES The double-conversion library/libraries
# DOUBLE_CONVERSION_INCLUDE_DIR The location of double-conversion headers
find_path(DOUBLE_CONVERSION_ROOT_DIR
NAMES include/double-conversion/double-conversion.h
)
find_library(DOUBLE_CONVERSION_LIBRARIES
NAMES double-conversion
    PATHS ${DOUBLE_CONVERSION_ROOT_DIR}/lib ${DOUBLE_CONVERSION_LIBRARIES_PATHS}
)
find_path(DOUBLE_CONVERSION_INCLUDE_DIR
NAMES double-conversion/double-conversion.h
PATHS ${DOUBLE_CONVERSION_ROOT_DIR}/include ${DOUBLE_CONVERSION_INCLUDE_PATHS}
)
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(double_conversion DEFAULT_MSG
DOUBLE_CONVERSION_LIBRARIES
DOUBLE_CONVERSION_INCLUDE_DIR
)
mark_as_advanced(
DOUBLE_CONVERSION_ROOT_DIR
DOUBLE_CONVERSION_LIBRARIES
DOUBLE_CONVERSION_INCLUDE_DIR
)

View File

@ -1,44 +0,0 @@
# - Try to find farmhash headers and libraries.
#
# Usage of this module as follows:
#
# find_package(farmhash)
#
# Variables used by this module; they can change the default behaviour and need
# to be set before calling find_package:
#
# FARMHASH_ROOT_DIR Set this variable to the root installation of
# farmhash if the module has problems finding
# the proper installation path.
#
# Variables defined by this module:
#
# FARMHASH_FOUND System has farmhash libs/headers
# FARMHASH_LIBRARIES The farmhash library/libraries
# FARMHASH_INCLUDE_DIR The location of farmhash headers
find_path(FARMHASH_ROOT_DIR
NAMES include/farmhash.h
)
find_library(FARMHASH_LIBRARIES
NAMES farmhash
PATHS ${FARMHASH_ROOT_DIR}/lib ${FARMHASH_LIBRARIES_PATHS}
)
find_path(FARMHASH_INCLUDE_DIR
NAMES farmhash.h
PATHS ${FARMHASH_ROOT_DIR}/include ${FARMHASH_INCLUDE_PATHS}
)
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(farmhash DEFAULT_MSG
FARMHASH_LIBRARIES
FARMHASH_INCLUDE_DIR
)
mark_as_advanced(
FARMHASH_ROOT_DIR
FARMHASH_LIBRARIES
FARMHASH_INCLUDE_DIR
)

View File

@ -1,337 +0,0 @@
#[[
Defines the following variables:
``gRPC_FOUND``
Whether the gRPC framework is found
``gRPC_INCLUDE_DIRS``
The include directories of the gRPC framework, including the include directories of the C++ wrapper.
``gRPC_LIBRARIES``
The libraries of the gRPC framework.
``gRPC_CPP_PLUGIN``
The plugin for generating gRPC client and server C++ stubs from `.proto` files
``gRPC_PYTHON_PLUGIN``
The plugin for generating gRPC client and server Python stubs from `.proto` files
The following :prop_tgt:`IMPORTED` targets are also defined:
``grpc++``
``grpc++_unsecure``
``grpc_cpp_plugin``
``grpc_python_plugin``
Set the following variables to adjust the behaviour of this script:
``gRPC_USE_UNSECURE_LIBRARIES``
  If set, gRPC_LIBRARIES will be filled with the unsecure versions of the libraries (i.e. without SSL)
  instead of the secure ones.
``gRPC_DEBUG``
  If set, debug messages will be printed.
Add custom commands to process ``.proto`` files to C++::
protobuf_generate_grpc_cpp(<SRCS> <HDRS>
[DESCRIPTORS <DESC>] [EXPORT_MACRO <MACRO>] [<ARGN>...])
``SRCS``
Variable to define with autogenerated source files
``HDRS``
Variable to define with autogenerated header files
``DESCRIPTORS``
Variable to define with autogenerated descriptor files, if requested.
``EXPORT_MACRO``
is a macro which should expand to ``__declspec(dllexport)`` or
``__declspec(dllimport)`` depending on what is being compiled.
``ARGN``
``.proto`` files
#]]
# Function to generate C++ files from .proto files.
# This function is a modified version of the function PROTOBUF_GENERATE_CPP() copied from https://github.com/Kitware/CMake/blob/master/Modules/FindProtobuf.cmake.
function(PROTOBUF_GENERATE_GRPC_CPP SRCS HDRS)
cmake_parse_arguments(protobuf_generate_grpc_cpp "" "EXPORT_MACRO;DESCRIPTORS" "" ${ARGN})
set(_proto_files "${protobuf_generate_grpc_cpp_UNPARSED_ARGUMENTS}")
if(NOT _proto_files)
message(SEND_ERROR "Error: PROTOBUF_GENERATE_GRPC_CPP() called without any proto files")
return()
endif()
if(PROTOBUF_GENERATE_GRPC_CPP_APPEND_PATH)
set(_append_arg APPEND_PATH)
endif()
if(protobuf_generate_grpc_cpp_DESCRIPTORS)
set(_descriptors DESCRIPTORS)
endif()
if(DEFINED PROTOBUF_IMPORT_DIRS AND NOT DEFINED Protobuf_IMPORT_DIRS)
set(Protobuf_IMPORT_DIRS "${PROTOBUF_IMPORT_DIRS}")
endif()
if(DEFINED Protobuf_IMPORT_DIRS)
set(_import_arg IMPORT_DIRS ${Protobuf_IMPORT_DIRS})
endif()
set(_outvar)
protobuf_generate_grpc(${_append_arg} ${_descriptors} LANGUAGE cpp EXPORT_MACRO ${protobuf_generate_cpp_EXPORT_MACRO} OUT_VAR _outvar ${_import_arg} PROTOS ${_proto_files})
set(${SRCS})
set(${HDRS})
if(protobuf_generate_grpc_cpp_DESCRIPTORS)
set(${protobuf_generate_grpc_cpp_DESCRIPTORS})
endif()
foreach(_file ${_outvar})
if(_file MATCHES "cc$")
list(APPEND ${SRCS} ${_file})
elseif(_file MATCHES "desc$")
list(APPEND ${protobuf_generate_grpc_cpp_DESCRIPTORS} ${_file})
else()
list(APPEND ${HDRS} ${_file})
endif()
endforeach()
set(${SRCS} ${${SRCS}} PARENT_SCOPE)
set(${HDRS} ${${HDRS}} PARENT_SCOPE)
if(protobuf_generate_grpc_cpp_DESCRIPTORS)
set(${protobuf_generate_grpc_cpp_DESCRIPTORS} "${${protobuf_generate_grpc_cpp_DESCRIPTORS}}" PARENT_SCOPE)
endif()
endfunction()
# Helper function.
# This function is a modified version of the function protobuf_generate() copied from https://github.com/Kitware/CMake/blob/master/Modules/FindProtobuf.cmake.
function(protobuf_generate_grpc)
set(_options APPEND_PATH DESCRIPTORS)
set(_singleargs LANGUAGE OUT_VAR EXPORT_MACRO PROTOC_OUT_DIR)
if(COMMAND target_sources)
list(APPEND _singleargs TARGET)
endif()
set(_multiargs PROTOS IMPORT_DIRS GENERATE_EXTENSIONS)
cmake_parse_arguments(protobuf_generate_grpc "${_options}" "${_singleargs}" "${_multiargs}" "${ARGN}")
if(NOT protobuf_generate_grpc_PROTOS AND NOT protobuf_generate_grpc_TARGET)
message(SEND_ERROR "Error: protobuf_generate_grpc called without any targets or source files")
return()
endif()
if(NOT protobuf_generate_grpc_OUT_VAR AND NOT protobuf_generate_grpc_TARGET)
message(SEND_ERROR "Error: protobuf_generate_grpc called without a target or output variable")
return()
endif()
if(NOT protobuf_generate_grpc_LANGUAGE)
set(protobuf_generate_grpc_LANGUAGE cpp)
endif()
string(TOLOWER ${protobuf_generate_grpc_LANGUAGE} protobuf_generate_grpc_LANGUAGE)
if(NOT protobuf_generate_grpc_PROTOC_OUT_DIR)
set(protobuf_generate_grpc_PROTOC_OUT_DIR ${CMAKE_CURRENT_BINARY_DIR})
endif()
if(protobuf_generate_grpc_EXPORT_MACRO AND protobuf_generate_grpc_LANGUAGE STREQUAL cpp)
set(_dll_export_decl "dllexport_decl=${protobuf_generate_grpc_EXPORT_MACRO}:")
endif()
if(NOT protobuf_generate_grpc_GENERATE_EXTENSIONS)
if(protobuf_generate_grpc_LANGUAGE STREQUAL cpp)
set(protobuf_generate_grpc_GENERATE_EXTENSIONS .pb.h .pb.cc .grpc.pb.h .grpc.pb.cc)
elseif(protobuf_generate_grpc_LANGUAGE STREQUAL python)
set(protobuf_generate_grpc_GENERATE_EXTENSIONS _pb2.py)
else()
message(SEND_ERROR "Error: protobuf_generate_grpc given unknown Language ${LANGUAGE}, please provide a value for GENERATE_EXTENSIONS")
return()
endif()
endif()
if(NOT protobuf_generate_grpc_PLUGIN)
if(protobuf_generate_grpc_LANGUAGE STREQUAL cpp)
set(protobuf_generate_grpc_PLUGIN "grpc_cpp_plugin")
elseif(protobuf_generate_grpc_LANGUAGE STREQUAL python)
set(protobuf_generate_grpc_PLUGIN "grpc_python_plugin")
else()
message(SEND_ERROR "Error: protobuf_generate_grpc given unknown Language ${LANGUAGE}, please provide a value for PLUGIN")
return()
endif()
endif()
if(protobuf_generate_grpc_TARGET)
get_target_property(_source_list ${protobuf_generate_grpc_TARGET} SOURCES)
foreach(_file ${_source_list})
if(_file MATCHES "proto$")
list(APPEND protobuf_generate_grpc_PROTOS ${_file})
endif()
endforeach()
endif()
if(NOT protobuf_generate_grpc_PROTOS)
message(SEND_ERROR "Error: protobuf_generate_grpc could not find any .proto files")
return()
endif()
if(protobuf_generate_grpc_APPEND_PATH)
# Create an include path for each file specified
foreach(_file ${protobuf_generate_grpc_PROTOS})
get_filename_component(_abs_file ${_file} ABSOLUTE)
get_filename_component(_abs_path ${_abs_file} PATH)
list(FIND _protobuf_include_path ${_abs_path} _contains_already)
if(${_contains_already} EQUAL -1)
list(APPEND _protobuf_include_path -I ${_abs_path})
endif()
endforeach()
else()
set(_protobuf_include_path -I ${CMAKE_CURRENT_SOURCE_DIR})
endif()
foreach(DIR ${protobuf_generate_grpc_IMPORT_DIRS})
get_filename_component(ABS_PATH ${DIR} ABSOLUTE)
list(FIND _protobuf_include_path ${ABS_PATH} _contains_already)
if(${_contains_already} EQUAL -1)
list(APPEND _protobuf_include_path -I ${ABS_PATH})
endif()
endforeach()
set(_generated_srcs_all)
foreach(_proto ${protobuf_generate_grpc_PROTOS})
get_filename_component(_abs_file ${_proto} ABSOLUTE)
get_filename_component(_abs_dir ${_abs_file} DIRECTORY)
get_filename_component(_basename ${_proto} NAME_WE)
file(RELATIVE_PATH _rel_dir ${CMAKE_CURRENT_SOURCE_DIR} ${_abs_dir})
set(_possible_rel_dir)
if(NOT protobuf_generate_grpc_APPEND_PATH)
set(_possible_rel_dir ${_rel_dir}/)
endif()
set(_generated_srcs)
foreach(_ext ${protobuf_generate_grpc_GENERATE_EXTENSIONS})
list(APPEND _generated_srcs "${protobuf_generate_grpc_PROTOC_OUT_DIR}/${_possible_rel_dir}${_basename}${_ext}")
endforeach()
if(protobuf_generate_grpc_DESCRIPTORS AND protobuf_generate_grpc_LANGUAGE STREQUAL cpp)
set(_descriptor_file "${CMAKE_CURRENT_BINARY_DIR}/${_basename}.desc")
set(_dll_desc_out "--descriptor_set_out=${_descriptor_file}")
list(APPEND _generated_srcs ${_descriptor_file})
endif()
list(APPEND _generated_srcs_all ${_generated_srcs})
add_custom_command(
OUTPUT ${_generated_srcs}
COMMAND protobuf::protoc
ARGS --${protobuf_generate_grpc_LANGUAGE}_out ${_dll_export_decl}${protobuf_generate_grpc_PROTOC_OUT_DIR}
--grpc_out ${_dll_export_decl}${protobuf_generate_grpc_PROTOC_OUT_DIR}
--plugin=protoc-gen-grpc=$<TARGET_FILE:${protobuf_generate_grpc_PLUGIN}>
${_dll_desc_out} ${_protobuf_include_path} ${_abs_file}
DEPENDS ${_abs_file} protobuf::protoc ${protobuf_generate_grpc_PLUGIN}
COMMENT "Running ${protobuf_generate_grpc_LANGUAGE} protocol buffer compiler on ${_proto}"
VERBATIM)
endforeach()
set_source_files_properties(${_generated_srcs_all} PROPERTIES GENERATED TRUE)
if(protobuf_generate_grpc_OUT_VAR)
set(${protobuf_generate_grpc_OUT_VAR} ${_generated_srcs_all} PARENT_SCOPE)
endif()
if(protobuf_generate_grpc_TARGET)
target_sources(${protobuf_generate_grpc_TARGET} PRIVATE ${_generated_srcs_all})
endif()
endfunction()
# Find the libraries.
if(gRPC_USE_STATIC_LIBS)
# Support preference of static libs by adjusting CMAKE_FIND_LIBRARY_SUFFIXES
set(_gRPC_ORIG_CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES})
if(WIN32)
set(CMAKE_FIND_LIBRARY_SUFFIXES .lib .a ${CMAKE_FIND_LIBRARY_SUFFIXES})
else()
set(CMAKE_FIND_LIBRARY_SUFFIXES .a)
endif()
endif()
find_library(gRPC_LIBRARY NAMES grpc)
find_library(gRPC_CPP_LIBRARY NAMES grpc++)
find_library(gRPC_UNSECURE_LIBRARY NAMES grpc_unsecure)
find_library(gRPC_CPP_UNSECURE_LIBRARY NAMES grpc++_unsecure)
find_library(gRPC_CARES_LIBRARY NAMES cares)
set(gRPC_LIBRARIES)
if(gRPC_USE_UNSECURE_LIBRARIES)
if(gRPC_UNSECURE_LIBRARY)
set(gRPC_LIBRARIES ${gRPC_LIBRARIES} ${gRPC_UNSECURE_LIBRARY})
endif()
if(gRPC_CPP_UNSECURE_LIBRARY)
set(gRPC_LIBRARIES ${gRPC_LIBRARIES} ${gRPC_CPP_UNSECURE_LIBRARY})
endif()
else()
if(gRPC_LIBRARY)
set(gRPC_LIBRARIES ${gRPC_LIBRARIES} ${gRPC_LIBRARY})
endif()
  if(gRPC_CPP_LIBRARY)
set(gRPC_LIBRARIES ${gRPC_LIBRARIES} ${gRPC_CPP_LIBRARY})
endif()
endif()
set(gRPC_LIBRARIES ${gRPC_LIBRARIES} ${gRPC_CARES_LIBRARY})
# Restore the original find library ordering.
if(gRPC_USE_STATIC_LIBS)
set(CMAKE_FIND_LIBRARY_SUFFIXES ${_gRPC_ORIG_CMAKE_FIND_LIBRARY_SUFFIXES})
endif()
# Find the include directories.
find_path(gRPC_INCLUDE_DIR grpc/grpc.h)
find_path(gRPC_CPP_INCLUDE_DIR grpc++/grpc++.h)
if(gRPC_INCLUDE_DIR AND gRPC_CPP_INCLUDE_DIR AND NOT(gRPC_INCLUDE_DIR STREQUAL gRPC_CPP_INCLUDE_DIR))
set(gRPC_INCLUDE_DIRS ${gRPC_INCLUDE_DIR} ${gRPC_CPP_INCLUDE_DIR})
elseif(gRPC_INCLUDE_DIR)
set(gRPC_INCLUDE_DIRS ${gRPC_INCLUDE_DIR})
else()
set(gRPC_INCLUDE_DIRS ${gRPC_CPP_INCLUDE_DIR})
endif()
# Get full path to plugin.
find_program(gRPC_CPP_PLUGIN
NAMES grpc_cpp_plugin
DOC "The plugin for generating gRPC client and server C++ stubs from `.proto` files")
find_program(gRPC_PYTHON_PLUGIN
NAMES grpc_python_plugin
DOC "The plugin for generating gRPC client and server Python stubs from `.proto` files")
# Add imported targets.
if(gRPC_CPP_LIBRARY AND NOT TARGET grpc++)
add_library(grpc++ UNKNOWN IMPORTED)
set_target_properties(grpc++ PROPERTIES
IMPORTED_LOCATION "${gRPC_CPP_LIBRARY}")
set_target_properties(grpc++ PROPERTIES
INTERFACE_INCLUDE_DIRECTORIES ${gRPC_INCLUDE_DIRS})
endif()
if(gRPC_CPP_UNSECURE_LIBRARY AND NOT TARGET grpc++_unsecure)
add_library(grpc++_unsecure UNKNOWN IMPORTED)
set_target_properties(grpc++_unsecure PROPERTIES
IMPORTED_LOCATION "${gRPC_CPP_UNSECURE_LIBRARY}")
set_target_properties(grpc++_unsecure PROPERTIES
INTERFACE_INCLUDE_DIRECTORIES ${gRPC_INCLUDE_DIRS})
endif()
if(gRPC_CPP_PLUGIN AND NOT TARGET grpc_cpp_plugin)
add_executable(grpc_cpp_plugin IMPORTED)
set_target_properties(grpc_cpp_plugin PROPERTIES
IMPORTED_LOCATION "${gRPC_CPP_PLUGIN}")
endif()
if(gRPC_PYTHON_PLUGIN AND NOT TARGET grpc_python_plugin)
add_executable(grpc_python_plugin IMPORTED)
set_target_properties(grpc_python_plugin PROPERTIES
IMPORTED_LOCATION "${gRPC_PYTHON_PLUGIN}")
endif()
#include(FindPackageHandleStandardArgs.cmake)
FIND_PACKAGE_HANDLE_STANDARD_ARGS(gRPC
REQUIRED_VARS gRPC_LIBRARY gRPC_CPP_LIBRARY gRPC_UNSECURE_LIBRARY gRPC_CPP_UNSECURE_LIBRARY gRPC_CARES_LIBRARY
gRPC_INCLUDE_DIR gRPC_CPP_INCLUDE_DIR gRPC_CPP_PLUGIN gRPC_PYTHON_PLUGIN)
if(gRPC_FOUND)
if(gRPC_DEBUG)
message(STATUS "gRPC: INCLUDE_DIRS=${gRPC_INCLUDE_DIRS}")
message(STATUS "gRPC: LIBRARIES=${gRPC_LIBRARIES}")
message(STATUS "gRPC: CPP_PLUGIN=${gRPC_CPP_PLUGIN}")
message(STATUS "gRPC: PYTHON_PLUGIN=${gRPC_PYTHON_PLUGIN}")
endif()
endif()
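# An end-to-end sketch of protobuf_generate_grpc_cpp(); "helloworld.proto" and the target
# names are placeholders. find_package(Protobuf) is assumed because the generated custom
# command invokes the protobuf::protoc imported target defined by CMake's FindProtobuf.
#
#   find_package(Protobuf REQUIRED)
#   find_package(gRPC REQUIRED)
#   protobuf_generate_grpc_cpp(HELLO_SRCS HELLO_HDRS helloworld.proto)
#   add_library(helloworld_grpc ${HELLO_SRCS} ${HELLO_HDRS})
#   target_include_directories(helloworld_grpc PUBLIC ${CMAKE_CURRENT_BINARY_DIR} ${gRPC_INCLUDE_DIRS} ${Protobuf_INCLUDE_DIRS})
#   target_link_libraries(helloworld_grpc PUBLIC ${gRPC_LIBRARIES} ${Protobuf_LIBRARIES})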

View File

@ -2,11 +2,11 @@
# NOTE: has nothing common with DBMS_TCP_PROTOCOL_VERSION,
# only DBMS_TCP_PROTOCOL_VERSION should be incremented on protocol changes.
SET(VERSION_REVISION 54458)
SET(VERSION_MAJOR 21)
SET(VERSION_MINOR 13)
SET(VERSION_REVISION 54459)
SET(VERSION_MAJOR 22)
SET(VERSION_MINOR 2)
SET(VERSION_PATCH 1)
SET(VERSION_GITHASH 4cc45c1e15912ee300bca7cc8b8da2b888a70e2a)
SET(VERSION_DESCRIBE v21.13.1.1-prestable)
SET(VERSION_STRING 21.13.1.1)
SET(VERSION_GITHASH dfe64a2789bbf51046bb6b5476f874f7b59d124c)
SET(VERSION_DESCRIBE v22.2.1.1-prestable)
SET(VERSION_STRING 22.2.1.1)
# end of autochange

View File

@ -1,23 +0,0 @@
macro(find_contrib_lib LIB_NAME)
string(TOLOWER ${LIB_NAME} LIB_NAME_LC)
string(TOUPPER ${LIB_NAME} LIB_NAME_UC)
string(REPLACE "-" "_" LIB_NAME_UC ${LIB_NAME_UC})
option (USE_INTERNAL_${LIB_NAME_UC}_LIBRARY "Use bundled library ${LIB_NAME} instead of system" ON)
if (NOT USE_INTERNAL_${LIB_NAME_UC}_LIBRARY)
find_package ("${LIB_NAME}")
if (NOT ${LIB_NAME_UC}_FOUND)
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use system ${LIB_NAME}")
endif()
endif ()
if (NOT ${LIB_NAME_UC}_FOUND)
set (USE_INTERNAL_${LIB_NAME_UC}_LIBRARY 1)
set (${LIB_NAME_UC}_LIBRARIES ${LIB_NAME_LC})
set (${LIB_NAME_UC}_INCLUDE_DIR ${${LIB_NAME_UC}_CONTRIB_INCLUDE_DIR})
endif ()
message (STATUS "Using ${LIB_NAME}: ${${LIB_NAME_UC}_INCLUDE_DIR} : ${${LIB_NAME_UC}_LIBRARIES}")
endmacro()
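# A hypothetical call site for the macro above; "metrohash" and the contrib path are only
# illustrative. The macro derives METROHASH_* names, tries the system package when
# USE_INTERNAL_METROHASH_LIBRARY is OFF, and otherwise falls back to the bundled target.
#
#   set(METROHASH_CONTRIB_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/libmetrohash/src")
#   find_contrib_lib(metrohash)
#   # -> METROHASH_LIBRARIES / METROHASH_INCLUDE_DIR are now set for consumers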

View File

@ -1,29 +0,0 @@
if (MISSING_INTERNAL_LIBUV_LIBRARY)
message (WARNING "Can't find internal libuv needed for AMQP-CPP library")
set (ENABLE_AMQPCPP OFF CACHE INTERNAL "")
endif()
option(ENABLE_AMQPCPP "Enable AMQP-CPP" ${ENABLE_LIBRARIES})
if (NOT ENABLE_AMQPCPP)
return()
endif()
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/AMQP-CPP/src")
message (WARNING "submodule contrib/AMQP-CPP is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal AMQP-CPP library")
set (USE_AMQPCPP 0)
return()
endif ()
set (USE_AMQPCPP 1)
set (AMQPCPP_LIBRARY amqp-cpp ${OPENSSL_LIBRARIES})
set (AMQPCPP_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/AMQP-CPP/include")
list (APPEND AMQPCPP_INCLUDE_DIR
"${LIBUV_INCLUDE_DIR}"
"${ClickHouse_SOURCE_DIR}/contrib/AMQP-CPP")
list (APPEND AMQPCPP_LIBRARY "${LIBUV_LIBRARY}")
message (STATUS "Using AMQP-CPP=${USE_AMQPCPP}: ${AMQPCPP_INCLUDE_DIR} : ${AMQPCPP_LIBRARY}")

View File

@ -1,35 +0,0 @@
# Needed when using Apache Avro serialization format
option (ENABLE_AVRO "Enable Avro" ${ENABLE_LIBRARIES})
if (NOT ENABLE_AVRO)
if (USE_INTERNAL_AVRO_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal avro library with ENABLE_AVRO=OFF")
endif()
return()
endif()
option (USE_INTERNAL_AVRO_LIBRARY "Set to FALSE to use system avro library instead of bundled" ON)
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/avro/lang")
if (USE_INTERNAL_AVRO_LIBRARY)
message(WARNING "submodule contrib/avro is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot find internal avro")
set(USE_INTERNAL_AVRO_LIBRARY 0)
endif()
set(MISSING_INTERNAL_AVRO_LIBRARY 1)
endif()
if (NOT USE_INTERNAL_AVRO_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Using system avro library is not supported yet")
elseif(NOT MISSING_INTERNAL_AVRO_LIBRARY)
include(cmake/find/snappy.cmake)
set(AVROCPP_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/avro/lang/c++/include")
set(AVROCPP_LIBRARY avrocpp)
set(USE_INTERNAL_AVRO_LIBRARY 1)
endif ()
if (AVROCPP_LIBRARY AND AVROCPP_INCLUDE_DIR)
set(USE_AVRO 1)
endif()
message (STATUS "Using avro=${USE_AVRO}: ${AVROCPP_INCLUDE_DIR} : ${AVROCPP_LIBRARY}")

View File

@ -1,25 +0,0 @@
if(ARCH_AMD64 OR ARCH_ARM)
option (ENABLE_BASE64 "Enable base64" ${ENABLE_LIBRARIES})
elseif(ENABLE_BASE64)
message (${RECONFIGURE_MESSAGE_LEVEL} "base64 library is only supported on x86_64 and aarch64")
endif()
if (NOT ENABLE_BASE64)
return()
endif()
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/base64/LICENSE")
set (MISSING_INTERNAL_BASE64_LIBRARY 1)
message (WARNING "submodule contrib/base64 is missing. to fix try run: \n git submodule update --init")
endif ()
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/base64")
message (WARNING "submodule contrib/base64 is missing. to fix try run: \n git submodule update --init")
else()
set (BASE64_LIBRARY base64)
set (USE_BASE64 1)
endif()
if (NOT USE_BASE64)
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot enable base64")
endif()

View File

@ -1,30 +0,0 @@
option (ENABLE_AZURE_BLOB_STORAGE "Enable Azure blob storage" ${ENABLE_LIBRARIES})
option(USE_INTERNAL_AZURE_BLOB_STORAGE_LIBRARY
"Set to FALSE to use system Azure SDK instead of bundled (OFF currently not implemented)"
ON)
if (ENABLE_AZURE_BLOB_STORAGE)
set(USE_AZURE_BLOB_STORAGE 1)
set(AZURE_BLOB_STORAGE_LIBRARY azure_sdk)
endif()
if ((NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/azure/sdk"
OR NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/azure/cmake-modules")
AND USE_INTERNAL_AZURE_BLOB_STORAGE_LIBRARY)
message (WARNING "submodule contrib/azure is missing. to fix try run: \n git submodule update --init")
set(USE_INTERNAL_AZURE_BLOB_STORAGE_LIBRARY OFF)
set(USE_AZURE_BLOB_STORAGE 0)
endif ()
if (NOT USE_INTERNAL_SSL_LIBRARY AND USE_INTERNAL_AZURE_BLOB_STORAGE_LIBRARY)
message (FATAL_ERROR "Currently Blob Storage support can be built only with internal SSL library")
endif()
if (NOT USE_INTERNAL_CURL AND USE_INTERNAL_AZURE_BLOB_STORAGE_LIBRARY)
message (FATAL_ERROR "Currently Blob Storage support can be built only with internal curl library")
endif()
if (USE_AZURE_BLOB_STORAGE)
message (STATUS "Using Azure Blob Storage - ${USE_AZURE_BLOB_STORAGE}")
endif()

View File

@ -1,42 +0,0 @@
option (ENABLE_BROTLI "Enable brotli" ${ENABLE_LIBRARIES})
if (NOT ENABLE_BROTLI)
if (USE_INTERNAL_BROTLI_LIBRARY)
        message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal brotli library with ENABLE_BROTLI=OFF")
endif()
return()
endif()
option (USE_INTERNAL_BROTLI_LIBRARY "Set to FALSE to use system libbrotli library instead of bundled" ON)
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/brotli/c/include/brotli/decode.h")
if (USE_INTERNAL_BROTLI_LIBRARY)
message (WARNING "submodule contrib/brotli is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot find internal brotli")
set (USE_INTERNAL_BROTLI_LIBRARY 0)
endif ()
set (MISSING_INTERNAL_BROTLI_LIBRARY 1)
endif ()
if(NOT USE_INTERNAL_BROTLI_LIBRARY)
find_library(BROTLI_LIBRARY_COMMON brotlicommon)
find_library(BROTLI_LIBRARY_DEC brotlidec)
find_library(BROTLI_LIBRARY_ENC brotlienc)
find_path(BROTLI_INCLUDE_DIR NAMES brotli/decode.h brotli/encode.h brotli/port.h brotli/types.h PATHS ${BROTLI_INCLUDE_PATHS})
if(BROTLI_LIBRARY_DEC AND BROTLI_LIBRARY_ENC AND BROTLI_LIBRARY_COMMON)
set(BROTLI_LIBRARY ${BROTLI_LIBRARY_DEC} ${BROTLI_LIBRARY_ENC} ${BROTLI_LIBRARY_COMMON})
else()
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use system brotli")
endif()
endif()
if (BROTLI_LIBRARY AND BROTLI_INCLUDE_DIR)
set (USE_BROTLI 1)
elseif (NOT MISSING_INTERNAL_BROTLI_LIBRARY)
set (BROTLI_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/brotli/c/include")
set (USE_INTERNAL_BROTLI_LIBRARY 1)
set (BROTLI_LIBRARY brotli)
set (USE_BROTLI 1)
endif ()
message (STATUS "Using brotli=${USE_BROTLI}: ${BROTLI_INCLUDE_DIR} : ${BROTLI_LIBRARY}")

View File

@ -1,19 +0,0 @@
option(ENABLE_BZIP2 "Enable bzip2 compression support" ${ENABLE_LIBRARIES})
if (NOT ENABLE_BZIP2)
message (STATUS "bzip2 compression disabled")
return()
endif()
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/bzip2/bzlib.h")
message (WARNING "submodule contrib/bzip2 is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal bzip2 library")
    set (USE_BZIP2 0)
return()
endif ()
set (USE_BZIP2 1)
set (BZIP2_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/bzip2")
set (BZIP2_LIBRARY bzip2)
message (STATUS "Using bzip2=${USE_BZIP2}: ${BZIP2_INCLUDE_DIR} : ${BZIP2_LIBRARY}")

View File

@ -1,42 +0,0 @@
option (ENABLE_CAPNP "Enable Cap'n Proto" ${ENABLE_LIBRARIES})
if (NOT ENABLE_CAPNP)
if (USE_INTERNAL_CAPNP_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal capnproto library with ENABLE_CAPNP=OFF")
endif()
return()
endif()
option (USE_INTERNAL_CAPNP_LIBRARY "Set to FALSE to use system capnproto library instead of bundled" ON)
if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/capnproto/c++")
if(USE_INTERNAL_CAPNP_LIBRARY)
message(WARNING "submodule contrib/capnproto is missing. to fix try run: \n git submodule update --init")
message(${RECONFIGURE_MESSAGE_LEVEL} "cannot find internal capnproto")
set(USE_INTERNAL_CAPNP_LIBRARY 0)
endif()
set(MISSING_INTERNAL_CAPNP_LIBRARY 1)
endif()
# FIXME: refactor to use `add_library( IMPORTED)` if possible.
if (NOT USE_INTERNAL_CAPNP_LIBRARY)
find_library (KJ kj)
find_library (CAPNP capnp)
find_library (CAPNPC capnpc)
if(KJ AND CAPNP AND CAPNPC)
set (CAPNP_LIBRARIES ${CAPNPC} ${CAPNP} ${KJ})
else()
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system capnproto")
endif()
endif()
if (CAPNP_LIBRARIES)
set (USE_CAPNP 1)
elseif(NOT MISSING_INTERNAL_CAPNP_LIBRARY)
set (CAPNP_LIBRARIES capnpc)
set (USE_CAPNP 1)
set (USE_INTERNAL_CAPNP_LIBRARY 1)
endif ()
message (STATUS "Using capnp=${USE_CAPNP}: ${CAPNP_LIBRARIES}")

View File

@ -1,34 +0,0 @@
if (MISSING_INTERNAL_LIBUV_LIBRARY)
message (WARNING "Disabling cassandra due to missing libuv")
set (ENABLE_CASSANDRA OFF CACHE INTERNAL "")
endif()
option(ENABLE_CASSANDRA "Enable Cassandra" ${ENABLE_LIBRARIES})
if (NOT ENABLE_CASSANDRA)
return()
endif()
if (APPLE)
set(CMAKE_MACOSX_RPATH ON)
endif()
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/cassandra")
message (ERROR "submodule contrib/cassandra is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal Cassandra")
set (USE_CASSANDRA 0)
return()
endif()
set (USE_CASSANDRA 1)
set (CASSANDRA_INCLUDE_DIR
"${ClickHouse_SOURCE_DIR}/contrib/cassandra/include/")
if (MAKE_STATIC_LIBRARIES)
set (CASSANDRA_LIBRARY cassandra_static)
else()
set (CASSANDRA_LIBRARY cassandra)
endif()
set (CASS_ROOT_DIR "${ClickHouse_SOURCE_DIR}/contrib/cassandra")
message (STATUS "Using cassandra=${USE_CASSANDRA}: ${CASSANDRA_INCLUDE_DIR} : ${CASSANDRA_LIBRARY}")

View File

@ -31,11 +31,7 @@ if (CCACHE_FOUND AND NOT COMPILER_MATCHES_CCACHE)
if (CCACHE_VERSION VERSION_GREATER "3.2.0" OR NOT CMAKE_CXX_COMPILER_ID STREQUAL "Clang")
message(STATUS "Using ${CCACHE_FOUND} ${CCACHE_VERSION}")
set (CMAKE_CXX_COMPILER_LAUNCHER ${CCACHE_FOUND} ${CMAKE_CXX_COMPILER_LAUNCHER})
set (CMAKE_C_COMPILER_LAUNCHER ${CCACHE_FOUND} ${CMAKE_C_COMPILER_LAUNCHER})
set_property (GLOBAL PROPERTY RULE_LAUNCH_LINK ${CCACHE_FOUND})
set(LAUNCHER ${CCACHE_FOUND})
# debian (debhelpers) set SOURCE_DATE_EPOCH environment variable, that is
# filled from the debian/changelog or current time.
@ -44,16 +40,14 @@ if (CCACHE_FOUND AND NOT COMPILER_MATCHES_CCACHE)
# of the manifest, which do not allow to use previous cache,
# - 4.2+ ccache ignores SOURCE_DATE_EPOCH for every file w/o __DATE__/__TIME__
#
# So for:
# - 4.2+ does not require any sloppiness
# - 4.0+ will ignore SOURCE_DATE_EPOCH environment variable.
if (CCACHE_VERSION VERSION_GREATER_EQUAL "4.2")
message(STATUS "ccache is 4.2+ no quirks for SOURCE_DATE_EPOCH required")
elseif (CCACHE_VERSION VERSION_GREATER_EQUAL "4.0")
# Exclude SOURCE_DATE_EPOCH env for ccache versions between [4.0, 4.2).
if (CCACHE_VERSION VERSION_GREATER_EQUAL "4.0" AND CCACHE_VERSION VERSION_LESS "4.2")
message(STATUS "Ignore SOURCE_DATE_EPOCH for ccache")
set_property (GLOBAL PROPERTY RULE_LAUNCH_COMPILE "env -u SOURCE_DATE_EPOCH")
set_property (GLOBAL PROPERTY RULE_LAUNCH_LINK "env -u SOURCE_DATE_EPOCH")
set(LAUNCHER env -u SOURCE_DATE_EPOCH ${CCACHE_FOUND})
endif()
set (CMAKE_CXX_COMPILER_LAUNCHER ${LAUNCHER} ${CMAKE_CXX_COMPILER_LAUNCHER})
set (CMAKE_C_COMPILER_LAUNCHER ${LAUNCHER} ${CMAKE_C_COMPILER_LAUNCHER})
else ()
message(${RECONFIGURE_MESSAGE_LEVEL} "Not using ${CCACHE_FOUND} ${CCACHE_VERSION} bug: https://bugzilla.samba.org/show_bug.cgi?id=8118")
endif ()

View File

@ -1,35 +0,0 @@
option (ENABLE_CURL "Enable curl" ${ENABLE_LIBRARIES})
if (NOT ENABLE_CURL)
if (USE_INTERNAL_CURL)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal curl with ENABLE_CURL=OFF")
endif()
return()
endif()
option (USE_INTERNAL_CURL "Use internal curl library" ON)
if (NOT USE_INTERNAL_CURL)
find_package (CURL)
if (NOT CURL_FOUND)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system curl")
endif()
endif()
if (NOT CURL_FOUND)
set (USE_INTERNAL_CURL 1)
set (CURL_LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/curl")
# find_package(CURL) compatibility for the following packages that uses
# find_package(CURL)/include(FindCURL):
# - sentry-native
set (CURL_FOUND ON CACHE BOOL "")
set (CURL_ROOT_DIR ${CURL_LIBRARY_DIR} CACHE PATH "")
set (CURL_INCLUDE_DIR ${CURL_LIBRARY_DIR}/include CACHE PATH "")
set (CURL_INCLUDE_DIRS ${CURL_LIBRARY_DIR}/include CACHE PATH "")
set (CURL_LIBRARY curl CACHE STRING "")
set (CURL_LIBRARIES ${CURL_LIBRARY} CACHE STRING "")
set (CURL_VERSION_STRING 7.67.0 CACHE STRING "")
endif ()
message (STATUS "Using curl: ${CURL_INCLUDE_DIRS} : ${CURL_LIBRARIES}")

View File

@ -1,52 +1,13 @@
option (USE_LIBCXX "Use libc++ and libc++abi instead of libstdc++" ON)
if (NOT USE_LIBCXX)
if (USE_INTERNAL_LIBCXX_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal libcxx with USE_LIBCXX=OFF")
endif()
target_link_libraries(global-libs INTERFACE -l:libstdc++.a -l:libstdc++fs.a) # Always link these libraries as static
target_link_libraries(global-libs INTERFACE ${EXCEPTION_HANDLING_LIBRARY})
return()
endif()
set(USE_INTERNAL_LIBCXX_LIBRARY_DEFAULT ON)
option (USE_INTERNAL_LIBCXX_LIBRARY "Disable to use system libcxx and libcxxabi libraries instead of bundled"
${USE_INTERNAL_LIBCXX_LIBRARY_DEFAULT})
if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libcxx/src")
if (USE_INTERNAL_LIBCXX_LIBRARY)
message(WARNING "submodule contrib/libcxx is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal libcxx")
set(USE_INTERNAL_LIBCXX_LIBRARY 0)
endif()
set(USE_INTERNAL_LIBCXX_LIBRARY_DEFAULT 0)
set(MISSING_INTERNAL_LIBCXX_LIBRARY 1)
endif()
set (CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -D_LIBCPP_DEBUG=0") # More checks in debug build.
if (NOT USE_INTERNAL_LIBCXX_LIBRARY)
find_library (LIBCXX_LIBRARY c++)
find_library (LIBCXXFS_LIBRARY c++fs)
find_library (LIBCXXABI_LIBRARY c++abi)
if(LIBCXX_LIBRARY AND LIBCXXABI_LIBRARY) # c++fs is now a part of the libc++
set (HAVE_LIBCXX 1)
else ()
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system libcxx")
endif()
if(NOT LIBCXXFS_LIBRARY)
set(LIBCXXFS_LIBRARY ${LIBCXX_LIBRARY})
endif()
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -stdlib=libc++")
target_link_libraries(global-libs INTERFACE ${EXCEPTION_HANDLING_LIBRARY})
endif ()
if (NOT HAVE_LIBCXX AND NOT MISSING_INTERNAL_LIBCXX_LIBRARY)
set (LIBCXX_LIBRARY cxx)
set (LIBCXXABI_LIBRARY cxxabi)
@ -56,7 +17,6 @@ if (NOT HAVE_LIBCXX AND NOT MISSING_INTERNAL_LIBCXX_LIBRARY)
# Exception handling library is embedded into libcxxabi.
set (HAVE_LIBCXX 1)
set(USE_INTERNAL_LIBCXX_LIBRARY 1)
endif ()
if (HAVE_LIBCXX)

View File

@ -1,23 +0,0 @@
if (${ENABLE_LIBRARIES} AND ${ENABLE_KRB5})
set (DEFAULT_ENABLE_CYRUS_SASL 1)
else()
set (DEFAULT_ENABLE_CYRUS_SASL 0)
endif()
OPTION(ENABLE_CYRUS_SASL "Enable cyrus-sasl" ${DEFAULT_ENABLE_CYRUS_SASL})
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/cyrus-sasl/README")
message (WARNING "submodule contrib/cyrus-sasl is missing. to fix try run: \n git submodule update --init")
set (ENABLE_CYRUS_SASL 0)
endif ()
if (ENABLE_CYRUS_SASL)
set (USE_CYRUS_SASL 1)
set (CYRUS_SASL_LIBRARY sasl2)
set (CYRUS_SASL_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/cyrus-sasl/include")
endif ()
message (STATUS "Using cyrus-sasl: krb5=${USE_KRB5}: ${CYRUS_SASL_INCLUDE_DIR} : ${CYRUS_SASL_LIBRARY}")

View File

@ -1,29 +0,0 @@
option (ENABLE_DATASKETCHES "Enable DataSketches" ${ENABLE_LIBRARIES})
if (ENABLE_DATASKETCHES)
option (USE_INTERNAL_DATASKETCHES_LIBRARY "Set to FALSE to use system DataSketches library instead of bundled" ON)
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/datasketches-cpp/theta/CMakeLists.txt")
if (USE_INTERNAL_DATASKETCHES_LIBRARY)
message(WARNING "submodule contrib/datasketches-cpp is missing. to fix try run: \n git submodule update --init")
endif()
set(MISSING_INTERNAL_DATASKETCHES_LIBRARY 1)
set(USE_INTERNAL_DATASKETCHES_LIBRARY 0)
endif()
if (USE_INTERNAL_DATASKETCHES_LIBRARY)
set(DATASKETCHES_LIBRARY theta)
set(DATASKETCHES_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/datasketches-cpp/common/include" "${ClickHouse_SOURCE_DIR}/contrib/datasketches-cpp/theta/include")
elseif (NOT MISSING_INTERNAL_DATASKETCHES_LIBRARY)
find_library(DATASKETCHES_LIBRARY theta)
find_path(DATASKETCHES_INCLUDE_DIR NAMES theta_sketch.hpp PATHS ${DATASKETCHES_INCLUDE_PATHS})
endif()
if (DATASKETCHES_LIBRARY AND DATASKETCHES_INCLUDE_DIR)
set(USE_DATASKETCHES 1)
endif()
endif()
message (STATUS "Using datasketches=${USE_DATASKETCHES}: ${DATASKETCHES_INCLUDE_DIR} : ${DATASKETCHES_LIBRARY}")

View File

@ -1,6 +0,0 @@
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/fast_float/include/fast_float/fast_float.h")
message (FATAL_ERROR "submodule contrib/fast_float is missing. to fix try run: \n git submodule update --init")
endif ()
set(FAST_FLOAT_LIBRARY fast_float)
set(FAST_FLOAT_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/fast_float/include/")

View File

@ -1,24 +0,0 @@
if(ARCH_AMD64 AND NOT OS_FREEBSD AND NOT OS_DARWIN)
option(ENABLE_FASTOPS "Enable fast vectorized mathematical functions library by Mikhail Parakhin" ${ENABLE_LIBRARIES})
elseif(ENABLE_FASTOPS)
message (${RECONFIGURE_MESSAGE_LEVEL} "Fastops library is supported on x86_64 only, and not FreeBSD or Darwin")
endif()
if(NOT ENABLE_FASTOPS)
set(USE_FASTOPS 0)
return()
endif()
if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/fastops/fastops/fastops.h")
message(WARNING "submodule contrib/fastops is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal fastops library")
set(MISSING_INTERNAL_FASTOPS_LIBRARY 1)
endif()
if(NOT MISSING_INTERNAL_FASTOPS_LIBRARY)
set(USE_FASTOPS 1)
set(FASTOPS_INCLUDE_DIR ${ClickHouse_SOURCE_DIR}/contrib/fastops/)
set(FASTOPS_LIBRARY fastops)
endif()
message(STATUS "Using fastops=${USE_FASTOPS}: ${FASTOPS_INCLUDE_DIR} : ${FASTOPS_LIBRARY}")

View File

@ -1,8 +0,0 @@
# StorageFileLog only support Linux platform
if (OS_LINUX)
set (USE_FILELOG 1)
message (STATUS "Using StorageFileLog = 1")
else()
message(STATUS "StorageFileLog is only supported on Linux")
endif ()

View File

@ -1,16 +0,0 @@
if(NOT DEFINED ENABLE_GPERF OR ENABLE_GPERF)
# Check if gperf was installed
find_program(GPERF gperf)
if(GPERF)
option(ENABLE_GPERF "Use gperf function hash generator tool" ${ENABLE_LIBRARIES})
endif()
endif()
if (ENABLE_GPERF)
if(NOT GPERF)
message(FATAL_ERROR "Could not find the program gperf")
endif()
set(USE_GPERF 1)
endif()
message(STATUS "Using gperf=${USE_GPERF}: ${GPERF}")

View File

@ -1,72 +0,0 @@
# disable grpc due to conflicts of abseil (required by grpc) dynamic annotations with libtsan.a
if (SANITIZE STREQUAL "thread" AND COMPILER_GCC)
set(ENABLE_GRPC_DEFAULT OFF)
else()
set(ENABLE_GRPC_DEFAULT ${ENABLE_LIBRARIES})
endif()
option(ENABLE_GRPC "Use gRPC" ${ENABLE_GRPC_DEFAULT})
if(NOT ENABLE_GRPC)
if(USE_INTERNAL_GRPC_LIBRARY)
message(${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal gRPC library with ENABLE_GRPC=OFF")
endif()
return()
endif()
if(NOT USE_PROTOBUF)
message(WARNING "Cannot use gRPC library without protobuf")
endif()
# Normally we use the internal gRPC framework.
# You can set USE_INTERNAL_GRPC_LIBRARY to OFF to force using the external gRPC framework, which should be installed in the system in this case.
# The external gRPC framework can be installed in the system by running
# sudo apt-get install libgrpc++-dev protobuf-compiler-grpc
option(USE_INTERNAL_GRPC_LIBRARY "Set to FALSE to use system gRPC library instead of bundled. (Experimental. Set to OFF at your own risk)" ON)
if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/grpc/CMakeLists.txt")
if(USE_INTERNAL_GRPC_LIBRARY)
message(WARNING "submodule contrib/grpc is missing. to fix try run: \n git submodule update --init")
message(${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal grpc")
set(USE_INTERNAL_GRPC_LIBRARY 0)
endif()
set(MISSING_INTERNAL_GRPC_LIBRARY 1)
endif()
if(USE_SSL)
set(gRPC_USE_UNSECURE_LIBRARIES FALSE)
else()
set(gRPC_USE_UNSECURE_LIBRARIES TRUE)
endif()
if(NOT USE_INTERNAL_GRPC_LIBRARY)
find_package(gRPC)
if(NOT gRPC_INCLUDE_DIRS OR NOT gRPC_LIBRARIES)
message(${RECONFIGURE_MESSAGE_LEVEL} "Can't find system gRPC library")
set(EXTERNAL_GRPC_LIBRARY_FOUND 0)
elseif(NOT gRPC_CPP_PLUGIN)
message(${RECONFIGURE_MESSAGE_LEVEL} "Can't find system grpc_cpp_plugin")
set(EXTERNAL_GRPC_LIBRARY_FOUND 0)
else()
set(EXTERNAL_GRPC_LIBRARY_FOUND 1)
set(USE_GRPC 1)
endif()
endif()
if(NOT EXTERNAL_GRPC_LIBRARY_FOUND AND NOT MISSING_INTERNAL_GRPC_LIBRARY)
set(gRPC_INCLUDE_DIRS "${ClickHouse_SOURCE_DIR}/contrib/grpc/include")
if(gRPC_USE_UNSECURE_LIBRARIES)
set(gRPC_LIBRARIES grpc_unsecure grpc++_unsecure)
else()
set(gRPC_LIBRARIES grpc grpc++)
endif()
set(gRPC_CPP_PLUGIN $<TARGET_FILE:grpc_cpp_plugin>)
set(gRPC_PYTHON_PLUGIN $<TARGET_FILE:grpc_python_plugin>)
include("${ClickHouse_SOURCE_DIR}/contrib/grpc-cmake/protobuf_generate_grpc.cmake")
set(USE_INTERNAL_GRPC_LIBRARY 1)
set(USE_GRPC 1)
endif()
message(STATUS "Using gRPC=${USE_GRPC}: ${gRPC_INCLUDE_DIRS} : ${gRPC_LIBRARIES} : ${gRPC_CPP_PLUGIN}")

View File

@ -1,40 +0,0 @@
# included only if ENABLE_TESTS=1
option (USE_INTERNAL_GTEST_LIBRARY "Set to FALSE to use system Google Test instead of bundled" ON)
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/googletest/googletest/CMakeLists.txt")
if (USE_INTERNAL_GTEST_LIBRARY)
message (WARNING "submodule contrib/googletest is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal gtest")
set (USE_INTERNAL_GTEST_LIBRARY 0)
endif ()
set (MISSING_INTERNAL_GTEST_LIBRARY 1)
endif ()
if(NOT USE_INTERNAL_GTEST_LIBRARY)
# TODO: autodetect of GTEST_SRC_DIR by EXISTS /usr/src/googletest/CMakeLists.txt
if(NOT GTEST_SRC_DIR)
find_package(GTest)
if (NOT GTEST_INCLUDE_DIRS)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system Google Test")
endif()
endif()
endif()
if (NOT GTEST_SRC_DIR AND NOT GTEST_INCLUDE_DIRS AND NOT MISSING_INTERNAL_GTEST_LIBRARY)
set (USE_INTERNAL_GTEST_LIBRARY 1)
set (GTEST_MAIN_LIBRARIES gtest_main)
set (GTEST_LIBRARIES gtest)
set (GTEST_BOTH_LIBRARIES ${GTEST_MAIN_LIBRARIES} ${GTEST_LIBRARIES})
set (GTEST_INCLUDE_DIRS ${ClickHouse_SOURCE_DIR}/contrib/googletest/googletest)
elseif(USE_INTERNAL_GTEST_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Wouldn't use internal Google Test library")
set (USE_INTERNAL_GTEST_LIBRARY 0)
endif ()
if((GTEST_INCLUDE_DIRS AND GTEST_BOTH_LIBRARIES) OR GTEST_SRC_DIR)
set(USE_GTEST 1)
endif()
message (STATUS "Using gtest=${USE_GTEST}: ${GTEST_INCLUDE_DIRS} : ${GTEST_BOTH_LIBRARIES} : ${GTEST_SRC_DIR}")

View File

@ -1,39 +0,0 @@
option (ENABLE_H3 "Enable H3" ${ENABLE_LIBRARIES})
if(NOT ENABLE_H3)
if(USE_INTERNAL_H3_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal h3 library with ENABLE_H3=OFF")
endif ()
return()
endif()
option(USE_INTERNAL_H3_LIBRARY "Set to FALSE to use system h3 library instead of bundled"
ON) # we are not aware of any distribution that provides an h3 package
if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/h3/src/h3lib/include/h3Index.h")
if(USE_INTERNAL_H3_LIBRARY)
message(WARNING "submodule contrib/h3 is missing. to fix try run: \n git submodule update --init")
message(${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal h3 library")
set(USE_INTERNAL_H3_LIBRARY 0)
endif()
set(MISSING_INTERNAL_H3_LIBRARY 1)
endif()
if(NOT USE_INTERNAL_H3_LIBRARY)
find_library(H3_LIBRARY h3)
find_path(H3_INCLUDE_DIR NAMES h3/h3api.h PATHS ${H3_INCLUDE_PATHS})
if(NOT H3_LIBRARY OR NOT H3_INCLUDE_DIR)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system h3 library")
endif()
endif()
if (H3_LIBRARY AND H3_INCLUDE_DIR)
set (USE_H3 1)
elseif(NOT MISSING_INTERNAL_H3_LIBRARY)
set (H3_LIBRARY h3)
set (H3_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/h3/src/h3lib/include")
set (USE_H3 1)
set (USE_INTERNAL_H3_LIBRARY 1)
endif()
message (STATUS "Using h3=${USE_H3}: ${H3_INCLUDE_DIR} : ${H3_LIBRARY}")

View File

@ -1,45 +0,0 @@
if(NOT ARCH_ARM AND NOT OS_FREEBSD AND NOT APPLE AND USE_PROTOBUF AND NOT ARCH_PPC64LE)
option(ENABLE_HDFS "Enable HDFS" ${ENABLE_LIBRARIES})
elseif(ENABLE_HDFS OR USE_INTERNAL_HDFS3_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use HDFS3 with current configuration")
endif()
if(NOT ENABLE_HDFS)
if(USE_INTERNAL_HDFS3_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal HDFS3 library with ENABLE_HDFS=OFF")
endif()
return()
endif()
option(USE_INTERNAL_HDFS3_LIBRARY "Set to FALSE to use system HDFS3 instead of bundled (experimental - set to OFF at your own risk)"
ON) # We are not aware of any Linux distribution that packages it
if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libhdfs3/include/hdfs/hdfs.h")
if(USE_INTERNAL_HDFS3_LIBRARY)
message(WARNING "submodule contrib/libhdfs3 is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal HDFS3 library")
set(USE_INTERNAL_HDFS3_LIBRARY 0)
endif()
set(MISSING_INTERNAL_HDFS3_LIBRARY 1)
endif()
if(NOT USE_INTERNAL_HDFS3_LIBRARY)
find_library(HDFS3_LIBRARY hdfs3)
find_path(HDFS3_INCLUDE_DIR NAMES hdfs/hdfs.h PATHS ${HDFS3_INCLUDE_PATHS})
if(NOT HDFS3_LIBRARY OR NOT HDFS3_INCLUDE_DIR)
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot find system HDFS3 library")
endif()
endif()
if(HDFS3_LIBRARY AND HDFS3_INCLUDE_DIR)
set(USE_HDFS 1)
elseif(NOT MISSING_INTERNAL_HDFS3_LIBRARY AND LIBGSASL_LIBRARY AND LIBXML2_LIBRARIES)
set(HDFS3_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/libhdfs3/include")
set(HDFS3_LIBRARY hdfs3)
set(USE_INTERNAL_HDFS3_LIBRARY 1)
set(USE_HDFS 1)
else()
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot enable HDFS3")
endif()
message(STATUS "Using hdfs3=${USE_HDFS}: ${HDFS3_INCLUDE_DIR} : ${HDFS3_LIBRARY}")

View File

@ -1,51 +0,0 @@
if (OS_LINUX)
option(ENABLE_ICU "Enable ICU" ${ENABLE_LIBRARIES})
else ()
option(ENABLE_ICU "Enable ICU" 0)
endif ()
if (NOT ENABLE_ICU)
if(USE_INTERNAL_ICU_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal icu library with ENABLE_ICU=OFF")
endif()
message(STATUS "Build without ICU (support for collations and charset conversion functions will be disabled)")
return()
endif()
option (USE_INTERNAL_ICU_LIBRARY "Set to FALSE to use system ICU library instead of bundled" ON)
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/icu/icu4c/LICENSE")
if (USE_INTERNAL_ICU_LIBRARY)
message (WARNING "submodule contrib/icu is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal ICU")
set (USE_INTERNAL_ICU_LIBRARY 0)
endif ()
set (MISSING_INTERNAL_ICU_LIBRARY 1)
endif ()
if(NOT USE_INTERNAL_ICU_LIBRARY)
if (APPLE)
set(ICU_ROOT "/usr/local/opt/icu4c" CACHE STRING "")
endif()
find_package(ICU COMPONENTS i18n uc data) # TODO: remove Modules/FindICU.cmake after cmake 3.7
#set (ICU_LIBRARIES ${ICU_I18N_LIBRARY} ${ICU_UC_LIBRARY} ${ICU_DATA_LIBRARY} CACHE STRING "")
if(ICU_FOUND)
set(USE_ICU 1)
else()
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system ICU")
endif()
endif()
if (ICU_LIBRARY AND ICU_INCLUDE_DIR)
set (USE_ICU 1)
elseif (NOT MISSING_INTERNAL_ICU_LIBRARY)
set (USE_INTERNAL_ICU_LIBRARY 1)
set (ICU_LIBRARIES icui18n icuuc icudata)
set (USE_ICU 1)
endif ()
if(USE_ICU)
message(STATUS "Using icu=${USE_ICU}: ${ICU_INCLUDE_DIR} : ${ICU_LIBRARIES}")
else()
message(STATUS "Build without ICU (support for collations and charset conversion functions will be disabled)")
endif()
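# Illustrative sketch only. With the system library (-DUSE_INTERNAL_ICU_LIBRARY=OFF)
# the lookup is driven by ICU_ROOT, which defaults to /usr/local/opt/icu4c on macOS
# as set above and can be overridden at configure time (e.g. -DICU_ROOT=/opt/icu).
# "icu_example" below is an assumed consumer target.
if(USE_ICU)
    add_library(icu_example STATIC icu_example.cpp)
    target_link_libraries(icu_example PRIVATE ${ICU_LIBRARIES})
endif()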

View File

@ -1,25 +0,0 @@
OPTION(ENABLE_KRB5 "Enable krb5" ${ENABLE_LIBRARIES})
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/krb5/README")
message (WARNING "submodule contrib/krb5 is missing. to fix try run: \n git submodule update --init")
set (ENABLE_KRB5 0)
endif ()
if (NOT CMAKE_SYSTEM_NAME MATCHES "Linux" AND NOT (CMAKE_SYSTEM_NAME MATCHES "Darwin" AND NOT CMAKE_CROSSCOMPILING))
message (WARNING "krb5 disabled in non-Linux and non-native-Darwin environments")
set (ENABLE_KRB5 0)
endif ()
if (ENABLE_KRB5)
set (USE_KRB5 1)
set (KRB5_LIBRARY krb5)
set (KRB5_INCLUDE_DIR
"${ClickHouse_SOURCE_DIR}/contrib/krb5/src/include"
"${ClickHouse_BINARY_DIR}/contrib/krb5-cmake/include"
)
endif ()
message (STATUS "Using krb5=${USE_KRB5}: ${KRB5_INCLUDE_DIR} : ${KRB5_LIBRARY}")

View File

@ -1,100 +0,0 @@
option (ENABLE_LDAP "Enable LDAP" ${ENABLE_LIBRARIES})
if (NOT ENABLE_LDAP)
if(USE_INTERNAL_LDAP_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal LDAP library with ENABLE_LDAP=OFF")
endif ()
return()
endif()
option (USE_INTERNAL_LDAP_LIBRARY "Set to FALSE to use system *LDAP library instead of bundled" ON)
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/openldap/README")
if (USE_INTERNAL_LDAP_LIBRARY)
message (WARNING "Submodule contrib/openldap is missing. To fix try running:\n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal LDAP library")
endif ()
set (USE_INTERNAL_LDAP_LIBRARY 0)
set (MISSING_INTERNAL_LDAP_LIBRARY 1)
endif ()
set (OPENLDAP_USE_STATIC_LIBS ${USE_STATIC_LIBRARIES})
set (OPENLDAP_USE_REENTRANT_LIBS 1)
if (NOT USE_INTERNAL_LDAP_LIBRARY)
if (OPENLDAP_USE_STATIC_LIBS)
message (WARNING "Unable to use external static OpenLDAP libraries, falling back to the bundled version.")
message (${RECONFIGURE_MESSAGE_LEVEL} "Unable to use external OpenLDAP")
set (USE_INTERNAL_LDAP_LIBRARY 1)
else ()
if (APPLE AND NOT OPENLDAP_ROOT_DIR)
set (OPENLDAP_ROOT_DIR "/usr/local/opt/openldap")
endif ()
find_package (OpenLDAP)
if (NOT OPENLDAP_FOUND)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system OpenLDAP")
endif()
endif ()
endif ()
if (NOT OPENLDAP_FOUND AND NOT MISSING_INTERNAL_LDAP_LIBRARY)
string (TOLOWER "${CMAKE_SYSTEM_NAME}" _system_name)
string (TOLOWER "${CMAKE_SYSTEM_PROCESSOR}" _system_processor)
if (
"${_system_processor}" STREQUAL "amd64" OR
"${_system_processor}" STREQUAL "x64"
)
set (_system_processor "x86_64")
elseif (
"${_system_processor}" STREQUAL "arm64"
)
set (_system_processor "aarch64")
endif ()
if (
( "${_system_name}" STREQUAL "linux" AND "${_system_processor}" STREQUAL "x86_64" ) OR
( "${_system_name}" STREQUAL "linux" AND "${_system_processor}" STREQUAL "aarch64" ) OR
( "${_system_name}" STREQUAL "linux" AND "${_system_processor}" STREQUAL "ppc64le" ) OR
( "${_system_name}" STREQUAL "freebsd" AND "${_system_processor}" STREQUAL "x86_64" ) OR
( "${_system_name}" STREQUAL "freebsd" AND "${_system_processor}" STREQUAL "aarch64" ) OR
( "${_system_name}" STREQUAL "darwin" AND "${_system_processor}" STREQUAL "x86_64" ) OR
( "${_system_name}" STREQUAL "darwin" AND "${_system_processor}" STREQUAL "aarch64" )
)
set (_ldap_supported_platform TRUE)
endif ()
if (NOT _ldap_supported_platform)
message (WARNING "LDAP support using the bundled library is not implemented for ${CMAKE_SYSTEM_NAME} ${CMAKE_SYSTEM_PROCESSOR} platform.")
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot enable LDAP support")
elseif (NOT USE_SSL)
message (WARNING "LDAP support using the bundled library is not possible if SSL is not used.")
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot enable LDAP support")
else ()
set (USE_INTERNAL_LDAP_LIBRARY 1)
set (OPENLDAP_ROOT_DIR "${ClickHouse_SOURCE_DIR}/contrib/openldap")
set (OPENLDAP_INCLUDE_DIRS
"${ClickHouse_SOURCE_DIR}/contrib/openldap-cmake/${_system_name}_${_system_processor}/include"
"${ClickHouse_SOURCE_DIR}/contrib/openldap/include"
)
# Below, 'ldap'/'ldap_r' and 'lber' will be resolved to
# the targets defined in contrib/openldap-cmake/CMakeLists.txt
if (OPENLDAP_USE_REENTRANT_LIBS)
set (OPENLDAP_LDAP_LIBRARY "ldap_r")
else ()
set (OPENLDAP_LDAP_LIBRARY "ldap")
endif()
set (OPENLDAP_LBER_LIBRARY "lber")
set (OPENLDAP_LIBRARIES ${OPENLDAP_LDAP_LIBRARY} ${OPENLDAP_LBER_LIBRARY})
set (OPENLDAP_FOUND 1)
endif ()
endif ()
if (OPENLDAP_FOUND)
set (USE_LDAP 1)
endif ()
message (STATUS "Using ldap=${USE_LDAP}: ${OPENLDAP_INCLUDE_DIRS} : ${OPENLDAP_LIBRARIES}")

View File

@ -1,40 +0,0 @@
option(ENABLE_GSASL_LIBRARY "Enable gsasl library" ${ENABLE_LIBRARIES})
if (NOT ENABLE_GSASL_LIBRARY)
if(USE_INTERNAL_LIBGSASL_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal libgsasl library with ENABLE_GSASL_LIBRARY=OFF")
endif()
return()
endif()
option (USE_INTERNAL_LIBGSASL_LIBRARY "Set to FALSE to use system libgsasl library instead of bundled" ON)
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libgsasl/src/gsasl.h")
if (USE_INTERNAL_LIBGSASL_LIBRARY)
message (WARNING "submodule contrib/libgsasl is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal libgsasl")
set (USE_INTERNAL_LIBGSASL_LIBRARY 0)
endif ()
set (MISSING_INTERNAL_LIBGSASL_LIBRARY 1)
endif ()
if (NOT USE_INTERNAL_LIBGSASL_LIBRARY)
find_library (LIBGSASL_LIBRARY gsasl)
find_path (LIBGSASL_INCLUDE_DIR NAMES gsasl.h PATHS ${LIBGSASL_INCLUDE_PATHS})
if (NOT LIBGSASL_LIBRARY OR NOT LIBGSASL_INCLUDE_DIR)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system libgsasl")
endif ()
endif ()
if (LIBGSASL_LIBRARY AND LIBGSASL_INCLUDE_DIR)
elseif (NOT MISSING_INTERNAL_LIBGSASL_LIBRARY)
set (LIBGSASL_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/libgsasl/src" "${ClickHouse_SOURCE_DIR}/contrib/libgsasl/linux_x86_64/include")
set (USE_INTERNAL_LIBGSASL_LIBRARY 1)
set (LIBGSASL_LIBRARY gsasl)
endif ()
if(LIBGSASL_LIBRARY AND LIBGSASL_INCLUDE_DIR)
set (USE_LIBGSASL 1)
endif()
message (STATUS "Using libgsasl=${USE_LIBGSASL}: ${LIBGSASL_INCLUDE_DIR} : ${LIBGSASL_LIBRARY}")

View File

@ -1,31 +0,0 @@
option(ENABLE_LIBPQXX "Enable libpqxx" ${ENABLE_LIBRARIES})
if (NOT ENABLE_LIBPQXX)
return()
endif()
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libpqxx/src")
message (WARNING "submodule contrib/libpqxx is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal libpqxx library")
set (USE_LIBPQXX 0)
return()
endif()
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libpq/include")
message (ERROR "submodule contrib/libpq is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal libpq needed for libpqxx")
set (USE_LIBPQXX 0)
return()
endif()
if (NOT USE_INTERNAL_SSL_LIBRARY)
set (USE_LIBPQXX 0)
else ()
set (USE_LIBPQXX 1)
set (LIBPQXX_LIBRARY libpqxx)
set (LIBPQ_LIBRARY libpq)
set (LIBPQXX_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/libpqxx/include")
set (LIBPQ_ROOT_DIR "${ClickHouse_SOURCE_DIR}/contrib/libpq")
message (STATUS "Using libpqxx=${USE_LIBPQXX}: ${LIBPQXX_INCLUDE_DIR} : ${LIBPQXX_LIBRARY}")
message (STATUS "Using libpq: ${LIBPQ_ROOT_DIR} : ${LIBPQ_INCLUDE_DIR} : ${LIBPQ_LIBRARY}")
endif()

View File

@ -1,11 +0,0 @@
option(USE_LIBPROTOBUF_MUTATOR "Enable libprotobuf-mutator" ${ENABLE_FUZZING})
if (NOT USE_LIBPROTOBUF_MUTATOR)
return()
endif()
set(LibProtobufMutator_SOURCE_DIR "${ClickHouse_SOURCE_DIR}/contrib/libprotobuf-mutator")
if (NOT EXISTS "${LibProtobufMutator_SOURCE_DIR}/README.md")
message (ERROR "submodule contrib/libprotobuf-mutator is missing. to fix try run: \n git submodule update --init")
endif()

View File

@ -1,22 +0,0 @@
if (OS_DARWIN AND COMPILER_GCC)
message (WARNING "libuv cannot be built with GCC in macOS due to a bug: https://gcc.gnu.org/bugzilla/show_bug.cgi?id=93082")
SET(MISSING_INTERNAL_LIBUV_LIBRARY 1)
return()
endif()
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libuv")
message (WARNING "submodule contrib/libuv is missing. to fix try run: \n git submodule update --init")
SET(MISSING_INTERNAL_LIBUV_LIBRARY 1)
return()
endif()
if (MAKE_STATIC_LIBRARIES)
set (LIBUV_LIBRARY uv_a)
else()
set (LIBUV_LIBRARY uv)
endif()
set (LIBUV_ROOT_DIR "${ClickHouse_SOURCE_DIR}/contrib/libuv")
set (LIBUV_INCLUDE_DIR "${LIBUV_ROOT_DIR}/include")
message (STATUS "Using libuv: ${LIBUV_ROOT_DIR} : ${LIBUV_LIBRARY}")

View File

@ -1,34 +0,0 @@
option (USE_INTERNAL_LIBXML2_LIBRARY "Set to FALSE to use system libxml2 library instead of bundled" ON)
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libxml2/libxml.h")
if (USE_INTERNAL_LIBXML2_LIBRARY)
message (WARNING "submodule contrib/libxml2 is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal libxml")
set (USE_INTERNAL_LIBXML2_LIBRARY 0)
endif ()
set (MISSING_INTERNAL_LIBXML2_LIBRARY 1)
endif ()
if (NOT USE_INTERNAL_LIBXML2_LIBRARY)
find_package (LibXml2)
#find_library (LIBXML2_LIBRARY libxml2)
#find_path (LIBXML2_INCLUDE_DIR NAMES libxml.h PATHS ${LIBXML2_INCLUDE_PATHS})
if (NOT LIBXML2_LIBRARY OR NOT LIBXML2_INCLUDE_DIR)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system libxml2")
endif ()
if (USE_STATIC_LIBRARIES)
find_package(LibLZMA)
set (LIBXML2_LIBRARIES ${LIBXML2_LIBRARIES} ${LIBLZMA_LIBRARIES})
endif ()
endif ()
if (LIBXML2_LIBRARY AND LIBXML2_INCLUDE_DIR)
elseif (NOT MISSING_INTERNAL_LIBXML2_LIBRARY)
set (LIBXML2_INCLUDE_DIR ${ClickHouse_SOURCE_DIR}/contrib/libxml2/include ${ClickHouse_SOURCE_DIR}/contrib/libxml2-cmake/linux_x86_64/include)
set (USE_INTERNAL_LIBXML2_LIBRARY 1)
set (LIBXML2_LIBRARIES libxml2)
endif ()
message (STATUS "Using libxml2: ${LIBXML2_INCLUDE_DIR} : ${LIBXML2_LIBRARIES}")

View File

@ -1,79 +0,0 @@
if (APPLE OR NOT ARCH_AMD64 OR SANITIZE STREQUAL "undefined")
set (ENABLE_EMBEDDED_COMPILER_DEFAULT OFF)
else()
set (ENABLE_EMBEDDED_COMPILER_DEFAULT ON)
endif()
option (ENABLE_EMBEDDED_COMPILER "Enable support for 'compile_expressions' option for query execution" ${ENABLE_EMBEDDED_COMPILER_DEFAULT})
if (NOT ENABLE_EMBEDDED_COMPILER)
set (USE_EMBEDDED_COMPILER 0)
return()
endif()
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/llvm/llvm/CMakeLists.txt")
message (${RECONFIGURE_MESSAGE_LEVEL} "submodule /contrib/llvm is missing. to fix try run: \n git submodule update --init")
endif ()
set (USE_EMBEDDED_COMPILER 1)
set (LLVM_FOUND 1)
set (LLVM_VERSION "12.0.0bundled")
set (LLVM_INCLUDE_DIRS
"${ClickHouse_SOURCE_DIR}/contrib/llvm/llvm/include"
"${ClickHouse_BINARY_DIR}/contrib/llvm/llvm/include"
)
set (LLVM_LIBRARY_DIRS "${ClickHouse_BINARY_DIR}/contrib/llvm/llvm")
message(STATUS "LLVM include Directory: ${LLVM_INCLUDE_DIRS}")
message(STATUS "LLVM library Directory: ${LLVM_LIBRARY_DIRS}")
message(STATUS "LLVM C++ compiler flags: ${LLVM_CXXFLAGS}")
# This list was generated by listing all LLVM libraries, compiling the binary and removing all libraries while it still compiles.
set (REQUIRED_LLVM_LIBRARIES
LLVMExecutionEngine
LLVMRuntimeDyld
LLVMX86CodeGen
LLVMX86Desc
LLVMX86Info
LLVMAsmPrinter
LLVMDebugInfoDWARF
LLVMGlobalISel
LLVMSelectionDAG
LLVMMCDisassembler
LLVMPasses
LLVMCodeGen
LLVMipo
LLVMBitWriter
LLVMInstrumentation
LLVMScalarOpts
LLVMAggressiveInstCombine
LLVMInstCombine
LLVMVectorize
LLVMTransformUtils
LLVMTarget
LLVMAnalysis
LLVMProfileData
LLVMObject
LLVMBitReader
LLVMCore
LLVMRemarks
LLVMBitstreamReader
LLVMMCParser
LLVMMC
LLVMBinaryFormat
LLVMDebugInfoCodeView
LLVMSupport
LLVMDemangle
)
#function(llvm_libs_all REQUIRED_LLVM_LIBRARIES)
# llvm_map_components_to_libnames (result all)
# if (USE_STATIC_LIBRARIES OR NOT "LLVM" IN_LIST result)
# list (REMOVE_ITEM result "LTO" "LLVM")
# else()
# set (result "LLVM")
# endif ()
# list (APPEND result ${CMAKE_DL_LIBS} ${ZLIB_LIBRARIES})
# set (${REQUIRED_LLVM_LIBRARIES} ${result} PARENT_SCOPE)
#endfunction()
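# A hedged illustration of consuming the embedded LLVM; "jit_example" and its
# source are assumed names. The pruned library list above is linked together
# with the bundled include and library directories.
if(USE_EMBEDDED_COMPILER)
    add_library(jit_example STATIC jit_example.cpp)
    target_include_directories(jit_example SYSTEM PRIVATE ${LLVM_INCLUDE_DIRS})
    target_link_directories(jit_example PRIVATE ${LLVM_LIBRARY_DIRS})
    target_link_libraries(jit_example PRIVATE ${REQUIRED_LLVM_LIBRARIES})
endif()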

View File

@ -1,5 +0,0 @@
if (ENABLE_ODBC AND NOT USE_INTERNAL_ODBC_LIBRARY)
set (LTDL_PATHS "/usr/local/opt/libtool/lib")
find_library (LTDL_LIBRARY ltdl PATHS ${LTDL_PATHS} REQUIRED)
message (STATUS "Using ltdl: ${LTDL_LIBRARY}")
endif ()

View File

@ -1,2 +0,0 @@
set(MINISELECT_INCLUDE_DIR ${ClickHouse_SOURCE_DIR}/contrib/miniselect/include)
message(STATUS "Using miniselect: ${MINISELECT_INCLUDE_DIR}")

View File

@ -1,37 +0,0 @@
option (ENABLE_MSGPACK "Enable msgpack library" ${ENABLE_LIBRARIES})
if(NOT ENABLE_MSGPACK)
if(USE_INTERNAL_MSGPACK_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal msgpack with ENABLE_MSGPACK=OFF")
endif()
return()
endif()
option (USE_INTERNAL_MSGPACK_LIBRARY "Set to FALSE to use system msgpack library instead of bundled" ON)
if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/msgpack-c/include/msgpack.hpp")
if(USE_INTERNAL_MSGPACK_LIBRARY)
message(WARNING "Submodule contrib/msgpack-c is missing. To fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal msgpack")
set(USE_INTERNAL_MSGPACK_LIBRARY 0)
endif()
set(MISSING_INTERNAL_MSGPACK_LIBRARY 1)
endif()
if(NOT USE_INTERNAL_MSGPACK_LIBRARY)
find_path(MSGPACK_INCLUDE_DIR NAMES msgpack.hpp PATHS ${MSGPACK_INCLUDE_PATHS})
if(NOT MSGPACK_INCLUDE_DIR)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system msgpack")
endif()
endif()
if(NOT MSGPACK_INCLUDE_DIR AND NOT MISSING_INTERNAL_MSGPACK_LIBRARY)
set(MSGPACK_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/msgpack-c/include")
set(USE_INTERNAL_MSGPACK_LIBRARY 1)
endif()
if (MSGPACK_INCLUDE_DIR)
set(USE_MSGPACK 1)
endif()
message(STATUS "Using msgpack=${USE_MSGPACK}: ${MSGPACK_INCLUDE_DIR}")

View File

@ -1,78 +0,0 @@
if(OS_LINUX AND OPENSSL_FOUND)
option(ENABLE_MYSQL "Enable MySQL" ${ENABLE_LIBRARIES})
else ()
option(ENABLE_MYSQL "Enable MySQL" FALSE)
endif ()
if(NOT ENABLE_MYSQL)
if (USE_INTERNAL_MYSQL_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal mysql library with ENABLE_MYSQL=OFF")
endif ()
message (STATUS "Build without mysqlclient (support for MYSQL dictionary source will be disabled)")
return()
endif()
option(USE_INTERNAL_MYSQL_LIBRARY "Set to FALSE to use system mysqlclient library instead of bundled" ON)
if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/mariadb-connector-c/README")
if(USE_INTERNAL_MYSQL_LIBRARY)
message(WARNING "submodule contrib/mariadb-connector-c is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal mysql library")
set(USE_INTERNAL_MYSQL_LIBRARY 0)
endif()
set(MISSING_INTERNAL_MYSQL_LIBRARY 1)
endif()
if (NOT USE_INTERNAL_MYSQL_LIBRARY)
set (MYSQL_LIB_PATHS
"/usr/local/opt/mysql/lib"
"/usr/local/lib"
"/usr/local/lib64"
"/usr/local/lib/mariadb" # macos brew mariadb-connector-c
"/usr/mysql/lib"
"/usr/mysql/lib64"
"/usr/lib"
"/usr/lib64"
"/lib"
"/lib64")
set (MYSQL_INCLUDE_PATHS
"/usr/local/opt/mysql/include"
"/usr/mysql/include"
"/usr/local/include"
"/usr/include/mariadb"
"/usr/include/mysql"
"/usr/include")
find_path (MYSQL_INCLUDE_DIR NAMES mysql.h mysql/mysql.h mariadb/mysql.h PATHS ${MYSQL_INCLUDE_PATHS} PATH_SUFFIXES mysql)
if (USE_STATIC_LIBRARIES)
find_library (STATIC_MYSQLCLIENT_LIB NAMES mariadbclient mysqlclient PATHS ${MYSQL_LIB_PATHS} PATH_SUFFIXES mysql)
else ()
find_library (MYSQLCLIENT_LIBRARIES NAMES mariadb mariadbclient mysqlclient PATHS ${MYSQL_LIB_PATHS} PATH_SUFFIXES mysql)
endif ()
if (MYSQL_INCLUDE_DIR AND (STATIC_MYSQLCLIENT_LIB OR MYSQLCLIENT_LIBRARIES))
set (USE_MYSQL 1)
set (MYSQLXX_LIBRARY mysqlxx)
if (APPLE)
# /usr/local/include/mysql/mysql_com.h:1011:10: fatal error: mysql/udf_registration_types.h: No such file or directory
set(MYSQL_INCLUDE_DIR ${MYSQL_INCLUDE_DIR} ${MYSQL_INCLUDE_DIR}/mysql)
endif ()
else ()
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system mysql library")
endif ()
endif ()
if (NOT USE_MYSQL AND NOT MISSING_INTERNAL_MYSQL_LIBRARY)
set (MYSQLCLIENT_LIBRARIES mariadbclient)
set (MYSQLXX_LIBRARY mysqlxx)
set (USE_MYSQL 1)
set (USE_INTERNAL_MYSQL_LIBRARY 1)
endif()
if (USE_MYSQL)
message (STATUS "Using mysqlclient=${USE_MYSQL}: ${MYSQL_INCLUDE_DIR} : ${MYSQLCLIENT_LIBRARIES}; staticlib=${STATIC_MYSQLCLIENT_LIB}")
else ()
message (STATUS "Build without mysqlclient (support for MYSQL dictionary source will be disabled)")
endif ()
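# Minimal sketch of a consumer that honours the static/shared split chosen above;
# "mysql_example" and its source file are assumptions for illustration.
if(USE_MYSQL)
    add_library(mysql_example STATIC mysql_example.cpp)
    if(MYSQL_INCLUDE_DIR)
        target_include_directories(mysql_example SYSTEM PRIVATE ${MYSQL_INCLUDE_DIR})
    endif()
    if(USE_STATIC_LIBRARIES AND STATIC_MYSQLCLIENT_LIB)
        target_link_libraries(mysql_example PRIVATE ${STATIC_MYSQLCLIENT_LIB})
    else()
        target_link_libraries(mysql_example PRIVATE ${MYSQLCLIENT_LIBRARIES})
    endif()
endif()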

View File

@ -1,16 +0,0 @@
if (NOT ENABLE_ODBC)
return ()
endif ()
if (NOT USE_INTERNAL_NANODBC_LIBRARY)
message (FATAL_ERROR "Only the bundled nanodbc library can be used")
endif ()
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/nanodbc/nanodbc")
message (FATAL_ERROR "submodule contrib/nanodbc is missing. to fix try run: \n git submodule update --init")
endif()
set (NANODBC_LIBRARY nanodbc)
set (NANODBC_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/nanodbc/nanodbc")
message (STATUS "Using nanodbc: ${NANODBC_INCLUDE_DIR} : ${NANODBC_LIBRARY}")

View File

@ -1,32 +0,0 @@
option(ENABLE_NLP "Enable NLP functions support" ${ENABLE_LIBRARIES})
if (NOT ENABLE_NLP)
message (STATUS "NLP functions disabled")
return()
endif()
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libstemmer_c/Makefile")
message (WARNING "submodule contrib/libstemmer_c is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal libstemmer_c library, NLP functions will be disabled")
set (USE_NLP 0)
return()
endif ()
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/wordnet-blast/wnb")
message (WARNING "submodule contrib/wordnet-blast is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal wordnet-blast library, NLP functions will be disabled")
set (USE_NLP 0)
return()
endif ()
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/lemmagen-c/README.md")
message (WARNING "submodule contrib/lemmagen-c is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal lemmagen-c library, NLP functions will be disabled")
set (USE_NLP 0)
return()
endif ()
set (USE_NLP 1)
message (STATUS "Using Libraries for NLP functions: contrib/wordnet-blast, contrib/libstemmer_c, contrib/lemmagen-c")

View File

@ -1,24 +0,0 @@
option(ENABLE_NURAFT "Enable NuRaft" ${ENABLE_LIBRARIES})
if (NOT ENABLE_NURAFT)
return()
endif()
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/NuRaft/src")
message (WARNING "submodule contrib/NuRaft is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal NuRaft library")
set (USE_NURAFT 0)
return()
endif ()
if (NOT OS_FREEBSD)
set (USE_NURAFT 1)
set (NURAFT_LIBRARY nuraft)
set (NURAFT_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/NuRaft/include")
message (STATUS "Using NuRaft=${USE_NURAFT}: ${NURAFT_INCLUDE_DIR} : ${NURAFT_LIBRARY}")
else()
set (USE_NURAFT 0)
message (STATUS "Using internal NuRaft library on FreeBSD and Darwin is not supported")
endif()

View File

@ -1,55 +0,0 @@
option (ENABLE_ODBC "Enable ODBC library" ${ENABLE_LIBRARIES})
if (NOT OS_LINUX)
if (ENABLE_ODBC)
message(STATUS "ODBC is only supported on Linux")
endif()
set (ENABLE_ODBC OFF CACHE INTERNAL "")
endif ()
if (NOT ENABLE_ODBC)
if (USE_INTERNAL_ODBC_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal ODBC with ENABLE_ODBC=OFF")
endif()
add_library (unixodbc INTERFACE)
target_compile_definitions (unixodbc INTERFACE USE_ODBC=0)
message (STATUS "Not using unixodbc")
return()
endif()
option (USE_INTERNAL_ODBC_LIBRARY "Use internal ODBC library" ON)
if (NOT USE_INTERNAL_ODBC_LIBRARY)
find_library (LIBRARY_ODBC NAMES unixodbc odbc)
find_path (INCLUDE_ODBC sql.h)
if(LIBRARY_ODBC AND INCLUDE_ODBC)
add_library (unixodbc INTERFACE)
set_target_properties (unixodbc PROPERTIES INTERFACE_LINK_LIBRARIES ${LIBRARY_ODBC})
set_target_properties (unixodbc PROPERTIES INTERFACE_INCLUDE_DIRECTORIES ${INCLUDE_ODBC})
set_target_properties (unixodbc PROPERTIES INTERFACE_COMPILE_DEFINITIONS USE_ODBC=1)
if (USE_STATIC_LIBRARIES)
find_library(LTDL_LIBRARY ltdl)
if (LTDL_LIBRARY)
target_link_libraries(unixodbc INTERFACE ${LTDL_LIBRARY})
endif()
endif()
set(EXTERNAL_ODBC_LIBRARY_FOUND 1)
message (STATUS "Found odbc: ${LIBRARY_ODBC}")
else()
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system ODBC library")
set(EXTERNAL_ODBC_LIBRARY_FOUND 0)
endif()
endif()
if (NOT EXTERNAL_ODBC_LIBRARY_FOUND)
set (USE_INTERNAL_ODBC_LIBRARY 1)
endif ()
set (USE_INTERNAL_NANODBC_LIBRARY 1)
message (STATUS "Using unixodbc")

View File

@ -1,57 +0,0 @@
option (ENABLE_ORC "Enable ORC" ${ENABLE_LIBRARIES})
if(NOT ENABLE_ORC)
if(USE_INTERNAL_ORC_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal ORC library with ENABLE_ORC=OFF")
endif()
return()
endif()
if (USE_INTERNAL_PARQUET_LIBRARY)
option(USE_INTERNAL_ORC_LIBRARY "Set to FALSE to use system ORC instead of bundled (experimental - set to OFF at your own risk)"
ON)
elseif(USE_INTERNAL_ORC_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Currently the internal ORC can be built only with the bundled Parquet")
endif()
include(cmake/find/snappy.cmake)
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/orc/c++/include/orc/OrcFile.hh")
if(USE_INTERNAL_ORC_LIBRARY)
message(WARNING "submodule contrib/orc is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal ORC")
set(USE_INTERNAL_ORC_LIBRARY 0)
endif()
set(MISSING_INTERNAL_ORC_LIBRARY 1)
endif ()
if (NOT USE_INTERNAL_ORC_LIBRARY)
find_package(orc)
if (NOT ORC_LIBRARY OR NOT ORC_INCLUDE_DIR)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system ORC")
endif ()
endif ()
#if (USE_INTERNAL_ORC_LIBRARY)
#find_path(CYRUS_SASL_INCLUDE_DIR sasl/sasl.h)
#find_library(CYRUS_SASL_SHARED_LIB sasl2)
#if (NOT CYRUS_SASL_INCLUDE_DIR OR NOT CYRUS_SASL_SHARED_LIB)
# set(USE_ORC 0)
#endif()
#endif()
if (ORC_LIBRARY AND ORC_INCLUDE_DIR)
set(USE_ORC 1)
elseif(NOT MISSING_INTERNAL_ORC_LIBRARY AND ARROW_LIBRARY AND SNAPPY_LIBRARY) # (LIBGSASL_LIBRARY AND LIBXML2_LIBRARY)
set(ORC_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/orc/c++/include")
set(ORC_LIBRARY orc)
set(USE_ORC 1)
set(USE_INTERNAL_ORC_LIBRARY 1)
else()
message (${RECONFIGURE_MESSAGE_LEVEL}
"Can't enable ORC support - missing dependencies. Missing internal orc=${MISSING_INTERNAL_ORC_LIBRARY}. "
"arrow=${ARROW_LIBRARY} snappy=${SNAPPY_LIBRARY}")
set(USE_INTERNAL_ORC_LIBRARY 0)
endif()
message (STATUS "Using internal=${USE_INTERNAL_ORC_LIBRARY} orc=${USE_ORC}: ${ORC_INCLUDE_DIR} : ${ORC_LIBRARY}")

View File

@ -1,171 +0,0 @@
if (Protobuf_PROTOC_EXECUTABLE)
option (ENABLE_PARQUET "Enable parquet" ${ENABLE_LIBRARIES})
elseif(ENABLE_PARQUET OR USE_INTERNAL_PARQUET_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use parquet without protoc executable")
endif()
if (NOT ENABLE_PARQUET)
if(USE_INTERNAL_PARQUET_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal parquet with ENABLE_PARQUET=OFF")
endif()
message(STATUS "Building without Parquet support")
return()
endif()
if (NOT OS_FREEBSD) # Freebsd: ../contrib/arrow/cpp/src/arrow/util/bit-util.h:27:10: fatal error: endian.h: No such file or directory
option(USE_INTERNAL_PARQUET_LIBRARY "Set to FALSE to use system parquet library instead of bundled" ON)
elseif(USE_INTERNAL_PARQUET_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Using internal parquet is not supported on FreeBSD")
endif()
if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/arrow/cpp/CMakeLists.txt")
if(USE_INTERNAL_PARQUET_LIBRARY)
message(WARNING "submodule contrib/arrow (required for Parquet) is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal parquet library")
set(USE_INTERNAL_PARQUET_LIBRARY 0)
endif()
set(MISSING_INTERNAL_PARQUET_LIBRARY 1)
endif()
if (NOT SNAPPY_LIBRARY)
include(cmake/find/snappy.cmake)
endif()
if(NOT USE_INTERNAL_PARQUET_LIBRARY)
find_package(Arrow)
find_package(Parquet)
find_library(THRIFT_LIBRARY thrift)
find_library(UTF8_PROC_LIBRARY utf8proc)
find_package(BZip2)
if(USE_STATIC_LIBRARIES)
find_library(ARROW_DEPS_LIBRARY arrow_bundled_dependencies)
if (ARROW_DEPS_LIBRARY)
set(ARROW_IMPORT_OBJ_DIR "${CMAKE_CURRENT_BINARY_DIR}/contrib/arrow-cmake/imported-objects")
set(ARROW_OTHER_OBJS
"${ARROW_IMPORT_OBJ_DIR}/jemalloc.pic.o"
"${ARROW_IMPORT_OBJ_DIR}/arena.pic.o"
"${ARROW_IMPORT_OBJ_DIR}/background_thread.pic.o"
"${ARROW_IMPORT_OBJ_DIR}/base.pic.o"
"${ARROW_IMPORT_OBJ_DIR}/bin.pic.o"
"${ARROW_IMPORT_OBJ_DIR}/bitmap.pic.o"
"${ARROW_IMPORT_OBJ_DIR}/ckh.pic.o"
"${ARROW_IMPORT_OBJ_DIR}/ctl.pic.o"
"${ARROW_IMPORT_OBJ_DIR}/div.pic.o"
"${ARROW_IMPORT_OBJ_DIR}/extent.pic.o"
"${ARROW_IMPORT_OBJ_DIR}/extent_dss.pic.o"
"${ARROW_IMPORT_OBJ_DIR}/extent_mmap.pic.o"
# skip hash
"${ARROW_IMPORT_OBJ_DIR}/hook.pic.o"
"${ARROW_IMPORT_OBJ_DIR}/large.pic.o"
"${ARROW_IMPORT_OBJ_DIR}/log.pic.o"
"${ARROW_IMPORT_OBJ_DIR}/malloc_io.pic.o"
"${ARROW_IMPORT_OBJ_DIR}/mutex.pic.o"
"${ARROW_IMPORT_OBJ_DIR}/mutex_pool.pic.o"
"${ARROW_IMPORT_OBJ_DIR}/nstime.pic.o"
"${ARROW_IMPORT_OBJ_DIR}/pages.pic.o"
# skip prng
"${ARROW_IMPORT_OBJ_DIR}/prof.pic.o"
"${ARROW_IMPORT_OBJ_DIR}/rtree.pic.o"
"${ARROW_IMPORT_OBJ_DIR}/stats.pic.o"
"${ARROW_IMPORT_OBJ_DIR}/sc.pic.o"
"${ARROW_IMPORT_OBJ_DIR}/sz.pic.o"
"${ARROW_IMPORT_OBJ_DIR}/tcache.pic.o"
# skip ticker
"${ARROW_IMPORT_OBJ_DIR}/tsd.pic.o"
"${ARROW_IMPORT_OBJ_DIR}/test_hooks.pic.o"
"${ARROW_IMPORT_OBJ_DIR}/witness.pic.o"
)
add_custom_command(OUTPUT ${ARROW_OTHER_OBJS}
COMMAND
mkdir -p "${ARROW_IMPORT_OBJ_DIR}" &&
cd "${ARROW_IMPORT_OBJ_DIR}" &&
"${CMAKE_AR}" x "${ARROW_DEPS_LIBRARY}"
)
set_source_files_properties(jemalloc.pic.o PROPERTIES EXTERNAL_OBJECT true GENERATED true)
add_library(imported_arrow_deps STATIC ${ARROW_OTHER_OBJS})
set(ARROW_LIBRARY ${ARROW_STATIC_LIB}
imported_arrow_deps ${THRIFT_LIBRARY} ${UTF8_PROC_LIBRARY} ${BZIP2_LIBRARIES} ${SNAPPY_LIBRARY})
else()
message(WARNING "Using external static Arrow does not always work. "
"Could not find arrow_bundled_dependencies.a. If compilation fails, "
"Try: -D\"USE_INTERNAL_PARQUET_LIBRARY\"=ON or -D\"ENABLE_PARQUET\"=OFF or "
"-D\"USE_STATIC_LIBRARIES\"=OFF")
set(ARROW_LIBRARY ${ARROW_STATIC_LIB})
endif()
set(PARQUET_LIBRARY ${PARQUET_STATIC_LIB})
else()
set(ARROW_LIBRARY ${ARROW_SHARED_LIB})
set(PARQUET_LIBRARY ${PARQUET_SHARED_LIB})
endif()
if(ARROW_INCLUDE_DIR AND ARROW_LIBRARY AND PARQUET_INCLUDE_DIR AND PARQUET_LIBRARY AND THRIFT_LIBRARY AND UTF8_PROC_LIBRARY AND BZIP2_FOUND)
set(USE_PARQUET 1)
set(EXTERNAL_PARQUET_FOUND 1)
else()
message (${RECONFIGURE_MESSAGE_LEVEL}
"Can't find system parquet: arrow=${ARROW_INCLUDE_DIR}:${ARROW_LIBRARY} ;"
" parquet=${PARQUET_INCLUDE_DIR}:${PARQUET_LIBRARY} ;"
" thrift=${THRIFT_LIBRARY} ;")
set(EXTERNAL_PARQUET_FOUND 0)
endif()
endif()
if(NOT EXTERNAL_PARQUET_FOUND AND NOT MISSING_INTERNAL_PARQUET_LIBRARY AND NOT OS_FREEBSD)
if(SNAPPY_LIBRARY)
set(CAN_USE_INTERNAL_PARQUET_LIBRARY 1)
else()
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal parquet library without snappy")
endif()
include(CheckCXXSourceCompiles)
if(NOT USE_INTERNAL_DOUBLE_CONVERSION_LIBRARY)
set(CMAKE_REQUIRED_LIBRARIES ${DOUBLE_CONVERSION_LIBRARIES})
set(CMAKE_REQUIRED_INCLUDES ${DOUBLE_CONVERSION_INCLUDE_DIR})
check_cxx_source_compiles("
#include <double-conversion/double-conversion.h>
int main() { static const int flags_ = double_conversion::StringToDoubleConverter::ALLOW_CASE_INSENSIBILITY; return 0;}
" HAVE_DOUBLE_CONVERSION_ALLOW_CASE_INSENSIBILITY)
if(NOT HAVE_DOUBLE_CONVERSION_ALLOW_CASE_INSENSIBILITY) # HAVE_STD_RANDOM_SHUFFLE
message (${RECONFIGURE_MESSAGE_LEVEL} "Disabling internal parquet library because arrow is broken (can't use old double_conversion)")
set(CAN_USE_INTERNAL_PARQUET_LIBRARY 0)
endif()
endif()
if(NOT CAN_USE_INTERNAL_PARQUET_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal parquet")
set(USE_INTERNAL_PARQUET_LIBRARY 0)
else()
set(USE_INTERNAL_PARQUET_LIBRARY 1)
if(MAKE_STATIC_LIBRARIES)
set(FLATBUFFERS_LIBRARY flatbuffers)
set(ARROW_LIBRARY arrow_static)
set(PARQUET_LIBRARY parquet_static)
set(THRIFT_LIBRARY thrift_static)
else()
set(FLATBUFFERS_LIBRARY flatbuffers_shared)
set(ARROW_LIBRARY arrow_shared)
set(PARQUET_LIBRARY parquet_shared)
set(THRIFT_LIBRARY thrift)
endif()
set(USE_PARQUET 1)
set(USE_ORC 1)
set(USE_ARROW 1)
endif()
elseif(OS_FREEBSD)
message (${RECONFIGURE_MESSAGE_LEVEL} "Using internal parquet library on FreeBSD is not supported")
endif()
if(USE_PARQUET)
message(STATUS "Using Parquet: arrow=${ARROW_LIBRARY}:${ARROW_INCLUDE_DIR} ;"
" parquet=${PARQUET_LIBRARY}:${PARQUET_INCLUDE_DIR} ;"
" thrift=${THRIFT_LIBRARY} ;"
" flatbuffers=${FLATBUFFERS_LIBRARY}")
else()
message(STATUS "Building without Parquet support")
endif()
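# Illustrative configure-time alternatives, taken from the warning above:
#   cmake .. -DUSE_INTERNAL_PARQUET_LIBRARY=ON   # fall back to the bundled Arrow/Parquet
#   cmake .. -DENABLE_PARQUET=OFF                # drop Parquet support entirely
#   cmake .. -DUSE_STATIC_LIBRARIES=OFF          # link the shared external libraries
# A hypothetical consumer ("parquet_example") then links the resolved libraries.
if(USE_PARQUET)
    add_library(parquet_example STATIC parquet_example.cpp)
    target_link_libraries(parquet_example PRIVATE ${ARROW_LIBRARY} ${PARQUET_LIBRARY} ${THRIFT_LIBRARY})
endif()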

View File

@ -1,2 +0,0 @@
set(PDQSORT_INCLUDE_DIR ${ClickHouse_SOURCE_DIR}/contrib/pdqsort)
message(STATUS "Using pdqsort: ${PDQSORT_INCLUDE_DIR}")

View File

@ -1,8 +0,0 @@
option (USE_INTERNAL_POCO_LIBRARY "Use internal Poco library" ON)
if (NOT USE_INTERNAL_POCO_LIBRARY)
find_path (ROOT_DIR NAMES Foundation/include/Poco/Poco.h include/Poco/Poco.h)
if (NOT ROOT_DIR)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system poco")
endif()
endif ()

View File

@ -1,62 +0,0 @@
option(ENABLE_PROTOBUF "Enable protobuf" ${ENABLE_LIBRARIES})
if(NOT ENABLE_PROTOBUF)
if(USE_INTERNAL_PROTOBUF_LIBRARY)
message(${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal protobuf with ENABLE_PROTOBUF=OFF")
endif()
return()
endif()
# Normally we use the internal protobuf library.
# You can set USE_INTERNAL_PROTOBUF_LIBRARY to OFF to force using the external protobuf library, which should be installed in the system in this case.
# The external protobuf library can be installed in the system by running
# sudo apt-get install libprotobuf-dev protobuf-compiler libprotoc-dev
option(USE_INTERNAL_PROTOBUF_LIBRARY "Set to FALSE to use system protobuf instead of bundled. (Experimental. Set to OFF at your own risk)" ON)
if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/protobuf/cmake/CMakeLists.txt")
if(USE_INTERNAL_PROTOBUF_LIBRARY)
message(WARNING "submodule contrib/protobuf is missing. to fix try run: \n git submodule update --init")
message(${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal protobuf")
set(USE_INTERNAL_PROTOBUF_LIBRARY 0)
endif()
set(MISSING_INTERNAL_PROTOBUF_LIBRARY 1)
endif()
if(NOT USE_INTERNAL_PROTOBUF_LIBRARY)
find_package(Protobuf)
if(NOT Protobuf_INCLUDE_DIR OR NOT Protobuf_LIBRARY)
message(${RECONFIGURE_MESSAGE_LEVEL} "Can't find system protobuf library")
set(EXTERNAL_PROTOBUF_LIBRARY_FOUND 0)
elseif(NOT Protobuf_PROTOC_EXECUTABLE)
message(${RECONFIGURE_MESSAGE_LEVEL} "Can't find system protobuf compiler")
set(EXTERNAL_PROTOBUF_LIBRARY_FOUND 0)
else()
set(EXTERNAL_PROTOBUF_LIBRARY_FOUND 1)
set(USE_PROTOBUF 1)
endif()
endif()
if(NOT EXTERNAL_PROTOBUF_LIBRARY_FOUND AND NOT MISSING_INTERNAL_PROTOBUF_LIBRARY)
set(Protobuf_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/protobuf/src")
set(Protobuf_LIBRARY libprotobuf)
set(Protobuf_PROTOC_EXECUTABLE "$<TARGET_FILE:protoc>")
set(Protobuf_PROTOC_LIBRARY libprotoc)
include("${ClickHouse_SOURCE_DIR}/contrib/protobuf-cmake/protobuf_generate.cmake")
set(USE_INTERNAL_PROTOBUF_LIBRARY 1)
set(USE_PROTOBUF 1)
endif()
if(OS_FREEBSD AND SANITIZE STREQUAL "address")
# ../contrib/protobuf/src/google/protobuf/arena_impl.h:45:10: fatal error: 'sanitizer/asan_interface.h' file not found
# #include <sanitizer/asan_interface.h>
if(LLVM_INCLUDE_DIRS)
set(Protobuf_INCLUDE_DIR "${Protobuf_INCLUDE_DIR}" ${LLVM_INCLUDE_DIRS})
else()
message(${RECONFIGURE_MESSAGE_LEVEL} "Can't use protobuf on FreeBSD with address sanitizer without LLVM")
set(USE_PROTOBUF 0)
endif()
endif()
message(STATUS "Using protobuf=${USE_PROTOBUF}: ${Protobuf_INCLUDE_DIR} : ${Protobuf_LIBRARY} : ${Protobuf_PROTOC_EXECUTABLE} : ${Protobuf_PROTOC_LIBRARY}")

View File

@ -1,35 +0,0 @@
option(ENABLE_RAPIDJSON "Use rapidjson" ${ENABLE_LIBRARIES})
if(NOT ENABLE_RAPIDJSON)
if(USE_INTERNAL_RAPIDJSON_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal rapidjson library with ENABLE_RAPIDJSON=OFF")
endif()
return()
endif()
option(USE_INTERNAL_RAPIDJSON_LIBRARY "Set to FALSE to use system rapidjson library instead of bundled" ON)
if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/rapidjson/include/rapidjson/rapidjson.h")
if(USE_INTERNAL_RAPIDJSON_LIBRARY)
message(WARNING "submodule contrib/rapidjson is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal rapidjson library")
set(USE_INTERNAL_RAPIDJSON_LIBRARY 0)
endif()
set(MISSING_INTERNAL_RAPIDJSON_LIBRARY 1)
endif()
if(NOT USE_INTERNAL_RAPIDJSON_LIBRARY)
find_path(RAPIDJSON_INCLUDE_DIR NAMES rapidjson/rapidjson.h PATHS ${RAPIDJSON_INCLUDE_PATHS})
if(NOT RAPIDJSON_INCLUDE_DIR)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system rapidjson")
endif()
endif()
if(RAPIDJSON_INCLUDE_DIR)
set(USE_RAPIDJSON 1)
elseif(NOT MISSING_INTERNAL_RAPIDJSON_LIBRARY)
set(RAPIDJSON_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/rapidjson/include")
set(USE_INTERNAL_RAPIDJSON_LIBRARY 1)
set(USE_RAPIDJSON 1)
endif()
message(STATUS "Using rapidjson=${USE_RAPIDJSON}: ${RAPIDJSON_INCLUDE_DIR}")

View File

@ -1,68 +0,0 @@
option (ENABLE_RDKAFKA "Enable kafka" ${ENABLE_LIBRARIES})
if (NOT ENABLE_RDKAFKA)
if (USE_INTERNAL_RDKAFKA_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal librdkafka with ENABLE_RDKAFKA=OFF")
endif()
return()
endif()
option (USE_INTERNAL_RDKAFKA_LIBRARY "Set to FALSE to use system librdkafka instead of the bundled one" ON)
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/cppkafka/src")
if(USE_INTERNAL_RDKAFKA_LIBRARY)
message (WARNING "submodule contrib/cppkafka is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal cppkafka")
set (USE_INTERNAL_RDKAFKA_LIBRARY 0)
endif()
set (MISSING_INTERNAL_CPPKAFKA_LIBRARY 1)
endif ()
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/librdkafka/src")
if(USE_INTERNAL_RDKAFKA_LIBRARY OR MISSING_INTERNAL_CPPKAFKA_LIBRARY)
message (WARNING "submodule contrib/librdkafka is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal rdkafka")
set (USE_INTERNAL_RDKAFKA_LIBRARY 0)
endif()
set (MISSING_INTERNAL_RDKAFKA_LIBRARY 1)
endif ()
if (NOT USE_INTERNAL_RDKAFKA_LIBRARY)
find_library (RDKAFKA_LIB rdkafka)
find_path (RDKAFKA_INCLUDE_DIR NAMES librdkafka/rdkafka.h PATHS ${RDKAFKA_INCLUDE_PATHS})
if (NOT RDKAFKA_LIB OR NOT RDKAFKA_INCLUDE_DIR)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system librdkafka")
endif()
if (USE_STATIC_LIBRARIES AND NOT OS_FREEBSD)
find_library (SASL2_LIBRARY sasl2)
if (NOT SASL2_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system sasl2 library needed for static librdkafka")
endif()
endif ()
set (CPPKAFKA_LIBRARY cppkafka)
endif ()
if (RDKAFKA_LIB AND RDKAFKA_INCLUDE_DIR)
set (USE_RDKAFKA 1)
add_library (rdkafka_imp UNKNOWN IMPORTED)
set_target_properties (rdkafka_imp PROPERTIES IMPORTED_LOCATION ${RDKAFKA_LIB})
set_target_properties (rdkafka_imp PROPERTIES INTERFACE_INCLUDE_DIRECTORIES ${RDKAFKA_INCLUDE_DIR})
set (RDKAFKA_LIBRARY rdkafka_imp ${OPENSSL_LIBRARIES})
set (CPPKAFKA_LIBRARY cppkafka)
if (SASL2_LIBRARY)
list (APPEND RDKAFKA_LIBRARY ${SASL2_LIBRARY})
endif ()
if (LZ4_LIBRARY)
list (APPEND RDKAFKA_LIBRARY ${LZ4_LIBRARY})
endif ()
elseif (NOT MISSING_INTERNAL_RDKAFKA_LIBRARY AND NOT MISSING_INTERNAL_CPPKAFKA_LIBRARY)
set (USE_INTERNAL_RDKAFKA_LIBRARY 1)
set (RDKAFKA_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/librdkafka/src")
set (RDKAFKA_LIBRARY rdkafka)
set (CPPKAFKA_LIBRARY cppkafka)
set (USE_RDKAFKA 1)
endif ()
message (STATUS "Using librdkafka=${USE_RDKAFKA}: ${RDKAFKA_INCLUDE_DIR} : ${RDKAFKA_LIBRARY} ${CPPKAFKA_LIBRARY}")

View File

@ -1,41 +0,0 @@
option (USE_INTERNAL_RE2_LIBRARY "Set to FALSE to use system re2 library instead of bundled [slower]" ON)
if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/re2/re2")
if(USE_INTERNAL_RE2_LIBRARY)
message(WARNING "submodule contrib/re2 is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal re2 library")
endif()
set(USE_INTERNAL_RE2_LIBRARY 0)
set(MISSING_INTERNAL_RE2_LIBRARY 1)
endif()
if (NOT USE_INTERNAL_RE2_LIBRARY)
find_library (RE2_LIBRARY re2)
find_path (RE2_INCLUDE_DIR NAMES re2/re2.h PATHS ${RE2_INCLUDE_PATHS})
if (NOT RE2_LIBRARY OR NOT RE2_INCLUDE_DIR)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system re2 library")
endif ()
endif ()
string(FIND ${CMAKE_CURRENT_BINARY_DIR} " " _have_space)
if(_have_space GREATER 0)
message(WARNING "Using spaces in build path [${CMAKE_CURRENT_BINARY_DIR}] highly not recommended. Library re2st will be disabled.")
set (MISSING_INTERNAL_RE2_ST_LIBRARY 1)
endif()
if (RE2_LIBRARY AND RE2_INCLUDE_DIR)
set (RE2_ST_LIBRARY ${RE2_LIBRARY})
elseif (NOT MISSING_INTERNAL_RE2_LIBRARY)
set (USE_INTERNAL_RE2_LIBRARY 1)
set (RE2_LIBRARY re2)
set (RE2_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/re2")
if (NOT MISSING_INTERNAL_RE2_ST_LIBRARY)
set (RE2_ST_LIBRARY re2_st)
set (USE_RE2_ST 1)
else ()
set (RE2_ST_LIBRARY ${RE2_LIBRARY})
message (${RECONFIGURE_MESSAGE_LEVEL} "Using internal re2 library instead of re2_st")
endif ()
endif ()
message (STATUS "Using re2: ${RE2_INCLUDE_DIR} : ${RE2_LIBRARY}; ${RE2_ST_INCLUDE_DIR} : ${RE2_ST_LIBRARY}")

View File

@ -1,71 +0,0 @@
if (OS_DARWIN AND ARCH_AARCH64)
set (ENABLE_ROCKSDB OFF CACHE INTERNAL "")
endif()
option(ENABLE_ROCKSDB "Enable ROCKSDB" ${ENABLE_LIBRARIES})
if (NOT ENABLE_ROCKSDB)
if (USE_INTERNAL_ROCKSDB_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal rocksdb library with ENABLE_ROCKSDB=OFF")
endif()
return()
endif()
option(USE_INTERNAL_ROCKSDB_LIBRARY "Set to FALSE to use system ROCKSDB library instead of bundled" ON)
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/rocksdb/include")
if (USE_INTERNAL_ROCKSDB_LIBRARY)
message (WARNING "submodule contrib is missing. to fix try run: \n git submodule update --init")
message(${RECONFIGURE_MESSAGE_LEVEL} "cannot find internal rocksdb")
endif()
set (MISSING_INTERNAL_ROCKSDB 1)
endif ()
if (NOT USE_INTERNAL_ROCKSDB_LIBRARY)
find_library (ROCKSDB_LIBRARY rocksdb)
find_path (ROCKSDB_INCLUDE_DIR NAMES rocksdb/db.h PATHS ${ROCKSDB_INCLUDE_PATHS})
if (NOT ROCKSDB_LIBRARY OR NOT ROCKSDB_INCLUDE_DIR)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system rocksdb library")
endif()
if (NOT SNAPPY_LIBRARY)
include(cmake/find/snappy.cmake)
endif()
if (NOT ZLIB_LIBRARY)
include(cmake/find/zlib.cmake)
endif()
find_package(BZip2)
find_library(ZSTD_LIBRARY zstd)
find_library(LZ4_LIBRARY lz4)
find_library(GFLAGS_LIBRARY gflags)
if(SNAPPY_LIBRARY AND ZLIB_LIBRARY AND LZ4_LIBRARY AND BZIP2_FOUND AND ZSTD_LIBRARY AND GFLAGS_LIBRARY)
list (APPEND ROCKSDB_LIBRARY ${SNAPPY_LIBRARY})
list (APPEND ROCKSDB_LIBRARY ${ZLIB_LIBRARY})
list (APPEND ROCKSDB_LIBRARY ${LZ4_LIBRARY})
list (APPEND ROCKSDB_LIBRARY ${BZIP2_LIBRARY})
list (APPEND ROCKSDB_LIBRARY ${ZSTD_LIBRARY})
list (APPEND ROCKSDB_LIBRARY ${GFLAGS_LIBRARY})
else()
message (${RECONFIGURE_MESSAGE_LEVEL}
"Can't find system rocksdb: snappy=${SNAPPY_LIBRARY} ;"
" zlib=${ZLIB_LIBRARY} ;"
" lz4=${LZ4_LIBRARY} ;"
" bz2=${BZIP2_LIBRARY} ;"
" zstd=${ZSTD_LIBRARY} ;"
" gflags=${GFLAGS_LIBRARY} ;")
endif()
endif ()
if(ROCKSDB_LIBRARY AND ROCKSDB_INCLUDE_DIR)
set(USE_ROCKSDB 1)
elseif (NOT MISSING_INTERNAL_ROCKSDB)
set (USE_INTERNAL_ROCKSDB_LIBRARY 1)
set (ROCKSDB_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/rocksdb/include")
set (ROCKSDB_LIBRARY "rocksdb")
set (USE_ROCKSDB 1)
endif ()
message (STATUS "Using ROCKSDB=${USE_ROCKSDB}: ${ROCKSDB_INCLUDE_DIR} : ${ROCKSDB_LIBRARY}")

View File

@ -1,24 +0,0 @@
option(ENABLE_S2_GEOMETRY "Enable S2 geometry library" ${ENABLE_LIBRARIES})
if (ENABLE_S2_GEOMETRY)
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/s2geometry")
message (WARNING "submodule contrib/s2geometry is missing. to fix try run: \n git submodule update --init")
set (ENABLE_S2_GEOMETRY 0)
set (USE_S2_GEOMETRY 0)
else()
if (OPENSSL_FOUND)
set (S2_GEOMETRY_LIBRARY s2)
set (S2_GEOMETRY_INCLUDE_DIR ${ClickHouse_SOURCE_DIR}/contrib/s2geometry/src/s2)
set (USE_S2_GEOMETRY 1)
else()
message (WARNING "S2 uses OpenSSL, but the latter is absent.")
endif()
endif()
if (NOT USE_S2_GEOMETRY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't enable S2 geometry library")
endif()
endif()
message (STATUS "Using s2geometry=${USE_S2_GEOMETRY} : ${S2_GEOMETRY_INCLUDE_DIR}")

View File

@ -1,45 +0,0 @@
if(NOT OS_FREEBSD)
option(ENABLE_S3 "Enable S3" ${ENABLE_LIBRARIES})
elseif(ENABLE_S3 OR USE_INTERNAL_AWS_S3_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use S3 on FreeBSD")
endif()
if(NOT ENABLE_S3)
if(USE_INTERNAL_AWS_S3_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal S3 library with ENABLE_S3=OFF")
endif()
return()
endif()
option(USE_INTERNAL_AWS_S3_LIBRARY "Set to FALSE to use system S3 instead of bundled (experimental - set to OFF at your own risk)"
ON)
if (NOT USE_INTERNAL_POCO_LIBRARY AND USE_INTERNAL_AWS_S3_LIBRARY)
message (FATAL_ERROR "Currently S3 support can be built only with internal POCO library")
endif()
if (NOT USE_INTERNAL_AWS_S3_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Compilation with external S3 library is not supported yet")
endif()
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/aws/aws-cpp-sdk-s3")
message (WARNING "submodule contrib/aws is missing. to fix try run: \n git submodule update --init")
if (USE_INTERNAL_AWS_S3_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal S3 library")
endif ()
set (MISSING_AWS_S3 1)
endif ()
if (USE_INTERNAL_AWS_S3_LIBRARY AND NOT MISSING_AWS_S3)
set(AWS_S3_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws/aws-cpp-sdk-s3/include")
set(AWS_S3_CORE_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws/aws-cpp-sdk-core/include")
set(AWS_S3_LIBRARY aws_s3)
set(USE_INTERNAL_AWS_S3_LIBRARY 1)
set(USE_AWS_S3 1)
else()
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't enable S3")
set(USE_INTERNAL_AWS_S3_LIBRARY 0)
set(USE_AWS_S3 0)
endif ()
message (STATUS "Using aws_s3=${USE_AWS_S3}: ${AWS_S3_INCLUDE_DIR} : ${AWS_S3_LIBRARY}")

View File

@ -1,23 +0,0 @@
set (SENTRY_LIBRARY "sentry")
set (SENTRY_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/sentry-native/include")
if (NOT EXISTS "${SENTRY_INCLUDE_DIR}/sentry.h")
message (WARNING "submodule contrib/sentry-native is missing. to fix try run: \n git submodule update --init")
if (USE_SENTRY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal sentry library")
endif()
return()
endif ()
if (NOT OS_FREEBSD AND NOT SPLIT_SHARED_LIBRARIES AND NOT (OS_DARWIN AND COMPILER_CLANG))
option (USE_SENTRY "Use Sentry" ${ENABLE_LIBRARIES})
set (SENTRY_TRANSPORT "curl" CACHE STRING "")
set (SENTRY_BACKEND "none" CACHE STRING "")
set (SENTRY_EXPORT_SYMBOLS OFF CACHE BOOL "")
set (SENTRY_LINK_PTHREAD OFF CACHE BOOL "")
set (SENTRY_PIC OFF CACHE BOOL "")
set (BUILD_SHARED_LIBS OFF)
message (STATUS "Using sentry=${USE_SENTRY}: ${SENTRY_LIBRARY}")
elseif (USE_SENTRY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Sentry is not supported in current configuration")
endif ()

View File

@ -1,11 +0,0 @@
option (USE_SIMDJSON "Use simdjson" ${ENABLE_LIBRARIES})
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/simdjson/include/simdjson.h")
message (WARNING "submodule contrib/simdjson is missing. to fix try run: \n git submodule update --init")
if (USE_SIMDJSON)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal simdjson library")
endif()
return()
endif ()
message(STATUS "Using simdjson=${USE_SIMDJSON}")

View File

@ -1,21 +0,0 @@
option(USE_SNAPPY "Enable snappy library" ON)
if(NOT USE_SNAPPY)
if (USE_INTERNAL_SNAPPY_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal snappy library with USE_SNAPPY=OFF")
endif()
return()
endif()
option (USE_INTERNAL_SNAPPY_LIBRARY "Set to FALSE to use system snappy library instead of bundled" ON)
if(NOT USE_INTERNAL_SNAPPY_LIBRARY)
find_library(SNAPPY_LIBRARY snappy)
if (NOT SNAPPY_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system snappy library")
endif()
else ()
set(SNAPPY_LIBRARY snappy)
endif()
message (STATUS "Using snappy: ${SNAPPY_LIBRARY}")

View File

@ -1,17 +0,0 @@
option (USE_INTERNAL_SPARSEHASH_LIBRARY "Set to FALSE to use system sparsehash library instead of bundled"
ON) # ON by default as we are not aware of any system providing package for sparsehash-c11
if (NOT USE_INTERNAL_SPARSEHASH_LIBRARY)
find_path (SPARSEHASH_INCLUDE_DIR NAMES sparsehash/sparse_hash_map PATHS ${SPARSEHASH_INCLUDE_PATHS})
if (NOT SPARSEHASH_INCLUDE_DIR)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system sparsehash library")
endif ()
endif ()
if (SPARSEHASH_INCLUDE_DIR)
else ()
set (USE_INTERNAL_SPARSEHASH_LIBRARY 1)
set (SPARSEHASH_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/sparsehash-c11")
endif ()
message (STATUS "Using sparsehash: ${SPARSEHASH_INCLUDE_DIR}")

View File

@ -1,16 +0,0 @@
option(ENABLE_SQLITE "Enable sqlite" ${ENABLE_LIBRARIES})
if (NOT ENABLE_SQLITE)
return()
endif()
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/sqlite-amalgamation/sqlite3.c")
message (WARNING "submodule contrib/sqlite3-amalgamation is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal sqlite library")
set (USE_SQLITE 0)
return()
endif()
set (USE_SQLITE 1)
set (SQLITE_LIBRARY sqlite)
message (STATUS "Using sqlite=${USE_SQLITE}")

View File

@ -1,133 +0,0 @@
# Needed when securely connecting to an external server, e.g.
# clickhouse-client --host ... --secure
option(ENABLE_SSL "Enable ssl" ${ENABLE_LIBRARIES})
if(NOT ENABLE_SSL)
if (USE_INTERNAL_SSL_LIBRARY)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal ssl library with ENABLE_SSL=OFF")
endif()
return()
endif()
option(USE_INTERNAL_SSL_LIBRARY "Set to FALSE to use system *ssl library instead of bundled" ON)
if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/boringssl/README.md")
if(USE_INTERNAL_SSL_LIBRARY)
message(WARNING "submodule contrib/boringssl is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal ssl library")
endif()
set(USE_INTERNAL_SSL_LIBRARY 0)
set(MISSING_INTERNAL_SSL_LIBRARY 1)
endif()
set (OPENSSL_USE_STATIC_LIBS ${USE_STATIC_LIBRARIES})
if (NOT USE_INTERNAL_SSL_LIBRARY)
if (APPLE)
set (OPENSSL_ROOT_DIR "/usr/local/opt/openssl" CACHE INTERNAL "")
# https://rt.openssl.org/Ticket/Display.html?user=guest&pass=guest&id=2232
if (USE_STATIC_LIBRARIES)
message(WARNING "Disable USE_STATIC_LIBRARIES if you have linking problems with OpenSSL on MacOS")
endif ()
endif ()
find_package (OpenSSL)
if (NOT OPENSSL_FOUND)
# Try to find manually.
set (OPENSSL_INCLUDE_PATHS "/usr/local/opt/openssl/include")
set (OPENSSL_PATHS "/usr/local/opt/openssl/lib")
find_path (OPENSSL_INCLUDE_DIR NAMES openssl/ssl.h PATHS ${OPENSSL_INCLUDE_PATHS})
find_library (OPENSSL_SSL_LIBRARY ssl PATHS ${OPENSSL_PATHS})
find_library (OPENSSL_CRYPTO_LIBRARY crypto PATHS ${OPENSSL_PATHS})
if (OPENSSL_SSL_LIBRARY AND OPENSSL_CRYPTO_LIBRARY AND OPENSSL_INCLUDE_DIR)
set (OPENSSL_LIBRARIES ${OPENSSL_SSL_LIBRARY} ${OPENSSL_CRYPTO_LIBRARY})
set (OPENSSL_FOUND 1)
endif ()
endif ()
if (NOT OPENSSL_FOUND)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system ssl")
endif()
endif ()
if (NOT OPENSSL_FOUND AND NOT MISSING_INTERNAL_SSL_LIBRARY)
set (USE_INTERNAL_SSL_LIBRARY 1)
set (OPENSSL_ROOT_DIR "${ClickHouse_SOURCE_DIR}/contrib/boringssl")
set (OPENSSL_INCLUDE_DIR "${OPENSSL_ROOT_DIR}/include")
set (OPENSSL_CRYPTO_LIBRARY crypto)
set (OPENSSL_SSL_LIBRARY ssl)
set (OPENSSL_FOUND 1)
set (OPENSSL_LIBRARIES ${OPENSSL_SSL_LIBRARY} ${OPENSSL_CRYPTO_LIBRARY})
endif ()
if(OPENSSL_FOUND)
# we need keep OPENSSL_FOUND for many libs in contrib
set(USE_SSL 1)
endif()
# used by new poco
# part from /usr/share/cmake-*/Modules/FindOpenSSL.cmake, with removed all "EXISTS "
if(OPENSSL_FOUND AND NOT USE_INTERNAL_SSL_LIBRARY)
if(NOT TARGET OpenSSL::Crypto AND
(OPENSSL_CRYPTO_LIBRARY OR
LIB_EAY_LIBRARY_DEBUG OR
LIB_EAY_LIBRARY_RELEASE)
)
add_library(OpenSSL::Crypto UNKNOWN IMPORTED)
set_target_properties(OpenSSL::Crypto PROPERTIES
INTERFACE_INCLUDE_DIRECTORIES "${OPENSSL_INCLUDE_DIR}")
if(OPENSSL_CRYPTO_LIBRARY)
set_target_properties(OpenSSL::Crypto PROPERTIES
IMPORTED_LINK_INTERFACE_LANGUAGES "C"
IMPORTED_LOCATION "${OPENSSL_CRYPTO_LIBRARY}")
endif()
if(LIB_EAY_LIBRARY_RELEASE)
set_property(TARGET OpenSSL::Crypto APPEND PROPERTY
IMPORTED_CONFIGURATIONS RELEASE)
set_target_properties(OpenSSL::Crypto PROPERTIES
IMPORTED_LINK_INTERFACE_LANGUAGES_RELEASE "C"
IMPORTED_LOCATION_RELEASE "${LIB_EAY_LIBRARY_RELEASE}")
endif()
if(LIB_EAY_LIBRARY_DEBUG)
set_property(TARGET OpenSSL::Crypto APPEND PROPERTY
IMPORTED_CONFIGURATIONS DEBUG)
set_target_properties(OpenSSL::Crypto PROPERTIES
IMPORTED_LINK_INTERFACE_LANGUAGES_DEBUG "C"
IMPORTED_LOCATION_DEBUG "${LIB_EAY_LIBRARY_DEBUG}")
endif()
endif()
if(NOT TARGET OpenSSL::SSL AND
(OPENSSL_SSL_LIBRARY OR
SSL_EAY_LIBRARY_DEBUG OR
SSL_EAY_LIBRARY_RELEASE)
)
add_library(OpenSSL::SSL UNKNOWN IMPORTED)
set_target_properties(OpenSSL::SSL PROPERTIES
INTERFACE_INCLUDE_DIRECTORIES "${OPENSSL_INCLUDE_DIR}")
if(OPENSSL_SSL_LIBRARY)
set_target_properties(OpenSSL::SSL PROPERTIES
IMPORTED_LINK_INTERFACE_LANGUAGES "C"
IMPORTED_LOCATION "${OPENSSL_SSL_LIBRARY}")
endif()
if(SSL_EAY_LIBRARY_RELEASE)
set_property(TARGET OpenSSL::SSL APPEND PROPERTY
IMPORTED_CONFIGURATIONS RELEASE)
set_target_properties(OpenSSL::SSL PROPERTIES
IMPORTED_LINK_INTERFACE_LANGUAGES_RELEASE "C"
IMPORTED_LOCATION_RELEASE "${SSL_EAY_LIBRARY_RELEASE}")
endif()
if(SSL_EAY_LIBRARY_DEBUG)
set_property(TARGET OpenSSL::SSL APPEND PROPERTY
IMPORTED_CONFIGURATIONS DEBUG)
set_target_properties(OpenSSL::SSL PROPERTIES
IMPORTED_LINK_INTERFACE_LANGUAGES_DEBUG "C"
IMPORTED_LOCATION_DEBUG "${SSL_EAY_LIBRARY_DEBUG}")
endif()
if(TARGET OpenSSL::Crypto)
set_target_properties(OpenSSL::SSL PROPERTIES
INTERFACE_LINK_LIBRARIES OpenSSL::Crypto)
endif()
endif()
endif()
message (STATUS "Using ssl=${USE_SSL}: ${OPENSSL_INCLUDE_DIR} : ${OPENSSL_LIBRARIES}")

View File

@ -1,27 +0,0 @@
option (USE_INTERNAL_XZ_LIBRARY "Set to OFF to use system xz (lzma) library instead of bundled" ON)
if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/xz/src/liblzma/api/lzma.h")
if(USE_INTERNAL_XZ_LIBRARY)
message(WARNING "submodule contrib/xz is missing. to fix try run: \n git submodule update --init")
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal xz (lzma) library")
set(USE_INTERNAL_XZ_LIBRARY 0)
endif()
set(MISSING_INTERNAL_XZ_LIBRARY 1)
endif()
if (NOT USE_INTERNAL_XZ_LIBRARY)
find_library (XZ_LIBRARY lzma)
find_path (XZ_INCLUDE_DIR NAMES lzma.h PATHS ${XZ_INCLUDE_PATHS})
if (NOT XZ_LIBRARY OR NOT XZ_INCLUDE_DIR)
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system xz (lzma) library")
endif ()
endif ()
if (XZ_LIBRARY AND XZ_INCLUDE_DIR)
elseif (NOT MISSING_INTERNAL_XZ_LIBRARY)
set (USE_INTERNAL_XZ_LIBRARY 1)
set (XZ_LIBRARY liblzma)
set (XZ_INCLUDE_DIR ${ClickHouse_SOURCE_DIR}/contrib/xz/src/liblzma/api)
endif ()
message (STATUS "Using xz (lzma): ${XZ_INCLUDE_DIR} : ${XZ_LIBRARY}")

Some files were not shown because too many files have changed in this diff.