mirror of https://github.com/ClickHouse/ClickHouse.git
synced 2024-11-24 16:42:05 +00:00

Merge branch 'master' into merge_15765
commit 82b0fc0bd0
2 .gitattributes vendored
@@ -1,4 +1,2 @@
contrib/* linguist-vendored
*.h linguist-language=C++
# to avoid frequent conflicts
tests/queries/0_stateless/arcadia_skip_list.txt text merge=union
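The rules above can be verified with git's attribute lookup; a quick sketch (the paths are illustrative, not taken from this commit):

    # Check which .gitattributes rules apply to a given path.
    git check-attr linguist-vendored -- contrib/zlib-ng/README.md
    git check-attr linguist-language -- src/Common/Exception.h
    # "merge=union" (the rule removed above) resolves conflicting hunks by
    # keeping both sides instead of writing conflict markers.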
4 .github/CODEOWNERS vendored
@@ -1,3 +1 @@
docs/* @ClickHouse/docs
docs/zh/* @ClickHouse/docs-zh
website/* @ClickHouse/docs
9 .github/PULL_REQUEST_TEMPLATE.md vendored
@@ -13,13 +13,4 @@ Changelog entry (a user-readable short description of the changes that goes to CHANGELOG.md)
...

Detailed description / Documentation draft:
...

> By adding documentation, you'll allow users to try your new feature immediately, not when someone else will have time to document it later. Documentation is necessary for all features that affect user experience in any way. You can add a brief documentation draft above, or add documentation right into your patch as Markdown files in the [docs](https://github.com/ClickHouse/ClickHouse/tree/master/docs) folder.

> If you are doing this for the first time, it's recommended to read the lightweight [Contributing to ClickHouse Documentation](https://github.com/ClickHouse/ClickHouse/tree/master/docs/README.md) guide first.

> Information about CI checks: https://clickhouse.tech/docs/en/development/continuous-integration/
8 .github/actionlint.yml vendored Normal file
@@ -0,0 +1,8 @@
self-hosted-runner:
  labels:
    - builder
    - fuzzer-unit-tester
    - stress-tester
    - style-checker
    - func-tester-aarch64
    - func-tester
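This new file configures actionlint, a static checker for GitHub Actions workflows; declaring the self-hosted runner labels keeps it from flagging the runs-on: entries in the workflows below as unknown. A minimal local run might look like this (the install method shown is one common option, not something this commit prescribes):

    # Install actionlint and run it from the repository root; it picks up
    # .github/actionlint.yml and scans the workflow files automatically.
    go install github.com/rhysd/actionlint/cmd/actionlint@latest
    actionlint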
4 .github/workflows/anchore-analysis.yml vendored
@@ -8,6 +8,10 @@

name: Docker Container Scan (clickhouse-server)

env:
  # Force the stdout and stderr streams to be unbuffered
  PYTHONUNBUFFERED: 1

"on":
  pull_request:
    paths:
31 .github/workflows/backport.yml vendored
@@ -1,4 +1,9 @@
name: CherryPick

env:
  # Force the stdout and stderr streams to be unbuffered
  PYTHONUNBUFFERED: 1

concurrency:
  group: cherry-pick
on:  # yamllint disable-line rule:truthy
@@ -8,25 +13,31 @@ jobs:
  CherryPick:
    runs-on: [self-hosted, style-checker]
    steps:
      - name: Set envs
        # https://docs.github.com/en/actions/learn-github-actions/workflow-commands-for-github-actions#multiline-strings
        run: |
          cat >> "$GITHUB_ENV" << 'EOF'
          TEMP_PATH=${{runner.temp}}/cherry_pick
          ROBOT_CLICKHOUSE_SSH_KEY<<RCSK
          ${{secrets.ROBOT_CLICKHOUSE_SSH_KEY}}
          RCSK
          REPO_OWNER=ClickHouse
          REPO_NAME=ClickHouse
          REPO_TEAM=core
          EOF
      - name: Check out repository code
        uses: actions/checkout@v2
        with:
          token: ${{secrets.ROBOT_CLICKHOUSE_COMMIT_TOKEN}}
          fetch-depth: 0
      - name: Cherry pick
        env:
          TEMP_PATH: ${{runner.temp}}/cherry_pick
          ROBOT_CLICKHOUSE_SSH_KEY: ${{secrets.ROBOT_CLICKHOUSE_SSH_KEY}}
          REPO_OWNER: "ClickHouse"
          REPO_NAME: "ClickHouse"
          REPO_TEAM: "core"
        run: |
          sudo pip install GitPython
          cd $GITHUB_WORKSPACE/tests/ci
          cd "$GITHUB_WORKSPACE/tests/ci"
          python3 cherry_pick.py
      - name: Cleanup
        if: always()
        run: |
          docker kill $(docker ps -q) ||:
          docker rm -f $(docker ps -a -q) ||:
          sudo rm -fr $TEMP_PATH
          docker kill "$(docker ps -q)" ||:
          docker rm -f "$(docker ps -a -q)" ||:
          sudo rm -fr "$TEMP_PATH"
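The "Set envs" step above uses the multiline-string syntax for $GITHUB_ENV described in the linked GitHub docs. A standalone sketch of the mechanism (variable names are hypothetical):

    # Single-line values are plain NAME=value lines appended to the env file.
    echo "SIMPLE_VAR=hello" >> "$GITHUB_ENV"
    # Multiline values use NAME<<DELIMITER ... DELIMITER; the outer quoted
    # heredoc ('EOF') stops the shell from expanding anything itself, so the
    # ${{ }} placeholders substituted by Actions are the only expansion.
    cat >> "$GITHUB_ENV" << 'EOF'
    MY_MULTILINE_VAR<<DELIM
    first line of the value
    second line of the value
    DELIM
    EOF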
543 .github/workflows/backport_branches.yml vendored Normal file
@@ -0,0 +1,543 @@
name: BackportPR

env:
  # Force the stdout and stderr streams to be unbuffered
  PYTHONUNBUFFERED: 1

on:  # yamllint disable-line rule:truthy
  push:
    branches:
      - 'backport/**'
jobs:
  DockerHubPushAarch64:
    runs-on: [self-hosted, func-tester-aarch64]
    steps:
      - name: Clear repository
        run: |
          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Images check
        run: |
          cd "$GITHUB_WORKSPACE/tests/ci"
          python3 docker_images_check.py --suffix aarch64
      - name: Upload images files to artifacts
        uses: actions/upload-artifact@v2
        with:
          name: changed_images_aarch64
          path: ${{ runner.temp }}/docker_images_check/changed_images_aarch64.json
  DockerHubPushAmd64:
    runs-on: [self-hosted, style-checker]
    steps:
      - name: Clear repository
        run: |
          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Images check
        run: |
          cd "$GITHUB_WORKSPACE/tests/ci"
          python3 docker_images_check.py --suffix amd64
      - name: Upload images files to artifacts
        uses: actions/upload-artifact@v2
        with:
          name: changed_images_amd64
          path: ${{ runner.temp }}/docker_images_check/changed_images_amd64.json
  DockerHubPush:
    needs: [DockerHubPushAmd64, DockerHubPushAarch64]
    runs-on: [self-hosted, style-checker]
    steps:
      - name: Clear repository
        run: |
          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Download changed aarch64 images
        uses: actions/download-artifact@v2
        with:
          name: changed_images_aarch64
          path: ${{ runner.temp }}
      - name: Download changed amd64 images
        uses: actions/download-artifact@v2
        with:
          name: changed_images_amd64
          path: ${{ runner.temp }}
      - name: Images check
        run: |
          cd "$GITHUB_WORKSPACE/tests/ci"
          python3 docker_manifests_merge.py --suffix amd64 --suffix aarch64
      - name: Upload images files to artifacts
        uses: actions/upload-artifact@v2
        with:
          name: changed_images
          path: ${{ runner.temp }}/changed_images.json
  CompatibilityCheck:
    needs: [BuilderDebRelease]
    runs-on: [self-hosted, style-checker]
    steps:
      - name: Set envs
        run: |
          cat >> "$GITHUB_ENV" << 'EOF'
          TEMP_PATH=${{runner.temp}}/compatibility_check
          REPO_COPY=${{runner.temp}}/compatibility_check/ClickHouse
          REPORTS_PATH=${{runner.temp}}/reports_dir
          EOF
      - name: Clear repository
        run: |
          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Download json reports
        uses: actions/download-artifact@v2
        with:
          path: ${{ env.REPORTS_PATH }}
      - name: CompatibilityCheck
        run: |
          sudo rm -fr "$TEMP_PATH"
          mkdir -p "$TEMP_PATH"
          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
          cd "$REPO_COPY/tests/ci" && python3 compatibility_check.py
      - name: Cleanup
        if: always()
        run: |
          docker kill "$(docker ps -q)" ||:
          docker rm -f "$(docker ps -a -q)" ||:
          sudo rm -fr "$TEMP_PATH"
#########################################################################################
#################################### ORDINARY BUILDS ####################################
#########################################################################################
  BuilderDebRelease:
    needs: [DockerHubPush]
    runs-on: [self-hosted, builder]
    steps:
      - name: Set envs
        run: |
          cat >> "$GITHUB_ENV" << 'EOF'
          TEMP_PATH=${{runner.temp}}/build_check
          IMAGES_PATH=${{runner.temp}}/images_path
          REPO_COPY=${{runner.temp}}/build_check/ClickHouse
          CACHES_PATH=${{runner.temp}}/../ccaches
          CHECK_NAME=ClickHouse build check (actions)
          BUILD_NAME=package_release
          EOF
      - name: Download changed images
        uses: actions/download-artifact@v2
        with:
          name: changed_images
          path: ${{ env.IMAGES_PATH }}
      - name: Clear repository
        run: |
          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
        uses: actions/checkout@v2
        with:
          submodules: 'true'
          fetch-depth: 0 # otherwise we will have no info about contributors
      - name: Build
        run: |
          sudo rm -fr "$TEMP_PATH"
          mkdir -p "$TEMP_PATH"
          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
          cd "$REPO_COPY/tests/ci" && python3 build_check.py "$CHECK_NAME" "$BUILD_NAME"
      - name: Upload build URLs to artifacts
        if: ${{ success() || failure() }}
        uses: actions/upload-artifact@v2
        with:
          name: ${{ env.BUILD_NAME }}
          path: ${{ env.TEMP_PATH }}/${{ env.BUILD_NAME }}.json
      - name: Cleanup
        if: always()
        run: |
          docker kill "$(docker ps -q)" ||:
          docker rm -f "$(docker ps -a -q)" ||:
          sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
  BuilderDebAarch64:
    needs: [DockerHubPush]
    runs-on: [self-hosted, builder]
    steps:
      - name: Set envs
        run: |
          cat >> "$GITHUB_ENV" << 'EOF'
          TEMP_PATH=${{runner.temp}}/build_check
          IMAGES_PATH=${{runner.temp}}/images_path
          REPO_COPY=${{runner.temp}}/build_check/ClickHouse
          CACHES_PATH=${{runner.temp}}/../ccaches
          CHECK_NAME=ClickHouse build check (actions)
          BUILD_NAME=package_aarch64
          EOF
      - name: Download changed images
        uses: actions/download-artifact@v2
        with:
          name: changed_images
          path: ${{ runner.temp }}/images_path
      - name: Check out repository code
        uses: actions/checkout@v2
        with:
          submodules: 'true'
          fetch-depth: 0 # otherwise we will have no info about contributors
      - name: Build
        run: |
          sudo rm -fr "$TEMP_PATH"
          mkdir -p "$TEMP_PATH"
          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
          cd "$REPO_COPY/tests/ci" && python3 build_check.py "$CHECK_NAME" "$BUILD_NAME"
      - name: Upload build URLs to artifacts
        uses: actions/upload-artifact@v2
        with:
          name: ${{ env.BUILD_NAME }}
          path: ${{ runner.temp }}/build_check/${{ env.BUILD_NAME }}.json
      - name: Cleanup
        if: always()
        run: |
          docker kill "$(docker ps -q)" ||:
          docker rm -f "$(docker ps -a -q)" ||:
          sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
  BuilderDebAsan:
    needs: [DockerHubPush]
    runs-on: [self-hosted, builder]
    steps:
      - name: Set envs
        run: |
          cat >> "$GITHUB_ENV" << 'EOF'
          TEMP_PATH=${{runner.temp}}/build_check
          IMAGES_PATH=${{runner.temp}}/images_path
          REPO_COPY=${{runner.temp}}/build_check/ClickHouse
          CACHES_PATH=${{runner.temp}}/../ccaches
          CHECK_NAME=ClickHouse build check (actions)
          BUILD_NAME=package_asan
          EOF
      - name: Download changed images
        uses: actions/download-artifact@v2
        with:
          name: changed_images
          path: ${{ env.IMAGES_PATH }}
      - name: Clear repository
        run: |
          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
        uses: actions/checkout@v2
        with:
          submodules: 'true'
          fetch-depth: 0 # otherwise we will have no info about contributors
      - name: Build
        run: |
          sudo rm -fr "$TEMP_PATH"
          mkdir -p "$TEMP_PATH"
          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
          cd "$REPO_COPY/tests/ci" && python3 build_check.py "$CHECK_NAME" "$BUILD_NAME"
      - name: Upload build URLs to artifacts
        if: ${{ success() || failure() }}
        uses: actions/upload-artifact@v2
        with:
          name: ${{ env.BUILD_NAME }}
          path: ${{ env.TEMP_PATH }}/${{ env.BUILD_NAME }}.json
      - name: Cleanup
        if: always()
        run: |
          docker kill "$(docker ps -q)" ||:
          docker rm -f "$(docker ps -a -q)" ||:
          sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
  BuilderDebTsan:
    needs: [DockerHubPush]
    runs-on: [self-hosted, builder]
    steps:
      - name: Set envs
        run: |
          cat >> "$GITHUB_ENV" << 'EOF'
          TEMP_PATH=${{runner.temp}}/build_check
          IMAGES_PATH=${{runner.temp}}/images_path
          REPO_COPY=${{runner.temp}}/build_check/ClickHouse
          CACHES_PATH=${{runner.temp}}/../ccaches
          CHECK_NAME=ClickHouse build check (actions)
          BUILD_NAME=package_tsan
          EOF
      - name: Download changed images
        uses: actions/download-artifact@v2
        with:
          name: changed_images
          path: ${{ env.IMAGES_PATH }}
      - name: Clear repository
        run: |
          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
        uses: actions/checkout@v2
        with:
          submodules: 'true'
          fetch-depth: 0 # otherwise we will have no info about contributors
      - name: Build
        run: |
          sudo rm -fr "$TEMP_PATH"
          mkdir -p "$TEMP_PATH"
          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
          cd "$REPO_COPY/tests/ci" && python3 build_check.py "$CHECK_NAME" "$BUILD_NAME"
      - name: Upload build URLs to artifacts
        if: ${{ success() || failure() }}
        uses: actions/upload-artifact@v2
        with:
          name: ${{ env.BUILD_NAME }}
          path: ${{ env.TEMP_PATH }}/${{ env.BUILD_NAME }}.json
      - name: Cleanup
        if: always()
        run: |
          docker kill "$(docker ps -q)" ||:
          docker rm -f "$(docker ps -a -q)" ||:
          sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
  BuilderDebDebug:
    needs: [DockerHubPush]
    runs-on: [self-hosted, builder]
    steps:
      - name: Set envs
        run: |
          cat >> "$GITHUB_ENV" << 'EOF'
          TEMP_PATH=${{runner.temp}}/build_check
          IMAGES_PATH=${{runner.temp}}/images_path
          REPO_COPY=${{runner.temp}}/build_check/ClickHouse
          CACHES_PATH=${{runner.temp}}/../ccaches
          CHECK_NAME=ClickHouse build check (actions)
          BUILD_NAME=package_debug
          EOF
      - name: Download changed images
        uses: actions/download-artifact@v2
        with:
          name: changed_images
          path: ${{ env.IMAGES_PATH }}
      - name: Clear repository
        run: |
          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
        uses: actions/checkout@v2
        with:
          submodules: 'true'
          fetch-depth: 0 # otherwise we will have no info about contributors
      - name: Build
        run: |
          sudo rm -fr "$TEMP_PATH"
          mkdir -p "$TEMP_PATH"
          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
          cd "$REPO_COPY/tests/ci" && python3 build_check.py "$CHECK_NAME" "$BUILD_NAME"
      - name: Upload build URLs to artifacts
        if: ${{ success() || failure() }}
        uses: actions/upload-artifact@v2
        with:
          name: ${{ env.BUILD_NAME }}
          path: ${{ env.TEMP_PATH }}/${{ env.BUILD_NAME }}.json
      - name: Cleanup
        if: always()
        run: |
          docker kill "$(docker ps -q)" ||:
          docker rm -f "$(docker ps -a -q)" ||:
          sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
############################################################################################
##################################### BUILD REPORTER #######################################
############################################################################################
  BuilderReport:
    needs:
      - BuilderDebRelease
      - BuilderDebAarch64
      - BuilderDebAsan
      - BuilderDebTsan
      - BuilderDebDebug
    runs-on: [self-hosted, style-checker]
    steps:
      - name: Set envs
        run: |
          cat >> "$GITHUB_ENV" << 'EOF'
          TEMP_PATH=${{runner.temp}}/report_check
          REPORTS_PATH=${{runner.temp}}/reports_dir
          CHECK_NAME=ClickHouse build check (actions)
          EOF
      - name: Download json reports
        uses: actions/download-artifact@v2
        with:
          path: ${{ env.REPORTS_PATH }}
      - name: Clear repository
        run: |
          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Report Builder
        run: |
          sudo rm -fr "$TEMP_PATH"
          mkdir -p "$TEMP_PATH"
          cd "$GITHUB_WORKSPACE/tests/ci"
          python3 build_report_check.py "$CHECK_NAME"
      - name: Cleanup
        if: always()
        run: |
          docker kill "$(docker ps -q)" ||:
          docker rm -f "$(docker ps -a -q)" ||:
          sudo rm -fr "$TEMP_PATH"
##############################################################################################
########################### FUNCTIONAL STATELESS TESTS #######################################
##############################################################################################
  FunctionalStatelessTestAsan:
    needs: [BuilderDebAsan]
    runs-on: [self-hosted, func-tester]
    steps:
      - name: Set envs
        run: |
          cat >> "$GITHUB_ENV" << 'EOF'
          TEMP_PATH=${{runner.temp}}/stateless_debug
          REPORTS_PATH=${{runner.temp}}/reports_dir
          CHECK_NAME=Stateless tests (address, actions)
          REPO_COPY=${{runner.temp}}/stateless_debug/ClickHouse
          KILL_TIMEOUT=10800
          EOF
      - name: Download json reports
        uses: actions/download-artifact@v2
        with:
          path: ${{ env.REPORTS_PATH }}
      - name: Clear repository
        run: |
          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Functional test
        run: |
          sudo rm -fr "$TEMP_PATH"
          mkdir -p "$TEMP_PATH"
          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
          cd "$REPO_COPY/tests/ci"
          python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT"
      - name: Cleanup
        if: always()
        run: |
          docker kill "$(docker ps -q)" ||:
          docker rm -f "$(docker ps -a -q)" ||:
          sudo rm -fr "$TEMP_PATH"
##############################################################################################
############################ FUNCTIONAL STATEFUL TESTS #######################################
##############################################################################################
  FunctionalStatefulTestDebug:
    needs: [BuilderDebDebug]
    runs-on: [self-hosted, func-tester]
    steps:
      - name: Set envs
        run: |
          cat >> "$GITHUB_ENV" << 'EOF'
          TEMP_PATH=${{runner.temp}}/stateful_debug
          REPORTS_PATH=${{runner.temp}}/reports_dir
          CHECK_NAME=Stateful tests (debug, actions)
          REPO_COPY=${{runner.temp}}/stateful_debug/ClickHouse
          KILL_TIMEOUT=3600
          EOF
      - name: Download json reports
        uses: actions/download-artifact@v2
        with:
          path: ${{ env.REPORTS_PATH }}
      - name: Clear repository
        run: |
          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Functional test
        run: |
          sudo rm -fr "$TEMP_PATH"
          mkdir -p "$TEMP_PATH"
          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
          cd "$REPO_COPY/tests/ci"
          python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT"
      - name: Cleanup
        if: always()
        run: |
          docker kill "$(docker ps -q)" ||:
          docker rm -f "$(docker ps -a -q)" ||:
          sudo rm -fr "$TEMP_PATH"
##############################################################################################
######################################### STRESS TESTS #######################################
##############################################################################################
  StressTestTsan:
    needs: [BuilderDebTsan]
    # func testers have 16 cores + 128 GB memory
    # while stress testers have 36 cores + 72 GB memory
    # It would be better to have something like 32 + 128,
    # but such servers are almost unavailable as spot instances.
    runs-on: [self-hosted, func-tester]
    steps:
      - name: Set envs
        run: |
          cat >> "$GITHUB_ENV" << 'EOF'
          TEMP_PATH=${{runner.temp}}/stress_thread
          REPORTS_PATH=${{runner.temp}}/reports_dir
          CHECK_NAME=Stress test (thread, actions)
          REPO_COPY=${{runner.temp}}/stress_thread/ClickHouse
          EOF
      - name: Download json reports
        uses: actions/download-artifact@v2
        with:
          path: ${{ env.REPORTS_PATH }}
      - name: Clear repository
        run: |
          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Stress test
        run: |
          sudo rm -fr "$TEMP_PATH"
          mkdir -p "$TEMP_PATH"
          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
          cd "$REPO_COPY/tests/ci"
          python3 stress_check.py "$CHECK_NAME"
      - name: Cleanup
        if: always()
        run: |
          docker kill "$(docker ps -q)" ||:
          docker rm -f "$(docker ps -a -q)" ||:
          sudo rm -fr "$TEMP_PATH"
#############################################################################################
############################# INTEGRATION TESTS #############################################
#############################################################################################
  IntegrationTestsRelease:
    needs: [BuilderDebRelease]
    runs-on: [self-hosted, stress-tester]
    steps:
      - name: Set envs
        run: |
          cat >> "$GITHUB_ENV" << 'EOF'
          TEMP_PATH=${{runner.temp}}/integration_tests_release
          REPORTS_PATH=${{runner.temp}}/reports_dir
          CHECK_NAME=Integration tests (release, actions)
          REPO_COPY=${{runner.temp}}/integration_tests_release/ClickHouse
          EOF
      - name: Download json reports
        uses: actions/download-artifact@v2
        with:
          path: ${{ env.REPORTS_PATH }}
      - name: Clear repository
        run: |
          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Integration test
        run: |
          sudo rm -fr "$TEMP_PATH"
          mkdir -p "$TEMP_PATH"
          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
          cd "$REPO_COPY/tests/ci"
          python3 integration_test_check.py "$CHECK_NAME"
      - name: Cleanup
        if: always()
        run: |
          docker kill "$(docker ps -q)" ||:
          docker rm -f "$(docker ps -a -q)" ||:
          sudo rm -fr "$TEMP_PATH"
  FinishCheck:
    needs:
      - DockerHubPush
      - BuilderReport
      - FunctionalStatelessTestAsan
      - FunctionalStatefulTestDebug
      - StressTestTsan
      - IntegrationTestsRelease
      - CompatibilityCheck
    runs-on: [self-hosted, style-checker]
    steps:
      - name: Clear repository
        run: |
          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Finish label
        run: |
          cd "$GITHUB_WORKSPACE/tests/ci"
          python3 finish_check.py
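A note on the Cleanup step that closes every job above: "||:" is shell shorthand for "|| true", so the docker commands cannot fail the job when nothing is left to clean. Annotated (a sketch, not project code):

    docker kill "$(docker ps -q)" ||:      # stop whatever is still running; ignore the error when no containers exist
    docker rm -f "$(docker ps -a -q)" ||:  # force-remove all containers, running or exited
    sudo rm -fr "$TEMP_PATH"               # drop the per-job scratch directory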
8 .github/workflows/cancel.yml vendored
@@ -1,7 +1,12 @@
name: Cancel

env:
  # Force the stdout and stderr streams to be unbuffered
  PYTHONUNBUFFERED: 1

on:  # yamllint disable-line rule:truthy
  workflow_run:
    workflows: ["CIGithubActions"]
    workflows: ["PullRequestCI", "ReleaseCI", "DocsCheck", "BackportPR"]
    types:
      - requested
jobs:
@@ -10,4 +15,5 @@ jobs:
    steps:
      - uses: styfle/cancel-workflow-action@0.9.1
        with:
          all_but_latest: true
          workflow_id: ${{ github.event.workflow.id }}
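With all_but_latest: true the action keeps only the newest run of each listed workflow and cancels any older run still in flight. Roughly the same effect, done by hand with the GitHub CLI (the workflow file name is illustrative, and this assumes an authenticated gh session):

    # List in-progress runs of one workflow (newest first) and cancel all
    # but the first; a manual approximation of cancel-workflow-action.
    gh run list --workflow=pull_request.yml --status=in_progress \
        --json databaseId --jq '.[1:][].databaseId' |
        while read -r id; do gh run cancel "$id"; done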
125 .github/workflows/docs_check.yml vendored Normal file
@@ -0,0 +1,125 @@
name: DocsCheck

env:
  # Force the stdout and stderr streams to be unbuffered
  PYTHONUNBUFFERED: 1

on:  # yamllint disable-line rule:truthy
  pull_request:
    types:
      - synchronize
      - reopened
      - opened
    branches:
      - master
    paths:
      - 'docs/**'
      - 'website/**'
jobs:
  CheckLabels:
    runs-on: [self-hosted, style-checker]
    steps:
      - name: Clear repository
        run: |
          sudo rm -rf "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Labels check
        run: |
          cd "$GITHUB_WORKSPACE/tests/ci"
          python3 run_check.py
  DockerHubPushAarch64:
    needs: CheckLabels
    runs-on: [self-hosted, func-tester-aarch64]
    steps:
      - name: Clear repository
        run: |
          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Images check
        run: |
          cd "$GITHUB_WORKSPACE/tests/ci"
          python3 docker_images_check.py --suffix aarch64
      - name: Upload images files to artifacts
        uses: actions/upload-artifact@v2
        with:
          name: changed_images_aarch64
          path: ${{ runner.temp }}/docker_images_check/changed_images_aarch64.json
  DockerHubPushAmd64:
    needs: CheckLabels
    runs-on: [self-hosted, style-checker]
    steps:
      - name: Clear repository
        run: |
          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Images check
        run: |
          cd "$GITHUB_WORKSPACE/tests/ci"
          python3 docker_images_check.py --suffix amd64
      - name: Upload images files to artifacts
        uses: actions/upload-artifact@v2
        with:
          name: changed_images_amd64
          path: ${{ runner.temp }}/docker_images_check/changed_images_amd64.json
  DockerHubPush:
    needs: [DockerHubPushAmd64, DockerHubPushAarch64]
    runs-on: [self-hosted, style-checker]
    steps:
      - name: Clear repository
        run: |
          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Download changed aarch64 images
        uses: actions/download-artifact@v2
        with:
          name: changed_images_aarch64
          path: ${{ runner.temp }}
      - name: Download changed amd64 images
        uses: actions/download-artifact@v2
        with:
          name: changed_images_amd64
          path: ${{ runner.temp }}
      - name: Images check
        run: |
          cd "$GITHUB_WORKSPACE/tests/ci"
          python3 docker_manifests_merge.py --suffix amd64 --suffix aarch64
      - name: Upload images files to artifacts
        uses: actions/upload-artifact@v2
        with:
          name: changed_images
          path: ${{ runner.temp }}/changed_images.json
  DocsCheck:
    needs: DockerHubPush
    runs-on: [self-hosted, func-tester]
    steps:
      - name: Set envs
        run: |
          cat >> "$GITHUB_ENV" << 'EOF'
          TEMP_PATH=${{runner.temp}}/docs_check
          REPO_COPY=${{runner.temp}}/docs_check/ClickHouse
          EOF
      - name: Download changed images
        uses: actions/download-artifact@v2
        with:
          name: changed_images
          path: ${{ env.TEMP_PATH }}
      - name: Clear repository
        run: |
          sudo rm -rf "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Docs Check
        run: |
          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
          cd "$REPO_COPY/tests/ci"
          python3 docs_check.py
      - name: Cleanup
        if: always()
        run: |
          docker kill "$(docker ps -q)" ||:
          docker rm -f "$(docker ps -a -q)" ||:
          sudo rm -fr "$TEMP_PATH"
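The paths: filter above means this workflow only triggers when a pull request touches documentation or website files. A local approximation of the same check (the base branch name is illustrative):

    # Would DocsCheck trigger for the current branch? List files changed
    # relative to master and match them against the workflow's paths: filter.
    git diff --name-only master...HEAD | grep -E '^(docs|website)/' \
        && echo "DocsCheck would run" || echo "DocsCheck would be skipped"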
121 .github/workflows/docs_release.yml vendored Normal file
@@ -0,0 +1,121 @@
name: DocsReleaseChecks

env:
  # Force the stdout and stderr streams to be unbuffered
  PYTHONUNBUFFERED: 1

concurrency:
  group: master-release
  cancel-in-progress: true
on:  # yamllint disable-line rule:truthy
  push:
    branches:
      - master
    paths:
      - 'docs/**'
      - 'website/**'
      - 'benchmark/**'
      - 'docker/**'
      - '.github/**'
  workflow_dispatch:
jobs:
  DockerHubPushAarch64:
    runs-on: [self-hosted, func-tester-aarch64]
    steps:
      - name: Clear repository
        run: |
          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Images check
        run: |
          cd "$GITHUB_WORKSPACE/tests/ci"
          python3 docker_images_check.py --suffix aarch64
      - name: Upload images files to artifacts
        uses: actions/upload-artifact@v2
        with:
          name: changed_images_aarch64
          path: ${{ runner.temp }}/docker_images_check/changed_images_aarch64.json
  DockerHubPushAmd64:
    runs-on: [self-hosted, style-checker]
    steps:
      - name: Clear repository
        run: |
          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Images check
        run: |
          cd "$GITHUB_WORKSPACE/tests/ci"
          python3 docker_images_check.py --suffix amd64
      - name: Upload images files to artifacts
        uses: actions/upload-artifact@v2
        with:
          name: changed_images_amd64
          path: ${{ runner.temp }}/docker_images_check/changed_images_amd64.json
  DockerHubPush:
    needs: [DockerHubPushAmd64, DockerHubPushAarch64]
    runs-on: [self-hosted, style-checker]
    steps:
      - name: Clear repository
        run: |
          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Download changed aarch64 images
        uses: actions/download-artifact@v2
        with:
          name: changed_images_aarch64
          path: ${{ runner.temp }}
      - name: Download changed amd64 images
        uses: actions/download-artifact@v2
        with:
          name: changed_images_amd64
          path: ${{ runner.temp }}
      - name: Images check
        run: |
          cd "$GITHUB_WORKSPACE/tests/ci"
          python3 docker_manifests_merge.py --suffix amd64 --suffix aarch64
      - name: Upload images files to artifacts
        uses: actions/upload-artifact@v2
        with:
          name: changed_images
          path: ${{ runner.temp }}/changed_images.json
  DocsRelease:
    needs: DockerHubPush
    runs-on: [self-hosted, func-tester]
    steps:
      - name: Set envs
        # https://docs.github.com/en/actions/learn-github-actions/workflow-commands-for-github-actions#multiline-strings
        run: |
          cat >> "$GITHUB_ENV" << 'EOF'
          TEMP_PATH=${{runner.temp}}/docs_release
          REPO_COPY=${{runner.temp}}/docs_release/ClickHouse
          CLOUDFLARE_TOKEN=${{secrets.CLOUDFLARE}}
          ROBOT_CLICKHOUSE_SSH_KEY<<RCSK
          ${{secrets.ROBOT_CLICKHOUSE_SSH_KEY}}
          RCSK
          EOF
      - name: Clear repository
        run: |
          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Download changed images
        uses: actions/download-artifact@v2
        with:
          name: changed_images
          path: ${{ env.TEMP_PATH }}
      - name: Docs Release
        run: |
          sudo rm -fr "$TEMP_PATH"
          mkdir -p "$TEMP_PATH"
          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
          cd "$REPO_COPY/tests/ci"
          python3 docs_release.py
      - name: Cleanup
        if: always()
        run: |
          docker kill "$(docker ps -q)" ||:
          docker rm -f "$(docker ps -a -q)" ||:
          sudo rm -fr "$TEMP_PATH"
44 .github/workflows/jepsen.yml vendored Normal file
@@ -0,0 +1,44 @@
name: JepsenWorkflow
env:
  # Force the stdout and stderr streams to be unbuffered
  PYTHONUNBUFFERED: 1
concurrency:
  group: jepsen
on:  # yamllint disable-line rule:truthy
  schedule:
    - cron: '0 */6 * * *'
  workflow_run:
    workflows: ["PullRequestCI"]
    types:
      - completed
  workflow_dispatch:
jobs:
  KeeperJepsenRelease:
    runs-on: [self-hosted, style-checker]
    steps:
      - name: Set envs
        run: |
          cat >> "$GITHUB_ENV" << 'EOF'
          TEMP_PATH=${{runner.temp}}/keeper_jepsen
          REPO_COPY=${{runner.temp}}/keeper_jepsen/ClickHouse
          EOF
      - name: Clear repository
        run: |
          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
        uses: actions/checkout@v2
        with:
          fetch-depth: 0
      - name: Jepsen Test
        run: |
          sudo rm -fr "$TEMP_PATH"
          mkdir -p "$TEMP_PATH"
          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
          cd "$REPO_COPY/tests/ci"
          python3 keeper_jepsen_check.py
      - name: Cleanup
        if: always()
        run: |
          docker kill "$(docker ps -q)" ||:
          docker rm -f "$(docker ps -a -q)" ||:
          sudo rm -fr "$TEMP_PATH"
421 .github/workflows/main.yml vendored
@@ -1,421 +0,0 @@
name: CIGithubActions
on:  # yamllint disable-line rule:truthy
  pull_request:
    types:
      - labeled
      - unlabeled
      - synchronize
      - reopened
      - opened
    branches:
      - master
jobs:
  CheckLabels:
    runs-on: [self-hosted, style-checker]
    steps:
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Labels check
        run: |
          cd $GITHUB_WORKSPACE/tests/ci
          python3 run_check.py
  DockerHubPush:
    needs: CheckLabels
    runs-on: [self-hosted, style-checker]
    steps:
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Images check
        run: |
          cd $GITHUB_WORKSPACE/tests/ci
          python3 docker_images_check.py
      - name: Upload images files to artifacts
        uses: actions/upload-artifact@v2
        with:
          name: changed_images
          path: ${{ runner.temp }}/docker_images_check/changed_images.json
  StyleCheck:
    needs: DockerHubPush
    runs-on: [self-hosted, style-checker]
    steps:
      - name: Download changed images
        uses: actions/download-artifact@v2
        with:
          name: changed_images
          path: ${{ runner.temp }}/style_check
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Style Check
        env:
          TEMP_PATH: ${{ runner.temp }}/style_check
        run: |
          cd $GITHUB_WORKSPACE/tests/ci
          python3 style_check.py
      - name: Cleanup
        if: always()
        run: |
          docker kill $(docker ps -q) ||:
          docker rm -f $(docker ps -a -q) ||:
          sudo rm -fr $TEMP_PATH
  DocsCheck:
    needs: DockerHubPush
    runs-on: [self-hosted, func-tester]
    steps:
      - name: Download changed images
        uses: actions/download-artifact@v2
        with:
          name: changed_images
          path: ${{ runner.temp }}/docs_check
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Docs Check
        env:
          TEMP_PATH: ${{runner.temp}}/docs_check
          REPO_COPY: ${{runner.temp}}/docs_check/ClickHouse
        run: |
          cp -r $GITHUB_WORKSPACE $TEMP_PATH
          cd $REPO_COPY/tests/ci
          python3 docs_check.py
      - name: Cleanup
        if: always()
        run: |
          docker kill $(docker ps -q) ||:
          docker rm -f $(docker ps -a -q) ||:
          sudo rm -fr $TEMP_PATH
  BuilderDebDebug:
    needs: DockerHubPush
    if: ${{ !contains(github.event.pull_request.labels.*.name, 'pr-documentation') && !contains(github.event.pull_request.labels.*.name, 'pr-doc-fix') }}
    runs-on: [self-hosted, builder]
    steps:
      - name: Download changed images
        uses: actions/download-artifact@v2
        with:
          name: changed_images
          path: ${{ runner.temp }}/images_path
      - name: Check out repository code
        uses: actions/checkout@v2
        with:
          submodules: 'recursive'
          fetch-depth: 0 # otherwise we will have no info about contributors
      - name: Build
        env:
          TEMP_PATH: ${{runner.temp}}/build_check
          IMAGES_PATH: ${{runner.temp}}/images_path
          REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
          CACHES_PATH: ${{runner.temp}}/../ccaches
          CHECK_NAME: 'ClickHouse build check (actions)'
          BUILD_NUMBER: 7
        run: |
          sudo rm -fr $TEMP_PATH
          mkdir -p $TEMP_PATH
          cp -r $GITHUB_WORKSPACE $TEMP_PATH
          cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER
      - name: Upload build URLs to artifacts
        uses: actions/upload-artifact@v2
        with:
          name: ${{ env.BUILD_NAME }}
          path: ${{ runner.temp }}/build_check/${{ env.BUILD_NAME }}.json
      - name: Cleanup
        if: always()
        run: |
          docker kill $(docker ps -q) ||:
          docker rm -f $(docker ps -a -q) ||:
          sudo rm -fr $TEMP_PATH
  BuilderDebAsan:
    needs: DockerHubPush
    if: ${{ !contains(github.event.pull_request.labels.*.name, 'pr-documentation') && !contains(github.event.pull_request.labels.*.name, 'pr-doc-fix') }}
    runs-on: [self-hosted, builder]
    steps:
      - name: Download changed images
        uses: actions/download-artifact@v2
        with:
          name: changed_images
          path: ${{ runner.temp }}/images_path
      - name: Check out repository code
        uses: actions/checkout@v2
        with:
          submodules: 'recursive'
          fetch-depth: 0 # otherwise we will have no info about contributors
      - name: Build
        env:
          TEMP_PATH: ${{runner.temp}}/build_check
          IMAGES_PATH: ${{runner.temp}}/images_path
          REPO_COPY: ${{runner.temp}}/build_check/ClickHouse
          CACHES_PATH: ${{runner.temp}}/../ccaches
          CHECK_NAME: 'ClickHouse build check (actions)'
          BUILD_NUMBER: 3
        run: |
          sudo rm -fr $TEMP_PATH
          mkdir -p $TEMP_PATH
          cp -r $GITHUB_WORKSPACE $TEMP_PATH
          cd $REPO_COPY/tests/ci && python3 build_check.py "$CHECK_NAME" $BUILD_NUMBER
      - name: Upload build URLs to artifacts
        uses: actions/upload-artifact@v2
        with:
          name: ${{ env.BUILD_NAME }}
          path: ${{ runner.temp }}/build_check/${{ env.BUILD_NAME }}.json
      - name: Cleanup
        if: always()
        run: |
          docker kill $(docker ps -q) ||:
          docker rm -f $(docker ps -a -q) ||:
          sudo rm -fr $TEMP_PATH
  BuilderReport:
    needs: [BuilderDebDebug, BuilderDebAsan]
    runs-on: [self-hosted, style-checker]
    steps:
      - name: Download json reports
        uses: actions/download-artifact@v2
        with:
          path: ${{runner.temp}}/reports_dir
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Report Builder
        env:
          TEMP_PATH: ${{runner.temp}}/report_check
          REPORTS_PATH: ${{runner.temp}}/reports_dir
          CHECK_NAME: 'ClickHouse build check (actions)'
        run: |
          sudo rm -fr $TEMP_PATH
          mkdir -p $TEMP_PATH
          cd $GITHUB_WORKSPACE/tests/ci
          python3 build_report_check.py "$CHECK_NAME"
      - name: Cleanup
        if: always()
        run: |
          docker kill $(docker ps -q) ||:
          docker rm -f $(docker ps -a -q) ||:
          sudo rm -fr $TEMP_PATH
  FunctionalStatelessTestDebug:
    needs: [BuilderDebDebug]
    runs-on: [self-hosted, func-tester]
    steps:
      - name: Download json reports
        uses: actions/download-artifact@v2
        with:
          path: ${{runner.temp}}/reports_dir
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Functional test
        env:
          TEMP_PATH: ${{runner.temp}}/stateless_debug
          REPORTS_PATH: ${{runner.temp}}/reports_dir
          CHECK_NAME: 'Stateless tests (debug, actions)'
          REPO_COPY: ${{runner.temp}}/stateless_debug/ClickHouse
          REQUIRED_BUILD_NUMBER: 7
          KILL_TIMEOUT: 10800
        run: |
          sudo rm -fr $TEMP_PATH
          mkdir -p $TEMP_PATH
          cp -r $GITHUB_WORKSPACE $TEMP_PATH
          cd $REPO_COPY/tests/ci
          python3 functional_test_check.py "$CHECK_NAME" $REQUIRED_BUILD_NUMBER $KILL_TIMEOUT
      - name: Cleanup
        if: always()
        run: |
          docker kill $(docker ps -q) ||:
          docker rm -f $(docker ps -a -q) ||:
          sudo rm -fr $TEMP_PATH
  FunctionalStatefulTestDebug:
    needs: [BuilderDebDebug]
    runs-on: [self-hosted, func-tester]
    steps:
      - name: Download json reports
        uses: actions/download-artifact@v2
        with:
          path: ${{runner.temp}}/reports_dir
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Functional test
        env:
          TEMP_PATH: ${{runner.temp}}/stateful_debug
          REPORTS_PATH: ${{runner.temp}}/reports_dir
          CHECK_NAME: 'Stateful tests (debug, actions)'
          REPO_COPY: ${{runner.temp}}/stateful_debug/ClickHouse
          REQUIRED_BUILD_NUMBER: 7
          KILL_TIMEOUT: 3600
        run: |
          sudo rm -fr $TEMP_PATH
          mkdir -p $TEMP_PATH
          cp -r $GITHUB_WORKSPACE $TEMP_PATH
          cd $REPO_COPY/tests/ci
          python3 functional_test_check.py "$CHECK_NAME" $REQUIRED_BUILD_NUMBER $KILL_TIMEOUT
      - name: Cleanup
        if: always()
        run: |
          docker kill $(docker ps -q) ||:
          docker rm -f $(docker ps -a -q) ||:
          sudo rm -fr $TEMP_PATH
  StressTestDebug:
    needs: [BuilderDebDebug]
    runs-on: [self-hosted, stress-tester]
    steps:
      - name: Download json reports
        uses: actions/download-artifact@v2
        with:
          path: ${{runner.temp}}/reports_dir
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Stress test
        env:
          TEMP_PATH: ${{runner.temp}}/stress_debug
          REPORTS_PATH: ${{runner.temp}}/reports_dir
          CHECK_NAME: 'Stress tests (debug, actions)'
          REPO_COPY: ${{runner.temp}}/stress_debug/ClickHouse
          REQUIRED_BUILD_NUMBER: 7
        run: |
          sudo rm -fr $TEMP_PATH
          mkdir -p $TEMP_PATH
          cp -r $GITHUB_WORKSPACE $TEMP_PATH
          cd $REPO_COPY/tests/ci
          python3 stress_check.py "$CHECK_NAME" $REQUIRED_BUILD_NUMBER
      - name: Cleanup
        if: always()
        run: |
          docker kill $(docker ps -q) ||:
          docker rm -f $(docker ps -a -q) ||:
          sudo rm -fr $TEMP_PATH
  ASTFuzzerTestDebug:
    needs: [BuilderDebDebug]
    runs-on: [self-hosted, func-tester]
    steps:
      - name: Download json reports
        uses: actions/download-artifact@v2
        with:
          path: ${{runner.temp}}/reports_dir
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Fuzzer
        env:
          TEMP_PATH: ${{runner.temp}}/ast_fuzzer_debug
          REPORTS_PATH: ${{runner.temp}}/reports_dir
          CHECK_NAME: 'AST fuzzer (debug, actions)'
          REPO_COPY: ${{runner.temp}}/ast_fuzzer_debug/ClickHouse
          REQUIRED_BUILD_NUMBER: 7
        run: |
          sudo rm -fr $TEMP_PATH
          mkdir -p $TEMP_PATH
          cp -r $GITHUB_WORKSPACE $TEMP_PATH
          cd $REPO_COPY/tests/ci
          python3 ast_fuzzer_check.py "$CHECK_NAME" $REQUIRED_BUILD_NUMBER
      - name: Cleanup
        if: always()
        run: |
          docker kill $(docker ps -q) ||:
          docker rm -f $(docker ps -a -q) ||:
          sudo rm -fr $TEMP_PATH
  IntegrationTestsAsan:
    needs: [BuilderDebAsan]
    runs-on: [self-hosted, stress-tester]
    steps:
      - name: Download json reports
        uses: actions/download-artifact@v2
        with:
          path: ${{runner.temp}}/reports_dir
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Integration test
        env:
          TEMP_PATH: ${{runner.temp}}/integration_tests_debug
          REPORTS_PATH: ${{runner.temp}}/reports_dir
          CHECK_NAME: 'Integration tests (asan, actions)'
          REPO_COPY: ${{runner.temp}}/integration_tests_debug/ClickHouse
          REQUIRED_BUILD_NUMBER: 3
        run: |
          sudo rm -fr $TEMP_PATH
          mkdir -p $TEMP_PATH
          cp -r $GITHUB_WORKSPACE $TEMP_PATH
          cd $REPO_COPY/tests/ci
          python3 integration_test_check.py "$CHECK_NAME" $REQUIRED_BUILD_NUMBER
      - name: Cleanup
        if: always()
        run: |
          docker kill $(docker ps -q) ||:
          docker rm -f $(docker ps -a -q) ||:
          sudo rm -fr $TEMP_PATH
  UnitTestsAsan:
    needs: [BuilderDebAsan]
    runs-on: [self-hosted, func-tester]
    steps:
      - name: Download json reports
        uses: actions/download-artifact@v2
        with:
          path: ${{runner.temp}}/reports_dir
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Unit test
        env:
          TEMP_PATH: ${{runner.temp}}/unit_tests_asan
          REPORTS_PATH: ${{runner.temp}}/reports_dir
          CHECK_NAME: 'Unit tests (asan, actions)'
          REPO_COPY: ${{runner.temp}}/unit_tests_asan/ClickHouse
          REQUIRED_BUILD_NUMBER: 3
        run: |
          sudo rm -fr $TEMP_PATH
          mkdir -p $TEMP_PATH
          cp -r $GITHUB_WORKSPACE $TEMP_PATH
          cd $REPO_COPY/tests/ci
          python3 unit_tests_check.py "$CHECK_NAME" $REQUIRED_BUILD_NUMBER
      - name: Cleanup
        if: always()
        run: |
          docker kill $(docker ps -q) ||:
          docker rm -f $(docker ps -a -q) ||:
          sudo rm -fr $TEMP_PATH
  FastTest:
    needs: DockerHubPush
    if: ${{ !contains(github.event.pull_request.labels.*.name, 'pr-documentation') && !contains(github.event.pull_request.labels.*.name, 'pr-doc-fix') }}
    runs-on: [self-hosted, builder]
    steps:
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Fast Test
        env:
          TEMP_PATH: ${{runner.temp}}/fasttest
          REPO_COPY: ${{runner.temp}}/fasttest/ClickHouse
          CACHES_PATH: ${{runner.temp}}/../ccaches
        run: |
          sudo rm -fr $TEMP_PATH
          mkdir -p $TEMP_PATH
          cp -r $GITHUB_WORKSPACE $TEMP_PATH
          cd $REPO_COPY/tests/ci && python3 fast_test_check.py
      - name: Cleanup
        if: always()
        run: |
          docker kill $(docker ps -q) ||:
          docker rm -f $(docker ps -a -q) ||:
          sudo rm -fr $TEMP_PATH
  PVSCheck:
    needs: DockerHubPush
    if: ${{ !contains(github.event.pull_request.labels.*.name, 'pr-documentation') && !contains(github.event.pull_request.labels.*.name, 'pr-doc-fix') }}
    runs-on: [self-hosted, func-tester]
    steps:
      - name: Check out repository code
        uses: actions/checkout@v2
        with:
          submodules: 'recursive'
      - name: PVS Check
        env:
          TEMP_PATH: ${{runner.temp}}/pvs_check
          REPO_COPY: ${{runner.temp}}/pvs_check/ClickHouse
        run: |
          sudo rm -fr $TEMP_PATH
          mkdir -p $TEMP_PATH
          cp -r $GITHUB_WORKSPACE $TEMP_PATH
          cd $REPO_COPY/tests/ci && python3 pvs_check.py
      - name: Cleanup
        if: always()
        run: |
          docker kill $(docker ps -q) ||:
          docker rm -f $(docker ps -a -q) ||:
          sudo rm -fr $TEMP_PATH
  FinishCheck:
    needs: [StyleCheck, DockerHubPush, CheckLabels, BuilderReport, FastTest, FunctionalStatelessTestDebug, FunctionalStatefulTestDebug, DocsCheck, StressTestDebug, ASTFuzzerTestDebug, IntegrationTestsAsan, PVSCheck, UnitTestsAsan]
    runs-on: [self-hosted, style-checker]
    steps:
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Finish label
        run: |
          cd $GITHUB_WORKSPACE/tests/ci
          python3 finish_check.py
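One visible difference between this deleted workflow and its replacements is that every variable expansion gained double quotes. A two-line sketch of why that matters in shell (the value shown is hypothetical, and shellcheck flags the unquoted form as SC2086):

    TEMP_PATH="/tmp/dir with spaces"   # hypothetical value
    rm -fr $TEMP_PATH     # word-splits: tries to remove /tmp/dir, "with", "spaces"
    rm -fr "$TEMP_PATH"   # removes exactly the intended directory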
2877 .github/workflows/master.yml vendored Normal file
File diff suppressed because it is too large
3066 .github/workflows/pull_request.yml vendored Normal file
File diff suppressed because it is too large
86 .github/workflows/release.yml vendored
@@ -1,55 +1,39 @@
name: DocsReleaseChecks
concurrency:
  group: master-release
  cancel-in-progress: true
name: ReleaseWorkflow
# - Gets artifacts from S3
# - Sends them to JFROG Artifactory
# - Adds them to the release assets

on:  # yamllint disable-line rule:truthy
  push:
    branches:
      - master
    paths:
      - 'docs/**'
      - 'website/**'
      - 'benchmark/**'
      - 'docker/**'
  release:
    types:
      - published

jobs:
  DockerHubPush:
  ReleasePublish:
    runs-on: [self-hosted, style-checker]
    steps:
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Images check
        run: |
          cd $GITHUB_WORKSPACE/tests/ci
          python3 docker_images_check.py
      - name: Upload images files to artifacts
        uses: actions/upload-artifact@v2
        with:
          name: changed_images
          path: ${{ runner.temp }}/docker_images_check/changed_images.json
  DocsRelease:
    needs: DockerHubPush
    runs-on: [self-hosted, func-tester]
    steps:
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Download changed images
        uses: actions/download-artifact@v2
        with:
          name: changed_images
          path: ${{runner.temp}}/docs_release
      - name: Docs Release
        env:
          TEMP_PATH: ${{runner.temp}}/docs_release
          REPO_COPY: ${{runner.temp}}/docs_release/ClickHouse
          CLOUDFLARE_TOKEN: ${{secrets.CLOUDFLARE}}
          ROBOT_CLICKHOUSE_SSH_KEY: ${{secrets.ROBOT_CLICKHOUSE_SSH_KEY}}
        run: |
          cp -r $GITHUB_WORKSPACE $TEMP_PATH
          cd $REPO_COPY/tests/ci
          python3 docs_release.py
      - name: Cleanup
        if: always()
        run: |
          docker kill $(docker ps -q) ||:
          docker rm -f $(docker ps -a -q) ||:
          sudo rm -fr $TEMP_PATH
      - name: Set envs
        run: |
          cat >> "$GITHUB_ENV" << 'EOF'
          JFROG_API_KEY=${{ secrets.JFROG_KEY_API_PACKAGES }}
          TEMP_PATH=${{runner.temp}}/release_packages
          REPO_COPY=${{runner.temp}}/release_packages/ClickHouse
          EOF
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Download packages and push to Artifactory
        env:
        run: |
          rm -rf "$TEMP_PATH" && mkdir -p "$REPO_COPY"
          cp -r "$GITHUB_WORKSPACE" "$REPO_COPY"
          cd "$REPO_COPY"
          python3 ./tests/ci/push_to_artifactory.py --release "${{ github.ref }}" \
            --commit '${{ github.sha }}' --all
      - name: Upload packages to release assets
        uses: svenstaro/upload-release-action@v2
        with:
          repo_token: ${{ secrets.GITHUB_TOKEN }}
          file: ${{runner.temp}}/release_packages/*
          overwrite: true
          tag: ${{ github.ref }}
          file_glob: true
1782 .github/workflows/release_branches.yml vendored Normal file
File diff suppressed because it is too large
42 .github/workflows/woboq.yml vendored Normal file
@@ -0,0 +1,42 @@
name: WoboqBuilder
env:
  # Force the stdout and stderr streams to be unbuffered
  PYTHONUNBUFFERED: 1

concurrency:
  group: woboq
on:  # yamllint disable-line rule:truthy
  schedule:
    - cron: '0 */18 * * *'
  workflow_dispatch:
jobs:
  # don't use dockerhub push because this image updates so rarely
  WoboqCodebrowser:
    runs-on: [self-hosted, style-checker]
    steps:
      - name: Set envs
        run: |
          cat >> "$GITHUB_ENV" << 'EOF'
          TEMP_PATH=${{runner.temp}}/codebrowser
          REPO_COPY=${{runner.temp}}/codebrowser/ClickHouse
          IMAGES_PATH=${{runner.temp}}/images_path
          EOF
      - name: Clear repository
        run: |
          sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
        uses: actions/checkout@v2
        with:
          submodules: 'true'
      - name: Codebrowser
        run: |
          sudo rm -fr "$TEMP_PATH"
          mkdir -p "$TEMP_PATH"
          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
          cd "$REPO_COPY/tests/ci" && python3 codebrowser_check.py
      - name: Cleanup
        if: always()
        run: |
          docker kill "$(docker ps -q)" ||:
          docker rm -f "$(docker ps -a -q)" ||:
          sudo rm -fr "$TEMP_PATH"
1 .gitignore vendored
@@ -13,6 +13,7 @@
/build_*
/build-*
/tests/venv
/obj-x86_64-linux-gnu/

# logs
*.log
27 .gitmodules vendored
@@ -17,7 +17,7 @@
[submodule "contrib/zlib-ng"]
    path = contrib/zlib-ng
    url = https://github.com/ClickHouse-Extras/zlib-ng.git
    branch = clickhouse-new
    branch = clickhouse-2.0.x
[submodule "contrib/googletest"]
    path = contrib/googletest
    url = https://github.com/google/googletest.git
@@ -54,8 +54,8 @@
    url = https://github.com/ClickHouse-Extras/Turbo-Base64.git
[submodule "contrib/arrow"]
    path = contrib/arrow
    url = https://github.com/ClickHouse-Extras/arrow
    branch = clickhouse-arrow-2.0.0
    url = https://github.com/ClickHouse-Extras/arrow.git
    branch = blessed/release-6.0.1
[submodule "contrib/thrift"]
    path = contrib/thrift
    url = https://github.com/apache/thrift.git
@@ -135,12 +135,9 @@
[submodule "contrib/flatbuffers"]
    path = contrib/flatbuffers
    url = https://github.com/ClickHouse-Extras/flatbuffers.git
[submodule "contrib/libc-headers"]
    path = contrib/libc-headers
    url = https://github.com/ClickHouse-Extras/libc-headers.git
[submodule "contrib/replxx"]
    path = contrib/replxx
    url = https://github.com/AmokHuginnsson/replxx.git
    url = https://github.com/ClickHouse-Extras/replxx.git
[submodule "contrib/avro"]
    path = contrib/avro
    url = https://github.com/ClickHouse-Extras/avro.git
@@ -171,12 +168,6 @@
[submodule "contrib/sentry-native"]
    path = contrib/sentry-native
    url = https://github.com/ClickHouse-Extras/sentry-native.git
[submodule "contrib/gcem"]
    path = contrib/gcem
    url = https://github.com/kthohr/gcem.git
[submodule "contrib/stats"]
    path = contrib/stats
    url = https://github.com/kthohr/stats.git
[submodule "contrib/krb5"]
    path = contrib/krb5
    url = https://github.com/ClickHouse-Extras/krb5
@@ -199,8 +190,8 @@
    url = https://github.com/xz-mirror/xz
[submodule "contrib/abseil-cpp"]
    path = contrib/abseil-cpp
    url = https://github.com/ClickHouse-Extras/abseil-cpp.git
    branch = lts_2020_02_25
    url = https://github.com/abseil/abseil-cpp.git
    branch = lts_2021_11_02
[submodule "contrib/dragonbox"]
    path = contrib/dragonbox
    url = https://github.com/ClickHouse-Extras/dragonbox.git
@@ -256,3 +247,9 @@
[submodule "contrib/sysroot"]
    path = contrib/sysroot
    url = https://github.com/ClickHouse-Extras/sysroot.git
[submodule "contrib/hive-metastore"]
    path = contrib/hive-metastore
    url = https://github.com/ClickHouse-Extras/hive-metastore
[submodule "contrib/azure"]
    path = contrib/azure
    url = https://github.com/ClickHouse-Extras/azure-sdk-for-cpp.git
1997
CHANGELOG.md
File diff suppressed because it is too large
203
CMakeLists.txt
@ -139,7 +139,6 @@ if (ENABLE_FUZZING)
    set (ENABLE_CLICKHOUSE_ODBC_BRIDGE OFF)
    set (ENABLE_LIBRARIES 0)
    set (ENABLE_SSL 1)
    set (USE_INTERNAL_SSL_LIBRARY 1)
    set (USE_UNWIND ON)
    set (ENABLE_EMBEDDED_COMPILER 0)
    set (ENABLE_EXAMPLES 0)
@ -149,6 +148,9 @@ if (ENABLE_FUZZING)
    set (ENABLE_JEMALLOC 0)
    set (ENABLE_CHECK_HEAVY_BUILDS 1)
    set (GLIBC_COMPATIBILITY OFF)

    # For codegen_select_fuzzer
    set (ENABLE_PROTOBUF 1)
endif()

# Global libraries
@ -169,9 +171,7 @@ endif ()
include (cmake/check_flags.cmake)
include (cmake/add_warning.cmake)

if (NOT MSVC)
    set (COMMON_WARNING_FLAGS "${COMMON_WARNING_FLAGS} -Wall") # -Werror and many more is also added inside cmake/warnings.cmake
endif ()
set (COMMON_WARNING_FLAGS "${COMMON_WARNING_FLAGS} -Wall") # -Werror and many more is also added inside cmake/warnings.cmake

if (COMPILER_CLANG)
    # clang: warning: argument unused during compilation: '-specs=/usr/share/dpkg/no-pie-compile.specs' [-Wunused-command-line-argument]
@ -201,7 +201,7 @@ endif ()
option(ENABLE_TESTS "Provide unit_test_dbms target with Google.Test unit tests" ON)
option(ENABLE_EXAMPLES "Build all example programs in 'examples' subdirectories" OFF)

if (OS_LINUX AND (ARCH_AMD64 OR ARCH_AARCH64) AND NOT UNBUNDLED AND MAKE_STATIC_LIBRARIES AND NOT SPLIT_SHARED_LIBRARIES AND NOT USE_MUSL)
if (OS_LINUX AND (ARCH_AMD64 OR ARCH_AARCH64) AND MAKE_STATIC_LIBRARIES AND NOT SPLIT_SHARED_LIBRARIES AND NOT USE_MUSL)
    # Only for Linux, x86_64 or aarch64.
    option(GLIBC_COMPATIBILITY "Enable compatibility with older glibc libraries." ON)
elseif(GLIBC_COMPATIBILITY)
@ -219,37 +219,13 @@ endif()
# Make sure the final executable has symbols exported
set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -rdynamic")

find_program (OBJCOPY_PATH NAMES "llvm-objcopy" "llvm-objcopy-13" "llvm-objcopy-12" "llvm-objcopy-11" "llvm-objcopy-10" "llvm-objcopy-9" "llvm-objcopy-8" "objcopy")

if (NOT OBJCOPY_PATH AND OS_DARWIN)
    find_program (BREW_PATH NAMES "brew")
    if (BREW_PATH)
        execute_process (COMMAND ${BREW_PATH} --prefix llvm ERROR_QUIET OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE LLVM_PREFIX)
        if (LLVM_PREFIX)
            find_program (OBJCOPY_PATH NAMES "llvm-objcopy" PATHS "${LLVM_PREFIX}/bin" NO_DEFAULT_PATH)
        endif ()
        if (NOT OBJCOPY_PATH)
            execute_process (COMMAND ${BREW_PATH} --prefix binutils ERROR_QUIET OUTPUT_STRIP_TRAILING_WHITESPACE OUTPUT_VARIABLE BINUTILS_PREFIX)
            if (BINUTILS_PREFIX)
                find_program (OBJCOPY_PATH NAMES "objcopy" PATHS "${BINUTILS_PREFIX}/bin" NO_DEFAULT_PATH)
            endif ()
        endif ()
    endif ()
endif ()

if (OBJCOPY_PATH)
    message (STATUS "Using objcopy: ${OBJCOPY_PATH}")
else ()
    message (FATAL_ERROR "Cannot find objcopy.")
endif ()

if (OS_DARWIN)
    # The `-all_load` flag forces loading of all symbols from all libraries,
    # and leads to multiply-defined symbols. This flag allows force loading
    # from a _specific_ library, which is what we need.
    set(WHOLE_ARCHIVE -force_load)
    # The `-noall_load` flag is the default and now obsolete.
    set(NO_WHOLE_ARCHIVE "")
    set(NO_WHOLE_ARCHIVE "-undefined,error") # Effectively, a no-op. Here to avoid empty "-Wl, " sequence to be generated in the command line.
else ()
    set(WHOLE_ARCHIVE --whole-archive)
    set(NO_WHOLE_ARCHIVE --no-whole-archive)
@ -278,6 +254,13 @@ if (NOT CMAKE_BUILD_TYPE_UC STREQUAL "RELEASE")
    endif ()
endif()

if (CMAKE_BUILD_TYPE_UC STREQUAL "DEBUG")
    set(USE_DEBUG_HELPERS ON)
else ()
    set(USE_DEBUG_HELPERS ON)
endif()
option(USE_DEBUG_HELPERS "Enable debug helpers" ${USE_DEBUG_HELPERS})

# Create BuildID when using lld. For other linkers it is created by default.
if (LINKER_NAME MATCHES "lld$")
    # SHA1 is not cryptographically secure but it is the best what lld is offering.
@ -312,6 +295,10 @@ include(cmake/cpu_features.cmake)
# Enable it explicitly.
set (COMPILER_FLAGS "${COMPILER_FLAGS} -fasynchronous-unwind-tables")

# Reproducible builds
set (COMPILER_FLAGS "${COMPILER_FLAGS} -ffile-prefix-map=${CMAKE_SOURCE_DIR}=.")
set (CMAKE_ASM_FLAGS "${CMAKE_ASM_FLAGS} -ffile-prefix-map=${CMAKE_SOURCE_DIR}=.")

if (${CMAKE_VERSION} VERSION_LESS "3.12.4")
    # CMake < 3.12 doesn't support setting 20 as a C++ standard version.
    # We will add C++ standard controlling flag in CMAKE_CXX_FLAGS manually for now.
@ -392,6 +379,8 @@ if (COMPILER_CLANG)
        option(ENABLE_THINLTO "Clang-specific link time optimization" ON)
    endif()

    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fstrict-vtable-pointers")

    # Set new experimental pass manager, it's a performance, build time and binary size win.
    # Can be removed after https://reviews.llvm.org/D66490 merged and released to at least two versions of clang.
    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fexperimental-new-pass-manager")
@ -402,32 +391,13 @@ if (COMPILER_CLANG)
    # completely.
    if (ENABLE_THINLTO AND NOT ENABLE_TESTS AND NOT SANITIZE)
        # Link time optimization
        set (CMAKE_C_FLAGS_RELWITHDEBINFO "${CMAKE_C_FLAGS_RELWITHDEBINFO} -flto=thin")
        set (CMAKE_CXX_FLAGS_RELWITHDEBINFO "${CMAKE_CXX_FLAGS_RELWITHDEBINFO} -flto=thin")
        set (CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO "${CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO} -flto=thin")
        set (CMAKE_C_FLAGS_RELWITHDEBINFO "${CMAKE_C_FLAGS_RELWITHDEBINFO} -flto=thin -fwhole-program-vtables")
        set (CMAKE_CXX_FLAGS_RELWITHDEBINFO "${CMAKE_CXX_FLAGS_RELWITHDEBINFO} -flto=thin -fwhole-program-vtables")
        set (CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO "${CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO} -flto=thin -fwhole-program-vtables")
    elseif (ENABLE_THINLTO)
        message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot enable ThinLTO")
    endif ()

    # Always prefer llvm tools when using clang. For instance, we cannot use GNU ar when llvm LTO is enabled
    find_program (LLVM_AR_PATH NAMES "llvm-ar" "llvm-ar-13" "llvm-ar-12" "llvm-ar-11" "llvm-ar-10" "llvm-ar-9" "llvm-ar-8")

    if (LLVM_AR_PATH)
        message(STATUS "Using llvm-ar: ${LLVM_AR_PATH}.")
        set (CMAKE_AR ${LLVM_AR_PATH})
    else ()
        message(WARNING "Cannot find llvm-ar. System ar will be used instead. It does not work with ThinLTO.")
    endif ()

    find_program (LLVM_RANLIB_PATH NAMES "llvm-ranlib" "llvm-ranlib-13" "llvm-ranlib-12" "llvm-ranlib-11" "llvm-ranlib-10" "llvm-ranlib-9" "llvm-ranlib-8")

    if (LLVM_RANLIB_PATH)
        message(STATUS "Using llvm-ranlib: ${LLVM_RANLIB_PATH}.")
        set (CMAKE_RANLIB ${LLVM_RANLIB_PATH})
    else ()
        message(WARNING "Cannot find llvm-ranlib. System ranlib will be used instead. It does not work with ThinLTO.")
    endif ()

elseif (ENABLE_THINLTO)
    message (${RECONFIGURE_MESSAGE_LEVEL} "ThinLTO is only available with CLang")
endif ()
@ -435,20 +405,7 @@ endif ()
# Turns on all external libs like s3, kafka, ODBC, ...
option(ENABLE_LIBRARIES "Enable all external libraries by default" ON)

# We recommend avoiding this mode for production builds because we can't guarantee
# all needed libraries exist in your system.
# This mode exists for enthusiastic developers who are searching for trouble.
# The whole idea of using unknown version of libraries from the OS distribution is deeply flawed.
# Useful for maintainers of OS packages.
option (UNBUNDLED "Use system libraries instead of ones in contrib/" OFF)

if (UNBUNDLED)
    set(NOT_UNBUNDLED OFF)
else ()
    set(NOT_UNBUNDLED ON)
endif ()

if (UNBUNDLED OR NOT (OS_LINUX OR OS_DARWIN))
if (NOT (OS_LINUX OR OS_DARWIN))
    # Using system libs can cause a lot of warnings in includes (on macro expansion).
    option(WERROR "Enable -Werror compiler option" OFF)
else ()
@ -456,13 +413,19 @@ else ()
endif ()

if (WERROR)
    add_warning(error)
    # Don't pollute CMAKE_CXX_FLAGS with -Werror as it will break some CMake checks.
    # Instead, adopt modern cmake usage requirement.
    target_compile_options(global-libs INTERFACE "-Werror")
endif ()

# Make this extra-checks for correct library dependencies.
if (OS_LINUX AND NOT SANITIZE)
    set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -Wl,--no-undefined")
    set (CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} -Wl,--no-undefined")
    target_link_options(global-libs INTERFACE "-Wl,--no-undefined")
endif ()

# Increase stack size on Musl. We need big stack for our recursive-descend parser.
if (USE_MUSL)
    set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -Wl,-z,stack-size=2097152")
endif ()

include(cmake/dbms_glob_sources.cmake)
@ -488,23 +451,15 @@ if (MAKE_STATIC_LIBRARIES)
        # It's disabled for ARM because otherwise ClickHouse cannot run on Android.
        set (CMAKE_CXX_FLAGS_RELWITHDEBINFO "${CMAKE_CXX_FLAGS_RELWITHDEBINFO} -fno-pie")
        set (CMAKE_C_FLAGS_RELWITHDEBINFO "${CMAKE_C_FLAGS_RELWITHDEBINFO} -fno-pie")
        set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -Wl,-no-pie")
        set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -no-pie -Wl,-no-pie")
    endif ()
else ()
    set (CMAKE_POSITION_INDEPENDENT_CODE ON)
endif ()

# https://github.com/include-what-you-use/include-what-you-use
option (USE_INCLUDE_WHAT_YOU_USE "Automatically reduce unneeded includes in source code (external tool)" OFF)

if (USE_INCLUDE_WHAT_YOU_USE)
    find_program(IWYU_PATH NAMES include-what-you-use iwyu)
    if (NOT IWYU_PATH)
        message(FATAL_ERROR "Could not find the program include-what-you-use")
    endif()
    if (${CMAKE_VERSION} VERSION_LESS "3.3.0")
        message(FATAL_ERROR "include-what-you-use requires CMake version at least 3.3.")
    endif()
    # This is required for clang on Arch linux, that uses PIE by default.
    # See enable-SSP-and-PIE-by-default.patch [1].
    #
    # [1]: https://github.com/archlinux/svntogit-packages/blob/6e681aa860e65ad46a1387081482eb875c2200f2/trunk/enable-SSP-and-PIE-by-default.patch
    set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -no-pie")
endif ()

if (ENABLE_TESTS)
@ -527,89 +482,12 @@ message (STATUS
    USE_STATIC_LIBRARIES=${USE_STATIC_LIBRARIES}
    MAKE_STATIC_LIBRARIES=${MAKE_STATIC_LIBRARIES}
    SPLIT_SHARED=${SPLIT_SHARED_LIBRARIES}
    UNBUNDLED=${UNBUNDLED}
    CCACHE=${CCACHE_FOUND} ${CCACHE_VERSION}")

include (GNUInstallDirs)
include (cmake/contrib_finder.cmake)

find_contrib_lib(double-conversion) # Must be before parquet
include (cmake/find/ssl.cmake)
include (cmake/find/ldap.cmake) # after ssl
include (cmake/find/icu.cmake)
include (cmake/find/xz.cmake)
include (cmake/find/zlib.cmake)
include (cmake/find/zstd.cmake)
include (cmake/find/ltdl.cmake) # for odbc
# openssl, zlib before poco
include (cmake/find/sparsehash.cmake)
include (cmake/find/re2.cmake)
include (cmake/find/krb5.cmake)
include (cmake/find/libgsasl.cmake)
include (cmake/find/cyrus-sasl.cmake)
include (cmake/find/rdkafka.cmake)
include (cmake/find/libuv.cmake) # for amqpcpp and cassandra
include (cmake/find/amqpcpp.cmake)
include (cmake/find/capnp.cmake)
include (cmake/find/llvm.cmake)
include (cmake/find/h3.cmake)
include (cmake/find/libxml2.cmake)
include (cmake/find/brotli.cmake)
include (cmake/find/protobuf.cmake)
include (cmake/find/grpc.cmake)
include (cmake/find/pdqsort.cmake)
include (cmake/find/miniselect.cmake)
include (cmake/find/hdfs3.cmake) # uses protobuf
include (cmake/find/poco.cmake)
include (cmake/find/curl.cmake)
include (cmake/find/s3.cmake)
include (cmake/find/base64.cmake)
include (cmake/find/parquet.cmake)
include (cmake/find/simdjson.cmake)
include (cmake/find/fast_float.cmake)
include (cmake/find/rapidjson.cmake)
include (cmake/find/fastops.cmake)
include (cmake/find/odbc.cmake)
include (cmake/find/nanodbc.cmake)
include (cmake/find/sqlite.cmake)
include (cmake/find/rocksdb.cmake)
include (cmake/find/libpqxx.cmake)
include (cmake/find/nuraft.cmake)
include (cmake/find/yaml-cpp.cmake)
include (cmake/find/s2geometry.cmake)
include (cmake/find/nlp.cmake)
include (cmake/find/bzip2.cmake)
include (cmake/find/filelog.cmake)

if(NOT USE_INTERNAL_PARQUET_LIBRARY)
    set (ENABLE_ORC OFF CACHE INTERNAL "")
endif()
include (cmake/find/orc.cmake)

include (cmake/find/avro.cmake)
include (cmake/find/msgpack.cmake)
include (cmake/find/cassandra.cmake)
include (cmake/find/sentry.cmake)
include (cmake/find/stats.cmake)
include (cmake/find/datasketches.cmake)
include (cmake/find/libprotobuf-mutator.cmake)

set (USE_INTERNAL_CITYHASH_LIBRARY ON CACHE INTERNAL "")
find_contrib_lib(cityhash)

find_contrib_lib(farmhash)

if (ENABLE_TESTS)
    include (cmake/find/gtest.cmake)
endif ()

# Need to process before "contrib" dir:
include (cmake/find/mysqlclient.cmake)

# When testing for memory leaks with Valgrind, don't link tcmalloc or jemalloc.

include (cmake/print_flags.cmake)

if (TARGET global-group)
    install (EXPORT global DESTINATION cmake)
endif ()
@ -624,7 +502,7 @@ macro (add_executable target)
    # invoke built-in add_executable
    # explicitly acquire and interpose malloc symbols by clickhouse_malloc
    # if GLIBC_COMPATIBILITY is ON and ENABLE_THINLTO is on than provide memcpy symbol explicitly to neutrialize thinlto's libcall generation.
    if (GLIBC_COMPATIBILITY AND ENABLE_THINLTO)
    if (ARCH_AMD64 AND GLIBC_COMPATIBILITY AND ENABLE_THINLTO)
        _add_executable (${ARGV} $<TARGET_OBJECTS:clickhouse_malloc> $<TARGET_OBJECTS:memcpy>)
    else ()
        _add_executable (${ARGV} $<TARGET_OBJECTS:clickhouse_malloc>)
@ -661,6 +539,7 @@ include_directories(${ConfigIncludePath})

# Add as many warnings as possible for our own code.
include (cmake/warnings.cmake)
include (cmake/print_flags.cmake)

add_subdirectory (base)
add_subdirectory (src)
@ -668,6 +547,4 @@ add_subdirectory (programs)
add_subdirectory (tests)
add_subdirectory (utils)

include (cmake/print_include_directories.cmake)

include (cmake/sanitize_target_link_libraries.cmake)
@ -2,7 +2,13 @@

ClickHouse is an open project, and you can contribute to it in many ways. You can help with ideas, code, or documentation. We appreciate any efforts that help us to make the project better.

Thank you.
Thank you!

## Legal Info

When you open your first pull-request to ClickHouse repo, a bot will invite you to accept ClickHouse Individual CLA (Contributor License Agreement). It is a simple few click process. For subsequent pull-requests the bot will check if you have already signed it and won't bother you again.

Optionally, to make contributions even more tight legally, your employer as a legal entity may want to sign a ClickHouse Corporate CLA with ClickHouse, Inc. If you're interested to do so, contact us at [legal@clickhouse.com](mailto:legal@clickhouse.com).

## Technical Info
@ -18,3 +18,25 @@ if (NOT DEFINED ENV{CLION_IDE} AND NOT DEFINED ENV{XCODE_IDE})
        set(CMAKE_GENERATOR "Ninja" CACHE INTERNAL "" FORCE)
    endif ()
endif()


# Default toolchain - this is needed to avoid dependency on OS files.
execute_process(COMMAND uname -s OUTPUT_VARIABLE OS)
execute_process(COMMAND uname -m OUTPUT_VARIABLE ARCH)

if (OS MATCHES "Linux"
    AND NOT DEFINED CMAKE_TOOLCHAIN_FILE
    AND NOT DISABLE_HERMETIC_BUILD
    AND ($ENV{CC} MATCHES ".*clang.*" OR CMAKE_C_COMPILER MATCHES ".*clang.*"))

    if (ARCH MATCHES "amd64|x86_64")
        set (CMAKE_TOOLCHAIN_FILE "cmake/linux/toolchain-x86_64.cmake" CACHE INTERNAL "" FORCE)
    elseif (ARCH MATCHES "^(aarch64.*|AARCH64.*|arm64.*|ARM64.*)")
        set (CMAKE_TOOLCHAIN_FILE "cmake/linux/toolchain-aarch64.cmake" CACHE INTERNAL "" FORCE)
    elseif (ARCH MATCHES "^(ppc64le.*|PPC64LE.*)")
        set (CMAKE_TOOLCHAIN_FILE "cmake/linux/toolchain-ppc64le.cmake" CACHE INTERNAL "" FORCE)
    else ()
        message (FATAL_ERROR "Unsupported architecture: ${ARCH}")
    endif ()

endif()
@ -10,5 +10,6 @@ ClickHouse® is an open-source column-oriented database management system that a
* [YouTube channel](https://www.youtube.com/c/ClickHouseDB) has a lot of content about ClickHouse in video format.
* [Slack](https://join.slack.com/t/clickhousedb/shared_invite/zt-rxm3rdrk-lIUmhLC3V8WTaL0TGxsOmg) and [Telegram](https://telegram.me/clickhouse_en) allow chatting with ClickHouse users in real-time.
* [Blog](https://clickhouse.com/blog/en/) contains various ClickHouse-related articles, as well as announcements and reports about events.
* [Code Browser](https://clickhouse.com/codebrowser/html_report/ClickHouse/index.html) with syntax highlight and navigation.
* [Code Browser (Woboq)](https://clickhouse.com/codebrowser/html_report/ClickHouse/index.html) with syntax highlight and navigation.
* [Code Browser (github.dev)](https://github.dev/ClickHouse/ClickHouse) with syntax highlight, powered by github.dev.
* [Contacts](https://clickhouse.com/company/#contact) can help to get your questions answered if there are any.
@ -19,9 +19,12 @@ The following versions of ClickHouse server are currently being supported with s
| 21.4 | :x: |
| 21.5 | :x: |
| 21.6 | :x: |
| 21.7 | ✅ |
| 21.7 | :x: |
| 21.8 | ✅ |
| 21.9 | ✅ |
| 21.9 | :x: |
| 21.10 | ✅ |
| 21.11 | ✅ |
| 21.12 | ✅ |

## Reporting a Vulnerability
@ -9,7 +9,3 @@ add_subdirectory (pcg-random)
add_subdirectory (widechar_width)
add_subdirectory (readpassphrase)
add_subdirectory (bridge)

if (USE_MYSQL)
    add_subdirectory (mysqlxx)
endif ()
@ -1,8 +1,6 @@
set (SRCS
    argsToConfig.cpp
    coverage.cpp
    DateLUT.cpp
    DateLUTImpl.cpp
    demangle.cpp
    getFQDNOrHostName.cpp
    getMemoryAmount.cpp
@ -18,19 +16,20 @@ set (SRCS
    sleep.cpp
    terminalColors.cpp
    errnoToString.cpp
    getResource.cpp
    StringRef.cpp
)

if (ENABLE_REPLXX)
    list (APPEND SRCS ReplxxLineReader.cpp)
elseif (ENABLE_READLINE)
    list (APPEND SRCS ReadlineLineReader.cpp)
endif ()

if (USE_DEBUG_HELPERS)
    set (INCLUDE_DEBUG_HELPERS "-include \"${ClickHouse_SOURCE_DIR}/base/base/iostream_debug_helpers.h\"")
    set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${INCLUDE_DEBUG_HELPERS}")
    get_target_property(MAGIC_ENUM_INCLUDE_DIR ch_contrib::magic_enum INTERFACE_INCLUDE_DIRECTORIES)
    # CMake generator expression will do insane quoting when it encounters special character like quotes, spaces, etc.
    # Prefixing "SHELL:" will force it to use the original text.
    set (INCLUDE_DEBUG_HELPERS "SHELL:-I\"${MAGIC_ENUM_INCLUDE_DIR}\" -include \"${ClickHouse_SOURCE_DIR}/base/base/iostream_debug_helpers.h\"")
    # Use generator expression as we don't want to pollute CMAKE_CXX_FLAGS, which will interfere with CMake check system.
    add_compile_options($<$<COMPILE_LANGUAGE:CXX>:${INCLUDE_DEBUG_HELPERS}>)
endif ()

add_library (common ${SRCS})
@ -41,51 +40,25 @@ else ()
    target_compile_definitions(common PUBLIC WITH_COVERAGE=0)
endif ()

if (USE_INTERNAL_CCTZ)
    set_source_files_properties(DateLUTImpl.cpp PROPERTIES COMPILE_DEFINITIONS USE_INTERNAL_CCTZ)
endif()

target_include_directories(common PUBLIC .. "${CMAKE_CURRENT_BINARY_DIR}/..")

if (OS_DARWIN AND NOT MAKE_STATIC_LIBRARIES)
    target_link_libraries(common PUBLIC -Wl,-U,_inside_main)
endif()

# Allow explicit fallback to readline
if (NOT ENABLE_REPLXX AND ENABLE_READLINE)
    message (STATUS "Attempt to fallback to readline explicitly")
    set (READLINE_PATHS "/usr/local/opt/readline/lib")
    # First try find custom lib for macos users (default lib without history support)
    find_library (READLINE_LIB NAMES readline PATHS ${READLINE_PATHS} NO_DEFAULT_PATH)
    if (NOT READLINE_LIB)
        find_library (READLINE_LIB NAMES readline PATHS ${READLINE_PATHS})
    endif ()

    set(READLINE_INCLUDE_PATHS "/usr/local/opt/readline/include")
    find_path (READLINE_INCLUDE_DIR NAMES readline/readline.h PATHS ${READLINE_INCLUDE_PATHS} NO_DEFAULT_PATH)
    if (NOT READLINE_INCLUDE_DIR)
        find_path (READLINE_INCLUDE_DIR NAMES readline/readline.h PATHS ${READLINE_INCLUDE_PATHS})
    endif ()
    if (READLINE_INCLUDE_DIR AND READLINE_LIB)
        target_link_libraries(common PUBLIC ${READLINE_LIB})
        target_compile_definitions(common PUBLIC USE_READLINE=1)
        message (STATUS "Using readline: ${READLINE_INCLUDE_DIR} : ${READLINE_LIB}")
    endif ()
endif ()

target_link_libraries (common
    PUBLIC
        ${CITYHASH_LIBRARIES}
        ch_contrib::cityhash
        boost::headers_only
        boost::system
        Poco::Net
        Poco::Net::SSL
        Poco::Util
        Poco::Foundation
        replxx
        cctz
        fmt
        magic_enum
        ch_contrib::replxx
        ch_contrib::cctz
        ch_contrib::fmt
        ch_contrib::magic_enum
)

if (ENABLE_TESTS)
@ -18,8 +18,8 @@ struct CachedFn
{
private:
    using Traits = FnTraits<decltype(Func)>;
    using DecayedArgs = TLMap<std::decay_t, typename Traits::Args>;
    using Key = TLChangeRoot<std::tuple, DecayedArgs>;
    using DecayedArgs = TypeListMap<std::decay_t, typename Traits::Args>;
    using Key = TypeListChangeRoot<std::tuple, DecayedArgs>;
    using Result = typename Traits::Ret;

    std::map<Key, Result> cache; // Can't use hashmap as tuples are unhashable by default
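For context, a minimal sketch of what the renamed aliases compute, assuming the `TypeList` utilities introduced later in this commit; the argument list here is a hypothetical example, not taken from the source:

```cpp
#include <string>
#include <tuple>
#include <type_traits>
#include <base/TypeList.h>

// For a hypothetical cached callable taking (const std::string &, int):
// TypeListMap decays every argument type, and TypeListChangeRoot re-roots
// the resulting list into std::tuple, giving the cache key type.
using Args = TypeList<const std::string &, int>;
using DecayedArgs = TypeListMap<std::decay_t, Args>;      // TypeList<std::string, int>
using Key = TypeListChangeRoot<std::tuple, DecayedArgs>;  // std::tuple<std::string, int>

static_assert(std::is_same_v<Key, std::tuple<std::string, int>>);
```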
@ -1,6 +1,6 @@
#pragma once

#include "Typelist.h"
#include "TypeList.h"

namespace detail
{
@ -14,7 +14,7 @@ struct FnTraits<R(A...)>
    static constexpr bool value = std::is_invocable_r_v<R, F, A...>;

    using Ret = R;
    using Args = Typelist<A...>;
    using Args = TypeList<A...>;
};

template <class R, class ...A>
@ -10,16 +10,6 @@
#include <sys/types.h>


#ifdef OS_LINUX
/// We can detect if code is linked with one or another readline variants or open the library dynamically.
# include <dlfcn.h>
extern "C"
{
    char * readline(const char *) __attribute__((__weak__));
    char * (*readline_ptr)(const char *) = readline;
}
#endif

#ifdef HAS_RESERVED_IDENTIFIER
#pragma clang diagnostic ignored "-Wreserved-identifier"
#endif
@ -152,33 +142,6 @@ LineReader::InputStatus LineReader::readOneLine(const String & prompt)
{
    input.clear();

#ifdef OS_LINUX
    if (!readline_ptr)
    {
        for (const auto * name : {"libreadline.so", "libreadline.so.0", "libeditline.so", "libeditline.so.0"})
        {
            void * dl_handle = dlopen(name, RTLD_LAZY);
            if (dl_handle)
            {
                readline_ptr = reinterpret_cast<char * (*)(const char *)>(dlsym(dl_handle, "readline"));
                if (readline_ptr)
                {
                    break;
                }
            }
        }
    }

    /// Minimal support for readline
    if (readline_ptr)
    {
        char * line_read = (*readline_ptr)(prompt.c_str());
        if (!line_read)
            return ABORT;
        input = line_read;
    }
    else
#endif
    {
        std::cout << prompt;
        std::getline(std::cin, input);
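The block removed above located readline() at runtime instead of at link time. A self-contained sketch of that dlopen/dlsym pattern (same library names as in the removed code; link with -ldl):

```cpp
#include <dlfcn.h>

using readline_fn = char * (*)(const char *);

// Try several well-known readline/editline sonames and return the first
// readline() symbol found, or nullptr if none of them can be loaded.
readline_fn findReadline()
{
    for (const auto * name : {"libreadline.so", "libreadline.so.0", "libeditline.so", "libeditline.so.0"})
        if (void * handle = dlopen(name, RTLD_LAZY))
            if (void * sym = dlsym(handle, "readline"))
                return reinterpret_cast<readline_fn>(sym);
    return nullptr;
}
```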
@ -53,7 +53,6 @@ protected:

    String input;

private:
    bool multiline;

    Patterns extenders;
@ -1,187 +0,0 @@
#include <base/ReadlineLineReader.h>
#include <base/errnoToString.h>
#include <base/scope_guard.h>

#include <errno.h>
#include <signal.h>
#include <string.h>
#include <unistd.h>

#include <iostream>

namespace
{

/// Trim ending whitespace inplace
void trim(String & s)
{
    s.erase(std::find_if(s.rbegin(), s.rend(), [](int ch) { return !std::isspace(ch); }).base(), s.end());
}

}

static const LineReader::Suggest * suggest;

/// Points to current word to suggest.
static LineReader::Suggest::Words::const_iterator pos;
/// Points after the last possible match.
static LineReader::Suggest::Words::const_iterator end;

/// Set iterators to the matched range of words if any.
static void findRange(const char * prefix, size_t prefix_length)
{
    std::string prefix_str(prefix);
    if (auto completions = suggest->getCompletions(prefix_str, prefix_length))
        std::tie(pos, end) = *completions;
}

/// Iterates through matched range.
static char * nextMatch()
{
    if (pos >= end)
        return nullptr;

    /// readline will free memory by itself.
    char * word = strdup(pos->c_str());
    ++pos;
    return word;
}

static char * generate(const char * text, int state)
{
    if (!suggest->ready)
        return nullptr;
    if (state == 0)
        findRange(text, strlen(text));

    /// Do not append whitespace after word. For unknown reason, rl_completion_append_character = '\0' does not work.
    rl_completion_suppress_append = 1;

    return nextMatch();
};

ReadlineLineReader::ReadlineLineReader(
    const Suggest & suggest_, const String & history_file_path_, bool multiline_, Patterns extenders_, Patterns delimiters_)
    : LineReader(history_file_path_, multiline_, std::move(extenders_), std::move(delimiters_))
{
    suggest = &suggest_;

    if (!history_file_path.empty())
    {
        int res = read_history(history_file_path.c_str());
        if (res)
            std::cerr << "Cannot read history from file " + history_file_path + ": "+ errnoToString(errno) << std::endl;
    }

    /// Added '.' to the default list. Because it is used to separate database and table.
    rl_basic_word_break_characters = word_break_characters;

    /// Not append whitespace after single suggestion. Because whitespace after function name is meaningless.
    rl_completion_append_character = '\0';

    rl_completion_entry_function = generate;

    /// Install Ctrl+C signal handler that will be used in interactive mode.

    if (rl_initialize())
        throw std::runtime_error("Cannot initialize readline");

    auto clear_prompt_or_exit = [](int)
    {
        /// This is signal safe.
        ssize_t res = write(STDOUT_FILENO, "\n", 1);

        /// Allow to quit client while query is in progress by pressing Ctrl+C twice.
        /// (First press to Ctrl+C will try to cancel query by InterruptListener).
        if (res == 1 && rl_line_buffer[0] && !RL_ISSTATE(RL_STATE_DONE))
        {
            rl_replace_line("", 0);
            if (rl_forced_update_display())
                _exit(0);
        }
        else
        {
            /// A little dirty, but we struggle to find better way to correctly
            /// force readline to exit after returning from the signal handler.
            _exit(0);
        }
    };

    if (signal(SIGINT, clear_prompt_or_exit) == SIG_ERR)
        throw std::runtime_error(std::string("Cannot set signal handler for readline: ") + errnoToString(errno));

    rl_variable_bind("completion-ignore-case", "on");
    // TODO: it doesn't work
    // history_write_timestamps = 1;
}

ReadlineLineReader::~ReadlineLineReader()
{
}

LineReader::InputStatus ReadlineLineReader::readOneLine(const String & prompt)
{
    input.clear();

    const char* cinput = readline(prompt.c_str());
    if (cinput == nullptr)
        return (errno != EAGAIN) ? ABORT : RESET_LINE;
    input = cinput;

    trim(input);
    return INPUT_LINE;
}

void ReadlineLineReader::addToHistory(const String & line)
{
    add_history(line.c_str());

    // Flush changes to the disk
    // NOTE readline builds a buffer of all the lines to write, and write them in one syscall.
    // Thus there is no need to lock the history file here.
    write_history(history_file_path.c_str());
}

#if RL_VERSION_MAJOR >= 7

#define BRACK_PASTE_PREF "\033[200~"
#define BRACK_PASTE_SUFF "\033[201~"

#define BRACK_PASTE_LAST '~'
#define BRACK_PASTE_SLEN 6

/// This handler bypasses some unused macro/event checkings and remove trailing newlines before insertion.
static int clickhouse_rl_bracketed_paste_begin(int /* count */, int /* key */)
{
    std::string buf;
    buf.reserve(128);

    RL_SETSTATE(RL_STATE_MOREINPUT);
    SCOPE_EXIT(RL_UNSETSTATE(RL_STATE_MOREINPUT));
    int c;
    while ((c = rl_read_key()) >= 0)
    {
        if (c == '\r')
            c = '\n';
        buf.push_back(c);
        if (buf.size() >= BRACK_PASTE_SLEN && c == BRACK_PASTE_LAST && buf.substr(buf.size() - BRACK_PASTE_SLEN) == BRACK_PASTE_SUFF)
        {
            buf.resize(buf.size() - BRACK_PASTE_SLEN);
            break;
        }
    }
    trim(buf);
    return static_cast<size_t>(rl_insert_text(buf.c_str())) == buf.size() ? 0 : 1;
}

#endif

void ReadlineLineReader::enableBracketedPaste()
{
#if RL_VERSION_MAJOR >= 7
    rl_variable_bind("enable-bracketed-paste", "on");

    /// Use our bracketed paste handler to get better user experience. See comments above.
    rl_bind_keyseq(BRACK_PASTE_PREF, clickhouse_rl_bracketed_paste_begin);
#endif
};
@ -1,19 +0,0 @@
#pragma once

#include "LineReader.h"

#include <readline/readline.h>
#include <readline/history.h>

class ReadlineLineReader : public LineReader
{
public:
    ReadlineLineReader(const Suggest & suggest, const String & history_file_path, bool multiline, Patterns extenders_, Patterns delimiters_);
    ~ReadlineLineReader() override;

    void enableBracketedPaste() override;

private:
    InputStatus readOneLine(const String & prompt) override;
    void addToHistory(const String & line) override;
};
@ -22,7 +22,24 @@ namespace
/// Trim ending whitespace inplace
void trim(String & s)
{
    s.erase(std::find_if(s.rbegin(), s.rend(), [](int ch) { return !std::isspace(ch); }).base(), s.end());
    s.erase(std::find_if(s.rbegin(), s.rend(), [](unsigned char ch) { return !std::isspace(ch); }).base(), s.end());
}

/// Check if string ends with given character after skipping whitespaces.
bool ends_with(const std::string_view & s, const std::string_view & p)
{
    auto ss = std::string_view(s.data(), s.rend() - std::find_if(s.rbegin(), s.rend(), [](unsigned char ch) { return !std::isspace(ch); }));
    return ss.ends_with(p);
}

std::string getEditor()
{
    const char * editor = std::getenv("EDITOR");

    if (!editor || !*editor)
        editor = "vim";

    return editor;
}

/// Copied from replxx::src/util.cxx::now_ms_str() under the terms of 3-clause BSD license of Replxx.
@ -123,6 +140,7 @@ ReplxxLineReader::ReplxxLineReader(
    Patterns delimiters_,
    replxx::Replxx::highlighter_callback_t highlighter_)
    : LineReader(history_file_path_, multiline_, std::move(extenders_), std::move(delimiters_)), highlighter(std::move(highlighter_))
    , editor(getEditor())
{
    using namespace std::placeholders;
    using Replxx = replxx::Replxx;
@ -178,8 +196,28 @@ ReplxxLineReader::ReplxxLineReader(
    rx.bind_key(Replxx::KEY::control('N'), [this](char32_t code) { return rx.invoke(Replxx::ACTION::HISTORY_NEXT, code); });
    rx.bind_key(Replxx::KEY::control('P'), [this](char32_t code) { return rx.invoke(Replxx::ACTION::HISTORY_PREVIOUS, code); });

    auto commit_action = [this](char32_t code)
    {
        std::string_view str = rx.get_state().text();

        /// Always commit line when we see extender at the end. It will start a new prompt.
        for (const auto * extender : extenders)
            if (ends_with(str, extender))
                return rx.invoke(Replxx::ACTION::COMMIT_LINE, code);

        /// If we see an delimiter at the end, commit right away.
        for (const auto * delimiter : delimiters)
            if (ends_with(str, delimiter))
                return rx.invoke(Replxx::ACTION::COMMIT_LINE, code);

        /// If we allow multiline and there is already something in the input, start a newline.
        if (multiline && !input.empty())
            return rx.invoke(Replxx::ACTION::NEW_LINE, code);
        return rx.invoke(Replxx::ACTION::COMMIT_LINE, code);
    };
    /// bind C-j to ENTER action.
    rx.bind_key(Replxx::KEY::control('J'), [this](char32_t code) { return rx.invoke(Replxx::ACTION::COMMIT_LINE, code); });
    rx.bind_key(Replxx::KEY::control('J'), commit_action);
    rx.bind_key(Replxx::KEY::ENTER, commit_action);

    /// By default COMPLETE_NEXT/COMPLETE_PREV was binded to C-p/C-n, re-bind
    /// to M-P/M-N (that was used for HISTORY_COMMON_PREFIX_SEARCH before, but
@ -236,14 +274,13 @@ void ReplxxLineReader::addToHistory(const String & line)
        rx.print("Unlock of history file failed: %s\n", errnoToString(errno).c_str());
}

int ReplxxLineReader::execute(const std::string & command)
/// See comments in ShellCommand::executeImpl()
/// (for the vfork via dlsym())
int ReplxxLineReader::executeEditor(const std::string & path)
{
    std::vector<char> argv0("sh", &("sh"[3]));
    std::vector<char> argv1("-c", &("-c"[3]));
    std::vector<char> argv2(command.data(), command.data() + command.size() + 1);

    const char * filename = "/bin/sh";
    char * const argv[] = {argv0.data(), argv1.data(), argv2.data(), nullptr};
    std::vector<char> argv0(editor.data(), editor.data() + editor.size() + 1);
    std::vector<char> argv1(path.data(), path.data() + path.size() + 1);
    char * const argv[] = {argv0.data(), argv1.data(), nullptr};

    static void * real_vfork = dlsym(RTLD_DEFAULT, "vfork");
    if (!real_vfork)
@ -260,6 +297,7 @@ int ReplxxLineReader::execute(const std::string & command)
        return -1;
    }

    /// Child
    if (0 == pid)
    {
        sigset_t mask;
@ -267,16 +305,26 @@ int ReplxxLineReader::execute(const std::string & command)
        sigprocmask(0, nullptr, &mask);
        sigprocmask(SIG_UNBLOCK, &mask, nullptr);

        execv(filename, argv);
        execvp(editor.c_str(), argv);
        rx.print("Cannot execute %s: %s\n", editor.c_str(), errnoToString(errno).c_str());
        _exit(-1);
    }

    int status = 0;
    if (-1 == waitpid(pid, &status, 0))
    do
    {
        rx.print("Cannot waitpid: %s\n", errnoToString(errno).c_str());
        return -1;
    }
        int exited_pid = waitpid(pid, &status, 0);
        if (exited_pid == -1)
        {
            if (errno == EINTR)
                continue;

            rx.print("Cannot waitpid: %s\n", errnoToString(errno).c_str());
            return -1;
        }
        else
            break;
    } while (true);
    return status;
}

@ -290,10 +338,6 @@ void ReplxxLineReader::openEditor()
        return;
    }

    const char * editor = std::getenv("EDITOR");
    if (!editor || !*editor)
        editor = "vim";

    replxx::Replxx::State state(rx.get_state());

    size_t bytes_written = 0;
@ -316,7 +360,7 @@ void ReplxxLineReader::openEditor()
        return;
    }

    if (0 == execute(fmt::format("{} {}", editor, filename)))
    if (0 == executeEditor(filename))
    {
        try
        {
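The rewritten executeEditor() above also makes the wait EINTR-safe: waitpid() can be interrupted by a signal before the child exits, in which case it fails with errno == EINTR and should simply be retried. A minimal standalone sketch of that pattern:

```cpp
#include <cerrno>
#include <sys/wait.h>

// Wait for a child process, retrying on EINTR; returns the wait status,
// or -1 on a real error.
int waitForChild(pid_t pid)
{
    int status = 0;
    while (waitpid(pid, &status, 0) == -1)
    {
        if (errno != EINTR)
            return -1;  // genuine failure; EINTR just means "try again"
    }
    return status;
}
```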
@ -22,7 +22,7 @@ public:
private:
    InputStatus readOneLine(const String & prompt) override;
    void addToHistory(const String & line) override;
    int execute(const std::string & command);
    int executeEditor(const std::string & path);
    void openEditor();

    replxx::Replxx rx;
@ -31,4 +31,6 @@ private:
    // used to call flock() to synchronize multiple clients using same history file
    int history_file_fd = -1;
    bool bracketed_paste_enabled = false;

    std::string editor;
};
@ -48,7 +48,9 @@ struct StringRef
    std::string toString() const { return std::string(data, size); }

    explicit operator std::string() const { return toString(); }
    constexpr explicit operator std::string_view() const { return {data, size}; }
    std::string_view toView() const { return std::string_view(data, size); }

    constexpr explicit operator std::string_view() const { return std::string_view(data, size); }
};

/// Here constexpr doesn't implicate inline, see https://www.viva64.com/en/w/v1043/
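For reference, a short sketch of how the two accessors differ at call sites, assuming the StringRef definition above (the wrapper function is hypothetical):

```cpp
#include <string_view>

// toView() reads naturally at call sites, while the conversion operator must
// be spelled out explicitly; both produce the same view over the same bytes.
void useStringRef(const StringRef & ref)
{
    std::string_view v1 = ref.toView();
    auto v2 = static_cast<std::string_view>(ref);
    (void)v1; (void)v2;
}
```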
44
base/base/TypeList.h
Normal file
@ -0,0 +1,44 @@
#pragma once

#include <cstddef>
#include <type_traits>
#include <utility>
#include "defines.h"
#include "TypePair.h"

/// General-purpose typelist. Easy on compilation times as it does not use recursion.
template <typename ...Args>
struct TypeList { static constexpr size_t size = sizeof...(Args); };

namespace TypeListUtils /// In some contexts it's more handy to use functions instead of aliases
{
    template <typename ...LArgs, typename ...RArgs>
    constexpr TypeList<LArgs..., RArgs...> concat(TypeList<LArgs...>, TypeList<RArgs...>) { return {}; }

    template <typename T, typename ...Args>
    constexpr TypeList<T, Args...> prepend(TypeList<Args...>) { return {}; }

    template <typename T, typename ...Args>
    constexpr TypeList<Args..., T> append(TypeList<Args...>) { return {}; }

    template <template <typename> typename F, typename ...Args>
    constexpr TypeList<F<Args>...> map(TypeList<Args...>) { return {}; }

    template <template <typename...> typename Root, typename ...Args>
    constexpr Root<Args...> changeRoot(TypeList<Args...>) { return {}; }

    template <typename F, typename ...Args>
    constexpr void forEach(TypeList<Args...>, F && f) { (std::forward<F>(f)(Id<Args>{}), ...); }
}

template <typename TypeListLeft, typename TypeListRight>
using TypeListConcat = decltype(TypeListUtils::concat(TypeListLeft{}, TypeListRight{}));

template <typename T, typename List> using TypeListPrepend = decltype(TypeListUtils::prepend<T>(List{}));
template <typename T, typename List> using TypeListAppend = decltype(TypeListUtils::append<T>(List{}));

template <template <typename> typename F, typename List>
using TypeListMap = decltype(TypeListUtils::map<F>(List{}));

template <template <typename...> typename Root, typename List>
using TypeListChangeRoot = decltype(TypeListUtils::changeRoot<Root>(List{}));
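A few compile-time checks sketching how the aliases above behave; the list names here are illustrative only, and the file itself is assumed to be included:

```cpp
#include <tuple>
#include <type_traits>

using L1 = TypeList<int, double>;
using L2 = TypeList<char>;

// concat joins two lists; map applies an alias template to each element;
// changeRoot repacks the elements into another variadic template.
static_assert(std::is_same_v<TypeListConcat<L1, L2>, TypeList<int, double, char>>);
static_assert(std::is_same_v<TypeListMap<std::add_const_t, L2>, TypeList<const char>>);
static_assert(std::is_same_v<TypeListChangeRoot<std::tuple, L1>, std::tuple<int, double>>);
```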
21
base/base/TypeLists.h
Normal file
@ -0,0 +1,21 @@
#pragma once

#include "TypeList.h"
#include "extended_types.h"
#include "Decimal.h"
#include "UUID.h"

namespace DB
{

using TypeListNativeInt = TypeList<UInt8, UInt16, UInt32, UInt64, Int8, Int16, Int32, Int64>;
using TypeListFloat = TypeList<Float32, Float64>;
using TypeListNativeNumber = TypeListConcat<TypeListNativeInt, TypeListFloat>;
using TypeListWideInt = TypeList<UInt128, Int128, UInt256, Int256>;
using TypeListInt = TypeListConcat<TypeListNativeInt, TypeListWideInt>;
using TypeListIntAndFloat = TypeListConcat<TypeListInt, TypeListFloat>;
using TypeListDecimal = TypeList<Decimal32, Decimal64, Decimal128, Decimal256>;
using TypeListNumber = TypeListConcat<TypeListIntAndFloat, TypeListDecimal>;
using TypeListNumberWithUUID = TypeListAppend<UUID, TypeListNumber>;

}
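A brief sketch of how such a list is typically walked at compile time, using TypeListUtils::forEach and the Id<T> tag from TypePair.h; the helper function is hypothetical and requires a C++20 template lambda:

```cpp
#include <iostream>
#include <typeinfo>

// Prints the (mangled) name of every type in a list; each element arrives
// as an empty Id<T> tag, from which the lambda recovers T.
template <typename ...Ts>
void printTypeList(TypeList<Ts...> list)
{
    TypeListUtils::forEach(list, []<typename T>(Id<T>)
    {
        std::cout << typeid(T).name() << '\n';
    });
}

// Example: printTypeList(TypeList<int, double, char>{});
```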
@ -1,4 +1,4 @@
#pragma once

template <class T, class V> struct TypePair { };
template <class T> struct Id { };
template <typename T, typename V> struct TypePair {};
template <typename T> struct Id {};
@ -1,44 +0,0 @@
#pragma once

#include <cstddef>
#include <type_traits>
#include <utility>
#include "defines.h"
#include "TypePair.h"

/// General-purpose typelist. Easy on compilation times as it does not use recursion.
template <class ...Args>
struct Typelist { static constexpr size_t size = sizeof...(Args); };

namespace TLUtils /// In some contexts it's more handy to use functions instead of aliases
{
    template <class ...LArgs, class ...RArgs>
    constexpr Typelist<LArgs..., RArgs...> concat(Typelist<LArgs...>, Typelist<RArgs...>) { return {}; }

    template <class T, class ...Args>
    constexpr Typelist<T, Args...> prepend(Typelist<Args...>) { return {}; }

    template <class T, class ...Args>
    constexpr Typelist<Args..., T> append(Typelist<Args...>) { return {}; }

    template <template<class> class F, class ...Args>
    constexpr Typelist<F<Args>...> map(Typelist<Args...>) { return {}; }

    template <template<class...> class Root, class ...Args>
    constexpr Root<Args...> changeRoot(Typelist<Args...>) { return {}; }

    template <class F, class ...Args>
    constexpr void forEach(Typelist<Args...>, F && f) { (std::forward<F>(f)(Id<Args>{}), ...); }
}

template <class TLLeft, class TLRight>
using TLConcat = decltype(TLUtils::concat(TLLeft{}, TLRight{}));

template <class T, class Typelist> using TLPrepend = decltype(TLUtils::prepend<T>(Typelist{}));
template <class T, class Typelist> using TLAppend = decltype(TLUtils::append<T>(Typelist{}));

template <template<class> class F, class Typelist>
using TLMap = decltype(TLUtils::map<F>(Typelist{}));

template <template<class...> class Root, class Typelist>
using TLChangeRoot = decltype(TLUtils::changeRoot<Root>(Typelist{}));
@ -1,18 +0,0 @@
#pragma once

#include "Typelist.h"
#include "extended_types.h"
#include "Decimal.h"
#include "UUID.h"

namespace DB
{
using TLIntegral = Typelist<UInt8, UInt16, UInt32, UInt64, Int8, Int16, Int32, Int64, Float32, Float64>;
using TLExtendedIntegral = Typelist<UInt128, Int128, UInt256, Int256>;
using TLDecimals = Typelist<Decimal32, Decimal64, Decimal128, Decimal256>;

using TLIntegralWithExtended = TLConcat<TLIntegral, TLExtendedIntegral>;

using TLNumbers = TLConcat<TLIntegralWithExtended, TLDecimals>;
using TLNumbersWithUUID = TLAppend<UUID, TLNumbers>;
}
@ -1,9 +1,9 @@
#pragma once

#include "strong_typedef.h"
#include "extended_types.h"
#include <base/strong_typedef.h>
#include <base/extended_types.h>

namespace DB
{
using UUID = StrongTypedef<UInt128, struct UUIDTag>;
using UUID = StrongTypedef<UInt128, struct UUIDTag>;
}
@ -28,8 +28,8 @@
#define NO_INLINE __attribute__((__noinline__))
#define MAY_ALIAS __attribute__((__may_alias__))

#if !defined(__x86_64__) && !defined(__aarch64__) && !defined(__PPC__)
#    error "The only supported platforms are x86_64 and AArch64, PowerPC (work in progress)"
#if !defined(__x86_64__) && !defined(__aarch64__) && !defined(__PPC__) && !(defined(__riscv) && (__riscv_xlen == 64))
#    error "The only supported platforms are x86_64 and AArch64, PowerPC (work in progress) and RISC-V 64 (experimental)"
#endif

/// Check for presence of address sanitizer
@ -1,8 +1,11 @@
#include <base/getPageSize.h>
#include <unistd.h>

#include <cstdlib>

Int64 getPageSize()
{
    return sysconf(_SC_PAGESIZE);
    Int64 page_size = sysconf(_SC_PAGESIZE);
    if (page_size < 0)
        abort();
    return page_size;
}
@ -6,6 +6,7 @@
#include <tuple>
#include <iomanip>
#include <iostream>
#include <magic_enum.hpp>

/** Usage:
 *
@ -61,6 +62,13 @@ std::enable_if_t<priority == 2, Out> & dumpImpl(Out & out, T && x, std::decay_t<
}


template <int priority, typename Out, typename T>
std::enable_if_t<priority == 3 && std::is_enum_v<std::decay_t<T>>, Out> &
dumpImpl(Out & out, T && x)
{
    return out << magic_enum::enum_name(x);
}

/// string and const char * - output not as container or pointer.

template <int priority, typename Out, typename T>
@ -131,15 +139,26 @@ Out & dumpValue(Out & out, T && x)
template <typename Out, typename T>
Out & dump(Out & out, const char * name, T && x)
{
    // Dumping string literal, printing name and demangled type is irrelevant.
    if constexpr (std::is_same_v<const char *, std::decay_t<std::remove_reference_t<T>>>)
    {
        const auto name_len = strlen(name);
        const auto value_len = strlen(x);
        // `name` is the same as quoted `x`
        if (name_len > 2 && value_len > 0 && name[0] == '"' && name[name_len - 1] == '"'
            && strncmp(name + 1, x, std::min(value_len, name_len) - 1) == 0)
            return out << x;
    }

    out << demangle(typeid(x).name()) << " " << name << " = ";
    return dumpValue(out, x);
    return dumpValue(out, x) << "; ";
}

#ifdef __clang__
#pragma clang diagnostic ignored "-Wgnu-zero-variadic-macro-arguments"
#endif

#define DUMPVAR(VAR) ::dump(std::cerr, #VAR, (VAR)); std::cerr << "; ";
#define DUMPVAR(VAR) ::dump(std::cerr, #VAR, (VAR));
#define DUMPHEAD std::cerr << __FILE__ << ':' << __LINE__ << " [ " << getThreadId() << " ] ";
#define DUMPTAIL std::cerr << '\n';
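A standalone restatement of the string-literal special case added to dump() above (the wrapper function is hypothetical): when a literal such as DUMPVAR("hello") is dumped, `name` arrives as the quoted literal itself, so printing the demangled type and the name would only repeat the value.

```cpp
#include <algorithm>
#include <cstring>

// Returns true when `name` is exactly `value` wrapped in double quotes,
// i.e. the dumped expression was a string literal.
bool nameIsQuotedValue(const char * name, const char * value)
{
    const size_t name_len = strlen(name);
    const size_t value_len = strlen(value);
    return name_len > 2 && value_len > 0
        && name[0] == '"' && name[name_len - 1] == '"'
        && strncmp(name + 1, value, std::min(value_len, name_len) - 1) == 0;
}
```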
@ -123,6 +123,12 @@ bool hasPHDRCache()
#else

void updatePHDRCache() {}
bool hasPHDRCache() { return false; }

#if defined(USE_MUSL)
/// With statically linked with musl, dl_iterate_phdr is immutable.
bool hasPHDRCache() { return true; }
#else
bool hasPHDRCache() { return false; }
#endif

#endif
@ -2,7 +2,7 @@

#include <base/scope_guard.h>
#include <base/logger_useful.h>
#include <Common/MemoryTracker.h>
#include <Common/LockMemoryExceptionInThread.h>

/// Same as SCOPE_EXIT() but block the MEMORY_LIMIT_EXCEEDED errors.
///
@ -12,8 +12,7 @@
///
/// NOTE: it should be used with caution.
#define SCOPE_EXIT_MEMORY(...) SCOPE_EXIT( \
    MemoryTracker::LockExceptionInThread \
        lock_memory_tracker(VariableContext::Global); \
    LockMemoryExceptionInThread lock_memory_tracker(VariableContext::Global); \
    __VA_ARGS__; \
)

@ -57,8 +56,7 @@
#define SCOPE_EXIT_MEMORY_SAFE(...) SCOPE_EXIT( \
    try \
    { \
        MemoryTracker::LockExceptionInThread \
            lock_memory_tracker(VariableContext::Global); \
        LockMemoryExceptionInThread lock_memory_tracker(VariableContext::Global); \
        __VA_ARGS__; \
    } \
    catch (...) \
@ -125,11 +125,11 @@ public:
template <size_t Bits, typename Signed, size_t Bits2, typename Signed2>
struct common_type<wide::integer<Bits, Signed>, wide::integer<Bits2, Signed2>>
{
    using type = std::conditional_t < Bits == Bits2,
        wide::integer<
            Bits,
            std::conditional_t<(std::is_same_v<Signed, Signed2> && std::is_same_v<Signed2, signed>), signed, unsigned>>,
        std::conditional_t<Bits2<Bits, wide::integer<Bits, Signed>, wide::integer<Bits2, Signed2>>>;
    using type = std::conditional_t<Bits == Bits2,
        wide::integer<
            Bits,
            std::conditional_t<(std::is_same_v<Signed, Signed2> && std::is_same_v<Signed2, signed>), signed, unsigned>>,
        std::conditional_t<Bits2<Bits, wide::integer<Bits, Signed>, wide::integer<Bits2, Signed2>>>;
};

template <size_t Bits, typename Signed, typename Arithmetic>
@ -827,7 +827,7 @@ public:

    CompilerUInt128 a = (CompilerUInt128(numerator.items[1]) << 64) + numerator.items[0];
    CompilerUInt128 b = (CompilerUInt128(denominator.items[1]) << 64) + denominator.items[0];
    CompilerUInt128 c = a / b;
    CompilerUInt128 c = a / b; // NOLINT

    integer<Bits, Signed> res;
    res.items[0] = c;
@ -1020,8 +1020,15 @@ constexpr integer<Bits, Signed>::integer(std::initializer_list<T> il) noexcept
{
    auto it = il.begin();
    for (size_t i = 0; i < _impl::item_count; ++i)
    {
        if (it < il.end())
        {
            items[i] = *it;
            ++it;
        }
        else
            items[i] = 0;
    }
}
}
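A sketch of what the reformatted common_type specialization above resolves to, assuming the wide::integer header from this diff; the type aliases are illustrative:

```cpp
#include <type_traits>

using I128 = wide::integer<128, signed>;
using U128 = wide::integer<128, unsigned>;
using I256 = wide::integer<256, signed>;

// Equal widths: the result is signed only when both operands are signed.
static_assert(std::is_same_v<std::common_type_t<I128, U128>, U128>);
// Different widths: the wider integer type wins.
static_assert(std::is_same_v<std::common_type_t<I128, I256>, I256>);
```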
@ -9,6 +9,7 @@

#include <Common/StringUtils/StringUtils.h>
#include <Common/SensitiveDataMasker.h>
#include <Common/config.h>
#include <base/errnoToString.h>
#include <IO/ReadHelpers.h>
#include <Formats/registerFormats.h>
@ -13,30 +13,21 @@
#if defined(__linux__)
#include <sys/prctl.h>
#endif
#include <fcntl.h>
#include <errno.h>
#include <string.h>
#include <signal.h>
#include <cxxabi.h>
#include <unistd.h>

#include <typeinfo>
#include <iostream>
#include <fstream>
#include <sstream>
#include <memory>
#include <base/scope_guard.h>

#include <Poco/Observer.h>
#include <Poco/AutoPtr.h>
#include <Poco/PatternFormatter.h>
#include <Poco/Message.h>
#include <Poco/Util/Application.h>
#include <Poco/Exception.h>
#include <Poco/ErrorHandler.h>
#include <Poco/Condition.h>
#include <Poco/SyslogChannel.h>
#include <Poco/DirectoryIterator.h>

#include <base/logger_useful.h>
#include <base/ErrorHandlers.h>
@ -56,13 +47,16 @@
#include <Common/getMultipleKeysFromConfig.h>
#include <Common/ClickHouseRevision.h>
#include <Common/Config/ConfigProcessor.h>
#include <Common/MemorySanitizer.h>
#include <Common/SymbolIndex.h>
#include <Common/getExecutablePath.h>
#include <Common/getHashOfLoadedBinary.h>
#include <Common/Elf.h>
#include <Common/setThreadName.h>
#include <filesystem>

#include <loggers/OwnFormattingChannel.h>
#include <loggers/OwnPatternFormatter.h>

#include <Common/config_version.h>

#if defined(OS_DARWIN)
@ -111,7 +105,7 @@ static void writeSignalIDtoSignalPipe(int sig)
    errno = saved_errno;
}

/** Signal handler for HUP / USR1 */
/** Signal handler for HUP */
static void closeLogsSignalHandler(int sig, siginfo_t *, void *)
{
    DENY_ALLOCATIONS_IN_SCOPE;
@ -168,7 +162,7 @@ __attribute__((__weak__)) void collectCrashLog(


/** The thread that read info about signal or std::terminate from pipe.
  * On HUP / USR1, close log files (for new files to be opened later).
  * On HUP, close log files (for new files to be opened later).
  * On information about std::terminate, write it to log.
  * On other signals, write info to log.
  */
@ -208,7 +202,7 @@ public:
                LOG_INFO(log, "Stop SignalListener thread");
                break;
            }
            else if (sig == SIGHUP || sig == SIGUSR1)
            else if (sig == SIGHUP)
            {
                LOG_DEBUG(log, "Received signal to close logs.");
                BaseDaemon::instance().closeLogs(BaseDaemon::instance().logger());
@ -675,6 +669,34 @@ void BaseDaemon::initialize(Application & self)
    if ((!log_path.empty() && is_daemon) || config().has("logger.stderr"))
    {
        std::string stderr_path = config().getString("logger.stderr", log_path + "/stderr.log");

        /// Check that stderr is writable before freopen(),
        /// since freopen() will make stderr invalid on error,
        /// and logging to stderr will be broken,
        /// so the following code (that is used in every program) will not write anything:
        ///
        /// int main(int argc, char ** argv)
        /// {
        ///     try
        ///     {
        ///         DB::SomeApp app;
        ///         return app.run(argc, argv);
        ///     }
        ///     catch (...)
        ///     {
        ///         std::cerr << DB::getCurrentExceptionMessage(true) << "\n";
        ///         return 1;
        ///     }
        /// }
        if (access(stderr_path.c_str(), W_OK))
        {
            int fd;
            if ((fd = creat(stderr_path.c_str(), 0600)) == -1 && errno != EEXIST)
                throw Poco::OpenFileException("File " + stderr_path + " (logger.stderr) is not writable");
            if (fd != -1)
                ::close(fd);
        }

        if (!freopen(stderr_path.c_str(), "a+", stderr))
            throw Poco::OpenFileException("Cannot attach stderr to " + stderr_path);

@ -811,7 +833,7 @@ void BaseDaemon::initializeTerminationAndSignalProcessing()
    /// SIGTSTP is added for debugging purposes. To output a stack trace of any running thread at anytime.

    addSignalHandler({SIGABRT, SIGSEGV, SIGILL, SIGBUS, SIGSYS, SIGFPE, SIGPIPE, SIGTSTP, SIGTRAP}, signalHandler, &handled_signals);
    addSignalHandler({SIGHUP, SIGUSR1}, closeLogsSignalHandler, &handled_signals);
    addSignalHandler({SIGHUP}, closeLogsSignalHandler, &handled_signals);
    addSignalHandler({SIGINT, SIGQUIT, SIGTERM}, terminateRequestedSignalHandler, &handled_signals);

#if defined(SANITIZER)
@ -973,11 +995,19 @@ void BaseDaemon::setupWatchdog()
        memcpy(argv0, new_process_name, std::min(strlen(new_process_name), original_process_name.size()));
    }

    /// If streaming compression of logs is used then we write watchdog logs to cerr
    if (config().getRawString("logger.stream_compress", "false") == "true")
    {
        Poco::AutoPtr<OwnPatternFormatter> pf = new OwnPatternFormatter;
        Poco::AutoPtr<DB::OwnFormattingChannel> log = new DB::OwnFormattingChannel(pf, new Poco::ConsoleChannel(std::cerr));
        logger().setChannel(log);
    }

    logger().information(fmt::format("Will watch for the process with pid {}", pid));

    /// Forward signals to the child process.
    addSignalHandler(
        {SIGHUP, SIGUSR1, SIGINT, SIGQUIT, SIGTERM},
        {SIGHUP, SIGINT, SIGQUIT, SIGTERM},
        [](int sig, siginfo_t *, void *)
        {
            /// Forward all signals except INT as it can be send by terminal to the process group when user press Ctrl+C,
@ -12,6 +12,6 @@ endif()

target_link_libraries (daemon PUBLIC loggers PRIVATE clickhouse_common_io clickhouse_common_config common ${EXECINFO_LIBRARIES})

if (USE_SENTRY)
    target_link_libraries (daemon PRIVATE ${SENTRY_LIBRARY})
if (TARGET ch_contrib::sentry)
    target_link_libraries (daemon PRIVATE ch_contrib::sentry)
endif ()
@ -37,11 +37,3 @@ GraphiteWriter::GraphiteWriter(const std::string & config_name, const std::strin
        root_path += sub_path;
    }
}


std::string GraphiteWriter::getPerServerPath(const std::string & server_name, const std::string & root_path)
{
    std::string path = root_path + "." + server_name;
    std::replace(path.begin() + root_path.size() + 1, path.end(), '.', '_');
    return path;
}
@ -8,10 +8,10 @@
#include <base/logger_useful.h>


/// пишет в Graphite данные в формате
/// Writes to Graphite in the following format
/// path value timestamp\n
/// path может иметь любую вложенность. Директории разделяются с помощью "."
/// у нас принят следующий формат path - root_path.server_name.sub_path.key
/// path can be arbitrarily nested. Elements are separated by '.'
/// Example: root_path.server_name.sub_path.key
class GraphiteWriter
{
public:
@ -32,8 +32,6 @@ public:
        writeImpl(key_val_vec, timestamp, custom_root_path);
    }

    /// Returns the path root_path.server_name
    static std::string getPerServerPath(const std::string & server_name, const std::string & root_path = "one_min");
private:
    template <typename T>
    void writeImpl(const T & data, time_t timestamp, const std::string & custom_root_path)
@ -182,7 +182,6 @@ TRAP(vlimit)
TRAP(wcsnrtombs)
TRAP(wcsrtombs)
TRAP(wctomb)
TRAP(wordexp)
TRAP(basename)
TRAP(catgets)
TRAP(dbm_clearerr)
@ -195,9 +194,8 @@ TRAP(dbm_nextkey)
TRAP(dbm_open)
TRAP(dbm_store)
TRAP(dirname)
#if !defined(SANITIZER)
TRAP(dlerror) // Used by tsan
#endif
// TRAP(dlerror) // It is not thread-safe. But it is used by dynamic linker to load some name resolution plugins. Also used by TSan.
/// Note: we should better get rid of glibc, dynamic linking and all that sort of annoying garbage altogether.
TRAP(ftw)
TRAP(getc_unlocked)
//TRAP(getenv) // Ok at program startup
@ -245,4 +243,21 @@ TRAP(lgammaf32x)
TRAP(lgammaf64)
TRAP(lgammaf64x)

/// These functions are unused by ClickHouse and we should be aware if they accidentally get used.
/// Sometimes people report that these functions contain vulnerabilities (these reports are bogus for ClickHouse).
TRAP(mq_close)
TRAP(mq_getattr)
TRAP(mq_setattr)
TRAP(mq_notify)
TRAP(mq_open)
TRAP(mq_receive)
TRAP(mq_send)
TRAP(mq_unlink)
TRAP(mq_timedsend)
TRAP(mq_timedreceive)

/// These functions are also unused by ClickHouse.
TRAP(wordexp)
TRAP(wordfree)

#endif
@ -5,9 +5,11 @@
#include <Poco/Util/AbstractConfiguration.h>
#include "OwnFormattingChannel.h"
#include "OwnPatternFormatter.h"
#include "OwnSplitChannel.h"
#include <Poco/ConsoleChannel.h>
#include <Poco/Logger.h>
#include <Poco/Net/RemoteSyslogChannel.h>
#include <Interpreters/TextLog.h>
#include <filesystem>

namespace fs = std::filesystem;
@ -62,7 +64,13 @@ void Loggers::buildLoggers(Poco::Util::AbstractConfiguration & config, Poco::Log
    if (!log_path.empty())
    {
        createDirectory(log_path);
        std::cerr << "Logging " << log_level_string << " to " << log_path << std::endl;

        std::string ext;
        if (config.getRawString("logger.stream_compress", "false") == "true")
            ext = ".lz4";

        std::cerr << "Logging " << log_level_string << " to " << log_path << ext << std::endl;

        auto log_level = Poco::Logger::parseLevel(log_level_string);
        if (log_level > max_log_level)
        {
@ -75,6 +83,7 @@ void Loggers::buildLoggers(Poco::Util::AbstractConfiguration & config, Poco::Log
        log_file->setProperty(Poco::FileChannel::PROP_ROTATION, config.getRawString("logger.size", "100M"));
        log_file->setProperty(Poco::FileChannel::PROP_ARCHIVE, "number");
        log_file->setProperty(Poco::FileChannel::PROP_COMPRESS, config.getRawString("logger.compress", "true"));
        log_file->setProperty(Poco::FileChannel::PROP_STREAMCOMPRESS, config.getRawString("logger.stream_compress", "false"));
        log_file->setProperty(Poco::FileChannel::PROP_PURGECOUNT, config.getRawString("logger.count", "1"));
        log_file->setProperty(Poco::FileChannel::PROP_FLUSH, config.getRawString("logger.flush", "true"));
        log_file->setProperty(Poco::FileChannel::PROP_ROTATEONOPEN, config.getRawString("logger.rotateOnOpen", "false"));
@ -100,13 +109,18 @@ void Loggers::buildLoggers(Poco::Util::AbstractConfiguration & config, Poco::Log
            max_log_level = errorlog_level;
        }

        std::cerr << "Logging errors to " << errorlog_path << std::endl;
        std::string ext;
        if (config.getRawString("logger.stream_compress", "false") == "true")
            ext = ".lz4";

        std::cerr << "Logging errors to " << errorlog_path << ext << std::endl;

        error_log_file = new Poco::FileChannel;
        error_log_file->setProperty(Poco::FileChannel::PROP_PATH, fs::weakly_canonical(errorlog_path));
        error_log_file->setProperty(Poco::FileChannel::PROP_ROTATION, config.getRawString("logger.size", "100M"));
        error_log_file->setProperty(Poco::FileChannel::PROP_ARCHIVE, "number");
        error_log_file->setProperty(Poco::FileChannel::PROP_COMPRESS, config.getRawString("logger.compress", "true"));
        error_log_file->setProperty(Poco::FileChannel::PROP_STREAMCOMPRESS, config.getRawString("logger.stream_compress", "false"));
        error_log_file->setProperty(Poco::FileChannel::PROP_PURGECOUNT, config.getRawString("logger.count", "1"));
        error_log_file->setProperty(Poco::FileChannel::PROP_FLUSH, config.getRawString("logger.flush", "true"));
        error_log_file->setProperty(Poco::FileChannel::PROP_ROTATEONOPEN, config.getRawString("logger.rotateOnOpen", "false"));
@ -5,9 +5,12 @@
#include <Poco/AutoPtr.h>
#include <Poco/FileChannel.h>
#include <Poco/Util/Application.h>
#include <Interpreters/TextLog.h>
#include "OwnSplitChannel.h"

namespace DB
{
    class TextLog;
}

namespace Poco::Util
{
@ -1,16 +1,12 @@
#include "OwnPatternFormatter.h"

#include <functional>
#include <optional>
#include <sys/time.h>
#include <IO/WriteBufferFromString.h>
#include <IO/WriteHelpers.h>
#include <Common/HashTable/Hash.h>
#include <Interpreters/InternalTextLogsQueue.h>
#include <Common/CurrentThread.h>
#include <base/getThreadId.h>
#include <base/terminalColors.h>
#include "Loggers.h"


OwnPatternFormatter::OwnPatternFormatter(bool color_)
@ -10,6 +10,8 @@
#include <Poco/Message.h>
#include <Common/CurrentThread.h>
#include <Common/DNSResolver.h>
#include <Common/setThreadName.h>
#include <Common/LockMemoryExceptionInThread.h>
#include <base/getThreadId.h>
#include <Common/SensitiveDataMasker.h>
#include <Common/IO.h>
@ -57,7 +59,7 @@ void OwnSplitChannel::tryLogSplit(const Poco::Message & msg)
    /// but let's log it into the stderr at least.
    catch (...)
    {
        MemoryTracker::LockExceptionInThread lock_memory_tracker(VariableContext::Global);
        LockMemoryExceptionInThread lock_memory_tracker(VariableContext::Global);

        const std::string & exception_message = getCurrentExceptionMessage(true);
        const std::string & message = msg.getText();
@ -1,11 +1,16 @@
#pragma once
#include <atomic>
#include <vector>
#include <map>
#include <mutex>
#include <Poco/AutoPtr.h>
#include <Poco/Channel.h>
#include "ExtendedLogChannel.h"
#include <Interpreters/TextLog.h>

namespace DB
{
    class TextLog;
}

namespace DB
{
@ -1,64 +0,0 @@
add_library (mysqlxx
    Connection.cpp
    Exception.cpp
    Query.cpp
    ResultBase.cpp
    UseQueryResult.cpp
    Row.cpp
    Value.cpp
    Pool.cpp
    PoolFactory.cpp
    PoolWithFailover.cpp
)

target_include_directories (mysqlxx PUBLIC ..)

if (USE_INTERNAL_MYSQL_LIBRARY)
    target_include_directories (mysqlxx PUBLIC "${ClickHouse_SOURCE_DIR}/contrib/mariadb-connector-c/include")
    target_include_directories (mysqlxx PUBLIC "${ClickHouse_BINARY_DIR}/contrib/mariadb-connector-c/include")
else ()
    set(PLATFORM_LIBRARIES ${CMAKE_DL_LIBS})

    if (USE_MYSQL)
        target_include_directories (mysqlxx SYSTEM PRIVATE ${MYSQL_INCLUDE_DIR})
    endif ()

    if (APPLE)
        find_library (ICONV_LIBRARY iconv)
        set (MYSQLCLIENT_LIBRARIES ${MYSQLCLIENT_LIBRARIES} ${STATIC_MYSQLCLIENT_LIB} ${ICONV_LIBRARY})
    elseif (USE_STATIC_LIBRARIES AND STATIC_MYSQLCLIENT_LIB)
        set (MYSQLCLIENT_LIBRARIES ${STATIC_MYSQLCLIENT_LIB})
    endif ()
endif ()

target_link_libraries (mysqlxx
    PUBLIC
        common
    PRIVATE
        ${MYSQLCLIENT_LIBRARIES}
        ${ZLIB_LIBRARIES}
)

if(OPENSSL_LIBRARIES)
    target_link_libraries(mysqlxx PRIVATE ${OPENSSL_LIBRARIES})
endif()

target_link_libraries(mysqlxx PRIVATE ${PLATFORM_LIBRARIES})

if (NOT USE_INTERNAL_MYSQL_LIBRARY AND OPENSSL_INCLUDE_DIR)
    target_include_directories (mysqlxx SYSTEM PRIVATE ${OPENSSL_INCLUDE_DIR})
endif ()

target_no_warning(mysqlxx reserved-macro-identifier)

if (NOT USE_INTERNAL_MYSQL_LIBRARY AND USE_STATIC_LIBRARIES)
    message(WARNING "Statically linking with system mysql/mariadb only works "
        "if mysql client libraries are built with same openssl version as "
        "we are going to use now. It wouldn't work if GnuTLS is used. "
        "Try -D\"USE_INTERNAL_MYSQL_LIBRARY\"=ON or -D\"ENABLE_MYSQL\"=OFF or "
        "-D\"USE_STATIC_LIBRARIES\"=OFF")
endif ()
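
# If the warning above fires, the message suggests configure-time workarounds.
# A sketch of one such invocation (illustrative command lines, not part of the
# original file):
#
#   cmake -DUSE_INTERNAL_MYSQL_LIBRARY=ON ..
#   # or disable MySQL support entirely:
#   cmake -DENABLE_MYSQL=OFF ..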

if (ENABLE_TESTS)
    add_subdirectory (tests)
endif ()
@ -1,433 +0,0 @@
# https://github.com/apache/arrow/blob/master/cpp/cmake_modules/FindArrow.cmake

# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

# - Find Arrow (arrow/api.h, libarrow.a, libarrow.so)
# This module defines
#  ARROW_FOUND, whether Arrow has been found
#  ARROW_FULL_SO_VERSION, full shared object version of found Arrow "100.0.0"
#  ARROW_IMPORT_LIB, path to libarrow's import library (Windows only)
#  ARROW_INCLUDE_DIR, directory containing headers
#  ARROW_LIBS, deprecated. Use ARROW_LIB_DIR instead
#  ARROW_LIB_DIR, directory containing Arrow libraries
#  ARROW_SHARED_IMP_LIB, deprecated. Use ARROW_IMPORT_LIB instead
#  ARROW_SHARED_LIB, path to libarrow's shared library
#  ARROW_SO_VERSION, shared object version of found Arrow such as "100"
#  ARROW_STATIC_LIB, path to libarrow.a
#  ARROW_VERSION, version of found Arrow
#  ARROW_VERSION_MAJOR, major version of found Arrow
#  ARROW_VERSION_MINOR, minor version of found Arrow
#  ARROW_VERSION_PATCH, patch version of found Arrow
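#
# A minimal usage sketch based only on the variables documented above
# (illustrative; the target name `example` is an assumption, not part of
# the module):
#
#   find_package(Arrow REQUIRED)
#   target_include_directories(example PRIVATE ${ARROW_INCLUDE_DIR})
#   target_link_libraries(example PRIVATE ${ARROW_SHARED_LIB})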

if(DEFINED ARROW_FOUND)
    return()
endif()

include(FindPkgConfig)
include(FindPackageHandleStandardArgs)

set(ARROW_SEARCH_LIB_PATH_SUFFIXES)
if(CMAKE_LIBRARY_ARCHITECTURE)
    list(APPEND ARROW_SEARCH_LIB_PATH_SUFFIXES "lib/${CMAKE_LIBRARY_ARCHITECTURE}")
endif()
list(APPEND ARROW_SEARCH_LIB_PATH_SUFFIXES
    "lib64"
    "lib32"
    "lib"
    "bin")
set(ARROW_CONFIG_SUFFIXES
    "_RELEASE"
    "_RELWITHDEBINFO"
    "_MINSIZEREL"
    "_DEBUG"
    "")
if(CMAKE_BUILD_TYPE)
    string(TOUPPER ${CMAKE_BUILD_TYPE} ARROW_CONFIG_SUFFIX_PREFERRED)
    set(ARROW_CONFIG_SUFFIX_PREFERRED "_${ARROW_CONFIG_SUFFIX_PREFERRED}")
    list(INSERT ARROW_CONFIG_SUFFIXES 0 "${ARROW_CONFIG_SUFFIX_PREFERRED}")
endif()

if(NOT DEFINED ARROW_MSVC_STATIC_LIB_SUFFIX)
    if(MSVC)
        set(ARROW_MSVC_STATIC_LIB_SUFFIX "_static")
    else()
        set(ARROW_MSVC_STATIC_LIB_SUFFIX "")
    endif()
endif()

# Internal function.
#
# Set shared library name for ${base_name} to ${output_variable}.
#
# Example:
#   arrow_build_shared_library_name(ARROW_SHARED_LIBRARY_NAME arrow)
#   # -> ARROW_SHARED_LIBRARY_NAME=libarrow.so on Linux
#   # -> ARROW_SHARED_LIBRARY_NAME=libarrow.dylib on macOS
#   # -> ARROW_SHARED_LIBRARY_NAME=arrow.dll with MSVC on Windows
#   # -> ARROW_SHARED_LIBRARY_NAME=libarrow.dll with MinGW on Windows
function(arrow_build_shared_library_name output_variable base_name)
    set(${output_variable}
        "${CMAKE_SHARED_LIBRARY_PREFIX}${base_name}${CMAKE_SHARED_LIBRARY_SUFFIX}"
        PARENT_SCOPE)
endfunction()

# Internal function.
#
# Set import library name for ${base_name} to ${output_variable}.
# This is useful only for MSVC build. Import library is used only
# with MSVC build.
#
# Example:
#   arrow_build_import_library_name(ARROW_IMPORT_LIBRARY_NAME arrow)
#   # -> ARROW_IMPORT_LIBRARY_NAME=arrow on Linux (meaningless)
#   # -> ARROW_IMPORT_LIBRARY_NAME=arrow on macOS (meaningless)
#   # -> ARROW_IMPORT_LIBRARY_NAME=arrow.lib with MSVC on Windows
#   # -> ARROW_IMPORT_LIBRARY_NAME=libarrow.dll.a with MinGW on Windows
function(arrow_build_import_library_name output_variable base_name)
    set(${output_variable}
        "${CMAKE_IMPORT_LIBRARY_PREFIX}${base_name}${CMAKE_IMPORT_LIBRARY_SUFFIX}"
        PARENT_SCOPE)
endfunction()

# Internal function.
#
# Set static library name for ${base_name} to ${output_variable}.
#
# Example:
#   arrow_build_static_library_name(ARROW_STATIC_LIBRARY_NAME arrow)
#   # -> ARROW_STATIC_LIBRARY_NAME=libarrow.a on Linux
#   # -> ARROW_STATIC_LIBRARY_NAME=libarrow.a on macOS
#   # -> ARROW_STATIC_LIBRARY_NAME=arrow.lib with MSVC on Windows
#   # -> ARROW_STATIC_LIBRARY_NAME=libarrow.dll.a with MinGW on Windows
function(arrow_build_static_library_name output_variable base_name)
    set(
        ${output_variable}
        "${CMAKE_STATIC_LIBRARY_PREFIX}${base_name}${ARROW_MSVC_STATIC_LIB_SUFFIX}${CMAKE_STATIC_LIBRARY_SUFFIX}"
        PARENT_SCOPE)
endfunction()

# Internal function.
#
# Set macro value for ${macro_name} in ${header_content} to ${output_variable}.
#
# Example:
#   arrow_extract_macro_value(version_major
#                             "ARROW_VERSION_MAJOR"
#                             "#define ARROW_VERSION_MAJOR 1.0.0")
#   # -> version_major=1.0.0
function(arrow_extract_macro_value output_variable macro_name header_content)
    string(REGEX MATCH "#define +${macro_name} +[^\r\n]+" macro_definition
        "${header_content}")
    string(REGEX
        REPLACE "^#define +${macro_name} +(.+)$" "\\1" macro_value "${macro_definition}")
    set(${output_variable} "${macro_value}" PARENT_SCOPE)
endfunction()

# Internal macro only for arrow_find_package.
#
# Find package in HOME.
macro(arrow_find_package_home)
    find_path(${prefix}_include_dir "${header_path}"
        PATHS "${home}"
        PATH_SUFFIXES "include"
        NO_DEFAULT_PATH)
    set(include_dir "${${prefix}_include_dir}")
    set(${prefix}_INCLUDE_DIR "${include_dir}" PARENT_SCOPE)

    if(MSVC)
        set(CMAKE_SHARED_LIBRARY_SUFFIXES_ORIGINAL ${CMAKE_FIND_LIBRARY_SUFFIXES})
        # .dll isn't found by find_library with MSVC because .dll isn't included in
        # CMAKE_FIND_LIBRARY_SUFFIXES.
        list(APPEND CMAKE_FIND_LIBRARY_SUFFIXES "${CMAKE_SHARED_LIBRARY_SUFFIX}")
    endif()
    find_library(${prefix}_shared_lib
        NAMES "${shared_lib_name}"
        PATHS "${home}"
        PATH_SUFFIXES ${ARROW_SEARCH_LIB_PATH_SUFFIXES}
        NO_DEFAULT_PATH)
    if(MSVC)
        set(CMAKE_SHARED_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES_ORIGINAL})
    endif()
    set(shared_lib "${${prefix}_shared_lib}")
    set(${prefix}_SHARED_LIB "${shared_lib}" PARENT_SCOPE)
    if(shared_lib)
        add_library(${target_shared} SHARED IMPORTED)
        set_target_properties(${target_shared} PROPERTIES IMPORTED_LOCATION "${shared_lib}")
        if(include_dir)
            set_target_properties(${target_shared}
                PROPERTIES INTERFACE_INCLUDE_DIRECTORIES "${include_dir}")
        endif()
        find_library(${prefix}_import_lib
            NAMES "${import_lib_name}"
            PATHS "${home}"
            PATH_SUFFIXES ${ARROW_SEARCH_LIB_PATH_SUFFIXES}
            NO_DEFAULT_PATH)
        set(import_lib "${${prefix}_import_lib}")
        set(${prefix}_IMPORT_LIB "${import_lib}" PARENT_SCOPE)
        if(import_lib)
            set_target_properties(${target_shared} PROPERTIES IMPORTED_IMPLIB "${import_lib}")
        endif()
    endif()

    find_library(${prefix}_static_lib
        NAMES "${static_lib_name}"
        PATHS "${home}"
        PATH_SUFFIXES ${ARROW_SEARCH_LIB_PATH_SUFFIXES}
        NO_DEFAULT_PATH)
    set(static_lib "${${prefix}_static_lib}")
    set(${prefix}_STATIC_LIB "${static_lib}" PARENT_SCOPE)
    if(static_lib)
        add_library(${target_static} STATIC IMPORTED)
        set_target_properties(${target_static} PROPERTIES IMPORTED_LOCATION "${static_lib}")
        if(include_dir)
            set_target_properties(${target_static}
                PROPERTIES INTERFACE_INCLUDE_DIRECTORIES "${include_dir}")
        endif()
    endif()
endmacro()

# Internal macro only for arrow_find_package.
#
# Find package by CMake package configuration.
macro(arrow_find_package_cmake_package_configuration)
    find_package(${cmake_package_name} CONFIG)
    if(${cmake_package_name}_FOUND)
        set(${prefix}_USE_CMAKE_PACKAGE_CONFIG TRUE PARENT_SCOPE)
        if(TARGET ${target_shared})
            foreach(suffix ${ARROW_CONFIG_SUFFIXES})
                get_target_property(shared_lib ${target_shared} IMPORTED_LOCATION${suffix})
                if(shared_lib)
                    # Remove shared library version:
                    #   libarrow.so.100.0.0 -> libarrow.so
                    # Because ARROW_HOME and pkg-config approaches don't add
                    # shared library version.
                    string(REGEX
                        REPLACE "(${CMAKE_SHARED_LIBRARY_SUFFIX})[.0-9]+$" "\\1" shared_lib
                        "${shared_lib}")
                    set(${prefix}_SHARED_LIB "${shared_lib}" PARENT_SCOPE)
                    break()
                endif()
            endforeach()
        endif()
        if(TARGET ${target_static})
            foreach(suffix ${ARROW_CONFIG_SUFFIXES})
                get_target_property(static_lib ${target_static} IMPORTED_LOCATION${suffix})
                if(static_lib)
                    set(${prefix}_STATIC_LIB "${static_lib}" PARENT_SCOPE)
                    break()
                endif()
            endforeach()
        endif()
    endif()
endmacro()

# Internal macro only for arrow_find_package.
#
# Find package by pkg-config.
macro(arrow_find_package_pkg_config)
    pkg_check_modules(${prefix}_PC ${pkg_config_name})
    if(${prefix}_PC_FOUND)
        set(${prefix}_USE_PKG_CONFIG TRUE PARENT_SCOPE)

        set(include_dir "${${prefix}_PC_INCLUDEDIR}")
        set(lib_dir "${${prefix}_PC_LIBDIR}")
        set(shared_lib_paths "${${prefix}_PC_LINK_LIBRARIES}")
        # Use the first shared library path as the IMPORTED_LOCATION
        # for ${target_shared}. This assumes that the first shared library
        # path is the shared library path for this module.
        list(GET shared_lib_paths 0 first_shared_lib_path)
        # Use the rest shared library paths as the INTERFACE_LINK_LIBRARIES
        # for ${target_shared}. This assumes that the rest shared library
        # paths are dependency library paths for this module.
        list(LENGTH shared_lib_paths n_shared_lib_paths)
        if(n_shared_lib_paths LESS_EQUAL 1)
            set(rest_shared_lib_paths)
        else()
            list(SUBLIST
                shared_lib_paths
                1
                -1
                rest_shared_lib_paths)
        endif()

        set(${prefix}_VERSION "${${prefix}_PC_VERSION}" PARENT_SCOPE)
        set(${prefix}_INCLUDE_DIR "${include_dir}" PARENT_SCOPE)
        set(${prefix}_SHARED_LIB "${first_shared_lib_path}" PARENT_SCOPE)

        add_library(${target_shared} SHARED IMPORTED)
        set_target_properties(${target_shared}
            PROPERTIES INTERFACE_INCLUDE_DIRECTORIES
                "${include_dir}"
                INTERFACE_LINK_LIBRARIES
                "${rest_shared_lib_paths}"
                IMPORTED_LOCATION
                "${first_shared_lib_path}")
        get_target_property(shared_lib ${target_shared} IMPORTED_LOCATION)

        find_library(${prefix}_static_lib
            NAMES "${static_lib_name}"
            PATHS "${lib_dir}"
            NO_DEFAULT_PATH)
        set(static_lib "${${prefix}_static_lib}")
        set(${prefix}_STATIC_LIB "${static_lib}" PARENT_SCOPE)
        if(static_lib)
            add_library(${target_static} STATIC IMPORTED)
            set_target_properties(${target_static}
                PROPERTIES INTERFACE_INCLUDE_DIRECTORIES "${include_dir}"
                    IMPORTED_LOCATION "${static_lib}")
        endif()
    endif()
endmacro()

function(arrow_find_package
    prefix
    home
    base_name
    header_path
    cmake_package_name
    pkg_config_name)
    arrow_build_shared_library_name(shared_lib_name ${base_name})
    arrow_build_import_library_name(import_lib_name ${base_name})
    arrow_build_static_library_name(static_lib_name ${base_name})

    set(target_shared ${base_name}_shared)
    set(target_static ${base_name}_static)

    if(home)
        arrow_find_package_home()
        set(${prefix}_FIND_APPROACH "HOME: ${home}" PARENT_SCOPE)
    else()
        arrow_find_package_cmake_package_configuration()
        if(${cmake_package_name}_FOUND)
            set(${prefix}_FIND_APPROACH
                "CMake package configuration: ${cmake_package_name}"
                PARENT_SCOPE)
        else()
            arrow_find_package_pkg_config()
            set(${prefix}_FIND_APPROACH "pkg-config: ${pkg_config_name}" PARENT_SCOPE)
        endif()
    endif()

    if(NOT include_dir)
        if(TARGET ${target_shared})
            get_target_property(include_dir ${target_shared} INTERFACE_INCLUDE_DIRECTORIES)
        elseif(TARGET ${target_static})
            get_target_property(include_dir ${target_static} INTERFACE_INCLUDE_DIRECTORIES)
        endif()
    endif()
    if(include_dir)
        set(${prefix}_INCLUDE_DIR "${include_dir}" PARENT_SCOPE)
    endif()

    if(shared_lib)
        get_filename_component(lib_dir "${shared_lib}" DIRECTORY)
    elseif(static_lib)
        get_filename_component(lib_dir "${static_lib}" DIRECTORY)
    else()
        set(lib_dir NOTFOUND)
    endif()
    set(${prefix}_LIB_DIR "${lib_dir}" PARENT_SCOPE)
    # For backward compatibility
    set(${prefix}_LIBS "${lib_dir}" PARENT_SCOPE)
endfunction()

if(NOT "$ENV{ARROW_HOME}" STREQUAL "")
    file(TO_CMAKE_PATH "$ENV{ARROW_HOME}" ARROW_HOME)
endif()
arrow_find_package(ARROW
    "${ARROW_HOME}"
    arrow
    arrow/api.h
    Arrow
    arrow)

if(ARROW_HOME)
    if(ARROW_INCLUDE_DIR)
        file(READ "${ARROW_INCLUDE_DIR}/arrow/util/config.h" ARROW_CONFIG_H_CONTENT)
        arrow_extract_macro_value(ARROW_VERSION_MAJOR "ARROW_VERSION_MAJOR"
            "${ARROW_CONFIG_H_CONTENT}")
        arrow_extract_macro_value(ARROW_VERSION_MINOR "ARROW_VERSION_MINOR"
            "${ARROW_CONFIG_H_CONTENT}")
        arrow_extract_macro_value(ARROW_VERSION_PATCH "ARROW_VERSION_PATCH"
            "${ARROW_CONFIG_H_CONTENT}")
        if("${ARROW_VERSION_MAJOR}" STREQUAL ""
            OR "${ARROW_VERSION_MINOR}" STREQUAL ""
            OR "${ARROW_VERSION_PATCH}" STREQUAL "")
            set(ARROW_VERSION "0.0.0")
        else()
            set(ARROW_VERSION
                "${ARROW_VERSION_MAJOR}.${ARROW_VERSION_MINOR}.${ARROW_VERSION_PATCH}")
        endif()

        arrow_extract_macro_value(ARROW_SO_VERSION_QUOTED "ARROW_SO_VERSION"
            "${ARROW_CONFIG_H_CONTENT}")
        string(REGEX REPLACE "^\"(.+)\"$" "\\1" ARROW_SO_VERSION "${ARROW_SO_VERSION_QUOTED}")
        arrow_extract_macro_value(ARROW_FULL_SO_VERSION_QUOTED "ARROW_FULL_SO_VERSION"
            "${ARROW_CONFIG_H_CONTENT}")
        string(REGEX
            REPLACE "^\"(.+)\"$" "\\1" ARROW_FULL_SO_VERSION
            "${ARROW_FULL_SO_VERSION_QUOTED}")
    endif()
else()
    if(ARROW_USE_CMAKE_PACKAGE_CONFIG)
        find_package(Arrow CONFIG)
    elseif(ARROW_USE_PKG_CONFIG)
        pkg_get_variable(ARROW_SO_VERSION arrow so_version)
        pkg_get_variable(ARROW_FULL_SO_VERSION arrow full_so_version)
    endif()
endif()

set(ARROW_ABI_VERSION ${ARROW_SO_VERSION})

mark_as_advanced(ARROW_ABI_VERSION
    ARROW_CONFIG_SUFFIXES
    ARROW_FULL_SO_VERSION
    ARROW_IMPORT_LIB
    ARROW_INCLUDE_DIR
    ARROW_LIBS
    ARROW_LIB_DIR
    ARROW_SEARCH_LIB_PATH_SUFFIXES
    ARROW_SHARED_IMP_LIB
    ARROW_SHARED_LIB
    ARROW_SO_VERSION
    ARROW_STATIC_LIB
    ARROW_VERSION
    ARROW_VERSION_MAJOR
    ARROW_VERSION_MINOR
    ARROW_VERSION_PATCH)

find_package_handle_standard_args(Arrow REQUIRED_VARS
    # The first required variable is shown
    # in the found message. So this list is
    # not sorted alphabetically.
    ARROW_INCLUDE_DIR
    ARROW_LIB_DIR
    ARROW_FULL_SO_VERSION
    ARROW_SO_VERSION
    VERSION_VAR
    ARROW_VERSION)
set(ARROW_FOUND ${Arrow_FOUND})

if(Arrow_FOUND AND NOT Arrow_FIND_QUIETLY)
    message(STATUS "Arrow version: ${ARROW_VERSION} (${ARROW_FIND_APPROACH})")
    message(STATUS "Arrow SO and ABI version: ${ARROW_SO_VERSION}")
    message(STATUS "Arrow full SO version: ${ARROW_FULL_SO_VERSION}")
    message(STATUS "Found the Arrow core shared library: ${ARROW_SHARED_LIB}")
    message(STATUS "Found the Arrow core import library: ${ARROW_IMPORT_LIB}")
    message(STATUS "Found the Arrow core static library: ${ARROW_STATIC_LIB}")
endif()
@ -1,394 +0,0 @@
# Distributed under the OSI-approved BSD 3-Clause License.  See accompanying
# file Copyright.txt or https://cmake.org/licensing for details.

#.rst:
# FindICU
# -------
#
# Find the International Components for Unicode (ICU) libraries and
# programs.
#
# This module supports multiple components.
# Components can include any of: ``data``, ``i18n``, ``io``, ``le``,
# ``lx``, ``test``, ``tu`` and ``uc``.
#
# Note that on Windows ``data`` is named ``dt`` and ``i18n`` is named
# ``in``; any of the names may be used, and the appropriate
# platform-specific library name will be automatically selected.
#
# This module reports information about the ICU installation in
# several variables.  General variables::
#
#   ICU_VERSION - ICU release version
#   ICU_FOUND - true if the main programs and libraries were found
#   ICU_LIBRARIES - component libraries to be linked
#   ICU_INCLUDE_DIRS - the directories containing the ICU headers
#
# Imported targets::
#
#   ICU::<C>
#
# Where ``<C>`` is the name of an ICU component, for example
# ``ICU::i18n``.
#
# ICU programs are reported in::
#
#   ICU_GENCNVAL_EXECUTABLE - path to gencnval executable
#   ICU_ICUINFO_EXECUTABLE - path to icuinfo executable
#   ICU_GENBRK_EXECUTABLE - path to genbrk executable
#   ICU_ICU-CONFIG_EXECUTABLE - path to icu-config executable
#   ICU_GENRB_EXECUTABLE - path to genrb executable
#   ICU_GENDICT_EXECUTABLE - path to gendict executable
#   ICU_DERB_EXECUTABLE - path to derb executable
#   ICU_PKGDATA_EXECUTABLE - path to pkgdata executable
#   ICU_UCONV_EXECUTABLE - path to uconv executable
#   ICU_GENCFU_EXECUTABLE - path to gencfu executable
#   ICU_MAKECONV_EXECUTABLE - path to makeconv executable
#   ICU_GENNORM2_EXECUTABLE - path to gennorm2 executable
#   ICU_GENCCODE_EXECUTABLE - path to genccode executable
#   ICU_GENSPREP_EXECUTABLE - path to gensprep executable
#   ICU_ICUPKG_EXECUTABLE - path to icupkg executable
#   ICU_GENCMN_EXECUTABLE - path to gencmn executable
#
# ICU component libraries are reported in::
#
#   ICU_<C>_FOUND - ON if component was found
#   ICU_<C>_LIBRARIES - libraries for component
#
# ICU datafiles are reported in::
#
#   ICU_MAKEFILE_INC - Makefile.inc
#   ICU_PKGDATA_INC - pkgdata.inc
#
# Note that ``<C>`` is the uppercased name of the component.
#
# This module reads hints about search results from::
#
#   ICU_ROOT - the root of the ICU installation
#
# The environment variable ``ICU_ROOT`` may also be used; the
# ICU_ROOT variable takes precedence.
#
# The following cache variables may also be set::
#
#   ICU_<P>_EXECUTABLE - the path to executable <P>
#   ICU_INCLUDE_DIR - the directory containing the ICU headers
#   ICU_<C>_LIBRARY - the library for component <C>
#
# .. note::
#
#   In most cases none of the above variables will require setting,
#   unless multiple ICU versions are available and a specific version
#   is required.
#
# Other variables one may set to control this module are::
#
#   ICU_DEBUG - Set to ON to enable debug output from FindICU.

# Written by Roger Leigh <rleigh@codelibre.net>
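#
# A minimal usage sketch based on the components and imported targets
# documented above (illustrative; the target name `example` is an
# assumption):
#
#   find_package(ICU COMPONENTS uc i18n REQUIRED)
#   target_link_libraries(example PRIVATE ICU::uc ICU::i18n)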

set(icu_programs
    gencnval
    icuinfo
    genbrk
    icu-config
    genrb
    gendict
    derb
    pkgdata
    uconv
    gencfu
    makeconv
    gennorm2
    genccode
    gensprep
    icupkg
    gencmn)

set(icu_data
    Makefile.inc
    pkgdata.inc)

# The ICU checks are contained in a function due to the large number
# of temporary variables needed.
function(_ICU_FIND)
    # Set up search paths, taking compiler into account.  Search ICU_ROOT,
    # with ICU_ROOT in the environment as a fallback if unset.
    if(ICU_ROOT)
        list(APPEND icu_roots "${ICU_ROOT}")
    else()
        if(NOT "$ENV{ICU_ROOT}" STREQUAL "")
            file(TO_CMAKE_PATH "$ENV{ICU_ROOT}" NATIVE_PATH)
            list(APPEND icu_roots "${NATIVE_PATH}")
            set(ICU_ROOT "${NATIVE_PATH}"
                CACHE PATH "Location of the ICU installation" FORCE)
        endif()
    endif()

    # Find include directory
    list(APPEND icu_include_suffixes "include")
    find_path(ICU_INCLUDE_DIR
        NAMES "unicode/utypes.h"
        HINTS ${icu_roots}
        PATH_SUFFIXES ${icu_include_suffixes}
        DOC "ICU include directory")
    set(ICU_INCLUDE_DIR "${ICU_INCLUDE_DIR}" PARENT_SCOPE)

    # Get version
    if(ICU_INCLUDE_DIR AND EXISTS "${ICU_INCLUDE_DIR}/unicode/uvernum.h")
        file(STRINGS "${ICU_INCLUDE_DIR}/unicode/uvernum.h" icu_header_str
            REGEX "^#define[\t ]+U_ICU_VERSION[\t ]+\".*\".*")

        string(REGEX REPLACE "^#define[\t ]+U_ICU_VERSION[\t ]+\"([^ \\n]*)\".*"
            "\\1" icu_version_string "${icu_header_str}")
        set(ICU_VERSION "${icu_version_string}")
        set(ICU_VERSION "${icu_version_string}" PARENT_SCOPE)
        unset(icu_header_str)
        unset(icu_version_string)
    endif()

    if(CMAKE_SIZEOF_VOID_P EQUAL 8)
        # 64-bit binary directory
        set(_bin64 "bin64")
        # 64-bit library directory
        set(_lib64 "lib64")
    endif()


    # Find all ICU programs
    list(APPEND icu_binary_suffixes "${_bin64}" "bin")
    foreach(program ${icu_programs})
        string(TOUPPER "${program}" program_upcase)
        set(cache_var "ICU_${program_upcase}_EXECUTABLE")
        set(program_var "ICU_${program_upcase}_EXECUTABLE")
        find_program("${cache_var}" "${program}"
            HINTS ${icu_roots}
            PATH_SUFFIXES ${icu_binary_suffixes}
            DOC "ICU ${program} executable")
        mark_as_advanced(cache_var)
        set("${program_var}" "${${cache_var}}" PARENT_SCOPE)
    endforeach()

    # Find all ICU libraries
    list(APPEND icu_library_suffixes "${_lib64}" "lib")
    set(ICU_REQUIRED_LIBS_FOUND ON)
    foreach(component ${ICU_FIND_COMPONENTS})
        string(TOUPPER "${component}" component_upcase)
        set(component_cache "ICU_${component_upcase}_LIBRARY")
        set(component_cache_release "${component_cache}_RELEASE")
        set(component_cache_debug "${component_cache}_DEBUG")
        set(component_found "${component_upcase}_FOUND")
        set(component_libnames "icu${component}")
        set(component_debug_libnames "icu${component}d")

        # Special case deliberate library naming mismatches between Unix
        # and Windows builds
        unset(component_libnames)
        unset(component_debug_libnames)
        list(APPEND component_libnames "icu${component}")
        list(APPEND component_debug_libnames "icu${component}d")
        if(component STREQUAL "data")
            list(APPEND component_libnames "icudt")
            # Note there is no debug variant at present
            list(APPEND component_debug_libnames "icudtd")
        endif()
        if(component STREQUAL "dt")
            list(APPEND component_libnames "icudata")
            # Note there is no debug variant at present
            list(APPEND component_debug_libnames "icudatad")
        endif()
        if(component STREQUAL "i18n")
            list(APPEND component_libnames "icuin")
            list(APPEND component_debug_libnames "icuind")
        endif()
        if(component STREQUAL "in")
            list(APPEND component_libnames "icui18n")
            list(APPEND component_debug_libnames "icui18nd")
        endif()

        find_library("${component_cache_release}" ${component_libnames}
            HINTS ${icu_roots}
            PATH_SUFFIXES ${icu_library_suffixes}
            DOC "ICU ${component} library (release)")
        find_library("${component_cache_debug}" ${component_debug_libnames}
            HINTS ${icu_roots}
            PATH_SUFFIXES ${icu_library_suffixes}
            DOC "ICU ${component} library (debug)")
        include(SelectLibraryConfigurations)
        select_library_configurations(ICU_${component_upcase})
        mark_as_advanced("${component_cache_release}" "${component_cache_debug}")
        if(${component_cache})
            set("${component_found}" ON)
            list(APPEND ICU_LIBRARY "${${component_cache}}")
        endif()
        mark_as_advanced("${component_found}")
        set("${component_cache}" "${${component_cache}}" PARENT_SCOPE)
        set("${component_found}" "${${component_found}}" PARENT_SCOPE)
        if(${component_found})
            if (ICU_FIND_REQUIRED_${component})
                list(APPEND ICU_LIBS_FOUND "${component} (required)")
            else()
                list(APPEND ICU_LIBS_FOUND "${component} (optional)")
            endif()
        else()
            if (ICU_FIND_REQUIRED_${component})
                set(ICU_REQUIRED_LIBS_FOUND OFF)
                list(APPEND ICU_LIBS_NOTFOUND "${component} (required)")
            else()
                list(APPEND ICU_LIBS_NOTFOUND "${component} (optional)")
            endif()
        endif()
    endforeach()
    set(_ICU_REQUIRED_LIBS_FOUND "${ICU_REQUIRED_LIBS_FOUND}" PARENT_SCOPE)
    set(ICU_LIBRARY "${ICU_LIBRARY}" PARENT_SCOPE)

    # Find all ICU data files
    if(CMAKE_LIBRARY_ARCHITECTURE)
        list(APPEND icu_data_suffixes
            "${_lib64}/${CMAKE_LIBRARY_ARCHITECTURE}/icu/${ICU_VERSION}"
            "lib/${CMAKE_LIBRARY_ARCHITECTURE}/icu/${ICU_VERSION}"
            "${_lib64}/${CMAKE_LIBRARY_ARCHITECTURE}/icu"
            "lib/${CMAKE_LIBRARY_ARCHITECTURE}/icu")
    endif()
    list(APPEND icu_data_suffixes
        "${_lib64}/icu/${ICU_VERSION}"
        "lib/icu/${ICU_VERSION}"
        "${_lib64}/icu"
        "lib/icu")
    foreach(data ${icu_data})
        string(TOUPPER "${data}" data_upcase)
        string(REPLACE "." "_" data_upcase "${data_upcase}")
        set(cache_var "ICU_${data_upcase}")
        set(data_var "ICU_${data_upcase}")
        find_file("${cache_var}" "${data}"
            HINTS ${icu_roots}
            PATH_SUFFIXES ${icu_data_suffixes}
            DOC "ICU ${data} data file")
        mark_as_advanced(cache_var)
        set("${data_var}" "${${cache_var}}" PARENT_SCOPE)
    endforeach()

    if(NOT ICU_FIND_QUIETLY)
        if(ICU_LIBS_FOUND)
            message(STATUS "Found the following ICU libraries:")
            foreach(found ${ICU_LIBS_FOUND})
                message(STATUS "  ${found}")
            endforeach()
        endif()
        if(ICU_LIBS_NOTFOUND)
            message(STATUS "The following ICU libraries were not found:")
            foreach(notfound ${ICU_LIBS_NOTFOUND})
                message(STATUS "  ${notfound}")
            endforeach()
        endif()
    endif()

    if(ICU_DEBUG)
        message(STATUS "--------FindICU.cmake search debug--------")
        message(STATUS "ICU binary path search order: ${icu_roots}")
        message(STATUS "ICU include path search order: ${icu_roots}")
        message(STATUS "ICU library path search order: ${icu_roots}")
        message(STATUS "----------------")
    endif()
endfunction()

_ICU_FIND()

include(FindPackageHandleStandardArgs)
FIND_PACKAGE_HANDLE_STANDARD_ARGS(ICU
    FOUND_VAR ICU_FOUND
    REQUIRED_VARS ICU_INCLUDE_DIR
        ICU_LIBRARY
        _ICU_REQUIRED_LIBS_FOUND
    VERSION_VAR ICU_VERSION
    FAIL_MESSAGE "Failed to find all ICU components")

unset(_ICU_REQUIRED_LIBS_FOUND)

if(ICU_FOUND)
    set(ICU_INCLUDE_DIRS "${ICU_INCLUDE_DIR}")
    set(ICU_LIBRARIES "${ICU_LIBRARY}")
    foreach(_ICU_component ${ICU_FIND_COMPONENTS})
        string(TOUPPER "${_ICU_component}" _ICU_component_upcase)
        set(_ICU_component_cache "ICU_${_ICU_component_upcase}_LIBRARY")
        set(_ICU_component_cache_release "ICU_${_ICU_component_upcase}_LIBRARY_RELEASE")
        set(_ICU_component_cache_debug "ICU_${_ICU_component_upcase}_LIBRARY_DEBUG")
        set(_ICU_component_lib "ICU_${_ICU_component_upcase}_LIBRARIES")
        set(_ICU_component_found "${_ICU_component_upcase}_FOUND")
        set(_ICU_imported_target "ICU::${_ICU_component}")
        if(${_ICU_component_found})
            set("${_ICU_component_lib}" "${${_ICU_component_cache}}")
            if(NOT TARGET ${_ICU_imported_target})
                add_library(${_ICU_imported_target} UNKNOWN IMPORTED)
                if(ICU_INCLUDE_DIR)
                    set_target_properties(${_ICU_imported_target} PROPERTIES
                        INTERFACE_INCLUDE_DIRECTORIES "${ICU_INCLUDE_DIR}")
                endif()
                if(EXISTS "${${_ICU_component_cache}}")
                    set_target_properties(${_ICU_imported_target} PROPERTIES
                        IMPORTED_LINK_INTERFACE_LANGUAGES "CXX"
                        IMPORTED_LOCATION "${${_ICU_component_cache}}")
                endif()
                if(EXISTS "${${_ICU_component_cache_release}}")
                    set_property(TARGET ${_ICU_imported_target} APPEND PROPERTY
                        IMPORTED_CONFIGURATIONS RELEASE)
                    set_target_properties(${_ICU_imported_target} PROPERTIES
                        IMPORTED_LINK_INTERFACE_LANGUAGES_RELEASE "CXX"
                        IMPORTED_LOCATION_RELEASE "${${_ICU_component_cache_release}}")
                endif()
                if(EXISTS "${${_ICU_component_cache_debug}}")
                    set_property(TARGET ${_ICU_imported_target} APPEND PROPERTY
                        IMPORTED_CONFIGURATIONS DEBUG)
                    set_target_properties(${_ICU_imported_target} PROPERTIES
                        IMPORTED_LINK_INTERFACE_LANGUAGES_DEBUG "CXX"
                        IMPORTED_LOCATION_DEBUG "${${_ICU_component_cache_debug}}")
                endif()
            endif()
        endif()
        unset(_ICU_component_upcase)
        unset(_ICU_component_cache)
        unset(_ICU_component_lib)
        unset(_ICU_component_found)
        unset(_ICU_imported_target)
    endforeach()
endif()

if(ICU_DEBUG)
    message(STATUS "--------FindICU.cmake results debug--------")
    message(STATUS "ICU found: ${ICU_FOUND}")
    message(STATUS "ICU_VERSION number: ${ICU_VERSION}")
    message(STATUS "ICU_ROOT directory: ${ICU_ROOT}")
    message(STATUS "ICU_INCLUDE_DIR directory: ${ICU_INCLUDE_DIR}")
    message(STATUS "ICU_LIBRARIES: ${ICU_LIBRARIES}")

    foreach(program IN LISTS icu_programs)
        string(TOUPPER "${program}" program_upcase)
        set(program_lib "ICU_${program_upcase}_EXECUTABLE")
        message(STATUS "${program} program: ${${program_lib}}")
        unset(program_upcase)
        unset(program_lib)
    endforeach()

    foreach(data IN LISTS icu_data)
        string(TOUPPER "${data}" data_upcase)
        string(REPLACE "." "_" data_upcase "${data_upcase}")
        set(data_lib "ICU_${data_upcase}")
        message(STATUS "${data} data: ${${data_lib}}")
        unset(data_upcase)
        unset(data_lib)
    endforeach()

    foreach(component IN LISTS ICU_FIND_COMPONENTS)
        string(TOUPPER "${component}" component_upcase)
        set(component_lib "ICU_${component_upcase}_LIBRARIES")
        set(component_found "${component_upcase}_FOUND")
        message(STATUS "${component} library found: ${${component_found}}")
        message(STATUS "${component} library: ${${component_lib}}")
        unset(component_upcase)
        unset(component_lib)
        unset(component_found)
    endforeach()
    message(STATUS "----------------")
endif()

unset(icu_programs)
@ -1,55 +0,0 @@
# Find OpenLDAP libraries.
#
# Can be configured with:
#   OPENLDAP_ROOT_DIR - path to the OpenLDAP installation prefix
#   OPENLDAP_USE_STATIC_LIBS - look for static version of the libraries
#   OPENLDAP_USE_REENTRANT_LIBS - look for thread-safe version of the libraries
#
# Sets values of:
#   OPENLDAP_FOUND - TRUE if found
#   OPENLDAP_INCLUDE_DIRS - paths to the include directories
#   OPENLDAP_LIBRARIES - paths to the libldap and liblber libraries
#   OPENLDAP_LDAP_LIBRARY - paths to the libldap library
#   OPENLDAP_LBER_LIBRARY - paths to the liblber library
#
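# A minimal usage sketch based on the variables documented above
# (illustrative; the prefix path and target name are assumptions):
#
#   set(OPENLDAP_ROOT_DIR "/usr/local/openldap")
#   set(OPENLDAP_USE_REENTRANT_LIBS 1)
#   find_package(OpenLDAP)
#   if(OPENLDAP_FOUND)
#       target_include_directories(example PRIVATE ${OPENLDAP_INCLUDE_DIRS})
#       target_link_libraries(example PRIVATE ${OPENLDAP_LIBRARIES})
#   endif()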

if(OPENLDAP_USE_STATIC_LIBS)
    set(_orig_CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES})
    if(WIN32)
        set(CMAKE_FIND_LIBRARY_SUFFIXES ".lib" ".a" ${CMAKE_FIND_LIBRARY_SUFFIXES})
    else()
        set(CMAKE_FIND_LIBRARY_SUFFIXES ".a")
    endif()
endif()

set(_r_suffix)
if(OPENLDAP_USE_REENTRANT_LIBS)
    set(_r_suffix "_r")
endif()

if(OPENLDAP_ROOT_DIR)
    find_path(OPENLDAP_INCLUDE_DIRS NAMES "ldap.h" "lber.h" PATHS "${OPENLDAP_ROOT_DIR}" PATH_SUFFIXES "include" NO_DEFAULT_PATH)
    find_library(OPENLDAP_LDAP_LIBRARY NAMES "ldap${_r_suffix}" PATHS "${OPENLDAP_ROOT_DIR}" PATH_SUFFIXES "lib" NO_DEFAULT_PATH)
    find_library(OPENLDAP_LBER_LIBRARY NAMES "lber" PATHS "${OPENLDAP_ROOT_DIR}" PATH_SUFFIXES "lib" NO_DEFAULT_PATH)
else()
    find_path(OPENLDAP_INCLUDE_DIRS NAMES "ldap.h" "lber.h")
    find_library(OPENLDAP_LDAP_LIBRARY NAMES "ldap${_r_suffix}")
    find_library(OPENLDAP_LBER_LIBRARY NAMES "lber")
endif()

unset(_r_suffix)

set(OPENLDAP_LIBRARIES ${OPENLDAP_LDAP_LIBRARY} ${OPENLDAP_LBER_LIBRARY})

include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(
    OpenLDAP DEFAULT_MSG
    OPENLDAP_INCLUDE_DIRS OPENLDAP_LDAP_LIBRARY OPENLDAP_LBER_LIBRARY
)

mark_as_advanced(OPENLDAP_INCLUDE_DIRS OPENLDAP_LIBRARIES OPENLDAP_LDAP_LIBRARY OPENLDAP_LBER_LIBRARY)

if(OPENLDAP_USE_STATIC_LIBS)
    set(CMAKE_FIND_LIBRARY_SUFFIXES ${_orig_CMAKE_FIND_LIBRARY_SUFFIXES})
    unset(_orig_CMAKE_FIND_LIBRARY_SUFFIXES)
endif()
@ -1,132 +0,0 @@
# https://github.com/apache/arrow/blob/master/cpp/cmake_modules/FindParquet.cmake

# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

# - Find Parquet (parquet/api/reader.h, libparquet.a, libparquet.so)
#
# This module requires Arrow from which it uses
#   arrow_find_package()
#
# This module defines
#  PARQUET_FOUND, whether Parquet has been found
#  PARQUET_IMPORT_LIB, path to libparquet's import library (Windows only)
#  PARQUET_INCLUDE_DIR, directory containing headers
#  PARQUET_LIBS, deprecated. Use PARQUET_LIB_DIR instead
#  PARQUET_LIB_DIR, directory containing Parquet libraries
#  PARQUET_SHARED_IMP_LIB, deprecated. Use PARQUET_IMPORT_LIB instead
#  PARQUET_SHARED_LIB, path to libparquet's shared library
#  PARQUET_SO_VERSION, shared object version of found Parquet such as "100"
#  PARQUET_STATIC_LIB, path to libparquet.a
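#
# A minimal usage sketch (illustrative; the target name `example` is an
# assumption). As noted above, Arrow must be findable first:
#
#   find_package(Parquet REQUIRED)
#   target_include_directories(example PRIVATE ${PARQUET_INCLUDE_DIR})
#   target_link_libraries(example PRIVATE ${PARQUET_SHARED_LIB})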

if(DEFINED PARQUET_FOUND)
    return()
endif()

set(find_package_arguments)
if(${CMAKE_FIND_PACKAGE_NAME}_FIND_VERSION)
    list(APPEND find_package_arguments "${${CMAKE_FIND_PACKAGE_NAME}_FIND_VERSION}")
endif()
if(${CMAKE_FIND_PACKAGE_NAME}_FIND_REQUIRED)
    list(APPEND find_package_arguments REQUIRED)
endif()
if(${CMAKE_FIND_PACKAGE_NAME}_FIND_QUIETLY)
    list(APPEND find_package_arguments QUIET)
endif()
find_package(Arrow ${find_package_arguments})

if(NOT "$ENV{PARQUET_HOME}" STREQUAL "")
    file(TO_CMAKE_PATH "$ENV{PARQUET_HOME}" PARQUET_HOME)
endif()

if((NOT PARQUET_HOME) AND ARROW_HOME)
    set(PARQUET_HOME ${ARROW_HOME})
endif()

if(ARROW_FOUND)
    arrow_find_package(PARQUET
        "${PARQUET_HOME}"
        parquet
        parquet/api/reader.h
        Parquet
        parquet)
    if(PARQUET_HOME)
        if(PARQUET_INCLUDE_DIR)
            file(READ "${PARQUET_INCLUDE_DIR}/parquet/parquet_version.h"
                PARQUET_VERSION_H_CONTENT)
            arrow_extract_macro_value(PARQUET_VERSION_MAJOR "PARQUET_VERSION_MAJOR"
                "${PARQUET_VERSION_H_CONTENT}")
            arrow_extract_macro_value(PARQUET_VERSION_MINOR "PARQUET_VERSION_MINOR"
                "${PARQUET_VERSION_H_CONTENT}")
            arrow_extract_macro_value(PARQUET_VERSION_PATCH "PARQUET_VERSION_PATCH"
                "${PARQUET_VERSION_H_CONTENT}")
            if("${PARQUET_VERSION_MAJOR}" STREQUAL ""
                OR "${PARQUET_VERSION_MINOR}" STREQUAL ""
                OR "${PARQUET_VERSION_PATCH}" STREQUAL "")
                set(PARQUET_VERSION "0.0.0")
            else()
                set(PARQUET_VERSION
                    "${PARQUET_VERSION_MAJOR}.${PARQUET_VERSION_MINOR}.${PARQUET_VERSION_PATCH}")
            endif()

            arrow_extract_macro_value(PARQUET_SO_VERSION_QUOTED "PARQUET_SO_VERSION"
                "${PARQUET_VERSION_H_CONTENT}")
            string(REGEX
                REPLACE "^\"(.+)\"$" "\\1" PARQUET_SO_VERSION "${PARQUET_SO_VERSION_QUOTED}")
            arrow_extract_macro_value(PARQUET_FULL_SO_VERSION_QUOTED "PARQUET_FULL_SO_VERSION"
                "${PARQUET_VERSION_H_CONTENT}")
            string(REGEX
                REPLACE "^\"(.+)\"$" "\\1" PARQUET_FULL_SO_VERSION
                "${PARQUET_FULL_SO_VERSION_QUOTED}")
        endif()
    else()
        if(PARQUET_USE_CMAKE_PACKAGE_CONFIG)
            find_package(Parquet CONFIG)
        elseif(PARQUET_USE_PKG_CONFIG)
            pkg_get_variable(PARQUET_SO_VERSION parquet so_version)
            pkg_get_variable(PARQUET_FULL_SO_VERSION parquet full_so_version)
        endif()
    endif()
    set(PARQUET_ABI_VERSION "${PARQUET_SO_VERSION}")
endif()

mark_as_advanced(PARQUET_ABI_VERSION
    PARQUET_IMPORT_LIB
    PARQUET_INCLUDE_DIR
    PARQUET_LIBS
    PARQUET_LIB_DIR
    PARQUET_SHARED_IMP_LIB
    PARQUET_SHARED_LIB
    PARQUET_SO_VERSION
    PARQUET_STATIC_LIB
    PARQUET_VERSION)

find_package_handle_standard_args(Parquet
    REQUIRED_VARS
        PARQUET_INCLUDE_DIR
        PARQUET_LIB_DIR
        PARQUET_SO_VERSION
    VERSION_VAR
        PARQUET_VERSION)
set(PARQUET_FOUND ${Parquet_FOUND})

if(Parquet_FOUND AND NOT Parquet_FIND_QUIETLY)
    message(STATUS "Parquet version: ${PARQUET_VERSION} (${PARQUET_FIND_APPROACH})")
    message(STATUS "Found the Parquet shared library: ${PARQUET_SHARED_LIB}")
    message(STATUS "Found the Parquet import library: ${PARQUET_IMPORT_LIB}")
    message(STATUS "Found the Parquet static library: ${PARQUET_STATIC_LIB}")
endif()
@ -1,44 +0,0 @@
# - Try to find cityhash headers and libraries.
#
# Usage of this module as follows:
#
#     find_package(cityhash)
#
# Variables used by this module, they can change the default behaviour and need
# to be set before calling find_package:
#
#  CITYHASH_ROOT_DIR  Set this variable to the root installation of
#                     cityhash if the module has problems finding
#                     the proper installation path.
#
# Variables defined by this module:
#
#  CITYHASH_FOUND              System has cityhash libs/headers
#  CITYHASH_LIBRARIES          The cityhash library/libraries
#  CITYHASH_INCLUDE_DIR        The location of cityhash headers
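#
# A minimal usage sketch (illustrative; the target name `example` is an
# assumption). The double-conversion and farmhash modules below follow
# the same pattern:
#
#   find_package(cityhash)
#   if(CITYHASH_FOUND)
#       target_include_directories(example PRIVATE ${CITYHASH_INCLUDE_DIR})
#       target_link_libraries(example PRIVATE ${CITYHASH_LIBRARIES})
#   endif()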

find_path(CITYHASH_ROOT_DIR
    NAMES include/city.h
)

find_library(CITYHASH_LIBRARIES
    NAMES cityhash
    PATHS ${CITYHASH_ROOT_DIR}/lib ${CITYHASH_LIBRARIES_PATHS}
)

find_path(CITYHASH_INCLUDE_DIR
    NAMES city.h
    PATHS ${CITYHASH_ROOT_DIR}/include ${CITYHASH_INCLUDE_PATHS}
)

include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(cityhash DEFAULT_MSG
    CITYHASH_LIBRARIES
    CITYHASH_INCLUDE_DIR
)

mark_as_advanced(
    CITYHASH_ROOT_DIR
    CITYHASH_LIBRARIES
    CITYHASH_INCLUDE_DIR
)
@ -1,44 +0,0 @@
# - Try to find double-conversion headers and libraries.
#
# Usage of this module as follows:
#
#     find_package(double-conversion)
#
# Variables used by this module, they can change the default behaviour and need
# to be set before calling find_package:
#
#  DOUBLE_CONVERSION_ROOT_DIR  Set this variable to the root installation of
#                              double-conversion if the module has problems finding
#                              the proper installation path.
#
# Variables defined by this module:
#
#  DOUBLE_CONVERSION_FOUND             System has double-conversion libs/headers
#  DOUBLE_CONVERSION_LIBRARIES         The double-conversion library/libraries
#  DOUBLE_CONVERSION_INCLUDE_DIR       The location of double-conversion headers

find_path(DOUBLE_CONVERSION_ROOT_DIR
    NAMES include/double-conversion/double-conversion.h
)

find_library(DOUBLE_CONVERSION_LIBRARIES
    NAMES double-conversion
    PATHS ${DOUBLE_CONVERSION_ROOT_DIR}/lib ${BTRIE_CITYHASH_PATHS}
)

find_path(DOUBLE_CONVERSION_INCLUDE_DIR
    NAMES double-conversion/double-conversion.h
    PATHS ${DOUBLE_CONVERSION_ROOT_DIR}/include ${DOUBLE_CONVERSION_INCLUDE_PATHS}
)

include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(double_conversion DEFAULT_MSG
    DOUBLE_CONVERSION_LIBRARIES
    DOUBLE_CONVERSION_INCLUDE_DIR
)

mark_as_advanced(
    DOUBLE_CONVERSION_ROOT_DIR
    DOUBLE_CONVERSION_LIBRARIES
    DOUBLE_CONVERSION_INCLUDE_DIR
)
@ -1,44 +0,0 @@
# - Try to find farmhash headers and libraries.
#
# Usage of this module as follows:
#
#     find_package(farmhash)
#
# Variables used by this module, they can change the default behaviour and need
# to be set before calling find_package:
#
#  FARMHASH_ROOT_DIR  Set this variable to the root installation of
#                     farmhash if the module has problems finding
#                     the proper installation path.
#
# Variables defined by this module:
#
#  FARMHASH_FOUND              System has farmhash libs/headers
#  FARMHASH_LIBRARIES          The farmhash library/libraries
#  FARMHASH_INCLUDE_DIR        The location of farmhash headers

find_path(FARMHASH_ROOT_DIR
    NAMES include/farmhash.h
)

find_library(FARMHASH_LIBRARIES
    NAMES farmhash
    PATHS ${FARMHASH_ROOT_DIR}/lib ${FARMHASH_LIBRARIES_PATHS}
)

find_path(FARMHASH_INCLUDE_DIR
    NAMES farmhash.h
    PATHS ${FARMHASH_ROOT_DIR}/include ${FARMHASH_INCLUDE_PATHS}
)

include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(farmhash DEFAULT_MSG
    FARMHASH_LIBRARIES
    FARMHASH_INCLUDE_DIR
)

mark_as_advanced(
    FARMHASH_ROOT_DIR
    FARMHASH_LIBRARIES
    FARMHASH_INCLUDE_DIR
)
@ -1,337 +0,0 @@
#[[
Defines the following variables:
``gRPC_FOUND``
  Whether the gRPC framework is found
``gRPC_INCLUDE_DIRS``
  The include directories of the gRPC framework, including the include directories of the C++ wrapper.
``gRPC_LIBRARIES``
  The libraries of the gRPC framework.
``gRPC_CPP_PLUGIN``
  The plugin for generating gRPC client and server C++ stubs from `.proto` files
``gRPC_PYTHON_PLUGIN``
  The plugin for generating gRPC client and server Python stubs from `.proto` files

The following :prop_tgt:`IMPORTED` targets are also defined:
``grpc++``
``grpc++_unsecure``
``grpc_cpp_plugin``
``grpc_python_plugin``

Set the following variables to adjust the behaviour of this script:
``gRPC_USE_UNSECURE_LIBRARIES``
  if set gRPC_LIBRARIES will be filled with the unsecure version of the libraries (i.e. without SSL)
  instead of the secure ones.
``gRPC_DEBUG``
  if set the debug message will be printed.

Add custom commands to process ``.proto`` files to C++::
  protobuf_generate_grpc_cpp(<SRCS> <HDRS>
      [DESCRIPTORS <DESC>] [EXPORT_MACRO <MACRO>] [<ARGN>...])

``SRCS``
  Variable to define with autogenerated source files
``HDRS``
  Variable to define with autogenerated header files
``DESCRIPTORS``
  Variable to define with autogenerated descriptor files, if requested.
``EXPORT_MACRO``
  is a macro which should expand to ``__declspec(dllexport)`` or
  ``__declspec(dllimport)`` depending on what is being compiled.
``ARGN``
  ``.proto`` files
#]]
|
||||
# Function to generate C++ files from .proto files.
|
||||
# This function is a modified version of the function PROTOBUF_GENERATE_CPP() copied from https://github.com/Kitware/CMake/blob/master/Modules/FindProtobuf.cmake.
|
||||
function(PROTOBUF_GENERATE_GRPC_CPP SRCS HDRS)
|
||||
cmake_parse_arguments(protobuf_generate_grpc_cpp "" "EXPORT_MACRO;DESCRIPTORS" "" ${ARGN})
|
||||
|
||||
set(_proto_files "${protobuf_generate_grpc_cpp_UNPARSED_ARGUMENTS}")
|
||||
if(NOT _proto_files)
|
||||
message(SEND_ERROR "Error: PROTOBUF_GENERATE_GRPC_CPP() called without any proto files")
|
||||
return()
|
||||
endif()
|
||||
|
||||
if(PROTOBUF_GENERATE_GRPC_CPP_APPEND_PATH)
|
||||
set(_append_arg APPEND_PATH)
|
||||
endif()
|
||||
|
||||
if(protobuf_generate_grpc_cpp_DESCRIPTORS)
|
||||
set(_descriptors DESCRIPTORS)
|
||||
endif()
|
||||
|
||||
if(DEFINED PROTOBUF_IMPORT_DIRS AND NOT DEFINED Protobuf_IMPORT_DIRS)
|
||||
set(Protobuf_IMPORT_DIRS "${PROTOBUF_IMPORT_DIRS}")
|
||||
endif()
|
||||
|
||||
if(DEFINED Protobuf_IMPORT_DIRS)
|
||||
set(_import_arg IMPORT_DIRS ${Protobuf_IMPORT_DIRS})
|
||||
endif()
|
||||
|
||||
set(_outvar)
|
||||
protobuf_generate_grpc(${_append_arg} ${_descriptors} LANGUAGE cpp EXPORT_MACRO ${protobuf_generate_cpp_EXPORT_MACRO} OUT_VAR _outvar ${_import_arg} PROTOS ${_proto_files})
|
||||
|
||||
set(${SRCS})
|
||||
set(${HDRS})
|
||||
if(protobuf_generate_grpc_cpp_DESCRIPTORS)
|
||||
set(${protobuf_generate_grpc_cpp_DESCRIPTORS})
|
||||
endif()
|
||||
|
||||
foreach(_file ${_outvar})
|
||||
if(_file MATCHES "cc$")
|
||||
list(APPEND ${SRCS} ${_file})
|
||||
elseif(_file MATCHES "desc$")
|
||||
list(APPEND ${protobuf_generate_grpc_cpp_DESCRIPTORS} ${_file})
|
||||
else()
|
||||
list(APPEND ${HDRS} ${_file})
|
||||
endif()
|
||||
endforeach()
|
||||
set(${SRCS} ${${SRCS}} PARENT_SCOPE)
|
||||
set(${HDRS} ${${HDRS}} PARENT_SCOPE)
|
||||
if(protobuf_generate_grpc_cpp_DESCRIPTORS)
|
||||
set(${protobuf_generate_grpc_cpp_DESCRIPTORS} "${${protobuf_generate_grpc_cpp_DESCRIPTORS}}" PARENT_SCOPE)
|
||||
endif()
|
||||
endfunction()
|
||||
|
||||
# Helper function.
|
||||
# This function is a modified version of the function protobuf_generate() copied from https://github.com/Kitware/CMake/blob/master/Modules/FindProtobuf.cmake.
|
||||
function(protobuf_generate_grpc)
|
||||
set(_options APPEND_PATH DESCRIPTORS)
|
||||
set(_singleargs LANGUAGE OUT_VAR EXPORT_MACRO PROTOC_OUT_DIR)
|
||||
if(COMMAND target_sources)
|
||||
list(APPEND _singleargs TARGET)
|
||||
endif()
|
||||
set(_multiargs PROTOS IMPORT_DIRS GENERATE_EXTENSIONS)
|
||||
|
||||
cmake_parse_arguments(protobuf_generate_grpc "${_options}" "${_singleargs}" "${_multiargs}" "${ARGN}")
|
||||
|
||||
if(NOT protobuf_generate_grpc_PROTOS AND NOT protobuf_generate_grpc_TARGET)
|
||||
message(SEND_ERROR "Error: protobuf_generate_grpc called without any targets or source files")
|
||||
return()
|
||||
endif()
|
||||
|
||||
if(NOT protobuf_generate_grpc_OUT_VAR AND NOT protobuf_generate_grpc_TARGET)
|
||||
message(SEND_ERROR "Error: protobuf_generate_grpc called without a target or output variable")
|
||||
return()
|
||||
endif()
|
||||
|
||||
if(NOT protobuf_generate_grpc_LANGUAGE)
|
||||
set(protobuf_generate_grpc_LANGUAGE cpp)
|
||||
endif()
|
||||
string(TOLOWER ${protobuf_generate_grpc_LANGUAGE} protobuf_generate_grpc_LANGUAGE)
|
||||
|
||||
if(NOT protobuf_generate_grpc_PROTOC_OUT_DIR)
|
||||
set(protobuf_generate_grpc_PROTOC_OUT_DIR ${CMAKE_CURRENT_BINARY_DIR})
|
||||
endif()
|
||||
|
||||
if(protobuf_generate_grpc_EXPORT_MACRO AND protobuf_generate_grpc_LANGUAGE STREQUAL cpp)
|
||||
set(_dll_export_decl "dllexport_decl=${protobuf_generate_grpc_EXPORT_MACRO}:")
|
||||
endif()
|
||||
|
||||
if(NOT protobuf_generate_grpc_GENERATE_EXTENSIONS)
|
||||
if(protobuf_generate_grpc_LANGUAGE STREQUAL cpp)
|
||||
set(protobuf_generate_grpc_GENERATE_EXTENSIONS .pb.h .pb.cc .grpc.pb.h .grpc.pb.cc)
|
||||
elseif(protobuf_generate_grpc_LANGUAGE STREQUAL python)
|
||||
set(protobuf_generate_grpc_GENERATE_EXTENSIONS _pb2.py)
|
||||
else()
|
||||
message(SEND_ERROR "Error: protobuf_generate_grpc given unknown Language ${LANGUAGE}, please provide a value for GENERATE_EXTENSIONS")
|
||||
return()
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if(NOT protobuf_generate_grpc_PLUGIN)
|
||||
if(protobuf_generate_grpc_LANGUAGE STREQUAL cpp)
|
||||
set(protobuf_generate_grpc_PLUGIN "grpc_cpp_plugin")
|
||||
elseif(protobuf_generate_grpc_LANGUAGE STREQUAL python)
|
||||
set(protobuf_generate_grpc_PLUGIN "grpc_python_plugin")
|
||||
else()
|
||||
message(SEND_ERROR "Error: protobuf_generate_grpc given unknown Language ${LANGUAGE}, please provide a value for PLUGIN")
|
||||
return()
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if(protobuf_generate_grpc_TARGET)
|
||||
get_target_property(_source_list ${protobuf_generate_grpc_TARGET} SOURCES)
|
||||
foreach(_file ${_source_list})
|
||||
if(_file MATCHES "proto$")
|
||||
list(APPEND protobuf_generate_grpc_PROTOS ${_file})
|
||||
endif()
|
||||
endforeach()
|
||||
endif()
|
||||
|
||||
if(NOT protobuf_generate_grpc_PROTOS)
|
||||
message(SEND_ERROR "Error: protobuf_generate_grpc could not find any .proto files")
|
||||
return()
|
||||
endif()
|
||||
|
||||
if(protobuf_generate_grpc_APPEND_PATH)
|
||||
# Create an include path for each file specified
|
||||
foreach(_file ${protobuf_generate_grpc_PROTOS})
|
||||
get_filename_component(_abs_file ${_file} ABSOLUTE)
|
||||
get_filename_component(_abs_path ${_abs_file} PATH)
|
||||
list(FIND _protobuf_include_path ${_abs_path} _contains_already)
|
||||
if(${_contains_already} EQUAL -1)
|
||||
list(APPEND _protobuf_include_path -I ${_abs_path})
|
||||
endif()
|
||||
endforeach()
|
||||
else()
|
||||
set(_protobuf_include_path -I ${CMAKE_CURRENT_SOURCE_DIR})
|
||||
endif()
|
||||
|
||||
foreach(DIR ${protobuf_generate_grpc_IMPORT_DIRS})
|
||||
get_filename_component(ABS_PATH ${DIR} ABSOLUTE)
|
||||
list(FIND _protobuf_include_path ${ABS_PATH} _contains_already)
|
||||
if(${_contains_already} EQUAL -1)
|
||||
list(APPEND _protobuf_include_path -I ${ABS_PATH})
|
||||
endif()
|
||||
endforeach()
|
||||
|
||||
set(_generated_srcs_all)
|
||||
foreach(_proto ${protobuf_generate_grpc_PROTOS})
|
||||
get_filename_component(_abs_file ${_proto} ABSOLUTE)
|
||||
get_filename_component(_abs_dir ${_abs_file} DIRECTORY)
|
||||
get_filename_component(_basename ${_proto} NAME_WE)
|
||||
file(RELATIVE_PATH _rel_dir ${CMAKE_CURRENT_SOURCE_DIR} ${_abs_dir})
|
||||
|
||||
set(_possible_rel_dir)
|
||||
if(NOT protobuf_generate_grpc_APPEND_PATH)
|
||||
set(_possible_rel_dir ${_rel_dir}/)
|
||||
endif()
|
||||
|
||||
set(_generated_srcs)
|
||||
foreach(_ext ${protobuf_generate_grpc_GENERATE_EXTENSIONS})
|
||||
list(APPEND _generated_srcs "${protobuf_generate_grpc_PROTOC_OUT_DIR}/${_possible_rel_dir}${_basename}${_ext}")
|
||||
endforeach()
|
||||
|
||||
if(protobuf_generate_grpc_DESCRIPTORS AND protobuf_generate_grpc_LANGUAGE STREQUAL cpp)
|
||||
set(_descriptor_file "${CMAKE_CURRENT_BINARY_DIR}/${_basename}.desc")
|
||||
set(_dll_desc_out "--descriptor_set_out=${_descriptor_file}")
|
||||
list(APPEND _generated_srcs ${_descriptor_file})
|
||||
endif()
|
||||
list(APPEND _generated_srcs_all ${_generated_srcs})
|
||||
|
||||
add_custom_command(
|
||||
OUTPUT ${_generated_srcs}
|
||||
COMMAND protobuf::protoc
|
||||
ARGS --${protobuf_generate_grpc_LANGUAGE}_out ${_dll_export_decl}${protobuf_generate_grpc_PROTOC_OUT_DIR}
|
||||
--grpc_out ${_dll_export_decl}${protobuf_generate_grpc_PROTOC_OUT_DIR}
|
||||
--plugin=protoc-gen-grpc=$<TARGET_FILE:${protobuf_generate_grpc_PLUGIN}>
|
||||
${_dll_desc_out} ${_protobuf_include_path} ${_abs_file}
|
||||
DEPENDS ${_abs_file} protobuf::protoc ${protobuf_generate_grpc_PLUGIN}
|
||||
COMMENT "Running ${protobuf_generate_grpc_LANGUAGE} protocol buffer compiler on ${_proto}"
|
||||
VERBATIM)
|
||||
endforeach()
|
||||
|
||||
set_source_files_properties(${_generated_srcs_all} PROPERTIES GENERATED TRUE)
|
||||
if(protobuf_generate_grpc_OUT_VAR)
|
||||
set(${protobuf_generate_grpc_OUT_VAR} ${_generated_srcs_all} PARENT_SCOPE)
|
||||
endif()
|
||||
if(protobuf_generate_grpc_TARGET)
|
||||
target_sources(${protobuf_generate_grpc_TARGET} PRIVATE ${_generated_srcs_all})
|
||||
endif()
|
||||
endfunction()
|
||||
|
||||
|
||||
# Find the libraries.
|
||||
if(gRPC_USE_STATIC_LIBS)
|
||||
# Support preference of static libs by adjusting CMAKE_FIND_LIBRARY_SUFFIXES
|
||||
set(_gRPC_ORIG_CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES})
|
||||
if(WIN32)
|
||||
set(CMAKE_FIND_LIBRARY_SUFFIXES .lib .a ${CMAKE_FIND_LIBRARY_SUFFIXES})
|
||||
else()
|
||||
set(CMAKE_FIND_LIBRARY_SUFFIXES .a)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
find_library(gRPC_LIBRARY NAMES grpc)
|
||||
find_library(gRPC_CPP_LIBRARY NAMES grpc++)
|
||||
find_library(gRPC_UNSECURE_LIBRARY NAMES grpc_unsecure)
|
||||
find_library(gRPC_CPP_UNSECURE_LIBRARY NAMES grpc++_unsecure)
|
||||
find_library(gRPC_CARES_LIBRARY NAMES cares)
|
||||
|
||||
set(gRPC_LIBRARIES)
|
||||
if(gRPC_USE_UNSECURE_LIBRARIES)
|
||||
if(gRPC_UNSECURE_LIBRARY)
|
||||
set(gRPC_LIBRARIES ${gRPC_LIBRARIES} ${gRPC_UNSECURE_LIBRARY})
|
||||
endif()
|
||||
if(gRPC_CPP_UNSECURE_LIBRARY)
|
||||
set(gRPC_LIBRARIES ${gRPC_LIBRARIES} ${gRPC_CPP_UNSECURE_LIBRARY})
|
||||
endif()
|
||||
else()
|
||||
if(gRPC_LIBRARY)
|
||||
set(gRPC_LIBRARIES ${gRPC_LIBRARIES} ${gRPC_LIBRARY})
|
||||
endif()
|
||||
if(gRPC_CPP_UNSECURE_LIBRARY)
|
||||
set(gRPC_LIBRARIES ${gRPC_LIBRARIES} ${gRPC_CPP_LIBRARY})
|
||||
endif()
|
||||
endif()
|
||||
set(gRPC_LIBRARIES ${gRPC_LIBRARIES} ${gRPC_CARES_LIBRARY})
|
||||
|
||||
# Restore the original find library ordering.
|
||||
if(gRPC_USE_STATIC_LIBS)
|
||||
set(CMAKE_FIND_LIBRARY_SUFFIXES ${_gRPC_ORIG_CMAKE_FIND_LIBRARY_SUFFIXES})
|
||||
endif()
|
||||
|
||||
# Find the include directories.
|
||||
find_path(gRPC_INCLUDE_DIR grpc/grpc.h)
|
||||
find_path(gRPC_CPP_INCLUDE_DIR grpc++/grpc++.h)
|
||||
|
||||
if(gRPC_INCLUDE_DIR AND gRPC_CPP_INCLUDE_DIR AND NOT(gRPC_INCLUDE_DIR STREQUAL gRPC_CPP_INCLUDE_DIR))
|
||||
set(gRPC_INCLUDE_DIRS ${gRPC_INCLUDE_DIR} ${gRPC_CPP_INCLUDE_DIR})
|
||||
elseif(gRPC_INCLUDE_DIR)
|
||||
set(gRPC_INCLUDE_DIRS ${gRPC_INCLUDE_DIR})
|
||||
else()
|
||||
set(gRPC_INCLUDE_DIRS ${gRPC_CPP_INCLUDE_DIR})
|
||||
endif()
|
||||
|
||||
# Get full path to plugin.
|
||||
find_program(gRPC_CPP_PLUGIN
|
||||
NAMES grpc_cpp_plugin
|
||||
DOC "The plugin for generating gRPC client and server C++ stubs from `.proto` files")
|
||||
|
||||
find_program(gRPC_PYTHON_PLUGIN
|
||||
NAMES grpc_python_plugin
|
||||
DOC "The plugin for generating gRPC client and server Python stubs from `.proto` files")
|
||||
|
||||
# Add imported targets.
|
||||
if(gRPC_CPP_LIBRARY AND NOT TARGET grpc++)
|
||||
add_library(grpc++ UNKNOWN IMPORTED)
|
||||
set_target_properties(grpc++ PROPERTIES
|
||||
IMPORTED_LOCATION "${gRPC_CPP_LIBRARY}")
|
||||
set_target_properties(grpc++ PROPERTIES
|
||||
INTERFACE_INCLUDE_DIRECTORIES ${gRPC_INCLUDE_DIRS})
|
||||
endif()
|
||||
|
||||
if(gRPC_CPP_UNSECURE_LIBRARY AND NOT TARGET grpc++_unsecure)
|
||||
add_library(grpc++_unsecure UNKNOWN IMPORTED)
|
||||
set_target_properties(grpc++_unsecure PROPERTIES
|
||||
IMPORTED_LOCATION "${gRPC_CPP_UNSECURE_LIBRARY}")
|
||||
set_target_properties(grpc++_unsecure PROPERTIES
|
||||
INTERFACE_INCLUDE_DIRECTORIES ${gRPC_INCLUDE_DIRS})
|
||||
endif()
|
||||
|
||||
if(gRPC_CPP_PLUGIN AND NOT TARGET grpc_cpp_plugin)
|
||||
add_executable(grpc_cpp_plugin IMPORTED)
|
||||
set_target_properties(grpc_cpp_plugin PROPERTIES
|
||||
IMPORTED_LOCATION "${gRPC_CPP_PLUGIN}")
|
||||
endif()
|
||||
|
||||
if(gRPC_PYTHON_PLUGIN AND NOT TARGET grpc_python_plugin)
|
||||
add_executable(grpc_python_plugin IMPORTED)
|
||||
set_target_properties(grpc_python_plugin PROPERTIES
|
||||
IMPORTED_LOCATION "${gRPC_PYTHON_PLUGIN}")
|
||||
endif()
|
||||
|
||||
#include(FindPackageHandleStandardArgs.cmake)
|
||||
FIND_PACKAGE_HANDLE_STANDARD_ARGS(gRPC
|
||||
REQUIRED_VARS gRPC_LIBRARY gRPC_CPP_LIBRARY gRPC_UNSECURE_LIBRARY gRPC_CPP_UNSECURE_LIBRARY gRPC_CARES_LIBRARY
|
||||
gRPC_INCLUDE_DIR gRPC_CPP_INCLUDE_DIR gRPC_CPP_PLUGIN gRPC_PYTHON_PLUGIN)
|
||||
|
||||
if(gRPC_FOUND)
|
||||
if(gRPC_DEBUG)
|
||||
message(STATUS "gRPC: INCLUDE_DIRS=${gRPC_INCLUDE_DIRS}")
|
||||
message(STATUS "gRPC: LIBRARIES=${gRPC_LIBRARIES}")
|
||||
message(STATUS "gRPC: CPP_PLUGIN=${gRPC_CPP_PLUGIN}")
|
||||
message(STATUS "gRPC: PYTHON_PLUGIN=${gRPC_PYTHON_PLUGIN}")
|
||||
endif()
|
||||
endif()
|
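For orientation, here is a minimal sketch of how the removed module's code-generation entry point was meant to be called from a consuming CMakeLists.txt, per the doc comment at the top of the file. The proto file and target names (example.proto, example_server) are hypothetical:

```cmake
# Hypothetical consumer of the removed FindgRPC.cmake module; names are illustrative.
find_package(gRPC REQUIRED)

# Generates example.pb.{h,cc} and example.grpc.pb.{h,cc} from example.proto,
# storing the resulting file lists in EXAMPLE_SRCS / EXAMPLE_HDRS.
protobuf_generate_grpc_cpp(EXAMPLE_SRCS EXAMPLE_HDRS example.proto)

add_executable(example_server server.cpp ${EXAMPLE_SRCS} ${EXAMPLE_HDRS})
target_include_directories(example_server PRIVATE ${CMAKE_CURRENT_BINARY_DIR} ${gRPC_INCLUDE_DIRS})
target_link_libraries(example_server PRIVATE ${gRPC_LIBRARIES})
```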
@@ -12,13 +12,13 @@ macro (add_warning flag)
     if (SUPPORTS_CXXFLAG_${underscored_flag})
         set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -W${flag}")
     else ()
-        message (WARNING "Flag -W${flag} is unsupported")
+        message (STATUS "Flag -W${flag} is unsupported")
     endif ()

     if (SUPPORTS_CFLAG_${underscored_flag})
         set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -W${flag}")
     else ()
-        message (WARNING "Flag -W${flag} is unsupported")
+        message (STATUS "Flag -W${flag} is unsupported")
     endif ()

 endmacro ()
@@ -39,7 +39,7 @@ macro (target_add_warning target flag)
     if (SUPPORTS_CXXFLAG_${underscored_flag})
        target_compile_options (${target} PRIVATE "-W${flag}")
     else ()
-        message (WARNING "Flag -W${flag} is unsupported")
+        message (STATUS "Flag -W${flag} is unsupported")
     endif ()
 endmacro ()

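A hedged usage sketch of the two macros changed above, assuming the compile checks earlier in the file have defined the corresponding SUPPORTS_CXXFLAG_<flag> result variables; the flag and target names are illustrative:

```cmake
# Globally append -Wextra / -Wshadow when the compiler supports them;
# with this commit an unsupported flag is now reported as STATUS, not WARNING.
add_warning(extra)
add_warning(shadow)

# Per-target variant: adds -Wpedantic via target_compile_options to one target only.
target_add_warning(my_library pedantic)
```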
@@ -16,3 +16,7 @@ endif ()
 if (CMAKE_SYSTEM_PROCESSOR MATCHES "^(ppc64le.*|PPC64LE.*)")
     set (ARCH_PPC64LE 1)
 endif ()
+if (CMAKE_SYSTEM_PROCESSOR MATCHES "riscv64")
+    set (ARCH_RISCV64 1)
+endif ()
+
@@ -2,11 +2,11 @@

 # NOTE: has nothing in common with DBMS_TCP_PROTOCOL_VERSION,
 # only DBMS_TCP_PROTOCOL_VERSION should be incremented on protocol changes.
-SET(VERSION_REVISION 54457)
-SET(VERSION_MAJOR 21)
-SET(VERSION_MINOR 12)
+SET(VERSION_REVISION 54459)
+SET(VERSION_MAJOR 22)
+SET(VERSION_MINOR 2)
 SET(VERSION_PATCH 1)
-SET(VERSION_GITHASH 503a418dedf0011e9040c3a1b6913e0b5488be4c)
-SET(VERSION_DESCRIBE v21.12.1.1-prestable)
-SET(VERSION_STRING 21.12.1.1)
+SET(VERSION_GITHASH dfe64a2789bbf51046bb6b5476f874f7b59d124c)
+SET(VERSION_DESCRIBE v22.2.1.1-prestable)
+SET(VERSION_STRING 22.2.1.1)
 # end of autochange
@@ -1,23 +0,0 @@
-macro(find_contrib_lib LIB_NAME)
-
-    string(TOLOWER ${LIB_NAME} LIB_NAME_LC)
-    string(TOUPPER ${LIB_NAME} LIB_NAME_UC)
-    string(REPLACE "-" "_" LIB_NAME_UC ${LIB_NAME_UC})
-
-    option (USE_INTERNAL_${LIB_NAME_UC}_LIBRARY "Use bundled library ${LIB_NAME} instead of system" ${NOT_UNBUNDLED})
-
-    if (NOT USE_INTERNAL_${LIB_NAME_UC}_LIBRARY)
-        find_package ("${LIB_NAME}")
-        if (NOT ${LIB_NAME_UC}_FOUND)
-            message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use system ${LIB_NAME}")
-        endif()
-    endif ()
-
-    if (NOT ${LIB_NAME_UC}_FOUND)
-        set (USE_INTERNAL_${LIB_NAME_UC}_LIBRARY 1)
-        set (${LIB_NAME_UC}_LIBRARIES ${LIB_NAME_LC})
-        set (${LIB_NAME_UC}_INCLUDE_DIR ${${LIB_NAME_UC}_CONTRIB_INCLUDE_DIR})
-    endif ()
-
-    message (STATUS "Using ${LIB_NAME}: ${${LIB_NAME_UC}_INCLUDE_DIR} : ${${LIB_NAME_UC}_LIBRARIES}")
-endmacro()
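A sketch of what one invocation of the removed macro effectively expands to, using double-conversion as the illustrative argument (the variable names follow the string transforms in the macro body):

```cmake
# find_contrib_lib(double-conversion) derives:
#   LIB_NAME_LC = "double-conversion", LIB_NAME_UC = "DOUBLE_CONVERSION"
# and, when no system library is found, falls back to the bundled target:
set (USE_INTERNAL_DOUBLE_CONVERSION_LIBRARY 1)
set (DOUBLE_CONVERSION_LIBRARIES double-conversion)
set (DOUBLE_CONVERSION_INCLUDE_DIR ${DOUBLE_CONVERSION_CONTRIB_INCLUDE_DIR})
```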
@@ -28,6 +28,9 @@ option (ARCH_NATIVE "Add -march=native compiler flag. This makes your binaries n
 if (ARCH_NATIVE)
     set (COMPILER_FLAGS "${COMPILER_FLAGS} -march=native")

+elseif (ARCH_AARCH64)
+    set (COMPILER_FLAGS "${COMPILER_FLAGS} -march=armv8-a+crc")
+
 else ()
     set (TEST_FLAG "-mssse3")
     set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0")
@@ -43,7 +46,6 @@ else ()
         set (COMPILER_FLAGS "${COMPILER_FLAGS} ${TEST_FLAG}")
     endif ()

-
     set (TEST_FLAG "-msse4.1")
     set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0")
     check_cxx_source_compiles("
@@ -132,15 +134,17 @@ else ()
         set (COMPILER_FLAGS "${COMPILER_FLAGS} ${TEST_FLAG}")
     endif ()

-    set (TEST_FLAG "-mavx512f -mavx512bw")
+    set (TEST_FLAG "-mavx512f -mavx512bw -mavx512vl")
     set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0")
     check_cxx_source_compiles("
        #include <immintrin.h>
        int main() {
            auto a = _mm512_setzero_epi32();
            (void)a;
            auto b = _mm512_add_epi16(__m512i(), __m512i());
            (void)b;
+           auto c = _mm_cmp_epi8_mask(__m128i(), __m128i(), 0);
+           (void)c;
            return 0;
        }
     " HAVE_AVX512)
@@ -160,9 +164,9 @@ else ()
     " HAVE_BMI)
     if (HAVE_BMI AND ENABLE_BMI)
         set (COMPILER_FLAGS "${COMPILER_FLAGS} ${TEST_FLAG}")
     endif ()
 endif ()

-#Limit avx2/avx512 flag for specific source build
+# Limit avx2/avx512 flag for specific source build
 set (X86_INTRINSICS_FLAGS "")
 if (ENABLE_AVX2_FOR_SPEC_OP)
     if (HAVE_BMI)
@@ -179,7 +183,7 @@ else ()
         set (X86_INTRINSICS_FLAGS "${X86_INTRINSICS_FLAGS} -mbmi")
     endif ()
     if (HAVE_AVX512)
-        set (X86_INTRINSICS_FLAGS "${X86_INTRINSICS_FLAGS} -mavx512f -mavx512bw -mprefer-vector-width=256")
+        set (X86_INTRINSICS_FLAGS "${X86_INTRINSICS_FLAGS} -mavx512f -mavx512bw -mavx512vl -mprefer-vector-width=256")
     endif ()
 endif ()
 endif ()

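The hunks above all follow one probe pattern: try to compile a snippet that uses the instruction set, then append the flag only on success. A hedged, self-contained sketch of that pattern for one more hypothetical flag (the probe variable HAVE_AVX2_PROBE and option ENABLE_AVX2 are illustrative here):

```cmake
# Minimal sketch of the compile-probe pattern used by cpu_features.cmake.
include (CheckCXXSourceCompiles)

set (TEST_FLAG "-mavx2")
set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0")
check_cxx_source_compiles("
    #include <immintrin.h>
    int main() {
        auto a = _mm256_add_epi32(__m256i(), __m256i());
        (void)a;
        return 0;
    }
" HAVE_AVX2_PROBE)
if (HAVE_AVX2_PROBE AND ENABLE_AVX2)
    set (COMPILER_FLAGS "${COMPILER_FLAGS} ${TEST_FLAG}")
endif ()
```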
@@ -1,29 +0,0 @@
-if (MISSING_INTERNAL_LIBUV_LIBRARY)
-    message (WARNING "Can't find internal libuv needed for AMQP-CPP library")
-    set (ENABLE_AMQPCPP OFF CACHE INTERNAL "")
-endif()
-
-option(ENABLE_AMQPCPP "Enable AMQP-CPP" ${ENABLE_LIBRARIES})
-
-if (NOT ENABLE_AMQPCPP)
-    return()
-endif()
-
-if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/AMQP-CPP/CMakeLists.txt")
-    message (WARNING "submodule contrib/AMQP-CPP is missing. to fix try run: \n git submodule update --init")
-    message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal AMQP-CPP library")
-    set (USE_AMQPCPP 0)
-    return()
-endif ()
-
-set (USE_AMQPCPP 1)
-set (AMQPCPP_LIBRARY amqp-cpp ${OPENSSL_LIBRARIES})
-
-set (AMQPCPP_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/AMQP-CPP/include")
-list (APPEND AMQPCPP_INCLUDE_DIR
-    "${LIBUV_INCLUDE_DIR}"
-    "${ClickHouse_SOURCE_DIR}/contrib/AMQP-CPP")
-
-list (APPEND AMQPCPP_LIBRARY "${LIBUV_LIBRARY}")
-
-message (STATUS "Using AMQP-CPP=${USE_AMQPCPP}: ${AMQPCPP_INCLUDE_DIR} : ${AMQPCPP_LIBRARY}")
@@ -1,36 +0,0 @@
-# Needed when using Apache Avro serialization format
-option (ENABLE_AVRO "Enable Avro" ${ENABLE_LIBRARIES})
-
-if (NOT ENABLE_AVRO)
-    if (USE_INTERNAL_AVRO_LIBRARY)
-        message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal avro library with ENABLE_AVRO=OFF")
-    endif()
-    return()
-endif()
-
-option (USE_INTERNAL_AVRO_LIBRARY
-    "Set to FALSE to use system avro library instead of bundled" ON) # TODO: provide unbundled support
-
-if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/avro/lang/c++/CMakeLists.txt")
-    if (USE_INTERNAL_AVRO_LIBRARY)
-        message(WARNING "submodule contrib/avro is missing. to fix try run: \n git submodule update --init")
-        message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot find internal avro")
-        set(USE_INTERNAL_AVRO_LIBRARY 0)
-    endif()
-    set(MISSING_INTERNAL_AVRO_LIBRARY 1)
-endif()
-
-if (NOT USE_INTERNAL_AVRO_LIBRARY)
-    message (${RECONFIGURE_MESSAGE_LEVEL} "Using system avro library is not supported yet")
-elseif(NOT MISSING_INTERNAL_AVRO_LIBRARY)
-    include(cmake/find/snappy.cmake)
-    set(AVROCPP_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/avro/lang/c++/include")
-    set(AVROCPP_LIBRARY avrocpp)
-    set(USE_INTERNAL_AVRO_LIBRARY 1)
-endif ()
-
-if (AVROCPP_LIBRARY AND AVROCPP_INCLUDE_DIR)
-    set(USE_AVRO 1)
-endif()
-
-message (STATUS "Using avro=${USE_AVRO}: ${AVROCPP_INCLUDE_DIR} : ${AVROCPP_LIBRARY}")
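Nearly all of the cmake/find scripts deleted in this commit share one three-state shape: an ENABLE_* switch, a USE_INTERNAL_*_LIBRARY option, and a MISSING_INTERNAL_*_LIBRARY fallback guard. A hedged generic sketch of that shape, with a hypothetical library "foo" standing in for avro, brotli, h3, and the rest:

```cmake
# Generic shape of the removed find scripts (hypothetical library "foo").
option (ENABLE_FOO "Enable foo" ${ENABLE_LIBRARIES})
option (USE_INTERNAL_FOO_LIBRARY "Set to FALSE to use system foo instead of bundled" ${NOT_UNBUNDLED})

if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/foo/CMakeLists.txt")
    set (MISSING_INTERNAL_FOO_LIBRARY 1)   # submodule not checked out
    set (USE_INTERNAL_FOO_LIBRARY 0)
endif ()

if (NOT USE_INTERNAL_FOO_LIBRARY)
    find_library (FOO_LIBRARY foo)         # try the system copy first
    find_path (FOO_INCLUDE_DIR NAMES foo.h)
endif ()

if (FOO_LIBRARY AND FOO_INCLUDE_DIR)
    set (USE_FOO 1)                        # system copy found
elseif (NOT MISSING_INTERNAL_FOO_LIBRARY)
    set (FOO_LIBRARY foo)                  # fall back to the bundled target
    set (FOO_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/foo/include")
    set (USE_INTERNAL_FOO_LIBRARY 1)
    set (USE_FOO 1)
endif ()
```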
@@ -1,25 +0,0 @@
-if(ARCH_AMD64 OR ARCH_ARM)
-    option (ENABLE_BASE64 "Enable base64" ${ENABLE_LIBRARIES})
-elseif(ENABLE_BASE64)
-    message (${RECONFIGURE_MESSAGE_LEVEL} "base64 library is only supported on x86_64 and aarch64")
-endif()
-
-if (NOT ENABLE_BASE64)
-    return()
-endif()
-
-if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/base64/LICENSE")
-    set (MISSING_INTERNAL_BASE64_LIBRARY 1)
-    message (WARNING "submodule contrib/base64 is missing. to fix try run: \n git submodule update --init")
-endif ()
-
-if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/base64")
-    message (WARNING "submodule contrib/base64 is missing. to fix try run: \n git submodule update --init")
-else()
-    set (BASE64_LIBRARY base64)
-    set (USE_BASE64 1)
-endif()
-
-if (NOT USE_BASE64)
-    message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot enable base64")
-endif()
@@ -1,47 +0,0 @@
-option (ENABLE_BROTLI "Enable brotli" ${ENABLE_LIBRARIES})
-
-if (NOT ENABLE_BROTLI)
-    if (USE_INTERNAL_BROTLI_LIBRARY)
-        message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal brotli library with ENABLE_BROTLI=OFF")
-    endif()
-    return()
-endif()
-
-if (UNBUNDLED)
-    # Many systems ship only dynamic brotli libraries, so we fall back to the bundled one by default
-    option (USE_INTERNAL_BROTLI_LIBRARY "Set to FALSE to use system libbrotli library instead of bundled" ${USE_STATIC_LIBRARIES})
-else()
-    option (USE_INTERNAL_BROTLI_LIBRARY "Set to FALSE to use system libbrotli library instead of bundled" ON)
-endif()
-
-if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/brotli/c/include/brotli/decode.h")
-    if (USE_INTERNAL_BROTLI_LIBRARY)
-        message (WARNING "submodule contrib/brotli is missing. to fix try run: \n git submodule update --init")
-        message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot find internal brotli")
-        set (USE_INTERNAL_BROTLI_LIBRARY 0)
-    endif ()
-    set (MISSING_INTERNAL_BROTLI_LIBRARY 1)
-endif ()
-
-if(NOT USE_INTERNAL_BROTLI_LIBRARY)
-    find_library(BROTLI_LIBRARY_COMMON brotlicommon)
-    find_library(BROTLI_LIBRARY_DEC brotlidec)
-    find_library(BROTLI_LIBRARY_ENC brotlienc)
-    find_path(BROTLI_INCLUDE_DIR NAMES brotli/decode.h brotli/encode.h brotli/port.h brotli/types.h PATHS ${BROTLI_INCLUDE_PATHS})
-    if(BROTLI_LIBRARY_DEC AND BROTLI_LIBRARY_ENC AND BROTLI_LIBRARY_COMMON)
-        set(BROTLI_LIBRARY ${BROTLI_LIBRARY_DEC} ${BROTLI_LIBRARY_ENC} ${BROTLI_LIBRARY_COMMON})
-    else()
-        message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use system brotli")
-    endif()
-endif()
-
-if (BROTLI_LIBRARY AND BROTLI_INCLUDE_DIR)
-    set (USE_BROTLI 1)
-elseif (NOT MISSING_INTERNAL_BROTLI_LIBRARY)
-    set (BROTLI_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/brotli/c/include")
-    set (USE_INTERNAL_BROTLI_LIBRARY 1)
-    set (BROTLI_LIBRARY brotli)
-    set (USE_BROTLI 1)
-endif ()
-
-message (STATUS "Using brotli=${USE_BROTLI}: ${BROTLI_INCLUDE_DIR} : ${BROTLI_LIBRARY}")
@@ -1,19 +0,0 @@
-option(ENABLE_BZIP2 "Enable bzip2 compression support" ${ENABLE_LIBRARIES})
-
-if (NOT ENABLE_BZIP2)
-    message (STATUS "bzip2 compression disabled")
-    return()
-endif()
-
-if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/bzip2/bzlib.h")
-    message (WARNING "submodule contrib/bzip2 is missing. to fix try run: \n git submodule update --init")
-    message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal bzip2 library")
-    set (USE_NLP 0)
-    return()
-endif ()
-
-set (USE_BZIP2 1)
-set (BZIP2_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/bzip2")
-set (BZIP2_LIBRARY bzip2)
-
-message (STATUS "Using bzip2=${USE_BZIP2}: ${BZIP2_INCLUDE_DIR} : ${BZIP2_LIBRARY}")
@@ -1,42 +0,0 @@
-option (ENABLE_CAPNP "Enable Cap'n Proto" ${ENABLE_LIBRARIES})
-
-if (NOT ENABLE_CAPNP)
-    if (USE_INTERNAL_CAPNP_LIBRARY)
-        message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal capnproto library with ENABLE_CAPNP=OFF")
-    endif()
-    return()
-endif()
-
-option (USE_INTERNAL_CAPNP_LIBRARY "Set to FALSE to use system capnproto library instead of bundled" ${NOT_UNBUNDLED})
-
-if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/capnproto/CMakeLists.txt")
-    if(USE_INTERNAL_CAPNP_LIBRARY)
-        message(WARNING "submodule contrib/capnproto is missing. to fix try run: \n git submodule update --init")
-        message(${RECONFIGURE_MESSAGE_LEVEL} "cannot find internal capnproto")
-        set(USE_INTERNAL_CAPNP_LIBRARY 0)
-    endif()
-    set(MISSING_INTERNAL_CAPNP_LIBRARY 1)
-endif()
-
-# FIXME: refactor to use `add_library(… IMPORTED)` if possible.
-if (NOT USE_INTERNAL_CAPNP_LIBRARY)
-    find_library (KJ kj)
-    find_library (CAPNP capnp)
-    find_library (CAPNPC capnpc)
-
-    if(KJ AND CAPNP AND CAPNPC)
-        set (CAPNP_LIBRARIES ${CAPNPC} ${CAPNP} ${KJ})
-    else()
-        message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system capnproto")
-    endif()
-endif()
-
-if (CAPNP_LIBRARIES)
-    set (USE_CAPNP 1)
-elseif(NOT MISSING_INTERNAL_CAPNP_LIBRARY)
-    set (CAPNP_LIBRARIES capnpc)
-    set (USE_CAPNP 1)
-    set (USE_INTERNAL_CAPNP_LIBRARY 1)
-endif ()
-
-message (STATUS "Using capnp=${USE_CAPNP}: ${CAPNP_LIBRARIES}")
@@ -1,34 +0,0 @@
-if (MISSING_INTERNAL_LIBUV_LIBRARY)
-    message (WARNING "Disabling cassandra due to missing libuv")
-    set (ENABLE_CASSANDRA OFF CACHE INTERNAL "")
-endif()
-
-option(ENABLE_CASSANDRA "Enable Cassandra" ${ENABLE_LIBRARIES})
-
-if (NOT ENABLE_CASSANDRA)
-    return()
-endif()
-
-if (APPLE)
-    set(CMAKE_MACOSX_RPATH ON)
-endif()
-
-if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/cassandra")
-    message (ERROR "submodule contrib/cassandra is missing. to fix try run: \n git submodule update --init")
-    message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal Cassandra")
-    set (USE_CASSANDRA 0)
-    return()
-endif()
-
-set (USE_CASSANDRA 1)
-set (CASSANDRA_INCLUDE_DIR
-    "${ClickHouse_SOURCE_DIR}/contrib/cassandra/include/")
-if (MAKE_STATIC_LIBRARIES)
-    set (CASSANDRA_LIBRARY cassandra_static)
-else()
-    set (CASSANDRA_LIBRARY cassandra)
-endif()
-
-set (CASS_ROOT_DIR "${ClickHouse_SOURCE_DIR}/contrib/cassandra")
-
-message (STATUS "Using cassandra=${USE_CASSANDRA}: ${CASSANDRA_INCLUDE_DIR} : ${CASSANDRA_LIBRARY}")
@@ -31,11 +31,7 @@ if (CCACHE_FOUND AND NOT COMPILER_MATCHES_CCACHE)

     if (CCACHE_VERSION VERSION_GREATER "3.2.0" OR NOT CMAKE_CXX_COMPILER_ID STREQUAL "Clang")
         message(STATUS "Using ${CCACHE_FOUND} ${CCACHE_VERSION}")
-
-        set (CMAKE_CXX_COMPILER_LAUNCHER ${CCACHE_FOUND} ${CMAKE_CXX_COMPILER_LAUNCHER})
-        set (CMAKE_C_COMPILER_LAUNCHER ${CCACHE_FOUND} ${CMAKE_C_COMPILER_LAUNCHER})
-
-        set_property (GLOBAL PROPERTY RULE_LAUNCH_LINK ${CCACHE_FOUND})
+        set(LAUNCHER ${CCACHE_FOUND})

         # debian (debhelpers) set SOURCE_DATE_EPOCH environment variable, that is
         # filled from the debian/changelog or current time.
@@ -44,16 +40,14 @@ if (CCACHE_FOUND AND NOT COMPILER_MATCHES_CCACHE)
         # of the manifest, which do not allow to use previous cache,
         # - 4.2+ ccache ignores SOURCE_DATE_EPOCH for every file w/o __DATE__/__TIME__
         #
         # So for:
         # - 4.2+ does not require any sloppiness
         # - 4.0+ will ignore SOURCE_DATE_EPOCH environment variable.
-        if (CCACHE_VERSION VERSION_GREATER_EQUAL "4.2")
-            message(STATUS "ccache is 4.2+ no quirks for SOURCE_DATE_EPOCH required")
-        elseif (CCACHE_VERSION VERSION_GREATER_EQUAL "4.0")
+        # Exclude SOURCE_DATE_EPOCH env for ccache versions between [4.0, 4.2).
+        if (CCACHE_VERSION VERSION_GREATER_EQUAL "4.0" AND CCACHE_VERSION VERSION_LESS "4.2")
             message(STATUS "Ignore SOURCE_DATE_EPOCH for ccache")
-            set_property (GLOBAL PROPERTY RULE_LAUNCH_COMPILE "env -u SOURCE_DATE_EPOCH")
-            set_property (GLOBAL PROPERTY RULE_LAUNCH_LINK "env -u SOURCE_DATE_EPOCH")
+            set(LAUNCHER env -u SOURCE_DATE_EPOCH ${CCACHE_FOUND})
         endif()
+
+        set (CMAKE_CXX_COMPILER_LAUNCHER ${LAUNCHER} ${CMAKE_CXX_COMPILER_LAUNCHER})
+        set (CMAKE_C_COMPILER_LAUNCHER ${LAUNCHER} ${CMAKE_C_COMPILER_LAUNCHER})
     else ()
         message(${RECONFIGURE_MESSAGE_LEVEL} "Not using ${CCACHE_FOUND} ${CCACHE_VERSION} bug: https://bugzilla.samba.org/show_bug.cgi?id=8118")
     endif ()
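A hedged sketch of what the launcher plumbing above amounts to: instead of global RULE_LAUNCH_* properties, the compiler launcher variables now carry the whole prefix, so each compile is effectively invoked as `env -u SOURCE_DATE_EPOCH ccache <compiler> <args>` on affected ccache versions. The equivalent direct assignment (illustrative, assuming ccache is on PATH) would be:

```cmake
# What the LAUNCHER logic expands to for ccache in [4.0, 4.2); illustrative only.
set (CMAKE_CXX_COMPILER_LAUNCHER env -u SOURCE_DATE_EPOCH ccache)
set (CMAKE_C_COMPILER_LAUNCHER env -u SOURCE_DATE_EPOCH ccache)
```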
@@ -1,37 +0,0 @@
-option (ENABLE_CURL "Enable curl" ${ENABLE_LIBRARIES})
-
-if (NOT ENABLE_CURL)
-    if (USE_INTERNAL_CURL)
-        message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal curl with ENABLE_CURL=OFF")
-    endif()
-    return()
-endif()
-
-option (USE_INTERNAL_CURL "Use internal curl library" ${NOT_UNBUNDLED})
-
-if (NOT USE_INTERNAL_CURL)
-    find_package (CURL)
-    if (NOT CURL_FOUND)
-        message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system curl")
-    endif()
-endif()
-
-if (NOT CURL_FOUND)
-    set (USE_INTERNAL_CURL 1)
-    set (CURL_LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/curl")
-
-    # find_package(CURL) compatibility for the following packages that use
-    # find_package(CURL)/include(FindCURL):
-    # - mariadb-connector-c
-    # - aws-s3-cmake
-    # - sentry-native
-    set (CURL_FOUND ON CACHE BOOL "")
-    set (CURL_ROOT_DIR ${CURL_LIBRARY_DIR} CACHE PATH "")
-    set (CURL_INCLUDE_DIR ${CURL_LIBRARY_DIR}/include CACHE PATH "")
-    set (CURL_INCLUDE_DIRS ${CURL_LIBRARY_DIR}/include CACHE PATH "")
-    set (CURL_LIBRARY curl CACHE STRING "")
-    set (CURL_LIBRARIES ${CURL_LIBRARY} CACHE STRING "")
-    set (CURL_VERSION_STRING 7.67.0 CACHE STRING "")
-endif ()
-
-message (STATUS "Using curl: ${CURL_INCLUDE_DIRS} : ${CURL_LIBRARIES}")
@@ -1,71 +1,8 @@
-option (USE_LIBCXX "Use libc++ and libc++abi instead of libstdc++" ${NOT_UNBUNDLED})
-
-if (NOT USE_LIBCXX)
-    if (USE_INTERNAL_LIBCXX_LIBRARY)
-        message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal libcxx with USE_LIBCXX=OFF")
-    endif()
-
-    target_link_libraries(global-libs INTERFACE -l:libstdc++.a -l:libstdc++fs.a) # Always link these libraries as static
-    target_link_libraries(global-libs INTERFACE ${EXCEPTION_HANDLING_LIBRARY})
-    return()
-endif()
-
-set(USE_INTERNAL_LIBCXX_LIBRARY_DEFAULT ${NOT_UNBUNDLED})
-
-option (USE_INTERNAL_LIBCXX_LIBRARY "Disable to use system libcxx and libcxxabi libraries instead of bundled"
-    ${USE_INTERNAL_LIBCXX_LIBRARY_DEFAULT})
-
-if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libcxx/CMakeLists.txt")
-    if (USE_INTERNAL_LIBCXX_LIBRARY)
-        message(WARNING "submodule contrib/libcxx is missing. to fix try run: \n git submodule update --init")
-        message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal libcxx")
-        set(USE_INTERNAL_LIBCXX_LIBRARY 0)
-    endif()
-    set(USE_INTERNAL_LIBCXX_LIBRARY_DEFAULT 0)
-    set(MISSING_INTERNAL_LIBCXX_LIBRARY 1)
-endif()
-
 set (CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -D_LIBCPP_DEBUG=0") # More checks in debug build.

-if (NOT USE_INTERNAL_LIBCXX_LIBRARY)
-    find_library (LIBCXX_LIBRARY c++)
-    find_library (LIBCXXFS_LIBRARY c++fs)
-    find_library (LIBCXXABI_LIBRARY c++abi)
+add_subdirectory(contrib/libcxxabi-cmake)
+add_subdirectory(contrib/libcxx-cmake)

-    if(LIBCXX_LIBRARY AND LIBCXXABI_LIBRARY) # c++fs is now a part of the libc++
-        set (HAVE_LIBCXX 1)
-    else ()
-        message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system libcxx")
-    endif()
+# Exception handling library is embedded into libcxxabi.

-    if(NOT LIBCXXFS_LIBRARY)
-        set(LIBCXXFS_LIBRARY ${LIBCXX_LIBRARY})
-    endif()
-
-    set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -stdlib=libc++")
-
-    target_link_libraries(global-libs INTERFACE ${EXCEPTION_HANDLING_LIBRARY})
-endif ()
-
-if (NOT HAVE_LIBCXX AND NOT MISSING_INTERNAL_LIBCXX_LIBRARY)
-    set (LIBCXX_LIBRARY cxx)
-    set (LIBCXXABI_LIBRARY cxxabi)
-    add_subdirectory(contrib/libcxxabi-cmake)
-    add_subdirectory(contrib/libcxx-cmake)
-
-    # Exception handling library is embedded into libcxxabi.
-
-    set (HAVE_LIBCXX 1)
-    set(USE_INTERNAL_LIBCXX_LIBRARY 1)
-endif ()
-
-if (HAVE_LIBCXX)
-    target_link_libraries(global-libs INTERFACE ${LIBCXX_LIBRARY} ${LIBCXXABI_LIBRARY} ${LIBCXXFS_LIBRARY})
-
-    message (STATUS "Using libcxx: ${LIBCXX_LIBRARY}")
-    message (STATUS "Using libcxxfs: ${LIBCXXFS_LIBRARY}")
-    message (STATUS "Using libcxxabi: ${LIBCXXABI_LIBRARY}")
-else()
-    target_link_libraries(global-libs INTERFACE -l:libstdc++.a -l:libstdc++fs.a) # Always link these libraries as static
-    target_link_libraries(global-libs INTERFACE ${EXCEPTION_HANDLING_LIBRARY})
-endif()
+target_link_libraries(global-libs INTERFACE cxx cxxabi)

@@ -1,23 +0,0 @@
-if (${ENABLE_LIBRARIES} AND ${ENABLE_KRB5})
-    set (DEFAULT_ENABLE_CYRUS_SASL 1)
-else()
-    set (DEFAULT_ENABLE_CYRUS_SASL 0)
-endif()
-
-OPTION(ENABLE_CYRUS_SASL "Enable cyrus-sasl" ${DEFAULT_ENABLE_CYRUS_SASL})
-if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/cyrus-sasl/README")
-    message (WARNING "submodule contrib/cyrus-sasl is missing. to fix try run: \n git submodule update --init")
-    set (ENABLE_CYRUS_SASL 0)
-endif ()
-
-if (ENABLE_CYRUS_SASL)
-
-    set (USE_CYRUS_SASL 1)
-    set (CYRUS_SASL_LIBRARY sasl2)
-
-    set (CYRUS_SASL_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/cyrus-sasl/include")
-
-
-endif ()
-
-message (STATUS "Using cyrus-sasl: krb5=${USE_KRB5}: ${CYRUS_SASL_INCLUDE_DIR} : ${CYRUS_SASL_LIBRARY}")
@@ -1,29 +0,0 @@
-option (ENABLE_DATASKETCHES "Enable DataSketches" ${ENABLE_LIBRARIES})
-
-if (ENABLE_DATASKETCHES)
-
-option (USE_INTERNAL_DATASKETCHES_LIBRARY "Set to FALSE to use system DataSketches library instead of bundled" ${NOT_UNBUNDLED})
-
-if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/datasketches-cpp/theta/CMakeLists.txt")
-    if (USE_INTERNAL_DATASKETCHES_LIBRARY)
-        message(WARNING "submodule contrib/datasketches-cpp is missing. to fix try run: \n git submodule update --init")
-    endif()
-    set(MISSING_INTERNAL_DATASKETCHES_LIBRARY 1)
-    set(USE_INTERNAL_DATASKETCHES_LIBRARY 0)
-endif()
-
-if (USE_INTERNAL_DATASKETCHES_LIBRARY)
-    set(DATASKETCHES_LIBRARY theta)
-    set(DATASKETCHES_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/datasketches-cpp/common/include" "${ClickHouse_SOURCE_DIR}/contrib/datasketches-cpp/theta/include")
-elseif (NOT MISSING_INTERNAL_DATASKETCHES_LIBRARY)
-    find_library(DATASKETCHES_LIBRARY theta)
-    find_path(DATASKETCHES_INCLUDE_DIR NAMES theta_sketch.hpp PATHS ${DATASKETCHES_INCLUDE_PATHS})
-endif()
-
-if (DATASKETCHES_LIBRARY AND DATASKETCHES_INCLUDE_DIR)
-    set(USE_DATASKETCHES 1)
-endif()
-
-endif()
-
-message (STATUS "Using datasketches=${USE_DATASKETCHES}: ${DATASKETCHES_INCLUDE_DIR} : ${DATASKETCHES_LIBRARY}")
@@ -1,6 +0,0 @@
-if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/fast_float/include/fast_float/fast_float.h")
-    message (FATAL_ERROR "submodule contrib/fast_float is missing. to fix try run: \n git submodule update --init")
-endif ()
-
-set(FAST_FLOAT_LIBRARY fast_float)
-set(FAST_FLOAT_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/fast_float/include/")
@@ -1,24 +0,0 @@
-if(ARCH_AMD64 AND NOT OS_FREEBSD AND NOT OS_DARWIN)
-    option(ENABLE_FASTOPS "Enable fast vectorized mathematical functions library by Mikhail Parakhin" ${ENABLE_LIBRARIES})
-elseif(ENABLE_FASTOPS)
-    message (${RECONFIGURE_MESSAGE_LEVEL} "Fastops library is supported on x86_64 only, and not FreeBSD or Darwin")
-endif()
-
-if(NOT ENABLE_FASTOPS)
-    set(USE_FASTOPS 0)
-    return()
-endif()
-
-if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/fastops/fastops/fastops.h")
-    message(WARNING "submodule contrib/fastops is missing. to fix try run: \n git submodule update --init")
-    message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal fastops library")
-    set(MISSING_INTERNAL_FASTOPS_LIBRARY 1)
-endif()
-
-if(NOT MISSING_INTERNAL_FASTOPS_LIBRARY)
-    set(USE_FASTOPS 1)
-    set(FASTOPS_INCLUDE_DIR ${ClickHouse_SOURCE_DIR}/contrib/fastops/)
-    set(FASTOPS_LIBRARY fastops)
-endif()
-
-message(STATUS "Using fastops=${USE_FASTOPS}: ${FASTOPS_INCLUDE_DIR} : ${FASTOPS_LIBRARY}")
@@ -1,8 +0,0 @@
-# StorageFileLog is only supported on Linux
-if (OS_LINUX)
-    set (USE_FILELOG 1)
-    message (STATUS "Using StorageFileLog = 1")
-else()
-    message(STATUS "StorageFileLog is only supported on Linux")
-endif ()
-
@@ -1,16 +0,0 @@
-if(NOT DEFINED ENABLE_GPERF OR ENABLE_GPERF)
-    # Check if gperf was installed
-    find_program(GPERF gperf)
-    if(GPERF)
-        option(ENABLE_GPERF "Use gperf function hash generator tool" ${ENABLE_LIBRARIES})
-    endif()
-endif()
-
-if (ENABLE_GPERF)
-    if(NOT GPERF)
-        message(FATAL_ERROR "Could not find the program gperf")
-    endif()
-    set(USE_GPERF 1)
-endif()
-
-message(STATUS "Using gperf=${USE_GPERF}: ${GPERF}")
@@ -1,72 +0,0 @@
-# disable grpc due to conflicts of abseil (required by grpc) dynamic annotations with libtsan.a
-if (SANITIZE STREQUAL "thread" AND COMPILER_GCC)
-    set(ENABLE_GRPC_DEFAULT OFF)
-else()
-    set(ENABLE_GRPC_DEFAULT ${ENABLE_LIBRARIES})
-endif()
-
-option(ENABLE_GRPC "Use gRPC" ${ENABLE_GRPC_DEFAULT})
-
-if(NOT ENABLE_GRPC)
-    if(USE_INTERNAL_GRPC_LIBRARY)
-        message(${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal gRPC library with ENABLE_GRPC=OFF")
-    endif()
-    return()
-endif()
-
-if(NOT USE_PROTOBUF)
-    message(WARNING "Cannot use gRPC library without protobuf")
-endif()
-
-# Normally we use the internal gRPC framework.
-# You can set USE_INTERNAL_GRPC_LIBRARY to OFF to force using the external gRPC framework, which should be installed in the system in this case.
-# The external gRPC framework can be installed in the system by running
-# sudo apt-get install libgrpc++-dev protobuf-compiler-grpc
-option(USE_INTERNAL_GRPC_LIBRARY "Set to FALSE to use system gRPC library instead of bundled. (Experimental. Set to OFF on your own risk)" ${NOT_UNBUNDLED})
-
-if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/grpc/CMakeLists.txt")
-    if(USE_INTERNAL_GRPC_LIBRARY)
-        message(WARNING "submodule contrib/grpc is missing. to fix try run: \n git submodule update --init")
-        message(${RECONFIGURE_MESSAGE_LEVEL} "Can't use internal grpc")
-        set(USE_INTERNAL_GRPC_LIBRARY 0)
-    endif()
-    set(MISSING_INTERNAL_GRPC_LIBRARY 1)
-endif()
-
-if(USE_SSL)
-    set(gRPC_USE_UNSECURE_LIBRARIES FALSE)
-else()
-    set(gRPC_USE_UNSECURE_LIBRARIES TRUE)
-endif()
-
-if(NOT USE_INTERNAL_GRPC_LIBRARY)
-    find_package(gRPC)
-    if(NOT gRPC_INCLUDE_DIRS OR NOT gRPC_LIBRARIES)
-        message(${RECONFIGURE_MESSAGE_LEVEL} "Can't find system gRPC library")
-        set(EXTERNAL_GRPC_LIBRARY_FOUND 0)
-    elseif(NOT gRPC_CPP_PLUGIN)
-        message(${RECONFIGURE_MESSAGE_LEVEL} "Can't find system grpc_cpp_plugin")
-        set(EXTERNAL_GRPC_LIBRARY_FOUND 0)
-    else()
-        set(EXTERNAL_GRPC_LIBRARY_FOUND 1)
-        set(USE_GRPC 1)
-    endif()
-endif()
-
-if(NOT EXTERNAL_GRPC_LIBRARY_FOUND AND NOT MISSING_INTERNAL_GRPC_LIBRARY)
-    set(gRPC_INCLUDE_DIRS "${ClickHouse_SOURCE_DIR}/contrib/grpc/include")
-    if(gRPC_USE_UNSECURE_LIBRARIES)
-        set(gRPC_LIBRARIES grpc_unsecure grpc++_unsecure)
-    else()
-        set(gRPC_LIBRARIES grpc grpc++)
-    endif()
-    set(gRPC_CPP_PLUGIN $<TARGET_FILE:grpc_cpp_plugin>)
-    set(gRPC_PYTHON_PLUGIN $<TARGET_FILE:grpc_python_plugin>)
-
-    include("${ClickHouse_SOURCE_DIR}/contrib/grpc-cmake/protobuf_generate_grpc.cmake")
-
-    set(USE_INTERNAL_GRPC_LIBRARY 1)
-    set(USE_GRPC 1)
-endif()
-
-message(STATUS "Using gRPC=${USE_GRPC}: ${gRPC_INCLUDE_DIRS} : ${gRPC_LIBRARIES} : ${gRPC_CPP_PLUGIN}")
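Per the comments in the removed script, forcing the external gRPC instead of the bundled submodule would look like the following sketch; the apt-get package names come from the script's own comment, and the configure invocation is illustrative:

```cmake
# Prerequisite (from the script's comment):
#     sudo apt-get install libgrpc++-dev protobuf-compiler-grpc
# Then configure with the system gRPC, e.g.:
#     cmake -DUSE_INTERNAL_GRPC_LIBRARY=OFF ..
# which is equivalent to pre-seeding the cache entry:
set(USE_INTERNAL_GRPC_LIBRARY OFF CACHE BOOL "Use the system gRPC framework instead of the bundled one")
```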
@@ -1,40 +0,0 @@
-# included only if ENABLE_TESTS=1
-
-option (USE_INTERNAL_GTEST_LIBRARY "Set to FALSE to use system Google Test instead of bundled" ${NOT_UNBUNDLED})
-
-if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/googletest/googletest/CMakeLists.txt")
-    if (USE_INTERNAL_GTEST_LIBRARY)
-        message (WARNING "submodule contrib/googletest is missing. to fix try run: \n git submodule update --init")
-        message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal gtest")
-        set (USE_INTERNAL_GTEST_LIBRARY 0)
-    endif ()
-
-    set (MISSING_INTERNAL_GTEST_LIBRARY 1)
-endif ()
-
-if(NOT USE_INTERNAL_GTEST_LIBRARY)
-    # TODO: autodetect of GTEST_SRC_DIR by EXISTS /usr/src/googletest/CMakeLists.txt
-    if(NOT GTEST_SRC_DIR)
-        find_package(GTest)
-        if (NOT GTEST_INCLUDE_DIRS)
-            message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system Google Test")
-        endif()
-    endif()
-endif()
-
-if (NOT GTEST_SRC_DIR AND NOT GTEST_INCLUDE_DIRS AND NOT MISSING_INTERNAL_GTEST_LIBRARY)
-    set (USE_INTERNAL_GTEST_LIBRARY 1)
-    set (GTEST_MAIN_LIBRARIES gtest_main)
-    set (GTEST_LIBRARIES gtest)
-    set (GTEST_BOTH_LIBRARIES ${GTEST_MAIN_LIBRARIES} ${GTEST_LIBRARIES})
-    set (GTEST_INCLUDE_DIRS ${ClickHouse_SOURCE_DIR}/contrib/googletest/googletest)
-elseif(USE_INTERNAL_GTEST_LIBRARY)
-    message (${RECONFIGURE_MESSAGE_LEVEL} "Wouldn't use internal Google Test library")
-    set (USE_INTERNAL_GTEST_LIBRARY 0)
-endif ()
-
-if((GTEST_INCLUDE_DIRS AND GTEST_BOTH_LIBRARIES) OR GTEST_SRC_DIR)
-    set(USE_GTEST 1)
-endif()
-
-message (STATUS "Using gtest=${USE_GTEST}: ${GTEST_INCLUDE_DIRS} : ${GTEST_BOTH_LIBRARIES} : ${GTEST_SRC_DIR}")
@@ -1,39 +0,0 @@
-option (ENABLE_H3 "Enable H3" ${ENABLE_LIBRARIES})
-if(NOT ENABLE_H3)
-    if(USE_INTERNAL_H3_LIBRARY)
-        message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal h3 library with ENABLE_H3=OFF")
-    endif ()
-    return()
-endif()
-
-option(USE_INTERNAL_H3_LIBRARY "Set to FALSE to use system h3 library instead of bundled"
-    ON) # we are not aware of any distribution that provides h3 package
-
-if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/h3/src/h3lib/include/h3Index.h")
-    if(USE_INTERNAL_H3_LIBRARY)
-        message(WARNING "submodule contrib/h3 is missing. to fix try run: \n git submodule update --init")
-        message(${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal h3 library")
-        set(USE_INTERNAL_H3_LIBRARY 0)
-    endif()
-    set(MISSING_INTERNAL_H3_LIBRARY 1)
-endif()
-
-if(NOT USE_INTERNAL_H3_LIBRARY)
-    find_library(H3_LIBRARY h3)
-    find_path(H3_INCLUDE_DIR NAMES h3/h3api.h PATHS ${H3_INCLUDE_PATHS})
-
-    if(NOT H3_LIBRARY OR NOT H3_INCLUDE_DIR)
-        message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system h3 library")
-    endif()
-endif()
-
-if (H3_LIBRARY AND H3_INCLUDE_DIR)
-    set (USE_H3 1)
-elseif(NOT MISSING_INTERNAL_H3_LIBRARY)
-    set (H3_LIBRARY h3)
-    set (H3_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/h3/src/h3lib/include")
-    set (USE_H3 1)
-    set (USE_INTERNAL_H3_LIBRARY 1)
-endif()
-
-message (STATUS "Using h3=${USE_H3}: ${H3_INCLUDE_DIR} : ${H3_LIBRARY}")
@@ -1,45 +0,0 @@
-if(NOT ARCH_ARM AND NOT OS_FREEBSD AND NOT APPLE AND USE_PROTOBUF AND NOT ARCH_PPC64LE)
-    option(ENABLE_HDFS "Enable HDFS" ${ENABLE_LIBRARIES})
-elseif(ENABLE_HDFS OR USE_INTERNAL_HDFS3_LIBRARY)
-    message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use HDFS3 with current configuration")
-endif()
-
-if(NOT ENABLE_HDFS)
-    if(USE_INTERNAL_HDFS3_LIBRARY)
-        message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal HDFS3 library with ENABLE_HDFS3=OFF")
-    endif()
-    return()
-endif()
-
-option(USE_INTERNAL_HDFS3_LIBRARY "Set to FALSE to use system HDFS3 instead of bundled (experimental - set to OFF on your own risk)"
-    ON) # We don't know any linux distribution with package for it
-
-if(NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libhdfs3/include/hdfs/hdfs.h")
-    if(USE_INTERNAL_HDFS3_LIBRARY)
-        message(WARNING "submodule contrib/libhdfs3 is missing. to fix try run: \n git submodule update --init")
-        message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal HDFS3 library")
-        set(USE_INTERNAL_HDFS3_LIBRARY 0)
-    endif()
-    set(MISSING_INTERNAL_HDFS3_LIBRARY 1)
-endif()
-
-if(NOT USE_INTERNAL_HDFS3_LIBRARY)
-    find_library(HDFS3_LIBRARY hdfs3)
-    find_path(HDFS3_INCLUDE_DIR NAMES hdfs/hdfs.h PATHS ${HDFS3_INCLUDE_PATHS})
-    if(NOT HDFS3_LIBRARY OR NOT HDFS3_INCLUDE_DIR)
-        message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot find system HDFS3 library")
-    endif()
-endif()
-
-if(HDFS3_LIBRARY AND HDFS3_INCLUDE_DIR)
-    set(USE_HDFS 1)
-elseif(NOT MISSING_INTERNAL_HDFS3_LIBRARY AND LIBGSASL_LIBRARY AND LIBXML2_LIBRARIES)
-    set(HDFS3_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/libhdfs3/include")
-    set(HDFS3_LIBRARY hdfs3)
-    set(USE_INTERNAL_HDFS3_LIBRARY 1)
-    set(USE_HDFS 1)
-else()
-    message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot enable HDFS3")
-endif()
-
-message(STATUS "Using hdfs3=${USE_HDFS}: ${HDFS3_INCLUDE_DIR} : ${HDFS3_LIBRARY}")
@@ -1,51 +0,0 @@
-if (OS_LINUX)
-    option(ENABLE_ICU "Enable ICU" ${ENABLE_LIBRARIES})
-else ()
-    option(ENABLE_ICU "Enable ICU" 0)
-endif ()
-
-if (NOT ENABLE_ICU)
-    if(USE_INTERNAL_ICU_LIBRARY)
-        message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal icu library with ENABLE_ICU=OFF")
-    endif()
-    message(STATUS "Build without ICU (support for collations and charset conversion functions will be disabled)")
-    return()
-endif()
-
-option (USE_INTERNAL_ICU_LIBRARY "Set to FALSE to use system ICU library instead of bundled" ${NOT_UNBUNDLED})
-
-if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/icu/icu4c/LICENSE")
-    if (USE_INTERNAL_ICU_LIBRARY)
-        message (WARNING "submodule contrib/icu is missing. to fix try run: \n git submodule update --init")
-        message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal ICU")
-        set (USE_INTERNAL_ICU_LIBRARY 0)
-    endif ()
-    set (MISSING_INTERNAL_ICU_LIBRARY 1)
-endif ()
-
-if(NOT USE_INTERNAL_ICU_LIBRARY)
-    if (APPLE)
-        set(ICU_ROOT "/usr/local/opt/icu4c" CACHE STRING "")
-    endif()
-    find_package(ICU COMPONENTS i18n uc data) # TODO: remove Modules/FindICU.cmake after cmake 3.7
-    #set (ICU_LIBRARIES ${ICU_I18N_LIBRARY} ${ICU_UC_LIBRARY} ${ICU_DATA_LIBRARY} CACHE STRING "")
-    if(ICU_FOUND)
-        set(USE_ICU 1)
-    else()
-        message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system ICU")
-    endif()
-endif()
-
-if (ICU_LIBRARY AND ICU_INCLUDE_DIR)
-    set (USE_ICU 1)
-elseif (NOT MISSING_INTERNAL_ICU_LIBRARY)
-    set (USE_INTERNAL_ICU_LIBRARY 1)
-    set (ICU_LIBRARIES icui18n icuuc icudata)
-    set (USE_ICU 1)
-endif ()
-
-if(USE_ICU)
-    message(STATUS "Using icu=${USE_ICU}: ${ICU_INCLUDE_DIR} : ${ICU_LIBRARIES}")
-else()
-    message(STATUS "Build without ICU (support for collations and charset conversion functions will be disabled)")
-endif()
@@ -1,25 +0,0 @@
-OPTION(ENABLE_KRB5 "Enable krb5" ${ENABLE_LIBRARIES})
-
-if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/krb5/README")
-    message (WARNING "submodule contrib/krb5 is missing. to fix try run: \n git submodule update --init")
-    set (ENABLE_KRB5 0)
-endif ()
-
-if (NOT CMAKE_SYSTEM_NAME MATCHES "Linux" AND NOT (CMAKE_SYSTEM_NAME MATCHES "Darwin" AND NOT CMAKE_CROSSCOMPILING))
-    message (WARNING "krb5 disabled in non-Linux and non-native-Darwin environments")
-    set (ENABLE_KRB5 0)
-endif ()
-
-if (ENABLE_KRB5)
-
-    set (USE_KRB5 1)
-    set (KRB5_LIBRARY krb5)
-
-    set (KRB5_INCLUDE_DIR
-        "${ClickHouse_SOURCE_DIR}/contrib/krb5/src/include"
-        "${ClickHouse_BINARY_DIR}/contrib/krb5-cmake/include"
-    )
-
-endif ()
-
-message (STATUS "Using krb5=${USE_KRB5}: ${KRB5_INCLUDE_DIR} : ${KRB5_LIBRARY}")
|
@ -1,104 +0,0 @@
|
||||
if (UNBUNDLED AND USE_STATIC_LIBRARIES)
|
||||
set (ENABLE_LDAP OFF CACHE INTERNAL "")
|
||||
endif()
|
||||
|
||||
option (ENABLE_LDAP "Enable LDAP" ${ENABLE_LIBRARIES})
|
||||
|
||||
if (NOT ENABLE_LDAP)
|
||||
if(USE_INTERNAL_LDAP_LIBRARY)
|
||||
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal LDAP library with ENABLE_LDAP=OFF")
|
||||
endif ()
|
||||
return()
|
||||
endif()
|
||||
|
||||
option (USE_INTERNAL_LDAP_LIBRARY "Set to FALSE to use system *LDAP library instead of bundled" ${NOT_UNBUNDLED})
|
||||
|
||||
if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/openldap/README")
|
||||
if (USE_INTERNAL_LDAP_LIBRARY)
|
||||
message (WARNING "Submodule contrib/openldap is missing. To fix try running:\n git submodule update --init")
|
||||
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal LDAP library")
|
||||
endif ()
|
||||
|
||||
set (USE_INTERNAL_LDAP_LIBRARY 0)
|
||||
set (MISSING_INTERNAL_LDAP_LIBRARY 1)
|
||||
endif ()
|
||||
|
||||
set (OPENLDAP_USE_STATIC_LIBS ${USE_STATIC_LIBRARIES})
|
||||
set (OPENLDAP_USE_REENTRANT_LIBS 1)
|
||||
|
||||
if (NOT USE_INTERNAL_LDAP_LIBRARY)
|
||||
if (OPENLDAP_USE_STATIC_LIBS)
|
||||
message (WARNING "Unable to use external static OpenLDAP libraries, falling back to the bundled version.")
|
||||
message (${RECONFIGURE_MESSAGE_LEVEL} "Unable to use external OpenLDAP")
|
||||
set (USE_INTERNAL_LDAP_LIBRARY 1)
|
||||
else ()
|
||||
if (APPLE AND NOT OPENLDAP_ROOT_DIR)
|
||||
set (OPENLDAP_ROOT_DIR "/usr/local/opt/openldap")
|
||||
endif ()
|
||||
|
||||
find_package (OpenLDAP)
|
||||
|
||||
if (NOT OPENLDAP_FOUND)
|
||||
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system OpenLDAP")
|
||||
endif()
|
||||
endif ()
|
||||
endif ()
|
||||
|
||||
if (NOT OPENLDAP_FOUND AND NOT MISSING_INTERNAL_LDAP_LIBRARY)
|
||||
string (TOLOWER "${CMAKE_SYSTEM_NAME}" _system_name)
|
||||
string (TOLOWER "${CMAKE_SYSTEM_PROCESSOR}" _system_processor)
|
||||
|
||||
if (
|
||||
"${_system_processor}" STREQUAL "amd64" OR
|
||||
"${_system_processor}" STREQUAL "x64"
|
||||
)
|
||||
set (_system_processor "x86_64")
|
||||
elseif (
|
||||
"${_system_processor}" STREQUAL "arm64"
|
||||
)
|
||||
set (_system_processor "aarch64")
|
||||
endif ()
|
||||
|
||||
if (
|
||||
( "${_system_name}" STREQUAL "linux" AND "${_system_processor}" STREQUAL "x86_64" ) OR
|
||||
( "${_system_name}" STREQUAL "linux" AND "${_system_processor}" STREQUAL "aarch64" ) OR
|
||||
( "${_system_name}" STREQUAL "linux" AND "${_system_processor}" STREQUAL "ppc64le" ) OR
|
||||
( "${_system_name}" STREQUAL "freebsd" AND "${_system_processor}" STREQUAL "x86_64" ) OR
|
||||
( "${_system_name}" STREQUAL "freebsd" AND "${_system_processor}" STREQUAL "aarch64" ) OR
|
||||
( "${_system_name}" STREQUAL "darwin" AND "${_system_processor}" STREQUAL "x86_64" ) OR
|
||||
( "${_system_name}" STREQUAL "darwin" AND "${_system_processor}" STREQUAL "aarch64" )
|
||||
)
|
||||
set (_ldap_supported_platform TRUE)
|
||||
endif ()
|
||||
|
||||
if (NOT _ldap_supported_platform)
|
||||
message (WARNING "LDAP support using the bundled library is not implemented for ${CMAKE_SYSTEM_NAME} ${CMAKE_SYSTEM_PROCESSOR} platform.")
|
||||
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot enable LDAP support")
|
||||
elseif (NOT USE_SSL)
|
||||
message (WARNING "LDAP support using the bundled library is not possible if SSL is not used.")
|
||||
message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot enable LDAP support")
|
||||
else ()
|
||||
set (USE_INTERNAL_LDAP_LIBRARY 1)
|
||||
set (OPENLDAP_ROOT_DIR "${ClickHouse_SOURCE_DIR}/contrib/openldap")
|
||||
set (OPENLDAP_INCLUDE_DIRS
|
||||
"${ClickHouse_SOURCE_DIR}/contrib/openldap-cmake/${_system_name}_${_system_processor}/include"
|
||||
"${ClickHouse_SOURCE_DIR}/contrib/openldap/include"
|
||||
)
|
||||
# Below, 'ldap'/'ldap_r' and 'lber' will be resolved to
|
||||
# the targets defined in contrib/openldap-cmake/CMakeLists.txt
|
||||
if (OPENLDAP_USE_REENTRANT_LIBS)
|
||||
set (OPENLDAP_LDAP_LIBRARY "ldap_r")
|
||||
else ()
|
||||
set (OPENLDAP_LDAP_LIBRARY "ldap")
|
||||
endif()
|
||||
set (OPENLDAP_LBER_LIBRARY "lber")
|
||||
set (OPENLDAP_LIBRARIES ${OPENLDAP_LDAP_LIBRARY} ${OPENLDAP_LBER_LIBRARY})
|
||||
set (OPENLDAP_FOUND 1)
|
||||
endif ()
|
||||
endif ()
|
||||
|
||||
if (OPENLDAP_FOUND)
|
||||
set (USE_LDAP 1)
|
||||
endif ()
|
||||
|
||||
message (STATUS "Using ldap=${USE_LDAP}: ${OPENLDAP_INCLUDE_DIRS} : ${OPENLDAP_LIBRARIES}")
|
@ -1,45 +0,0 @@
|
||||
option(ENABLE_GSASL_LIBRARY "Enable gsasl library" ${ENABLE_LIBRARIES})

if (NOT ENABLE_GSASL_LIBRARY)
    if (USE_INTERNAL_LIBGSASL_LIBRARY)
        message (${RECONFIGURE_MESSAGE_LEVEL} "Cannot use internal libgsasl library with ENABLE_GSASL_LIBRARY=OFF")
    endif ()
    return()
endif ()

if (UNBUNDLED)
    # When USE_STATIC_LIBRARIES is set, we usually need to pull in a lot of dependencies for libgsasl.
    option (USE_INTERNAL_LIBGSASL_LIBRARY "Set to FALSE to use system libgsasl library instead of bundled" ${USE_STATIC_LIBRARIES})
else ()
    option (USE_INTERNAL_LIBGSASL_LIBRARY "Set to FALSE to use system libgsasl library instead of bundled" ON)
endif ()

if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libgsasl/src/gsasl.h")
    if (USE_INTERNAL_LIBGSASL_LIBRARY)
        message (WARNING "submodule contrib/libgsasl is missing. To fix, try running:\n git submodule update --init")
        message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal libgsasl")
        set (USE_INTERNAL_LIBGSASL_LIBRARY 0)
    endif ()
    set (MISSING_INTERNAL_LIBGSASL_LIBRARY 1)
endif ()

if (NOT USE_INTERNAL_LIBGSASL_LIBRARY)
    find_library (LIBGSASL_LIBRARY gsasl)
    find_path (LIBGSASL_INCLUDE_DIR NAMES gsasl.h PATHS ${LIBGSASL_INCLUDE_PATHS})
    if (NOT LIBGSASL_LIBRARY OR NOT LIBGSASL_INCLUDE_DIR)
        message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find system libgsasl")
    endif ()
endif ()

if (LIBGSASL_LIBRARY AND LIBGSASL_INCLUDE_DIR)
elseif (NOT MISSING_INTERNAL_LIBGSASL_LIBRARY)
    set (LIBGSASL_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/libgsasl/src" "${ClickHouse_SOURCE_DIR}/contrib/libgsasl/linux_x86_64/include")
    set (USE_INTERNAL_LIBGSASL_LIBRARY 1)
    set (LIBGSASL_LIBRARY libgsasl)
endif ()

if (LIBGSASL_LIBRARY AND LIBGSASL_INCLUDE_DIR)
    set (USE_LIBGSASL 1)
endif ()

message (STATUS "Using libgsasl=${USE_LIBGSASL}: ${LIBGSASL_INCLUDE_DIR} : ${LIBGSASL_LIBRARY}")
@ -1,31 +0,0 @@
option(ENABLE_LIBPQXX "Enable libpqxx" ${ENABLE_LIBRARIES})

if (NOT ENABLE_LIBPQXX)
    return()
endif ()

if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libpqxx/CMakeLists.txt")
    message (WARNING "submodule contrib/libpqxx is missing. To fix, try running:\n git submodule update --init")
    message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal libpqxx library")
    set (USE_LIBPQXX 0)
    return()
endif ()

if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libpq/include")
    # Note: CMake has no ERROR message mode; WARNING matches the non-fatal
    # set-and-return handling used for the libpqxx submodule above.
    message (WARNING "submodule contrib/libpq is missing. To fix, try running:\n git submodule update --init")
    message (${RECONFIGURE_MESSAGE_LEVEL} "Can't find internal libpq needed for libpqxx")
    set (USE_LIBPQXX 0)
    return()
endif ()

if (NOT USE_INTERNAL_SSL_LIBRARY)
    set (USE_LIBPQXX 0)
else ()
    set (USE_LIBPQXX 1)
    set (LIBPQXX_LIBRARY libpqxx)
    set (LIBPQ_LIBRARY libpq)
    set (LIBPQXX_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/libpqxx/include")
    set (LIBPQ_ROOT_DIR "${ClickHouse_SOURCE_DIR}/contrib/libpq")
    message (STATUS "Using libpqxx=${USE_LIBPQXX}: ${LIBPQXX_INCLUDE_DIR} : ${LIBPQXX_LIBRARY}")
    message (STATUS "Using libpq: ${LIBPQ_ROOT_DIR} : ${LIBPQ_INCLUDE_DIR} : ${LIBPQ_LIBRARY}")
endif ()
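Note that the script only enables libpqxx when the internal SSL library is in use, and that LIBPQ_INCLUDE_DIR is printed above without being set here. A minimal consumption sketch, assuming a hypothetical target postgres_bridge and deriving the libpq headers from LIBPQ_ROOT_DIR:

# Sketch only; 'postgres_bridge' and the include path below are assumptions.
if (USE_LIBPQXX)
    target_include_directories (postgres_bridge SYSTEM PRIVATE ${LIBPQXX_INCLUDE_DIR} "${LIBPQ_ROOT_DIR}/include")
    # Link both the C++ wrapper (libpqxx) and the underlying C client (libpq).
    target_link_libraries (postgres_bridge PRIVATE ${LIBPQXX_LIBRARY} ${LIBPQ_LIBRARY})
endif ()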
@ -1,11 +0,0 @@
option(USE_LIBPROTOBUF_MUTATOR "Enable libprotobuf-mutator" ${ENABLE_FUZZING})

if (NOT USE_LIBPROTOBUF_MUTATOR)
    return()
endif ()

set(LibProtobufMutator_SOURCE_DIR "${ClickHouse_SOURCE_DIR}/contrib/libprotobuf-mutator")

if (NOT EXISTS "${LibProtobufMutator_SOURCE_DIR}/README.md")
    # CMake has no ERROR message mode; FATAL_ERROR stops configuration,
    # which matches the intent here since nothing follows this check.
    message (FATAL_ERROR "submodule contrib/libprotobuf-mutator is missing. To fix, try running:\n git submodule update --init")
endif ()
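This script only records the submodule location; wiring it into a fuzz target happens elsewhere. A rough sketch, assuming a hypothetical fuzzer target my_proto_fuzzer and assuming the upstream project defines a protobuf-mutator-libfuzzer target (an assumption about contrib/libprotobuf-mutator, not something this script guarantees):

# Sketch only: both target names below are assumptions.
add_subdirectory ("${LibProtobufMutator_SOURCE_DIR}" libprotobuf-mutator)
target_link_libraries (my_proto_fuzzer PRIVATE protobuf-mutator-libfuzzer)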
@ -1,22 +0,0 @@
if (OS_DARWIN AND COMPILER_GCC)
    message (WARNING "libuv cannot be built with GCC on macOS due to a bug: https://gcc.gnu.org/bugzilla/show_bug.cgi?id=93082")
    set (MISSING_INTERNAL_LIBUV_LIBRARY 1)
    return()
endif ()

if (NOT EXISTS "${ClickHouse_SOURCE_DIR}/contrib/libuv")
    message (WARNING "submodule contrib/libuv is missing. To fix, try running:\n git submodule update --init")
    set (MISSING_INTERNAL_LIBUV_LIBRARY 1)
    return()
endif ()

if (MAKE_STATIC_LIBRARIES)
    set (LIBUV_LIBRARY uv_a)
else ()
    set (LIBUV_LIBRARY uv)
endif ()

set (LIBUV_ROOT_DIR "${ClickHouse_SOURCE_DIR}/contrib/libuv")
set (LIBUV_INCLUDE_DIR "${LIBUV_ROOT_DIR}/include")

message (STATUS "Using libuv: ${LIBUV_ROOT_DIR} : ${LIBUV_LIBRARY}")
Some files were not shown because too many files have changed in this diff.