Mirror of https://github.com/ClickHouse/ClickHouse.git (synced 2024-11-10 01:25:21 +00:00)
Merge remote-tracking branch 'upstream/master' into HEAD
Commit: ca80240d84

.github/PULL_REQUEST_TEMPLATE.md (19 lines changed)
@@ -18,5 +18,24 @@ tests/ci/run_check.py
 ### Changelog entry (a user-readable short description of the changes that goes to CHANGELOG.md):
 ...

+### Documentation entry for user-facing changes
+
+- [ ] Documentation is written (mandatory for new features)
+
+<!---
+Directly edit documentation source files in the "docs" folder with the same pull-request as code changes
+
+or
+
+Add a user-readable short description of the changes that should be added to docs.clickhouse.com below.
+
+At a minimum, the following information should be added (but add more as needed).
+- Motivation: Why is this function, table engine, etc. useful to ClickHouse users?
+
+- Parameters: If the feature being added takes arguments, options or is influenced by settings, please list them below with a brief explanation.
+
+- Example use: A query or command.
+-->
+
+> Information about CI checks: https://clickhouse.com/docs/en/development/continuous-integration/
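The commented-out block above spells out what a documentation entry should contain. For illustration, a filled-in entry might look like the sketch below; the function, its parameters, and the query are invented for the example, not taken from this commit:

```markdown
- Motivation: Lets users round a value to a fixed number of decimal places
  without nesting `round` calls (hypothetical feature).
- Parameters: `value`, the input expression; `precision`, a non-negative
  integer number of decimal places.
- Example use: `SELECT roundToPrecision(value, 3) FROM table;`
```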
.github/workflows/backport_branches.yml (208 lines changed)
@@ -12,11 +12,10 @@ jobs:
  PythonUnitTests:
    runs-on: [self-hosted, style-checker]
    steps:
-     - name: Clear repository
-       run: |
-         sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
-       uses: actions/checkout@v2
+       uses: ClickHouse/checkout@v1
+       with:
+         clear-repository: true
      - name: Python unit tests
        run: |
          cd "$GITHUB_WORKSPACE/tests/ci"
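This swap of the manual "Clear repository" step for the wrapper action's `clear-repository` input is the recurring change across every workflow touched by this commit. A consolidated sketch of the post-migration step, using only inputs that appear in the hunks of this diff (ClickHouse/checkout@v1 is assumed to wrap actions/checkout and to perform the cleanup the removed shell step did by hand):

```yaml
# Post-migration checkout step (sketch assembled from this commit's hunks).
- name: Check out repository code
  uses: ClickHouse/checkout@v1
  with:
    clear-repository: true   # replaces: sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
    submodules: true         # used by the build jobs below
    fetch-depth: 0           # full history, where version info is needed
```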
@@ -24,34 +23,32 @@ jobs:
  DockerHubPushAarch64:
    runs-on: [self-hosted, style-checker-aarch64]
    steps:
-     - name: Clear repository
-       run: |
-         sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
-       uses: actions/checkout@v2
+       uses: ClickHouse/checkout@v1
+       with:
+         clear-repository: true
      - name: Images check
        run: |
          cd "$GITHUB_WORKSPACE/tests/ci"
          python3 docker_images_check.py --suffix aarch64
      - name: Upload images files to artifacts
-       uses: actions/upload-artifact@v2
+       uses: actions/upload-artifact@v3
        with:
          name: changed_images_aarch64
          path: ${{ runner.temp }}/docker_images_check/changed_images_aarch64.json
  DockerHubPushAmd64:
    runs-on: [self-hosted, style-checker]
    steps:
-     - name: Clear repository
-       run: |
-         sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
-       uses: actions/checkout@v2
+       uses: ClickHouse/checkout@v1
+       with:
+         clear-repository: true
      - name: Images check
        run: |
          cd "$GITHUB_WORKSPACE/tests/ci"
          python3 docker_images_check.py --suffix amd64
      - name: Upload images files to artifacts
-       uses: actions/upload-artifact@v2
+       uses: actions/upload-artifact@v3
        with:
          name: changed_images_amd64
          path: ${{ runner.temp }}/docker_images_check/changed_images_amd64.json
@@ -59,18 +56,17 @@ jobs:
    needs: [DockerHubPushAmd64, DockerHubPushAarch64]
    runs-on: [self-hosted, style-checker]
    steps:
-     - name: Clear repository
-       run: |
-         sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
-       uses: actions/checkout@v2
+       uses: ClickHouse/checkout@v1
+       with:
+         clear-repository: true
      - name: Download changed aarch64 images
-       uses: actions/download-artifact@v2
+       uses: actions/download-artifact@v3
        with:
          name: changed_images_aarch64
          path: ${{ runner.temp }}
      - name: Download changed amd64 images
-       uses: actions/download-artifact@v2
+       uses: actions/download-artifact@v3
        with:
          name: changed_images_amd64
          path: ${{ runner.temp }}
@@ -79,7 +75,7 @@ jobs:
          cd "$GITHUB_WORKSPACE/tests/ci"
          python3 docker_manifests_merge.py --suffix amd64 --suffix aarch64
      - name: Upload images files to artifacts
-       uses: actions/upload-artifact@v2
+       uses: actions/upload-artifact@v3
        with:
          name: changed_images
          path: ${{ runner.temp }}/changed_images.json
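The artifact actions are bumped from v2 to v3 throughout, but the name/path contract between jobs is unchanged. A minimal sketch of the producer/consumer pairing, with the names and paths taken from the surrounding hunks:

```yaml
# In the DockerHubPush job: publish the merged image list as an artifact.
- name: Upload images files to artifacts
  uses: actions/upload-artifact@v3
  with:
    name: changed_images
    path: ${{ runner.temp }}/changed_images.json

# In a dependent job (one that declares `needs: [DockerHubPush]`):
# fetch the same artifact by name into its own images directory.
- name: Download changed images
  uses: actions/download-artifact@v3
  with:
    name: changed_images
    path: ${{ env.IMAGES_PATH }}
```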
@@ -94,13 +90,12 @@ jobs:
          REPO_COPY=${{runner.temp}}/compatibility_check/ClickHouse
          REPORTS_PATH=${{runner.temp}}/reports_dir
          EOF
-     - name: Clear repository
-       run: |
-         sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
-       uses: actions/checkout@v2
+       uses: ClickHouse/checkout@v1
+       with:
+         clear-repository: true
      - name: Download json reports
-       uses: actions/download-artifact@v2
+       uses: actions/download-artifact@v3
        with:
          path: ${{ env.REPORTS_PATH }}
      - name: CompatibilityCheck
@@ -132,28 +127,25 @@ jobs:
          BUILD_NAME=package_release
          EOF
      - name: Download changed images
-       uses: actions/download-artifact@v2
+       uses: actions/download-artifact@v3
        with:
          name: changed_images
          path: ${{ env.IMAGES_PATH }}
-     - name: Clear repository
-       run: |
-         sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
-       uses: actions/checkout@v2
+       uses: ClickHouse/checkout@v1
        with:
+         clear-repository: true
          submodules: true
          fetch-depth: 0 # For a proper version and performance artifacts
      - name: Build
        run: |
          git -C "$GITHUB_WORKSPACE" submodule sync
          git -C "$GITHUB_WORKSPACE" submodule update --single-branch --depth=1 --init --jobs=10
          sudo rm -fr "$TEMP_PATH"
          mkdir -p "$TEMP_PATH"
          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
          cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME"
      - name: Upload build URLs to artifacts
        if: ${{ success() || failure() }}
-       uses: actions/upload-artifact@v2
+       uses: actions/upload-artifact@v3
        with:
          name: ${{ env.BUILD_URLS }}
          path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json
@@ -177,28 +169,25 @@ jobs:
          BUILD_NAME=package_aarch64
          EOF
      - name: Download changed images
-       uses: actions/download-artifact@v2
+       uses: actions/download-artifact@v3
        with:
          name: changed_images
          path: ${{ env.IMAGES_PATH }}
-     - name: Clear repository
-       run: |
-         sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
-       uses: actions/checkout@v2
+       uses: ClickHouse/checkout@v1
        with:
+         clear-repository: true
          submodules: true
          fetch-depth: 0 # For a proper version and performance artifacts
      - name: Build
        run: |
          git -C "$GITHUB_WORKSPACE" submodule sync
          git -C "$GITHUB_WORKSPACE" submodule update --single-branch --depth=1 --init --jobs=10
          sudo rm -fr "$TEMP_PATH"
          mkdir -p "$TEMP_PATH"
          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
          cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME"
      - name: Upload build URLs to artifacts
        if: ${{ success() || failure() }}
-       uses: actions/upload-artifact@v2
+       uses: actions/upload-artifact@v3
        with:
          name: ${{ env.BUILD_URLS }}
          path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json
@@ -222,26 +211,24 @@ jobs:
          BUILD_NAME=package_asan
          EOF
      - name: Download changed images
-       uses: actions/download-artifact@v2
+       uses: actions/download-artifact@v3
        with:
          name: changed_images
          path: ${{ env.IMAGES_PATH }}
-     - name: Clear repository
-       run: |
-         sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
-       uses: actions/checkout@v2
+       uses: ClickHouse/checkout@v1
        with:
+         clear-repository: true
          submodules: true
      - name: Build
        run: |
          git -C "$GITHUB_WORKSPACE" submodule sync
          git -C "$GITHUB_WORKSPACE" submodule update --single-branch --depth=1 --init --jobs=10
          sudo rm -fr "$TEMP_PATH"
          mkdir -p "$TEMP_PATH"
          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
          cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME"
      - name: Upload build URLs to artifacts
        if: ${{ success() || failure() }}
-       uses: actions/upload-artifact@v2
+       uses: actions/upload-artifact@v3
        with:
          name: ${{ env.BUILD_URLS }}
          path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json
@@ -265,26 +252,24 @@ jobs:
          BUILD_NAME=package_tsan
          EOF
      - name: Download changed images
-       uses: actions/download-artifact@v2
+       uses: actions/download-artifact@v3
        with:
          name: changed_images
          path: ${{ env.IMAGES_PATH }}
-     - name: Clear repository
-       run: |
-         sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
-       uses: actions/checkout@v2
+       uses: ClickHouse/checkout@v1
        with:
+         clear-repository: true
          submodules: true
      - name: Build
        run: |
          git -C "$GITHUB_WORKSPACE" submodule sync
          git -C "$GITHUB_WORKSPACE" submodule update --single-branch --depth=1 --init --jobs=10
          sudo rm -fr "$TEMP_PATH"
          mkdir -p "$TEMP_PATH"
          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
          cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME"
      - name: Upload build URLs to artifacts
        if: ${{ success() || failure() }}
-       uses: actions/upload-artifact@v2
+       uses: actions/upload-artifact@v3
        with:
          name: ${{ env.BUILD_URLS }}
          path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json
@@ -308,26 +293,24 @@ jobs:
          BUILD_NAME=package_debug
          EOF
      - name: Download changed images
-       uses: actions/download-artifact@v2
+       uses: actions/download-artifact@v3
        with:
          name: changed_images
          path: ${{ env.IMAGES_PATH }}
-     - name: Clear repository
-       run: |
-         sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
-       uses: actions/checkout@v2
+       uses: ClickHouse/checkout@v1
        with:
+         clear-repository: true
          submodules: true
      - name: Build
        run: |
          git -C "$GITHUB_WORKSPACE" submodule sync
          git -C "$GITHUB_WORKSPACE" submodule update --single-branch --depth=1 --init --jobs=10
          sudo rm -fr "$TEMP_PATH"
          mkdir -p "$TEMP_PATH"
          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
          cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME"
      - name: Upload build URLs to artifacts
        if: ${{ success() || failure() }}
-       uses: actions/upload-artifact@v2
+       uses: actions/upload-artifact@v3
        with:
          name: ${{ env.BUILD_URLS }}
          path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json
@@ -351,28 +334,25 @@ jobs:
          BUILD_NAME=binary_darwin
          EOF
      - name: Download changed images
-       uses: actions/download-artifact@v2
+       uses: actions/download-artifact@v3
        with:
          name: changed_images
          path: ${{ env.IMAGES_PATH }}
-     - name: Clear repository
-       run: |
-         sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
-       uses: actions/checkout@v2
+       uses: ClickHouse/checkout@v1
        with:
+         clear-repository: true
          submodules: true
          fetch-depth: 0 # otherwise we will have no info about contributors
      - name: Build
        run: |
          git -C "$GITHUB_WORKSPACE" submodule sync
          git -C "$GITHUB_WORKSPACE" submodule update --single-branch --depth=1 --init --jobs=10
          sudo rm -fr "$TEMP_PATH"
          mkdir -p "$TEMP_PATH"
          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
          cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME"
      - name: Upload build URLs to artifacts
        if: ${{ success() || failure() }}
-       uses: actions/upload-artifact@v2
+       uses: actions/upload-artifact@v3
        with:
          name: ${{ env.BUILD_URLS }}
          path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json
@@ -396,28 +376,25 @@ jobs:
          BUILD_NAME=binary_darwin_aarch64
          EOF
      - name: Download changed images
-       uses: actions/download-artifact@v2
+       uses: actions/download-artifact@v3
        with:
          name: changed_images
          path: ${{ env.IMAGES_PATH }}
-     - name: Clear repository
-       run: |
-         sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
-       uses: actions/checkout@v2
+       uses: ClickHouse/checkout@v1
        with:
+         clear-repository: true
          submodules: true
          fetch-depth: 0 # otherwise we will have no info about contributors
      - name: Build
        run: |
          git -C "$GITHUB_WORKSPACE" submodule sync
          git -C "$GITHUB_WORKSPACE" submodule update --single-branch --depth=1 --init --jobs=10
          sudo rm -fr "$TEMP_PATH"
          mkdir -p "$TEMP_PATH"
          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
          cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME"
      - name: Upload build URLs to artifacts
        if: ${{ success() || failure() }}
-       uses: actions/upload-artifact@v2
+       uses: actions/upload-artifact@v3
        with:
          name: ${{ env.BUILD_URLS }}
          path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json
@@ -436,12 +413,10 @@ jobs:
      - BuilderDebAarch64
    runs-on: [self-hosted, style-checker]
    steps:
-     - name: Clear repository
-       run: |
-         sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
-       uses: actions/checkout@v2
+       uses: ClickHouse/checkout@v1
        with:
+         clear-repository: true
          fetch-depth: 0 # It MUST BE THE SAME for all dependencies and the job itself
      - name: Check docker clickhouse/clickhouse-server building
        run: |
@@ -477,14 +452,13 @@ jobs:
          NEEDS_DATA_PATH=${{runner.temp}}/needs.json
          EOF
      - name: Download json reports
-       uses: actions/download-artifact@v2
+       uses: actions/download-artifact@v3
        with:
          path: ${{ env.REPORTS_PATH }}
-     - name: Clear repository
-       run: |
-         sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
-       uses: actions/checkout@v2
+       uses: ClickHouse/checkout@v1
+       with:
+         clear-repository: true
      - name: Report Builder
        run: |
          sudo rm -fr "$TEMP_PATH"
@@ -516,14 +490,13 @@ jobs:
          NEEDS_DATA_PATH=${{runner.temp}}/needs.json
          EOF
      - name: Download json reports
-       uses: actions/download-artifact@v2
+       uses: actions/download-artifact@v3
        with:
          path: ${{ env.REPORTS_PATH }}
-     - name: Clear repository
-       run: |
-         sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
-       uses: actions/checkout@v2
+       uses: ClickHouse/checkout@v1
+       with:
+         clear-repository: true
      - name: Report Builder
        run: |
          sudo rm -fr "$TEMP_PATH"
@@ -556,14 +529,13 @@ jobs:
          KILL_TIMEOUT=10800
          EOF
      - name: Download json reports
-       uses: actions/download-artifact@v2
+       uses: actions/download-artifact@v3
        with:
          path: ${{ env.REPORTS_PATH }}
-     - name: Clear repository
-       run: |
-         sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
-       uses: actions/checkout@v2
+       uses: ClickHouse/checkout@v1
+       with:
+         clear-repository: true
      - name: Functional test
        run: |
          sudo rm -fr "$TEMP_PATH"
@@ -594,14 +566,13 @@ jobs:
          KILL_TIMEOUT=3600
          EOF
      - name: Download json reports
-       uses: actions/download-artifact@v2
+       uses: actions/download-artifact@v3
        with:
          path: ${{ env.REPORTS_PATH }}
-     - name: Clear repository
-       run: |
-         sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
-       uses: actions/checkout@v2
+       uses: ClickHouse/checkout@v1
+       with:
+         clear-repository: true
      - name: Functional test
        run: |
          sudo rm -fr "$TEMP_PATH"
@@ -635,14 +606,13 @@ jobs:
          REPO_COPY=${{runner.temp}}/stress_thread/ClickHouse
          EOF
      - name: Download json reports
-       uses: actions/download-artifact@v2
+       uses: actions/download-artifact@v3
        with:
          path: ${{ env.REPORTS_PATH }}
-     - name: Clear repository
-       run: |
-         sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
-       uses: actions/checkout@v2
+       uses: ClickHouse/checkout@v1
+       with:
+         clear-repository: true
      - name: Stress test
        run: |
          sudo rm -fr "$TEMP_PATH"
@@ -672,14 +642,13 @@ jobs:
          REPO_COPY=${{runner.temp}}/integration_tests_release/ClickHouse
          EOF
      - name: Download json reports
-       uses: actions/download-artifact@v2
+       uses: actions/download-artifact@v3
        with:
          path: ${{ env.REPORTS_PATH }}
-     - name: Clear repository
-       run: |
-         sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
-       uses: actions/checkout@v2
+       uses: ClickHouse/checkout@v1
+       with:
+         clear-repository: true
      - name: Integration test
        run: |
          sudo rm -fr "$TEMP_PATH"
@@ -706,11 +675,10 @@ jobs:
      - CompatibilityCheck
    runs-on: [self-hosted, style-checker]
    steps:
-     - name: Clear repository
-       run: |
-         sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
-       uses: actions/checkout@v2
+       uses: ClickHouse/checkout@v1
+       with:
+         clear-repository: true
      - name: Finish label
        run: |
          cd "$GITHUB_WORKSPACE/tests/ci"
.github/workflows/cherry_pick.yml (3 lines changed)
@@ -28,8 +28,9 @@ jobs:
          REPO_TEAM=core
          EOF
      - name: Check out repository code
-       uses: actions/checkout@v2
+       uses: ClickHouse/checkout@v1
        with:
+         clear-repository: true
          token: ${{secrets.ROBOT_CLICKHOUSE_COMMIT_TOKEN}}
          fetch-depth: 0
      - name: Cherry pick
.github/workflows/docs_check.yml (63 lines changed)
@@ -21,11 +21,10 @@ jobs:
  CheckLabels:
    runs-on: [self-hosted, style-checker]
    steps:
-     - name: Clear repository
-       run: |
-         sudo rm -rf "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
-       uses: actions/checkout@v2
+       uses: ClickHouse/checkout@v1
+       with:
+         clear-repository: true
      - name: Labels check
        run: |
          cd "$GITHUB_WORKSPACE/tests/ci"
@@ -34,17 +33,16 @@ jobs:
    needs: CheckLabels
    runs-on: [self-hosted, style-checker-aarch64]
    steps:
-     - name: Clear repository
-       run: |
-         sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
-       uses: actions/checkout@v2
+       uses: ClickHouse/checkout@v1
+       with:
+         clear-repository: true
      - name: Images check
        run: |
          cd "$GITHUB_WORKSPACE/tests/ci"
          python3 docker_images_check.py --suffix aarch64
      - name: Upload images files to artifacts
-       uses: actions/upload-artifact@v2
+       uses: actions/upload-artifact@v3
        with:
          name: changed_images_aarch64
          path: ${{ runner.temp }}/docker_images_check/changed_images_aarch64.json
@@ -52,17 +50,16 @@ jobs:
    needs: CheckLabels
    runs-on: [self-hosted, style-checker]
    steps:
-     - name: Clear repository
-       run: |
-         sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
-       uses: actions/checkout@v2
+       uses: ClickHouse/checkout@v1
+       with:
+         clear-repository: true
      - name: Images check
        run: |
          cd "$GITHUB_WORKSPACE/tests/ci"
          python3 docker_images_check.py --suffix amd64
      - name: Upload images files to artifacts
-       uses: actions/upload-artifact@v2
+       uses: actions/upload-artifact@v3
        with:
          name: changed_images_amd64
          path: ${{ runner.temp }}/docker_images_check/changed_images_amd64.json
@@ -70,18 +67,17 @@ jobs:
    needs: [DockerHubPushAmd64, DockerHubPushAarch64]
    runs-on: [self-hosted, style-checker]
    steps:
-     - name: Clear repository
-       run: |
-         sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
-       uses: actions/checkout@v2
+       uses: ClickHouse/checkout@v1
+       with:
+         clear-repository: true
      - name: Download changed aarch64 images
-       uses: actions/download-artifact@v2
+       uses: actions/download-artifact@v3
        with:
          name: changed_images_aarch64
          path: ${{ runner.temp }}
      - name: Download changed amd64 images
-       uses: actions/download-artifact@v2
+       uses: actions/download-artifact@v3
        with:
          name: changed_images_amd64
          path: ${{ runner.temp }}
@@ -90,7 +86,7 @@ jobs:
          cd "$GITHUB_WORKSPACE/tests/ci"
          python3 docker_manifests_merge.py --suffix amd64 --suffix aarch64
      - name: Upload images files to artifacts
-       uses: actions/upload-artifact@v2
+       uses: actions/upload-artifact@v3
        with:
          name: changed_images
          path: ${{ runner.temp }}/changed_images.json
@@ -110,15 +106,14 @@ jobs:
      - name: Download changed images
        # even if artifact does not exist, e.g. on `do not test` label or failed Docker job
        continue-on-error: true
-       uses: actions/download-artifact@v2
+       uses: actions/download-artifact@v3
        with:
          name: changed_images
          path: ${{ env.TEMP_PATH }}
-     - name: Clear repository
-       run: |
-         sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
-       uses: actions/checkout@v2
+       uses: ClickHouse/checkout@v1
+       with:
+         clear-repository: true
      - name: Style Check
        run: |
          cd "$GITHUB_WORKSPACE/tests/ci"
@@ -140,15 +135,14 @@ jobs:
          REPO_COPY=${{runner.temp}}/docs_check/ClickHouse
          EOF
      - name: Download changed images
-       uses: actions/download-artifact@v2
+       uses: actions/download-artifact@v3
        with:
          name: changed_images
          path: ${{ env.TEMP_PATH }}
-     - name: Clear repository
-       run: |
-         sudo rm -rf "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
-       uses: actions/checkout@v2
+       uses: ClickHouse/checkout@v1
+       with:
+         clear-repository: true
      - name: Docs Check
        run: |
          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
@@ -167,11 +161,10 @@ jobs:
      - DocsCheck
    runs-on: [self-hosted, style-checker]
    steps:
-     - name: Clear repository
-       run: |
-         sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
-       uses: actions/checkout@v2
+       uses: ClickHouse/checkout@v1
+       with:
+         clear-repository: true
      - name: Finish label
        run: |
          cd "$GITHUB_WORKSPACE/tests/ci"
.github/workflows/docs_release.yml (40 lines changed)
@@ -23,34 +23,32 @@ jobs:
  DockerHubPushAarch64:
    runs-on: [self-hosted, style-checker-aarch64]
    steps:
-     - name: Clear repository
-       run: |
-         sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
-       uses: actions/checkout@v2
+       uses: ClickHouse/checkout@v1
+       with:
+         clear-repository: true
      - name: Images check
        run: |
          cd "$GITHUB_WORKSPACE/tests/ci"
          python3 docker_images_check.py --suffix aarch64
      - name: Upload images files to artifacts
-       uses: actions/upload-artifact@v2
+       uses: actions/upload-artifact@v3
        with:
          name: changed_images_aarch64
          path: ${{ runner.temp }}/docker_images_check/changed_images_aarch64.json
  DockerHubPushAmd64:
    runs-on: [self-hosted, style-checker]
    steps:
-     - name: Clear repository
-       run: |
-         sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
-       uses: actions/checkout@v2
+       uses: ClickHouse/checkout@v1
+       with:
+         clear-repository: true
      - name: Images check
        run: |
          cd "$GITHUB_WORKSPACE/tests/ci"
          python3 docker_images_check.py --suffix amd64
      - name: Upload images files to artifacts
-       uses: actions/upload-artifact@v2
+       uses: actions/upload-artifact@v3
        with:
          name: changed_images_amd64
          path: ${{ runner.temp }}/docker_images_check/changed_images_amd64.json
@@ -58,18 +56,17 @@ jobs:
    needs: [DockerHubPushAmd64, DockerHubPushAarch64]
    runs-on: [self-hosted, style-checker]
    steps:
-     - name: Clear repository
-       run: |
-         sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
-       uses: actions/checkout@v2
+       uses: ClickHouse/checkout@v1
+       with:
+         clear-repository: true
      - name: Download changed aarch64 images
-       uses: actions/download-artifact@v2
+       uses: actions/download-artifact@v3
        with:
          name: changed_images_aarch64
          path: ${{ runner.temp }}
      - name: Download changed amd64 images
-       uses: actions/download-artifact@v2
+       uses: actions/download-artifact@v3
        with:
          name: changed_images_amd64
          path: ${{ runner.temp }}
@@ -78,7 +75,7 @@ jobs:
          cd "$GITHUB_WORKSPACE/tests/ci"
          python3 docker_manifests_merge.py --suffix amd64 --suffix aarch64
      - name: Upload images files to artifacts
-       uses: actions/upload-artifact@v2
+       uses: actions/upload-artifact@v3
        with:
          name: changed_images
          path: ${{ runner.temp }}/changed_images.json
@@ -97,13 +94,12 @@ jobs:
          ${{secrets.ROBOT_CLICKHOUSE_SSH_KEY}}
          RCSK
          EOF
-     - name: Clear repository
-       run: |
-         sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
-       uses: actions/checkout@v2
+       uses: ClickHouse/checkout@v1
+       with:
+         clear-repository: true
      - name: Download changed images
-       uses: actions/download-artifact@v2
+       uses: actions/download-artifact@v3
        with:
          name: changed_images
          path: ${{ env.TEMP_PATH }}
.github/workflows/jepsen.yml (12 lines changed)
@@ -19,12 +19,10 @@ jobs:
          TEMP_PATH=${{runner.temp}}/keeper_jepsen
          REPO_COPY=${{runner.temp}}/keeper_jepsen/ClickHouse
          EOF
-     - name: Clear repository
-       run: |
-         sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
-       uses: actions/checkout@v2
+       uses: ClickHouse/checkout@v1
        with:
+         clear-repository: true
          fetch-depth: 0
      - name: Jepsen Test
        run: |
@@ -50,12 +48,10 @@ jobs:
  #       TEMP_PATH=${{runner.temp}}/server_jepsen
  #       REPO_COPY=${{runner.temp}}/server_jepsen/ClickHouse
  #       EOF
- #     - name: Clear repository
- #       run: |
- #         sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
  #     - name: Check out repository code
- #       uses: actions/checkout@v2
+ #       uses: ClickHouse/checkout@v1
  #       with:
+ #         clear-repository: true
  #         fetch-depth: 0
  #     - name: Jepsen Test
  #       run: |
.github/workflows/master.yml (948 lines changed; file diff suppressed because it is too large)
.github/workflows/nightly.yml (50 lines changed)
@@ -16,34 +16,32 @@ jobs:
  DockerHubPushAarch64:
    runs-on: [self-hosted, style-checker-aarch64]
    steps:
-     - name: Clear repository
-       run: |
-         sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
-       uses: actions/checkout@v2
+       uses: ClickHouse/checkout@v1
+       with:
+         clear-repository: true
      - name: Images check
        run: |
          cd "$GITHUB_WORKSPACE/tests/ci"
          python3 docker_images_check.py --suffix aarch64 --all
      - name: Upload images files to artifacts
-       uses: actions/upload-artifact@v2
+       uses: actions/upload-artifact@v3
        with:
          name: changed_images_aarch64
          path: ${{ runner.temp }}/docker_images_check/changed_images_aarch64.json
  DockerHubPushAmd64:
    runs-on: [self-hosted, style-checker]
    steps:
-     - name: Clear repository
-       run: |
-         sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
-       uses: actions/checkout@v2
+       uses: ClickHouse/checkout@v1
+       with:
+         clear-repository: true
      - name: Images check
        run: |
          cd "$GITHUB_WORKSPACE/tests/ci"
          python3 docker_images_check.py --suffix amd64 --all
      - name: Upload images files to artifacts
-       uses: actions/upload-artifact@v2
+       uses: actions/upload-artifact@v3
        with:
          name: changed_images_amd64
          path: ${{ runner.temp }}/docker_images_check/changed_images_amd64.json
@@ -51,18 +49,17 @@ jobs:
    needs: [DockerHubPushAmd64, DockerHubPushAarch64]
    runs-on: [self-hosted, style-checker]
    steps:
-     - name: Clear repository
-       run: |
-         sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
-       uses: actions/checkout@v2
+       uses: ClickHouse/checkout@v1
+       with:
+         clear-repository: true
      - name: Download changed aarch64 images
-       uses: actions/download-artifact@v2
+       uses: actions/download-artifact@v3
        with:
          name: changed_images_aarch64
          path: ${{ runner.temp }}
      - name: Download changed amd64 images
-       uses: actions/download-artifact@v2
+       uses: actions/download-artifact@v3
        with:
          name: changed_images_amd64
          path: ${{ runner.temp }}
@@ -71,7 +68,7 @@ jobs:
          cd "$GITHUB_WORKSPACE/tests/ci"
          python3 docker_manifests_merge.py --suffix amd64 --suffix aarch64
      - name: Upload images files to artifacts
-       uses: actions/upload-artifact@v2
+       uses: actions/upload-artifact@v3
        with:
          name: changed_images
          path: ${{ runner.temp }}/changed_images.json
@@ -90,22 +87,17 @@ jobs:
          EOF
          echo "COVERITY_TOKEN=${{ secrets.COVERITY_TOKEN }}" >> "$GITHUB_ENV"
      - name: Download changed images
-       uses: actions/download-artifact@v2
+       uses: actions/download-artifact@v3
        with:
          name: changed_images
          path: ${{ env.IMAGES_PATH }}
-     - name: Clear repository
-       run: |
-         sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
        id: coverity-checkout
-       uses: actions/checkout@v2
+       uses: ClickHouse/checkout@v1
        with:
          fetch-depth: 0 # otherwise we will have no info about contributors
+         clear-repository: true
          submodules: true
      - name: Build
        run: |
          git -C "$GITHUB_WORKSPACE" submodule sync
          git -C "$GITHUB_WORKSPACE" submodule update --single-branch --depth=1 --init --jobs=10
          sudo rm -fr "$TEMP_PATH"
          mkdir -p "$TEMP_PATH"
          cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
@@ -115,7 +107,7 @@ jobs:
        run: |
          curl --form token="${COVERITY_TOKEN}" \
            --form email='security+coverity@clickhouse.com' \
-           --form file="@$TEMP_PATH/$BUILD_NAME/coverity-scan.tgz" \
+           --form file="@$TEMP_PATH/$BUILD_NAME/coverity-scan.tar.zst" \
            --form version="${GITHUB_REF#refs/heads/}-${GITHUB_SHA::6}" \
            --form description="Nighly Scan: $(date +'%Y-%m-%dT%H:%M:%S')" \
            https://scan.coverity.com/builds?project=ClickHouse%2FClickHouse
@@ -134,8 +126,10 @@ jobs:
      CC: clang-15
      CXX: clang++-15
    steps:
-     - uses: actions/checkout@v2
+     - name: Check out repository code
+       uses: ClickHouse/checkout@v1
        with:
+         clear-repository: true
          fetch-depth: 0 # Shallow clones should be disabled for a better relevancy of analysis
          submodules: true
      - name: Set up JDK 11
.github/workflows/pull_request.yml (1247 lines changed; file diff suppressed because it is too large)
.github/workflows/release.yml (8 lines changed)
@@ -20,7 +20,7 @@ jobs:
          REPO_COPY=${{runner.temp}}/release_packages/ClickHouse
          EOF
      - name: Check out repository code
-       uses: actions/checkout@v2
+       uses: ClickHouse/checkout@v1
        with:
          # Always use the most recent script version
          ref: master
@@ -50,12 +50,10 @@ jobs:
  DockerServerImages:
    runs-on: [self-hosted, style-checker]
    steps:
-     - name: Clear repository
-       run: |
-         sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
-       uses: actions/checkout@v2
+       uses: ClickHouse/checkout@v1
        with:
+         clear-repository: true
          fetch-depth: 0 # otherwise we will have no version info
      - name: Check docker clickhouse/clickhouse-server building
        run: |
.github/workflows/release_branches.yml (514 lines changed; file diff suppressed because it is too large)
.github/workflows/tags_stable.yml (2 lines changed)
@@ -34,7 +34,7 @@ jobs:
        run: |
          echo "GITHUB_TAG=${GITHUB_REF#refs/tags/}" >> "$GITHUB_ENV"
      - name: Check out repository code
-       uses: actions/checkout@v2
+       uses: ClickHouse/checkout@v1
        with:
          ref: master
          fetch-depth: 0
.github/workflows/woboq.yml (6 lines changed)
@@ -21,12 +21,10 @@ jobs:
          REPO_COPY=${{runner.temp}}/codebrowser/ClickHouse
          IMAGES_PATH=${{runner.temp}}/images_path
          EOF
-     - name: Clear repository
-       run: |
-         sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
      - name: Check out repository code
-       uses: actions/checkout@v2
+       uses: ClickHouse/checkout@v1
        with:
+         clear-repository: true
          submodules: 'true'
      - name: Codebrowser
        run: |
.gitmodules (183 lines changed)
@@ -1,88 +1,88 @@
[submodule "contrib/poco"]
    path = contrib/poco
-   url = https://github.com/ClickHouse/poco.git
+   url = https://github.com/ClickHouse/poco
    branch = clickhouse
[submodule "contrib/zstd"]
    path = contrib/zstd
-   url = https://github.com/facebook/zstd.git
+   url = https://github.com/facebook/zstd
[submodule "contrib/lz4"]
    path = contrib/lz4
-   url = https://github.com/lz4/lz4.git
+   url = https://github.com/lz4/lz4
[submodule "contrib/librdkafka"]
    path = contrib/librdkafka
-   url = https://github.com/ClickHouse/librdkafka.git
+   url = https://github.com/ClickHouse/librdkafka
[submodule "contrib/cctz"]
    path = contrib/cctz
-   url = https://github.com/ClickHouse/cctz.git
+   url = https://github.com/ClickHouse/cctz
[submodule "contrib/zlib-ng"]
    path = contrib/zlib-ng
-   url = https://github.com/ClickHouse/zlib-ng.git
+   url = https://github.com/ClickHouse/zlib-ng
    branch = clickhouse-2.0.x
[submodule "contrib/googletest"]
    path = contrib/googletest
-   url = https://github.com/google/googletest.git
+   url = https://github.com/google/googletest
[submodule "contrib/capnproto"]
    path = contrib/capnproto
-   url = https://github.com/capnproto/capnproto.git
+   url = https://github.com/capnproto/capnproto
[submodule "contrib/double-conversion"]
    path = contrib/double-conversion
-   url = https://github.com/google/double-conversion.git
+   url = https://github.com/google/double-conversion
[submodule "contrib/re2"]
    path = contrib/re2
-   url = https://github.com/google/re2.git
+   url = https://github.com/google/re2
[submodule "contrib/mariadb-connector-c"]
    path = contrib/mariadb-connector-c
-   url = https://github.com/ClickHouse/mariadb-connector-c.git
+   url = https://github.com/ClickHouse/mariadb-connector-c
[submodule "contrib/jemalloc"]
    path = contrib/jemalloc
-   url = https://github.com/jemalloc/jemalloc.git
+   url = https://github.com/jemalloc/jemalloc
[submodule "contrib/unixodbc"]
    path = contrib/unixodbc
-   url = https://github.com/ClickHouse/UnixODBC.git
+   url = https://github.com/ClickHouse/UnixODBC
[submodule "contrib/protobuf"]
    path = contrib/protobuf
-   url = https://github.com/ClickHouse/protobuf.git
+   url = https://github.com/ClickHouse/protobuf
    branch = v3.13.0.1
[submodule "contrib/boost"]
    path = contrib/boost
-   url = https://github.com/ClickHouse/boost.git
+   url = https://github.com/ClickHouse/boost
[submodule "contrib/base64"]
    path = contrib/base64
-   url = https://github.com/ClickHouse/Turbo-Base64.git
+   url = https://github.com/ClickHouse/Turbo-Base64
[submodule "contrib/arrow"]
    path = contrib/arrow
-   url = https://github.com/ClickHouse/arrow.git
+   url = https://github.com/ClickHouse/arrow
    branch = blessed/release-6.0.1
[submodule "contrib/thrift"]
    path = contrib/thrift
-   url = https://github.com/apache/thrift.git
+   url = https://github.com/apache/thrift
[submodule "contrib/libhdfs3"]
    path = contrib/libhdfs3
-   url = https://github.com/ClickHouse/libhdfs3.git
+   url = https://github.com/ClickHouse/libhdfs3
[submodule "contrib/libxml2"]
    path = contrib/libxml2
-   url = https://github.com/GNOME/libxml2.git
+   url = https://github.com/GNOME/libxml2
[submodule "contrib/libgsasl"]
    path = contrib/libgsasl
-   url = https://github.com/ClickHouse/libgsasl.git
+   url = https://github.com/ClickHouse/libgsasl
[submodule "contrib/snappy"]
    path = contrib/snappy
-   url = https://github.com/ClickHouse/snappy.git
+   url = https://github.com/ClickHouse/snappy
[submodule "contrib/cppkafka"]
    path = contrib/cppkafka
-   url = https://github.com/mfontanini/cppkafka.git
+   url = https://github.com/mfontanini/cppkafka
[submodule "contrib/brotli"]
    path = contrib/brotli
-   url = https://github.com/google/brotli.git
+   url = https://github.com/google/brotli
[submodule "contrib/h3"]
    path = contrib/h3
    url = https://github.com/ClickHouse/h3
[submodule "contrib/libunwind"]
    path = contrib/libunwind
-   url = https://github.com/ClickHouse/libunwind.git
+   url = https://github.com/ClickHouse/libunwind
[submodule "contrib/simdjson"]
    path = contrib/simdjson
-   url = https://github.com/simdjson/simdjson.git
+   url = https://github.com/simdjson/simdjson
[submodule "contrib/rapidjson"]
    path = contrib/rapidjson
    url = https://github.com/ClickHouse/rapidjson
@@ -94,68 +94,68 @@
    url = https://github.com/ClickHouse/orc
[submodule "contrib/sparsehash-c11"]
    path = contrib/sparsehash-c11
-   url = https://github.com/sparsehash/sparsehash-c11.git
+   url = https://github.com/sparsehash/sparsehash-c11
[submodule "contrib/grpc"]
    path = contrib/grpc
-   url = https://github.com/ClickHouse/grpc.git
+   url = https://github.com/ClickHouse/grpc
    branch = v1.33.2
[submodule "contrib/aws"]
    path = contrib/aws
-   url = https://github.com/ClickHouse/aws-sdk-cpp.git
+   url = https://github.com/ClickHouse/aws-sdk-cpp
[submodule "aws-c-event-stream"]
    path = contrib/aws-c-event-stream
-   url = https://github.com/ClickHouse/aws-c-event-stream.git
+   url = https://github.com/awslabs/aws-c-event-stream
[submodule "aws-c-common"]
    path = contrib/aws-c-common
-   url = https://github.com/ClickHouse/aws-c-common.git
+   url = https://github.com/ClickHouse/aws-c-common
[submodule "aws-checksums"]
    path = contrib/aws-checksums
-   url = https://github.com/ClickHouse/aws-checksums.git
+   url = https://github.com/awslabs/aws-checksums
[submodule "contrib/curl"]
    path = contrib/curl
-   url = https://github.com/curl/curl.git
+   url = https://github.com/curl/curl
[submodule "contrib/icudata"]
    path = contrib/icudata
-   url = https://github.com/ClickHouse/icudata.git
+   url = https://github.com/ClickHouse/icudata
[submodule "contrib/icu"]
    path = contrib/icu
-   url = https://github.com/unicode-org/icu.git
+   url = https://github.com/unicode-org/icu
[submodule "contrib/flatbuffers"]
    path = contrib/flatbuffers
-   url = https://github.com/ClickHouse/flatbuffers.git
+   url = https://github.com/ClickHouse/flatbuffers
[submodule "contrib/replxx"]
    path = contrib/replxx
-   url = https://github.com/ClickHouse/replxx.git
+   url = https://github.com/ClickHouse/replxx
[submodule "contrib/avro"]
    path = contrib/avro
-   url = https://github.com/ClickHouse/avro.git
+   url = https://github.com/ClickHouse/avro
    ignore = untracked
[submodule "contrib/msgpack-c"]
    path = contrib/msgpack-c
    url = https://github.com/msgpack/msgpack-c
[submodule "contrib/libcpuid"]
    path = contrib/libcpuid
-   url = https://github.com/ClickHouse/libcpuid.git
+   url = https://github.com/ClickHouse/libcpuid
[submodule "contrib/openldap"]
    path = contrib/openldap
-   url = https://github.com/ClickHouse/openldap.git
+   url = https://github.com/ClickHouse/openldap
[submodule "contrib/AMQP-CPP"]
    path = contrib/AMQP-CPP
-   url = https://github.com/ClickHouse/AMQP-CPP.git
+   url = https://github.com/ClickHouse/AMQP-CPP
[submodule "contrib/cassandra"]
    path = contrib/cassandra
-   url = https://github.com/ClickHouse/cpp-driver.git
+   url = https://github.com/ClickHouse/cpp-driver
    branch = clickhouse
[submodule "contrib/libuv"]
    path = contrib/libuv
-   url = https://github.com/ClickHouse/libuv.git
+   url = https://github.com/ClickHouse/libuv
    branch = clickhouse
[submodule "contrib/fmtlib"]
    path = contrib/fmtlib
-   url = https://github.com/fmtlib/fmt.git
+   url = https://github.com/fmtlib/fmt
[submodule "contrib/sentry-native"]
    path = contrib/sentry-native
-   url = https://github.com/ClickHouse/sentry-native.git
+   url = https://github.com/ClickHouse/sentry-native
[submodule "contrib/krb5"]
    path = contrib/krb5
    url = https://github.com/ClickHouse/krb5
@@ -172,17 +172,17 @@
    url = https://github.com/danlark1/miniselect
[submodule "contrib/rocksdb"]
    path = contrib/rocksdb
-   url = https://github.com/ClickHouse/rocksdb.git
+   url = https://github.com/ClickHouse/rocksdb
[submodule "contrib/xz"]
    path = contrib/xz
    url = https://github.com/xz-mirror/xz
[submodule "contrib/abseil-cpp"]
    path = contrib/abseil-cpp
-   url = https://github.com/abseil/abseil-cpp.git
+   url = https://github.com/abseil/abseil-cpp
    branch = lts_2021_11_02
[submodule "contrib/dragonbox"]
    path = contrib/dragonbox
-   url = https://github.com/ClickHouse/dragonbox.git
+   url = https://github.com/ClickHouse/dragonbox
[submodule "contrib/fast_float"]
    path = contrib/fast_float
    url = https://github.com/fastfloat/fast_float
@@ -191,44 +191,44 @@
    url = https://github.com/ClickHouse/libpq
[submodule "contrib/boringssl"]
    path = contrib/boringssl
-   url = https://github.com/ClickHouse/boringssl.git
+   url = https://github.com/ClickHouse/boringssl
    branch = unknown_branch_from_artur
[submodule "contrib/NuRaft"]
    path = contrib/NuRaft
-   url = https://github.com/ClickHouse/NuRaft.git
+   url = https://github.com/ClickHouse/NuRaft
[submodule "contrib/nanodbc"]
    path = contrib/nanodbc
-   url = https://github.com/ClickHouse/nanodbc.git
+   url = https://github.com/ClickHouse/nanodbc
[submodule "contrib/datasketches-cpp"]
    path = contrib/datasketches-cpp
-   url = https://github.com/ClickHouse/datasketches-cpp.git
+   url = https://github.com/ClickHouse/datasketches-cpp
[submodule "contrib/yaml-cpp"]
    path = contrib/yaml-cpp
-   url = https://github.com/ClickHouse/yaml-cpp.git
+   url = https://github.com/ClickHouse/yaml-cpp
[submodule "contrib/cld2"]
    path = contrib/cld2
-   url = https://github.com/ClickHouse/cld2.git
+   url = https://github.com/ClickHouse/cld2
[submodule "contrib/libstemmer_c"]
    path = contrib/libstemmer_c
-   url = https://github.com/ClickHouse/libstemmer_c.git
+   url = https://github.com/ClickHouse/libstemmer_c
[submodule "contrib/wordnet-blast"]
    path = contrib/wordnet-blast
-   url = https://github.com/ClickHouse/wordnet-blast.git
+   url = https://github.com/ClickHouse/wordnet-blast
[submodule "contrib/lemmagen-c"]
    path = contrib/lemmagen-c
-   url = https://github.com/ClickHouse/lemmagen-c.git
+   url = https://github.com/ClickHouse/lemmagen-c
[submodule "contrib/libpqxx"]
    path = contrib/libpqxx
-   url = https://github.com/ClickHouse/libpqxx.git
+   url = https://github.com/ClickHouse/libpqxx
[submodule "contrib/sqlite-amalgamation"]
    path = contrib/sqlite-amalgamation
-   url = https://github.com/azadkuh/sqlite-amalgamation
+   url = https://github.com/ClickHouse/sqlite-amalgamation
[submodule "contrib/s2geometry"]
    path = contrib/s2geometry
-   url = https://github.com/ClickHouse/s2geometry.git
+   url = https://github.com/ClickHouse/s2geometry
[submodule "contrib/bzip2"]
    path = contrib/bzip2
-   url = https://github.com/ClickHouse/bzip2.git
+   url = https://github.com/ClickHouse/bzip2
[submodule "contrib/magic_enum"]
    path = contrib/magic_enum
    url = https://github.com/Neargye/magic_enum
@@ -237,60 +237,93 @@
    url = https://github.com/google/libprotobuf-mutator
[submodule "contrib/sysroot"]
    path = contrib/sysroot
-   url = https://github.com/ClickHouse/sysroot.git
+   url = https://github.com/ClickHouse/sysroot
[submodule "contrib/nlp-data"]
    path = contrib/nlp-data
-   url = https://github.com/ClickHouse/nlp-data.git
+   url = https://github.com/ClickHouse/nlp-data
[submodule "contrib/hive-metastore"]
    path = contrib/hive-metastore
    url = https://github.com/ClickHouse/hive-metastore
[submodule "contrib/azure"]
    path = contrib/azure
-   url = https://github.com/ClickHouse/azure-sdk-for-cpp.git
+   url = https://github.com/ClickHouse/azure-sdk-for-cpp
[submodule "contrib/minizip-ng"]
    path = contrib/minizip-ng
    url = https://github.com/zlib-ng/minizip-ng
[submodule "contrib/annoy"]
    path = contrib/annoy
-   url = https://github.com/ClickHouse/annoy.git
+   url = https://github.com/ClickHouse/annoy
    branch = ClickHouse-master
[submodule "contrib/qpl"]
    path = contrib/qpl
-   url = https://github.com/intel/qpl.git
+   url = https://github.com/intel/qpl
[submodule "contrib/wyhash"]
    path = contrib/wyhash
-   url = https://github.com/wangyi-fudan/wyhash.git
+   url = https://github.com/wangyi-fudan/wyhash
[submodule "contrib/hashidsxx"]
    path = contrib/hashidsxx
-   url = https://github.com/schoentoon/hashidsxx.git
+   url = https://github.com/schoentoon/hashidsxx
[submodule "contrib/nats-io"]
    path = contrib/nats-io
-   url = https://github.com/ClickHouse/nats.c.git
+   url = https://github.com/ClickHouse/nats.c
[submodule "contrib/vectorscan"]
    path = contrib/vectorscan
-   url = https://github.com/VectorCamp/vectorscan.git
+   url = https://github.com/VectorCamp/vectorscan
[submodule "contrib/c-ares"]
    path = contrib/c-ares
    url = https://github.com/ClickHouse/c-ares
[submodule "contrib/llvm-project"]
    path = contrib/llvm-project
-   url = https://github.com/ClickHouse/llvm-project.git
+   url = https://github.com/ClickHouse/llvm-project
[submodule "contrib/corrosion"]
    path = contrib/corrosion
-   url = https://github.com/corrosion-rs/corrosion.git
+   url = https://github.com/corrosion-rs/corrosion
[submodule "contrib/morton-nd"]
    path = contrib/morton-nd
    url = https://github.com/morton-nd/morton-nd
[submodule "contrib/xxHash"]
    path = contrib/xxHash
-   url = https://github.com/Cyan4973/xxHash.git
+   url = https://github.com/Cyan4973/xxHash
[submodule "contrib/crc32-s390x"]
    path = contrib/crc32-s390x
    url = https://github.com/linux-on-ibm-z/crc32-s390x
[submodule "contrib/openssl"]
    path = contrib/openssl
    url = https://github.com/openssl/openssl
    branch = openssl-3.0
[submodule "contrib/google-benchmark"]
    path = contrib/google-benchmark
-   url = https://github.com/google/benchmark.git
+   url = https://github.com/google/benchmark
[submodule "contrib/libdivide"]
    path = contrib/libdivide
-   url = https://github.com/ridiculousfish/libdivide.git
+   url = https://github.com/ridiculousfish/libdivide
+[submodule "contrib/aws-crt-cpp"]
+   path = contrib/aws-crt-cpp
+   url = https://github.com/ClickHouse/aws-crt-cpp
+[submodule "contrib/aws-c-io"]
+   path = contrib/aws-c-io
+   url = https://github.com/ClickHouse/aws-c-io
+[submodule "contrib/aws-c-mqtt"]
+   path = contrib/aws-c-mqtt
+   url = https://github.com/awslabs/aws-c-mqtt
+[submodule "contrib/aws-c-auth"]
+   path = contrib/aws-c-auth
+   url = https://github.com/awslabs/aws-c-auth
+[submodule "contrib/aws-c-cal"]
+   path = contrib/aws-c-cal
+   url = https://github.com/ClickHouse/aws-c-cal
+[submodule "contrib/aws-c-sdkutils"]
+   path = contrib/aws-c-sdkutils
+   url = https://github.com/awslabs/aws-c-sdkutils
+[submodule "contrib/aws-c-http"]
+   path = contrib/aws-c-http
+   url = https://github.com/awslabs/aws-c-http
+[submodule "contrib/aws-c-s3"]
+   path = contrib/aws-c-s3
+   url = https://github.com/awslabs/aws-c-s3
+[submodule "contrib/aws-c-compression"]
+   path = contrib/aws-c-compression
+   url = https://github.com/awslabs/aws-c-compression
+[submodule "contrib/aws-s2n-tls"]
+   path = contrib/aws-s2n-tls
+   url = https://github.com/ClickHouse/s2n-tls
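Dropping the `.git` suffix (and the occasional org move, such as aws-c-event-stream to awslabs) only changes the URL recorded for new clones; an existing checkout keeps the old URLs in `.git/config` until they are re-synchronized. The standard git command for that, not part of this commit:

```sh
# Propagate edited .gitmodules URLs into an existing working copy.
git submodule sync --recursive
```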
CHANGELOG.md

@@ -17,6 +17,9 @@

### <a id="2212"></a> ClickHouse release 22.12, 2022-12-15

#### Backward Incompatible Change
* Add `GROUP BY ALL` syntax: [#37631](https://github.com/ClickHouse/ClickHouse/issues/37631). [#42265](https://github.com/ClickHouse/ClickHouse/pull/42265) ([刘陶峰](https://github.com/taofengliu)). If you have a column or an alias named `all` and doing `GROUP BY all` without the intention to group by all the columns, the query will have a different semantic. To keep the old semantic, put `all` into backticks or double quotes `"all"` to make it an identifier instead of a keyword.

+#### Upgrade Notes
+* Fixed backward incompatibility in (de)serialization of states of `min`, `max`, `any*`, `argMin`, `argMax` aggregate functions with `String` argument. The incompatibility affects 22.9, 22.10 and 22.11 branches (fixed since 22.9.6, 22.10.4 and 22.11.2 correspondingly). Some minor releases of 22.3, 22.7 and 22.8 branches are also affected: 22.3.13...22.3.14 (fixed since 22.3.15), 22.8.6...22.8.9 (fixed since 22.8.10), 22.7.6 and newer (will not be fixed in 22.7, we recommend upgrading from 22.7.* to 22.8.10 or newer). This release note does not concern users that have never used affected versions. Incompatible versions append an extra `'\0'` to strings when reading states of the aggregate functions mentioned above. For example, if an older version saved state of `anyState('foobar')` to `state_column` then the incompatible version will print `'foobar\0'` on `anyMerge(state_column)`. Also incompatible versions write states of the aggregate functions without trailing `'\0'`. Newer versions (that have the fix) can correctly read data written by all versions including incompatible versions, except one corner case. If an incompatible version saved a state with a string that actually ends with null character, then newer version will trim trailing `'\0'` when reading state of affected aggregate function. For example, if an incompatible version saved state of `anyState('abrac\0dabra\0')` to `state_column` then newer versions will print `'abrac\0dabra'` on `anyMerge(state_column)`. The issue also affects distributed queries when an incompatible version works in a cluster together with older or newer versions. [#43038](https://github.com/ClickHouse/ClickHouse/pull/43038) ([Alexander Tokmakov](https://github.com/tavplubix), [Raúl Marín](https://github.com/Algunenano)). Note: all the official ClickHouse builds already include the patches. This is not necessarily true for unofficial third-party builds that should be avoided.
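The `GROUP BY ALL` note above is easiest to see in a query. A sketch of the new semantics and the quoting workaround the entry describes (table and column names are invented for the example):

```sql
-- New semantics: ALL is a keyword meaning "group by every non-aggregated
-- column in the SELECT list", i.e. this groups by (a, b).
SELECT a, b, count() FROM t GROUP BY ALL;

-- If a table really has a column named `all`, quote it so it parses as an
-- identifier and keeps the old meaning: group by that column only.
SELECT `all`, count() FROM t GROUP BY `all`;
```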
CMakeLists.txt

@@ -73,22 +73,7 @@ message (STATUS "CMAKE_BUILD_TYPE: ${CMAKE_BUILD_TYPE}")

string (TOUPPER ${CMAKE_BUILD_TYPE} CMAKE_BUILD_TYPE_UC)

-option(USE_STATIC_LIBRARIES "Disable to use shared libraries" ON)
-# DEVELOPER ONLY.
-# Faster linking if turned on.
-option(SPLIT_SHARED_LIBRARIES "Keep all internal libraries as separate .so files" OFF)
-
-if (USE_STATIC_LIBRARIES AND SPLIT_SHARED_LIBRARIES)
-    message(FATAL_ERROR "SPLIT_SHARED_LIBRARIES=1 must not be used together with USE_STATIC_LIBRARIES=1")
-endif()
-
-if (NOT USE_STATIC_LIBRARIES AND SPLIT_SHARED_LIBRARIES)
-    set(BUILD_SHARED_LIBS 1 CACHE INTERNAL "")
-endif ()
-
-if (USE_STATIC_LIBRARIES)
-    list(REVERSE CMAKE_FIND_LIBRARY_SUFFIXES)
-endif ()
+list(REVERSE CMAKE_FIND_LIBRARY_SUFFIXES)

option (ENABLE_FUZZING "Fuzzy testing using libfuzzer" OFF)

@@ -171,7 +156,7 @@ option(ENABLE_TESTS "Provide unit_test_dbms target with Google.Test unit tests"
option(ENABLE_EXAMPLES "Build all example programs in 'examples' subdirectories" OFF)
option(ENABLE_BENCHMARKS "Build all benchmark programs in 'benchmarks' subdirectories" OFF)

-if (OS_LINUX AND (ARCH_AMD64 OR ARCH_AARCH64) AND USE_STATIC_LIBRARIES AND NOT SPLIT_SHARED_LIBRARIES AND NOT USE_MUSL)
+if (OS_LINUX AND (ARCH_AMD64 OR ARCH_AARCH64) AND NOT USE_MUSL)
    # Only for Linux, x86_64 or aarch64.
    option(GLIBC_COMPATIBILITY "Enable compatibility with older glibc libraries." ON)
elseif(GLIBC_COMPATIBILITY)

@@ -377,15 +362,15 @@ set (DEBUG_INFO_FLAGS "-g -gdwarf-4")

set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${COMPILER_FLAGS}")
set (CMAKE_CXX_FLAGS_RELWITHDEBINFO "${CMAKE_CXX_FLAGS_RELWITHDEBINFO} -O3 ${DEBUG_INFO_FLAGS} ${CMAKE_CXX_FLAGS_ADD}")
-set (CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -O0 ${DEBUG_INFO_FLAGS} -fno-inline ${CMAKE_CXX_FLAGS_ADD}")
+set (CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -O0 ${DEBUG_INFO_FLAGS} ${CMAKE_CXX_FLAGS_ADD}")

set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${COMPILER_FLAGS} ${CMAKE_C_FLAGS_ADD}")
set (CMAKE_C_FLAGS_RELWITHDEBINFO "${CMAKE_C_FLAGS_RELWITHDEBINFO} -O3 ${DEBUG_INFO_FLAGS} ${CMAKE_C_FLAGS_ADD}")
-set (CMAKE_C_FLAGS_DEBUG "${CMAKE_C_FLAGS_DEBUG} -O0 ${DEBUG_INFO_FLAGS} -fno-inline ${CMAKE_C_FLAGS_ADD}")
+set (CMAKE_C_FLAGS_DEBUG "${CMAKE_C_FLAGS_DEBUG} -O0 ${DEBUG_INFO_FLAGS} ${CMAKE_C_FLAGS_ADD}")

set (CMAKE_ASM_FLAGS "${CMAKE_ASM_FLAGS} ${COMPILER_FLAGS} ${CMAKE_ASM_FLAGS_ADD}")
set (CMAKE_ASM_FLAGS_RELWITHDEBINFO "${CMAKE_ASM_FLAGS_RELWITHDEBINFO} -O3 ${DEBUG_INFO_FLAGS} ${CMAKE_ASM_FLAGS_ADD}")
-set (CMAKE_ASM_FLAGS_DEBUG "${CMAKE_ASM_FLAGS_DEBUG} -O0 ${DEBUG_INFO_FLAGS} -fno-inline ${CMAKE_ASM_FLAGS_ADD}")
+set (CMAKE_ASM_FLAGS_DEBUG "${CMAKE_ASM_FLAGS_DEBUG} -O0 ${DEBUG_INFO_FLAGS} ${CMAKE_ASM_FLAGS_ADD}")

if (COMPILER_CLANG)
    if (OS_DARWIN)

@@ -467,22 +452,13 @@ endif ()

set (CMAKE_POSTFIX_VARIABLE "CMAKE_${CMAKE_BUILD_TYPE_UC}_POSTFIX")

-if (USE_STATIC_LIBRARIES)
-    set (CMAKE_POSITION_INDEPENDENT_CODE OFF)
-    if (OS_LINUX AND NOT ARCH_AARCH64)
-        # Slightly more efficient code can be generated
-        # It's disabled for ARM because otherwise ClickHouse cannot run on Android.
-        set (CMAKE_CXX_FLAGS_RELWITHDEBINFO "${CMAKE_CXX_FLAGS_RELWITHDEBINFO} -fno-pie")
-        set (CMAKE_C_FLAGS_RELWITHDEBINFO "${CMAKE_C_FLAGS_RELWITHDEBINFO} -fno-pie")
-        set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -no-pie -Wl,-no-pie")
-    endif ()
-else ()
-    set (CMAKE_POSITION_INDEPENDENT_CODE ON)
-    # This is required for clang on Arch linux, that uses PIE by default.
-    # See enable-SSP-and-PIE-by-default.patch [1].
-    #
-    # [1]: https://github.com/archlinux/svntogit-packages/blob/6e681aa860e65ad46a1387081482eb875c2200f2/trunk/enable-SSP-and-PIE-by-default.patch
-    set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -no-pie")
+set (CMAKE_POSITION_INDEPENDENT_CODE OFF)
+if (OS_LINUX AND NOT ARCH_AARCH64)
+    # Slightly more efficient code can be generated
+    # It's disabled for ARM because otherwise ClickHouse cannot run on Android.
+    set (CMAKE_CXX_FLAGS_RELWITHDEBINFO "${CMAKE_CXX_FLAGS_RELWITHDEBINFO} -fno-pie")
+    set (CMAKE_C_FLAGS_RELWITHDEBINFO "${CMAKE_C_FLAGS_RELWITHDEBINFO} -fno-pie")
+    set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -no-pie -Wl,-no-pie")
endif ()

if (ENABLE_TESTS)

@@ -504,10 +480,7 @@ else ()
    set (CLICKHOUSE_ETC_DIR "${CMAKE_INSTALL_PREFIX}/etc")
endif ()

-message (STATUS
-    "Building for: ${CMAKE_SYSTEM} ${CMAKE_SYSTEM_PROCESSOR} ${CMAKE_LIBRARY_ARCHITECTURE} ;
-    USE_STATIC_LIBRARIES=${USE_STATIC_LIBRARIES}
-    SPLIT_SHARED_LIBRARIES=${SPLIT_SHARED_LIBRARIES}")
+message (STATUS "Building for: ${CMAKE_SYSTEM} ${CMAKE_SYSTEM_PROCESSOR} ${CMAKE_LIBRARY_ARCHITECTURE}")

include (GNUInstallDirs)

@@ -553,7 +526,7 @@ macro (clickhouse_add_executable target)
    # - _je_zone_register due to JEMALLOC_PRIVATE_NAMESPACE=je_ under OS X.
    # - but jemalloc-cmake does not run private_namespace.sh
    # so symbol name should be _zone_register
|
||||
if (ENABLE_JEMALLOC AND USE_STATIC_LIBRARIES AND OS_DARWIN)
|
||||
if (ENABLE_JEMALLOC AND OS_DARWIN)
|
||||
set_property(TARGET ${target} APPEND PROPERTY LINK_OPTIONS -u_zone_register)
|
||||
endif()
|
||||
endif()
|
||||
|
4 LICENSE

@@ -1,4 +1,4 @@
-Copyright 2016-2022 ClickHouse, Inc.
+Copyright 2016-2023 ClickHouse, Inc.

                                 Apache License
                           Version 2.0, January 2004
@@ -188,7 +188,7 @@ Copyright 2016-2022 ClickHouse, Inc.
      same "printed page" as the copyright notice for easier
      identification within third-party archives.

-   Copyright 2016-2022 ClickHouse, Inc.
+   Copyright 2016-2023 ClickHouse, Inc.

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
@@ -16,6 +16,6 @@ ClickHouse® is an open-source column-oriented database management system that a
 * [Contacts](https://clickhouse.com/company/contact) can help to get your questions answered if there are any.

 ## Upcoming events
-* [**v22.12 Release Webinar**](https://clickhouse.com/company/events/v22-12-release-webinar) 22.12 is the ClickHouse Christmas release. There are plenty of gifts (a new JOIN algorithm among them) and we adopted something from MongoDB. Original creator, co-founder, and CTO of ClickHouse Alexey Milovidov will walk us through the highlights of the release.
+* **Recording available**: [**v22.12 Release Webinar**](https://www.youtube.com/watch?v=sREupr6uc2k) 22.12 is the ClickHouse Christmas release. There are plenty of gifts (a new JOIN algorithm among them) and we adopted something from MongoDB. Original creator, co-founder, and CTO of ClickHouse Alexey Milovidov will walk us through the highlights of the release.
 * [**ClickHouse Meetup at the CHEQ office in Tel Aviv**](https://www.meetup.com/clickhouse-tel-aviv-user-group/events/289599423/) - Jan 16 - We are very excited to be holding our next in-person ClickHouse meetup at the CHEQ office in Tel Aviv! Hear from CHEQ, ServiceNow and Contentsquare, as well as a deep dive presentation from ClickHouse CTO Alexey Milovidov. Join us for a fun evening of talks, food and discussion!
 * [**ClickHouse Meetup at Microsoft Office in Seattle**](https://www.meetup.com/clickhouse-seattle-user-group/events/290310025/) - Jan 18 - Keep an eye on this space as we will be announcing speakers soon!
@@ -10,7 +10,7 @@

 #include <base/MoveOrCopyIfThrow.h>

 /** Pool for limited size objects that cannot be used from different threads simultaneously.
-  * The main use case is to have fixed size of objects that can be reused in difference threads during their lifetime
+  * The main use case is to have fixed size of objects that can be reused in different threads during their lifetime
   * and have to be initialized on demand.
   * Two main properties of pool are allocated objects size and borrowed objects size.
   * Allocated objects size is size of objects that are currently allocated by the pool.
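
Since the comment above only states the pool's contract, here is a hypothetical, much-simplified C++ sketch of the borrow/return idea (the names `SimplePool`, `borrow`, and `giveBack` are illustrative — this is not the actual `BorrowedObjectPool` interface):

```cpp
#include <condition_variable>
#include <functional>
#include <mutex>
#include <vector>

// Objects are allocated lazily up to max_size; after that, borrowers block
// until some other thread gives an object back.
template <typename T>
class SimplePool
{
public:
    SimplePool(size_t max_size, std::function<T()> factory)
        : max_size_(max_size), factory_(std::move(factory)) {}

    T borrow()
    {
        std::unique_lock lock(mutex_);
        if (free_.empty() && allocated_ < max_size_)
        {
            ++allocated_;          // initialize on demand
            return factory_();
        }
        cv_.wait(lock, [&] { return !free_.empty(); });
        T obj = std::move(free_.back());
        free_.pop_back();
        return obj;
    }

    void giveBack(T obj)
    {
        {
            std::lock_guard lock(mutex_);
            free_.push_back(std::move(obj));
        }
        cv_.notify_one();
    }

private:
    std::mutex mutex_;
    std::condition_variable cv_;
    std::vector<T> free_;           // borrowed size = allocated_ - free_.size()
    size_t allocated_ = 0;
    const size_t max_size_;
    std::function<T()> factory_;
};
```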
@@ -8,16 +8,13 @@ set (SRCS
     getPageSize.cpp
     getThreadId.cpp
     JSON.cpp
-    LineReader.cpp
     mremap.cpp
     phdr_cache.cpp
     preciseExp10.cpp
-    setTerminalEcho.cpp
     shift10.cpp
     sleep.cpp
     terminalColors.cpp
     errnoToString.cpp
-    ReplxxLineReader.cpp
     StringRef.cpp
     safeExit.cpp
     throwError.cpp
@@ -40,17 +37,8 @@ else ()
     target_compile_definitions(common PUBLIC WITH_COVERAGE=0)
 endif ()

-# FIXME: move libraries for line reading out from base
-if (TARGET ch_rust::skim)
-    target_link_libraries(common PUBLIC ch_rust::skim)
-endif()
-
 target_include_directories(common PUBLIC .. "${CMAKE_CURRENT_BINARY_DIR}/..")

-if (OS_DARWIN AND NOT USE_STATIC_LIBRARIES)
-    target_link_libraries(common PUBLIC -Wl,-U,_inside_main)
-endif()
-
 target_link_libraries (common
     PUBLIC
         ch_contrib::cityhash
53 base/base/IPv4andIPv6.h Normal file

@@ -0,0 +1,53 @@
+#pragma once
+
+#include <base/strong_typedef.h>
+#include <base/extended_types.h>
+#include <Common/memcmpSmall.h>
+
+namespace DB
+{
+
+    using IPv4 = StrongTypedef<UInt32, struct IPv4Tag>;
+
+    struct IPv6 : StrongTypedef<UInt128, struct IPv6Tag>
+    {
+        constexpr IPv6() = default;
+        constexpr explicit IPv6(const UInt128 & x) : StrongTypedef(x) {}
+        constexpr explicit IPv6(UInt128 && x) : StrongTypedef(std::move(x)) {}
+
+        IPv6 & operator=(const UInt128 & rhs) { StrongTypedef::operator=(rhs); return *this; }
+        IPv6 & operator=(UInt128 && rhs) { StrongTypedef::operator=(std::move(rhs)); return *this; }
+
+        bool operator<(const IPv6 & rhs) const
+        {
+            return
+                memcmp16(
+                    reinterpret_cast<const unsigned char *>(toUnderType().items),
+                    reinterpret_cast<const unsigned char *>(rhs.toUnderType().items)
+                ) < 0;
+        }
+
+        bool operator>(const IPv6 & rhs) const
+        {
+            return
+                memcmp16(
+                    reinterpret_cast<const unsigned char *>(toUnderType().items),
+                    reinterpret_cast<const unsigned char *>(rhs.toUnderType().items)
+                ) > 0;
+        }
+
+        bool operator==(const IPv6 & rhs) const
+        {
+            return
+                memcmp16(
+                    reinterpret_cast<const unsigned char *>(toUnderType().items),
+                    reinterpret_cast<const unsigned char *>(rhs.toUnderType().items)
+                ) == 0;
+        }
+
+        bool operator<=(const IPv6 & rhs) const { return !operator>(rhs); }
+        bool operator>=(const IPv6 & rhs) const { return !operator<(rhs); }
+        bool operator!=(const IPv6 & rhs) const { return !operator==(rhs); }
+    };
+
+}
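
For illustration, a self-contained C++ sketch (not the ClickHouse types; `RawIPv6` is a stand-in for the 16-byte value) of the same raw-byte ordering that the `memcmp16`-based operators above implement:

```cpp
#include <array>
#include <cstring>
#include <iostream>

// A 16-byte IPv6 address, compared lexicographically byte by byte,
// exactly as a 16-byte memcmp would do it.
using RawIPv6 = std::array<unsigned char, 16>;

bool less(const RawIPv6 & a, const RawIPv6 & b)
{
    return std::memcmp(a.data(), b.data(), sizeof(RawIPv6)) < 0;
}

int main()
{
    RawIPv6 zero{};       // ::   (all zero bytes)
    RawIPv6 loopback{};   // ::1
    loopback[15] = 1;
    std::cout << (less(zero, loopback) ? ":: < ::1" : "unexpected") << '\n';
    return 0;
}
```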
@@ -2,6 +2,7 @@

 #include "Decimal.h"
 #include "UUID.h"
+#include "IPv4andIPv6.h"

 namespace DB
 {
@@ -35,6 +36,8 @@ TN_MAP(Float32)
 TN_MAP(Float64)
 TN_MAP(String)
 TN_MAP(UUID)
+TN_MAP(IPv4)
+TN_MAP(IPv6)
 TN_MAP(Decimal32)
 TN_MAP(Decimal64)
 TN_MAP(Decimal128)
@@ -1,28 +0,0 @@
-#include <base/setTerminalEcho.h>
-#include <base/errnoToString.h>
-#include <stdexcept>
-#include <cstring>
-#include <string>
-#include <termios.h>
-#include <unistd.h>
-
-
-void setTerminalEcho(bool enable)
-{
-    /// Obtain terminal attributes,
-    /// toggle the ECHO flag
-    /// and set them back.
-
-    struct termios tty{};
-
-    if (0 != tcgetattr(STDIN_FILENO, &tty))
-        throw std::runtime_error(std::string("setTerminalEcho failed get: ") + errnoToString());
-
-    if (enable)
-        tty.c_lflag |= ECHO;
-    else
-        tty.c_lflag &= ~ECHO;
-
-    if (0 != tcsetattr(STDIN_FILENO, TCSANOW, &tty))
-        throw std::runtime_error(std::string("setTerminalEcho failed set: ") + errnoToString());
-}

@@ -1,4 +0,0 @@
-#pragma once
-
-/// Enable or disable echoing of typed characters. Throws std::runtime_error on error.
-void setTerminalEcho(bool enable);
@@ -37,7 +37,7 @@ if (GLIBC_COMPATIBILITY)

     target_include_directories(glibc-compatibility PRIVATE libcxxabi ${musl_arch_include_dir})

-    if (( NOT USE_STATIC_LIBRARIES AND NOT USE_STATIC_LIBRARIES ) OR ENABLE_OPENSSL_DYNAMIC)
+    if (ENABLE_OPENSSL_DYNAMIC)
        target_compile_options(glibc-compatibility PRIVATE -fPIC)
    endif ()
@@ -102,6 +102,11 @@ elseif (ARCH_AMD64)
         SET(ENABLE_AVX512_FOR_SPEC_OP 0)
     endif()

+    # ClickHouse can be cross-compiled (e.g. on an ARM host for x86) but it is also possible to build ClickHouse on x86 w/o AVX for x86 w/
+    # AVX. We only check that the compiler can emit certain SIMD instructions, we don't care if the host system is able to run the binary.
+    # Therefore, use check_cxx_source_compiles (= does the code compile+link?) instead of check_cxx_source_runs (= does the code
+    # compile+link+run).
+
     set (TEST_FLAG "-mssse3")
     set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0")
     check_cxx_source_compiles("
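
For reference, a sketch of the kind of probe program such a compile-only check feeds to the compiler (the exact probe body is an assumption; the point is that it must compile and link with `-mssse3`, whether or not the build host can execute it):

```cpp
#include <tmmintrin.h>  // SSSE3 intrinsics

int main()
{
    // If the compiler can emit PSHUFB, this translation unit compiles and
    // links; the binary is never run during the check.
    __m128i a = _mm_setzero_si128();
    __m128i b = _mm_shuffle_epi8(a, a);
    (void)b;
    return 0;
}
```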
@@ -25,7 +25,7 @@ if (SANITIZE)
         if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
             set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${ASAN_FLAGS}")
         endif()
-        if (USE_STATIC_LIBRARIES AND CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
+        if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
             set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -static-libasan")
         endif ()
         if (COMPILER_GCC)
@@ -50,7 +50,7 @@ if (SANITIZE)
         if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
             set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fsanitize=memory")
         endif()
-        if (USE_STATIC_LIBRARIES AND CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
+        if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
             set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -static-libmsan")
         endif ()

@@ -71,7 +71,7 @@ if (SANITIZE)
         if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
             set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fsanitize=thread")
         endif()
-        if (USE_STATIC_LIBRARIES AND CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
+        if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
             set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -static-libtsan")
         endif ()
         if (COMPILER_GCC)
@@ -103,7 +103,7 @@ if (SANITIZE)
         if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
             set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fsanitize=undefined")
         endif()
-        if (USE_STATIC_LIBRARIES AND CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
+        if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
             set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -static-libubsan")
         endif ()
         if (COMPILER_GCC)
19 contrib/CMakeLists.txt vendored

@@ -115,12 +115,25 @@ endif()
 add_contrib (llvm-project-cmake llvm-project)
 add_contrib (libfuzzer-cmake llvm-project)
 add_contrib (libxml2-cmake libxml2)
-add_contrib (aws-s3-cmake
+
+add_contrib (aws-cmake
     aws
+    aws-c-auth
+    aws-c-cal
     aws-c-common
+    aws-c-compression
     aws-c-event-stream
+    aws-c-http
+    aws-c-io
+    aws-c-mqtt
+    aws-c-s3
+    aws-c-sdkutils
+    aws-s2n-tls
     aws-checksums
+    aws-crt-cpp
+    aws-cmake
 )
+
 add_contrib (base64-cmake base64)
 add_contrib (simdjson-cmake simdjson)
 add_contrib (rapidjson-cmake rapidjson)
@@ -166,6 +179,10 @@ add_contrib (c-ares-cmake c-ares)
 add_contrib (qpl-cmake qpl)
 add_contrib (morton-nd-cmake morton-nd)

+if (ARCH_S390X)
+    add_contrib(crc32-s390x-cmake crc32-s390x)
+endif()
+
 add_contrib (annoy-cmake annoy)

 add_contrib (xxHash-cmake xxHash)
@@ -78,23 +78,14 @@ set(FLATBUFFERS_BINARY_DIR "${ClickHouse_BINARY_DIR}/contrib/flatbuffers")
 set(FLATBUFFERS_INCLUDE_DIR "${FLATBUFFERS_SRC_DIR}/include")

 # set flatbuffers CMake options
-if (USE_STATIC_LIBRARIES)
-    set(FLATBUFFERS_BUILD_FLATLIB ON CACHE BOOL "Enable the build of the flatbuffers library")
-    set(FLATBUFFERS_BUILD_SHAREDLIB OFF CACHE BOOL "Disable the build of the flatbuffers shared library")
-else ()
-    set(FLATBUFFERS_BUILD_SHAREDLIB ON CACHE BOOL "Enable the build of the flatbuffers shared library")
-    set(FLATBUFFERS_BUILD_FLATLIB OFF CACHE BOOL "Disable the build of the flatbuffers library")
-endif ()
+set(FLATBUFFERS_BUILD_FLATLIB ON CACHE BOOL "Enable the build of the flatbuffers library")
+set(FLATBUFFERS_BUILD_SHAREDLIB OFF CACHE BOOL "Disable the build of the flatbuffers shared library")
 set(FLATBUFFERS_BUILD_TESTS OFF CACHE BOOL "Skip flatbuffers tests")

 add_subdirectory(${FLATBUFFERS_SRC_DIR} "${FLATBUFFERS_BINARY_DIR}")

 add_library(_flatbuffers INTERFACE)
-if(USE_STATIC_LIBRARIES)
-    target_link_libraries(_flatbuffers INTERFACE flatbuffers)
-else()
-    target_link_libraries(_flatbuffers INTERFACE flatbuffers_shared)
-endif()
+target_link_libraries(_flatbuffers INTERFACE flatbuffers)
 target_include_directories(_flatbuffers INTERFACE ${FLATBUFFERS_INCLUDE_DIR})

 # === hdfs
2 contrib/aws vendored
@@ -1 +1 @@
-Subproject commit 00b03604543367d7e310cb0993973fdcb723ea79
+Subproject commit 4a12641211d4dbc8e2fdb2dd0f1eea0927db9252

1 contrib/aws-c-auth vendored Submodule
@@ -0,0 +1 @@
+Subproject commit 30df6c407e2df43bd244e2c34c9b4a4b87372bfb

1 contrib/aws-c-cal vendored Submodule
@@ -0,0 +1 @@
+Subproject commit 85dd7664b786a389c6fb1a6f031ab4bb2282133d

2 contrib/aws-c-common vendored
@@ -1 +1 @@
-Subproject commit 736a82d1697c108b04a277e66438a7f4e19b6857
+Subproject commit 324fd1d973ccb25c813aa747bf1759cfde5121c5

1 contrib/aws-c-compression vendored Submodule
@@ -0,0 +1 @@
+Subproject commit b517b7decd0dac30be2162f5186c250221c53aff

2 contrib/aws-c-event-stream vendored
@@ -1 +1 @@
-Subproject commit 3bc33662f9ccff4f4cbcf9509cc78c26e022fde0
+Subproject commit 39bfa94a14b7126bf0c1330286ef8db452d87e66

1 contrib/aws-c-http vendored Submodule
@@ -0,0 +1 @@
+Subproject commit 2c5a2a7d5556600b9782ffa6c9d7e09964df1abc

1 contrib/aws-c-io vendored Submodule
@@ -0,0 +1 @@
+Subproject commit 5d32c453560d0823df521a686bf7fbacde7f9be3

1 contrib/aws-c-mqtt vendored Submodule
@@ -0,0 +1 @@
+Subproject commit 882c689561a3db1466330ccfe3b63637e0a575d3

1 contrib/aws-c-s3 vendored Submodule
@@ -0,0 +1 @@
+Subproject commit a41255ece72a7c887bba7f9d998ca3e14f4c8a1b

1 contrib/aws-c-sdkutils vendored Submodule
@@ -0,0 +1 @@
+Subproject commit 25bf5cf225f977c3accc6a05a0a7a181ef2a4a30

2 contrib/aws-checksums vendored
@@ -1 +1 @@
-Subproject commit 519d6d9093819b6cf89ffff589a27ef8f83d0f65
+Subproject commit 48e7c0e01479232f225c8044d76c84e74192889d
114 contrib/aws-cmake/AwsFeatureTests.cmake Normal file

@@ -0,0 +1,114 @@
+# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+# SPDX-License-Identifier: Apache-2.0.
+
+include(CheckCSourceRuns)
+
+option(USE_CPU_EXTENSIONS "Whenever possible, use functions optimized for CPUs with specific extensions (ex: SSE, AVX)." ON)
+
+# In the current (11/2/21) state of mingw64, the packaged gcc is not capable of emitting properly aligned avx2 instructions under certain circumstances.
+# This leads to crashes for windows builds using mingw64 when invoking the avx2-enabled versions of certain functions. Until we can find a better
+# work-around, disable avx2 (and all other extensions) in mingw builds.
+#
+# https://gcc.gnu.org/bugzilla/show_bug.cgi?id=54412
+#
+if (MINGW)
+    message(STATUS "MINGW detected! Disabling avx2 and other CPU extensions")
+    set(USE_CPU_EXTENSIONS OFF)
+endif()
+
+if(NOT CMAKE_CROSSCOMPILING)
+    check_c_source_runs("
+    #include <stdbool.h>
+    bool foo(int a, int b, int *c) {
+        return __builtin_mul_overflow(a, b, c);
+    }
+
+    int main() {
+        int out;
+        if (foo(1, 2, &out)) {
+            return 0;
+        }
+
+        return 0;
+    }" AWS_HAVE_GCC_OVERFLOW_MATH_EXTENSIONS)
+
+    if (USE_CPU_EXTENSIONS)
+        check_c_source_runs("
+        int main() {
+            int foo = 42;
+            _mulx_u32(1, 2, &foo);
+            return foo != 2;
+        }" AWS_HAVE_MSVC_MULX)
+    endif()
+
+endif()
+
+check_c_source_compiles("
+    #include <Windows.h>
+#if WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_DESKTOP)
+    int main() {
+        return 0;
+    }
+#else
+    it's not windows desktop
+#endif
+" AWS_HAVE_WINAPI_DESKTOP)
+
+check_c_source_compiles("
+    int main() {
+#if !(defined(__x86_64__) || defined(__i386__) || defined(_M_X64) || defined(_M_IX86))
+#    error \"not intel\"
+#endif
+        return 0;
+    }
+" AWS_ARCH_INTEL)
+
+check_c_source_compiles("
+    int main() {
+#if !(defined(__aarch64__) || defined(_M_ARM64))
+#    error \"not arm64\"
+#endif
+        return 0;
+    }
+" AWS_ARCH_ARM64)
+
+check_c_source_compiles("
+    int main() {
+#if !(defined(__arm__) || defined(_M_ARM))
+#    error \"not arm\"
+#endif
+        return 0;
+    }
+" AWS_ARCH_ARM32)
+
+check_c_source_compiles("
+int main() {
+    int foo = 42, bar = 24;
+    __asm__ __volatile__(\"\":\"=r\"(foo):\"r\"(bar):\"memory\");
+}" AWS_HAVE_GCC_INLINE_ASM)
+
+check_c_source_compiles("
+#include <sys/auxv.h>
+int main() {
+#ifdef __linux__
+    getauxval(AT_HWCAP);
+    getauxval(AT_HWCAP2);
+#endif
+    return 0;
+}" AWS_HAVE_AUXV)
+
+string(REGEX MATCH "^(aarch64|arm)" ARM_CPU "${CMAKE_SYSTEM_PROCESSOR}")
+if(NOT LEGACY_COMPILER_SUPPORT OR ARM_CPU)
+    check_c_source_compiles("
+    #include <execinfo.h>
+    int main() {
+        backtrace(NULL, 0);
+        return 0;
+    }" AWS_HAVE_EXECINFO)
+endif()
+
+check_c_source_compiles("
+#include <linux/if_link.h>
+int main() {
+    return 1;
+}" AWS_HAVE_LINUX_IF_LINK_H)
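
The first probe above only establishes that `__builtin_mul_overflow` exists; as a standalone C++ illustration, the GCC/Clang builtin reports whether the product fits the destination type:

```cpp
#include <iostream>

int main()
{
    int out = 0;

    // No overflow: 1 * 2 fits in int, the builtin returns false.
    bool a = __builtin_mul_overflow(1, 2, &out);
    std::cout << a << " " << out << '\n';   // 0 2

    // Overflow: 2^30 * 8 does not fit in a 32-bit int; the builtin returns
    // true and stores the wrapped value in out.
    bool b = __builtin_mul_overflow(1 << 30, 8, &out);
    std::cout << b << '\n';                 // 1
    return 0;
}
```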
74 contrib/aws-cmake/AwsSIMD.cmake Normal file

@@ -0,0 +1,74 @@
+# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+# SPDX-License-Identifier: Apache-2.0.
+
+include(CheckCCompilerFlag)
+include(CheckIncludeFile)
+
+if (USE_CPU_EXTENSIONS)
+    if (MSVC)
+        check_c_compiler_flag("/arch:AVX2" HAVE_M_AVX2_FLAG)
+        if (HAVE_M_AVX2_FLAG)
+            set(AVX2_CFLAGS "/arch:AVX2")
+        endif()
+    else()
+        check_c_compiler_flag(-mavx2 HAVE_M_AVX2_FLAG)
+        if (HAVE_M_AVX2_FLAG)
+            set(AVX2_CFLAGS "-mavx -mavx2")
+        endif()
+    endif()
+
+
+    cmake_push_check_state()
+    set(CMAKE_REQUIRED_FLAGS "${CMAKE_REQUIRED_FLAGS} ${AVX2_CFLAGS}")
+
+    check_c_source_compiles("
+        #include <immintrin.h>
+        #include <emmintrin.h>
+        #include <string.h>
+
+        int main() {
+            __m256i vec;
+            memset(&vec, 0, sizeof(vec));
+
+            _mm256_shuffle_epi8(vec, vec);
+            _mm256_set_epi32(1,2,3,4,5,6,7,8);
+            _mm256_permutevar8x32_epi32(vec, vec);
+
+            return 0;
+        }" HAVE_AVX2_INTRINSICS)
+
+    check_c_source_compiles("
+        #include <immintrin.h>
+        #include <string.h>
+
+        int main() {
+            __m256i vec;
+            memset(&vec, 0, sizeof(vec));
+            return (int)_mm256_extract_epi64(vec, 2);
+        }" HAVE_MM256_EXTRACT_EPI64)
+
+    cmake_pop_check_state()
+endif() # USE_CPU_EXTENSIONS
+
+macro(simd_add_definition_if target definition)
+    if(${definition})
+        target_compile_definitions(${target} PRIVATE -D${definition})
+    endif(${definition})
+endmacro(simd_add_definition_if)
+
+# Configure private preprocessor definitions for SIMD-related features
+# Does not set any processor feature codegen flags
+function(simd_add_definitions target)
+    simd_add_definition_if(${target} HAVE_AVX2_INTRINSICS)
+    simd_add_definition_if(${target} HAVE_MM256_EXTRACT_EPI64)
+endfunction(simd_add_definitions)
+
+# Adds source files only if AVX2 is supported. These files will be built with
+# avx2 intrinsics enabled.
+# Usage: simd_add_source_avx2(target file1.c file2.c ...)
+function(simd_add_source_avx2 target)
+    foreach(file ${ARGN})
+        target_sources(${target} PRIVATE ${file})
+        set_source_files_properties(${file} PROPERTIES COMPILE_FLAGS "${AVX2_CFLAGS}")
+    endforeach()
+endfunction(simd_add_source_avx2)
50 contrib/aws-cmake/AwsThreadAffinity.cmake Normal file

@@ -0,0 +1,50 @@
+# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+# SPDX-License-Identifier: Apache-2.0.
+
+include(CheckSymbolExists)
+
+# Check if the platform supports setting thread affinity
+# (important for hitting full NIC entitlement on NUMA architectures)
+function(aws_set_thread_affinity_method target)
+
+    # Non-POSIX, Android, and Apple platforms do not support thread affinity.
+    if (NOT UNIX OR ANDROID OR APPLE)
+        target_compile_definitions(${target} PRIVATE
+            -DAWS_AFFINITY_METHOD=AWS_AFFINITY_METHOD_NONE)
+        return()
+    endif()
+
+    cmake_push_check_state()
+    list(APPEND CMAKE_REQUIRED_DEFINITIONS -D_GNU_SOURCE)
+    list(APPEND CMAKE_REQUIRED_LIBRARIES pthread)
+
+    set(headers "pthread.h")
+    # BSDs put nonportable pthread declarations in a separate header.
+    if(CMAKE_SYSTEM_NAME MATCHES BSD)
+        set(headers "${headers};pthread_np.h")
+    endif()
+
+    # Using pthread attrs is the preferred method, but is glibc-specific.
+    check_symbol_exists(pthread_attr_setaffinity_np "${headers}" USE_PTHREAD_ATTR_SETAFFINITY)
+    if (USE_PTHREAD_ATTR_SETAFFINITY)
+        target_compile_definitions(${target} PRIVATE
+            -DAWS_AFFINITY_METHOD=AWS_AFFINITY_METHOD_PTHREAD_ATTR)
+        return()
+    endif()
+
+    # This method is still nonportable, but is supported by musl and BSDs.
+    check_symbol_exists(pthread_setaffinity_np "${headers}" USE_PTHREAD_SETAFFINITY)
+    if (USE_PTHREAD_SETAFFINITY)
+        target_compile_definitions(${target} PRIVATE
+            -DAWS_AFFINITY_METHOD=AWS_AFFINITY_METHOD_PTHREAD)
+        return()
+    endif()
+
+    # If we got here, we expected thread affinity support but didn't find it.
+    # We still build with degraded NUMA performance, but show a warning.
+    message(WARNING "No supported method for setting thread affinity")
+    target_compile_definitions(${target} PRIVATE
+        -DAWS_AFFINITY_METHOD=AWS_AFFINITY_METHOD_NONE)
+
+    cmake_pop_check_state()
+endfunction()
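
For context, a minimal C++ sketch of the preferred, glibc-specific method this check looks for (`pthread_attr_setaffinity_np`); assumes Linux with glibc and is compiled with `-pthread`:

```cpp
#include <pthread.h>
#include <sched.h>
#include <cstdio>

void * worker(void *)
{
    std::printf("pinned worker running on CPU %d\n", sched_getcpu());
    return nullptr;
}

int main()
{
    cpu_set_t set;
    CPU_ZERO(&set);
    CPU_SET(0, &set);  // pin the new thread to CPU 0

    pthread_attr_t attr;
    pthread_attr_init(&attr);
    // The glibc-specific call probed by USE_PTHREAD_ATTR_SETAFFINITY above.
    pthread_attr_setaffinity_np(&attr, sizeof(set), &set);

    pthread_t t;
    pthread_create(&t, &attr, worker, nullptr);
    pthread_join(t, nullptr);
    pthread_attr_destroy(&attr);
    return 0;
}
```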
61 contrib/aws-cmake/AwsThreadName.cmake Normal file

@@ -0,0 +1,61 @@
+# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+# SPDX-License-Identifier: Apache-2.0.
+
+include(CheckSymbolExists)
+
+# Check how the platform supports setting thread name
+function(aws_set_thread_name_method target)
+
+    if (WINDOWS)
+        # On Windows we do a runtime check, instead of compile-time check
+        return()
+    elseif (APPLE)
+        # All Apple platforms we support have the same function, so no need for compile-time check.
+        return()
+    endif()
+
+    cmake_push_check_state()
+    list(APPEND CMAKE_REQUIRED_DEFINITIONS -D_GNU_SOURCE)
+    list(APPEND CMAKE_REQUIRED_LIBRARIES pthread)
+
+    # The start of the test program
+    set(c_source_start "
+        #define _GNU_SOURCE
+        #include <pthread.h>
+
+        #if defined(__FreeBSD__) || defined(__NETBSD__)
+        #include <pthread_np.h>
+        #endif
+
+        int main() {
+            pthread_t thread_id;
+        ")
+
+    # The end of the test program
+    set(c_source_end "}")
+
+    # pthread_setname_np() usually takes 2 args
+    check_c_source_compiles("
+        ${c_source_start}
+        pthread_setname_np(thread_id, \"asdf\");
+        ${c_source_end}"
+        PTHREAD_SETNAME_TAKES_2ARGS)
+    if (PTHREAD_SETNAME_TAKES_2ARGS)
+        target_compile_definitions(${target} PRIVATE -DAWS_PTHREAD_SETNAME_TAKES_2ARGS)
+        return()
+    endif()
+
+    # But on NetBSD it takes 3!
+    check_c_source_compiles("
+        ${c_source_start}
+        pthread_setname_np(thread_id, \"asdf\", NULL);
+        ${c_source_end}
+    " PTHREAD_SETNAME_TAKES_3ARGS)
+    if (PTHREAD_SETNAME_TAKES_3ARGS)
+        target_compile_definitions(${target} PRIVATE -DAWS_PTHREAD_SETNAME_TAKES_3ARGS)
+        return()
+    endif()
+
+    # And on many older/weirder platforms it's just not supported
+    cmake_pop_check_state()
+endfunction()
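
For context, a minimal C++ sketch of the two-argument form probed above (`AWS_PTHREAD_SETNAME_TAKES_2ARGS`); assumes Linux with glibc, where thread names are capped at 15 characters plus the terminating null byte, and is compiled with `-pthread`:

```cpp
#include <pthread.h>
#include <cstdio>

int main()
{
    // glibc's 2-argument form; NetBSD's takes a third argument instead.
    pthread_setname_np(pthread_self(), "aws-worker");

    char name[16] = {};
    pthread_getname_np(pthread_self(), name, sizeof(name));
    std::printf("thread name: %s\n", name);
    return 0;
}
```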
376 contrib/aws-cmake/CMakeLists.txt Normal file

@@ -0,0 +1,376 @@
+set(ENABLE_AWS_S3_DEFAULT OFF)
+
+if(ENABLE_LIBRARIES AND (OS_LINUX OR OS_DARWIN) AND TARGET OpenSSL::Crypto)
+    set(ENABLE_AWS_S3_DEFAULT ON)
+endif()
+
+option(ENABLE_AWS_S3 "Enable AWS S3" ${ENABLE_AWS_S3_DEFAULT})
+
+if(ENABLE_AWS_S3)
+    if(NOT TARGET OpenSSL::Crypto)
+        message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use AWS SDK without OpenSSL")
+    elseif(NOT (OS_LINUX OR OS_DARWIN))
+        message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use AWS SDK with platform ${CMAKE_SYSTEM_NAME}")
+    endif()
+endif()
+
+if(NOT ENABLE_AWS_S3)
+    message(STATUS "Not using AWS S3")
+    return()
+endif()
+
+
+# Utilities.
+include("${ClickHouse_SOURCE_DIR}/contrib/aws-cmake/AwsFeatureTests.cmake")
+include("${ClickHouse_SOURCE_DIR}/contrib/aws-cmake/AwsThreadAffinity.cmake")
+include("${ClickHouse_SOURCE_DIR}/contrib/aws-cmake/AwsThreadName.cmake")
+include("${ClickHouse_SOURCE_DIR}/contrib/aws-cmake/AwsSIMD.cmake")
+
+
+# Gather sources and options.
+set(AWS_SOURCES)
+set(AWS_PUBLIC_INCLUDES)
+set(AWS_PRIVATE_INCLUDES)
+set(AWS_PUBLIC_COMPILE_DEFS)
+set(AWS_PRIVATE_COMPILE_DEFS)
+set(AWS_PRIVATE_LIBS)
+
+if (CMAKE_BUILD_TYPE_UC STREQUAL "DEBUG")
+    list(APPEND AWS_PRIVATE_COMPILE_DEFS "-DDEBUG_BUILD")
+endif()
+
+set(ENABLE_OPENSSL_ENCRYPTION ON)
+if (ENABLE_OPENSSL_ENCRYPTION)
+    list(APPEND AWS_PRIVATE_COMPILE_DEFS "-DENABLE_OPENSSL_ENCRYPTION")
+endif()
+
+set(USE_S2N ON)
+if (USE_S2N)
+    list(APPEND AWS_PRIVATE_COMPILE_DEFS "-DUSE_S2N")
+endif()
+
+
+# Directories.
+SET(AWS_SDK_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws")
+SET(AWS_SDK_CORE_DIR "${AWS_SDK_DIR}/aws-cpp-sdk-core")
+SET(AWS_SDK_S3_DIR "${AWS_SDK_DIR}/aws-cpp-sdk-s3")
+
+SET(AWS_AUTH_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws-c-auth")
+SET(AWS_CAL_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws-c-cal")
+SET(AWS_CHECKSUMS_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws-checksums")
+SET(AWS_COMMON_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws-c-common")
+SET(AWS_COMPRESSION_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws-c-compression")
+SET(AWS_CRT_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws-crt-cpp")
+SET(AWS_EVENT_STREAM_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws-c-event-stream")
+SET(AWS_HTTP_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws-c-http")
+SET(AWS_IO_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws-c-io")
+SET(AWS_MQTT_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws-c-mqtt")
+SET(AWS_S2N_TLS_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws-s2n-tls")
+SET(AWS_S3_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws-c-s3")
+SET(AWS_SDKUTILS_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws-c-sdkutils")
+
+
+# aws-cpp-sdk-core
+file(GLOB AWS_SDK_CORE_SRC
+    "${AWS_SDK_CORE_DIR}/source/*.cpp"
+    "${AWS_SDK_CORE_DIR}/source/auth/*.cpp"
+    "${AWS_SDK_CORE_DIR}/source/auth/bearer-token-provider/*.cpp"
+    "${AWS_SDK_CORE_DIR}/source/auth/signer/*.cpp"
+    "${AWS_SDK_CORE_DIR}/source/auth/signer-provider/*.cpp"
+    "${AWS_SDK_CORE_DIR}/source/client/*.cpp"
+    "${AWS_SDK_CORE_DIR}/source/config/*.cpp"
+    "${AWS_SDK_CORE_DIR}/source/config/defaults/*.cpp"
+    "${AWS_SDK_CORE_DIR}/source/endpoint/*.cpp"
+    "${AWS_SDK_CORE_DIR}/source/endpoint/internal/*.cpp"
+    "${AWS_SDK_CORE_DIR}/source/external/cjson/*.cpp"
+    "${AWS_SDK_CORE_DIR}/source/external/tinyxml2/*.cpp"
+    "${AWS_SDK_CORE_DIR}/source/http/*.cpp"
+    "${AWS_SDK_CORE_DIR}/source/http/standard/*.cpp"
+    "${AWS_SDK_CORE_DIR}/source/internal/*.cpp"
+    "${AWS_SDK_CORE_DIR}/source/monitoring/*.cpp"
+    "${AWS_SDK_CORE_DIR}/source/utils/*.cpp"
+    "${AWS_SDK_CORE_DIR}/source/utils/base64/*.cpp"
+    "${AWS_SDK_CORE_DIR}/source/utils/crypto/*.cpp"
+    "${AWS_SDK_CORE_DIR}/source/utils/crypto/openssl/*.cpp"
+    "${AWS_SDK_CORE_DIR}/source/utils/crypto/factory/*.cpp"
+    "${AWS_SDK_CORE_DIR}/source/utils/event/*.cpp"
+    "${AWS_SDK_CORE_DIR}/source/utils/json/*.cpp"
+    "${AWS_SDK_CORE_DIR}/source/utils/logging/*.cpp"
+    "${AWS_SDK_CORE_DIR}/source/utils/memory/*.cpp"
+    "${AWS_SDK_CORE_DIR}/source/utils/memory/stl/*.cpp"
+    "${AWS_SDK_CORE_DIR}/source/utils/stream/*.cpp"
+    "${AWS_SDK_CORE_DIR}/source/utils/threading/*.cpp"
+    "${AWS_SDK_CORE_DIR}/source/utils/xml/*.cpp"
+)
+
+if(OS_LINUX OR OS_DARWIN)
+    file(GLOB AWS_SDK_CORE_NET_SRC "${AWS_SDK_CORE_DIR}/source/net/linux-shared/*.cpp")
+    file(GLOB AWS_SDK_CORE_PLATFORM_SRC "${AWS_SDK_CORE_DIR}/source/platform/linux-shared/*.cpp")
+else()
+    file(GLOB AWS_SDK_CORE_NET_SRC "${AWS_SDK_CORE_DIR}/source/net/*.cpp")
+    set(AWS_SDK_CORE_PLATFORM_SRC)
+endif()
+
+OPTION(USE_AWS_MEMORY_MANAGEMENT "Aws memory management" OFF)
+configure_file("${AWS_SDK_CORE_DIR}/include/aws/core/SDKConfig.h.in"
+    "${CMAKE_CURRENT_BINARY_DIR}/include/aws/core/SDKConfig.h" @ONLY)
+
+list(APPEND AWS_PUBLIC_COMPILE_DEFS "-DAWS_SDK_VERSION_MAJOR=1")
+list(APPEND AWS_PUBLIC_COMPILE_DEFS "-DAWS_SDK_VERSION_MINOR=10")
+list(APPEND AWS_PUBLIC_COMPILE_DEFS "-DAWS_SDK_VERSION_PATCH=36")
+
+list(APPEND AWS_SOURCES ${AWS_SDK_CORE_SRC} ${AWS_SDK_CORE_NET_SRC} ${AWS_SDK_CORE_PLATFORM_SRC})
+
+list(APPEND AWS_PUBLIC_INCLUDES
+    "${AWS_SDK_CORE_DIR}/include/"
+    "${CMAKE_CURRENT_BINARY_DIR}/include"
+)
+
+
+# aws-cpp-sdk-s3
+file(GLOB AWS_SDK_S3_SRC
+    "${AWS_SDK_S3_DIR}/source/*.cpp"
+    "${AWS_SDK_S3_DIR}/source/model/*.cpp"
+)
+
+list(APPEND AWS_SOURCES ${AWS_SDK_S3_SRC})
+list(APPEND AWS_PUBLIC_INCLUDES "${AWS_SDK_S3_DIR}/include/")
+
+
+# aws-c-auth
+file(GLOB AWS_AUTH_SRC
+    "${AWS_AUTH_DIR}/source/*.c"
+)
+
+list(APPEND AWS_SOURCES ${AWS_AUTH_SRC})
+list(APPEND AWS_PUBLIC_INCLUDES "${AWS_AUTH_DIR}/include/")
+
+
+# aws-c-cal
+file(GLOB AWS_CAL_SRC
+    "${AWS_CAL_DIR}/source/*.c"
+)
+
+if (ENABLE_OPENSSL_ENCRYPTION)
+    file(GLOB AWS_CAL_OS_SRC
+        "${AWS_CAL_DIR}/source/unix/*.c"
+    )
+    list(APPEND AWS_PRIVATE_LIBS OpenSSL::Crypto)
+endif()
+
+list(APPEND AWS_SOURCES ${AWS_CAL_SRC} ${AWS_CAL_OS_SRC})
+list(APPEND AWS_PRIVATE_INCLUDES "${AWS_CAL_DIR}/include/")
+
+
+# aws-c-event-stream
+file(GLOB AWS_EVENT_STREAM_SRC
+    "${AWS_EVENT_STREAM_DIR}/source/*.c"
+)
+
+list(APPEND AWS_SOURCES ${AWS_EVENT_STREAM_SRC})
+list(APPEND AWS_PRIVATE_INCLUDES "${AWS_EVENT_STREAM_DIR}/include/")
+
+
+# aws-c-common
+file(GLOB AWS_COMMON_SRC
+    "${AWS_COMMON_DIR}/source/*.c"
+    "${AWS_COMMON_DIR}/source/external/*.c"
+    "${AWS_COMMON_DIR}/source/posix/*.c"
+)
+
+file(GLOB AWS_COMMON_ARCH_SRC
+    "${AWS_COMMON_DIR}/source/arch/generic/*.c"
+)
+
+if (AWS_ARCH_INTEL)
+    file(GLOB AWS_COMMON_ARCH_SRC
+        "${AWS_COMMON_DIR}/source/arch/intel/cpuid.c"
+        "${AWS_COMMON_DIR}/source/arch/intel/asm/*.c"
+    )
+elseif (AWS_ARCH_ARM64 OR AWS_ARCH_ARM32)
+    if (AWS_HAVE_AUXV)
+        file(GLOB AWS_COMMON_ARCH_SRC
+            "${AWS_COMMON_DIR}/source/arch/arm/asm/*.c"
+        )
+    endif()
+endif()
+
+set(AWS_COMMON_AVX2_SRC)
+if (HAVE_AVX2_INTRINSICS)
+    list(APPEND AWS_PRIVATE_COMPILE_DEFS "-DUSE_SIMD_ENCODING")
+    set(AWS_COMMON_AVX2_SRC "${AWS_COMMON_DIR}/source/arch/intel/encoding_avx2.c")
+    set_source_files_properties(${AWS_COMMON_AVX2_SRC} PROPERTIES COMPILE_FLAGS "${AVX2_CFLAGS}")
+endif()
+
+configure_file("${AWS_COMMON_DIR}/include/aws/common/config.h.in"
+    "${CMAKE_CURRENT_BINARY_DIR}/include/aws/common/config.h" @ONLY)
+
+list(APPEND AWS_SOURCES ${AWS_COMMON_SRC} ${AWS_COMMON_ARCH_SRC} ${AWS_COMMON_AVX2_SRC})
+
+list(APPEND AWS_PUBLIC_INCLUDES
+    "${AWS_COMMON_DIR}/include/"
+    "${CMAKE_CURRENT_BINARY_DIR}/include"
+)
+
+
+# aws-checksums
+file(GLOB AWS_CHECKSUMS_SRC
+    "${AWS_CHECKSUMS_DIR}/source/*.c"
+    "${AWS_CHECKSUMS_DIR}/source/intel/*.c"
+    "${AWS_CHECKSUMS_DIR}/source/intel/asm/*.c"
+    "${AWS_CHECKSUMS_DIR}/source/arm/*.c"
+)
+
+if(AWS_ARCH_INTEL AND AWS_HAVE_GCC_INLINE_ASM)
+    file(GLOB AWS_CHECKSUMS_ARCH_SRC
+        "${AWS_CHECKSUMS_DIR}/source/intel/asm/*.c"
+    )
+endif()
+
+if (AWS_ARCH_ARM64)
+    file(GLOB AWS_CHECKSUMS_ARCH_SRC
+        "${AWS_CHECKSUMS_DIR}/source/arm/*.c"
+    )
+    set_source_files_properties("${AWS_CHECKSUMS_DIR}/source/arm/crc32c_arm.c" PROPERTIES COMPILE_FLAGS -march=armv8-a+crc)
+elseif (AWS_ARCH_ARM32)
+    if (AWS_ARM32_CRC)
+        file(GLOB AWS_CHECKSUMS_ARCH_SRC
+            "${AWS_CHECKSUMS_DIR}/source/arm/*.c"
+            "${AWS_CHECKSUMS_DIR}/source/arm/asm/*.c"
+        )
+        set_source_files_properties(source/arm/crc32c_arm.c PROPERTIES COMPILE_FLAGS -march=armv8-a+crc)
+    endif()
+endif()
+
+list(APPEND AWS_SOURCES ${AWS_CHECKSUMS_SRC} ${AWS_CHECKSUMS_ARCH_SRC})
+list(APPEND AWS_PRIVATE_INCLUDES "${AWS_CHECKSUMS_DIR}/include/")
+
+
+# aws-c-io
+file(GLOB AWS_IO_SRC
+    "${AWS_IO_DIR}/source/*.c"
+)
+
+if (OS_LINUX)
+    file(GLOB AWS_IO_OS_SRC
+        "${AWS_IO_DIR}/source/linux/*.c"
+        "${AWS_IO_DIR}/source/posix/*.c"
+    )
+elseif (OS_DARWIN)
+    file(GLOB AWS_IO_OS_SRC
+        "${AWS_IO_DIR}/source/bsd/*.c"
+        "${AWS_IO_DIR}/source/posix/*.c"
+    )
+endif()
+
+set(AWS_IO_TLS_SRC)
+if (USE_S2N)
+    file(GLOB AWS_IO_TLS_SRC
+        "${AWS_IO_DIR}/source/s2n/*.c"
+    )
+endif()
+
+list(APPEND AWS_SOURCES ${AWS_IO_SRC} ${AWS_IO_OS_SRC} ${AWS_IO_TLS_SRC})
+list(APPEND AWS_PUBLIC_INCLUDES "${AWS_IO_DIR}/include/")
+
+
+# aws-s2n-tls
+if (USE_S2N)
+    file(GLOB AWS_S2N_TLS_SRC
+        "${AWS_S2N_TLS_DIR}/crypto/*.c"
+        "${AWS_S2N_TLS_DIR}/error/*.c"
+        "${AWS_S2N_TLS_DIR}/stuffer/*.c"
+        "${AWS_S2N_TLS_DIR}/pq-crypto/*.c"
+        "${AWS_S2N_TLS_DIR}/pq-crypto/kyber_r3/*.c"
+        "${AWS_S2N_TLS_DIR}/tls/*.c"
+        "${AWS_S2N_TLS_DIR}/tls/extensions/*.c"
+        "${AWS_S2N_TLS_DIR}/utils/*.c"
+    )
+
+    list(APPEND AWS_SOURCES ${AWS_S2N_TLS_SRC})
+
+    list(APPEND AWS_PRIVATE_INCLUDES
+        "${AWS_S2N_TLS_DIR}/"
+        "${AWS_S2N_TLS_DIR}/api/"
+    )
+endif()
+
+
+# aws-crt-cpp
+file(GLOB AWS_CRT_SRC
+    "${AWS_CRT_DIR}/source/*.cpp"
+    "${AWS_CRT_DIR}/source/auth/*.cpp"
+    "${AWS_CRT_DIR}/source/crypto/*.cpp"
+    "${AWS_CRT_DIR}/source/endpoints/*.cpp"
+    "${AWS_CRT_DIR}/source/external/*.cpp"
+    "${AWS_CRT_DIR}/source/http/*.cpp"
+    "${AWS_CRT_DIR}/source/io/*.cpp"
+)
+
+list(APPEND AWS_SOURCES ${AWS_CRT_SRC})
+list(APPEND AWS_PUBLIC_INCLUDES "${AWS_CRT_DIR}/include/")
+
+
+# aws-c-mqtt
+file(GLOB AWS_MQTT_SRC
+    "${AWS_MQTT_DIR}/source/*.c"
+)
+
+list(APPEND AWS_SOURCES ${AWS_MQTT_SRC})
+list(APPEND AWS_PUBLIC_INCLUDES "${AWS_MQTT_DIR}/include/")
+
+
+# aws-c-http
+file(GLOB AWS_HTTP_SRC
+    "${AWS_HTTP_DIR}/source/*.c"
+)
+
+list(APPEND AWS_SOURCES ${AWS_HTTP_SRC})
+list(APPEND AWS_PRIVATE_INCLUDES "${AWS_HTTP_DIR}/include/")
+
+
+# aws-c-compression
+file(GLOB AWS_COMPRESSION_SRC
+    "${AWS_COMPRESSION_DIR}/source/*.c"
+)
+
+list(APPEND AWS_SOURCES ${AWS_COMPRESSION_SRC})
+list(APPEND AWS_PRIVATE_INCLUDES "${AWS_COMPRESSION_DIR}/include/")
+
+
+# aws-c-s3
+file(GLOB AWS_S3_SRC
+    "${AWS_S3_DIR}/source/*.c"
+)
+
+list(APPEND AWS_SOURCES ${AWS_S3_SRC})
+list(APPEND AWS_PRIVATE_INCLUDES "${AWS_S3_DIR}/include/")
+
+
+# aws-c-sdkutils
+file(GLOB AWS_SDKUTILS_SRC
+    "${AWS_SDKUTILS_DIR}/source/*.c"
+)
+
+list(APPEND AWS_SOURCES ${AWS_SDKUTILS_SRC})
+list(APPEND AWS_PUBLIC_INCLUDES "${AWS_SDKUTILS_DIR}/include/")
+
+
+# Add library.
+add_library(_aws ${AWS_SOURCES})
+
+target_include_directories(_aws SYSTEM BEFORE PUBLIC ${AWS_PUBLIC_INCLUDES})
+target_include_directories(_aws SYSTEM BEFORE PRIVATE ${AWS_PRIVATE_INCLUDES})
+target_compile_definitions(_aws PUBLIC ${AWS_PUBLIC_COMPILE_DEFS})
+target_compile_definitions(_aws PRIVATE ${AWS_PRIVATE_COMPILE_DEFS})
+target_link_libraries(_aws PRIVATE ${AWS_PRIVATE_LIBS})
+
+aws_set_thread_affinity_method(_aws)
+aws_set_thread_name_method(_aws)
+
+# The library is large - avoid bloat.
+if (OMIT_HEAVY_DEBUG_SYMBOLS)
+    target_compile_options (_aws PRIVATE -g0)
+endif()
+
+add_library(ch_contrib::aws_s3 ALIAS _aws)
1 contrib/aws-crt-cpp vendored Submodule
@@ -0,0 +1 @@
+Subproject commit ec0bea288f451d884c0d80d534bc5c66241c39a4

1 contrib/aws-s2n-tls vendored Submodule
@@ -0,0 +1 @@
+Subproject commit 0f1ba9e5c4a67cb3898de0c0b4f911d4194dc8de
@@ -1,122 +0,0 @@
-if(NOT OS_FREEBSD)
-    option(ENABLE_S3 "Enable S3" ${ENABLE_LIBRARIES})
-elseif(ENABLE_S3)
-    message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use S3 on FreeBSD")
-endif()
-
-if(NOT ENABLE_S3)
-    message(STATUS "Not using S3")
-    return()
-endif()
-
-SET(AWS_S3_LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws/aws-cpp-sdk-s3")
-SET(AWS_CORE_LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws/aws-cpp-sdk-core")
-SET(AWS_CHECKSUMS_LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws-checksums")
-SET(AWS_COMMON_LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws-c-common")
-SET(AWS_EVENT_STREAM_LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws-c-event-stream")
-
-OPTION(USE_AWS_MEMORY_MANAGEMENT "Aws memory management" OFF)
-configure_file("${AWS_CORE_LIBRARY_DIR}/include/aws/core/SDKConfig.h.in"
-    "${CMAKE_CURRENT_BINARY_DIR}/include/aws/core/SDKConfig.h" @ONLY)
-
-configure_file("${AWS_COMMON_LIBRARY_DIR}/include/aws/common/config.h.in"
-    "${CMAKE_CURRENT_BINARY_DIR}/include/aws/common/config.h" @ONLY)
-
-
-file(GLOB AWS_CORE_SOURCES
-    "${AWS_CORE_LIBRARY_DIR}/source/*.cpp"
-    "${AWS_CORE_LIBRARY_DIR}/source/auth/*.cpp"
-    "${AWS_CORE_LIBRARY_DIR}/source/client/*.cpp"
-    "${AWS_CORE_LIBRARY_DIR}/source/http/*.cpp"
-    "${AWS_CORE_LIBRARY_DIR}/source/http/standard/*.cpp"
-    "${AWS_CORE_LIBRARY_DIR}/source/config/*.cpp"
-    "${AWS_CORE_LIBRARY_DIR}/source/external/cjson/*.cpp"
-    "${AWS_CORE_LIBRARY_DIR}/source/external/tinyxml2/*.cpp"
-    "${AWS_CORE_LIBRARY_DIR}/source/internal/*.cpp"
-    "${AWS_CORE_LIBRARY_DIR}/source/monitoring/*.cpp"
-    "${AWS_CORE_LIBRARY_DIR}/source/net/*.cpp"
-    "${AWS_CORE_LIBRARY_DIR}/source/linux-shared/*.cpp"
-    "${AWS_CORE_LIBRARY_DIR}/source/platform/linux-shared/*.cpp"
-    "${AWS_CORE_LIBRARY_DIR}/source/utils/*.cpp"
-    "${AWS_CORE_LIBRARY_DIR}/source/utils/base64/*.cpp"
-    "${AWS_CORE_LIBRARY_DIR}/source/utils/event/*.cpp"
-    "${AWS_CORE_LIBRARY_DIR}/source/utils/crypto/*.cpp"
-    "${AWS_CORE_LIBRARY_DIR}/source/utils/crypto/openssl/*.cpp"
-    "${AWS_CORE_LIBRARY_DIR}/source/utils/crypto/factory/*.cpp"
-    "${AWS_CORE_LIBRARY_DIR}/source/utils/json/*.cpp"
-    "${AWS_CORE_LIBRARY_DIR}/source/utils/logging/*.cpp"
-    "${AWS_CORE_LIBRARY_DIR}/source/utils/memory/*.cpp"
-    "${AWS_CORE_LIBRARY_DIR}/source/utils/memory/stl/*.cpp"
-    "${AWS_CORE_LIBRARY_DIR}/source/utils/stream/*.cpp"
-    "${AWS_CORE_LIBRARY_DIR}/source/utils/threading/*.cpp"
-    "${AWS_CORE_LIBRARY_DIR}/source/utils/xml/*.cpp"
-)
-
-file(GLOB AWS_S3_SOURCES
-    "${AWS_S3_LIBRARY_DIR}/source/*.cpp"
-)
-
-file(GLOB AWS_S3_MODEL_SOURCES
-    "${AWS_S3_LIBRARY_DIR}/source/model/*.cpp"
-)
-
-file(GLOB AWS_EVENT_STREAM_SOURCES
-    "${AWS_EVENT_STREAM_LIBRARY_DIR}/source/*.c"
-)
-
-file(GLOB AWS_COMMON_SOURCES
-    "${AWS_COMMON_LIBRARY_DIR}/source/*.c"
-    "${AWS_COMMON_LIBRARY_DIR}/source/posix/*.c"
-)
-
-file(GLOB AWS_CHECKSUMS_SOURCES
-    "${AWS_CHECKSUMS_LIBRARY_DIR}/source/*.c"
-    "${AWS_CHECKSUMS_LIBRARY_DIR}/source/intel/*.c"
-    "${AWS_CHECKSUMS_LIBRARY_DIR}/source/arm/*.c"
-)
-
-file(GLOB S3_UNIFIED_SRC
-    ${AWS_EVENT_STREAM_SOURCES}
-    ${AWS_COMMON_SOURCES}
-    ${AWS_S3_SOURCES}
-    ${AWS_S3_MODEL_SOURCES}
-    ${AWS_CORE_SOURCES}
-)
-
-set(S3_INCLUDES
-    "${AWS_COMMON_LIBRARY_DIR}/include/"
-    "${AWS_EVENT_STREAM_LIBRARY_DIR}/include/"
-    "${AWS_S3_LIBRARY_DIR}/include/"
-    "${AWS_CORE_LIBRARY_DIR}/include/"
-    "${CMAKE_CURRENT_BINARY_DIR}/include/"
-)
-
-add_library(_aws_s3_checksums ${AWS_CHECKSUMS_SOURCES})
-target_include_directories(_aws_s3_checksums SYSTEM PUBLIC "${AWS_CHECKSUMS_LIBRARY_DIR}/include/")
-if(CMAKE_BUILD_TYPE_UC STREQUAL "DEBUG")
-    target_compile_definitions(_aws_s3_checksums PRIVATE "-DDEBUG_BUILD")
-endif()
-set_target_properties(_aws_s3_checksums PROPERTIES LINKER_LANGUAGE C)
-set_property(TARGET _aws_s3_checksums PROPERTY C_STANDARD 99)
-
-add_library(_aws_s3 ${S3_UNIFIED_SRC})
-
-target_compile_definitions(_aws_s3 PUBLIC "AWS_SDK_VERSION_MAJOR=1")
-target_compile_definitions(_aws_s3 PUBLIC "AWS_SDK_VERSION_MINOR=7")
-target_compile_definitions(_aws_s3 PUBLIC "AWS_SDK_VERSION_PATCH=231")
-target_include_directories(_aws_s3 SYSTEM BEFORE PUBLIC ${S3_INCLUDES})
-
-if (TARGET OpenSSL::SSL)
-    target_compile_definitions(_aws_s3 PUBLIC -DENABLE_OPENSSL_ENCRYPTION)
-    target_link_libraries(_aws_s3 PRIVATE OpenSSL::Crypto OpenSSL::SSL)
-endif()
-
-target_link_libraries(_aws_s3 PRIVATE _aws_s3_checksums)
-
-# The library is large - avoid bloat.
-if (OMIT_HEAVY_DEBUG_SYMBOLS)
-    target_compile_options (_aws_s3 PRIVATE -g0)
-    target_compile_options (_aws_s3_checksums PRIVATE -g0)
-endif()
-
-add_library(ch_contrib::aws_s3 ALIAS _aws_s3)
2 contrib/azure vendored
@@ -1 +1 @@
-Subproject commit ef75afc075fc71fbcd8fe28dcda3794ae265fd1c
+Subproject commit ea8c3044f43f5afa7016d2d580ed201f495d7e94

@@ -139,13 +139,6 @@ if(NOT OPENSSL_NO_ASM)
     endif()
 endif()

-if(BUILD_SHARED_LIBS)
-    add_definitions(-DBORINGSSL_SHARED_LIBRARY)
-    # Enable position-independent code globally. This is needed because
-    # some library targets are OBJECT libraries.
-    set(CMAKE_POSITION_INDEPENDENT_CODE TRUE)
-endif()
-
 set(
     CRYPTO_ios_aarch64_SOURCES

@@ -63,13 +63,8 @@ SET(SRCS
     "${LIBRARY_DIR}/src/lib/windows_port.c"
 )

-if (USE_STATIC_LIBRARIES)
-    add_library(_c-ares STATIC ${SRCS})
-    target_compile_definitions(_c-ares PUBLIC CARES_STATICLIB)
-else()
-    add_library(_c-ares SHARED ${SRCS})
-    target_compile_definitions(_c-ares PUBLIC CARES_BUILDING_LIBRARY)
-endif()
+add_library(_c-ares STATIC ${SRCS})
+target_compile_definitions(_c-ares PUBLIC CARES_STATICLIB)

 target_compile_definitions(_c-ares PRIVATE HAVE_CONFIG_H=1)

2 contrib/cctz vendored
@@ -1 +1 @@
-Subproject commit 5c8528fb35e89ee0b3a7157490423fba0d4dd7b5
+Subproject commit 7c78edd52b4d65acc103c2f195818ffcabe6fe0d

1 contrib/crc32-s390x vendored Submodule
@@ -0,0 +1 @@
+Subproject commit 30980583bf9ed3fa193abb83a1849705ff457f70

27 contrib/crc32-s390x-cmake/CMakeLists.txt Normal file
@@ -0,0 +1,27 @@
+if(ARCH_S390X)
+    option (ENABLE_CRC32_S390X "Enable crc32 on s390x platform" ON)
+endif()
+
+if (NOT ENABLE_CRC32_S390X)
+    return()
+endif()
+
+set(CRC32_S390X_SOURCE_DIR ${ClickHouse_SOURCE_DIR}/contrib/crc32-s390x)
+set(CRC32_S390X_INCLUDE_DIR ${ClickHouse_SOURCE_DIR}/contrib/crc32-s390x)
+
+set(CRC32_SRCS
+    "${CRC32_S390X_SOURCE_DIR}/crc32-s390x.c"
+    "${CRC32_S390X_SOURCE_DIR}/crc32be-vx.S"
+    "${CRC32_S390X_SOURCE_DIR}/crc32le-vx.S"
+)
+
+set(CRC32_HDRS
+    "${CRC32_S390X_INCLUDE_DIR}/crc32-s390x.h"
+)
+
+add_library(_crc32_s390x ${CRC32_SRCS} ${CRC32_HDRS})
+
+target_include_directories(_crc32_s390x SYSTEM PUBLIC "${CRC32_S390X_INCLUDE_DIR}")
+target_compile_definitions(_crc32_s390x PUBLIC)
+
+add_library(ch_contrib::crc32_s390x ALIAS _crc32_s390x)

2 contrib/googletest vendored
@@ -1 +1 @@
-Subproject commit e7e591764baba0a0c3c9ad0014430e7a27331d16
+Subproject commit 71140c3ca7a87bb1b5b9c9f1500fea8858cce344
@@ -43,7 +43,10 @@ set_target_properties(unwind PROPERTIES FOLDER "contrib/libunwind-cmake")

 target_include_directories(unwind SYSTEM BEFORE PUBLIC $<BUILD_INTERFACE:${LIBUNWIND_SOURCE_DIR}/include>)
 target_compile_definitions(unwind PRIVATE -D_LIBUNWIND_NO_HEAP=1 -D_DEBUG -D_LIBUNWIND_IS_NATIVE_ONLY)
-target_compile_options(unwind PRIVATE -fno-exceptions -funwind-tables -fno-sanitize=all $<$<COMPILE_LANGUAGE:CXX>:-nostdinc++ -fno-rtti>)
+
+# We should enable optimizations (otherwise it will be too slow in debug)
+# and disable sanitizers (otherwise infinite loop may happen)
+target_compile_options(unwind PRIVATE -O3 -fno-exceptions -funwind-tables -fno-sanitize=all $<$<COMPILE_LANGUAGE:CXX>:-nostdinc++ -fno-rtti>)

 check_c_compiler_flag(-Wunused-but-set-variable HAVE_WARNING_UNUSED_BUT_SET_VARIABLE)
 if (HAVE_WARNING_UNUSED_BUT_SET_VARIABLE)
@@ -136,11 +136,6 @@ add_library(ch_contrib::uv ALIAS _uv)
 target_compile_definitions(_uv PRIVATE ${uv_defines})
 target_include_directories(_uv SYSTEM PUBLIC ${SOURCE_DIR}/include PRIVATE ${SOURCE_DIR}/src)
 target_link_libraries(_uv ${uv_libraries})
-if (NOT USE_STATIC_LIBRARIES)
-    target_compile_definitions(_uv
-        INTERFACE USING_UV_SHARED=1
-        PRIVATE BUILDING_UV_SHARED=1)
-endif()

 if(UNIX)
     # Now for some gibbering horrors from beyond the stars...

@@ -6,8 +6,6 @@ endif()

 option (ENABLE_EMBEDDED_COMPILER "Enable support for 'compile_expressions' option for query execution" ${ENABLE_EMBEDDED_COMPILER_DEFAULT})

-# If USE_STATIC_LIBRARIES=0 was passed to CMake, we'll still build LLVM statically to keep complexity minimal.
-
 if (NOT ENABLE_EMBEDDED_COMPILER)
     message(STATUS "Not using LLVM")
     return()
@@ -1,4 +1,4 @@
-if (NOT OS_FREEBSD AND NOT SPLIT_SHARED_LIBRARIES AND NOT (OS_DARWIN AND COMPILER_CLANG))
+if (NOT OS_FREEBSD AND NOT (OS_DARWIN AND COMPILER_CLANG))
     option (ENABLE_SENTRY "Enable Sentry" ${ENABLE_LIBRARIES})
 else()
     option (ENABLE_SENTRY "Enable Sentry" OFF)
@@ -51,11 +51,7 @@ endif()

 add_library(_sentry ${SRCS})

-if(BUILD_SHARED_LIBS)
-    target_compile_definitions(_sentry PRIVATE SENTRY_BUILD_SHARED)
-else()
-    target_compile_definitions(_sentry PUBLIC SENTRY_BUILD_STATIC)
-endif()
+target_compile_definitions(_sentry PUBLIC SENTRY_BUILD_STATIC)

 target_link_libraries(_sentry PRIVATE ch_contrib::curl pthread)
 target_include_directories(_sentry PUBLIC "${SRC_DIR}/include" PRIVATE "${SRC_DIR}/src")
2 contrib/sqlite-amalgamation vendored
@@ -1 +1 @@
-Subproject commit 9818baa5d027ffb26d57f810dc4c597d4946781c
+Subproject commit 400ad7152a0c7ee07756d96ab4f6a8f6d1080916

2 contrib/sysroot vendored
@@ -1 +1 @@
-Subproject commit e9fb375d0a1e5ebfd74c043f088f2342552103f8
+Subproject commit f0081b2649b94837855f3bc7d05ef326b100bad8
@@ -2,7 +2,6 @@
     "docker/packager/binary": {
         "name": "clickhouse/binary-builder",
         "dependent": [
-            "docker/test/split_build_smoke_test",
             "docker/test/codebrowser"
         ]
     },
@@ -55,10 +54,6 @@
         "name": "clickhouse/stress-test",
         "dependent": []
     },
-    "docker/test/split_build_smoke_test": {
-        "name": "clickhouse/split-build-smoke-test",
-        "dependent": []
-    },
     "docker/test/codebrowser": {
         "name": "clickhouse/codebrowser",
         "dependent": []
@@ -68,10 +63,6 @@
         "name": "clickhouse/integration-tests-runner",
         "dependent": []
     },
-    "docker/test/testflows/runner": {
-        "name": "clickhouse/testflows-runner",
-        "dependent": []
-    },
     "docker/test/fasttest": {
         "name": "clickhouse/fasttest",
         "dependent": []
@@ -22,7 +22,8 @@ RUN apt-get update && \
         build-essential \
         libc6 \
         libc6-dev \
-        libc6-dev-arm64-cross && \
+        libc6-dev-arm64-cross \
+        zstd && \
     apt-get clean

 ENV CC=clang-${LLVM_VERSION}

@@ -107,8 +107,6 @@ fi
 mv ./programs/clickhouse* /output
 [ -x ./programs/self-extracting/clickhouse ] && mv ./programs/self-extracting/clickhouse /output
 mv ./src/unit_tests_dbms /output ||: # may not exist for some binary builds
-find . -name '*.so' -print -exec mv '{}' /output \;
-find . -name '*.so.*' -print -exec mv '{}' /output \;

 prepare_combined_output () {
     local OUTPUT
@@ -161,23 +159,23 @@ then
     git -C "$PERF_OUTPUT"/ch log -5
     (
         cd "$PERF_OUTPUT"/..
-        tar -cv -I pigz -f /output/performance.tgz output
+        tar -cv --zstd -f /output/performance.tar.zst output
     )
 fi

-# May be set for split build or for performance test.
+# May be set for performance test.
 if [ "" != "$COMBINED_OUTPUT" ]
 then
     prepare_combined_output /output
-    tar -cv -I pigz -f "$COMBINED_OUTPUT.tgz" /output
+    tar -cv --zstd -f "$COMBINED_OUTPUT.tar.zst" /output
     rm -r /output/*
-    mv "$COMBINED_OUTPUT.tgz" /output
+    mv "$COMBINED_OUTPUT.tar.zst" /output
 fi

 if [ "coverity" == "$COMBINED_OUTPUT" ]
 then
-    tar -cv -I pigz -f "coverity-scan.tgz" cov-int
-    mv "coverity-scan.tgz" /output
+    tar -cv --zstd -f "coverity-scan.tar.zst" cov-int
+    mv "coverity-scan.tar.zst" /output
 fi

 ccache_status
@ -100,12 +100,11 @@ def run_docker_image_with_env(
subprocess.check_call(cmd, shell=True)


def is_release_build(build_type, package_type, sanitizer, shared_libraries):
def is_release_build(build_type, package_type, sanitizer):
return (
build_type == ""
and package_type == "deb"
and sanitizer == ""
and not shared_libraries
)


@ -116,7 +115,6 @@ def parse_env_variables(
package_type,
cache,
distcc_hosts,
shared_libraries,
clang_tidy,
version,
author,
@ -218,7 +216,7 @@ def parse_env_variables(
cmake_flags.append("-DCMAKE_INSTALL_PREFIX=/usr")
cmake_flags.append("-DCMAKE_INSTALL_SYSCONFDIR=/etc")
cmake_flags.append("-DCMAKE_INSTALL_LOCALSTATEDIR=/var")
if is_release_build(build_type, package_type, sanitizer, shared_libraries):
if is_release_build(build_type, package_type, sanitizer):
cmake_flags.append("-DSPLIT_DEBUG_SYMBOLS=ON")
result.append("WITH_PERFORMANCE=1")
if is_cross_arm:
@ -231,12 +229,10 @@ def parse_env_variables(
cmake_flags.append(f"-DCMAKE_C_COMPILER={cc}")
cmake_flags.append(f"-DCMAKE_CXX_COMPILER={cxx}")

# Create combined output archive for shared library build and for performance tests.
# Create combined output archive for performance tests.
if package_type == "coverity":
result.append("COMBINED_OUTPUT=coverity")
result.append('COVERITY_TOKEN="$COVERITY_TOKEN"')
elif shared_libraries:
result.append("COMBINED_OUTPUT=shared_build")

if sanitizer:
result.append(f"SANITIZER={sanitizer}")
@ -285,15 +281,6 @@ def parse_env_variables(
result.append("BINARY_OUTPUT=tests")
cmake_flags.append("-DENABLE_TESTS=1")

if shared_libraries:
cmake_flags.append("-DUSE_STATIC_LIBRARIES=0 -DSPLIT_SHARED_LIBRARIES=1")
# We can't always build utils because it requires too much space, but
# we have to build them at least in some way in CI. The shared library
# build is probably the least heavy disk-wise.
cmake_flags.append("-DENABLE_UTILS=1")
# utils are not included into clickhouse-bundle, so build everything
build_target = "all"

if clang_tidy:
cmake_flags.append("-DENABLE_CLANG_TIDY=1")
cmake_flags.append("-DENABLE_TESTS=1")
@ -371,7 +358,6 @@ if __name__ == "__main__":
default="",
)

parser.add_argument("--shared-libraries", action="store_true")
parser.add_argument("--clang-tidy", action="store_true")
parser.add_argument("--cache", choices=("ccache", "distcc", ""), default="")
parser.add_argument(
@ -424,7 +410,6 @@ if __name__ == "__main__":
args.package_type,
args.cache,
args.distcc_hosts,
args.shared_libraries,
args.clang_tidy,
args.version,
args.author,
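After this change --shared-libraries is gone from the packager CLI and from is_release_build(). A hypothetical invocation with surviving flags; only --cache and --clang-tidy are visible in the hunks above, and the script path is an assumption:

    # Hypothetical: the old --shared-libraries flag would now be rejected by argparse
    python3 docker/packager/packager.py --cache ccache --clang-tidy
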
@ -33,7 +33,7 @@ RUN arch=${TARGETARCH:-amd64} \
# lts / testing / prestable / etc
ARG REPO_CHANNEL="stable"
ARG REPOSITORY="https://packages.clickhouse.com/tgz/${REPO_CHANNEL}"
ARG VERSION="22.12.1.1752"
ARG VERSION="22.12.3.5"
ARG PACKAGES="clickhouse-client clickhouse-server clickhouse-common-static"

# user/group precreated explicitly with fixed uid/gid on purpose.

@ -21,7 +21,7 @@ RUN sed -i "s|http://archive.ubuntu.com|${apt_archive}|g" /etc/apt/sources.list

ARG REPO_CHANNEL="stable"
ARG REPOSITORY="deb https://packages.clickhouse.com/deb ${REPO_CHANNEL} main"
ARG VERSION="22.12.1.1752"
ARG VERSION="22.12.3.5"
ARG PACKAGES="clickhouse-client clickhouse-server clickhouse-common-static"

# set non-empty deb_location_url url to create a docker image
@ -80,7 +80,7 @@ do
done

# if clickhouse user is defined - create it (user "default" already exists out of box)
if [ -n "$CLICKHOUSE_USER" ] && [ "$CLICKHOUSE_USER" != "default" ] || [ -n "$CLICKHOUSE_PASSWORD" ]; then
if [ -n "$CLICKHOUSE_USER" ] && [ "$CLICKHOUSE_USER" != "default" ] || [ -n "$CLICKHOUSE_PASSWORD" ] || [ "$CLICKHOUSE_ACCESS_MANAGEMENT" != "0" ]; then
echo "$0: create new user '$CLICKHOUSE_USER' instead 'default'"
cat <<EOT > /etc/clickhouse-server/users.d/default-user.xml
<clickhouse>
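The widened condition means the entrypoint now also generates users.d/default-user.xml when only CLICKHOUSE_ACCESS_MANAGEMENT is set. A sketch of a container start exercising the new branch (image tag illustrative):

    # Any of these env vars now triggers generation of default-user.xml
    docker run -d \
      -e CLICKHOUSE_USER=app \
      -e CLICKHOUSE_PASSWORD=secret \
      -e CLICKHOUSE_ACCESS_MANAGEMENT=1 \
      clickhouse/clickhouse-server:22.12
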
@ -120,8 +120,8 @@ if [ -n "$(ls /docker-entrypoint-initdb.d/)" ] || [ -n "$CLICKHOUSE_DB" ]; then
pid="$!"

# check if clickhouse is ready to accept connections
# will try to send ping clickhouse via http_port (max 12 retries by default, with 1 sec timeout and 1 sec delay between retries)
tries=${CLICKHOUSE_INIT_TIMEOUT:-12}
# will try to send ping clickhouse via http_port (max 1000 retries by default, with 1 sec timeout and 1 sec delay between retries)
tries=${CLICKHOUSE_INIT_TIMEOUT:-1000}
while ! wget --spider --no-check-certificate -T 1 -q "$URL" 2>/dev/null; do
if [ "$tries" -le "0" ]; then
echo >&2 'ClickHouse init process failed.'
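The default retry budget for the init ping loop grows from 12 to 1000 one-second attempts, and CLICKHOUSE_INIT_TIMEOUT still overrides it. A sketch of capping it explicitly (image tag illustrative):

    # Wait at most ~60s for the server to accept connections before running init scripts
    docker run -d \
      -e CLICKHOUSE_INIT_TIMEOUT=60 \
      -v "$PWD/init":/docker-entrypoint-initdb.d \
      clickhouse/clickhouse-server:22.12
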
@ -9,6 +9,8 @@ RUN apt-get update \
netbase \
perl \
pv \
ripgrep \
zstd \
--yes --no-install-recommends

# Sanitizer options for services (clickhouse-server)

@ -17,6 +17,7 @@ RUN apt-get update \
python3-termcolor \
unixodbc \
pv \
zstd \
--yes --no-install-recommends

# Install CMake 3.20+ for Rust compilation
@ -188,7 +188,7 @@ function build
cp programs/clickhouse "$FASTTEST_OUTPUT/clickhouse"

strip programs/clickhouse -o "$FASTTEST_OUTPUT/clickhouse-stripped"
gzip "$FASTTEST_OUTPUT/clickhouse-stripped"
zstd --threads=0 "$FASTTEST_OUTPUT/clickhouse-stripped"
fi
ccache --show-stats ||:
ccache --evict-older-than 1d ||:
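For the single stripped binary the script now calls zstd directly; --threads=0 uses all available cores. The compress/decompress pair looks like this (generic zstd CLI):

    # Produces clickhouse-stripped.zst next to the input file
    zstd --threads=0 "$FASTTEST_OUTPUT/clickhouse-stripped"
    # Consumer side
    zstd -d clickhouse-stripped.zst -o clickhouse-stripped
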
@ -2,6 +2,14 @@
<profiles>
<default>
<max_execution_time>10</max_execution_time>
<max_memory_usage>10G</max_memory_usage>

<!--
Otherwise we will get the TOO_MANY_SIMULTANEOUS_QUERIES errors,
they are ok, but complicate debugging.
-->
<table_function_remote_max_addresses>200</table_function_remote_max_addresses>

<!--
Don't let the fuzzer change this setting (I've actually seen it
do this before).
@ -14,6 +22,15 @@
<max_memory_usage>
<max>10G</max>
</max_memory_usage>

<!-- Analyzer is unstable, not ready for testing. -->
<allow_experimental_analyzer>
<readonly/>
</allow_experimental_analyzer>

<table_function_remote_max_addresses>
<max>200</max>
</table_function_remote_max_addresses>
</constraints>
</default>
</profiles>
@ -51,7 +51,6 @@ function clone
)

ls -lath ||:

}

function wget_with_retry
@ -75,6 +74,7 @@ function download
./clickhouse ||:
ln -s ./clickhouse ./clickhouse-server
ln -s ./clickhouse ./clickhouse-client
ln -s ./clickhouse ./clickhouse-local

# clickhouse-server is in the current dir
export PATH="$PWD:$PATH"
@ -91,6 +91,12 @@ function configure
cp -av --dereference "$script_dir"/query-fuzzer-tweaks-users.xml db/users.d
cp -av --dereference "$script_dir"/allow-nullable-key.xml db/config.d

cat > db/config.d/max_server_memory_usage_to_ram_ratio.xml <<EOL
<clickhouse>
<max_server_memory_usage_to_ram_ratio>0.75</max_server_memory_usage_to_ram_ratio>
</clickhouse>
EOL

cat > db/config.d/core.xml <<EOL
<clickhouse>
<core_dump>
@ -151,7 +157,7 @@ function fuzz
mkdir -p /var/run/clickhouse-server

# NOTE: we use process substitution here to preserve keep $! as a pid of clickhouse-server
clickhouse-server --config-file db/config.xml --pid-file /var/run/clickhouse-server/clickhouse-server.pid -- --path db 2>&1 | pigz > server.log.gz &
clickhouse-server --config-file db/config.xml --pid-file /var/run/clickhouse-server/clickhouse-server.pid -- --path db > server.log 2>&1 &
server_pid=$!

kill -0 $server_pid
@ -235,13 +241,29 @@ quit
# clickhouse-client. We don't check for existence of server process, because
# the process is still present while the server is terminating and not
# accepting the connections anymore.
if clickhouse-client --query "select 1 format Null"
then
server_died=0
else
echo "Server live check returns $?"
server_died=1
fi

for _ in {1..100}
do
if clickhouse-client --query "SELECT 1" 2> err
then
server_died=0
break
else
# There are legitimate queries leading to this error, example:
# SELECT * FROM remote('127.0.0.{1..255}', system, one)
if grep -F 'TOO_MANY_SIMULTANEOUS_QUERIES' err
then
# Give it some time to cool down
clickhouse-client --query "SHOW PROCESSLIST"
sleep 1
else
echo "Server live check returns $?"
cat err
server_died=1
break
fi
fi
done

# wait in background to call wait in foreground and ensure that the
# process is alive, since w/o job control this is the only way to obtain
@ -256,12 +278,24 @@ quit
if [ "$server_died" == 1 ]
then
# The server has died.
task_exit_code=210
echo "failure" > status.txt
if ! zgrep --text -ao "Received signal.*\|Logical error.*\|Assertion.*failed\|Failed assertion.*\|.*runtime error: .*\|.*is located.*\|SUMMARY: AddressSanitizer:.*\|SUMMARY: MemorySanitizer:.*\|SUMMARY: ThreadSanitizer:.*\|.*_LIBCPP_ASSERT.*" server.log.gz > description.txt
if ! rg --text -o 'Received signal.*|Logical error.*|Assertion.*failed|Failed assertion.*|.*runtime error: .*|.*is located.*|(SUMMARY|ERROR): [a-zA-Z]+Sanitizer:.*|.*_LIBCPP_ASSERT.*' server.log > description.txt
then
echo "Lost connection to server. See the logs." > description.txt
fi

IS_SANITIZED=$(clickhouse-local --query "SELECT value LIKE '%-fsanitize=%' FROM system.build_options WHERE name = 'CXX_FLAGS'")

if [ "${IS_SANITIZED}" -eq "1" ] && rg --text 'Sanitizer:? (out-of-memory|out of memory|failed to allocate)|Child process was terminated by signal 9' description.txt
then
# OOM of sanitizer is not a problem we can handle - treat it as success, but preserve the description.
# Why? Because sanitizers have the memory overhead, that is not controllable from inside clickhouse-server.
task_exit_code=0
echo "success" > status.txt
else
task_exit_code=210
echo "failure" > status.txt
fi

elif [ "$fuzzer_exit_code" == "143" ] || [ "$fuzzer_exit_code" == "0" ]
then
# Variants of a normal run:
@ -284,18 +318,18 @@ quit
# which is confusing.
task_exit_code=$fuzzer_exit_code
echo "failure" > status.txt
{ grep --text -o "Found error:.*" fuzzer.log \
|| grep --text -ao "Exception:.*" fuzzer.log \
{ rg --text -o "Found error:.*" fuzzer.log \
|| rg --text -ao "Exception:.*" fuzzer.log \
|| echo "Fuzzer failed ($fuzzer_exit_code). See the logs." ; } \
| tail -1 > description.txt
fi

if test -f core.*; then
pigz core.*
mv core.*.gz core.gz
zstd --threads=0 core.*
mv core.*.zst core.zst
fi

dmesg -T | grep -q -F -e 'Out of memory: Killed process' -e 'oom_reaper: reaped process' -e 'oom-kill:constraint=CONSTRAINT_NONE' && echo "OOM in dmesg" ||:
dmesg -T | rg -q -F -e 'Out of memory: Killed process' -e 'oom_reaper: reaped process' -e 'oom-kill:constraint=CONSTRAINT_NONE' && echo "OOM in dmesg" ||:
}

case "$stage" in
@ -327,24 +361,28 @@ case "$stage" in
time fuzz
;&
"report")

CORE_LINK=''
if [ -f core.gz ]; then
CORE_LINK='<a href="core.gz">core.gz</a>'
if [ -f core.zst ]; then
CORE_LINK='<a href="core.zst">core.zst</a>'
fi

rg --text -F '<Fatal>' server.log > fatal.log ||:

zstd --threads=0 server.log

cat > report.html <<EOF ||:
<!DOCTYPE html>
<html lang="en">
<style>
body { font-family: "DejaVu Sans", "Noto Sans", Arial, sans-serif; background: #EEE; }
h1 { margin-left: 10px; }
th, td { border: 0; padding: 5px 10px 5px 10px; text-align: left; vertical-align: top; line-height: 1.5; background-color: #FFF;
td { white-space: pre; font-family: Monospace, Courier New; }
border: 0; box-shadow: 0 0 0 1px rgba(0, 0, 0, 0.05), 0 8px 25px -5px rgba(0, 0, 0, 0.1); }
th, td { border: 0; padding: 5px 10px 5px 10px; text-align: left; vertical-align: top; line-height: 1.5; background-color: #FFF; }
td { white-space: pre; font-family: Monospace, Courier New; box-shadow: 0 0 0 1px rgba(0, 0, 0, 0.05), 0 8px 25px -5px rgba(0, 0, 0, 0.1); }
a { color: #06F; text-decoration: none; }
a:hover, a:active { color: #F40; text-decoration: underline; }
table { border: 0; }
p.links a { padding: 5px; margin: 3px; background: #FFF; line-height: 2; white-space: nowrap; box-shadow: 0 0 0 1px rgba(0, 0, 0, 0.05), 0 8px 25px -5px rgba(0, 0, 0, 0.1); }
th { cursor: pointer; }

</style>
<title>AST Fuzzer for PR #${PR_TO_TEST} @ ${SHA_TO_TEST}</title>
@ -352,17 +390,32 @@ th { cursor: pointer; }
<body>
<div class="main">

<h1>AST Fuzzer for PR #${PR_TO_TEST} @ ${SHA_TO_TEST}</h1>
<h1>AST Fuzzer for PR <a href="https://github.com/ClickHouse/ClickHouse/pull/${PR_TO_TEST}">#${PR_TO_TEST}</a> @ ${SHA_TO_TEST}</h1>
<p class="links">
<a href="runlog.log">runlog.log</a>
<a href="fuzzer.log">fuzzer.log</a>
<a href="server.log.gz">server.log.gz</a>
<a href="main.log">main.log</a>
${CORE_LINK}
<a href="run.log">run.log</a>
<a href="fuzzer.log">fuzzer.log</a>
<a href="server.log.zst">server.log.zst</a>
<a href="main.log">main.log</a>
${CORE_LINK}
</p>
<table>
<tr><th>Test name</th><th>Test status</th><th>Description</th></tr>
<tr><td>AST Fuzzer</td><td>$(cat status.txt)</td><td>$(cat description.txt)</td></tr>
<tr>
<th>Test name</th>
<th>Test status</th>
<th>Description</th>
</tr>
<tr>
<td>AST Fuzzer</td>
<td>$(cat status.txt)</td>
<td>$(
clickhouse-local --input-format RawBLOB --output-format RawBLOB --query "SELECT encodeXMLComponent(*) FROM table" < description.txt || cat description.txt
)</td>
</tr>
<tr>
<td colspan="3" style="white-space: pre-wrap;">$(
clickhouse-local --input-format RawBLOB --output-format RawBLOB --query "SELECT encodeXMLComponent(*) FROM table" < fatal.log || cat fatal.log
)</td>
</tr>
</table>
</body>
</html>
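The report now pipes log excerpts through clickhouse-local's encodeXMLComponent before embedding them, so fuzzer output cannot break the HTML. A standalone sketch of the same command; the expected output shown is an assumption based on the function's documented escaping:

    # Escapes <, >, & and quotes so raw log text is safe inside the report
    echo '<Fatal> error & "quote"' | \
      clickhouse-local --input-format RawBLOB --output-format RawBLOB \
        --query "SELECT encodeXMLComponent(*) FROM table"
    # -> &lt;Fatal&gt; error &amp; &quot;quote&quot;
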
@ -49,7 +49,7 @@ RUN arch=${TARGETARCH:-amd64} \
&& curl -o mysql-odbc.rpm "https://cdn.mysql.com/archives/mysql-connector-odbc-8.0/mysql-connector-odbc-8.0.27-1.el8.${rarch}.rpm" \
&& rpm2archive mysql-odbc.rpm \
&& tar xf mysql-odbc.rpm.tgz -C / ./usr/lib64/ \
&& LINK_DIR=$(dpkg -L libodbc1 | grep '^/usr/lib/.*-linux-gnu/odbc$') \
&& LINK_DIR=$(dpkg -L libodbc1 | rg '^/usr/lib/.*-linux-gnu/odbc$') \
&& ln -s /usr/lib64/libmyodbc8a.so "$LINK_DIR" \
&& ln -s /usr/lib64/libmyodbc8a.so "$LINK_DIR"/libmyodbc.so
@ -57,14 +57,17 @@ RUN arch=${TARGETARCH:-amd64} \
# ZooKeeper is not started by default, but consumes some space in containers.
# 777 perms used to allow anybody to start/stop ZooKeeper
ENV ZOOKEEPER_VERSION='3.6.3'
RUN curl -O "https://dlcdn.apache.org/zookeeper/zookeeper-${ZOOKEEPER_VERSION}/apache-zookeeper-${ZOOKEEPER_VERSION}-bin.tar.gz"
RUN tar -zxvf apache-zookeeper-${ZOOKEEPER_VERSION}-bin.tar.gz && mv apache-zookeeper-${ZOOKEEPER_VERSION}-bin /opt/zookeeper && chmod -R 777 /opt/zookeeper && rm apache-zookeeper-${ZOOKEEPER_VERSION}-bin.tar.gz
RUN echo $'tickTime=2500 \n\
RUN curl "https://archive.apache.org/dist/zookeeper/zookeeper-${ZOOKEEPER_VERSION}/apache-zookeeper-${ZOOKEEPER_VERSION}-bin.tar.gz" | \
tar -C opt -zxv && \
mv /opt/apache-zookeeper-${ZOOKEEPER_VERSION}-bin /opt/zookeeper && \
chmod -R 777 /opt/zookeeper && \
echo $'tickTime=2500 \n\
tickTime=2500 \n\
dataDir=/zookeeper \n\
clientPort=2181 \n\
maxClientCnxns=80' > /opt/zookeeper/conf/zoo.cfg
RUN mkdir /zookeeper && chmod -R 777 /zookeeper
maxClientCnxns=80' > /opt/zookeeper/conf/zoo.cfg && \
mkdir /zookeeper && \
chmod -R 777 /zookeeper

ENV TZ=Etc/UTC
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
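Folding the three RUN steps into one streams the download straight into tar and avoids baking the tarball into an intermediate image layer. The generic shape of the pattern, with illustrative URL and paths:

    # One step: fetch, unpack, configure; the archive never touches disk
    curl -fsSL "https://example.org/tool-1.0.tar.gz" | tar -zxv -C /opt
    chmod -R 777 /opt/tool-1.0
    echo 'key=value' > /opt/tool-1.0/tool.cfg
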
@ -8,6 +8,7 @@ RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list

RUN apt-get update \
&& env DEBIAN_FRONTEND=noninteractive apt-get install --yes \
adduser \
ca-certificates \
bash \
btrfs-progs \
@ -83,6 +84,7 @@ RUN python3 -m pip install \
pytest \
pytest-order==1.0.0 \
pytest-timeout \
pytest-random \
pytest-xdist \
pytest-repeat \
pytz \
@ -0,0 +1,5 @@
version: '2.3'
# Used to pre-pull images with docker-compose
services:
clickhouse1:
image: clickhouse/integration-test
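This new five-line compose file exists only so CI can fetch images ahead of a test run. A hypothetical invocation; the diff does not show the file's name or path:

    # File name is a placeholder; the diff does not reveal where this compose file lives
    docker-compose -f docker_compose_clickhouse.yml pull
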
@ -5,10 +5,10 @@ services:
hostname: hdfs1
restart: always
expose:
- ${HDFS_NAME_PORT}
- ${HDFS_DATA_PORT}
- ${HDFS_NAME_PORT:-50070}
- ${HDFS_DATA_PORT:-50075}
entrypoint: /etc/bootstrap.sh -d
volumes:
- type: ${HDFS_FS:-tmpfs}
source: ${HDFS_LOGS:-}
target: /usr/local/hadoop/logs
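The recurring edit across these compose files is ${VAR:-default} substitution, so a bare docker-compose invocation no longer fails when a port variable is unset. The semantics match POSIX shell parameter expansion:

    # ${VAR:-default} falls back when VAR is unset or empty
    unset HDFS_NAME_PORT
    echo "${HDFS_NAME_PORT:-50070}"    # prints 50070
    HDFS_NAME_PORT=51000
    echo "${HDFS_NAME_PORT:-50070}"    # prints 51000
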
@ -15,7 +15,7 @@ services:
image: confluentinc/cp-kafka:5.2.0
hostname: kafka1
ports:
- ${KAFKA_EXTERNAL_PORT}:${KAFKA_EXTERNAL_PORT}
- ${KAFKA_EXTERNAL_PORT:-8081}:${KAFKA_EXTERNAL_PORT:-8081}
environment:
KAFKA_ADVERTISED_LISTENERS: INSIDE://localhost:${KAFKA_EXTERNAL_PORT},OUTSIDE://kafka1:19092
KAFKA_ADVERTISED_HOST_NAME: kafka1
@ -35,7 +35,7 @@ services:
image: confluentinc/cp-schema-registry:5.2.0
hostname: schema-registry
ports:
- ${SCHEMA_REGISTRY_EXTERNAL_PORT}:${SCHEMA_REGISTRY_INTERNAL_PORT}
- ${SCHEMA_REGISTRY_EXTERNAL_PORT:-12313}:${SCHEMA_REGISTRY_INTERNAL_PORT:-12313}
environment:
SCHEMA_REGISTRY_HOST_NAME: schema-registry
SCHEMA_REGISTRY_KAFKASTORE_SECURITY_PROTOCOL: PLAINTEXT
@ -15,8 +15,8 @@ services:
source: ${KERBERIZED_HDFS_LOGS:-}
target: /var/log/hadoop-hdfs
expose:
- ${KERBERIZED_HDFS_NAME_PORT}
- ${KERBERIZED_HDFS_DATA_PORT}
- ${KERBERIZED_HDFS_NAME_PORT:-50070}
- ${KERBERIZED_HDFS_DATA_PORT:-1006}
depends_on:
- hdfskerberos
entrypoint: /etc/bootstrap.sh -d

@ -23,7 +23,7 @@ services:
# restart: always
hostname: kerberized_kafka1
ports:
- ${KERBERIZED_KAFKA_EXTERNAL_PORT}:${KERBERIZED_KAFKA_EXTERNAL_PORT}
- ${KERBERIZED_KAFKA_EXTERNAL_PORT:-19092}:${KERBERIZED_KAFKA_EXTERNAL_PORT:-19092}
environment:
KAFKA_LISTENERS: OUTSIDE://:19092,UNSECURED_OUTSIDE://:19093,UNSECURED_INSIDE://0.0.0.0:${KERBERIZED_KAFKA_EXTERNAL_PORT}
KAFKA_ADVERTISED_LISTENERS: OUTSIDE://kerberized_kafka1:19092,UNSECURED_OUTSIDE://kerberized_kafka1:19093,UNSECURED_INSIDE://localhost:${KERBERIZED_KAFKA_EXTERNAL_PORT}
@ -41,7 +41,7 @@ services:
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
KAFKA_OPTS: "-Djava.security.auth.login.config=/etc/kafka/secrets/broker_jaas.conf -Djava.security.krb5.conf=/etc/kafka/secrets/krb.conf -Dsun.security.krb5.debug=true"
volumes:
- ${KERBERIZED_KAFKA_DIR}/secrets:/etc/kafka/secrets
- ${KERBERIZED_KAFKA_DIR:-}/secrets:/etc/kafka/secrets
- /dev/urandom:/dev/random
depends_on:
- kafka_kerberized_zookeeper
@ -4,13 +4,13 @@ services:
image: getmeili/meilisearch:v0.27.0
restart: always
ports:
- ${MEILI_EXTERNAL_PORT}:${MEILI_INTERNAL_PORT}
- ${MEILI_EXTERNAL_PORT:-7700}:${MEILI_INTERNAL_PORT:-7700}

meili_secure:
image: getmeili/meilisearch:v0.27.0
restart: always
ports:
- ${MEILI_SECURE_EXTERNAL_PORT}:${MEILI_SECURE_INTERNAL_PORT}
- ${MEILI_SECURE_EXTERNAL_PORT:-7700}:${MEILI_SECURE_INTERNAL_PORT:-7700}
environment:
MEILI_MASTER_KEY: "password"

@ -9,7 +9,7 @@ services:
- data1-1:/data1
- ${MINIO_CERTS_DIR:-}:/certs
expose:
- ${MINIO_PORT}
- ${MINIO_PORT:-9001}
environment:
MINIO_ACCESS_KEY: minio
MINIO_SECRET_KEY: minio123

@ -7,11 +7,11 @@ services:
MONGO_INITDB_ROOT_USERNAME: root
MONGO_INITDB_ROOT_PASSWORD: clickhouse
ports:
- ${MONGO_EXTERNAL_PORT}:${MONGO_INTERNAL_PORT}
- ${MONGO_EXTERNAL_PORT:-27017}:${MONGO_INTERNAL_PORT:-27017}
command: --profile=2 --verbose

mongo2:
image: mongo:5.0
restart: always
ports:
- ${MONGO_NO_CRED_EXTERNAL_PORT}:${MONGO_NO_CRED_INTERNAL_PORT}
- ${MONGO_NO_CRED_EXTERNAL_PORT:-27017}:${MONGO_NO_CRED_INTERNAL_PORT:-27017}

@ -7,7 +7,7 @@ services:
MONGO_INITDB_ROOT_USERNAME: root
MONGO_INITDB_ROOT_PASSWORD: clickhouse
volumes:
- ${MONGO_CONFIG_PATH}:/mongo/
- ${MONGO_CONFIG_PATH:-}:/mongo/
ports:
- ${MONGO_EXTERNAL_PORT}:${MONGO_INTERNAL_PORT}
- ${MONGO_EXTERNAL_PORT:-27017}:${MONGO_INTERNAL_PORT:-27017}
command: --config /mongo/mongo_secure.conf --profile=2 --verbose
@ -8,7 +8,7 @@ services:
MYSQL_ROOT_HOST: ${MYSQL_ROOT_HOST}
DATADIR: /mysql/
expose:
- ${MYSQL_PORT}
- ${MYSQL_PORT:-3306}
command: --server_id=100
--log-bin='mysql-bin-1.log'
--default-time-zone='+3:00'

@ -1,21 +0,0 @@
version: '2.3'
services:
mysql1:
image: mysql:5.7
restart: 'no'
environment:
MYSQL_ROOT_PASSWORD: clickhouse
ports:
- 3308:3306
command: --server_id=100 --log-bin='mysql-bin-1.log'
--default-time-zone='+3:00'
--gtid-mode="ON"
--enforce-gtid-consistency
--log-error-verbosity=3
--log-error=/var/log/mysqld/error.log
--general-log=ON
--general-log-file=/var/log/mysqld/general.log
volumes:
- type: ${MYSQL_LOGS_FS:-tmpfs}
source: ${MYSQL_LOGS:-}
target: /var/log/mysqld/
@ -8,7 +8,7 @@ services:
MYSQL_ROOT_HOST: ${MYSQL_ROOT_HOST}
DATADIR: /mysql/
expose:
- ${MYSQL8_PORT}
- ${MYSQL8_PORT:-3306}
command: --server_id=100 --log-bin='mysql-bin-1.log'
--default_authentication_plugin='mysql_native_password'
--default-time-zone='+3:00' --gtid-mode="ON"

@ -8,7 +8,7 @@ services:
MYSQL_ROOT_HOST: ${MYSQL_CLUSTER_ROOT_HOST}
DATADIR: /mysql/
expose:
- ${MYSQL_CLUSTER_PORT}
- ${MYSQL_CLUSTER_PORT:-3306}
command: --server_id=100
--log-bin='mysql-bin-2.log'
--default-time-zone='+3:00'
@ -30,7 +30,7 @@ services:
MYSQL_ROOT_HOST: ${MYSQL_CLUSTER_ROOT_HOST}
DATADIR: /mysql/
expose:
- ${MYSQL_CLUSTER_PORT}
- ${MYSQL_CLUSTER_PORT:-3306}
command: --server_id=100
--log-bin='mysql-bin-3.log'
--default-time-zone='+3:00'
@ -52,7 +52,7 @@ services:
MYSQL_ROOT_HOST: ${MYSQL_CLUSTER_ROOT_HOST}
DATADIR: /mysql/
expose:
- ${MYSQL_CLUSTER_PORT}
- ${MYSQL_CLUSTER_PORT:-3306}
command: --server_id=100
--log-bin='mysql-bin-4.log'
--default-time-zone='+3:00'
@ -3,9 +3,9 @@ services:
nats1:
image: nats
ports:
- "${NATS_EXTERNAL_PORT}:${NATS_INTERNAL_PORT}"
- "${NATS_EXTERNAL_PORT:-4444}:${NATS_INTERNAL_PORT:-4444}"
command: "-p 4444 --user click --pass house --tls --tlscert=/etc/certs/server-cert.pem --tlskey=/etc/certs/server-key.pem"
volumes:
- type: bind
source: "${NATS_CERT_DIR}/nats"
source: "${NATS_CERT_DIR:-}/nats"
target: /etc/certs

@ -5,7 +5,7 @@ services:
command: ["postgres", "-c", "wal_level=logical", "-c", "max_replication_slots=2", "-c", "logging_collector=on", "-c", "log_directory=/postgres/logs", "-c", "log_filename=postgresql.log", "-c", "log_statement=all", "-c", "max_connections=200"]
restart: always
expose:
- ${POSTGRES_PORT}
- ${POSTGRES_PORT:-5432}
healthcheck:
test: ["CMD-SHELL", "pg_isready -U postgres"]
interval: 10s

@ -9,7 +9,7 @@ services:
POSTGRES_PASSWORD: mysecretpassword
PGDATA: /postgres/data
expose:
- ${POSTGRES_PORT}
- ${POSTGRES_PORT:-5432}
volumes:
- type: ${POSTGRES_LOGS_FS:-tmpfs}
source: ${POSTGRES2_DIR:-}
@ -23,7 +23,7 @@ services:
POSTGRES_PASSWORD: mysecretpassword
PGDATA: /postgres/data
expose:
- ${POSTGRES_PORT}
- ${POSTGRES_PORT:-5432}
volumes:
- type: ${POSTGRES_LOGS_FS:-tmpfs}
source: ${POSTGRES3_DIR:-}
@ -37,7 +37,7 @@ services:
POSTGRES_PASSWORD: mysecretpassword
PGDATA: /postgres/data
expose:
- ${POSTGRES_PORT}
- ${POSTGRES_PORT:-5432}
volumes:
- type: ${POSTGRES_LOGS_FS:-tmpfs}
source: ${POSTGRES4_DIR:-}

@ -5,7 +5,7 @@ services:
image: rabbitmq:3.8-management-alpine
hostname: rabbitmq1
expose:
- ${RABBITMQ_PORT}
- ${RABBITMQ_PORT:-5672}
environment:
RABBITMQ_DEFAULT_USER: "root"
RABBITMQ_DEFAULT_PASS: "clickhouse"

@ -4,5 +4,5 @@ services:
image: redis
restart: always
ports:
- ${REDIS_EXTERNAL_PORT}:${REDIS_INTERNAL_PORT}
- ${REDIS_EXTERNAL_PORT:-6379}:${REDIS_INTERNAL_PORT:-6379}
command: redis-server --requirepass "clickhouse" --databases 32
@ -11,7 +11,7 @@ set -eu
for module; do
if [ "${module#-}" = "$module" ]; then
ip link show "$module" || true
lsmod | grep "$module" || true
lsmod | rg "$module" || true
fi
done
@ -37,6 +37,8 @@ RUN apt-get update \
wget \
rustc \
cargo \
ripgrep \
zstd \
&& pip3 --no-cache-dir install 'clickhouse-driver==0.2.1' scipy \
&& apt-get purge --yes python3-dev g++ \
&& apt-get autoremove --yes \
@ -193,7 +193,7 @@ function run_tests
then
# Run only explicitly specified tests, if any.
# shellcheck disable=SC2010
test_files=($(ls "$test_prefix" | grep "$CHPC_TEST_GREP" | xargs -I{} -n1 readlink -f "$test_prefix/{}"))
test_files=($(ls "$test_prefix" | rg "$CHPC_TEST_GREP" | xargs -I{} -n1 readlink -f "$test_prefix/{}"))
elif [ "$PR_TO_TEST" -ne 0 ] \
&& [ "$(wc -l < changed-test-definitions.txt)" -gt 0 ] \
&& [ "$(wc -l < other-changed-files.txt)" -eq 0 ]
@ -210,7 +210,7 @@ function run_tests
# We can filter out certain tests
if [ -v CHPC_TEST_GREP_EXCLUDE ]; then
# filter tests array in bash https://stackoverflow.com/a/40375567
filtered_test_files=( $( for i in ${test_files[@]} ; do echo $i ; done | grep -v ${CHPC_TEST_GREP_EXCLUDE} ) )
filtered_test_files=( $( for i in ${test_files[@]} ; do echo $i ; done | rg -v ${CHPC_TEST_GREP_EXCLUDE} ) )
test_files=("${filtered_test_files[@]}")
fi

@ -284,7 +284,7 @@ function run_tests
# Use awk because bash doesn't support floating point arithmetic.
profile_seconds=$(awk "BEGIN { print ($profile_seconds_left > 0 ? 10 : 0) }")

if [ "$(grep -c $(basename $test) changed-test-definitions.txt)" -gt 0 ]
if [ "$(rg -c $(basename $test) changed-test-definitions.txt)" -gt 0 ]
then
# Run all queries from changed test files to ensure that all new queries will be tested.
max_queries=0
@ -518,7 +518,7 @@ IFS=$'\n'
for prefix in $(cut -f1,2 "analyze/query-run-metrics-for-stats.tsv" | sort | uniq)
do
file="analyze/tmp/${prefix// /_}.tsv"
grep "^$prefix " "analyze/query-run-metrics-for-stats.tsv" > "$file" &
rg "^$prefix " "analyze/query-run-metrics-for-stats.tsv" > "$file" &
printf "%s\0\n" \
"clickhouse-local \
--file \"$file\" \
@ -1088,7 +1088,7 @@ do
# Build separate .svg flamegraph for each query.
# -F is somewhat unsafe because it might match not the beginning of the
# string, but this is unlikely and escaping the query for grep is a pain.
grep -F "$query " "report/stacks.$version.tsv" \
rg -F "$query " "report/stacks.$version.tsv" \
| cut -f 5- \
| sed 's/\t/ /g' \
| tee "report/tmp/$query_file.stacks.$version.tsv" \
@ -1117,7 +1117,7 @@ do
query_file=$(echo "$query" | cut -c-120 | sed 's/[/ ]/_/g')

# Ditto the above comment about -F.
grep -F "$query " "report/metric-deviation.$version.tsv" \
rg -F "$query " "report/metric-deviation.$version.tsv" \
| cut -f4- > "$query_file.$version.metrics.rep" &
done
done
@ -1132,8 +1132,8 @@ do
{
# The second grep is a heuristic for error messages like
# "socket.timeout: timed out".
grep -h -m2 -i '\(Exception\|Error\):[^:]' "$log" \
|| grep -h -m2 -i '^[^ ]\+: ' "$log" \
rg --no-filename --max-count=2 -i '\(Exception\|Error\):[^:]' "$log" \
|| rg --no-filename --max-count=2 -i '^[^ ]\+: ' "$log" \
|| head -2 "$log"
} | sed "s/^/$test\t/" >> run-errors.tsv ||:
done
@ -1180,7 +1180,7 @@ IFS=$'\n'
for prefix in $(cut -f1 "metrics/metrics.tsv" | sort | uniq)
do
file="metrics/$prefix.tsv"
grep "^$prefix " "metrics/metrics.tsv" | cut -f2- > "$file"
rg "^$prefix " "metrics/metrics.tsv" | cut -f2- > "$file"

gnuplot -e "
set datafile separator '\t';
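This script's grep calls are swapped for ripgrep throughout; the flag translation used above is mostly mechanical. A small reference sketch (generic grep/rg behaviour):

    # Fixed-string search: -F is the same in both tools
    grep -F "$query " report/stacks.tsv
    rg   -F "$query " report/stacks.tsv
    # -h (no file names) and -m2 (match cap) become long options in rg
    grep -h -m2 -i 'pattern' "$log"
    rg --no-filename --max-count=2 -i 'pattern' "$log"
    # Caveat: rg takes Rust regex syntax, so BRE escapes like \(a\|b\) do not carry over
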
@ -28,8 +28,8 @@ function download
# Historically there were various paths for the performance test package.
# Test all of them.
declare -a urls_to_try=(
"https://s3.amazonaws.com/clickhouse-builds/$left_pr/$left_sha/$BUILD_NAME/performance.tar.zst"
"https://s3.amazonaws.com/clickhouse-builds/$left_pr/$left_sha/$BUILD_NAME/performance.tgz"
"https://s3.amazonaws.com/clickhouse-builds/$left_pr/$left_sha/performance/performance.tgz"
)

for path in "${urls_to_try[@]}"
@ -45,7 +45,7 @@ function download
# download anything, for example in some manual runs. In this case, SHAs are not set.
if ! [ "$left_sha" = "$right_sha" ]
then
wget -nv -nd -c "$left_path" -O- | tar -C left --no-same-owner --strip-components=1 -zxv &
wget -nv -nd -c "$left_path" -O- | tar -C left --no-same-owner --strip-components=1 --zstd --extract --verbose &
elif [ "$right_sha" != "" ]
then
mkdir left ||:
@ -60,7 +60,7 @@ function download
>&2 echo "Unknown dataset '$dataset_name'"
exit 1
fi
cd db0 && wget -nv -nd -c "$dataset_path" -O- | tar -xv &
cd db0 && wget -nv -nd -c "$dataset_path" -O- | tar --extract --verbose &
done

mkdir ~/fg ||:
@ -66,10 +66,8 @@ function find_reference_sha
# test all of them.
unset found
declare -a urls_to_try=(
"https://s3.amazonaws.com/clickhouse-builds/0/$REF_SHA/$BUILD_NAME/performance.tar.zst"
"https://s3.amazonaws.com/clickhouse-builds/0/$REF_SHA/$BUILD_NAME/performance.tgz"
# FIXME: the following link is left there for backward compatibility.
# We should remove it after 2022-11-01
"https://s3.amazonaws.com/clickhouse-builds/0/$REF_SHA/performance/performance.tgz"
)
for path in "${urls_to_try[@]}"
do
@ -94,13 +92,13 @@ chmod 777 workspace output
cd workspace

# Download the package for the version we are going to test.
if curl_with_retry "$S3_URL/$PR_TO_TEST/$SHA_TO_TEST$COMMON_BUILD_PREFIX/$BUILD_NAME/performance.tgz"
if curl_with_retry "$S3_URL/$PR_TO_TEST/$SHA_TO_TEST$COMMON_BUILD_PREFIX/$BUILD_NAME/performance.tar.zst"
then
right_path="$S3_URL/$PR_TO_TEST/$SHA_TO_TEST$COMMON_BUILD_PREFIX/$BUILD_NAME/performance.tgz"
right_path="$S3_URL/$PR_TO_TEST/$SHA_TO_TEST$COMMON_BUILD_PREFIX/$BUILD_NAME/performance.tar.zst"
fi

mkdir right
wget -nv -nd -c "$right_path" -O- | tar -C right --no-same-owner --strip-components=1 -zxv
wget -nv -nd -c "$right_path" -O- | tar -C right --no-same-owner --strip-components=1 --zstd --extract --verbose

# Find reference revision if not specified explicitly
if [ "$REF_SHA" == "" ]; then find_reference_sha; fi
@ -297,6 +297,7 @@ if not args.use_existing_tables:

# Let's sync the data to avoid writeback affects performance
os.system("sync")
reportStageEnd("sync")

# By default, test all queries.
queries_to_run = range(0, len(test_queries))
Some files were not shown because too many files have changed in this diff.