Mirror of https://github.com/ClickHouse/ClickHouse.git, synced 2024-11-22 15:42:02 +00:00.

Commit 7b1ceaa2c5: Merge branch 'master' into fix_cares_crash
.clang-tidy (27 lines changed)

@@ -16,6 +16,7 @@ Checks: '*,
-android-*,
-bugprone-assignment-in-if-condition,
-bugprone-branch-clone,
-bugprone-easily-swappable-parameters,
-bugprone-exception-escape,
@@ -23,7 +24,6 @@ Checks: '*,
-bugprone-narrowing-conversions,
-bugprone-not-null-terminated-result,
-bugprone-unchecked-optional-access,
-bugprone-assignment-in-if-condition,
-cert-dcl16-c,
-cert-err58-cpp,
@@ -34,7 +34,6 @@ Checks: '*,
-clang-analyzer-optin.performance.Padding,
-clang-analyzer-optin.portability.UnixAPI,
-clang-analyzer-security.insecureAPI.bzero,
-clang-analyzer-security.insecureAPI.strcpy,
@@ -103,12 +102,13 @@ Checks: '*,
-openmp-*,
-misc-const-correctness,
-misc-no-recursion,
-misc-non-private-member-variables-in-classes,
-misc-const-correctness,
-modernize-avoid-c-arrays,
-modernize-concat-nested-namespaces,
-modernize-macro-to-enum,
-modernize-pass-by-value,
-modernize-return-braced-init-list,
-modernize-use-auto,
@@ -117,7 +117,6 @@ Checks: '*,
-modernize-use-nodiscard,
-modernize-use-override,
-modernize-use-trailing-return-type,
-modernize-macro-to-enum,
-performance-inefficient-string-concatenation,
-performance-no-int-to-ptr,
@@ -135,17 +134,35 @@ Checks: '*,
-readability-magic-numbers,
-readability-named-parameter,
-readability-redundant-declaration,
-readability-simplify-boolean-expr,
-readability-static-accessed-through-instance,
-readability-suspicious-call-argument,
-readability-uppercase-literal-suffix,
-readability-use-anyofallof,
-readability-simplify-boolean-expr,
-zirkon-*,
-misc-*, # temporarily disabled due to being too slow
# also disable checks in other categories which are aliases of checks in misc-*:
# https://releases.llvm.org/15.0.0/tools/clang/tools/extra/docs/clang-tidy/checks/list.html
-cert-dcl54-cpp, # alias of misc-new-delete-overloads
-hicpp-new-delete-operators, # alias of misc-new-delete-overloads
-cert-fio38-c, # alias of misc-non-copyable-objects
-cert-dcl03-c, # alias of misc-static-assert
-hicpp-static-assert, # alias of misc-static-assert
-cert-err09-cpp, # alias of misc-throw-by-value-catch-by-reference
-cert-err61-cpp, # alias of misc-throw-by-value-catch-by-reference
-cppcoreguidelines-c-copy-assignment-signature, # alias of misc-unconventional-assign-operator
-cppcoreguidelines-non-private-member-variables-in-classes, # alias of misc-non-private-member-variables-in-classes
'
WarningsAsErrors: '*'
# TODO: use dictionary syntax for CheckOptions when minimum clang-tidy level rose to 15
# some-check.SomeOption: 'some value'
# instead of
# - key: some-check.SomeOption
#   value: 'some value'
CheckOptions:
  - key: readability-identifier-naming.ClassCase
    value: CamelCase
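A side note on the CheckOptions TODO in the hunk above: clang-tidy 15 accepts a flatter dictionary syntax for check options. As a minimal, hypothetical sketch only (not part of this commit), the existing entry would read roughly like this once the minimum clang-tidy version reaches 15:

    # Hypothetical sketch, assuming clang-tidy >= 15: the same option written
    # in dictionary syntax instead of the key/value pair list.
    CheckOptions:
      readability-identifier-naming.ClassCase: CamelCase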
.exrc (new file, 1 line)

@@ -0,0 +1 @@
au BufRead,BufNewFile * set tabstop=4 softtabstop=0 expandtab shiftwidth=4 smarttab tags=tags,../tags
.github/ISSUE_TEMPLATE/10_question.md (vendored, 2 lines changed)

@@ -7,6 +7,6 @@ assignees: ''
---
> Make sure to check documentation https://clickhouse.com/docs/en/ first. If the question is concise and probably has a short answer, asking it in Telegram chat https://telegram.me/clickhouse_en is probably the fastest way to find the answer. For more complicated questions, consider asking them on StackOverflow with "clickhouse" tag https://stackoverflow.com/questions/tagged/clickhouse
> Make sure to check documentation https://clickhouse.com/docs/en/ first. If the question is concise and probably has a short answer, asking it in [community Slack](https://join.slack.com/t/clickhousedb/shared_invite/zt-1gh9ds7f4-PgDhJAaF8ad5RbWBAAjzFg) is probably the fastest way to find the answer. For more complicated questions, consider asking them on StackOverflow with "clickhouse" tag https://stackoverflow.com/questions/tagged/clickhouse
> If you still prefer GitHub issues, remove all this text and ask your question here.
.github/PULL_REQUEST_TEMPLATE.md (vendored, 19 lines changed)

@@ -18,5 +18,24 @@ tests/ci/run_check.py
### Changelog entry (a user-readable short description of the changes that goes to CHANGELOG.md):
...
### Documentation entry for user-facing changes
- [ ] Documentation is written (mandatory for new features)
<!---
Directly edit documentation source files in the "docs" folder with the same pull-request as code changes
or
Add a user-readable short description of the changes that should be added to docs.clickhouse.com below.
At a minimum, the following information should be added (but add more as needed).
- Motivation: Why is this function, table engine, etc. useful to ClickHouse users?
- Parameters: If the feature being added takes arguments, options or is influenced by settings, please list them below with a brief explanation.
- Example use: A query or command.
-->
> Information about CI checks: https://clickhouse.com/docs/en/development/continuous-integration/
.github/workflows/backport_branches.yml (vendored, 209 lines changed)

@@ -12,11 +12,10 @@ jobs:
PythonUnitTests:
runs-on: [self-hosted, style-checker]
steps:
- name: Clear repository
run: |
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
uses: ClickHouse/checkout@v1
with:
clear-repository: true
- name: Python unit tests
run: |
cd "$GITHUB_WORKSPACE/tests/ci"
@@ -24,34 +23,32 @@ jobs:
DockerHubPushAarch64:
runs-on: [self-hosted, style-checker-aarch64]
steps:
- name: Clear repository
run: |
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
uses: ClickHouse/checkout@v1
with:
clear-repository: true
- name: Images check
run: |
cd "$GITHUB_WORKSPACE/tests/ci"
python3 docker_images_check.py --suffix aarch64
- name: Upload images files to artifacts
uses: actions/upload-artifact@v2
uses: actions/upload-artifact@v3
with:
name: changed_images_aarch64
path: ${{ runner.temp }}/docker_images_check/changed_images_aarch64.json
DockerHubPushAmd64:
runs-on: [self-hosted, style-checker]
steps:
- name: Clear repository
run: |
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
uses: ClickHouse/checkout@v1
with:
clear-repository: true
- name: Images check
run: |
cd "$GITHUB_WORKSPACE/tests/ci"
python3 docker_images_check.py --suffix amd64
- name: Upload images files to artifacts
uses: actions/upload-artifact@v2
uses: actions/upload-artifact@v3
with:
name: changed_images_amd64
path: ${{ runner.temp }}/docker_images_check/changed_images_amd64.json
@@ -59,18 +56,17 @@ jobs:
needs: [DockerHubPushAmd64, DockerHubPushAarch64]
runs-on: [self-hosted, style-checker]
steps:
- name: Clear repository
run: |
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
uses: ClickHouse/checkout@v1
with:
clear-repository: true
- name: Download changed aarch64 images
uses: actions/download-artifact@v2
uses: actions/download-artifact@v3
with:
name: changed_images_aarch64
path: ${{ runner.temp }}
- name: Download changed amd64 images
uses: actions/download-artifact@v2
uses: actions/download-artifact@v3
with:
name: changed_images_amd64
path: ${{ runner.temp }}
@@ -79,7 +75,7 @@ jobs:
cd "$GITHUB_WORKSPACE/tests/ci"
python3 docker_manifests_merge.py --suffix amd64 --suffix aarch64
- name: Upload images files to artifacts
uses: actions/upload-artifact@v2
uses: actions/upload-artifact@v3
with:
name: changed_images
path: ${{ runner.temp }}/changed_images.json
@@ -94,13 +90,12 @@ jobs:
REPO_COPY=${{runner.temp}}/compatibility_check/ClickHouse
REPORTS_PATH=${{runner.temp}}/reports_dir
EOF
- name: Clear repository
run: |
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
uses: ClickHouse/checkout@v1
with:
clear-repository: true
- name: Download json reports
uses: actions/download-artifact@v2
uses: actions/download-artifact@v3
with:
path: ${{ env.REPORTS_PATH }}
- name: CompatibilityCheck
@@ -132,28 +127,25 @@ jobs:
BUILD_NAME=package_release
EOF
- name: Download changed images
uses: actions/download-artifact@v2
uses: actions/download-artifact@v3
with:
name: changed_images
path: ${{ env.IMAGES_PATH }}
- name: Clear repository
run: |
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
uses: ClickHouse/checkout@v1
with:
clear-repository: true
submodules: true
fetch-depth: 0 # For a proper version and performance artifacts
- name: Build
run: |
git -C "$GITHUB_WORKSPACE" submodule sync --recursive
git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --recursive --init --jobs=10
sudo rm -fr "$TEMP_PATH"
mkdir -p "$TEMP_PATH"
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME"
- name: Upload build URLs to artifacts
if: ${{ success() || failure() }}
uses: actions/upload-artifact@v2
uses: actions/upload-artifact@v3
with:
name: ${{ env.BUILD_URLS }}
path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json
@@ -177,28 +169,25 @@ jobs:
BUILD_NAME=package_aarch64
EOF
- name: Download changed images
uses: actions/download-artifact@v2
uses: actions/download-artifact@v3
with:
name: changed_images
path: ${{ env.IMAGES_PATH }}
- name: Clear repository
run: |
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
uses: ClickHouse/checkout@v1
with:
clear-repository: true
submodules: true
fetch-depth: 0 # For a proper version and performance artifacts
- name: Build
run: |
git -C "$GITHUB_WORKSPACE" submodule sync --recursive
git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --recursive --init --jobs=10
sudo rm -fr "$TEMP_PATH"
mkdir -p "$TEMP_PATH"
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME"
- name: Upload build URLs to artifacts
if: ${{ success() || failure() }}
uses: actions/upload-artifact@v2
uses: actions/upload-artifact@v3
with:
name: ${{ env.BUILD_URLS }}
path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json
@@ -222,26 +211,24 @@ jobs:
BUILD_NAME=package_asan
EOF
- name: Download changed images
uses: actions/download-artifact@v2
uses: actions/download-artifact@v3
with:
name: changed_images
path: ${{ env.IMAGES_PATH }}
- name: Clear repository
run: |
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
uses: ClickHouse/checkout@v1
with:
clear-repository: true
submodules: true
- name: Build
run: |
git -C "$GITHUB_WORKSPACE" submodule sync --recursive
git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --recursive --init --jobs=10
sudo rm -fr "$TEMP_PATH"
mkdir -p "$TEMP_PATH"
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME"
- name: Upload build URLs to artifacts
if: ${{ success() || failure() }}
uses: actions/upload-artifact@v2
uses: actions/upload-artifact@v3
with:
name: ${{ env.BUILD_URLS }}
path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json
@@ -265,26 +252,24 @@ jobs:
BUILD_NAME=package_tsan
EOF
- name: Download changed images
uses: actions/download-artifact@v2
uses: actions/download-artifact@v3
with:
name: changed_images
path: ${{ env.IMAGES_PATH }}
- name: Clear repository
run: |
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
uses: ClickHouse/checkout@v1
with:
clear-repository: true
submodules: true
- name: Build
run: |
git -C "$GITHUB_WORKSPACE" submodule sync --recursive
git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --recursive --init --jobs=10
sudo rm -fr "$TEMP_PATH"
mkdir -p "$TEMP_PATH"
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME"
- name: Upload build URLs to artifacts
if: ${{ success() || failure() }}
uses: actions/upload-artifact@v2
uses: actions/upload-artifact@v3
with:
name: ${{ env.BUILD_URLS }}
path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json
@@ -308,26 +293,24 @@ jobs:
BUILD_NAME=package_debug
EOF
- name: Download changed images
uses: actions/download-artifact@v2
uses: actions/download-artifact@v3
with:
name: changed_images
path: ${{ env.IMAGES_PATH }}
- name: Clear repository
run: |
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
uses: ClickHouse/checkout@v1
with:
clear-repository: true
submodules: true
- name: Build
run: |
git -C "$GITHUB_WORKSPACE" submodule sync --recursive
git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --recursive --init --jobs=10
sudo rm -fr "$TEMP_PATH"
mkdir -p "$TEMP_PATH"
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME"
- name: Upload build URLs to artifacts
if: ${{ success() || failure() }}
uses: actions/upload-artifact@v2
uses: actions/upload-artifact@v3
with:
name: ${{ env.BUILD_URLS }}
path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json
@@ -351,28 +334,25 @@ jobs:
BUILD_NAME=binary_darwin
EOF
- name: Download changed images
uses: actions/download-artifact@v2
uses: actions/download-artifact@v3
with:
name: changed_images
path: ${{ env.IMAGES_PATH }}
- name: Clear repository
run: |
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
uses: ClickHouse/checkout@v1
with:
clear-repository: true
submodules: true
fetch-depth: 0 # otherwise we will have no info about contributors
- name: Build
run: |
git -C "$GITHUB_WORKSPACE" submodule sync --recursive
git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --recursive --init --jobs=10
sudo rm -fr "$TEMP_PATH"
mkdir -p "$TEMP_PATH"
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME"
- name: Upload build URLs to artifacts
if: ${{ success() || failure() }}
uses: actions/upload-artifact@v2
uses: actions/upload-artifact@v3
with:
name: ${{ env.BUILD_URLS }}
path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json
@@ -396,28 +376,25 @@ jobs:
BUILD_NAME=binary_darwin_aarch64
EOF
- name: Download changed images
uses: actions/download-artifact@v2
uses: actions/download-artifact@v3
with:
name: changed_images
path: ${{ env.IMAGES_PATH }}
- name: Clear repository
run: |
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
uses: ClickHouse/checkout@v1
with:
clear-repository: true
submodules: true
fetch-depth: 0 # otherwise we will have no info about contributors
- name: Build
run: |
git -C "$GITHUB_WORKSPACE" submodule sync --recursive
git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --recursive --init --jobs=10
sudo rm -fr "$TEMP_PATH"
mkdir -p "$TEMP_PATH"
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME"
- name: Upload build URLs to artifacts
if: ${{ success() || failure() }}
uses: actions/upload-artifact@v2
uses: actions/upload-artifact@v3
with:
name: ${{ env.BUILD_URLS }}
path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json
@@ -436,12 +413,10 @@ jobs:
- BuilderDebAarch64
runs-on: [self-hosted, style-checker]
steps:
- name: Clear repository
run: |
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
uses: ClickHouse/checkout@v1
with:
clear-repository: true
fetch-depth: 0 # It MUST BE THE SAME for all dependencies and the job itself
- name: Check docker clickhouse/clickhouse-server building
run: |
@@ -477,14 +452,13 @@ jobs:
NEEDS_DATA_PATH=${{runner.temp}}/needs.json
EOF
- name: Download json reports
uses: actions/download-artifact@v2
uses: actions/download-artifact@v3
with:
path: ${{ env.REPORTS_PATH }}
- name: Clear repository
run: |
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
uses: ClickHouse/checkout@v1
with:
clear-repository: true
- name: Report Builder
run: |
sudo rm -fr "$TEMP_PATH"
@@ -516,14 +490,13 @@ jobs:
NEEDS_DATA_PATH=${{runner.temp}}/needs.json
EOF
- name: Download json reports
uses: actions/download-artifact@v2
uses: actions/download-artifact@v3
with:
path: ${{ env.REPORTS_PATH }}
- name: Clear repository
run: |
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
uses: ClickHouse/checkout@v1
with:
clear-repository: true
- name: Report Builder
run: |
sudo rm -fr "$TEMP_PATH"
@@ -556,14 +529,13 @@ jobs:
KILL_TIMEOUT=10800
EOF
- name: Download json reports
uses: actions/download-artifact@v2
uses: actions/download-artifact@v3
with:
path: ${{ env.REPORTS_PATH }}
- name: Clear repository
run: |
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
uses: ClickHouse/checkout@v1
with:
clear-repository: true
- name: Functional test
run: |
sudo rm -fr "$TEMP_PATH"
@@ -594,14 +566,13 @@ jobs:
KILL_TIMEOUT=3600
EOF
- name: Download json reports
uses: actions/download-artifact@v2
uses: actions/download-artifact@v3
with:
path: ${{ env.REPORTS_PATH }}
- name: Clear repository
run: |
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
uses: ClickHouse/checkout@v1
with:
clear-repository: true
- name: Functional test
run: |
sudo rm -fr "$TEMP_PATH"
@@ -635,14 +606,13 @@ jobs:
REPO_COPY=${{runner.temp}}/stress_thread/ClickHouse
EOF
- name: Download json reports
uses: actions/download-artifact@v2
uses: actions/download-artifact@v3
with:
path: ${{ env.REPORTS_PATH }}
- name: Clear repository
run: |
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
uses: ClickHouse/checkout@v1
with:
clear-repository: true
- name: Stress test
run: |
sudo rm -fr "$TEMP_PATH"
@@ -672,14 +642,13 @@ jobs:
REPO_COPY=${{runner.temp}}/integration_tests_release/ClickHouse
EOF
- name: Download json reports
uses: actions/download-artifact@v2
uses: actions/download-artifact@v3
with:
path: ${{ env.REPORTS_PATH }}
- name: Clear repository
run: |
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
uses: ClickHouse/checkout@v1
with:
clear-repository: true
- name: Integration test
run: |
sudo rm -fr "$TEMP_PATH"
@@ -706,12 +675,12 @@ jobs:
- CompatibilityCheck
runs-on: [self-hosted, style-checker]
steps:
- name: Clear repository
run: |
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
uses: ClickHouse/checkout@v1
with:
clear-repository: true
- name: Finish label
run: |
cd "$GITHUB_WORKSPACE/tests/ci"
python3 finish_check.py
python3 merge_pr.py
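The same substitution recurs throughout the workflow diffs above and below: the manual "Clear repository" step plus actions/checkout@v2 is replaced by ClickHouse/checkout@v1 with clear-repository: true, and upload-artifact/download-artifact move from v2 to v3. A minimal sketch of the recurring checkout change follows; indentation and the surrounding job keys are assumed for illustration, not taken from the diff:

    # Old pattern: wipe the workspace by hand, then use the stock checkout action.
    - name: Clear repository
      run: |
        sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
    - name: Check out repository code
      uses: actions/checkout@v2

    # New pattern: the ClickHouse fork of the action clears the workspace itself.
    - name: Check out repository code
      uses: ClickHouse/checkout@v1
      with:
        clear-repository: true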
.github/workflows/cherry_pick.yml (vendored, 3 lines changed)

@@ -28,8 +28,9 @@ jobs:
REPO_TEAM=core
EOF
- name: Check out repository code
uses: actions/checkout@v2
uses: ClickHouse/checkout@v1
with:
clear-repository: true
token: ${{secrets.ROBOT_CLICKHOUSE_COMMIT_TOKEN}}
fetch-depth: 0
- name: Cherry pick
.github/workflows/debug.yml (vendored, 2 lines changed)

@@ -8,4 +8,4 @@ jobs:
DebugInfo:
runs-on: ubuntu-latest
steps:
- uses: hmarr/debug-action@1201a20fc9d278ddddd5f0f46922d06513892491
- uses: hmarr/debug-action@a701ed95a46e6f2fb0df25e1a558c16356fae35a
.github/workflows/docs_check.yml (vendored, 65 lines changed)

@@ -16,15 +16,15 @@ on:  # yamllint disable-line rule:truthy
- 'docker/docs/**'
- 'docs/**'
- 'website/**'
- 'utils/check-style/aspell-ignore/**'
jobs:
CheckLabels:
runs-on: [self-hosted, style-checker]
steps:
- name: Clear repository
run: |
sudo rm -rf "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
uses: ClickHouse/checkout@v1
with:
clear-repository: true
- name: Labels check
run: |
cd "$GITHUB_WORKSPACE/tests/ci"
@@ -33,17 +33,16 @@ jobs:
needs: CheckLabels
runs-on: [self-hosted, style-checker-aarch64]
steps:
- name: Clear repository
run: |
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
uses: ClickHouse/checkout@v1
with:
clear-repository: true
- name: Images check
run: |
cd "$GITHUB_WORKSPACE/tests/ci"
python3 docker_images_check.py --suffix aarch64
- name: Upload images files to artifacts
uses: actions/upload-artifact@v2
uses: actions/upload-artifact@v3
with:
name: changed_images_aarch64
path: ${{ runner.temp }}/docker_images_check/changed_images_aarch64.json
@@ -51,17 +50,16 @@ jobs:
needs: CheckLabels
runs-on: [self-hosted, style-checker]
steps:
- name: Clear repository
run: |
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
uses: ClickHouse/checkout@v1
with:
clear-repository: true
- name: Images check
run: |
cd "$GITHUB_WORKSPACE/tests/ci"
python3 docker_images_check.py --suffix amd64
- name: Upload images files to artifacts
uses: actions/upload-artifact@v2
uses: actions/upload-artifact@v3
with:
name: changed_images_amd64
path: ${{ runner.temp }}/docker_images_check/changed_images_amd64.json
@@ -69,18 +67,17 @@ jobs:
needs: [DockerHubPushAmd64, DockerHubPushAarch64]
runs-on: [self-hosted, style-checker]
steps:
- name: Clear repository
run: |
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
uses: ClickHouse/checkout@v1
with:
clear-repository: true
- name: Download changed aarch64 images
uses: actions/download-artifact@v2
uses: actions/download-artifact@v3
with:
name: changed_images_aarch64
path: ${{ runner.temp }}
- name: Download changed amd64 images
uses: actions/download-artifact@v2
uses: actions/download-artifact@v3
with:
name: changed_images_amd64
path: ${{ runner.temp }}
@@ -89,7 +86,7 @@ jobs:
cd "$GITHUB_WORKSPACE/tests/ci"
python3 docker_manifests_merge.py --suffix amd64 --suffix aarch64
- name: Upload images files to artifacts
uses: actions/upload-artifact@v2
uses: actions/upload-artifact@v3
with:
name: changed_images
path: ${{ runner.temp }}/changed_images.json
@@ -109,15 +106,14 @@ jobs:
- name: Download changed images
# even if artifact does not exist, e.g. on `do not test` label or failed Docker job
continue-on-error: true
uses: actions/download-artifact@v2
uses: actions/download-artifact@v3
with:
name: changed_images
path: ${{ env.TEMP_PATH }}
- name: Clear repository
run: |
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
uses: ClickHouse/checkout@v1
with:
clear-repository: true
- name: Style Check
run: |
cd "$GITHUB_WORKSPACE/tests/ci"
@@ -139,15 +135,14 @@ jobs:
REPO_COPY=${{runner.temp}}/docs_check/ClickHouse
EOF
- name: Download changed images
uses: actions/download-artifact@v2
uses: actions/download-artifact@v3
with:
name: changed_images
path: ${{ env.TEMP_PATH }}
- name: Clear repository
run: |
sudo rm -rf "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
uses: ClickHouse/checkout@v1
with:
clear-repository: true
- name: Docs Check
run: |
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
@@ -166,12 +161,12 @@ jobs:
- DocsCheck
runs-on: [self-hosted, style-checker]
steps:
- name: Clear repository
run: |
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
uses: ClickHouse/checkout@v1
with:
clear-repository: true
- name: Finish label
run: |
cd "$GITHUB_WORKSPACE/tests/ci"
python3 finish_check.py
python3 merge_pr.py --check-approved
.github/workflows/docs_release.yml (vendored, 41 lines changed)

@@ -17,39 +17,38 @@ concurrency:
- 'docs/**'
- 'utils/list-versions/version_date.tsv'
- 'website/**'
- 'utils/check-style/aspell-ignore/**'
workflow_dispatch:
jobs:
DockerHubPushAarch64:
runs-on: [self-hosted, style-checker-aarch64]
steps:
- name: Clear repository
run: |
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
uses: ClickHouse/checkout@v1
with:
clear-repository: true
- name: Images check
run: |
cd "$GITHUB_WORKSPACE/tests/ci"
python3 docker_images_check.py --suffix aarch64
- name: Upload images files to artifacts
uses: actions/upload-artifact@v2
uses: actions/upload-artifact@v3
with:
name: changed_images_aarch64
path: ${{ runner.temp }}/docker_images_check/changed_images_aarch64.json
DockerHubPushAmd64:
runs-on: [self-hosted, style-checker]
steps:
- name: Clear repository
run: |
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
uses: ClickHouse/checkout@v1
with:
clear-repository: true
- name: Images check
run: |
cd "$GITHUB_WORKSPACE/tests/ci"
python3 docker_images_check.py --suffix amd64
- name: Upload images files to artifacts
uses: actions/upload-artifact@v2
uses: actions/upload-artifact@v3
with:
name: changed_images_amd64
path: ${{ runner.temp }}/docker_images_check/changed_images_amd64.json
@@ -57,18 +56,17 @@ jobs:
needs: [DockerHubPushAmd64, DockerHubPushAarch64]
runs-on: [self-hosted, style-checker]
steps:
- name: Clear repository
run: |
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
uses: ClickHouse/checkout@v1
with:
clear-repository: true
- name: Download changed aarch64 images
uses: actions/download-artifact@v2
uses: actions/download-artifact@v3
with:
name: changed_images_aarch64
path: ${{ runner.temp }}
- name: Download changed amd64 images
uses: actions/download-artifact@v2
uses: actions/download-artifact@v3
with:
name: changed_images_amd64
path: ${{ runner.temp }}
@@ -77,7 +75,7 @@ jobs:
cd "$GITHUB_WORKSPACE/tests/ci"
python3 docker_manifests_merge.py --suffix amd64 --suffix aarch64
- name: Upload images files to artifacts
uses: actions/upload-artifact@v2
uses: actions/upload-artifact@v3
with:
name: changed_images
path: ${{ runner.temp }}/changed_images.json
@@ -96,13 +94,12 @@ jobs:
${{secrets.ROBOT_CLICKHOUSE_SSH_KEY}}
RCSK
EOF
- name: Clear repository
run: |
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
uses: ClickHouse/checkout@v1
with:
clear-repository: true
- name: Download changed images
uses: actions/download-artifact@v2
uses: actions/download-artifact@v3
with:
name: changed_images
path: ${{ env.TEMP_PATH }}
.github/workflows/jepsen.yml (vendored, 12 lines changed)

@@ -19,12 +19,10 @@ jobs:
TEMP_PATH=${{runner.temp}}/keeper_jepsen
REPO_COPY=${{runner.temp}}/keeper_jepsen/ClickHouse
EOF
- name: Clear repository
run: |
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
uses: ClickHouse/checkout@v1
with:
clear-repository: true
fetch-depth: 0
- name: Jepsen Test
run: |
@@ -50,12 +48,10 @@ jobs:
#     TEMP_PATH=${{runner.temp}}/server_jepsen
#     REPO_COPY=${{runner.temp}}/server_jepsen/ClickHouse
#     EOF
# - name: Clear repository
#   run: |
#     sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
# - name: Check out repository code
#   uses: actions/checkout@v2
#   uses: ClickHouse/checkout@v1
#   with:
#     clear-repository: true
#     fetch-depth: 0
# - name: Jepsen Test
#   run: |
.github/workflows/master.yml (vendored, 1058 lines changed)
File diff suppressed because it is too large.
.github/workflows/nightly.yml (vendored, 50 lines changed)

@@ -16,34 +16,32 @@ jobs:
DockerHubPushAarch64:
runs-on: [self-hosted, style-checker-aarch64]
steps:
- name: Clear repository
run: |
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
uses: ClickHouse/checkout@v1
with:
clear-repository: true
- name: Images check
run: |
cd "$GITHUB_WORKSPACE/tests/ci"
python3 docker_images_check.py --suffix aarch64 --all
- name: Upload images files to artifacts
uses: actions/upload-artifact@v2
uses: actions/upload-artifact@v3
with:
name: changed_images_aarch64
path: ${{ runner.temp }}/docker_images_check/changed_images_aarch64.json
DockerHubPushAmd64:
runs-on: [self-hosted, style-checker]
steps:
- name: Clear repository
run: |
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
uses: ClickHouse/checkout@v1
with:
clear-repository: true
- name: Images check
run: |
cd "$GITHUB_WORKSPACE/tests/ci"
python3 docker_images_check.py --suffix amd64 --all
- name: Upload images files to artifacts
uses: actions/upload-artifact@v2
uses: actions/upload-artifact@v3
with:
name: changed_images_amd64
path: ${{ runner.temp }}/docker_images_check/changed_images_amd64.json
@@ -51,18 +49,17 @@ jobs:
needs: [DockerHubPushAmd64, DockerHubPushAarch64]
runs-on: [self-hosted, style-checker]
steps:
- name: Clear repository
run: |
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
uses: ClickHouse/checkout@v1
with:
clear-repository: true
- name: Download changed aarch64 images
uses: actions/download-artifact@v2
uses: actions/download-artifact@v3
with:
name: changed_images_aarch64
path: ${{ runner.temp }}
- name: Download changed amd64 images
uses: actions/download-artifact@v2
uses: actions/download-artifact@v3
with:
name: changed_images_amd64
path: ${{ runner.temp }}
@@ -71,7 +68,7 @@ jobs:
cd "$GITHUB_WORKSPACE/tests/ci"
python3 docker_manifests_merge.py --suffix amd64 --suffix aarch64
- name: Upload images files to artifacts
uses: actions/upload-artifact@v2
uses: actions/upload-artifact@v3
with:
name: changed_images
path: ${{ runner.temp }}/changed_images.json
@@ -90,22 +87,17 @@ jobs:
EOF
echo "COVERITY_TOKEN=${{ secrets.COVERITY_TOKEN }}" >> "$GITHUB_ENV"
- name: Download changed images
uses: actions/download-artifact@v2
uses: actions/download-artifact@v3
with:
name: changed_images
path: ${{ env.IMAGES_PATH }}
- name: Clear repository
run: |
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
id: coverity-checkout
uses: actions/checkout@v2
uses: ClickHouse/checkout@v1
with:
fetch-depth: 0 # otherwise we will have no info about contributors
clear-repository: true
submodules: true
- name: Build
run: |
git -C "$GITHUB_WORKSPACE" submodule sync
git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --init --jobs=10
sudo rm -fr "$TEMP_PATH"
mkdir -p "$TEMP_PATH"
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
@@ -115,7 +107,7 @@ jobs:
run: |
curl --form token="${COVERITY_TOKEN}" \
--form email='security+coverity@clickhouse.com' \
--form file="@$TEMP_PATH/$BUILD_NAME/coverity-scan.tgz" \
--form file="@$TEMP_PATH/$BUILD_NAME/coverity-scan.tar.zst" \
--form version="${GITHUB_REF#refs/heads/}-${GITHUB_SHA::6}" \
--form description="Nighly Scan: $(date +'%Y-%m-%dT%H:%M:%S')" \
https://scan.coverity.com/builds?project=ClickHouse%2FClickHouse
@@ -134,8 +126,10 @@ jobs:
CC: clang-15
CXX: clang++-15
steps:
- uses: actions/checkout@v2
- name: Check out repository code
uses: ClickHouse/checkout@v1
with:
clear-repository: true
fetch-depth: 0 # Shallow clones should be disabled for a better relevancy of analysis
submodules: true
- name: Set up JDK 11
.github/workflows/pull_request.yml (vendored, 1260 lines changed)
File diff suppressed because it is too large.
.github/workflows/release.yml (vendored, 40 lines changed)

@@ -12,50 +12,20 @@ jobs:
ReleasePublish:
runs-on: [self-hosted, style-checker]
steps:
- name: Set envs
- name: Deploy packages and assets
run: |
cat >> "$GITHUB_ENV" << 'EOF'
JFROG_API_KEY=${{ secrets.JFROG_ARTIFACTORY_API_KEY }}
TEMP_PATH=${{runner.temp}}/release_packages
REPO_COPY=${{runner.temp}}/release_packages/ClickHouse
EOF
- name: Check out repository code
uses: actions/checkout@v2
with:
# Always use the most recent script version
ref: master
- name: Download packages and push to Artifactory
run: |
rm -rf "$TEMP_PATH" && mkdir -p "$TEMP_PATH"
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
cd "$REPO_COPY"
# Download and push packages to artifactory
python3 ./tests/ci/push_to_artifactory.py --release '${{ github.ref }}' \
  --commit '${{ github.sha }}' --artifactory-url '${{ secrets.JFROG_ARTIFACTORY_URL }}' --all
# Download macos binaries to ${{runner.temp}}/download_binary
python3 ./tests/ci/download_binary.py --version '${{ github.ref }}' \
  --commit '${{ github.sha }}' binary_darwin binary_darwin_aarch64
mv '${{runner.temp}}/download_binary/'clickhouse-* '${{runner.temp}}/push_to_artifactory'
- name: Upload packages to release assets
uses: svenstaro/upload-release-action@v2
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
file: ${{runner.temp}}/push_to_artifactory/*
overwrite: true
tag: ${{ github.ref }}
file_glob: true
GITHUB_TAG="${GITHUB_REF#refs/tags/}"
curl '${{ secrets.PACKAGES_RELEASE_URL }}/release/'"${GITHUB_TAG}"'?binary=binary_darwin&binary=binary_darwin_aarch64&sync=true' -d ''
############################################################################################
##################################### Docker images #######################################
############################################################################################
DockerServerImages:
runs-on: [self-hosted, style-checker]
steps:
- name: Clear repository
run: |
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
uses: ClickHouse/checkout@v1
with:
clear-repository: true
fetch-depth: 0 # otherwise we will have no version info
- name: Check docker clickhouse/clickhouse-server building
run: |
.github/workflows/release_branches.yml (vendored, 514 lines changed)
File diff suppressed because it is too large.
.github/workflows/tags_stable.yml (vendored, 5 lines changed)

@@ -34,7 +34,7 @@ jobs:
run: |
echo "GITHUB_TAG=${GITHUB_REF#refs/tags/}" >> "$GITHUB_ENV"
- name: Check out repository code
uses: actions/checkout@v2
uses: ClickHouse/checkout@v1
with:
ref: master
fetch-depth: 0
@@ -51,7 +51,7 @@ jobs:
--gh-user-or-token="$GITHUB_TOKEN" --jobs=5 \
--output="/ClickHouse/docs/changelogs/${GITHUB_TAG}.md" "${GITHUB_TAG}"
git add "./docs/changelogs/${GITHUB_TAG}.md"
python ./utils/security-generator/generate_security.py > SECURITY.md
python3 ./utils/security-generator/generate_security.py > SECURITY.md
git diff HEAD
- name: Create Pull Request
uses: peter-evans/create-pull-request@v3
@@ -61,6 +61,7 @@ jobs:
committer: "robot-clickhouse <robot-clickhouse@users.noreply.github.com>"
commit-message: Update version_date.tsv and changelogs after ${{ env.GITHUB_TAG }}
branch: auto/${{ env.GITHUB_TAG }}
assignees: ${{ github.event.sender.login }}  # assign the PR to the tag pusher
delete-branch: true
title: Update version_date.tsv and changelogs after ${{ env.GITHUB_TAG }}
labels: do not test
.github/workflows/woboq.yml (vendored, 6 lines changed)

@@ -21,12 +21,10 @@ jobs:
REPO_COPY=${{runner.temp}}/codebrowser/ClickHouse
IMAGES_PATH=${{runner.temp}}/images_path
EOF
- name: Clear repository
run: |
sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
- name: Check out repository code
uses: actions/checkout@v2
uses: ClickHouse/checkout@v1
with:
clear-repository: true
submodules: 'true'
- name: Codebrowser
run: |
.gitignore (vendored, 9 lines changed)

@@ -17,6 +17,7 @@
# logs
*.log
*.debuglog
*.stderr
*.stdout
@@ -153,7 +154,15 @@ website/package-lock.json
/programs/server/data
/programs/server/metadata
/programs/server/store
/programs/server/uuid
/programs/server/coordination
# temporary test files
tests/queries/0_stateless/test_*
tests/queries/0_stateless/*.binary
tests/queries/0_stateless/*.generated-expect
# rust
/rust/**/target
# It is autogenerated from *.in
/rust/**/.cargo/config.toml
.gitmodules (vendored, 202 lines changed)

@@ -1,94 +1,88 @@
[submodule "contrib/poco"]
path = contrib/poco
url = https://github.com/ClickHouse/poco.git
url = https://github.com/ClickHouse/poco
branch = clickhouse
[submodule "contrib/zstd"]
path = contrib/zstd
url = https://github.com/facebook/zstd.git
url = https://github.com/facebook/zstd
[submodule "contrib/lz4"]
path = contrib/lz4
url = https://github.com/lz4/lz4.git
url = https://github.com/lz4/lz4
[submodule "contrib/librdkafka"]
path = contrib/librdkafka
url = https://github.com/ClickHouse/librdkafka.git
url = https://github.com/ClickHouse/librdkafka
[submodule "contrib/cctz"]
path = contrib/cctz
url = https://github.com/ClickHouse/cctz.git
url = https://github.com/ClickHouse/cctz
[submodule "contrib/zlib-ng"]
path = contrib/zlib-ng
url = https://github.com/ClickHouse/zlib-ng.git
url = https://github.com/ClickHouse/zlib-ng
branch = clickhouse-2.0.x
[submodule "contrib/googletest"]
path = contrib/googletest
url = https://github.com/google/googletest.git
url = https://github.com/google/googletest
[submodule "contrib/capnproto"]
path = contrib/capnproto
url = https://github.com/capnproto/capnproto.git
url = https://github.com/capnproto/capnproto
[submodule "contrib/double-conversion"]
path = contrib/double-conversion
url = https://github.com/google/double-conversion.git
url = https://github.com/google/double-conversion
[submodule "contrib/re2"]
path = contrib/re2
url = https://github.com/google/re2.git
url = https://github.com/google/re2
[submodule "contrib/mariadb-connector-c"]
path = contrib/mariadb-connector-c
url = https://github.com/ClickHouse/mariadb-connector-c.git
url = https://github.com/ClickHouse/mariadb-connector-c
[submodule "contrib/jemalloc"]
path = contrib/jemalloc
url = https://github.com/jemalloc/jemalloc.git
url = https://github.com/jemalloc/jemalloc
[submodule "contrib/unixodbc"]
path = contrib/unixodbc
url = https://github.com/ClickHouse/UnixODBC.git
url = https://github.com/ClickHouse/UnixODBC
[submodule "contrib/protobuf"]
path = contrib/protobuf
url = https://github.com/ClickHouse/protobuf.git
url = https://github.com/ClickHouse/protobuf
branch = v3.13.0.1
[submodule "contrib/boost"]
path = contrib/boost
url = https://github.com/ClickHouse/boost.git
url = https://github.com/ClickHouse/boost
[submodule "contrib/base64"]
path = contrib/base64
url = https://github.com/ClickHouse/Turbo-Base64.git
url = https://github.com/ClickHouse/Turbo-Base64
[submodule "contrib/arrow"]
path = contrib/arrow
url = https://github.com/ClickHouse/arrow.git
url = https://github.com/ClickHouse/arrow
branch = blessed/release-6.0.1
[submodule "contrib/thrift"]
path = contrib/thrift
url = https://github.com/apache/thrift.git
url = https://github.com/apache/thrift
[submodule "contrib/libhdfs3"]
path = contrib/libhdfs3
url = https://github.com/ClickHouse/libhdfs3.git
url = https://github.com/ClickHouse/libhdfs3
[submodule "contrib/libxml2"]
path = contrib/libxml2
url = https://github.com/GNOME/libxml2.git
url = https://github.com/GNOME/libxml2
[submodule "contrib/libgsasl"]
path = contrib/libgsasl
url = https://github.com/ClickHouse/libgsasl.git
[submodule "contrib/libcxx"]
path = contrib/libcxx
url = https://github.com/ClickHouse/libcxx.git
[submodule "contrib/libcxxabi"]
path = contrib/libcxxabi
url = https://github.com/ClickHouse/libcxxabi.git
url = https://github.com/ClickHouse/libgsasl
[submodule "contrib/snappy"]
path = contrib/snappy
url = https://github.com/ClickHouse/snappy.git
url = https://github.com/ClickHouse/snappy
[submodule "contrib/cppkafka"]
path = contrib/cppkafka
url = https://github.com/mfontanini/cppkafka.git
url = https://github.com/mfontanini/cppkafka
[submodule "contrib/brotli"]
path = contrib/brotli
url = https://github.com/google/brotli.git
url = https://github.com/google/brotli
[submodule "contrib/h3"]
path = contrib/h3
url = https://github.com/ClickHouse/h3
[submodule "contrib/libunwind"]
path = contrib/libunwind
url = https://github.com/ClickHouse/libunwind.git
url = https://github.com/ClickHouse/libunwind
[submodule "contrib/simdjson"]
path = contrib/simdjson
url = https://github.com/simdjson/simdjson.git
url = https://github.com/simdjson/simdjson
[submodule "contrib/rapidjson"]
path = contrib/rapidjson
url = https://github.com/ClickHouse/rapidjson
@@ -100,68 +94,68 @@
url = https://github.com/ClickHouse/orc
[submodule "contrib/sparsehash-c11"]
path = contrib/sparsehash-c11
url = https://github.com/sparsehash/sparsehash-c11.git
url = https://github.com/sparsehash/sparsehash-c11
[submodule "contrib/grpc"]
path = contrib/grpc
url = https://github.com/ClickHouse/grpc.git
url = https://github.com/ClickHouse/grpc
branch = v1.33.2
[submodule "contrib/aws"]
path = contrib/aws
url = https://github.com/ClickHouse/aws-sdk-cpp.git
url = https://github.com/ClickHouse/aws-sdk-cpp
[submodule "aws-c-event-stream"]
path = contrib/aws-c-event-stream
url = https://github.com/ClickHouse/aws-c-event-stream.git
url = https://github.com/awslabs/aws-c-event-stream
[submodule "aws-c-common"]
path = contrib/aws-c-common
url = https://github.com/ClickHouse/aws-c-common.git
url = https://github.com/ClickHouse/aws-c-common
[submodule "aws-checksums"]
path = contrib/aws-checksums
url = https://github.com/ClickHouse/aws-checksums.git
url = https://github.com/awslabs/aws-checksums
[submodule "contrib/curl"]
path = contrib/curl
url = https://github.com/curl/curl.git
url = https://github.com/curl/curl
[submodule "contrib/icudata"]
path = contrib/icudata
url = https://github.com/ClickHouse/icudata.git
url = https://github.com/ClickHouse/icudata
[submodule "contrib/icu"]
path = contrib/icu
url = https://github.com/unicode-org/icu.git
url = https://github.com/unicode-org/icu
[submodule "contrib/flatbuffers"]
path = contrib/flatbuffers
url = https://github.com/ClickHouse/flatbuffers.git
url = https://github.com/ClickHouse/flatbuffers
[submodule "contrib/replxx"]
path = contrib/replxx
url = https://github.com/ClickHouse/replxx.git
url = https://github.com/ClickHouse/replxx
[submodule "contrib/avro"]
path = contrib/avro
url = https://github.com/ClickHouse/avro.git
url = https://github.com/ClickHouse/avro
ignore = untracked
[submodule "contrib/msgpack-c"]
path = contrib/msgpack-c
url = https://github.com/msgpack/msgpack-c
[submodule "contrib/libcpuid"]
path = contrib/libcpuid
url = https://github.com/ClickHouse/libcpuid.git
url = https://github.com/ClickHouse/libcpuid
[submodule "contrib/openldap"]
path = contrib/openldap
url = https://github.com/ClickHouse/openldap.git
url = https://github.com/ClickHouse/openldap
[submodule "contrib/AMQP-CPP"]
path = contrib/AMQP-CPP
url = https://github.com/ClickHouse/AMQP-CPP.git
url = https://github.com/ClickHouse/AMQP-CPP
[submodule "contrib/cassandra"]
path = contrib/cassandra
url = https://github.com/ClickHouse/cpp-driver.git
url = https://github.com/ClickHouse/cpp-driver
branch = clickhouse
[submodule "contrib/libuv"]
path = contrib/libuv
url = https://github.com/ClickHouse/libuv.git
url = https://github.com/ClickHouse/libuv
branch = clickhouse
[submodule "contrib/fmtlib"]
path = contrib/fmtlib
url = https://github.com/fmtlib/fmt.git
url = https://github.com/fmtlib/fmt
[submodule "contrib/sentry-native"]
path = contrib/sentry-native
url = https://github.com/ClickHouse/sentry-native.git
url = https://github.com/ClickHouse/sentry-native
[submodule "contrib/krb5"]
path = contrib/krb5
url = https://github.com/ClickHouse/krb5
@@ -178,17 +172,17 @@
url = https://github.com/danlark1/miniselect
[submodule "contrib/rocksdb"]
path = contrib/rocksdb
url = https://github.com/ClickHouse/rocksdb.git
url = https://github.com/ClickHouse/rocksdb
[submodule "contrib/xz"]
path = contrib/xz
url = https://github.com/xz-mirror/xz
[submodule "contrib/abseil-cpp"]
path = contrib/abseil-cpp
url = https://github.com/abseil/abseil-cpp.git
url = https://github.com/abseil/abseil-cpp
branch = lts_2021_11_02
[submodule "contrib/dragonbox"]
path = contrib/dragonbox
url = https://github.com/ClickHouse/dragonbox.git
url = https://github.com/ClickHouse/dragonbox
[submodule "contrib/fast_float"]
path = contrib/fast_float
url = https://github.com/fastfloat/fast_float
@@ -197,44 +191,44 @@
url = https://github.com/ClickHouse/libpq
[submodule "contrib/boringssl"]
path = contrib/boringssl
url = https://github.com/ClickHouse/boringssl.git
url = https://github.com/ClickHouse/boringssl
branch = unknown_branch_from_artur
[submodule "contrib/NuRaft"]
path = contrib/NuRaft
url = https://github.com/ClickHouse/NuRaft.git
url = https://github.com/ClickHouse/NuRaft
[submodule "contrib/nanodbc"]
path = contrib/nanodbc
url = https://github.com/ClickHouse/nanodbc.git
url = https://github.com/ClickHouse/nanodbc
[submodule "contrib/datasketches-cpp"]
path = contrib/datasketches-cpp
url = https://github.com/ClickHouse/datasketches-cpp.git
url = https://github.com/ClickHouse/datasketches-cpp
[submodule "contrib/yaml-cpp"]
path = contrib/yaml-cpp
url = https://github.com/ClickHouse/yaml-cpp.git
url = https://github.com/ClickHouse/yaml-cpp
[submodule "contrib/cld2"]
path = contrib/cld2
url = https://github.com/ClickHouse/cld2.git
url = https://github.com/ClickHouse/cld2
[submodule "contrib/libstemmer_c"]
path = contrib/libstemmer_c
url = https://github.com/ClickHouse/libstemmer_c.git
url = https://github.com/ClickHouse/libstemmer_c
[submodule "contrib/wordnet-blast"]
path = contrib/wordnet-blast
url = https://github.com/ClickHouse/wordnet-blast.git
url = https://github.com/ClickHouse/wordnet-blast
[submodule "contrib/lemmagen-c"]
path = contrib/lemmagen-c
url = https://github.com/ClickHouse/lemmagen-c.git
url = https://github.com/ClickHouse/lemmagen-c
[submodule "contrib/libpqxx"]
path = contrib/libpqxx
url = https://github.com/ClickHouse/libpqxx.git
url = https://github.com/ClickHouse/libpqxx
[submodule "contrib/sqlite-amalgamation"]
path = contrib/sqlite-amalgamation
url = https://github.com/azadkuh/sqlite-amalgamation
url = https://github.com/ClickHouse/sqlite-amalgamation
[submodule "contrib/s2geometry"]
path = contrib/s2geometry
url = https://github.com/ClickHouse/s2geometry.git
url = https://github.com/ClickHouse/s2geometry
[submodule "contrib/bzip2"]
path = contrib/bzip2
url = https://github.com/ClickHouse/bzip2.git
url = https://github.com/ClickHouse/bzip2
[submodule "contrib/magic_enum"]
path = contrib/magic_enum
url = https://github.com/Neargye/magic_enum
@@ -243,50 +237,96 @@
url = https://github.com/google/libprotobuf-mutator
[submodule "contrib/sysroot"]
path = contrib/sysroot
url = https://github.com/ClickHouse/sysroot.git
url = https://github.com/ClickHouse/sysroot
[submodule "contrib/nlp-data"]
path = contrib/nlp-data
url = https://github.com/ClickHouse/nlp-data.git
url = https://github.com/ClickHouse/nlp-data
[submodule "contrib/hive-metastore"]
path = contrib/hive-metastore
url = https://github.com/ClickHouse/hive-metastore
[submodule "contrib/azure"]
path = contrib/azure
url = https://github.com/ClickHouse/azure-sdk-for-cpp.git
url = https://github.com/ClickHouse/azure-sdk-for-cpp
[submodule "contrib/minizip-ng"]
path = contrib/minizip-ng
url = https://github.com/zlib-ng/minizip-ng
[submodule "contrib/annoy"]
path = contrib/annoy
url = https://github.com/ClickHouse/annoy.git
url = https://github.com/ClickHouse/annoy
branch = ClickHouse-master
[submodule "contrib/qpl"]
path = contrib/qpl
url = https://github.com/intel/qpl.git
url = https://github.com/intel/qpl
[submodule "contrib/wyhash"]
path = contrib/wyhash
url = https://github.com/wangyi-fudan/wyhash.git
url = https://github.com/wangyi-fudan/wyhash
[submodule "contrib/hashidsxx"]
path = contrib/hashidsxx
url = https://github.com/schoentoon/hashidsxx.git
url = https://github.com/schoentoon/hashidsxx
[submodule "contrib/nats-io"]
path = contrib/nats-io
url = https://github.com/ClickHouse/nats.c.git
url = https://github.com/ClickHouse/nats.c
[submodule "contrib/vectorscan"]
path = contrib/vectorscan
url = https://github.com/VectorCamp/vectorscan.git
[submodule "contrib/liburing"]
path = contrib/liburing
url = https://github.com/axboe/liburing.git
url = https://github.com/VectorCamp/vectorscan
[submodule "contrib/c-ares"]
path = contrib/c-ares
url = https://github.com/ClickHouse/c-ares
[submodule "contrib/llvm-project"]
path = contrib/llvm-project
url = https://github.com/ClickHouse/llvm-project.git
url = https://github.com/ClickHouse/llvm-project
[submodule "contrib/corrosion"]
path = contrib/corrosion
url = https://github.com/corrosion-rs/corrosion.git
url = https://github.com/corrosion-rs/corrosion
[submodule "contrib/morton-nd"]
path = contrib/morton-nd
url = https://github.com/morton-nd/morton-nd
[submodule "contrib/xxHash"]
path = contrib/xxHash
url = https://github.com/Cyan4973/xxHash
[submodule "contrib/crc32-s390x"]
path = contrib/crc32-s390x
url = https://github.com/linux-on-ibm-z/crc32-s390x
[submodule "contrib/openssl"]
path = contrib/openssl
url = https://github.com/openssl/openssl
branch = openssl-3.0
[submodule "contrib/google-benchmark"]
path = contrib/google-benchmark
url = https://github.com/google/benchmark
[submodule "contrib/libdivide"]
path = contrib/libdivide
url = https://github.com/ridiculousfish/libdivide
[submodule "contrib/aws-crt-cpp"]
path = contrib/aws-crt-cpp
url = https://github.com/ClickHouse/aws-crt-cpp
[submodule "contrib/aws-c-io"]
path = contrib/aws-c-io
url = https://github.com/ClickHouse/aws-c-io
[submodule "contrib/aws-c-mqtt"]
path = contrib/aws-c-mqtt
url = https://github.com/awslabs/aws-c-mqtt
|
||||
[submodule "contrib/aws-c-auth"]
|
||||
path = contrib/aws-c-auth
|
||||
url = https://github.com/awslabs/aws-c-auth
|
||||
[submodule "contrib/aws-c-cal"]
|
||||
path = contrib/aws-c-cal
|
||||
url = https://github.com/ClickHouse/aws-c-cal
|
||||
[submodule "contrib/aws-c-sdkutils"]
|
||||
path = contrib/aws-c-sdkutils
|
||||
url = https://github.com/awslabs/aws-c-sdkutils
|
||||
[submodule "contrib/aws-c-http"]
|
||||
path = contrib/aws-c-http
|
||||
url = https://github.com/awslabs/aws-c-http
|
||||
[submodule "contrib/aws-c-s3"]
|
||||
path = contrib/aws-c-s3
|
||||
url = https://github.com/awslabs/aws-c-s3
|
||||
[submodule "contrib/aws-c-compression"]
|
||||
path = contrib/aws-c-compression
|
||||
url = https://github.com/awslabs/aws-c-compression
|
||||
[submodule "contrib/aws-s2n-tls"]
|
||||
path = contrib/aws-s2n-tls
|
||||
url = https://github.com/ClickHouse/s2n-tls
|
||||
[submodule "contrib/crc32-vpmsum"]
|
||||
path = contrib/crc32-vpmsum
|
||||
url = https://github.com/antonblanchard/crc32-vpmsum.git
|
||||
2
.vimrc
@ -1,2 +0,0 @@
au BufRead,BufNewFile ./* set tabstop=4 softtabstop=0 expandtab shiftwidth=4 smarttab tags=tags,../tags

1894
CHANGELOG.md
File diff suppressed because it is too large
@ -73,22 +73,7 @@ message (STATUS "CMAKE_BUILD_TYPE: ${CMAKE_BUILD_TYPE}")

string (TOUPPER ${CMAKE_BUILD_TYPE} CMAKE_BUILD_TYPE_UC)

option(USE_STATIC_LIBRARIES "Disable to use shared libraries" ON)
# DEVELOPER ONLY.
# Faster linking if turned on.
option(SPLIT_SHARED_LIBRARIES "Keep all internal libraries as separate .so files" OFF)

if (USE_STATIC_LIBRARIES AND SPLIT_SHARED_LIBRARIES)
    message(FATAL_ERROR "SPLIT_SHARED_LIBRARIES=1 must not be used together with USE_STATIC_LIBRARIES=1")
endif()

if (NOT USE_STATIC_LIBRARIES AND SPLIT_SHARED_LIBRARIES)
    set(BUILD_SHARED_LIBS 1 CACHE INTERNAL "")
endif ()

if (USE_STATIC_LIBRARIES)
    list(REVERSE CMAKE_FIND_LIBRARY_SUFFIXES)
endif ()
list(REVERSE CMAKE_FIND_LIBRARY_SUFFIXES)

option (ENABLE_FUZZING "Fuzzy testing using libfuzzer" OFF)

@ -111,6 +96,7 @@ if (ENABLE_FUZZING)
|
||||
set (ENABLE_JEMALLOC 0)
|
||||
set (ENABLE_CHECK_HEAVY_BUILDS 1)
|
||||
set (GLIBC_COMPATIBILITY OFF)
|
||||
set (ENABLE_BENCHMARKS 0)
|
||||
|
||||
# For codegen_select_fuzzer
|
||||
set (ENABLE_PROTOBUF 1)
|
||||
@ -168,8 +154,9 @@ endif ()
|
||||
|
||||
option(ENABLE_TESTS "Provide unit_test_dbms target with Google.Test unit tests" ON)
|
||||
option(ENABLE_EXAMPLES "Build all example programs in 'examples' subdirectories" OFF)
|
||||
option(ENABLE_BENCHMARKS "Build all benchmark programs in 'benchmarks' subdirectories" OFF)
|
||||
|
||||
if (OS_LINUX AND (ARCH_AMD64 OR ARCH_AARCH64) AND USE_STATIC_LIBRARIES AND NOT SPLIT_SHARED_LIBRARIES AND NOT USE_MUSL)
|
||||
if (OS_LINUX AND (ARCH_AMD64 OR ARCH_AARCH64) AND NOT USE_MUSL)
|
||||
# Only for Linux, x86_64 or aarch64.
|
||||
option(GLIBC_COMPATIBILITY "Enable compatibility with older glibc libraries." ON)
|
||||
elseif(GLIBC_COMPATIBILITY)
|
||||
@ -375,15 +362,15 @@ set (DEBUG_INFO_FLAGS "-g -gdwarf-4")
|
||||
|
||||
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${COMPILER_FLAGS}")
|
||||
set (CMAKE_CXX_FLAGS_RELWITHDEBINFO "${CMAKE_CXX_FLAGS_RELWITHDEBINFO} -O3 ${DEBUG_INFO_FLAGS} ${CMAKE_CXX_FLAGS_ADD}")
|
||||
set (CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -O0 ${DEBUG_INFO_FLAGS} -fno-inline ${CMAKE_CXX_FLAGS_ADD}")
|
||||
set (CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -O0 ${DEBUG_INFO_FLAGS} ${CMAKE_CXX_FLAGS_ADD}")
|
||||
|
||||
set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${COMPILER_FLAGS} ${CMAKE_C_FLAGS_ADD}")
|
||||
set (CMAKE_C_FLAGS_RELWITHDEBINFO "${CMAKE_C_FLAGS_RELWITHDEBINFO} -O3 ${DEBUG_INFO_FLAGS} ${CMAKE_C_FLAGS_ADD}")
|
||||
set (CMAKE_C_FLAGS_DEBUG "${CMAKE_C_FLAGS_DEBUG} -O0 ${DEBUG_INFO_FLAGS} -fno-inline ${CMAKE_C_FLAGS_ADD}")
|
||||
set (CMAKE_C_FLAGS_DEBUG "${CMAKE_C_FLAGS_DEBUG} -O0 ${DEBUG_INFO_FLAGS} ${CMAKE_C_FLAGS_ADD}")
|
||||
|
||||
set (CMAKE_ASM_FLAGS "${CMAKE_ASM_FLAGS} ${COMPILER_FLAGS} ${CMAKE_ASM_FLAGS_ADD}")
|
||||
set (CMAKE_ASM_FLAGS_RELWITHDEBINFO "${CMAKE_ASM_FLAGS_RELWITHDEBINFO} -O3 ${DEBUG_INFO_FLAGS} ${CMAKE_ASM_FLAGS_ADD}")
|
||||
set (CMAKE_ASM_FLAGS_DEBUG "${CMAKE_ASM_FLAGS_DEBUG} -O0 ${DEBUG_INFO_FLAGS} -fno-inline ${CMAKE_ASM_FLAGS_ADD}")
|
||||
set (CMAKE_ASM_FLAGS_DEBUG "${CMAKE_ASM_FLAGS_DEBUG} -O0 ${DEBUG_INFO_FLAGS} ${CMAKE_ASM_FLAGS_ADD}")
|
||||
|
||||
if (COMPILER_CLANG)
|
||||
if (OS_DARWIN)
|
||||
@ -442,15 +429,11 @@ elseif (OS_DARWIN)
|
||||
include(cmake/darwin/default_libs.cmake)
|
||||
elseif (OS_FREEBSD)
|
||||
include(cmake/freebsd/default_libs.cmake)
|
||||
else()
|
||||
link_libraries(global-group)
|
||||
endif ()
|
||||
link_libraries(global-group)
|
||||
|
||||
if (NOT (OS_LINUX OR OS_DARWIN))
|
||||
# Using system libs can cause a lot of warnings in includes (on macro expansion).
|
||||
option(WERROR "Enable -Werror compiler option" OFF)
|
||||
else ()
|
||||
option(WERROR "Enable -Werror compiler option" ON)
|
||||
endif ()
|
||||
option(WERROR "Enable -Werror compiler option" ON)
|
||||
|
||||
if (WERROR)
|
||||
# Don't pollute CMAKE_CXX_FLAGS with -Werror as it will break some CMake checks.
|
||||
@ -469,22 +452,13 @@ endif ()
|
||||
|
||||
set (CMAKE_POSTFIX_VARIABLE "CMAKE_${CMAKE_BUILD_TYPE_UC}_POSTFIX")
|
||||
|
||||
if (USE_STATIC_LIBRARIES)
|
||||
set (CMAKE_POSITION_INDEPENDENT_CODE OFF)
|
||||
if (OS_LINUX AND NOT ARCH_AARCH64)
|
||||
set (CMAKE_POSITION_INDEPENDENT_CODE OFF)
|
||||
if (OS_LINUX AND NOT ARCH_AARCH64)
|
||||
# Slightly more efficient code can be generated
|
||||
# It's disabled for ARM because otherwise ClickHouse cannot run on Android.
|
||||
set (CMAKE_CXX_FLAGS_RELWITHDEBINFO "${CMAKE_CXX_FLAGS_RELWITHDEBINFO} -fno-pie")
|
||||
set (CMAKE_C_FLAGS_RELWITHDEBINFO "${CMAKE_C_FLAGS_RELWITHDEBINFO} -fno-pie")
|
||||
set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -no-pie -Wl,-no-pie")
|
||||
endif ()
|
||||
else ()
|
||||
set (CMAKE_POSITION_INDEPENDENT_CODE ON)
|
||||
# This is required for clang on Arch linux, that uses PIE by default.
|
||||
# See enable-SSP-and-PIE-by-default.patch [1].
|
||||
#
|
||||
# [1]: https://github.com/archlinux/svntogit-packages/blob/6e681aa860e65ad46a1387081482eb875c2200f2/trunk/enable-SSP-and-PIE-by-default.patch
|
||||
set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -no-pie")
|
||||
endif ()
|
||||
|
||||
if (ENABLE_TESTS)
|
||||
@ -495,13 +469,9 @@ endif ()
|
||||
|
||||
enable_testing() # Enable for tests without binary
|
||||
|
||||
option(ENABLE_EXTERNAL_OPENSSL "This option is insecure and not recommended for any occasions. If it is enabled, it allows building with alternative OpenSSL library. By default, ClickHouse is using BoringSSL, which is better. Do not use this option." OFF)
|
||||
option(ENABLE_OPENSSL "This option performs a build with OpenSSL. NOTE! This option is insecure and should never be used. By default, ClickHouse uses and only supports BoringSSL" OFF)
|
||||
|
||||
if (ENABLE_EXTERNAL_OPENSSL)
|
||||
message (STATUS "Build and uses OpenSSL library instead of BoringSSL. This is strongly discouraged. Your build of ClickHouse will be unsupported.")
|
||||
set(ENABLE_SSL 1)
|
||||
target_compile_options(global-group INTERFACE "-Wno-deprecated-declarations")
|
||||
endif ()
|
||||
option(ENABLE_OPENSSL_DYNAMIC "This option removes SSL from ClickHouse and will link to the OpenSSL version supplied by OS." OFF)
|
||||
|
||||
# when installing to /usr - place configs to /etc but for /usr/local place to /usr/local/etc
|
||||
if (CMAKE_INSTALL_PREFIX STREQUAL "/usr")
|
||||
@ -510,10 +480,7 @@ else ()
|
||||
set (CLICKHOUSE_ETC_DIR "${CMAKE_INSTALL_PREFIX}/etc")
|
||||
endif ()
|
||||
|
||||
message (STATUS
|
||||
"Building for: ${CMAKE_SYSTEM} ${CMAKE_SYSTEM_PROCESSOR} ${CMAKE_LIBRARY_ARCHITECTURE} ;
|
||||
USE_STATIC_LIBRARIES=${USE_STATIC_LIBRARIES}
|
||||
SPLIT_SHARED_LIBRARIES=${SPLIT_SHARED_LIBRARIES}")
|
||||
message (STATUS "Building for: ${CMAKE_SYSTEM} ${CMAKE_SYSTEM_PROCESSOR} ${CMAKE_LIBRARY_ARCHITECTURE}")
|
||||
|
||||
include (GNUInstallDirs)
|
||||
|
||||
@ -559,7 +526,7 @@ macro (clickhouse_add_executable target)
|
||||
# - _je_zone_register due to JEMALLOC_PRIVATE_NAMESPACE=je_ under OS X.
|
||||
# - but jemalloc-cmake does not run private_namespace.sh
|
||||
# so symbol name should be _zone_register
|
||||
if (ENABLE_JEMALLOC AND USE_STATIC_LIBRARIES AND OS_DARWIN)
|
||||
if (ENABLE_JEMALLOC AND OS_DARWIN)
|
||||
set_property(TARGET ${target} APPEND PROPERTY LINK_OPTIONS -u_zone_register)
|
||||
endif()
|
||||
endif()
|
||||
@ -592,7 +559,7 @@ add_subdirectory (programs)
|
||||
add_subdirectory (tests)
|
||||
add_subdirectory (utils)
|
||||
|
||||
include (cmake/sanitize_target_link_libraries.cmake)
|
||||
include (cmake/sanitize_targets.cmake)
|
||||
|
||||
# Build native targets if necessary
|
||||
get_property(NATIVE_BUILD_TARGETS GLOBAL PROPERTY NATIVE_BUILD_TARGETS)
|
||||
@ -615,6 +582,8 @@ if (NATIVE_BUILD_TARGETS
|
||||
"-DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}"
|
||||
"-DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}"
|
||||
"-DENABLE_CCACHE=${ENABLE_CCACHE}"
|
||||
# Avoid overriding .cargo/config.toml with native toolchain.
|
||||
"-DENABLE_RUST=OFF"
|
||||
"-DENABLE_CLICKHOUSE_SELF_EXTRACTING=${ENABLE_CLICKHOUSE_SELF_EXTRACTING}"
|
||||
${CMAKE_SOURCE_DIR}
|
||||
WORKING_DIRECTORY "${NATIVE_BUILD_DIR}"
|
||||
|
4
LICENSE
@ -1,4 +1,4 @@
Copyright 2016-2022 ClickHouse, Inc.
Copyright 2016-2023 ClickHouse, Inc.

Apache License
Version 2.0, January 2004
@ -188,7 +188,7 @@ Copyright 2016-2022 ClickHouse, Inc.
same "printed page" as the copyright notice for easier
identification within third-party archives.

Copyright 2016-2022 ClickHouse, Inc.
Copyright 2016-2023 ClickHouse, Inc.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
@ -16,6 +16,5 @@ ClickHouse® is an open-source column-oriented database management system that a
* [Contacts](https://clickhouse.com/company/contact) can help to get your questions answered if there are any.

## Upcoming events
* [**v22.11 Release Webinar**](https://clickhouse.com/company/events/v22-11-release-webinar) Original creator, co-founder, and CTO of ClickHouse Alexey Milovidov will walk us through the highlights of the release, provide live demos, and share vision into what is coming in the roadmap.
* [**ClickHouse Meetup at the Deutsche Bank office in Berlin**](https://www.meetup.com/clickhouse-berlin-user-group/events/289311596/) Hear from Deutsche Bank on why they chose ClickHouse for big sensitive data in a regulated environment. The ClickHouse team will then present how ClickHouse is used for real time financial data analytics, including tick data, trade analytics and risk management.
* [**AWS re:Invent**](https://clickhouse.com/company/events/aws-reinvent) Core members of the ClickHouse team -- including 2 of our founders -- will be at re:Invent from November 29 to December 3. We are available on the show floor, but are also determining interest in holding an event during the time there.
* **Recording available**: [**v23.1 Release Webinar**](https://www.youtube.com/watch?v=zYSZXBnTMSE) 23.1 is the ClickHouse New Year release. Original creator, co-founder, and CTO of ClickHouse Alexey Milovidov will walk us through the highlights of the release. Inverted indices, query cache, and so -- very -- much more.
* **Recording available**: [**ClickHouse Meetup at the CHEQ office in Tel Aviv**](https://www.meetup.com/clickhouse-tel-aviv-user-group/events/289599423/) - We are very excited to be holding our next in-person ClickHouse meetup at the CHEQ office in Tel Aviv! Hear from CHEQ, ServiceNow and Contentsquare, as well as a deep dive presentation from ClickHouse CTO Alexey Milovidov. Join us for a fun evening of talks, food and discussion!
19
SECURITY.md
@ -13,9 +13,11 @@ The following versions of ClickHouse server are currently being supported with s

| Version | Supported |
|:-|:-|
| 23.1 | ✔️ |
| 22.12 | ✔️ |
| 22.11 | ✔️ |
| 22.10 | ✔️ |
| 22.9 | ✔️ |
| 22.10 | ❌ |
| 22.9 | ❌ |
| 22.8 | ✔️ |
| 22.7 | ❌ |
| 22.6 | ❌ |
@ -24,18 +26,7 @@ The following versions of ClickHouse server are currently being supported with s
| 22.3 | ✔️ |
| 22.2 | ❌ |
| 22.1 | ❌ |
| 21.12 | ❌ |
| 21.11 | ❌ |
| 21.10 | ❌ |
| 21.9 | ❌ |
| 21.8 | ❌ |
| 21.7 | ❌ |
| 21.6 | ❌ |
| 21.5 | ❌ |
| 21.4 | ❌ |
| 21.3 | ❌ |
| 21.2 | ❌ |
| 21.1 | ❌ |
| 21.* | ❌ |
| 20.* | ❌ |
| 19.* | ❌ |
| 18.* | ❌ |
@ -10,7 +10,7 @@
#include <base/MoveOrCopyIfThrow.h>

/** Pool for limited size objects that cannot be used from different threads simultaneously.
  * The main use case is to have fixed size of objects that can be reused in difference threads during their lifetime
  * The main use case is to have fixed size of objects that can be reused in different threads during their lifetime
  * and have to be initialized on demand.
  * Two main properties of pool are allocated objects size and borrowed objects size.
  * Allocated objects size is size of objects that are currently allocated by the pool.
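The comment above describes the pool contract without showing code. As a rough standalone sketch of that contract (the class name SimpleBorrowPool and its exact locking policy are assumptions for illustration, not the actual BorrowedObjectPool interface), objects are created on demand up to a fixed limit and each borrowed object is used by one thread at a time:

#include <condition_variable>
#include <cstddef>
#include <functional>
#include <mutex>
#include <vector>

template <typename T>
class SimpleBorrowPool
{
public:
    SimpleBorrowPool(size_t max_size_, std::function<T()> factory_)
        : max_size(max_size_), factory(std::move(factory_)) {}

    /// Allocated objects size grows lazily; a borrowed object is exclusive to the caller.
    T borrow()
    {
        std::unique_lock lock(mutex);
        if (free_objects.empty() && allocated < max_size)
        {
            ++allocated;
            lock.unlock();
            return factory(); /// initialized on demand, outside the lock
        }
        condition.wait(lock, [this] { return !free_objects.empty(); });
        T object = std::move(free_objects.back());
        free_objects.pop_back();
        return object;
    }

    void giveBack(T object)
    {
        {
            std::lock_guard lock(mutex);
            free_objects.push_back(std::move(object));
        }
        condition.notify_one();
    }

private:
    const size_t max_size;
    std::function<T()> factory;
    size_t allocated = 0;
    std::vector<T> free_objects;
    std::mutex mutex;
    std::condition_variable condition;
};

A caller would pair pool.borrow() with pool.giveBack(std::move(object)), which is what keeps the allocated and borrowed sizes mentioned in the comment consistent.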
@ -8,16 +8,13 @@ set (SRCS
    getPageSize.cpp
    getThreadId.cpp
    JSON.cpp
    LineReader.cpp
    mremap.cpp
    phdr_cache.cpp
    preciseExp10.cpp
    setTerminalEcho.cpp
    shift10.cpp
    sleep.cpp
    terminalColors.cpp
    errnoToString.cpp
    ReplxxLineReader.cpp
    StringRef.cpp
    safeExit.cpp
    throwError.cpp
@ -42,10 +39,6 @@ endif ()

target_include_directories(common PUBLIC .. "${CMAKE_CURRENT_BINARY_DIR}/..")

if (OS_DARWIN AND NOT USE_STATIC_LIBRARIES)
    target_link_libraries(common PUBLIC -Wl,-U,_inside_main)
endif()

target_link_libraries (common
    PUBLIC
        ch_contrib::cityhash
53
base/base/IPv4andIPv6.h
Normal file
@ -0,0 +1,53 @@
#pragma once

#include <base/strong_typedef.h>
#include <base/extended_types.h>
#include <Common/memcmpSmall.h>

namespace DB
{

using IPv4 = StrongTypedef<UInt32, struct IPv4Tag>;

struct IPv6 : StrongTypedef<UInt128, struct IPv6Tag>
{
    constexpr IPv6() = default;
    constexpr explicit IPv6(const UInt128 & x) : StrongTypedef(x) {}
    constexpr explicit IPv6(UInt128 && x) : StrongTypedef(std::move(x)) {}

    IPv6 & operator=(const UInt128 & rhs) { StrongTypedef::operator=(rhs); return *this; }
    IPv6 & operator=(UInt128 && rhs) { StrongTypedef::operator=(std::move(rhs)); return *this; }

    bool operator<(const IPv6 & rhs) const
    {
        return
            memcmp16(
                reinterpret_cast<const unsigned char *>(toUnderType().items),
                reinterpret_cast<const unsigned char *>(rhs.toUnderType().items)
            ) < 0;
    }

    bool operator>(const IPv6 & rhs) const
    {
        return
            memcmp16(
                reinterpret_cast<const unsigned char *>(toUnderType().items),
                reinterpret_cast<const unsigned char *>(rhs.toUnderType().items)
            ) > 0;
    }

    bool operator==(const IPv6 & rhs) const
    {
        return
            memcmp16(
                reinterpret_cast<const unsigned char *>(toUnderType().items),
                reinterpret_cast<const unsigned char *>(rhs.toUnderType().items)
            ) == 0;
    }

    bool operator<=(const IPv6 & rhs) const { return !operator>(rhs); }
    bool operator>=(const IPv6 & rhs) const { return !operator<(rhs); }
    bool operator!=(const IPv6 & rhs) const { return !operator==(rhs); }
};

}
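As a rough standalone illustration of the ordering this header defines (StrongTypedef, UInt128 and memcmp16 are ClickHouse internals, so std::array and std::memcmp stand in for them here), comparison is simply a byte-wise compare of the 16 raw bytes:

#include <array>
#include <cassert>
#include <cstring>

struct IPv6Bytes
{
    std::array<unsigned char, 16> items{};

    bool operator<(const IPv6Bytes & rhs) const { return std::memcmp(items.data(), rhs.items.data(), 16) < 0; }
    bool operator==(const IPv6Bytes & rhs) const { return std::memcmp(items.data(), rhs.items.data(), 16) == 0; }
    bool operator!=(const IPv6Bytes & rhs) const { return !(*this == rhs); }
};

int main()
{
    IPv6Bytes unspecified;      /// ::
    IPv6Bytes loopback;         /// ::1
    loopback.items[15] = 1;
    assert(unspecified < loopback);
    assert(unspecified != loopback);
}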
@ -2,6 +2,7 @@

#include "Decimal.h"
#include "UUID.h"
#include "IPv4andIPv6.h"

namespace DB
{
@ -35,6 +36,8 @@ TN_MAP(Float32)
TN_MAP(Float64)
TN_MAP(String)
TN_MAP(UUID)
TN_MAP(IPv4)
TN_MAP(IPv6)
TN_MAP(Decimal32)
TN_MAP(Decimal64)
TN_MAP(Decimal128)
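TN_MAP follows the usual specialize-a-trait-per-type pattern. A minimal self-contained sketch of that pattern (the trait name type_name below is made up for the example and is not the real TypeName machinery):

#include <cstdint>
#include <iostream>
#include <string_view>

template <typename T>
inline constexpr std::string_view type_name = "Unknown";

/// One macro invocation per supported type, exactly like the TN_MAP list above.
#define TN_MAP(T) template <> inline constexpr std::string_view type_name<T> = #T;

TN_MAP(uint32_t)
TN_MAP(double)
#undef TN_MAP

int main()
{
    std::cout << type_name<double> << '\n';   /// prints "double"
    std::cout << type_name<float> << '\n';    /// falls back to "Unknown"
}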
@ -12,7 +12,21 @@
template <typename To, typename From>
std::decay_t<To> bit_cast(const From & from)
{
    /**
      * Assume the source value is 0xAABBCCDD (i.e. sizeof(from) == 4).
      * Its BE representation is 0xAABBCCDD, the LE representation is 0xDDCCBBAA.
      * Further assume, sizeof(res) == 8 and that res is initially zeroed out.
      * With LE, the result after bit_cast will be 0xDDCCBBAA00000000 --> input value == output value.
      * With BE, the result after bit_cast will be 0x00000000AABBCCDD --> input value == output value.
      */
    To res {};
    if constexpr (std::endian::native == std::endian::little)
        memcpy(static_cast<void*>(&res), &from, std::min(sizeof(res), sizeof(from)));
    else
    {
        uint32_t offset_to = (sizeof(res) > sizeof(from)) ? (sizeof(res) - sizeof(from)) : 0;
        uint32_t offset_from = (sizeof(from) > sizeof(res)) ? (sizeof(from) - sizeof(res)) : 0;
        memcpy(reinterpret_cast<char *>(&res) + offset_to, reinterpret_cast<const char *>(&from) + offset_from, std::min(sizeof(res), sizeof(from)));
    }
    return res;
}
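A compilable standalone sketch of the behaviour the comment describes (the name widening_bit_cast is invented for the example; only the standard library is used): the numeric value survives a widening cast on either byte order.

#include <algorithm>
#include <bit>
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <cstring>

template <typename To, typename From>
To widening_bit_cast(const From & from)
{
    To res{};
    if constexpr (std::endian::native == std::endian::little)
        std::memcpy(&res, &from, std::min(sizeof(res), sizeof(from)));
    else
    {
        /// On big-endian hosts the least significant bytes sit at the highest addresses,
        /// so copy into (and from) the tail of the wider object.
        const size_t offset_to = sizeof(res) > sizeof(from) ? sizeof(res) - sizeof(from) : 0;
        const size_t offset_from = sizeof(from) > sizeof(res) ? sizeof(from) - sizeof(res) : 0;
        std::memcpy(reinterpret_cast<char *>(&res) + offset_to,
                    reinterpret_cast<const char *>(&from) + offset_from,
                    std::min(sizeof(res), sizeof(from)));
    }
    return res;
}

int main()
{
    const uint32_t narrow = 0xAABBCCDD;
    const auto wide = widening_bit_cast<uint64_t>(narrow);
    assert(wide == 0xAABBCCDDULL);   /// holds on little- and big-endian targets alike
}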
@ -144,6 +144,13 @@
|
||||
# define TSA_REQUIRES_SHARED(...) __attribute__((requires_shared_capability(__VA_ARGS__))) /// thread needs shared possession of given capability
|
||||
# define TSA_ACQUIRED_AFTER(...) __attribute__((acquired_after(__VA_ARGS__))) /// annotated lock must be locked after given lock
|
||||
# define TSA_NO_THREAD_SAFETY_ANALYSIS __attribute__((no_thread_safety_analysis)) /// disable TSA for a function
|
||||
# define TSA_CAPABILITY(...) __attribute__((capability(__VA_ARGS__))) /// object of a class can be used as capability
|
||||
# define TSA_ACQUIRE(...) __attribute__((acquire_capability(__VA_ARGS__))) /// function acquires a capability, but does not release it
|
||||
# define TSA_TRY_ACQUIRE(...) __attribute__((try_acquire_capability(__VA_ARGS__))) /// function tries to acquire a capability and returns a boolean value indicating success or failure
|
||||
# define TSA_RELEASE(...) __attribute__((release_capability(__VA_ARGS__))) /// function releases the given capability
|
||||
# define TSA_ACQUIRE_SHARED(...) __attribute__((acquire_shared_capability(__VA_ARGS__))) /// function acquires a shared capability, but does not release it
|
||||
# define TSA_TRY_ACQUIRE_SHARED(...) __attribute__((try_acquire_shared_capability(__VA_ARGS__))) /// function tries to acquire a shared capability and returns a boolean value indicating success or failure
|
||||
# define TSA_RELEASE_SHARED(...) __attribute__((release_shared_capability(__VA_ARGS__))) /// function releases the given shared capability
|
||||
|
||||
/// Macros for suppressing TSA warnings for specific reads/writes (instead of suppressing it for the whole function)
|
||||
/// They use a lambda function to apply function attribute to a single statement. This enable us to suppress warnings locally instead of
|
||||
@ -164,6 +171,13 @@
|
||||
# define TSA_REQUIRES(...)
|
||||
# define TSA_REQUIRES_SHARED(...)
|
||||
# define TSA_NO_THREAD_SAFETY_ANALYSIS
|
||||
# define TSA_CAPABILITY(...)
|
||||
# define TSA_ACQUIRE(...)
|
||||
# define TSA_TRY_ACQUIRE(...)
|
||||
# define TSA_RELEASE(...)
|
||||
# define TSA_ACQUIRE_SHARED(...)
|
||||
# define TSA_TRY_ACQUIRE_SHARED(...)
|
||||
# define TSA_RELEASE_SHARED(...)
|
||||
|
||||
# define TSA_SUPPRESS_WARNING_FOR_READ(x) (x)
|
||||
# define TSA_SUPPRESS_WARNING_FOR_WRITE(x) (x)
|
||||
|
@ -1,28 +0,0 @@
#include <base/setTerminalEcho.h>
#include <base/errnoToString.h>
#include <stdexcept>
#include <cstring>
#include <string>
#include <termios.h>
#include <unistd.h>


void setTerminalEcho(bool enable)
{
    /// Obtain terminal attributes,
    /// toggle the ECHO flag
    /// and set them back.

    struct termios tty{};

    if (0 != tcgetattr(STDIN_FILENO, &tty))
        throw std::runtime_error(std::string("setTerminalEcho failed get: ") + errnoToString());

    if (enable)
        tty.c_lflag |= ECHO;
    else
        tty.c_lflag &= ~ECHO;

    if (0 != tcsetattr(STDIN_FILENO, TCSANOW, &tty))
        throw std::runtime_error(std::string("setTerminalEcho failed set: ") + errnoToString());
}
@ -1,4 +0,0 @@
#pragma once

/// Enable or disable echoing of typed characters. Throws std::runtime_error on error.
void setTerminalEcho(bool enable);
@ -187,8 +187,20 @@ struct integer<Bits, Signed>::_impl
    static_assert(Bits % base_bits == 0);

    /// Simple iteration in both directions
    static constexpr unsigned little(unsigned idx) { return idx; }
    static constexpr unsigned big(unsigned idx) { return item_count - 1 - idx; }
    static constexpr unsigned little(unsigned idx)
    {
        if constexpr (std::endian::native == std::endian::little)
            return idx;
        else
            return item_count - 1 - idx;
    }
    static constexpr unsigned big(unsigned idx)
    {
        if constexpr (std::endian::native == std::endian::little)
            return item_count - 1 - idx;
        else
            return idx;
    }
    static constexpr unsigned any(unsigned idx) { return idx; }

    template <class T>
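The change routes item accesses through little()/big() so that index 0 always refers to the least significant limb regardless of host byte order. A tiny standalone sketch of that mapping, with item_count fixed to 4 purely for illustration:

#include <bit>
#include <cstddef>

constexpr size_t item_count = 4;   /// e.g. a 256-bit integer stored as four 64-bit limbs

/// Storage index of the idx-th least significant limb.
constexpr size_t little(size_t idx)
{
    if constexpr (std::endian::native == std::endian::little)
        return idx;
    else
        return item_count - 1 - idx;
}

/// Storage index of the idx-th most significant limb.
constexpr size_t big(size_t idx)
{
    return little(item_count - 1 - idx);
}

static_assert(little(0) == big(item_count - 1));   /// both name the least significant limb

int main()
{
}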
@ -240,20 +252,20 @@ struct integer<Bits, Signed>::_impl
|
||||
{
|
||||
static_assert(sizeof(Integral) <= sizeof(base_type));
|
||||
|
||||
self.items[0] = _impl::to_Integral(rhs);
|
||||
self.items[little(0)] = _impl::to_Integral(rhs);
|
||||
|
||||
if constexpr (std::is_signed_v<Integral>)
|
||||
{
|
||||
if (rhs < 0)
|
||||
{
|
||||
for (size_t i = 1; i < item_count; ++i)
|
||||
self.items[i] = -1;
|
||||
for (unsigned i = 1; i < item_count; ++i)
|
||||
self.items[little(i)] = -1;
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
for (size_t i = 1; i < item_count; ++i)
|
||||
self.items[i] = 0;
|
||||
for (unsigned i = 1; i < item_count; ++i)
|
||||
self.items[little(i)] = 0;
|
||||
}
|
||||
|
||||
template <typename TupleLike, size_t i = 0>
|
||||
@ -348,7 +360,7 @@ struct integer<Bits, Signed>::_impl
|
||||
constexpr const unsigned to_copy = min_bits / base_bits;
|
||||
|
||||
for (unsigned i = 0; i < to_copy; ++i)
|
||||
self.items[i] = rhs.items[i];
|
||||
self.items[little(i)] = rhs.items[little(i)];
|
||||
|
||||
if constexpr (Bits > Bits2)
|
||||
{
|
||||
@ -357,13 +369,13 @@ struct integer<Bits, Signed>::_impl
|
||||
if (rhs < 0)
|
||||
{
|
||||
for (unsigned i = to_copy; i < item_count; ++i)
|
||||
self.items[i] = -1;
|
||||
self.items[little(i)] = -1;
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
for (unsigned i = to_copy; i < item_count; ++i)
|
||||
self.items[i] = 0;
|
||||
self.items[little(i)] = 0;
|
||||
}
|
||||
}
|
||||
|
||||
@ -454,7 +466,7 @@ private:
|
||||
{
|
||||
if constexpr (sizeof(T) <= sizeof(base_type))
|
||||
{
|
||||
if (0 == idx)
|
||||
if (little(0) == idx)
|
||||
return static_cast<base_type>(x);
|
||||
}
|
||||
else if (idx * sizeof(base_type) < sizeof(T))
|
||||
@ -475,7 +487,7 @@ private:
|
||||
|
||||
for (unsigned i = 0; i < op_items; ++i)
|
||||
{
|
||||
base_type rhs_item = get_item(rhs, i);
|
||||
base_type rhs_item = get_item(rhs, little(i));
|
||||
base_type & res_item = res.items[little(i)];
|
||||
|
||||
underflows[i] = res_item < rhs_item;
|
||||
@ -508,7 +520,7 @@ private:
|
||||
|
||||
for (unsigned i = 0; i < op_items; ++i)
|
||||
{
|
||||
base_type rhs_item = get_item(rhs, i);
|
||||
base_type rhs_item = get_item(rhs, little(i));
|
||||
base_type & res_item = res.items[little(i)];
|
||||
|
||||
res_item += rhs_item;
|
||||
@ -580,12 +592,12 @@ private:
|
||||
else if constexpr (Bits == 128 && sizeof(base_type) == 8)
|
||||
{
|
||||
using CompilerUInt128 = unsigned __int128;
|
||||
CompilerUInt128 a = (CompilerUInt128(lhs.items[1]) << 64) + lhs.items[0]; // NOLINT(clang-analyzer-core.UndefinedBinaryOperatorResult)
|
||||
CompilerUInt128 b = (CompilerUInt128(rhs.items[1]) << 64) + rhs.items[0]; // NOLINT(clang-analyzer-core.UndefinedBinaryOperatorResult)
|
||||
CompilerUInt128 a = (CompilerUInt128(lhs.items[little(1)]) << 64) + lhs.items[little(0)]; // NOLINT(clang-analyzer-core.UndefinedBinaryOperatorResult)
|
||||
CompilerUInt128 b = (CompilerUInt128(rhs.items[little(1)]) << 64) + rhs.items[little(0)]; // NOLINT(clang-analyzer-core.UndefinedBinaryOperatorResult)
|
||||
CompilerUInt128 c = a * b;
|
||||
integer<Bits, Signed> res;
|
||||
res.items[0] = c;
|
||||
res.items[1] = c >> 64;
|
||||
res.items[little(0)] = c;
|
||||
res.items[little(1)] = c >> 64;
|
||||
return res;
|
||||
}
|
||||
else
|
||||
@ -597,7 +609,7 @@ private:
|
||||
#endif
|
||||
for (unsigned i = 0; i < item_count; ++i)
|
||||
{
|
||||
base_type rhs_item = get_item(rhs, i);
|
||||
base_type rhs_item = get_item(rhs, little(i));
|
||||
unsigned pos = i * base_bits;
|
||||
|
||||
while (rhs_item)
|
||||
@ -792,7 +804,7 @@ public:
|
||||
integer<Bits, Signed> res;
|
||||
|
||||
for (unsigned i = 0; i < item_count; ++i)
|
||||
res.items[little(i)] = lhs.items[little(i)] | get_item(rhs, i);
|
||||
res.items[little(i)] = lhs.items[little(i)] | get_item(rhs, little(i));
|
||||
return res;
|
||||
}
|
||||
else
|
||||
@ -810,7 +822,7 @@ public:
|
||||
integer<Bits, Signed> res;
|
||||
|
||||
for (unsigned i = 0; i < item_count; ++i)
|
||||
res.items[little(i)] = lhs.items[little(i)] & get_item(rhs, i);
|
||||
res.items[little(i)] = lhs.items[little(i)] & get_item(rhs, little(i));
|
||||
return res;
|
||||
}
|
||||
else
|
||||
@ -845,17 +857,17 @@ public:
|
||||
{
|
||||
using CompilerUInt128 = unsigned __int128;
|
||||
|
||||
CompilerUInt128 a = (CompilerUInt128(numerator.items[1]) << 64) + numerator.items[0]; // NOLINT(clang-analyzer-core.UndefinedBinaryOperatorResult)
|
||||
CompilerUInt128 b = (CompilerUInt128(denominator.items[1]) << 64) + denominator.items[0]; // NOLINT(clang-analyzer-core.UndefinedBinaryOperatorResult)
|
||||
CompilerUInt128 a = (CompilerUInt128(numerator.items[little(1)]) << 64) + numerator.items[little(0)]; // NOLINT(clang-analyzer-core.UndefinedBinaryOperatorResult)
|
||||
CompilerUInt128 b = (CompilerUInt128(denominator.items[little(1)]) << 64) + denominator.items[little(0)]; // NOLINT(clang-analyzer-core.UndefinedBinaryOperatorResult)
|
||||
CompilerUInt128 c = a / b; // NOLINT
|
||||
|
||||
integer<Bits, Signed> res;
|
||||
res.items[0] = c;
|
||||
res.items[1] = c >> 64;
|
||||
res.items[little(0)] = c;
|
||||
res.items[little(1)] = c >> 64;
|
||||
|
||||
CompilerUInt128 remainder = a - b * c;
|
||||
numerator.items[0] = remainder;
|
||||
numerator.items[1] = remainder >> 64;
|
||||
numerator.items[little(0)] = remainder;
|
||||
numerator.items[little(1)] = remainder >> 64;
|
||||
|
||||
return res;
|
||||
}
|
||||
@ -1039,15 +1051,15 @@ constexpr integer<Bits, Signed>::integer(std::initializer_list<T> il) noexcept
|
||||
else
|
||||
{
|
||||
auto it = il.begin();
|
||||
for (size_t i = 0; i < _impl::item_count; ++i)
|
||||
for (unsigned i = 0; i < _impl::item_count; ++i)
|
||||
{
|
||||
if (it < il.end())
|
||||
{
|
||||
items[i] = *it;
|
||||
items[_impl::little(i)] = *it;
|
||||
++it;
|
||||
}
|
||||
else
|
||||
items[i] = 0;
|
||||
items[_impl::little(i)] = 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1208,7 +1220,7 @@ constexpr integer<Bits, Signed>::operator T() const noexcept
|
||||
|
||||
UnsignedT res{};
|
||||
for (unsigned i = 0; i < _impl::item_count && i < (sizeof(T) + sizeof(base_type) - 1) / sizeof(base_type); ++i)
|
||||
res += UnsignedT(items[i]) << (sizeof(base_type) * 8 * i); // NOLINT(clang-analyzer-core.UndefinedBinaryOperatorResult)
|
||||
res += UnsignedT(items[_impl::little(i)]) << (sizeof(base_type) * 8 * i); // NOLINT(clang-analyzer-core.UndefinedBinaryOperatorResult)
|
||||
|
||||
return res;
|
||||
}
|
||||
|
@ -37,7 +37,7 @@ if (GLIBC_COMPATIBILITY)

    target_include_directories(glibc-compatibility PRIVATE libcxxabi ${musl_arch_include_dir})

    if (NOT USE_STATIC_LIBRARIES AND NOT USE_STATIC_LIBRARIES)
    if (ENABLE_OPENSSL_DYNAMIC)
        target_compile_options(glibc-compatibility PRIVATE -fPIC)
    endif ()

@ -220,13 +220,13 @@ struct statx {
    uint32_t stx_dev_minor;
    uint64_t spare[14];
};
#endif

int statx(int fd, const char *restrict path, int flag,
          unsigned int mask, struct statx *restrict statxbuf)
{
    return syscall(SYS_statx, fd, path, flag, mask, statxbuf);
}
#endif


#include <syscall.h>
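The fallback above forwards statx() directly to the kernel when libc does not provide it. A hedged, Linux-only usage sketch (the path is arbitrary; the caller does not care whether the symbol comes from glibc or from a compatibility layer like this one):

#define _GNU_SOURCE
#include <fcntl.h>
#include <stdio.h>
#include <sys/stat.h>

int main()
{
    struct statx stx;
    /// Ask only for the file size; other statx fields stay unqueried.
    if (statx(AT_FDCWD, "/etc/hostname", 0, STATX_SIZE, &stx) == 0)
        printf("size: %llu\n", (unsigned long long) stx.stx_size);
    return 0;
}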
@ -153,7 +153,7 @@ restart:
|
||||
|
||||
/* Restore old terminal settings and signals. */
|
||||
if (memcmp(&term, &oterm, sizeof(term)) != 0) {
|
||||
const int sigttou = signo[SIGTTOU];
|
||||
const int sigttou = (int)signo[SIGTTOU];
|
||||
|
||||
/* Ignore SIGTTOU generated when we are not the fg pgrp. */
|
||||
while (tcsetattr(input, TCSAFLUSH|TCSASOFT, &oterm) == -1 &&
|
||||
|
@ -5,7 +5,7 @@ if (CMAKE_SYSTEM_PROCESSOR MATCHES "amd64|x86_64")
|
||||
set (ARCH_AMD64 1)
|
||||
elseif (CMAKE_SYSTEM_PROCESSOR MATCHES "^(aarch64.*|AARCH64.*|arm64.*|ARM64.*)")
|
||||
set (ARCH_AARCH64 1)
|
||||
elseif (CMAKE_SYSTEM_PROCESSOR MATCHES "^(ppc64le.*|PPC64LE.*)")
|
||||
elseif (CMAKE_SYSTEM_PROCESSOR MATCHES "^(powerpc64le.*|ppc64le.*|PPC64LE.*)")
|
||||
set (ARCH_PPC64LE 1)
|
||||
elseif (CMAKE_SYSTEM_PROCESSOR MATCHES "riscv64")
|
||||
set (ARCH_RISCV64 1)
|
||||
|
@ -2,11 +2,11 @@
|
||||
|
||||
# NOTE: has nothing common with DBMS_TCP_PROTOCOL_VERSION,
|
||||
# only DBMS_TCP_PROTOCOL_VERSION should be incremented on protocol changes.
|
||||
SET(VERSION_REVISION 54469)
|
||||
SET(VERSION_MAJOR 22)
|
||||
SET(VERSION_MINOR 12)
|
||||
SET(VERSION_REVISION 54471)
|
||||
SET(VERSION_MAJOR 23)
|
||||
SET(VERSION_MINOR 2)
|
||||
SET(VERSION_PATCH 1)
|
||||
SET(VERSION_GITHASH 0d211ed19849fe44b0e43fdebe2c15d76d560a77)
|
||||
SET(VERSION_DESCRIBE v22.12.1.1-testing)
|
||||
SET(VERSION_STRING 22.12.1.1)
|
||||
SET(VERSION_GITHASH dcaac47702510cc87ddf266bc524f6b7ce0a8e6e)
|
||||
SET(VERSION_DESCRIBE v23.2.1.1-testing)
|
||||
SET(VERSION_STRING 23.2.1.1)
|
||||
# end of autochange
|
||||
|
@ -5,21 +5,21 @@ if (ENABLE_CLANG_TIDY)
|
||||
|
||||
find_program (CLANG_TIDY_CACHE_PATH NAMES "clang-tidy-cache")
|
||||
if (CLANG_TIDY_CACHE_PATH)
|
||||
find_program (_CLANG_TIDY_PATH NAMES "clang-tidy" "clang-tidy-15" "clang-tidy-14" "clang-tidy-13" "clang-tidy-12")
|
||||
find_program (_CLANG_TIDY_PATH NAMES "clang-tidy-15" "clang-tidy-14" "clang-tidy-13" "clang-tidy-12" "clang-tidy")
|
||||
|
||||
# Why do we use ';' here?
|
||||
# It's a cmake black magic: https://cmake.org/cmake/help/latest/prop_tgt/LANG_CLANG_TIDY.html#prop_tgt:%3CLANG%3E_CLANG_TIDY
|
||||
# The CLANG_TIDY_PATH is passed to CMAKE_CXX_CLANG_TIDY, which follows CXX_CLANG_TIDY syntax.
|
||||
set (CLANG_TIDY_PATH "${CLANG_TIDY_CACHE_PATH};${_CLANG_TIDY_PATH}" CACHE STRING "A combined command to run clang-tidy with caching wrapper")
|
||||
else ()
|
||||
find_program (CLANG_TIDY_PATH NAMES "clang-tidy" "clang-tidy-15" "clang-tidy-14" "clang-tidy-13" "clang-tidy-12")
|
||||
find_program (CLANG_TIDY_PATH NAMES "clang-tidy-15" "clang-tidy-14" "clang-tidy-13" "clang-tidy-12" "clang-tidy")
|
||||
endif ()
|
||||
|
||||
if (CLANG_TIDY_PATH)
|
||||
message (STATUS
|
||||
"Using clang-tidy: ${CLANG_TIDY_PATH}.
|
||||
The checks will be run during build process.
|
||||
See the .clang-tidy file at the root directory to configure the checks.")
|
||||
The checks will be run during the build process.
|
||||
See the .clang-tidy file in the root directory to configure the checks.")
|
||||
|
||||
set (USE_CLANG_TIDY ON)
|
||||
|
||||
|
@ -102,6 +102,11 @@ elseif (ARCH_AMD64)
|
||||
SET(ENABLE_AVX512_FOR_SPEC_OP 0)
|
||||
endif()
|
||||
|
||||
# ClickHouse can be cross-compiled (e.g. on an ARM host for x86) but it is also possible to build ClickHouse on x86 w/o AVX for x86 w/
|
||||
# AVX. We only check that the compiler can emit certain SIMD instructions, we don't care if the host system is able to run the binary.
|
||||
# Therefore, use check_cxx_source_compiles (= does the code compile+link?) instead of check_cxx_source_runs (= does the code
|
||||
# compile+link+run).
|
||||
|
||||
set (TEST_FLAG "-mssse3")
|
||||
set (CMAKE_REQUIRED_FLAGS "${TEST_FLAG} -O0")
|
||||
check_cxx_source_compiles("
|
||||
|
@ -23,6 +23,7 @@ set(THREADS_PREFER_PTHREAD_FLAG ON)
|
||||
find_package(Threads REQUIRED)
|
||||
|
||||
include (cmake/cxx.cmake)
|
||||
link_libraries(global-group)
|
||||
|
||||
target_link_libraries(global-group INTERFACE
|
||||
$<TARGET_PROPERTY:global-libs,INTERFACE_LINK_LIBRARIES>
|
||||
|
@ -1,3 +1,6 @@
|
||||
# See linux/toolchain-x86_64.cmake for details about multiple load of toolchain file.
|
||||
include_guard(GLOBAL)
|
||||
|
||||
set (CMAKE_SYSTEM_NAME "Darwin")
|
||||
set (CMAKE_SYSTEM_PROCESSOR "aarch64")
|
||||
set (CMAKE_C_COMPILER_TARGET "aarch64-apple-darwin")
|
||||
|
@ -1,3 +1,6 @@
|
||||
# See linux/toolchain-x86_64.cmake for details about multiple load of toolchain file.
|
||||
include_guard(GLOBAL)
|
||||
|
||||
set (CMAKE_SYSTEM_NAME "Darwin")
|
||||
set (CMAKE_SYSTEM_PROCESSOR "x86_64")
|
||||
set (CMAKE_C_COMPILER_TARGET "x86_64-apple-darwin")
|
||||
|
@ -24,6 +24,7 @@ find_package(Threads REQUIRED)
|
||||
|
||||
include (cmake/unwind.cmake)
|
||||
include (cmake/cxx.cmake)
|
||||
link_libraries(global-group)
|
||||
|
||||
target_link_libraries(global-group INTERFACE
|
||||
$<TARGET_PROPERTY:global-libs,INTERFACE_LINK_LIBRARIES>
|
||||
|
@ -1,3 +1,6 @@
|
||||
# See linux/toolchain-x86_64.cmake for details about multiple load of toolchain file.
|
||||
include_guard(GLOBAL)
|
||||
|
||||
set (CMAKE_SYSTEM_NAME "FreeBSD")
|
||||
set (CMAKE_SYSTEM_PROCESSOR "aarch64")
|
||||
set (CMAKE_C_COMPILER_TARGET "aarch64-unknown-freebsd12")
|
||||
|
21
cmake/freebsd/toolchain-ppc64le.cmake
Normal file
21
cmake/freebsd/toolchain-ppc64le.cmake
Normal file
@ -0,0 +1,21 @@
|
||||
# See linux/toolchain-x86_64.cmake for details about multiple load of toolchain file.
|
||||
include_guard(GLOBAL)
|
||||
|
||||
set (CMAKE_SYSTEM_NAME "FreeBSD")
|
||||
set (CMAKE_SYSTEM_PROCESSOR "ppc64le")
|
||||
set (CMAKE_C_COMPILER_TARGET "powerpc64le-unknown-freebsd13")
|
||||
set (CMAKE_CXX_COMPILER_TARGET "powerpc64le-unknown-freebsd13")
|
||||
set (CMAKE_ASM_COMPILER_TARGET "powerpc64le-unknown-freebsd13")
|
||||
set (CMAKE_SYSROOT "${CMAKE_CURRENT_LIST_DIR}/../../contrib/sysroot/freebsd-ppc64le")
|
||||
|
||||
set (CMAKE_TRY_COMPILE_TARGET_TYPE STATIC_LIBRARY) # disable linkage check - it doesn't work in CMake
|
||||
|
||||
# Will be changed later, but somehow needed to be set here.
|
||||
set (CMAKE_AR "ar")
|
||||
set (CMAKE_RANLIB "ranlib")
|
||||
|
||||
set (HAS_PRE_1970_EXITCODE "0" CACHE STRING "Result from TRY_RUN" FORCE)
|
||||
set (HAS_PRE_1970_EXITCODE__TRYRUN_OUTPUT "" CACHE STRING "Output from TRY_RUN" FORCE)
|
||||
|
||||
set (HAS_POST_2038_EXITCODE "0" CACHE STRING "Result from TRY_RUN" FORCE)
|
||||
set (HAS_POST_2038_EXITCODE__TRYRUN_OUTPUT "" CACHE STRING "Output from TRY_RUN" FORCE)
|
@ -1,3 +1,6 @@
|
||||
# See linux/toolchain-x86_64.cmake for details about multiple load of toolchain file.
|
||||
include_guard(GLOBAL)
|
||||
|
||||
set (CMAKE_SYSTEM_NAME "FreeBSD")
|
||||
set (CMAKE_SYSTEM_PROCESSOR "x86_64")
|
||||
set (CMAKE_C_COMPILER_TARGET "x86_64-pc-freebsd11")
|
||||
|
@ -34,6 +34,13 @@ set(CMAKE_C_STANDARD_LIBRARIES ${DEFAULT_LIBS})
|
||||
set(THREADS_PREFER_PTHREAD_FLAG ON)
|
||||
find_package(Threads REQUIRED)
|
||||
|
||||
include (cmake/unwind.cmake)
|
||||
include (cmake/cxx.cmake)
|
||||
|
||||
# Delay the call to link the global interface after the libc++ libraries are included to avoid circular dependencies
|
||||
# which are ok with static libraries but not with dynamic ones
|
||||
link_libraries(global-group)
|
||||
|
||||
if (NOT OS_ANDROID)
|
||||
if (NOT USE_MUSL)
|
||||
# Our compatibility layer doesn't build under Android, many errors in musl.
|
||||
@ -42,9 +49,6 @@ if (NOT OS_ANDROID)
|
||||
add_subdirectory(base/harmful)
|
||||
endif ()
|
||||
|
||||
include (cmake/unwind.cmake)
|
||||
include (cmake/cxx.cmake)
|
||||
|
||||
target_link_libraries(global-group INTERFACE
|
||||
-Wl,--start-group
|
||||
$<TARGET_PROPERTY:global-libs,INTERFACE_LINK_LIBRARIES>
|
||||
|
@ -1,3 +1,6 @@
|
||||
# See linux/toolchain-x86_64.cmake for details about multiple load of toolchain file.
|
||||
include_guard(GLOBAL)
|
||||
|
||||
set (CMAKE_TRY_COMPILE_TARGET_TYPE STATIC_LIBRARY)
|
||||
|
||||
set (CMAKE_SYSTEM_NAME "Linux")
|
||||
|
@ -1,3 +1,6 @@
|
||||
# See linux/toolchain-x86_64.cmake for details about multiple load of toolchain file.
|
||||
include_guard(GLOBAL)
|
||||
|
||||
set (CMAKE_TRY_COMPILE_TARGET_TYPE STATIC_LIBRARY)
|
||||
|
||||
set (CMAKE_SYSTEM_NAME "Linux")
|
||||
|
@ -1,3 +1,6 @@
|
||||
# See linux/toolchain-x86_64.cmake for details about multiple load of toolchain file.
|
||||
include_guard(GLOBAL)
|
||||
|
||||
set (CMAKE_TRY_COMPILE_TARGET_TYPE STATIC_LIBRARY)
|
||||
|
||||
set (CMAKE_SYSTEM_NAME "Linux")
|
||||
|
@ -1,3 +1,6 @@
|
||||
# See linux/toolchain-x86_64.cmake for details about multiple load of toolchain file.
|
||||
include_guard(GLOBAL)
|
||||
|
||||
set (CMAKE_TRY_COMPILE_TARGET_TYPE STATIC_LIBRARY)
|
||||
|
||||
set (CMAKE_SYSTEM_NAME "Linux")
|
||||
|
@ -1,18 +1,15 @@
if (_CLICKHOUSE_TOOLCHAIN_FILE_LOADED)
    # During first run of cmake the toolchain file will be loaded twice,
    # - /usr/share/cmake-3.23/Modules/CMakeDetermineSystem.cmake
    # - /bld/CMakeFiles/3.23.2/CMakeSystem.cmake
    #
    # But once you already have non-empty cmake cache it will be loaded only
    # once:
    # - /bld/CMakeFiles/3.23.2/CMakeSystem.cmake
    #
    # This has no harm except for double load of toolchain will add
    # --gcc-toolchain multiple times that will not allow ccache to reuse the
    # cache.
    return()
endif()
set (_CLICKHOUSE_TOOLCHAIN_FILE_LOADED ON)
# During first run of cmake the toolchain file will be loaded twice,
# - /usr/share/cmake-3.23/Modules/CMakeDetermineSystem.cmake
# - /bld/CMakeFiles/3.23.2/CMakeSystem.cmake
#
# But once you already have non-empty cmake cache it will be loaded only
# once:
# - /bld/CMakeFiles/3.23.2/CMakeSystem.cmake
#
# This has no harm except for double load of toolchain will add
# --gcc-toolchain multiple times that will not allow ccache to reuse the
# cache.
include_guard(GLOBAL)

set (CMAKE_TRY_COMPILE_TARGET_TYPE STATIC_LIBRARY)

@ -25,7 +25,7 @@ if (SANITIZE)
|
||||
if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
|
||||
set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${ASAN_FLAGS}")
|
||||
endif()
|
||||
if (USE_STATIC_LIBRARIES AND CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
|
||||
if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
|
||||
set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -static-libasan")
|
||||
endif ()
|
||||
if (COMPILER_GCC)
|
||||
@ -50,7 +50,7 @@ if (SANITIZE)
|
||||
if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
|
||||
set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fsanitize=memory")
|
||||
endif()
|
||||
if (USE_STATIC_LIBRARIES AND CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
|
||||
if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
|
||||
set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -static-libmsan")
|
||||
endif ()
|
||||
|
||||
@ -71,7 +71,7 @@ if (SANITIZE)
|
||||
if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
|
||||
set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fsanitize=thread")
|
||||
endif()
|
||||
if (USE_STATIC_LIBRARIES AND CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
|
||||
if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
|
||||
set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -static-libtsan")
|
||||
endif ()
|
||||
if (COMPILER_GCC)
|
||||
@ -103,7 +103,7 @@ if (SANITIZE)
|
||||
if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
|
||||
set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fsanitize=undefined")
|
||||
endif()
|
||||
if (USE_STATIC_LIBRARIES AND CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
|
||||
if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
|
||||
set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -static-libubsan")
|
||||
endif ()
|
||||
if (COMPILER_GCC)
|
||||
|
@ -1,3 +1,13 @@
|
||||
# https://stackoverflow.com/a/62311397/328260
|
||||
macro (get_all_targets_recursive targets dir)
|
||||
get_property (subdirectories DIRECTORY ${dir} PROPERTY SUBDIRECTORIES)
|
||||
foreach (subdir ${subdirectories})
|
||||
get_all_targets_recursive (${targets} ${subdir})
|
||||
endforeach ()
|
||||
get_property (current_targets DIRECTORY ${dir} PROPERTY BUILDSYSTEM_TARGETS)
|
||||
list (APPEND ${targets} ${current_targets})
|
||||
endmacro ()
|
||||
|
||||
# When you will try to link target with the directory (that exists), cmake will
|
||||
# skip this without an error, only the following warning will be reported:
|
||||
#
|
||||
@ -18,23 +28,12 @@
|
||||
# -- but cannot be used with link_libraries()
|
||||
# - use BUILDSYSTEM_TARGETS property to get list of all targets and sanitize
|
||||
# -- this will work.
|
||||
|
||||
# https://stackoverflow.com/a/62311397/328260
|
||||
function (get_all_targets var)
|
||||
set (targets)
|
||||
get_all_targets_recursive (targets ${CMAKE_CURRENT_SOURCE_DIR})
|
||||
set (${var} ${targets} PARENT_SCOPE)
|
||||
endfunction()
|
||||
macro (get_all_targets_recursive targets dir)
|
||||
get_property (subdirectories DIRECTORY ${dir} PROPERTY SUBDIRECTORIES)
|
||||
foreach (subdir ${subdirectories})
|
||||
get_all_targets_recursive (${targets} ${subdir})
|
||||
endforeach ()
|
||||
get_property (current_targets DIRECTORY ${dir} PROPERTY BUILDSYSTEM_TARGETS)
|
||||
list (APPEND ${targets} ${current_targets})
|
||||
endmacro ()
|
||||
|
||||
macro (sanitize_link_libraries target)
|
||||
function (sanitize_link_libraries target)
|
||||
get_target_property(target_type ${target} TYPE)
|
||||
if (${target_type} STREQUAL "INTERFACE_LIBRARY")
|
||||
get_property(linked_libraries TARGET ${target} PROPERTY INTERFACE_LINK_LIBRARIES)
|
||||
@ -48,9 +47,35 @@ macro (sanitize_link_libraries target)
|
||||
message(FATAL_ERROR "${target} requested to link with directory: ${linked_library}")
|
||||
endif()
|
||||
endforeach()
|
||||
endmacro()
|
||||
|
||||
endfunction()
|
||||
get_all_targets (all_targets)
|
||||
foreach (target ${all_targets})
|
||||
sanitize_link_libraries(${target})
|
||||
endforeach()
|
||||
|
||||
#
|
||||
# Do not allow to define -W* from contrib publically (INTERFACE/PUBLIC).
|
||||
#
|
||||
function (get_contrib_targets var)
|
||||
set (targets)
|
||||
get_all_targets_recursive (targets ${CMAKE_CURRENT_SOURCE_DIR}/contrib)
|
||||
set (${var} ${targets} PARENT_SCOPE)
|
||||
endfunction()
|
||||
function (sanitize_interface_flags target)
|
||||
get_target_property(target_type ${target} TYPE)
|
||||
get_property(compile_definitions TARGET ${target} PROPERTY INTERFACE_COMPILE_DEFINITIONS)
|
||||
get_property(compile_options TARGET ${target} PROPERTY INTERFACE_COMPILE_OPTIONS)
|
||||
if (NOT "${compile_options}" STREQUAL "")
|
||||
message(FATAL_ERROR "${target} set INTERFACE_COMPILE_OPTIONS to ${compile_options}. This is forbidden.")
|
||||
endif()
|
||||
if ("${compile_definitions}" MATCHES "-Wl,")
|
||||
# linker option - OK
|
||||
elseif ("${compile_definitions}" MATCHES "-W")
|
||||
message(FATAL_ERROR "${target} contains ${compile_definitions} flags in INTERFACE_COMPILE_DEFINITIONS. This is forbidden.")
|
||||
endif()
|
||||
endfunction()
|
||||
get_contrib_targets (contrib_targets)
|
||||
foreach (contrib_target ${contrib_targets})
|
||||
sanitize_interface_flags(${contrib_target})
|
||||
endforeach()
|
||||
|
@ -21,12 +21,12 @@ set (APPLE_CLANG_MINIMUM_VERSION 12.0.0)
|
||||
set (GCC_MINIMUM_VERSION 11)
|
||||
|
||||
if (COMPILER_GCC)
|
||||
message (FATAL_ERROR "Compilation with GCC is unsupported. Please use Clang instead.")
|
||||
|
||||
if (CMAKE_CXX_COMPILER_VERSION VERSION_LESS ${GCC_MINIMUM_VERSION})
|
||||
message (FATAL_ERROR "Compilation with GCC version ${CMAKE_CXX_COMPILER_VERSION} is unsupported, the minimum required version is ${GCC_MINIMUM_VERSION}.")
|
||||
endif ()
|
||||
|
||||
message (WARNING "Compilation with GCC is unsupported. Please use Clang instead.")
|
||||
|
||||
elseif (COMPILER_CLANG)
|
||||
if (CMAKE_CXX_COMPILER_ID MATCHES "AppleClang")
|
||||
# (Experimental!) Specify "-DALLOW_APPLECLANG=ON" when running CMake configuration step, if you want to experiment with using it.
|
||||
@ -83,7 +83,7 @@ if ((OS_LINUX OR OS_DARWIN) AND NOT LINKER_NAME)
|
||||
|
||||
if (NOT LINKER_NAME)
|
||||
if (GOLD_PATH)
|
||||
message (WARNING "Linking with gold is not recommended. Please use lld.")
|
||||
message (FATAL_ERROR "Linking with gold is unsupported. Please use lld.")
|
||||
if (COMPILER_GCC)
|
||||
set (LINKER_NAME "gold")
|
||||
else ()
|
||||
|
39
contrib/CMakeLists.txt
vendored
39
contrib/CMakeLists.txt
vendored
@ -48,9 +48,14 @@ function(add_contrib cmake_folder)
|
||||
message(STATUS "Adding contrib module ${base_folders} (configuring with ${cmake_folder})")
|
||||
add_subdirectory (${cmake_folder})
|
||||
endfunction()
|
||||
|
||||
if (ENABLE_OPENSSL OR ENABLE_OPENSSL_DYNAMIC)
|
||||
add_contrib (openssl-cmake openssl)
|
||||
else ()
|
||||
add_contrib (boringssl-cmake boringssl)
|
||||
endif ()
|
||||
add_contrib (miniselect-cmake miniselect)
|
||||
add_contrib (pdqsort-cmake pdqsort)
|
||||
add_contrib (crc32-vpmsum-cmake crc32-vpmsum)
|
||||
add_contrib (sparsehash-c11-cmake sparsehash-c11)
|
||||
add_contrib (abseil-cpp-cmake abseil-cpp)
|
||||
add_contrib (magic-enum-cmake magic_enum)
|
||||
@ -61,7 +66,7 @@ add_contrib (dragonbox-cmake dragonbox)
|
||||
add_contrib (vectorscan-cmake vectorscan)
|
||||
add_contrib (jemalloc-cmake jemalloc)
|
||||
add_contrib (libcpuid-cmake libcpuid)
|
||||
add_contrib (libdivide)
|
||||
add_contrib (libdivide-cmake)
|
||||
add_contrib (libmetrohash)
|
||||
add_contrib (lz4-cmake lz4)
|
||||
add_contrib (murmurhash)
|
||||
@ -74,11 +79,6 @@ add_contrib (re2-cmake re2)
|
||||
add_contrib (xz-cmake xz)
|
||||
add_contrib (brotli-cmake brotli)
|
||||
add_contrib (double-conversion-cmake double-conversion)
|
||||
if (NOT ENABLE_EXTERNAL_OPENSSL)
|
||||
add_contrib (boringssl-cmake boringssl)
|
||||
else ()
|
||||
add_contrib (openssl-cmake openssl)
|
||||
endif ()
|
||||
add_contrib (poco-cmake poco)
|
||||
add_contrib (croaring-cmake croaring)
|
||||
add_contrib (zstd-cmake zstd)
|
||||
@ -116,12 +116,25 @@ endif()
|
||||
add_contrib (llvm-project-cmake llvm-project)
|
||||
add_contrib (libfuzzer-cmake llvm-project)
|
||||
add_contrib (libxml2-cmake libxml2)
|
||||
add_contrib (aws-s3-cmake
|
||||
|
||||
add_contrib (aws-cmake
|
||||
aws
|
||||
aws-c-auth
|
||||
aws-c-cal
|
||||
aws-c-common
|
||||
aws-c-compression
|
||||
aws-c-event-stream
|
||||
aws-c-http
|
||||
aws-c-io
|
||||
aws-c-mqtt
|
||||
aws-c-s3
|
||||
aws-c-sdkutils
|
||||
aws-s2n-tls
|
||||
aws-checksums
|
||||
aws-crt-cpp
|
||||
aws-cmake
|
||||
)
|
||||
|
||||
add_contrib (base64-cmake base64)
|
||||
add_contrib (simdjson-cmake simdjson)
|
||||
add_contrib (rapidjson-cmake rapidjson)
|
||||
@ -167,7 +180,15 @@ add_contrib (c-ares-cmake c-ares)
|
||||
add_contrib (qpl-cmake qpl)
|
||||
add_contrib (morton-nd-cmake morton-nd)
|
||||
|
||||
add_contrib(annoy-cmake annoy)
|
||||
if (ARCH_S390X)
|
||||
add_contrib(crc32-s390x-cmake crc32-s390x)
|
||||
endif()
|
||||
|
||||
add_contrib (annoy-cmake annoy)
|
||||
|
||||
add_contrib (xxHash-cmake xxHash)
|
||||
|
||||
add_contrib (google-benchmark-cmake google-benchmark)
|
||||
|
||||
# Put all targets defined here and in subdirectories under "contrib/<immediate-subdir>" folders in GUI-based IDEs.
|
||||
# Some of third-party projects may override CMAKE_FOLDER or FOLDER property of their targets, so they would not appear
|
||||
|
2
contrib/NuRaft
vendored
2
contrib/NuRaft
vendored
@ -1 +1 @@
|
||||
Subproject commit e4e746a24eb56861a86f3672771e3308d8c40722
|
||||
Subproject commit 545b8c810a956b2efdc116e86be219af7e83d68a
|
2
contrib/arrow
vendored
2
contrib/arrow
vendored
@ -1 +1 @@
|
||||
Subproject commit 450a5638704386356f8e520080468fc9bc8bcaf8
|
||||
Subproject commit d03245f801f798c63ee9a7d2b8914a9e5c5cd666
|
@ -78,23 +78,14 @@ set(FLATBUFFERS_BINARY_DIR "${ClickHouse_BINARY_DIR}/contrib/flatbuffers")
|
||||
set(FLATBUFFERS_INCLUDE_DIR "${FLATBUFFERS_SRC_DIR}/include")
|
||||
|
||||
# set flatbuffers CMake options
|
||||
if (USE_STATIC_LIBRARIES)
|
||||
set(FLATBUFFERS_BUILD_FLATLIB ON CACHE BOOL "Enable the build of the flatbuffers library")
|
||||
set(FLATBUFFERS_BUILD_SHAREDLIB OFF CACHE BOOL "Disable the build of the flatbuffers shared library")
|
||||
else ()
|
||||
set(FLATBUFFERS_BUILD_SHAREDLIB ON CACHE BOOL "Enable the build of the flatbuffers shared library")
|
||||
set(FLATBUFFERS_BUILD_FLATLIB OFF CACHE BOOL "Disable the build of the flatbuffers library")
|
||||
endif ()
|
||||
set(FLATBUFFERS_BUILD_FLATLIB ON CACHE BOOL "Enable the build of the flatbuffers library")
|
||||
set(FLATBUFFERS_BUILD_SHAREDLIB OFF CACHE BOOL "Disable the build of the flatbuffers shared library")
|
||||
set(FLATBUFFERS_BUILD_TESTS OFF CACHE BOOL "Skip flatbuffers tests")
|
||||
|
||||
add_subdirectory(${FLATBUFFERS_SRC_DIR} "${FLATBUFFERS_BINARY_DIR}")
|
||||
|
||||
add_library(_flatbuffers INTERFACE)
|
||||
if(USE_STATIC_LIBRARIES)
|
||||
target_link_libraries(_flatbuffers INTERFACE flatbuffers)
|
||||
else()
|
||||
target_link_libraries(_flatbuffers INTERFACE flatbuffers_shared)
|
||||
endif()
|
||||
target_link_libraries(_flatbuffers INTERFACE flatbuffers)
|
||||
target_include_directories(_flatbuffers INTERFACE ${FLATBUFFERS_INCLUDE_DIR})
|
||||
|
||||
# === hdfs
|
||||
|
2
contrib/aws
vendored
2
contrib/aws
vendored
@ -1 +1 @@
|
||||
Subproject commit 00b03604543367d7e310cb0993973fdcb723ea79
|
||||
Subproject commit 4a12641211d4dbc8e2fdb2dd0f1eea0927db9252
|
1
contrib/aws-c-auth
vendored
Submodule
1
contrib/aws-c-auth
vendored
Submodule
@ -0,0 +1 @@
|
||||
Subproject commit 30df6c407e2df43bd244e2c34c9b4a4b87372bfb
|
1
contrib/aws-c-cal
vendored
Submodule
1
contrib/aws-c-cal
vendored
Submodule
@ -0,0 +1 @@
|
||||
Subproject commit 85dd7664b786a389c6fb1a6f031ab4bb2282133d
|
2
contrib/aws-c-common
vendored
2
contrib/aws-c-common
vendored
@ -1 +1 @@
|
||||
Subproject commit 736a82d1697c108b04a277e66438a7f4e19b6857
|
||||
Subproject commit 324fd1d973ccb25c813aa747bf1759cfde5121c5
|
1
contrib/aws-c-compression
vendored
Submodule
1
contrib/aws-c-compression
vendored
Submodule
@ -0,0 +1 @@
|
||||
Subproject commit b517b7decd0dac30be2162f5186c250221c53aff
|
2
contrib/aws-c-event-stream
vendored
2
contrib/aws-c-event-stream
vendored
@ -1 +1 @@
|
||||
Subproject commit 3bc33662f9ccff4f4cbcf9509cc78c26e022fde0
|
||||
Subproject commit 39bfa94a14b7126bf0c1330286ef8db452d87e66
|
1
contrib/aws-c-http
vendored
Submodule
1
contrib/aws-c-http
vendored
Submodule
@ -0,0 +1 @@
|
||||
Subproject commit 2c5a2a7d5556600b9782ffa6c9d7e09964df1abc
|
1
contrib/aws-c-io
vendored
Submodule
1
contrib/aws-c-io
vendored
Submodule
@ -0,0 +1 @@
|
||||
Subproject commit 5d32c453560d0823df521a686bf7fbacde7f9be3
|
1
contrib/aws-c-mqtt
vendored
Submodule
1
contrib/aws-c-mqtt
vendored
Submodule
@ -0,0 +1 @@
|
||||
Subproject commit 882c689561a3db1466330ccfe3b63637e0a575d3
|
1
contrib/aws-c-s3
vendored
Submodule
1
contrib/aws-c-s3
vendored
Submodule
@ -0,0 +1 @@
|
||||
Subproject commit a41255ece72a7c887bba7f9d998ca3e14f4c8a1b
|
1
contrib/aws-c-sdkutils
vendored
Submodule
1
contrib/aws-c-sdkutils
vendored
Submodule
@ -0,0 +1 @@
|
||||
Subproject commit 25bf5cf225f977c3accc6a05a0a7a181ef2a4a30
|
2
contrib/aws-checksums
vendored
2
contrib/aws-checksums
vendored
@ -1 +1 @@
|
||||
Subproject commit 519d6d9093819b6cf89ffff589a27ef8f83d0f65
|
||||
Subproject commit 48e7c0e01479232f225c8044d76c84e74192889d
|
114
contrib/aws-cmake/AwsFeatureTests.cmake
Normal file
114
contrib/aws-cmake/AwsFeatureTests.cmake
Normal file
@ -0,0 +1,114 @@
|
||||
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
# SPDX-License-Identifier: Apache-2.0.
|
||||
|
||||
include(CheckCSourceRuns)
|
||||
|
||||
option(USE_CPU_EXTENSIONS "Whenever possible, use functions optimized for CPUs with specific extensions (ex: SSE, AVX)." ON)
|
||||
|
||||
# In the current (11/2/21) state of mingw64, the packaged gcc is not capable of emitting properly aligned avx2 instructions under certain circumstances.
|
||||
# This leads to crashes for windows builds using mingw64 when invoking the avx2-enabled versions of certain functions. Until we can find a better
|
||||
# work-around, disable avx2 (and all other extensions) in mingw builds.
|
||||
#
|
||||
# https://gcc.gnu.org/bugzilla/show_bug.cgi?id=54412
|
||||
#
|
||||
if (MINGW)
|
||||
message(STATUS "MINGW detected! Disabling avx2 and other CPU extensions")
|
||||
set(USE_CPU_EXTENSIONS OFF)
|
||||
endif()
|
||||
|
||||
if(NOT CMAKE_CROSSCOMPILING)
|
||||
check_c_source_runs("
|
||||
#include <stdbool.h>
|
||||
bool foo(int a, int b, int *c) {
|
||||
return __builtin_mul_overflow(a, b, c);
|
||||
}
|
||||
|
||||
int main() {
|
||||
int out;
|
||||
if (foo(1, 2, &out)) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
return 0;
|
||||
}" AWS_HAVE_GCC_OVERFLOW_MATH_EXTENSIONS)
|
||||
|
||||
if (USE_CPU_EXTENSIONS)
|
||||
check_c_source_runs("
|
||||
int main() {
|
||||
int foo = 42;
|
||||
_mulx_u32(1, 2, &foo);
|
||||
return foo != 2;
|
||||
}" AWS_HAVE_MSVC_MULX)
|
||||
endif()
|
||||
|
||||
endif()
|
||||
|
||||
check_c_source_compiles("
|
||||
#include <Windows.h>
|
||||
#if WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_DESKTOP)
|
||||
int main() {
|
||||
return 0;
|
||||
}
|
||||
#else
|
||||
it's not windows desktop
|
||||
#endif
|
||||
" AWS_HAVE_WINAPI_DESKTOP)
|
||||
|
||||
check_c_source_compiles("
|
||||
int main() {
|
||||
#if !(defined(__x86_64__) || defined(__i386__) || defined(_M_X64) || defined(_M_IX86))
|
||||
# error \"not intel\"
|
||||
#endif
|
||||
return 0;
|
||||
}
|
||||
" AWS_ARCH_INTEL)
|
||||
|
||||
check_c_source_compiles("
|
||||
int main() {
|
||||
#if !(defined(__aarch64__) || defined(_M_ARM64))
|
||||
# error \"not arm64\"
|
||||
#endif
|
||||
return 0;
|
||||
}
|
||||
" AWS_ARCH_ARM64)
|
||||
|
||||
check_c_source_compiles("
|
||||
int main() {
|
||||
#if !(defined(__arm__) || defined(_M_ARM))
|
||||
# error \"not arm\"
|
||||
#endif
|
||||
return 0;
|
||||
}
|
||||
" AWS_ARCH_ARM32)
|
||||
|
||||
check_c_source_compiles("
|
||||
int main() {
|
||||
int foo = 42, bar = 24;
|
||||
__asm__ __volatile__(\"\":\"=r\"(foo):\"r\"(bar):\"memory\");
|
||||
}" AWS_HAVE_GCC_INLINE_ASM)
|
||||
|
||||
check_c_source_compiles("
|
||||
#include <sys/auxv.h>
|
||||
int main() {
|
||||
#ifdef __linux__
|
||||
getauxval(AT_HWCAP);
|
||||
getauxval(AT_HWCAP2);
|
||||
#endif
|
||||
return 0;
|
||||
}" AWS_HAVE_AUXV)
|
||||
|
||||
string(REGEX MATCH "^(aarch64|arm)" ARM_CPU "${CMAKE_SYSTEM_PROCESSOR}")
|
||||
if(NOT LEGACY_COMPILER_SUPPORT OR ARM_CPU)
|
||||
check_c_source_compiles("
|
||||
#include <execinfo.h>
|
||||
int main() {
|
||||
backtrace(NULL, 0);
|
||||
return 0;
|
||||
}" AWS_HAVE_EXECINFO)
|
||||
endif()
|
||||
|
||||
check_c_source_compiles("
|
||||
#include <linux/if_link.h>
|
||||
int main() {
|
||||
return 1;
|
||||
}" AWS_HAVE_LINUX_IF_LINK_H)
|
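The checks above only record results in cache variables such as AWS_ARCH_INTEL, AWS_HAVE_AUXV or AWS_HAVE_EXECINFO; turning them into compile definitions is left to whoever consumes the file. A minimal sketch of such a consumer, assuming ClickHouse_SOURCE_DIR is set; the my_aws_consumer target and consumer.c are illustrative names, not part of this commit:

```cmake
# Sketch only: propagate the detected feature flags to a hypothetical target.
include("${ClickHouse_SOURCE_DIR}/contrib/aws-cmake/AwsFeatureTests.cmake")

add_library(my_aws_consumer consumer.c)  # illustrative target and source
foreach(feature AWS_ARCH_INTEL AWS_ARCH_ARM64 AWS_HAVE_AUXV AWS_HAVE_EXECINFO)
    if(${feature})
        target_compile_definitions(my_aws_consumer PRIVATE "-D${feature}")
    endif()
endforeach()
```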
74  contrib/aws-cmake/AwsSIMD.cmake  (new file)
@ -0,0 +1,74 @@
|
||||
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
# SPDX-License-Identifier: Apache-2.0.
|
||||
|
||||
include(CheckCCompilerFlag)
|
||||
include(CheckIncludeFile)
|
||||
|
||||
if (USE_CPU_EXTENSIONS)
|
||||
if (MSVC)
|
||||
check_c_compiler_flag("/arch:AVX2" HAVE_M_AVX2_FLAG)
|
||||
if (HAVE_M_AVX2_FLAG)
|
||||
set(AVX2_CFLAGS "/arch:AVX2")
|
||||
endif()
|
||||
else()
|
||||
check_c_compiler_flag(-mavx2 HAVE_M_AVX2_FLAG)
|
||||
if (HAVE_M_AVX2_FLAG)
|
||||
set(AVX2_CFLAGS "-mavx -mavx2")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
|
||||
cmake_push_check_state()
|
||||
set(CMAKE_REQUIRED_FLAGS "${CMAKE_REQUIRED_FLAGS} ${AVX2_CFLAGS}")
|
||||
|
||||
check_c_source_compiles("
|
||||
#include <immintrin.h>
|
||||
#include <emmintrin.h>
|
||||
#include <string.h>
|
||||
|
||||
int main() {
|
||||
__m256i vec;
|
||||
memset(&vec, 0, sizeof(vec));
|
||||
|
||||
_mm256_shuffle_epi8(vec, vec);
|
||||
_mm256_set_epi32(1,2,3,4,5,6,7,8);
|
||||
_mm256_permutevar8x32_epi32(vec, vec);
|
||||
|
||||
return 0;
|
||||
}" HAVE_AVX2_INTRINSICS)
|
||||
|
||||
check_c_source_compiles("
|
||||
#include <immintrin.h>
|
||||
#include <string.h>
|
||||
|
||||
int main() {
|
||||
__m256i vec;
|
||||
memset(&vec, 0, sizeof(vec));
|
||||
return (int)_mm256_extract_epi64(vec, 2);
|
||||
}" HAVE_MM256_EXTRACT_EPI64)
|
||||
|
||||
cmake_pop_check_state()
|
||||
endif() # USE_CPU_EXTENSIONS
|
||||
|
||||
macro(simd_add_definition_if target definition)
|
||||
if(${definition})
|
||||
target_compile_definitions(${target} PRIVATE -D${definition})
|
||||
endif(${definition})
|
||||
endmacro(simd_add_definition_if)
|
||||
|
||||
# Configure private preprocessor definitions for SIMD-related features
|
||||
# Does not set any processor feature codegen flags
|
||||
function(simd_add_definitions target)
|
||||
simd_add_definition_if(${target} HAVE_AVX2_INTRINSICS)
|
||||
simd_add_definition_if(${target} HAVE_MM256_EXTRACT_EPI64)
|
||||
endfunction(simd_add_definitions)
|
||||
|
||||
# Adds source files only if AVX2 is supported. These files will be built with
|
||||
# avx2 intrinsics enabled.
|
||||
# Usage: simd_add_source_avx2(target file1.c file2.c ...)
|
||||
function(simd_add_source_avx2 target)
|
||||
foreach(file ${ARGN})
|
||||
target_sources(${target} PRIVATE ${file})
|
||||
set_source_files_properties(${file} PROPERTIES COMPILE_FLAGS "${AVX2_CFLAGS}")
|
||||
endforeach()
|
||||
endfunction(simd_add_source_avx2)
|
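A hedged usage sketch for the two helpers defined above; my_codec, generic.c and encode_avx2.c are illustrative names rather than files from this commit:

```cmake
# Sketch only: a hypothetical target with one portable and one AVX2 source.
add_library(my_codec generic.c)

# Adds -DHAVE_AVX2_INTRINSICS / -DHAVE_MM256_EXTRACT_EPI64 if detection succeeded.
simd_add_definitions(my_codec)

# Attaches encode_avx2.c to the target and compiles it with ${AVX2_CFLAGS}.
simd_add_source_avx2(my_codec encode_avx2.c)
```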
50  contrib/aws-cmake/AwsThreadAffinity.cmake  (new file)
@ -0,0 +1,50 @@
|
||||
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
# SPDX-License-Identifier: Apache-2.0.
|
||||
|
||||
include(CheckSymbolExists)
|
||||
|
||||
# Check if the platform supports setting thread affinity
|
||||
# (important for hitting full NIC entitlement on NUMA architectures)
|
||||
function(aws_set_thread_affinity_method target)
|
||||
|
||||
# Non-POSIX, Android, and Apple platforms do not support thread affinity.
|
||||
if (NOT UNIX OR ANDROID OR APPLE)
|
||||
target_compile_definitions(${target} PRIVATE
|
||||
-DAWS_AFFINITY_METHOD=AWS_AFFINITY_METHOD_NONE)
|
||||
return()
|
||||
endif()
|
||||
|
||||
cmake_push_check_state()
|
||||
list(APPEND CMAKE_REQUIRED_DEFINITIONS -D_GNU_SOURCE)
|
||||
list(APPEND CMAKE_REQUIRED_LIBRARIES pthread)
|
||||
|
||||
set(headers "pthread.h")
|
||||
# BSDs put nonportable pthread declarations in a separate header.
|
||||
if(CMAKE_SYSTEM_NAME MATCHES BSD)
|
||||
set(headers "${headers};pthread_np.h")
|
||||
endif()
|
||||
|
||||
# Using pthread attrs is the preferred method, but is glibc-specific.
|
||||
check_symbol_exists(pthread_attr_setaffinity_np "${headers}" USE_PTHREAD_ATTR_SETAFFINITY)
|
||||
if (USE_PTHREAD_ATTR_SETAFFINITY)
|
||||
target_compile_definitions(${target} PRIVATE
|
||||
-DAWS_AFFINITY_METHOD=AWS_AFFINITY_METHOD_PTHREAD_ATTR)
|
||||
return()
|
||||
endif()
|
||||
|
||||
# This method is still nonportable, but is supported by musl and BSDs.
|
||||
check_symbol_exists(pthread_setaffinity_np "${headers}" USE_PTHREAD_SETAFFINITY)
|
||||
if (USE_PTHREAD_SETAFFINITY)
|
||||
target_compile_definitions(${target} PRIVATE
|
||||
-DAWS_AFFINITY_METHOD=AWS_AFFINITY_METHOD_PTHREAD)
|
||||
return()
|
||||
endif()
|
||||
|
||||
# If we got here, we expected thread affinity support but didn't find it.
|
||||
# We still build with degraded NUMA performance, but show a warning.
|
||||
message(WARNING "No supported method for setting thread affinity")
|
||||
target_compile_definitions(${target} PRIVATE
|
||||
-DAWS_AFFINITY_METHOD=AWS_AFFINITY_METHOD_NONE)
|
||||
|
||||
cmake_pop_check_state()
|
||||
endfunction()
|
61  contrib/aws-cmake/AwsThreadName.cmake  (new file)
@ -0,0 +1,61 @@
|
||||
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
||||
# SPDX-License-Identifier: Apache-2.0.
|
||||
|
||||
include(CheckSymbolExists)
|
||||
|
||||
# Check how the platform supports setting thread name
|
||||
function(aws_set_thread_name_method target)
|
||||
|
||||
if (WINDOWS)
|
||||
# On Windows we do a runtime check, instead of compile-time check
|
||||
return()
|
||||
elseif (APPLE)
|
||||
# All Apple platforms we support have the same function, so no need for compile-time check.
|
||||
return()
|
||||
endif()
|
||||
|
||||
cmake_push_check_state()
|
||||
list(APPEND CMAKE_REQUIRED_DEFINITIONS -D_GNU_SOURCE)
|
||||
list(APPEND CMAKE_REQUIRED_LIBRARIES pthread)
|
||||
|
||||
# The start of the test program
|
||||
set(c_source_start "
|
||||
#define _GNU_SOURCE
|
||||
#include <pthread.h>
|
||||
|
||||
#if defined(__FreeBSD__) || defined(__NETBSD__)
|
||||
#include <pthread_np.h>
|
||||
#endif
|
||||
|
||||
int main() {
|
||||
pthread_t thread_id;
|
||||
")
|
||||
|
||||
# The end of the test program
|
||||
set(c_source_end "}")
|
||||
|
||||
# pthread_setname_np() usually takes 2 args
|
||||
check_c_source_compiles("
|
||||
${c_source_start}
|
||||
pthread_setname_np(thread_id, \"asdf\");
|
||||
${c_source_end}"
|
||||
PTHREAD_SETNAME_TAKES_2ARGS)
|
||||
if (PTHREAD_SETNAME_TAKES_2ARGS)
|
||||
target_compile_definitions(${target} PRIVATE -DAWS_PTHREAD_SETNAME_TAKES_2ARGS)
|
||||
return()
|
||||
endif()
|
||||
|
||||
# But on NetBSD it takes 3!
|
||||
check_c_source_compiles("
|
||||
${c_source_start}
|
||||
pthread_setname_np(thread_id, \"asdf\", NULL);
|
||||
${c_source_end}
|
||||
" PTHREAD_SETNAME_TAKES_3ARGS)
|
||||
if (PTHREAD_SETNAME_TAKES_3ARGS)
|
||||
target_compile_definitions(${target} PRIVATE -DAWS_PTHREAD_SETNAME_TAKES_3ARGS)
|
||||
return()
|
||||
endif()
|
||||
|
||||
# And on many older/weirder platforms it's just not supported
|
||||
cmake_pop_check_state()
|
||||
endfunction()
|
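Both helpers above are meant to be called once per target; further down in this diff the new contrib/aws-cmake/CMakeLists.txt applies them to the _aws library. A hedged sketch with an illustrative target:

```cmake
# Sketch only: "_example_io" and io.c are illustrative; the commit itself
# calls these helpers on the "_aws" target.
add_library(_example_io io.c)

aws_set_thread_affinity_method(_example_io)  # defines AWS_AFFINITY_METHOD=... on the target
aws_set_thread_name_method(_example_io)      # may define AWS_PTHREAD_SETNAME_TAKES_{2,3}ARGS
```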
376  contrib/aws-cmake/CMakeLists.txt  (new file)
@ -0,0 +1,376 @@
|
||||
set(ENABLE_AWS_S3_DEFAULT OFF)
|
||||
|
||||
if(ENABLE_LIBRARIES AND (OS_LINUX OR OS_DARWIN) AND TARGET OpenSSL::Crypto)
|
||||
set(ENABLE_AWS_S3_DEFAULT ON)
|
||||
endif()
|
||||
|
||||
option(ENABLE_AWS_S3 "Enable AWS S3" ${ENABLE_AWS_S3_DEFAULT})
|
||||
|
||||
if(ENABLE_AWS_S3)
|
||||
if(NOT TARGET OpenSSL::Crypto)
|
||||
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use AWS SDK without OpenSSL")
|
||||
elseif(NOT (OS_LINUX OR OS_DARWIN))
|
||||
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use AWS SDK with platform ${CMAKE_SYSTEM_NAME}")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if(NOT ENABLE_AWS_S3)
|
||||
message(STATUS "Not using AWS S3")
|
||||
return()
|
||||
endif()
|
||||
|
||||
|
||||
# Utilities.
|
||||
include("${ClickHouse_SOURCE_DIR}/contrib/aws-cmake/AwsFeatureTests.cmake")
|
||||
include("${ClickHouse_SOURCE_DIR}/contrib/aws-cmake/AwsThreadAffinity.cmake")
|
||||
include("${ClickHouse_SOURCE_DIR}/contrib/aws-cmake/AwsThreadName.cmake")
|
||||
include("${ClickHouse_SOURCE_DIR}/contrib/aws-cmake/AwsSIMD.cmake")
|
||||
|
||||
|
||||
# Gather sources and options.
|
||||
set(AWS_SOURCES)
|
||||
set(AWS_PUBLIC_INCLUDES)
|
||||
set(AWS_PRIVATE_INCLUDES)
|
||||
set(AWS_PUBLIC_COMPILE_DEFS)
|
||||
set(AWS_PRIVATE_COMPILE_DEFS)
|
||||
set(AWS_PRIVATE_LIBS)
|
||||
|
||||
if (CMAKE_BUILD_TYPE_UC STREQUAL "DEBUG")
|
||||
list(APPEND AWS_PRIVATE_COMPILE_DEFS "-DDEBUG_BUILD")
|
||||
endif()
|
||||
|
||||
set(ENABLE_OPENSSL_ENCRYPTION ON)
|
||||
if (ENABLE_OPENSSL_ENCRYPTION)
|
||||
list(APPEND AWS_PRIVATE_COMPILE_DEFS "-DENABLE_OPENSSL_ENCRYPTION")
|
||||
endif()
|
||||
|
||||
set(USE_S2N ON)
|
||||
if (USE_S2N)
|
||||
list(APPEND AWS_PRIVATE_COMPILE_DEFS "-DUSE_S2N")
|
||||
endif()
|
||||
|
||||
|
||||
# Directories.
|
||||
SET(AWS_SDK_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws")
|
||||
SET(AWS_SDK_CORE_DIR "${AWS_SDK_DIR}/aws-cpp-sdk-core")
|
||||
SET(AWS_SDK_S3_DIR "${AWS_SDK_DIR}/aws-cpp-sdk-s3")
|
||||
|
||||
SET(AWS_AUTH_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws-c-auth")
|
||||
SET(AWS_CAL_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws-c-cal")
|
||||
SET(AWS_CHECKSUMS_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws-checksums")
|
||||
SET(AWS_COMMON_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws-c-common")
|
||||
SET(AWS_COMPRESSION_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws-c-compression")
|
||||
SET(AWS_CRT_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws-crt-cpp")
|
||||
SET(AWS_EVENT_STREAM_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws-c-event-stream")
|
||||
SET(AWS_HTTP_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws-c-http")
|
||||
SET(AWS_IO_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws-c-io")
|
||||
SET(AWS_MQTT_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws-c-mqtt")
|
||||
SET(AWS_S2N_TLS_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws-s2n-tls")
|
||||
SET(AWS_S3_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws-c-s3")
|
||||
SET(AWS_SDKUTILS_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws-c-sdkutils")
|
||||
|
||||
|
||||
# aws-cpp-sdk-core
|
||||
file(GLOB AWS_SDK_CORE_SRC
|
||||
"${AWS_SDK_CORE_DIR}/source/*.cpp"
|
||||
"${AWS_SDK_CORE_DIR}/source/auth/*.cpp"
|
||||
"${AWS_SDK_CORE_DIR}/source/auth/bearer-token-provider/*.cpp"
|
||||
"${AWS_SDK_CORE_DIR}/source/auth/signer/*.cpp"
|
||||
"${AWS_SDK_CORE_DIR}/source/auth/signer-provider/*.cpp"
|
||||
"${AWS_SDK_CORE_DIR}/source/client/*.cpp"
|
||||
"${AWS_SDK_CORE_DIR}/source/config/*.cpp"
|
||||
"${AWS_SDK_CORE_DIR}/source/config/defaults/*.cpp"
|
||||
"${AWS_SDK_CORE_DIR}/source/endpoint/*.cpp"
|
||||
"${AWS_SDK_CORE_DIR}/source/endpoint/internal/*.cpp"
|
||||
"${AWS_SDK_CORE_DIR}/source/external/cjson/*.cpp"
|
||||
"${AWS_SDK_CORE_DIR}/source/external/tinyxml2/*.cpp"
|
||||
"${AWS_SDK_CORE_DIR}/source/http/*.cpp"
|
||||
"${AWS_SDK_CORE_DIR}/source/http/standard/*.cpp"
|
||||
"${AWS_SDK_CORE_DIR}/source/internal/*.cpp"
|
||||
"${AWS_SDK_CORE_DIR}/source/monitoring/*.cpp"
|
||||
"${AWS_SDK_CORE_DIR}/source/utils/*.cpp"
|
||||
"${AWS_SDK_CORE_DIR}/source/utils/base64/*.cpp"
|
||||
"${AWS_SDK_CORE_DIR}/source/utils/crypto/*.cpp"
|
||||
"${AWS_SDK_CORE_DIR}/source/utils/crypto/openssl/*.cpp"
|
||||
"${AWS_SDK_CORE_DIR}/source/utils/crypto/factory/*.cpp"
|
||||
"${AWS_SDK_CORE_DIR}/source/utils/event/*.cpp"
|
||||
"${AWS_SDK_CORE_DIR}/source/utils/json/*.cpp"
|
||||
"${AWS_SDK_CORE_DIR}/source/utils/logging/*.cpp"
|
||||
"${AWS_SDK_CORE_DIR}/source/utils/memory/*.cpp"
|
||||
"${AWS_SDK_CORE_DIR}/source/utils/memory/stl/*.cpp"
|
||||
"${AWS_SDK_CORE_DIR}/source/utils/stream/*.cpp"
|
||||
"${AWS_SDK_CORE_DIR}/source/utils/threading/*.cpp"
|
||||
"${AWS_SDK_CORE_DIR}/source/utils/xml/*.cpp"
|
||||
)
|
||||
|
||||
if(OS_LINUX OR OS_DARWIN)
|
||||
file(GLOB AWS_SDK_CORE_NET_SRC "${AWS_SDK_CORE_DIR}/source/net/linux-shared/*.cpp")
|
||||
file(GLOB AWS_SDK_CORE_PLATFORM_SRC "${AWS_SDK_CORE_DIR}/source/platform/linux-shared/*.cpp")
|
||||
else()
|
||||
file(GLOB AWS_SDK_CORE_NET_SRC "${AWS_SDK_CORE_DIR}/source/net/*.cpp")
|
||||
set(AWS_SDK_CORE_PLATFORM_SRC)
|
||||
endif()
|
||||
|
||||
OPTION(USE_AWS_MEMORY_MANAGEMENT "Aws memory management" OFF)
|
||||
configure_file("${AWS_SDK_CORE_DIR}/include/aws/core/SDKConfig.h.in"
|
||||
"${CMAKE_CURRENT_BINARY_DIR}/include/aws/core/SDKConfig.h" @ONLY)
|
||||
|
||||
list(APPEND AWS_PUBLIC_COMPILE_DEFS "-DAWS_SDK_VERSION_MAJOR=1")
|
||||
list(APPEND AWS_PUBLIC_COMPILE_DEFS "-DAWS_SDK_VERSION_MINOR=10")
|
||||
list(APPEND AWS_PUBLIC_COMPILE_DEFS "-DAWS_SDK_VERSION_PATCH=36")
|
||||
|
||||
list(APPEND AWS_SOURCES ${AWS_SDK_CORE_SRC} ${AWS_SDK_CORE_NET_SRC} ${AWS_SDK_CORE_PLATFORM_SRC})
|
||||
|
||||
list(APPEND AWS_PUBLIC_INCLUDES
|
||||
"${AWS_SDK_CORE_DIR}/include/"
|
||||
"${CMAKE_CURRENT_BINARY_DIR}/include"
|
||||
)
|
||||
|
||||
|
||||
# aws-cpp-sdk-s3
|
||||
file(GLOB AWS_SDK_S3_SRC
|
||||
"${AWS_SDK_S3_DIR}/source/*.cpp"
|
||||
"${AWS_SDK_S3_DIR}/source/model/*.cpp"
|
||||
)
|
||||
|
||||
list(APPEND AWS_SOURCES ${AWS_SDK_S3_SRC})
|
||||
list(APPEND AWS_PUBLIC_INCLUDES "${AWS_SDK_S3_DIR}/include/")
|
||||
|
||||
|
||||
# aws-c-auth
|
||||
file(GLOB AWS_AUTH_SRC
|
||||
"${AWS_AUTH_DIR}/source/*.c"
|
||||
)
|
||||
|
||||
list(APPEND AWS_SOURCES ${AWS_AUTH_SRC})
|
||||
list(APPEND AWS_PUBLIC_INCLUDES "${AWS_AUTH_DIR}/include/")
|
||||
|
||||
|
||||
# aws-c-cal
|
||||
file(GLOB AWS_CAL_SRC
|
||||
"${AWS_CAL_DIR}/source/*.c"
|
||||
)
|
||||
|
||||
if (ENABLE_OPENSSL_ENCRYPTION)
|
||||
file(GLOB AWS_CAL_OS_SRC
|
||||
"${AWS_CAL_DIR}/source/unix/*.c"
|
||||
)
|
||||
list(APPEND AWS_PRIVATE_LIBS OpenSSL::Crypto)
|
||||
endif()
|
||||
|
||||
list(APPEND AWS_SOURCES ${AWS_CAL_SRC} ${AWS_CAL_OS_SRC})
|
||||
list(APPEND AWS_PRIVATE_INCLUDES "${AWS_CAL_DIR}/include/")
|
||||
|
||||
|
||||
# aws-c-event-stream
|
||||
file(GLOB AWS_EVENT_STREAM_SRC
|
||||
"${AWS_EVENT_STREAM_DIR}/source/*.c"
|
||||
)
|
||||
|
||||
list(APPEND AWS_SOURCES ${AWS_EVENT_STREAM_SRC})
|
||||
list(APPEND AWS_PRIVATE_INCLUDES "${AWS_EVENT_STREAM_DIR}/include/")
|
||||
|
||||
|
||||
# aws-c-common
|
||||
file(GLOB AWS_COMMON_SRC
|
||||
"${AWS_COMMON_DIR}/source/*.c"
|
||||
"${AWS_COMMON_DIR}/source/external/*.c"
|
||||
"${AWS_COMMON_DIR}/source/posix/*.c"
|
||||
)
|
||||
|
||||
file(GLOB AWS_COMMON_ARCH_SRC
|
||||
"${AWS_COMMON_DIR}/source/arch/generic/*.c"
|
||||
)
|
||||
|
||||
if (AWS_ARCH_INTEL)
|
||||
file(GLOB AWS_COMMON_ARCH_SRC
|
||||
"${AWS_COMMON_DIR}/source/arch/intel/cpuid.c"
|
||||
"${AWS_COMMON_DIR}/source/arch/intel/asm/*.c"
|
||||
)
|
||||
elseif (AWS_ARCH_ARM64 OR AWS_ARCH_ARM32)
|
||||
if (AWS_HAVE_AUXV)
|
||||
file(GLOB AWS_COMMON_ARCH_SRC
|
||||
"${AWS_COMMON_DIR}/source/arch/arm/asm/*.c"
|
||||
)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
set(AWS_COMMON_AVX2_SRC)
|
||||
if (HAVE_AVX2_INTRINSICS)
|
||||
list(APPEND AWS_PRIVATE_COMPILE_DEFS "-DUSE_SIMD_ENCODING")
|
||||
set(AWS_COMMON_AVX2_SRC "${AWS_COMMON_DIR}/source/arch/intel/encoding_avx2.c")
|
||||
set_source_files_properties(${AWS_COMMON_AVX2_SRC} PROPERTIES COMPILE_FLAGS "${AVX2_CFLAGS}")
|
||||
endif()
|
||||
|
||||
configure_file("${AWS_COMMON_DIR}/include/aws/common/config.h.in"
|
||||
"${CMAKE_CURRENT_BINARY_DIR}/include/aws/common/config.h" @ONLY)
|
||||
|
||||
list(APPEND AWS_SOURCES ${AWS_COMMON_SRC} ${AWS_COMMON_ARCH_SRC} ${AWS_COMMON_AVX2_SRC})
|
||||
|
||||
list(APPEND AWS_PUBLIC_INCLUDES
|
||||
"${AWS_COMMON_DIR}/include/"
|
||||
"${CMAKE_CURRENT_BINARY_DIR}/include"
|
||||
)
|
||||
|
||||
|
||||
# aws-checksums
|
||||
file(GLOB AWS_CHECKSUMS_SRC
|
||||
"${AWS_CHECKSUMS_DIR}/source/*.c"
|
||||
"${AWS_CHECKSUMS_DIR}/source/intel/*.c"
|
||||
"${AWS_CHECKSUMS_DIR}/source/intel/asm/*.c"
|
||||
"${AWS_CHECKSUMS_DIR}/source/arm/*.c"
|
||||
)
|
||||
|
||||
if(AWS_ARCH_INTEL AND AWS_HAVE_GCC_INLINE_ASM)
|
||||
file(GLOB AWS_CHECKSUMS_ARCH_SRC
|
||||
"${AWS_CHECKSUMS_DIR}/source/intel/asm/*.c"
|
||||
)
|
||||
endif()
|
||||
|
||||
if (AWS_ARCH_ARM64)
|
||||
file(GLOB AWS_CHECKSUMS_ARCH_SRC
|
||||
"${AWS_CHECKSUMS_DIR}/source/arm/*.c"
|
||||
)
|
||||
set_source_files_properties("${AWS_CHECKSUMS_DIR}/source/arm/crc32c_arm.c" PROPERTIES COMPILE_FLAGS -march=armv8-a+crc)
|
||||
elseif (AWS_ARCH_ARM32)
|
||||
if (AWS_ARM32_CRC)
|
||||
file(GLOB AWS_CHECKSUMS_ARCH_SRC
|
||||
"${AWS_CHECKSUMS_DIR}/source/arm/*.c"
|
||||
"${AWS_CHECKSUMS_DIR}/source/arm/asm/*.c"
|
||||
)
|
||||
set_source_files_properties(source/arm/crc32c_arm.c PROPERTIES COMPILE_FLAGS -march=armv8-a+crc)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
list(APPEND AWS_SOURCES ${AWS_CHECKSUMS_SRC} ${AWS_CHECKSUMS_ARCH_SRC})
|
||||
list(APPEND AWS_PRIVATE_INCLUDES "${AWS_CHECKSUMS_DIR}/include/")
|
||||
|
||||
|
||||
# aws-c-io
|
||||
file(GLOB AWS_IO_SRC
|
||||
"${AWS_IO_DIR}/source/*.c"
|
||||
)
|
||||
|
||||
if (OS_LINUX)
|
||||
file(GLOB AWS_IO_OS_SRC
|
||||
"${AWS_IO_DIR}/source/linux/*.c"
|
||||
"${AWS_IO_DIR}/source/posix/*.c"
|
||||
)
|
||||
elseif (OS_DARWIN)
|
||||
file(GLOB AWS_IO_OS_SRC
|
||||
"${AWS_IO_DIR}/source/bsd/*.c"
|
||||
"${AWS_IO_DIR}/source/posix/*.c"
|
||||
)
|
||||
endif()
|
||||
|
||||
set(AWS_IO_TLS_SRC)
|
||||
if (USE_S2N)
|
||||
file(GLOB AWS_IO_TLS_SRC
|
||||
"${AWS_IO_DIR}/source/s2n/*.c"
|
||||
)
|
||||
endif()
|
||||
|
||||
list(APPEND AWS_SOURCES ${AWS_IO_SRC} ${AWS_IO_OS_SRC} ${AWS_IO_TLS_SRC})
|
||||
list(APPEND AWS_PUBLIC_INCLUDES "${AWS_IO_DIR}/include/")
|
||||
|
||||
|
||||
# aws-s2n-tls
|
||||
if (USE_S2N)
|
||||
file(GLOB AWS_S2N_TLS_SRC
|
||||
"${AWS_S2N_TLS_DIR}/crypto/*.c"
|
||||
"${AWS_S2N_TLS_DIR}/error/*.c"
|
||||
"${AWS_S2N_TLS_DIR}/stuffer/*.c"
|
||||
"${AWS_S2N_TLS_DIR}/pq-crypto/*.c"
|
||||
"${AWS_S2N_TLS_DIR}/pq-crypto/kyber_r3/*.c"
|
||||
"${AWS_S2N_TLS_DIR}/tls/*.c"
|
||||
"${AWS_S2N_TLS_DIR}/tls/extensions/*.c"
|
||||
"${AWS_S2N_TLS_DIR}/utils/*.c"
|
||||
)
|
||||
|
||||
list(APPEND AWS_SOURCES ${AWS_S2N_TLS_SRC})
|
||||
|
||||
list(APPEND AWS_PRIVATE_INCLUDES
|
||||
"${AWS_S2N_TLS_DIR}/"
|
||||
"${AWS_S2N_TLS_DIR}/api/"
|
||||
)
|
||||
endif()
|
||||
|
||||
|
||||
# aws-crt-cpp
|
||||
file(GLOB AWS_CRT_SRC
|
||||
"${AWS_CRT_DIR}/source/*.cpp"
|
||||
"${AWS_CRT_DIR}/source/auth/*.cpp"
|
||||
"${AWS_CRT_DIR}/source/crypto/*.cpp"
|
||||
"${AWS_CRT_DIR}/source/endpoints/*.cpp"
|
||||
"${AWS_CRT_DIR}/source/external/*.cpp"
|
||||
"${AWS_CRT_DIR}/source/http/*.cpp"
|
||||
"${AWS_CRT_DIR}/source/io/*.cpp"
|
||||
)
|
||||
|
||||
list(APPEND AWS_SOURCES ${AWS_CRT_SRC})
|
||||
list(APPEND AWS_PUBLIC_INCLUDES "${AWS_CRT_DIR}/include/")
|
||||
|
||||
|
||||
# aws-c-mqtt
|
||||
file(GLOB AWS_MQTT_SRC
|
||||
"${AWS_MQTT_DIR}/source/*.c"
|
||||
)
|
||||
|
||||
list(APPEND AWS_SOURCES ${AWS_MQTT_SRC})
|
||||
list(APPEND AWS_PUBLIC_INCLUDES "${AWS_MQTT_DIR}/include/")
|
||||
|
||||
|
||||
# aws-c-http
|
||||
file(GLOB AWS_HTTP_SRC
|
||||
"${AWS_HTTP_DIR}/source/*.c"
|
||||
)
|
||||
|
||||
list(APPEND AWS_SOURCES ${AWS_HTTP_SRC})
|
||||
list(APPEND AWS_PRIVATE_INCLUDES "${AWS_HTTP_DIR}/include/")
|
||||
|
||||
|
||||
# aws-c-compression
|
||||
file(GLOB AWS_COMPRESSION_SRC
|
||||
"${AWS_COMPRESSION_DIR}/source/*.c"
|
||||
)
|
||||
|
||||
list(APPEND AWS_SOURCES ${AWS_COMPRESSION_SRC})
|
||||
list(APPEND AWS_PRIVATE_INCLUDES "${AWS_COMPRESSION_DIR}/include/")
|
||||
|
||||
|
||||
# aws-c-s3
|
||||
file(GLOB AWS_S3_SRC
|
||||
"${AWS_S3_DIR}/source/*.c"
|
||||
)
|
||||
|
||||
list(APPEND AWS_SOURCES ${AWS_S3_SRC})
|
||||
list(APPEND AWS_PRIVATE_INCLUDES "${AWS_S3_DIR}/include/")
|
||||
|
||||
|
||||
# aws-c-sdkutils
|
||||
file(GLOB AWS_SDKUTILS_SRC
|
||||
"${AWS_SDKUTILS_DIR}/source/*.c"
|
||||
)
|
||||
|
||||
list(APPEND AWS_SOURCES ${AWS_SDKUTILS_SRC})
|
||||
list(APPEND AWS_PUBLIC_INCLUDES "${AWS_SDKUTILS_DIR}/include/")
|
||||
|
||||
|
||||
# Add library.
|
||||
add_library(_aws ${AWS_SOURCES})
|
||||
|
||||
target_include_directories(_aws SYSTEM BEFORE PUBLIC ${AWS_PUBLIC_INCLUDES})
|
||||
target_include_directories(_aws SYSTEM BEFORE PRIVATE ${AWS_PRIVATE_INCLUDES})
|
||||
target_compile_definitions(_aws PUBLIC ${AWS_PUBLIC_COMPILE_DEFS})
|
||||
target_compile_definitions(_aws PRIVATE ${AWS_PRIVATE_COMPILE_DEFS})
|
||||
target_link_libraries(_aws PRIVATE ${AWS_PRIVATE_LIBS})
|
||||
|
||||
aws_set_thread_affinity_method(_aws)
|
||||
aws_set_thread_name_method(_aws)
|
||||
|
||||
# The library is large - avoid bloat.
|
||||
if (OMIT_HEAVY_DEBUG_SYMBOLS)
|
||||
target_compile_options (_aws PRIVATE -g0)
|
||||
endif()
|
||||
|
||||
add_library(ch_contrib::aws_s3 ALIAS _aws)
|
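Because the file returns early when ENABLE_AWS_S3 is off, the ch_contrib::aws_s3 alias may or may not exist, so a consumer would typically guard on the target. A minimal sketch, with clickhouse_example and main.cpp as illustrative names:

```cmake
# Sketch only: link the S3 SDK into a hypothetical target when it was built.
add_executable(clickhouse_example main.cpp)

if(TARGET ch_contrib::aws_s3)
    target_link_libraries(clickhouse_example PRIVATE ch_contrib::aws_s3)
endif()
```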
1  contrib/aws-crt-cpp  (new vendored submodule)
@ -0,0 +1 @@
+Subproject commit ec0bea288f451d884c0d80d534bc5c66241c39a4
1  contrib/aws-s2n-tls  (new vendored submodule)
@ -0,0 +1 @@
+Subproject commit 0f1ba9e5c4a67cb3898de0c0b4f911d4194dc8de
|
@ -1,122 +0,0 @@
|
||||
if(NOT OS_FREEBSD)
|
||||
option(ENABLE_S3 "Enable S3" ${ENABLE_LIBRARIES})
|
||||
elseif(ENABLE_S3)
|
||||
message (${RECONFIGURE_MESSAGE_LEVEL} "Can't use S3 on FreeBSD")
|
||||
endif()
|
||||
|
||||
if(NOT ENABLE_S3)
|
||||
message(STATUS "Not using S3")
|
||||
return()
|
||||
endif()
|
||||
|
||||
SET(AWS_S3_LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws/aws-cpp-sdk-s3")
|
||||
SET(AWS_CORE_LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws/aws-cpp-sdk-core")
|
||||
SET(AWS_CHECKSUMS_LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws-checksums")
|
||||
SET(AWS_COMMON_LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws-c-common")
|
||||
SET(AWS_EVENT_STREAM_LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/aws-c-event-stream")
|
||||
|
||||
OPTION(USE_AWS_MEMORY_MANAGEMENT "Aws memory management" OFF)
|
||||
configure_file("${AWS_CORE_LIBRARY_DIR}/include/aws/core/SDKConfig.h.in"
|
||||
"${CMAKE_CURRENT_BINARY_DIR}/include/aws/core/SDKConfig.h" @ONLY)
|
||||
|
||||
configure_file("${AWS_COMMON_LIBRARY_DIR}/include/aws/common/config.h.in"
|
||||
"${CMAKE_CURRENT_BINARY_DIR}/include/aws/common/config.h" @ONLY)
|
||||
|
||||
|
||||
file(GLOB AWS_CORE_SOURCES
|
||||
"${AWS_CORE_LIBRARY_DIR}/source/*.cpp"
|
||||
"${AWS_CORE_LIBRARY_DIR}/source/auth/*.cpp"
|
||||
"${AWS_CORE_LIBRARY_DIR}/source/client/*.cpp"
|
||||
"${AWS_CORE_LIBRARY_DIR}/source/http/*.cpp"
|
||||
"${AWS_CORE_LIBRARY_DIR}/source/http/standard/*.cpp"
|
||||
"${AWS_CORE_LIBRARY_DIR}/source/config/*.cpp"
|
||||
"${AWS_CORE_LIBRARY_DIR}/source/external/cjson/*.cpp"
|
||||
"${AWS_CORE_LIBRARY_DIR}/source/external/tinyxml2/*.cpp"
|
||||
"${AWS_CORE_LIBRARY_DIR}/source/internal/*.cpp"
|
||||
"${AWS_CORE_LIBRARY_DIR}/source/monitoring/*.cpp"
|
||||
"${AWS_CORE_LIBRARY_DIR}/source/net/*.cpp"
|
||||
"${AWS_CORE_LIBRARY_DIR}/source/linux-shared/*.cpp"
|
||||
"${AWS_CORE_LIBRARY_DIR}/source/platform/linux-shared/*.cpp"
|
||||
"${AWS_CORE_LIBRARY_DIR}/source/utils/*.cpp"
|
||||
"${AWS_CORE_LIBRARY_DIR}/source/utils/base64/*.cpp"
|
||||
"${AWS_CORE_LIBRARY_DIR}/source/utils/event/*.cpp"
|
||||
"${AWS_CORE_LIBRARY_DIR}/source/utils/crypto/*.cpp"
|
||||
"${AWS_CORE_LIBRARY_DIR}/source/utils/crypto/openssl/*.cpp"
|
||||
"${AWS_CORE_LIBRARY_DIR}/source/utils/crypto/factory/*.cpp"
|
||||
"${AWS_CORE_LIBRARY_DIR}/source/utils/json/*.cpp"
|
||||
"${AWS_CORE_LIBRARY_DIR}/source/utils/logging/*.cpp"
|
||||
"${AWS_CORE_LIBRARY_DIR}/source/utils/memory/*.cpp"
|
||||
"${AWS_CORE_LIBRARY_DIR}/source/utils/memory/stl/*.cpp"
|
||||
"${AWS_CORE_LIBRARY_DIR}/source/utils/stream/*.cpp"
|
||||
"${AWS_CORE_LIBRARY_DIR}/source/utils/threading/*.cpp"
|
||||
"${AWS_CORE_LIBRARY_DIR}/source/utils/xml/*.cpp"
|
||||
)
|
||||
|
||||
file(GLOB AWS_S3_SOURCES
|
||||
"${AWS_S3_LIBRARY_DIR}/source/*.cpp"
|
||||
)
|
||||
|
||||
file(GLOB AWS_S3_MODEL_SOURCES
|
||||
"${AWS_S3_LIBRARY_DIR}/source/model/*.cpp"
|
||||
)
|
||||
|
||||
file(GLOB AWS_EVENT_STREAM_SOURCES
|
||||
"${AWS_EVENT_STREAM_LIBRARY_DIR}/source/*.c"
|
||||
)
|
||||
|
||||
file(GLOB AWS_COMMON_SOURCES
|
||||
"${AWS_COMMON_LIBRARY_DIR}/source/*.c"
|
||||
"${AWS_COMMON_LIBRARY_DIR}/source/posix/*.c"
|
||||
)
|
||||
|
||||
file(GLOB AWS_CHECKSUMS_SOURCES
|
||||
"${AWS_CHECKSUMS_LIBRARY_DIR}/source/*.c"
|
||||
"${AWS_CHECKSUMS_LIBRARY_DIR}/source/intel/*.c"
|
||||
"${AWS_CHECKSUMS_LIBRARY_DIR}/source/arm/*.c"
|
||||
)
|
||||
|
||||
file(GLOB S3_UNIFIED_SRC
|
||||
${AWS_EVENT_STREAM_SOURCES}
|
||||
${AWS_COMMON_SOURCES}
|
||||
${AWS_S3_SOURCES}
|
||||
${AWS_S3_MODEL_SOURCES}
|
||||
${AWS_CORE_SOURCES}
|
||||
)
|
||||
|
||||
set(S3_INCLUDES
|
||||
"${AWS_COMMON_LIBRARY_DIR}/include/"
|
||||
"${AWS_EVENT_STREAM_LIBRARY_DIR}/include/"
|
||||
"${AWS_S3_LIBRARY_DIR}/include/"
|
||||
"${AWS_CORE_LIBRARY_DIR}/include/"
|
||||
"${CMAKE_CURRENT_BINARY_DIR}/include/"
|
||||
)
|
||||
|
||||
add_library(_aws_s3_checksums ${AWS_CHECKSUMS_SOURCES})
|
||||
target_include_directories(_aws_s3_checksums SYSTEM PUBLIC "${AWS_CHECKSUMS_LIBRARY_DIR}/include/")
|
||||
if(CMAKE_BUILD_TYPE_UC STREQUAL "DEBUG")
|
||||
target_compile_definitions(_aws_s3_checksums PRIVATE "-DDEBUG_BUILD")
|
||||
endif()
|
||||
set_target_properties(_aws_s3_checksums PROPERTIES LINKER_LANGUAGE C)
|
||||
set_property(TARGET _aws_s3_checksums PROPERTY C_STANDARD 99)
|
||||
|
||||
add_library(_aws_s3 ${S3_UNIFIED_SRC})
|
||||
|
||||
target_compile_definitions(_aws_s3 PUBLIC "AWS_SDK_VERSION_MAJOR=1")
|
||||
target_compile_definitions(_aws_s3 PUBLIC "AWS_SDK_VERSION_MINOR=7")
|
||||
target_compile_definitions(_aws_s3 PUBLIC "AWS_SDK_VERSION_PATCH=231")
|
||||
target_include_directories(_aws_s3 SYSTEM BEFORE PUBLIC ${S3_INCLUDES})
|
||||
|
||||
if (TARGET OpenSSL::SSL)
|
||||
target_compile_definitions(_aws_s3 PUBLIC -DENABLE_OPENSSL_ENCRYPTION)
|
||||
target_link_libraries(_aws_s3 PRIVATE OpenSSL::Crypto OpenSSL::SSL)
|
||||
endif()
|
||||
|
||||
target_link_libraries(_aws_s3 PRIVATE _aws_s3_checksums)
|
||||
|
||||
# The library is large - avoid bloat.
|
||||
if (OMIT_HEAVY_DEBUG_SYMBOLS)
|
||||
target_compile_options (_aws_s3 PRIVATE -g0)
|
||||
target_compile_options (_aws_s3_checksums PRIVATE -g0)
|
||||
endif()
|
||||
|
||||
add_library(ch_contrib::aws_s3 ALIAS _aws_s3)
|
2  contrib/azure  (vendored)
@ -1 +1 @@
-Subproject commit ef75afc075fc71fbcd8fe28dcda3794ae265fd1c
+Subproject commit ea8c3044f43f5afa7016d2d580ed201f495d7e94
|
@ -1,4 +1,4 @@
-if(ARCH_AMD64 OR ARCH_AARCH64 OR ARCH_PPC64LE)
+if(ARCH_AMD64 OR ARCH_AARCH64 OR ARCH_PPC64LE OR ARCH_S390X)
|
||||
option (ENABLE_BASE64 "Enable base64" ${ENABLE_LIBRARIES})
|
||||
elseif(ENABLE_BASE64)
|
||||
message (${RECONFIGURE_MESSAGE_LEVEL} "base64 library is only supported on x86_64 and aarch64")
|
||||
|
@ -139,13 +139,6 @@ if(NOT OPENSSL_NO_ASM)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
if(BUILD_SHARED_LIBS)
|
||||
add_definitions(-DBORINGSSL_SHARED_LIBRARY)
|
||||
# Enable position-independent code globally. This is needed because
|
||||
# some library targets are OBJECT libraries.
|
||||
set(CMAKE_POSITION_INDEPENDENT_CODE TRUE)
|
||||
endif()
|
||||
|
||||
set(
|
||||
CRYPTO_ios_aarch64_SOURCES
|
||||
|
||||
|
@ -63,13 +63,8 @@ SET(SRCS
|
||||
"${LIBRARY_DIR}/src/lib/windows_port.c"
|
||||
)
|
||||
|
||||
if (USE_STATIC_LIBRARIES)
|
||||
add_library(_c-ares STATIC ${SRCS})
|
||||
target_compile_definitions(_c-ares PUBLIC CARES_STATICLIB)
|
||||
else()
|
||||
add_library(_c-ares SHARED ${SRCS})
|
||||
target_compile_definitions(_c-ares PUBLIC CARES_BUILDING_LIBRARY)
|
||||
endif()
|
||||
add_library(_c-ares STATIC ${SRCS})
|
||||
target_compile_definitions(_c-ares PUBLIC CARES_STATICLIB)
|
||||
|
||||
target_compile_definitions(_c-ares PRIVATE HAVE_CONFIG_H=1)
|
||||
|
||||
|
2  contrib/cctz  (vendored)
@ -1 +1 @@
-Subproject commit 5c8528fb35e89ee0b3a7157490423fba0d4dd7b5
+Subproject commit 7c78edd52b4d65acc103c2f195818ffcabe6fe0d
|
@ -10,9 +10,6 @@ else()
|
||||
endif()
|
||||
|
||||
option(ENABLE_RUST "Enable rust" ${DEFAULT_ENABLE_RUST})
|
||||
|
||||
message(STATUS ${ENABLE_RUST})
|
||||
|
||||
if(NOT ENABLE_RUST)
|
||||
message(STATUS "Not using rust")
|
||||
return()
|
||||
@ -42,5 +39,7 @@ endif()
|
||||
|
||||
message(STATUS "Switched Rust target to ${Rust_CARGO_TARGET}")
|
||||
|
||||
# FindRust.cmake
|
||||
list(APPEND CMAKE_MODULE_PATH "${ClickHouse_SOURCE_DIR}/contrib/corrosion/cmake")
|
||||
# Define function corrosion_import_crate()
|
||||
include ("${ClickHouse_SOURCE_DIR}/contrib/corrosion/cmake/Corrosion.cmake")
|
||||
|
1  contrib/crc32-s390x  (new vendored submodule)
@ -0,0 +1 @@
+Subproject commit 30980583bf9ed3fa193abb83a1849705ff457f70
27  contrib/crc32-s390x-cmake/CMakeLists.txt  (new file)
@ -0,0 +1,27 @@
|
||||
if(ARCH_S390X)
|
||||
option (ENABLE_CRC32_S390X "Enable crc32 on s390x platform" ON)
|
||||
endif()
|
||||
|
||||
if (NOT ENABLE_CRC32_S390X)
|
||||
return()
|
||||
endif()
|
||||
|
||||
set(CRC32_S390X_SOURCE_DIR ${ClickHouse_SOURCE_DIR}/contrib/crc32-s390x)
|
||||
set(CRC32_S390X_INCLUDE_DIR ${ClickHouse_SOURCE_DIR}/contrib/crc32-s390x)
|
||||
|
||||
set(CRC32_SRCS
|
||||
"${CRC32_S390X_SOURCE_DIR}/crc32-s390x.c"
|
||||
"${CRC32_S390X_SOURCE_DIR}/crc32be-vx.S"
|
||||
"${CRC32_S390X_SOURCE_DIR}/crc32le-vx.S"
|
||||
)
|
||||
|
||||
set(CRC32_HDRS
|
||||
"${CRC32_S390X_INCLUDE_DIR}/crc32-s390x.h"
|
||||
)
|
||||
|
||||
add_library(_crc32_s390x ${CRC32_SRCS} ${CRC32_HDRS})
|
||||
|
||||
target_include_directories(_crc32_s390x SYSTEM PUBLIC "${CRC32_S390X_INCLUDE_DIR}")
|
||||
target_compile_definitions(_crc32_s390x PUBLIC)
|
||||
|
||||
add_library(ch_contrib::crc32_s390x ALIAS _crc32_s390x)
|
1  contrib/crc32-vpmsum  (new vendored submodule)
@ -0,0 +1 @@
+Subproject commit 452155439389311fc7d143621eaf56a258e02476
14  contrib/crc32-vpmsum-cmake/CMakeLists.txt  (new file)
@ -0,0 +1,14 @@
|
||||
# module crc32-vpmsum gets built along with the files vec_crc32.h and crc32_constants.h in crc32-vpmsum-cmake
|
||||
# Please see README.md for information about how to generate crc32_constants.h
|
||||
if (NOT ARCH_PPC64LE)
|
||||
message (STATUS "crc32-vpmsum library is only supported on ppc64le")
|
||||
return()
|
||||
endif()
|
||||
|
||||
SET(LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/crc32-vpmsum")
|
||||
|
||||
add_library(_crc32-vpmsum
|
||||
"${LIBRARY_DIR}/vec_crc32.c"
|
||||
)
|
||||
target_include_directories(_crc32-vpmsum SYSTEM BEFORE PUBLIC "${CMAKE_CURRENT_SOURCE_DIR}")
|
||||
add_library(ch_contrib::crc32-vpmsum ALIAS _crc32-vpmsum)
|
9  contrib/crc32-vpmsum-cmake/README.md  (new file)
@ -0,0 +1,9 @@
|
||||
# To Generate crc32_constants.h
|
||||
|
||||
- Run the makefile in the `../crc32-vpmsum` directory with the following options and CRC polynomial. These options should use the same polynomial and order as the Intel intrinsic functions
|
||||
```bash
|
||||
make crc32_constants.h CRC="0x11EDC6F41" OPTIONS="-x -r -c"
|
||||
```
|
||||
- move the generated `crc32_constants.h` into this directory
|
||||
- To understand more about this go here: https://masterchef2209.wordpress.com/2020/06/17/guide-to-intel-sse4-2-crc-intrinisics-implementation-for-simde/
|
||||
- Here is the link to information about intel intrinsic functions: https://www.intel.com/content/www/us/en/docs/intrinsics-guide/index.html#text=_mm_crc32_u64&ig_expand=1492,1493,1559
|
1206  contrib/crc32-vpmsum-cmake/crc32_constants.h  (new file; diff not shown because it is too large)
26  contrib/crc32-vpmsum-cmake/vec_crc32.h  (new file)
@ -0,0 +1,26 @@
|
||||
#ifndef VEC_CRC32
|
||||
#define VEC_CRC32
|
||||
|
||||
#if ! ((defined(__PPC64__) || defined(__powerpc64__)) && __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__)
|
||||
# error PowerPC architecture is expected
|
||||
#endif
|
||||
|
||||
#ifdef __cplusplus
|
||||
extern "C" {
|
||||
#endif
|
||||
|
||||
unsigned int crc32_vpmsum(unsigned int crc, const unsigned char *p, unsigned long len);
|
||||
|
||||
static inline uint32_t crc32_ppc(uint64_t crc, unsigned char const *buffer, size_t len)
|
||||
{
|
||||
assert(buffer);
|
||||
crc = crc32_vpmsum(crc, buffer, (unsigned long)len);
|
||||
|
||||
return crc;
|
||||
}
|
||||
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
#endif
|
||||
|
||||
#endif
|
1  contrib/google-benchmark  (new vendored submodule)
@ -0,0 +1 @@
+Subproject commit 2257fa4d6afb8e5a2ccd510a70f38fe7fcdf1edf
34  contrib/google-benchmark-cmake/CMakeLists.txt  (new file)
@ -0,0 +1,34 @@
|
||||
set (SRC_DIR "${ClickHouse_SOURCE_DIR}/contrib/google-benchmark/src")
|
||||
|
||||
set (SRCS
|
||||
"${SRC_DIR}/benchmark.cc"
|
||||
"${SRC_DIR}/benchmark_api_internal.cc"
|
||||
"${SRC_DIR}/benchmark_name.cc"
|
||||
"${SRC_DIR}/benchmark_register.cc"
|
||||
"${SRC_DIR}/benchmark_runner.cc"
|
||||
"${SRC_DIR}/check.cc"
|
||||
"${SRC_DIR}/colorprint.cc"
|
||||
"${SRC_DIR}/commandlineflags.cc"
|
||||
"${SRC_DIR}/complexity.cc"
|
||||
"${SRC_DIR}/console_reporter.cc"
|
||||
"${SRC_DIR}/counter.cc"
|
||||
"${SRC_DIR}/csv_reporter.cc"
|
||||
"${SRC_DIR}/json_reporter.cc"
|
||||
"${SRC_DIR}/perf_counters.cc"
|
||||
"${SRC_DIR}/reporter.cc"
|
||||
"${SRC_DIR}/sleep.cc"
|
||||
"${SRC_DIR}/statistics.cc"
|
||||
"${SRC_DIR}/string_util.cc"
|
||||
"${SRC_DIR}/sysinfo.cc"
|
||||
"${SRC_DIR}/timers.cc")
|
||||
|
||||
add_library(google_benchmark "${SRCS}")
|
||||
target_include_directories(google_benchmark SYSTEM PUBLIC "${SRC_DIR}/../include")
|
||||
|
||||
add_library(google_benchmark_main "${SRC_DIR}/benchmark_main.cc")
|
||||
target_link_libraries(google_benchmark_main PUBLIC google_benchmark)
|
||||
|
||||
add_library(google_benchmark_all INTERFACE)
|
||||
target_link_libraries(google_benchmark_all INTERFACE google_benchmark google_benchmark_main)
|
||||
|
||||
add_library(ch_contrib::gbenchmark_all ALIAS google_benchmark_all)
|
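The ch_contrib::gbenchmark_all alias bundles the benchmark library together with its main(). A hedged sketch of a benchmark target consuming it; my_column_bench and its source file are illustrative names:

```cmake
# Sketch only: an executable that gets main() from google_benchmark_main
# via the bundled alias.
add_executable(my_column_bench my_column_bench.cpp)
target_link_libraries(my_column_bench PRIVATE ch_contrib::gbenchmark_all)
```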
2  contrib/googletest  (vendored)
@ -1 +1 @@
-Subproject commit e7e591764baba0a0c3c9ad0014430e7a27331d16
+Subproject commit 71140c3ca7a87bb1b5b9c9f1500fea8858cce344
|
@ -0,0 +1,435 @@
|
||||
/* include/jemalloc/internal/jemalloc_internal_defs.h. Generated from jemalloc_internal_defs.h.in by configure. */
|
||||
#ifndef JEMALLOC_INTERNAL_DEFS_H_
|
||||
#define JEMALLOC_INTERNAL_DEFS_H_
|
||||
/*
|
||||
* If JEMALLOC_PREFIX is defined via --with-jemalloc-prefix, it will cause all
|
||||
* public APIs to be prefixed. This makes it possible, with some care, to use
|
||||
* multiple allocators simultaneously.
|
||||
*/
|
||||
/* #undef JEMALLOC_PREFIX */
|
||||
/* #undef JEMALLOC_CPREFIX */
|
||||
|
||||
/*
|
||||
* Define overrides for non-standard allocator-related functions if they are
|
||||
* present on the system.
|
||||
*/
|
||||
/* #undef JEMALLOC_OVERRIDE___LIBC_CALLOC */
|
||||
/* #undef JEMALLOC_OVERRIDE___LIBC_FREE */
|
||||
/* #undef JEMALLOC_OVERRIDE___LIBC_MALLOC */
|
||||
/* #undef JEMALLOC_OVERRIDE___LIBC_MEMALIGN */
|
||||
/* #undef JEMALLOC_OVERRIDE___LIBC_REALLOC */
|
||||
/* #undef JEMALLOC_OVERRIDE___LIBC_VALLOC */
|
||||
/* #undef JEMALLOC_OVERRIDE___LIBC_PVALLOC */
|
||||
#define JEMALLOC_OVERRIDE___POSIX_MEMALIGN
|
||||
|
||||
/*
|
||||
* JEMALLOC_PRIVATE_NAMESPACE is used as a prefix for all library-private APIs.
|
||||
* For shared libraries, symbol visibility mechanisms prevent these symbols
|
||||
* from being exported, but for static libraries, naming collisions are a real
|
||||
* possibility.
|
||||
*/
|
||||
#define JEMALLOC_PRIVATE_NAMESPACE je_
|
||||
|
||||
/*
|
||||
* Hyper-threaded CPUs may need a special instruction inside spin loops in
|
||||
* order to yield to another virtual CPU.
|
||||
*/
|
||||
#define CPU_SPINWAIT
|
||||
/* 1 if CPU_SPINWAIT is defined, 0 otherwise. */
|
||||
#define HAVE_CPU_SPINWAIT 0
|
||||
|
||||
/*
|
||||
* Number of significant bits in virtual addresses. This may be less than the
|
||||
* total number of bits in a pointer, e.g. on x64, for which the uppermost 16
|
||||
* bits are the same as bit 47.
|
||||
*/
|
||||
#define LG_VADDR 64
|
||||
|
||||
/* Defined if C11 atomics are available. */
|
||||
#define JEMALLOC_C11_ATOMICS
|
||||
|
||||
/* Defined if GCC __atomic atomics are available. */
|
||||
#define JEMALLOC_GCC_ATOMIC_ATOMICS
|
||||
/* and the 8-bit variant support. */
|
||||
#define JEMALLOC_GCC_U8_ATOMIC_ATOMICS
|
||||
|
||||
/* Defined if GCC __sync atomics are available. */
|
||||
#define JEMALLOC_GCC_SYNC_ATOMICS
|
||||
/* and the 8-bit variant support. */
|
||||
#define JEMALLOC_GCC_U8_SYNC_ATOMICS
|
||||
|
||||
/*
|
||||
* Defined if __builtin_clz() and __builtin_clzl() are available.
|
||||
*/
|
||||
#define JEMALLOC_HAVE_BUILTIN_CLZ
|
||||
|
||||
/*
|
||||
* Defined if os_unfair_lock_*() functions are available, as provided by Darwin.
|
||||
*/
|
||||
/* #undef JEMALLOC_OS_UNFAIR_LOCK */
|
||||
|
||||
/* Defined if syscall(2) is usable. */
|
||||
#define JEMALLOC_USE_SYSCALL
|
||||
|
||||
/*
|
||||
* Defined if secure_getenv(3) is available.
|
||||
*/
|
||||
/* #undef JEMALLOC_HAVE_SECURE_GETENV */
|
||||
|
||||
/*
|
||||
* Defined if issetugid(2) is available.
|
||||
*/
|
||||
#define JEMALLOC_HAVE_ISSETUGID
|
||||
|
||||
/* Defined if pthread_atfork(3) is available. */
|
||||
#define JEMALLOC_HAVE_PTHREAD_ATFORK
|
||||
|
||||
/* Defined if pthread_setname_np(3) is available. */
|
||||
#define JEMALLOC_HAVE_PTHREAD_SETNAME_NP
|
||||
|
||||
/* Defined if pthread_getname_np(3) is available. */
|
||||
#define JEMALLOC_HAVE_PTHREAD_GETNAME_NP
|
||||
|
||||
/* Defined if pthread_get_name_np(3) is available. */
|
||||
#define JEMALLOC_HAVE_PTHREAD_GET_NAME_NP
|
||||
|
||||
/*
|
||||
* Defined if clock_gettime(CLOCK_MONOTONIC_COARSE, ...) is available.
|
||||
*/
|
||||
#define JEMALLOC_HAVE_CLOCK_MONOTONIC_COARSE
|
||||
|
||||
/*
|
||||
* Defined if clock_gettime(CLOCK_MONOTONIC, ...) is available.
|
||||
*/
|
||||
#define JEMALLOC_HAVE_CLOCK_MONOTONIC
|
||||
|
||||
/*
|
||||
* Defined if mach_absolute_time() is available.
|
||||
*/
|
||||
/* #undef JEMALLOC_HAVE_MACH_ABSOLUTE_TIME */
|
||||
|
||||
/*
|
||||
* Defined if clock_gettime(CLOCK_REALTIME, ...) is available.
|
||||
*/
|
||||
#define JEMALLOC_HAVE_CLOCK_REALTIME
|
||||
|
||||
/*
|
||||
* Defined if _malloc_thread_cleanup() exists. At least in the case of
|
||||
* FreeBSD, pthread_key_create() allocates, which if used during malloc
|
||||
* bootstrapping will cause recursion into the pthreads library. Therefore, if
|
||||
* _malloc_thread_cleanup() exists, use it as the basis for thread cleanup in
|
||||
* malloc_tsd.
|
||||
*/
|
||||
#define JEMALLOC_MALLOC_THREAD_CLEANUP
|
||||
|
||||
/*
|
||||
* Defined if threaded initialization is known to be safe on this platform.
|
||||
* Among other things, it must be possible to initialize a mutex without
|
||||
* triggering allocation in order for threaded allocation to be safe.
|
||||
*/
|
||||
/* #undef JEMALLOC_THREADED_INIT */
|
||||
|
||||
/*
|
||||
* Defined if the pthreads implementation defines
|
||||
* _pthread_mutex_init_calloc_cb(), in which case the function is used in order
|
||||
* to avoid recursive allocation during mutex initialization.
|
||||
*/
|
||||
#define JEMALLOC_MUTEX_INIT_CB
|
||||
|
||||
/* Non-empty if the tls_model attribute is supported. */
|
||||
#define JEMALLOC_TLS_MODEL __attribute__((tls_model("initial-exec")))
|
||||
|
||||
/*
|
||||
* JEMALLOC_DEBUG enables assertions and other sanity checks, and disables
|
||||
* inline functions.
|
||||
*/
|
||||
/* #undef JEMALLOC_DEBUG */
|
||||
|
||||
/* JEMALLOC_STATS enables statistics calculation. */
|
||||
#define JEMALLOC_STATS
|
||||
|
||||
/* JEMALLOC_EXPERIMENTAL_SMALLOCX_API enables experimental smallocx API. */
|
||||
/* #undef JEMALLOC_EXPERIMENTAL_SMALLOCX_API */
|
||||
|
||||
/* JEMALLOC_PROF enables allocation profiling. */
|
||||
/* #undef JEMALLOC_PROF */
|
||||
|
||||
/* Use libunwind for profile backtracing if defined. */
|
||||
/* #undef JEMALLOC_PROF_LIBUNWIND */
|
||||
|
||||
/* Use libgcc for profile backtracing if defined. */
|
||||
/* #undef JEMALLOC_PROF_LIBGCC */
|
||||
|
||||
/* Use gcc intrinsics for profile backtracing if defined. */
|
||||
/* #undef JEMALLOC_PROF_GCC */
|
||||
|
||||
/* JEMALLOC_PAGEID enabled page id */
|
||||
/* #undef JEMALLOC_PAGEID */
|
||||
|
||||
/* JEMALLOC_HAVE_PRCTL checks prctl */
|
||||
/* #undef JEMALLOC_HAVE_PRCTL */
|
||||
|
||||
/*
|
||||
* JEMALLOC_DSS enables use of sbrk(2) to allocate extents from the data storage
|
||||
* segment (DSS).
|
||||
*/
|
||||
#define JEMALLOC_DSS
|
||||
|
||||
/* Support memory filling (junk/zero). */
|
||||
#define JEMALLOC_FILL
|
||||
|
||||
/* Support utrace(2)-based tracing. */
|
||||
/* #undef JEMALLOC_UTRACE */
|
||||
|
||||
/* Support utrace(2)-based tracing (label based signature). */
|
||||
/* #undef JEMALLOC_UTRACE_LABEL */
|
||||
|
||||
/* Support optional abort() on OOM. */
|
||||
/* #undef JEMALLOC_XMALLOC */
|
||||
|
||||
/* Support lazy locking (avoid locking unless a second thread is launched). */
|
||||
#define JEMALLOC_LAZY_LOCK
|
||||
|
||||
/*
|
||||
* Minimum allocation alignment is 2^LG_QUANTUM bytes (ignoring tiny size
|
||||
* classes).
|
||||
*/
|
||||
/* #undef LG_QUANTUM */
|
||||
|
||||
/* One page is 2^LG_PAGE bytes. */
|
||||
#define LG_PAGE 12
|
||||
|
||||
/* Maximum number of regions in a slab. */
|
||||
/* #undef CONFIG_LG_SLAB_MAXREGS */
|
||||
|
||||
/*
|
||||
* One huge page is 2^LG_HUGEPAGE bytes. Note that this is defined even if the
|
||||
* system does not explicitly support huge pages; system calls that require
|
||||
* explicit huge page support are separately configured.
|
||||
*/
|
||||
#define LG_HUGEPAGE 21
|
||||
|
||||
/*
|
||||
* If defined, adjacent virtual memory mappings with identical attributes
|
||||
* automatically coalesce, and they fragment when changes are made to subranges.
|
||||
* This is the normal order of things for mmap()/munmap(), but on Windows
|
||||
* VirtualAlloc()/VirtualFree() operations must be precisely matched, i.e.
|
||||
* mappings do *not* coalesce/fragment.
|
||||
*/
|
||||
#define JEMALLOC_MAPS_COALESCE
|
||||
|
||||
/*
|
||||
* If defined, retain memory for later reuse by default rather than using e.g.
|
||||
* munmap() to unmap freed extents. This is enabled on 64-bit Linux because
|
||||
* common sequences of mmap()/munmap() calls will cause virtual memory map
|
||||
* holes.
|
||||
*/
|
||||
/* #undef JEMALLOC_RETAIN */
|
||||
|
||||
/* TLS is used to map arenas and magazine caches to threads. */
|
||||
#define JEMALLOC_TLS
|
||||
|
||||
/*
|
||||
* Used to mark unreachable code to quiet "end of non-void" compiler warnings.
|
||||
* Don't use this directly; instead use unreachable() from util.h
|
||||
*/
|
||||
#define JEMALLOC_INTERNAL_UNREACHABLE __builtin_unreachable
|
||||
|
||||
/*
|
||||
* ffs*() functions to use for bitmapping. Don't use these directly; instead,
|
||||
* use ffs_*() from util.h.
|
||||
*/
|
||||
#define JEMALLOC_INTERNAL_FFSLL __builtin_ffsll
|
||||
#define JEMALLOC_INTERNAL_FFSL __builtin_ffsl
|
||||
#define JEMALLOC_INTERNAL_FFS __builtin_ffs
|
||||
|
||||
/*
|
||||
* popcount*() functions to use for bitmapping.
|
||||
*/
|
||||
#define JEMALLOC_INTERNAL_POPCOUNTL __builtin_popcountl
|
||||
#define JEMALLOC_INTERNAL_POPCOUNT __builtin_popcount
|
||||
|
||||
/*
|
||||
* If defined, explicitly attempt to more uniformly distribute large allocation
|
||||
* pointer alignments across all cache indices.
|
||||
*/
|
||||
#define JEMALLOC_CACHE_OBLIVIOUS
|
||||
|
||||
/*
|
||||
* If defined, enable logging facilities. We make this a configure option to
|
||||
* avoid taking extra branches everywhere.
|
||||
*/
|
||||
/* #undef JEMALLOC_LOG */
|
||||
|
||||
/*
|
||||
* If defined, use readlinkat() (instead of readlink()) to follow
|
||||
* /etc/malloc_conf.
|
||||
*/
|
||||
/* #undef JEMALLOC_READLINKAT */
|
||||
|
||||
/*
|
||||
* Darwin (OS X) uses zones to work around Mach-O symbol override shortcomings.
|
||||
*/
|
||||
/* #undef JEMALLOC_ZONE */
|
||||
|
||||
/*
|
||||
* Methods for determining whether the OS overcommits.
|
||||
* JEMALLOC_PROC_SYS_VM_OVERCOMMIT_MEMORY: Linux's
|
||||
* /proc/sys/vm.overcommit_memory file.
|
||||
* JEMALLOC_SYSCTL_VM_OVERCOMMIT: FreeBSD's vm.overcommit sysctl.
|
||||
*/
|
||||
#define JEMALLOC_SYSCTL_VM_OVERCOMMIT
|
||||
/* #undef JEMALLOC_PROC_SYS_VM_OVERCOMMIT_MEMORY */
|
||||
|
||||
/* Defined if madvise(2) is available. */
|
||||
#define JEMALLOC_HAVE_MADVISE
|
||||
|
||||
/*
|
||||
* Defined if transparent huge pages are supported via the MADV_[NO]HUGEPAGE
|
||||
* arguments to madvise(2).
|
||||
*/
|
||||
/* #undef JEMALLOC_HAVE_MADVISE_HUGE */
|
||||
|
||||
/*
|
||||
* Methods for purging unused pages differ between operating systems.
|
||||
*
|
||||
* madvise(..., MADV_FREE) : This marks pages as being unused, such that they
|
||||
* will be discarded rather than swapped out.
|
||||
* madvise(..., MADV_DONTNEED) : If JEMALLOC_PURGE_MADVISE_DONTNEED_ZEROS is
|
||||
* defined, this immediately discards pages,
|
||||
* such that new pages will be demand-zeroed if
|
||||
* the address region is later touched;
|
||||
* otherwise this behaves similarly to
|
||||
* MADV_FREE, though typically with higher
|
||||
* system overhead.
|
||||
*/
|
||||
#define JEMALLOC_PURGE_MADVISE_FREE
|
||||
#define JEMALLOC_PURGE_MADVISE_DONTNEED
|
||||
/* #undef JEMALLOC_PURGE_MADVISE_DONTNEED_ZEROS */
|
||||
|
||||
/* Defined if madvise(2) is available but MADV_FREE is not (x86 Linux only). */
|
||||
/* #undef JEMALLOC_DEFINE_MADVISE_FREE */
|
||||
|
||||
/*
|
||||
* Defined if MADV_DO[NT]DUMP is supported as an argument to madvise.
|
||||
*/
|
||||
/* #undef JEMALLOC_MADVISE_DONTDUMP */
|
||||
|
||||
/*
|
||||
* Defined if MADV_[NO]CORE is supported as an argument to madvise.
|
||||
*/
|
||||
#define JEMALLOC_MADVISE_NOCORE
|
||||
|
||||
/* Defined if mprotect(2) is available. */
|
||||
#define JEMALLOC_HAVE_MPROTECT
|
||||
|
||||
/*
|
||||
* Defined if transparent huge pages (THPs) are supported via the
|
||||
* MADV_[NO]HUGEPAGE arguments to madvise(2), and THP support is enabled.
|
||||
*/
|
||||
/* #undef JEMALLOC_THP */
|
||||
|
||||
/* Defined if posix_madvise is available. */
|
||||
/* #undef JEMALLOC_HAVE_POSIX_MADVISE */
|
||||
|
||||
/*
|
||||
* Method for purging unused pages using posix_madvise.
|
||||
*
|
||||
* posix_madvise(..., POSIX_MADV_DONTNEED)
|
||||
*/
|
||||
/* #undef JEMALLOC_PURGE_POSIX_MADVISE_DONTNEED */
|
||||
/* #undef JEMALLOC_PURGE_POSIX_MADVISE_DONTNEED_ZEROS */
|
||||
|
||||
/*
|
||||
* Defined if memcntl page admin call is supported
|
||||
*/
|
||||
/* #undef JEMALLOC_HAVE_MEMCNTL */
|
||||
|
||||
/*
|
||||
* Defined if malloc_size is supported
|
||||
*/
|
||||
/* #undef JEMALLOC_HAVE_MALLOC_SIZE */
|
||||
|
||||
/* Define if operating system has alloca.h header. */
|
||||
/* #undef JEMALLOC_HAS_ALLOCA_H */
|
||||
|
||||
/* C99 restrict keyword supported. */
|
||||
#define JEMALLOC_HAS_RESTRICT
|
||||
|
||||
/* For use by hash code. */
|
||||
/* #undef JEMALLOC_BIG_ENDIAN */
|
||||
|
||||
/* sizeof(int) == 2^LG_SIZEOF_INT. */
|
||||
#define LG_SIZEOF_INT 2
|
||||
|
||||
/* sizeof(long) == 2^LG_SIZEOF_LONG. */
|
||||
#define LG_SIZEOF_LONG 3
|
||||
|
||||
/* sizeof(long long) == 2^LG_SIZEOF_LONG_LONG. */
|
||||
#define LG_SIZEOF_LONG_LONG 3
|
||||
|
||||
/* sizeof(intmax_t) == 2^LG_SIZEOF_INTMAX_T. */
|
||||
#define LG_SIZEOF_INTMAX_T 3
|
||||
|
||||
/* glibc malloc hooks (__malloc_hook, __realloc_hook, __free_hook). */
|
||||
/* #undef JEMALLOC_GLIBC_MALLOC_HOOK */
|
||||
|
||||
/* glibc memalign hook. */
|
||||
/* #undef JEMALLOC_GLIBC_MEMALIGN_HOOK */
|
||||
|
||||
/* pthread support */
|
||||
#define JEMALLOC_HAVE_PTHREAD
|
||||
|
||||
/* dlsym() support */
|
||||
#define JEMALLOC_HAVE_DLSYM
|
||||
|
||||
/* Adaptive mutex support in pthreads. */
|
||||
#define JEMALLOC_HAVE_PTHREAD_MUTEX_ADAPTIVE_NP
|
||||
|
||||
/* GNU specific sched_getcpu support */
|
||||
#define JEMALLOC_HAVE_SCHED_GETCPU
|
||||
|
||||
/* GNU specific sched_setaffinity support */
|
||||
#define JEMALLOC_HAVE_SCHED_SETAFFINITY
|
||||
|
||||
/*
|
||||
* If defined, all the features necessary for background threads are present.
|
||||
*/
|
||||
#define JEMALLOC_BACKGROUND_THREAD
|
||||
|
||||
/*
|
||||
* If defined, jemalloc symbols are not exported (doesn't work when
|
||||
* JEMALLOC_PREFIX is not defined).
|
||||
*/
|
||||
/* #undef JEMALLOC_EXPORT */
|
||||
|
||||
/* config.malloc_conf options string. */
|
||||
#define JEMALLOC_CONFIG_MALLOC_CONF ""
|
||||
|
||||
/* If defined, jemalloc takes the malloc/free/etc. symbol names. */
|
||||
#define JEMALLOC_IS_MALLOC
|
||||
|
||||
/*
|
||||
* Defined if strerror_r returns char * if _GNU_SOURCE is defined.
|
||||
*/
|
||||
/* #undef JEMALLOC_STRERROR_R_RETURNS_CHAR_WITH_GNU_SOURCE */
|
||||
|
||||
/* Performs additional safety checks when defined. */
|
||||
/* #undef JEMALLOC_OPT_SAFETY_CHECKS */
|
||||
|
||||
/* Is C++ support being built? */
|
||||
#define JEMALLOC_ENABLE_CXX
|
||||
|
||||
/* Performs additional size checks when defined. */
|
||||
/* #undef JEMALLOC_OPT_SIZE_CHECKS */
|
||||
|
||||
/* Allows sampled junk and stash for checking use-after-free when defined. */
|
||||
/* #undef JEMALLOC_UAF_DETECTION */
|
||||
|
||||
/* Darwin VM_MAKE_TAG support */
|
||||
/* #undef JEMALLOC_HAVE_VM_MAKE_TAG */
|
||||
|
||||
/* If defined, realloc(ptr, 0) defaults to "free" instead of "alloc". */
|
||||
/* #undef JEMALLOC_ZERO_REALLOC_DEFAULT_FREE */
|
||||
|
||||
#endif /* JEMALLOC_INTERNAL_DEFS_H_ */
|
2  contrib/krb5  (vendored)
@ -1 +1 @@
-Subproject commit b89e20367b074bd02dd118a6534099b21e88b3c3
+Subproject commit f8262a1b548eb29d97e059260042036255d07f8d
|
@ -15,6 +15,10 @@ if(NOT AWK_PROGRAM)
|
||||
message(FATAL_ERROR "You need the awk program to build ClickHouse with krb5 enabled.")
|
||||
endif()
|
||||
|
||||
if (NOT (ENABLE_OPENSSL OR ENABLE_OPENSSL_DYNAMIC))
|
||||
add_compile_definitions(USE_BORINGSSL=1)
|
||||
endif ()
|
||||
|
||||
set(KRB5_SOURCE_DIR "${ClickHouse_SOURCE_DIR}/contrib/krb5/src")
|
||||
set(KRB5_ET_BIN_DIR "${CMAKE_CURRENT_BINARY_DIR}/include_private")
|
||||
|
||||
@ -578,12 +582,6 @@ if(CMAKE_SYSTEM_NAME MATCHES "Darwin")
|
||||
list(APPEND ALL_SRCS "${CMAKE_CURRENT_BINARY_DIR}/include_private/kcmrpc.c")
|
||||
endif()
|
||||
|
||||
if (ENABLE_EXTERNAL_OPENSSL)
|
||||
list(REMOVE_ITEM ALL_SRCS "${KRB5_SOURCE_DIR}/lib/crypto/openssl/enc_provider/aes.c")
|
||||
list(APPEND ALL_SRCS "${CMAKE_CURRENT_SOURCE_DIR}/aes.c")
|
||||
endif ()
|
||||
|
||||
|
||||
target_sources(_krb5 PRIVATE
|
||||
${ALL_SRCS}
|
||||
)
|
||||
|
Some files were not shown because too many files have changed in this diff.