mirror of https://github.com/ClickHouse/ClickHouse.git
synced 2024-11-25 00:52:02 +00:00

Commit a6c1477ece: Merge remote-tracking branch 'ClickHouse/master' into change_date
.clang-tidy (11 changes)

@@ -94,6 +94,7 @@ Checks: [
     '-modernize-pass-by-value',
     '-modernize-return-braced-init-list',
     '-modernize-use-auto',
+    '-modernize-use-constraints', # This is a good check, but clang-tidy crashes, see https://github.com/llvm/llvm-project/issues/91872
     '-modernize-use-default-member-init',
     '-modernize-use-emplace',
     '-modernize-use-nodiscard',
@@ -121,7 +122,8 @@ Checks: [
     '-readability-magic-numbers',
     '-readability-named-parameter',
     '-readability-redundant-declaration',
-    '-readability-redundant-inline-specifier',
+    '-readability-redundant-inline-specifier', # useful but incompatible with __attribute((always_inline))__ (aka. ALWAYS_INLINE, base/base/defines.h).
+                                               # ALWAYS_INLINE only has an effect if combined with `inline`: https://godbolt.org/z/Eefd74qdM
     '-readability-redundant-member-init', # Useful but triggers another problem. Imagine a struct S with multiple String members. Structs are often instantiated via designated
                                           # initializer S s{.s1 = [...], .s2 = [...], [...]}. In this case, compiler warning `missing-field-initializers` requires to specify all members which are not in-struct
                                           # initialized (example: s1 in struct S { String s1; String s2{};}; is not in-struct initialized, therefore it must be specified at instantiation time). As explicitly
@@ -132,12 +134,7 @@ Checks: [
     '-readability-uppercase-literal-suffix',
     '-readability-use-anyofallof',
 
-    '-zircon-*',
-
-    # This is a good check, but clang-tidy crashes, see https://github.com/llvm/llvm-project/issues/91872
-    '-modernize-use-constraints',
-    # https://github.com/abseil/abseil-cpp/issues/1667
-    '-clang-analyzer-optin.core.EnumCastOutOfRange'
+    '-zircon-*'
 ]
 
 WarningsAsErrors: '*'
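
A minimal C++ sketch of the interaction the '-readability-redundant-inline-specifier' comment above describes; the macro below is a hypothetical stand-in for ALWAYS_INLINE from base/base/defines.h:

// Hypothetical stand-in for the ALWAYS_INLINE macro (base/base/defines.h).
#define ALWAYS_INLINE __attribute__((__always_inline__))

// The check would flag `inline` as redundant here, but per the comment and
// its godbolt link the attribute only takes effect on functions that are
// also declared inline, so removing the specifier would change behavior.
ALWAYS_INLINE inline int twice(int x) { return x + x; }

int main() { return twice(21) == 42 ? 0 : 1; }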
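Likewise, a small self-contained sketch of the '-readability-redundant-member-init' pitfall the comment spells out, assuming std::string in place of ClickHouse's String alias:

#include <string>

struct S
{
    std::string s1;   // no in-struct initializer: must be named in S{...}
                      // to satisfy -Wmissing-field-initializers
    std::string s2{}; // in-struct initialized; exactly the `{}` the check
                      // would ask to remove
};

S makeS()
{
    // Fine today: only s1 has to be spelled out. Dropping the `{}` from s2,
    // as the check suggests, would make this designated initializer warn
    // about s2 as well.
    return S{.s1 = "x"};
}

int main() { return makeS().s2.empty() ? 0 : 1; }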
.editorconfig (4 changes)

@@ -19,3 +19,7 @@ charset = utf-8
 indent_style = space
 indent_size = 4
 trim_trailing_whitespace = true
+
+# Some SQL results have trailing whitespace which is removed by IDEs
+[tests/queries/**.reference]
+trim_trailing_whitespace = false
.gitattributes (1 change, vendored)

@@ -2,3 +2,4 @@ contrib/* linguist-vendored
 *.h linguist-language=C++
 tests/queries/0_stateless/data_json/* binary
 tests/queries/0_stateless/*.reference -crlf
+src/Core/SettingsChangesHistory.cpp merge=union
.github/ISSUE_TEMPLATE/10_question.md (12 changes, vendored, deleted)

@@ -1,12 +0,0 @@
----
-name: Question
-about: Ask a question about ClickHouse
-title: ''
-labels: question
-assignees: ''
-
----
-
-> Make sure to check documentation https://clickhouse.com/docs/en/ first. If the question is concise and probably has a short answer, asking it in [community Slack](https://join.slack.com/t/clickhousedb/shared_invite/zt-1gh9ds7f4-PgDhJAaF8ad5RbWBAAjzFg) is probably the fastest way to find the answer. For more complicated questions, consider asking them on StackOverflow with "clickhouse" tag https://stackoverflow.com/questions/tagged/clickhouse
-
-> If you still prefer GitHub issues, remove all this text and ask your question here.
.github/ISSUE_TEMPLATE/10_question.yaml (20 changes, vendored, new file)

@@ -0,0 +1,20 @@
+name: Question
+description: Ask a question about ClickHouse
+labels: ["question"]
+body:
+  - type: markdown
+    attributes:
+      value: |
+        > Make sure to check documentation https://clickhouse.com/docs/en/ first. If the question is concise and probably has a short answer, asking it in [community Slack](https://join.slack.com/t/clickhousedb/shared_invite/zt-1gh9ds7f4-PgDhJAaF8ad5RbWBAAjzFg) is probably the fastest way to find the answer. For more complicated questions, consider asking them on StackOverflow with "clickhouse" tag https://stackoverflow.com/questions/tagged/clickhouse
+  - type: textarea
+    attributes:
+      label: Company or project name
+      description: Put your company name or project description here.
+    validations:
+      required: false
+  - type: textarea
+    attributes:
+      label: Question
+      description: Please put your question here.
+    validations:
+      required: true
.github/ISSUE_TEMPLATE/20_feature-request.md (4 changes, vendored)

@@ -9,6 +9,10 @@ assignees: ''
 
 > (you don't have to strictly follow this form)
 
+**Company or project name**
+
+> Put your company name or project description here
+
 **Use case**
 
 > A clear and concise description of what is the intended usage scenario is.
(two more issue templates; file headers lost in the mirror)

@@ -9,6 +9,10 @@ assignees: ''
 
 (you don't have to strictly follow this form)
 
+**Company or project name**
+
+Put your company name or project description here
+
 **Describe the unexpected behaviour**
 A clear and concise description of what works not as it is supposed to.
 

@@ -9,6 +9,10 @@ assignees: ''
 
 (you don't have to strictly follow this form)
 
+**Company or project name**
+
+Put your company name or project description here
+
 **Describe the unexpected behaviour**
 A clear and concise description of what works not as it is supposed to.
 
.github/ISSUE_TEMPLATE/45_usability-issue.md (3 changes, vendored)

@@ -9,6 +9,9 @@ assignees: ''
 
 (you don't have to strictly follow this form)
 
+**Company or project name**
+Put your company name or project description here
+
 **Describe the issue**
 A clear and concise description of what works not as it is supposed to.
 
.github/ISSUE_TEMPLATE/50_build-issue.md (4 changes, vendored)

@@ -9,6 +9,10 @@ assignees: ''
 
 > Make sure that `git diff` result is empty and you've just pulled fresh master. Try cleaning up cmake cache. Just in case, official build instructions are published here: https://clickhouse.com/docs/en/development/build/
 
+**Company or project name**
+
+> Put your company name or project description here
+
 **Operating system**
 
 > OS kind or distribution, specific version/release, non-standard kernel if any. If you are trying to build inside virtual machine, please mention it too.
(three more issue templates; file headers lost in the mirror)

@@ -8,6 +8,9 @@ labels: comp-documentation
 
 (you don't have to strictly follow this form)
 
+**Company or project name**
+Put your company name or project description here
+
 **Describe the issue**
 A clear and concise description of what's wrong in documentation.
 

@@ -9,6 +9,9 @@ assignees: ''
 
 (you don't have to strictly follow this form)
 
+**Company or project name**
+Put your company name or project description here
+
 **Describe the situation**
 What exactly works slower than expected?
 

@@ -9,6 +9,9 @@ assignees: ''
 
 (you don't have to strictly follow this form)
 
+**Company or project name**
+Put your company name or project description here
+
 **Describe the issue**
 A clear and concise description of what works not as it is supposed to.
 
.github/ISSUE_TEMPLATE/85_bug-report.md (4 changes, vendored)

@@ -11,6 +11,10 @@ assignees: ''
 
 > You have to provide the following information whenever possible.
 
+**Company or project name**
+
+> Put your company name or project description here
+
 **Describe what's wrong**
 
 > A clear and concise description of what works not as it is supposed to.
(one more issue template; file header lost in the mirror)

@@ -7,6 +7,10 @@ assignees: ''
 
 ---
 
+**Company or project name**
+
+Put your company name or project description here
+
 **I have tried the following solutions**: https://clickhouse.com/docs/en/faq/troubleshooting/#troubleshooting-installation-errors
 
 **Installation type**
.github/PULL_REQUEST_TEMPLATE.md (67 changes, vendored)

@@ -6,11 +6,13 @@ tests/ci/cancel_and_rerun_workflow_lambda/app.py
 -->
 ### Changelog category (leave one):
 - New Feature
+- Experimental Feature
 - Improvement
 - Performance Improvement
 - Backward Incompatible Change
 - Build/Testing/Packaging Improvement
 - Documentation (changelog entry is not required)
+- Critical Bug Fix (crash, LOGICAL_ERROR, data loss, RBAC)
 - Bug Fix (user-visible misbehavior in an official stable release)
 - CI Fix or Improvement (changelog entry is not required)
 - Not for changelog (changelog entry is not required)
@@ -41,47 +43,24 @@ At a minimum, the following information should be added (but add more as needed)
 
 > Information about CI checks: https://clickhouse.com/docs/en/development/continuous-integration/
 
-<details>
-<summary>CI Settings</summary>
-
-**NOTE:** If your merge the PR with modified CI you **MUST KNOW** what you are doing
-**NOTE:** Checked options will be applied if set before CI RunConfig/PrepareRunConfig step
-
-#### Run these jobs only (required builds will be added automatically):
-- [ ] <!---ci_include_integration--> Integration Tests
-- [ ] <!---ci_include_stateless--> Stateless tests
-- [ ] <!---ci_include_stateful--> Stateful tests
-- [ ] <!---ci_include_unit--> Unit tests
-- [ ] <!---ci_include_performance--> Performance tests
-- [ ] <!---ci_include_aarch64--> All with aarch64
-- [ ] <!---ci_include_asan--> All with ASAN
-- [ ] <!---ci_include_tsan--> All with TSAN
-- [ ] <!---ci_include_analyzer--> All with Analyzer
-- [ ] <!---ci_include_azure --> All with Azure
-- [ ] <!---ci_include_KEYWORD--> Add your option here
-
-#### Deny these jobs:
-- [ ] <!---ci_exclude_fast--> Fast test
-- [ ] <!---ci_exclude_integration--> Integration Tests
-- [ ] <!---ci_exclude_stateless--> Stateless tests
-- [ ] <!---ci_exclude_stateful--> Stateful tests
-- [ ] <!---ci_exclude_performance--> Performance tests
-- [ ] <!---ci_exclude_asan--> All with ASAN
-- [ ] <!---ci_exclude_tsan--> All with TSAN
-- [ ] <!---ci_exclude_msan--> All with MSAN
-- [ ] <!---ci_exclude_ubsan--> All with UBSAN
-- [ ] <!---ci_exclude_coverage--> All with Coverage
-- [ ] <!---ci_exclude_aarch64--> All with Aarch64
-
-#### Extra options:
-- [ ] <!---do_not_test--> do not test (only style check)
-- [ ] <!---no_merge_commit--> disable merge-commit (no merge from master before tests)
-- [ ] <!---no_ci_cache--> disable CI cache (job reuse)
-
-#### Only specified batches in multi-batch jobs:
-- [ ] <!---batch_0--> 1
-- [ ] <!---batch_1--> 2
-- [ ] <!---batch_2--> 3
-- [ ] <!---batch_3--> 4
-
-</details>
+#### CI Settings (Only check the boxes if you know what you are doing):
+- [ ] <!---ci_set_required--> Allow: All Required Checks
+- [ ] <!---ci_include_stateless--> Allow: Stateless tests
+- [ ] <!---ci_include_stateful--> Allow: Stateful tests
+- [ ] <!---ci_include_integration--> Allow: Integration Tests
+- [ ] <!---ci_include_performance--> Allow: Performance tests
+- [ ] <!---ci_set_builds--> Allow: All Builds
+- [ ] <!---batch_0_1--> Allow: batch 1, 2 for multi-batch jobs
+- [ ] <!---batch_2_3--> Allow: batch 3, 4, 5, 6 for multi-batch jobs
+---
+- [ ] <!---ci_exclude_style--> Exclude: Style check
+- [ ] <!---ci_exclude_fast--> Exclude: Fast test
+- [ ] <!---ci_exclude_asan--> Exclude: All with ASAN
+- [ ] <!---ci_exclude_tsan|msan|ubsan|coverage--> Exclude: All with TSAN, MSAN, UBSAN, Coverage
+- [ ] <!---ci_exclude_aarch64|release|debug--> Exclude: All with aarch64, release, debug
+---
+- [ ] <!---do_not_test--> Do not test
+- [ ] <!---woolen_wolfdog--> Woolen Wolfdog
+- [ ] <!---upload_all--> Upload binaries for special builds
+- [ ] <!---no_merge_commit--> Disable merge-commit
+- [ ] <!---no_ci_cache--> Disable CI cache
.github/workflows/backport_branches.yml (57 changes, vendored)

@@ -70,7 +70,7 @@ jobs:
     if: ${{ !failure() && !cancelled() }}
     uses: ./.github/workflows/reusable_test.yml
     with:
-      test_name: Compatibility check (amd64)
+      test_name: Compatibility check (release)
       runner_type: style-checker
       data: ${{ needs.RunConfig.outputs.data }}
   CompatibilityCheckAarch64:
@@ -159,33 +159,24 @@ jobs:
   ############################################################################################
   ##################################### BUILD REPORTER #######################################
   ############################################################################################
-  BuilderReport:
-    # run report check for failed builds to indicate the CI error
-    if: ${{ !cancelled() }}
-    needs:
-      - RunConfig
-      - BuilderDebAarch64
-      - BuilderDebAsan
-      - BuilderDebDebug
-      - BuilderDebRelease
-      - BuilderDebTsan
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: ClickHouse build check
-      runner_type: style-checker-aarch64
-      data: ${{ needs.RunConfig.outputs.data }}
-  BuilderSpecialReport:
-    # run report check for failed builds to indicate the CI error
-    if: ${{ !cancelled() }}
-    needs:
-      - RunConfig
-      - BuilderBinDarwin
-      - BuilderBinDarwinAarch64
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: ClickHouse special build check
-      runner_type: style-checker-aarch64
-      data: ${{ needs.RunConfig.outputs.data }}
+  Builds_Report:
+    # run report check for failed builds to indicate the CI error
+    if: ${{ !cancelled() && needs.RunConfig.result == 'success' && contains(fromJson(needs.RunConfig.outputs.data).jobs_data.jobs_to_do, 'Builds') }}
+    needs: [RunConfig, BuilderDebAarch64, BuilderDebAsan, BuilderDebDebug, BuilderDebRelease, BuilderDebTsan, BuilderBinDarwin, BuilderBinDarwinAarch64]
+    runs-on: [self-hosted, style-checker-aarch64]
+    steps:
+      - name: Check out repository code
+        uses: ClickHouse/checkout@v1
+      - name: Download reports
+        run: |
+          python3 "$GITHUB_WORKSPACE/tests/ci/ci.py" --infile ${{ toJson(needs.RunConfig.outputs.data) }} --pre --job-name Builds
+      - name: Builds report
+        run: |
+          cd "$GITHUB_WORKSPACE/tests/ci"
+          python3 ./build_report_check.py --reports package_release package_aarch64 package_asan package_tsan package_debug binary_darwin binary_darwin_aarch64
+      - name: Set status
+        run: |
+          python3 "$GITHUB_WORKSPACE/tests/ci/ci.py" --infile ${{ toJson(needs.RunConfig.outputs.data) }} --post --job-name Builds
   ############################################################################################
   #################################### INSTALL PACKAGES ######################################
   ############################################################################################
@@ -194,7 +185,7 @@ jobs:
     if: ${{ !failure() && !cancelled() }}
     uses: ./.github/workflows/reusable_test.yml
     with:
-      test_name: Install packages (amd64)
+      test_name: Install packages (release)
       runner_type: style-checker
       data: ${{ needs.RunConfig.outputs.data }}
       run_command: |
@@ -204,7 +195,7 @@ jobs:
     if: ${{ !failure() && !cancelled() }}
     uses: ./.github/workflows/reusable_test.yml
     with:
-      test_name: Install packages (arm64)
+      test_name: Install packages (aarch64)
       runner_type: style-checker-aarch64
       data: ${{ needs.RunConfig.outputs.data }}
       run_command: |
@@ -256,8 +247,7 @@ jobs:
   FinishCheck:
     if: ${{ !failure() && !cancelled() }}
     needs:
-      - BuilderReport
-      - BuilderSpecialReport
+      - Builds_Report
       - FunctionalStatelessTestAsan
       - FunctionalStatefulTestDebug
       - StressTestTsan
@@ -273,5 +263,8 @@ jobs:
       - name: Finish label
         run: |
          cd "$GITHUB_WORKSPACE/tests/ci"
-          python3 finish_check.py
+          # update mergeable check
+          python3 merge_pr.py --set-ci-status --wf-status ${{ contains(needs.*.result, 'failure') && 'failure' || 'success' }}
+          # update overall ci report
+          python3 finish_check.py --wf-status ${{ contains(needs.*.result, 'failure') && 'failure' || 'success' }}
           python3 merge_pr.py
.github/workflows/create_release.yml (29 changes, vendored, new file)

@@ -0,0 +1,29 @@
+name: CreateRelease
+
+concurrency:
+  group: release
+
+'on':
+  workflow_dispatch:
+    inputs:
+      sha:
+        description: 'The SHA hash of the commit from which to create the release'
+        required: true
+        type: string
+      type:
+        description: 'The type of release: "new" for a new release or "patch" for a patch release'
+        required: true
+        type: choice
+        options:
+          - new
+          - patch
+
+jobs:
+  Release:
+    runs-on: [self-hosted, style-checker-aarch64]
+    steps:
+      - name: Check out repository code
+        uses: ClickHouse/checkout@v1
+      - name: Print greeting
+        run: |
+          python3 ./tests/ci/release.py --commit ${{ inputs.sha }} --type ${{ inputs.type }} --dry-run
.github/workflows/master.yml (853 changes, vendored)

@@ -27,15 +27,16 @@ jobs:
         run: |
           cd "$GITHUB_WORKSPACE/tests/ci"
           python3 sync_pr.py --merge || :
-      - name: Python unit tests
-        run: |
-          cd "$GITHUB_WORKSPACE/tests/ci"
-          echo "Testing the main ci directory"
-          python3 -m unittest discover -s . -p 'test_*.py'
-          for dir in *_lambda/; do
-            echo "Testing $dir"
-            python3 -m unittest discover -s "$dir" -p 'test_*.py'
-          done
+      # Runs in MQ:
+      # - name: Python unit tests
+      #   run: |
+      #     cd "$GITHUB_WORKSPACE/tests/ci"
+      #     echo "Testing the main ci directory"
+      #     python3 -m unittest discover -s . -p 'test_*.py'
+      #     for dir in *_lambda/; do
+      #       echo "Testing $dir"
+      #       python3 -m unittest discover -s "$dir" -p 'test_*.py'
+      #     done
       - name: PrepareRunConfig
         id: runconfig
         run: |
@@ -53,13 +54,13 @@ jobs:
       - name: Re-create GH statuses for skipped jobs if any
         run: |
           python3 "$GITHUB_WORKSPACE/tests/ci/ci.py" --infile ${{ runner.temp }}/ci_run_data.json --update-gh-statuses
-  BuildDockers:
-    needs: [RunConfig]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_docker.yml
-    with:
-      data: ${{ needs.RunConfig.outputs.data }}
-  # Tested in MQ
+  # Runs in MQ:
+  # BuildDockers:
+  #   needs: [RunConfig]
+  #   if: ${{ !failure() && !cancelled() }}
+  #   uses: ./.github/workflows/reusable_docker.yml
+  #   with:
+  #     data: ${{ needs.RunConfig.outputs.data }}
   # StyleCheck:
   #   needs: [RunConfig, BuildDockers]
   #   if: ${{ !failure() && !cancelled() }}
@@ -70,262 +71,64 @@ jobs:
   #     data: ${{ needs.RunConfig.outputs.data }}
   #     run_command: |
   #       python3 style_check.py --no-push
-  CompatibilityCheckX86:
-    needs: [RunConfig, BuilderDebRelease]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: Compatibility check (amd64)
-      runner_type: style-checker
-      data: ${{ needs.RunConfig.outputs.data }}
-  CompatibilityCheckAarch64:
-    needs: [RunConfig, BuilderDebAarch64]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: Compatibility check (aarch64)
-      runner_type: style-checker
-      data: ${{ needs.RunConfig.outputs.data }}
-  #########################################################################################
-  #################################### ORDINARY BUILDS ####################################
-  #########################################################################################
-  # TODO: never skip builds!
-  BuilderDebRelease:
-    needs: [RunConfig, BuildDockers]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_build.yml
-    with:
-      build_name: package_release
-      checkout_depth: 0
-      data: ${{ needs.RunConfig.outputs.data }}
-  BuilderDebReleaseCoverage:
-    needs: [RunConfig, BuildDockers]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_build.yml
-    with:
-      build_name: package_release_coverage
-      checkout_depth: 0
-      data: ${{ needs.RunConfig.outputs.data }}
-  BuilderDebAarch64:
-    needs: [RunConfig, BuildDockers]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_build.yml
-    with:
-      build_name: package_aarch64
-      checkout_depth: 0
-      data: ${{ needs.RunConfig.outputs.data }}
-  BuilderBinRelease:
-    needs: [RunConfig, BuildDockers]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_build.yml
-    with:
-      build_name: binary_release
-      checkout_depth: 0 # otherwise we will have no info about contributors
-      data: ${{ needs.RunConfig.outputs.data }}
-  BuilderDebAsan:
-    needs: [RunConfig, BuildDockers]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_build.yml
-    with:
-      build_name: package_asan
-      data: ${{ needs.RunConfig.outputs.data }}
-  BuilderDebUBsan:
-    needs: [RunConfig, BuildDockers]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_build.yml
-    with:
-      build_name: package_ubsan
-      data: ${{ needs.RunConfig.outputs.data }}
-  BuilderDebTsan:
-    needs: [RunConfig, BuildDockers]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_build.yml
-    with:
-      build_name: package_tsan
-      data: ${{ needs.RunConfig.outputs.data }}
-  BuilderDebMsan:
-    needs: [RunConfig, BuildDockers]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_build.yml
-    with:
-      build_name: package_msan
-      data: ${{ needs.RunConfig.outputs.data }}
-  BuilderDebDebug:
-    needs: [RunConfig, BuildDockers]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_build.yml
-    with:
-      build_name: package_debug
-      data: ${{ needs.RunConfig.outputs.data }}
-  ##########################################################################################
-  ##################################### SPECIAL BUILDS #####################################
-  ##########################################################################################
-  BuilderBinClangTidy:
-    needs: [RunConfig, BuilderDebRelease]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_build.yml
-    with:
-      build_name: binary_tidy
-      data: ${{ needs.RunConfig.outputs.data }}
-  BuilderBinDarwin:
-    needs: [RunConfig, BuilderDebRelease]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_build.yml
-    with:
-      build_name: binary_darwin
-      data: ${{ needs.RunConfig.outputs.data }}
-      checkout_depth: 0
-  BuilderBinAarch64:
-    needs: [RunConfig, BuilderDebRelease]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_build.yml
-    with:
-      build_name: binary_aarch64
-      data: ${{ needs.RunConfig.outputs.data }}
-      checkout_depth: 0
-  BuilderBinFreeBSD:
-    needs: [RunConfig, BuilderDebRelease]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_build.yml
-    with:
-      build_name: binary_freebsd
-      data: ${{ needs.RunConfig.outputs.data }}
-      checkout_depth: 0
-  BuilderBinDarwinAarch64:
-    needs: [RunConfig, BuilderDebRelease]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_build.yml
-    with:
-      build_name: binary_darwin_aarch64
-      data: ${{ needs.RunConfig.outputs.data }}
-      checkout_depth: 0
-  BuilderBinPPC64:
-    needs: [RunConfig, BuilderDebRelease]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_build.yml
-    with:
-      build_name: binary_ppc64le
-      data: ${{ needs.RunConfig.outputs.data }}
-      checkout_depth: 0
-  BuilderBinAmd64Compat:
-    needs: [RunConfig, BuilderDebRelease]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_build.yml
-    with:
-      build_name: binary_amd64_compat
-      data: ${{ needs.RunConfig.outputs.data }}
-      checkout_depth: 0
-  BuilderBinAmd64Musl:
-    needs: [RunConfig, BuilderDebRelease]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_build.yml
-    with:
-      build_name: binary_amd64_musl
-      data: ${{ needs.RunConfig.outputs.data }}
-      checkout_depth: 0
-  BuilderBinAarch64V80Compat:
-    needs: [RunConfig, BuilderDebRelease]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_build.yml
-    with:
-      build_name: binary_aarch64_v80compat
-      data: ${{ needs.RunConfig.outputs.data }}
-      checkout_depth: 0
-  BuilderBinRISCV64:
-    needs: [RunConfig, BuilderDebRelease]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_build.yml
-    with:
-      build_name: binary_riscv64
-      data: ${{ needs.RunConfig.outputs.data }}
-      checkout_depth: 0
-  BuilderBinS390X:
-    needs: [RunConfig, BuilderDebRelease]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_build.yml
-    with:
-      build_name: binary_s390x
-      data: ${{ needs.RunConfig.outputs.data }}
-      checkout_depth: 0
-  BuilderBinLoongarch64:
-    needs: [RunConfig, BuilderDebRelease]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_build.yml
-    with:
-      build_name: binary_loongarch64
-      data: ${{ needs.RunConfig.outputs.data }}
-      checkout_depth: 0
-  ############################################################################################
-  ##################################### Docker images #######################################
-  ############################################################################################
-  DockerServerImage:
-    needs: [RunConfig, BuilderDebRelease, BuilderDebAarch64]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: Docker server image
-      runner_type: style-checker
-      data: ${{ needs.RunConfig.outputs.data }}
-  DockerKeeperImage:
-    needs: [RunConfig, BuilderDebRelease, BuilderDebAarch64]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: Docker keeper image
-      runner_type: style-checker
-      data: ${{ needs.RunConfig.outputs.data }}
-  ############################################################################################
-  ##################################### BUILD REPORTER #######################################
-  ############################################################################################
-  BuilderReport:
-    # run report check for failed builds to indicate the CI error
-    if: ${{ !cancelled() }}
-    needs:
-      - RunConfig
-      - BuilderDebAarch64
-      - BuilderDebAsan
-      - BuilderDebDebug
-      - BuilderDebMsan
-      - BuilderDebRelease
-      - BuilderDebTsan
-      - BuilderDebUBsan
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: ClickHouse build check
-      runner_type: style-checker-aarch64
-      data: ${{ needs.RunConfig.outputs.data }}
-  BuilderSpecialReport:
-    # run report check for failed builds to indicate the CI error
-    if: ${{ !cancelled() }}
-    needs:
-      - RunConfig
-      - BuilderBinAarch64
-      - BuilderBinDarwin
-      - BuilderBinDarwinAarch64
-      - BuilderBinFreeBSD
-      - BuilderBinPPC64
-      - BuilderBinRISCV64
-      - BuilderBinS390X
-      - BuilderBinLoongarch64
-      - BuilderBinAmd64Compat
-      - BuilderBinAarch64V80Compat
-      - BuilderBinClangTidy
-      - BuilderBinAmd64Musl
-      - BuilderDebReleaseCoverage
-      - BuilderBinRelease
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: ClickHouse special build check
-      runner_type: style-checker-aarch64
-      data: ${{ needs.RunConfig.outputs.data }}
+
+  ################################# Main stages #################################
+  # for main CI chain
+  #
+  Builds_1:
+    needs: [RunConfig]
+    if: ${{ !failure() && !cancelled() && contains(fromJson(needs.RunConfig.outputs.data).stages_data.stages_to_do, 'Builds_1') }}
+    # using callable wf (reusable_stage.yml) allows grouping all nested jobs under a tab
+    uses: ./.github/workflows/reusable_build_stage.yml
+    with:
+      stage: Builds_1
+      data: ${{ needs.RunConfig.outputs.data }}
+  Tests_1:
+    needs: [RunConfig, Builds_1]
+    if: ${{ !failure() && !cancelled() && contains(fromJson(needs.RunConfig.outputs.data).stages_data.stages_to_do, 'Tests_1') }}
+    uses: ./.github/workflows/reusable_test_stage.yml
+    with:
+      stage: Tests_1
+      data: ${{ needs.RunConfig.outputs.data }}
+  Builds_2:
+    needs: [RunConfig, Builds_1]
+    if: ${{ !failure() && !cancelled() && contains(fromJson(needs.RunConfig.outputs.data).stages_data.stages_to_do, 'Builds_2') }}
+    uses: ./.github/workflows/reusable_build_stage.yml
+    with:
+      stage: Builds_2
+      data: ${{ needs.RunConfig.outputs.data }}
+  Tests_2:
+    needs: [RunConfig, Builds_2]
+    if: ${{ !failure() && !cancelled() && contains(fromJson(needs.RunConfig.outputs.data).stages_data.stages_to_do, 'Tests_2') }}
+    uses: ./.github/workflows/reusable_test_stage.yml
+    with:
+      stage: Tests_2
+      data: ${{ needs.RunConfig.outputs.data }}
+  Tests_3:
+    # Test_3 should not wait for Test_1/Test_2 and should not be blocked by them on master branch since all jobs need to run there.
+    needs: [RunConfig, Builds_1]
+    if: ${{ !failure() && !cancelled() && contains(fromJson(needs.RunConfig.outputs.data).stages_data.stages_to_do, 'Tests_3') }}
+    uses: ./.github/workflows/reusable_test_stage.yml
+    with:
+      stage: Tests_3
+      data: ${{ needs.RunConfig.outputs.data }}
+
+  ################################# Reports #################################
+  # Reports should run even if Builds_1/2 fail - run them separately, not in Tests_1/2/3
+  Builds_Report:
+    # run report check for failed builds to indicate the CI error
+    if: ${{ !cancelled() && needs.RunConfig.result == 'success' && contains(fromJson(needs.RunConfig.outputs.data).jobs_data.jobs_to_do, 'Builds') }}
+    needs: [RunConfig, Builds_1, Builds_2]
+    uses: ./.github/workflows/reusable_test.yml
+    with:
+      test_name: Builds
+      runner_type: style-checker-aarch64
+      data: ${{ needs.RunConfig.outputs.data }}
 
   MarkReleaseReady:
     if: ${{ !failure() && !cancelled() }}
-    needs:
-      - BuilderBinDarwin
-      - BuilderBinDarwinAarch64
-      - BuilderDebRelease
-      - BuilderDebAarch64
-    runs-on: [self-hosted, style-checker]
+    needs: [RunConfig, Builds_1, Builds_2]
+    runs-on: [self-hosted, style-checker-aarch64]
     steps:
       - name: Debug
         run: |
@@ -338,7 +141,7 @@ jobs:
           no both ${{ !(contains(needs.*.result, 'skipped') || contains(needs.*.result, 'failure')) }}
           EOF
       - name: Not ready
-        # fail the job to be able restart it
+        # fail the job to be able to restart it
        if: ${{ contains(needs.*.result, 'skipped') || contains(needs.*.result, 'failure') }}
        run: exit 1
       - name: Check out repository code
@@ -349,545 +152,15 @@ jobs:
         run: |
           cd "$GITHUB_WORKSPACE/tests/ci"
           python3 mark_release_ready.py
-  ############################################################################################
-  #################################### INSTALL PACKAGES ######################################
-  ############################################################################################
-  InstallPackagesTestRelease:
-    needs: [RunConfig, BuilderDebRelease]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: Install packages (amd64)
-      runner_type: style-checker
-      data: ${{ needs.RunConfig.outputs.data }}
-      run_command: |
-        python3 install_check.py "$CHECK_NAME"
-  InstallPackagesTestAarch64:
-    needs: [RunConfig, BuilderDebAarch64]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: Install packages (arm64)
-      runner_type: style-checker-aarch64
-      data: ${{ needs.RunConfig.outputs.data }}
-      run_command: |
-        python3 install_check.py "$CHECK_NAME"
-  ##############################################################################################
-  ########################### FUNCTIONAl STATELESS TESTS #######################################
-  ##############################################################################################
-  FunctionalStatelessTestRelease:
-    needs: [RunConfig, BuilderDebRelease]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: Stateless tests (release)
-      runner_type: func-tester
-      data: ${{ needs.RunConfig.outputs.data }}
-  FunctionalStatelessTestReleaseAnalyzerS3Replicated:
-    needs: [RunConfig, BuilderDebRelease]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: Stateless tests (release, old analyzer, s3, DatabaseReplicated)
-      runner_type: func-tester
-      data: ${{ needs.RunConfig.outputs.data }}
-  FunctionalStatelessTestS3Debug:
-    needs: [RunConfig, BuilderDebDebug]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: Stateless tests (debug, s3 storage)
-      runner_type: func-tester
-      data: ${{ needs.RunConfig.outputs.data }}
-  FunctionalStatelessTestS3Tsan:
-    needs: [RunConfig, BuilderDebTsan]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: Stateless tests (tsan, s3 storage)
-      runner_type: func-tester
-      data: ${{ needs.RunConfig.outputs.data }}
-  FunctionalStatelessTestAarch64:
-    needs: [RunConfig, BuilderDebAarch64]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: Stateless tests (aarch64)
-      runner_type: func-tester-aarch64
-      data: ${{ needs.RunConfig.outputs.data }}
-  FunctionalStatelessTestAsan:
-    needs: [RunConfig, BuilderDebAsan]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: Stateless tests (asan)
-      runner_type: func-tester
-      data: ${{ needs.RunConfig.outputs.data }}
-
-  FunctionalStatelessTestTsan:
-    needs: [RunConfig, BuilderDebTsan]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: Stateless tests (tsan)
-      runner_type: func-tester
-      data: ${{ needs.RunConfig.outputs.data }}
-  FunctionalStatelessTestMsan:
-    needs: [RunConfig, BuilderDebMsan]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: Stateless tests (msan)
-      runner_type: func-tester
-      data: ${{ needs.RunConfig.outputs.data }}
-  FunctionalStatelessTestUBsan:
-    needs: [RunConfig, BuilderDebUBsan]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: Stateless tests (ubsan)
-      runner_type: func-tester
-      data: ${{ needs.RunConfig.outputs.data }}
-  FunctionalStatelessTestDebug:
-    needs: [RunConfig, BuilderDebDebug]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: Stateless tests (debug)
-      runner_type: func-tester
-      data: ${{ needs.RunConfig.outputs.data }}
-  FunctionalStatelessTestAsanAzure:
-    needs: [RunConfig, BuilderDebAsan]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: Stateless tests (azure, asan)
-      runner_type: func-tester
-      data: ${{ needs.RunConfig.outputs.data }}
-  ##############################################################################################
-  ############################ FUNCTIONAl STATEFUL TESTS #######################################
-  ##############################################################################################
-  FunctionalStatefulTestRelease:
-    needs: [RunConfig, BuilderDebRelease]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: Stateful tests (release)
-      runner_type: func-tester
-      data: ${{ needs.RunConfig.outputs.data }}
-  FunctionalStatefulTestAarch64:
-    needs: [RunConfig, BuilderDebAarch64]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: Stateful tests (aarch64)
-      runner_type: func-tester-aarch64
-      data: ${{ needs.RunConfig.outputs.data }}
-  FunctionalStatefulTestAsan:
-    needs: [RunConfig, BuilderDebAsan]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: Stateful tests (asan)
-      runner_type: func-tester
-      data: ${{ needs.RunConfig.outputs.data }}
-  FunctionalStatefulTestTsan:
-    needs: [RunConfig, BuilderDebTsan]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: Stateful tests (tsan)
-      runner_type: func-tester
-      data: ${{ needs.RunConfig.outputs.data }}
-  FunctionalStatefulTestMsan:
-    needs: [RunConfig, BuilderDebMsan]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: Stateful tests (msan)
-      runner_type: func-tester
-      data: ${{ needs.RunConfig.outputs.data }}
-  FunctionalStatefulTestUBsan:
-    needs: [RunConfig, BuilderDebUBsan]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: Stateful tests (ubsan)
-      runner_type: func-tester
-      data: ${{ needs.RunConfig.outputs.data }}
-  FunctionalStatefulTestDebug:
-    needs: [RunConfig, BuilderDebDebug]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: Stateful tests (debug)
-      runner_type: func-tester
-      data: ${{ needs.RunConfig.outputs.data }}
-  # Parallel replicas
-  FunctionalStatefulTestDebugParallelReplicas:
-    needs: [RunConfig, BuilderDebDebug]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: Stateful tests (debug, ParallelReplicas)
-      runner_type: func-tester
-      data: ${{ needs.RunConfig.outputs.data }}
-  FunctionalStatefulTestUBsanParallelReplicas:
-    needs: [RunConfig, BuilderDebUBsan]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: Stateful tests (ubsan, ParallelReplicas)
-      runner_type: func-tester
-      data: ${{ needs.RunConfig.outputs.data }}
-  FunctionalStatefulTestMsanParallelReplicas:
-    needs: [RunConfig, BuilderDebMsan]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: Stateful tests (msan, ParallelReplicas)
-      runner_type: func-tester
-      data: ${{ needs.RunConfig.outputs.data }}
-  FunctionalStatefulTestTsanParallelReplicas:
-    needs: [RunConfig, BuilderDebTsan]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: Stateful tests (tsan, ParallelReplicas)
-      runner_type: func-tester
-      data: ${{ needs.RunConfig.outputs.data }}
-  FunctionalStatefulTestAsanParallelReplicas:
-    needs: [RunConfig, BuilderDebAsan]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: Stateful tests (asan, ParallelReplicas)
-      runner_type: func-tester
-      data: ${{ needs.RunConfig.outputs.data }}
-  FunctionalStatefulTestReleaseParallelReplicas:
-    needs: [RunConfig, BuilderDebRelease]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: Stateful tests (release, ParallelReplicas)
-      runner_type: func-tester
-      data: ${{ needs.RunConfig.outputs.data }}
-  ##############################################################################################
-  ########################### ClickBench #######################################################
-  ##############################################################################################
-  ClickBenchAMD64:
-    needs: [RunConfig, BuilderDebRelease]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: ClickBench (amd64)
-      runner_type: func-tester
-      data: ${{ needs.RunConfig.outputs.data }}
-      run_command: |
-        python3 clickbench.py "$CHECK_NAME"
-  ClickBenchAarch64:
-    needs: [RunConfig, BuilderDebAarch64]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: ClickBench (aarch64)
-      runner_type: func-tester-aarch64
-      data: ${{ needs.RunConfig.outputs.data }}
-      run_command: |
-        python3 clickbench.py "$CHECK_NAME"
-  ##############################################################################################
-  ######################################### STRESS TESTS #######################################
-  ##############################################################################################
-  StressTestAsan:
-    needs: [RunConfig, BuilderDebAsan]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: Stress test (asan)
-      runner_type: stress-tester
-      data: ${{ needs.RunConfig.outputs.data }}
-  StressTestTsan:
-    needs: [RunConfig, BuilderDebTsan]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: Stress test (tsan)
-      runner_type: stress-tester
-      data: ${{ needs.RunConfig.outputs.data }}
-  StressTestTsanAzure:
-    needs: [RunConfig, BuilderDebTsan]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: Stress test (azure, tsan)
-      runner_type: stress-tester
-      data: ${{ needs.RunConfig.outputs.data }}
-  StressTestMsan:
-    needs: [RunConfig, BuilderDebMsan]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: Stress test (msan)
-      runner_type: stress-tester
-      data: ${{ needs.RunConfig.outputs.data }}
-  StressTestUBsan:
-    needs: [RunConfig, BuilderDebUBsan]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: Stress test (ubsan)
-      runner_type: stress-tester
-      data: ${{ needs.RunConfig.outputs.data }}
-  StressTestDebug:
-    needs: [RunConfig, BuilderDebDebug]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: Stress test (debug)
-      runner_type: stress-tester
-      data: ${{ needs.RunConfig.outputs.data }}
-  #############################################################################################
-  ############################# INTEGRATION TESTS #############################################
-  #############################################################################################
-  IntegrationTestsAsan:
-    needs: [RunConfig, BuilderDebAsan]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: Integration tests (asan)
-      runner_type: stress-tester
-      data: ${{ needs.RunConfig.outputs.data }}
-  IntegrationTestsAnalyzerAsan:
-    needs: [RunConfig, BuilderDebAsan]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: Integration tests (asan, old analyzer)
-      runner_type: stress-tester
-      data: ${{ needs.RunConfig.outputs.data }}
-  IntegrationTestsTsan:
-    needs: [RunConfig, BuilderDebTsan]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: Integration tests (tsan)
-      runner_type: stress-tester
-      data: ${{ needs.RunConfig.outputs.data }}
-  IntegrationTestsRelease:
-    needs: [RunConfig, BuilderDebRelease]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: Integration tests (release)
-      runner_type: stress-tester
-      data: ${{ needs.RunConfig.outputs.data }}
-  ##############################################################################################
-  ##################################### AST FUZZERS ############################################
-  ##############################################################################################
-  ASTFuzzerTestAsan:
-    needs: [RunConfig, BuilderDebAsan]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: AST fuzzer (asan)
-      runner_type: fuzzer-unit-tester
-      data: ${{ needs.RunConfig.outputs.data }}
-  ASTFuzzerTestTsan:
-    needs: [RunConfig, BuilderDebTsan]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: AST fuzzer (tsan)
-      runner_type: fuzzer-unit-tester
-      data: ${{ needs.RunConfig.outputs.data }}
-  ASTFuzzerTestUBSan:
-    needs: [RunConfig, BuilderDebUBsan]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: AST fuzzer (ubsan)
-      runner_type: fuzzer-unit-tester
-      data: ${{ needs.RunConfig.outputs.data }}
-  ASTFuzzerTestMSan:
-    needs: [RunConfig, BuilderDebMsan]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: AST fuzzer (msan)
-      runner_type: fuzzer-unit-tester
-      data: ${{ needs.RunConfig.outputs.data }}
-  ASTFuzzerTestDebug:
-    needs: [RunConfig, BuilderDebDebug]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: AST fuzzer (debug)
-      runner_type: fuzzer-unit-tester
-      data: ${{ needs.RunConfig.outputs.data }}
-  #############################################################################################
-  #################################### UNIT TESTS #############################################
-  #############################################################################################
-  UnitTestsAsan:
-    needs: [RunConfig, BuilderDebAsan]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: Unit tests (asan)
-      runner_type: fuzzer-unit-tester
-      data: ${{ needs.RunConfig.outputs.data }}
-  UnitTestsReleaseClang:
-    needs: [RunConfig, BuilderBinRelease]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: Unit tests (release)
-      runner_type: fuzzer-unit-tester
-      data: ${{ needs.RunConfig.outputs.data }}
-  UnitTestsTsan:
-    needs: [RunConfig, BuilderDebTsan]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: Unit tests (tsan)
-      runner_type: fuzzer-unit-tester
-      data: ${{ needs.RunConfig.outputs.data }}
-  UnitTestsMsan:
-    needs: [RunConfig, BuilderDebMsan]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: Unit tests (msan)
-      runner_type: fuzzer-unit-tester
-      data: ${{ needs.RunConfig.outputs.data }}
-  UnitTestsUBsan:
-    needs: [RunConfig, BuilderDebUBsan]
-    if: ${{ !failure() && !cancelled() }}
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: Unit tests (ubsan)
-      runner_type: fuzzer-unit-tester
-      data: ${{ needs.RunConfig.outputs.data }}
-  #############################################################################################
|
|
||||||
#################################### PERFORMANCE TESTS ######################################
|
|
||||||
#############################################################################################
|
|
||||||
PerformanceComparisonX86:
|
|
||||||
needs: [RunConfig, BuilderDebRelease]
|
|
||||||
if: ${{ !failure() && !cancelled() }}
|
|
||||||
uses: ./.github/workflows/reusable_test.yml
|
|
||||||
with:
|
|
||||||
test_name: Performance Comparison
|
|
||||||
runner_type: stress-tester
|
|
||||||
data: ${{ needs.RunConfig.outputs.data }}
|
|
||||||
PerformanceComparisonAarch:
|
|
||||||
needs: [RunConfig, BuilderDebAarch64]
|
|
||||||
if: ${{ !failure() && !cancelled() }}
|
|
||||||
uses: ./.github/workflows/reusable_test.yml
|
|
||||||
with:
|
|
||||||
test_name: Performance Comparison Aarch64
|
|
||||||
runner_type: func-tester-aarch64
|
|
||||||
data: ${{ needs.RunConfig.outputs.data }}
|
|
||||||
##############################################################################################
|
|
||||||
############################ SQLLOGIC TEST ###################################################
|
|
||||||
##############################################################################################
|
|
||||||
SQLLogicTestRelease:
|
|
||||||
needs: [RunConfig, BuilderDebRelease]
|
|
||||||
if: ${{ !failure() && !cancelled() }}
|
|
||||||
uses: ./.github/workflows/reusable_test.yml
|
|
||||||
with:
|
|
||||||
test_name: Sqllogic test (release)
|
|
||||||
runner_type: func-tester
|
|
||||||
data: ${{ needs.RunConfig.outputs.data }}
|
|
||||||
##############################################################################################
|
|
||||||
##################################### SQL TEST ###############################################
|
|
||||||
##############################################################################################
|
|
||||||
SQLTest:
|
|
||||||
needs: [RunConfig, BuilderDebRelease]
|
|
||||||
if: ${{ !failure() && !cancelled() }}
|
|
||||||
uses: ./.github/workflows/reusable_test.yml
|
|
||||||
with:
|
|
||||||
test_name: SQLTest
|
|
||||||
runner_type: fuzzer-unit-tester
|
|
||||||
data: ${{ needs.RunConfig.outputs.data }}
|
|
||||||
##############################################################################################
|
|
||||||
###################################### SQLANCER FUZZERS ######################################
|
|
||||||
##############################################################################################
|
|
||||||
SQLancerTestRelease:
|
|
||||||
needs: [RunConfig, BuilderDebRelease]
|
|
||||||
if: ${{ !failure() && !cancelled() }}
|
|
||||||
uses: ./.github/workflows/reusable_test.yml
|
|
||||||
with:
|
|
||||||
test_name: SQLancer (release)
|
|
||||||
runner_type: fuzzer-unit-tester
|
|
||||||
data: ${{ needs.RunConfig.outputs.data }}
|
|
||||||
SQLancerTestDebug:
|
|
||||||
needs: [RunConfig, BuilderDebDebug]
|
|
||||||
if: ${{ !failure() && !cancelled() }}
|
|
||||||
uses: ./.github/workflows/reusable_test.yml
|
|
||||||
with:
|
|
||||||
test_name: SQLancer (debug)
|
|
||||||
runner_type: fuzzer-unit-tester
|
|
||||||
data: ${{ needs.RunConfig.outputs.data }}
|
|
||||||
FinishCheck:
|
FinishCheck:
|
||||||
if: ${{ !failure() && !cancelled() }}
|
if: ${{ !cancelled() }}
|
||||||
needs:
|
needs: [RunConfig, Builds_1, Builds_2, Builds_Report, Tests_1, Tests_2, Tests_3]
|
||||||
- MarkReleaseReady
|
runs-on: [self-hosted, style-checker-aarch64]
|
||||||
- FunctionalStatelessTestDebug
|
|
||||||
- FunctionalStatelessTestRelease
|
|
||||||
- FunctionalStatelessTestReleaseAnalyzerS3Replicated
|
|
||||||
- FunctionalStatelessTestAarch64
|
|
||||||
- FunctionalStatelessTestAsan
|
|
||||||
- FunctionalStatelessTestTsan
|
|
||||||
- FunctionalStatelessTestMsan
|
|
||||||
- FunctionalStatelessTestUBsan
|
|
||||||
- FunctionalStatelessTestS3Debug
|
|
||||||
- FunctionalStatelessTestS3Tsan
|
|
||||||
- FunctionalStatefulTestDebug
|
|
||||||
- FunctionalStatefulTestRelease
|
|
||||||
- FunctionalStatefulTestAarch64
|
|
||||||
- FunctionalStatefulTestAsan
|
|
||||||
- FunctionalStatefulTestTsan
|
|
||||||
- FunctionalStatefulTestMsan
|
|
||||||
- FunctionalStatefulTestUBsan
|
|
||||||
- FunctionalStatefulTestDebugParallelReplicas
|
|
||||||
- FunctionalStatefulTestUBsanParallelReplicas
|
|
||||||
- FunctionalStatefulTestMsanParallelReplicas
|
|
||||||
- FunctionalStatefulTestTsanParallelReplicas
|
|
||||||
- FunctionalStatefulTestAsanParallelReplicas
|
|
||||||
- FunctionalStatefulTestReleaseParallelReplicas
|
|
||||||
- StressTestDebug
|
|
||||||
- StressTestAsan
|
|
||||||
- StressTestTsan
|
|
||||||
- StressTestMsan
|
|
||||||
- StressTestUBsan
|
|
||||||
- IntegrationTestsAsan
|
|
||||||
- IntegrationTestsAnalyzerAsan
|
|
||||||
- IntegrationTestsTsan
|
|
||||||
- IntegrationTestsRelease
|
|
||||||
- PerformanceComparisonX86
|
|
||||||
- PerformanceComparisonAarch
|
|
||||||
- CompatibilityCheckX86
|
|
||||||
- CompatibilityCheckAarch64
|
|
||||||
- ASTFuzzerTestDebug
|
|
||||||
- ASTFuzzerTestAsan
|
|
||||||
- ASTFuzzerTestTsan
|
|
||||||
- ASTFuzzerTestMSan
|
|
||||||
- ASTFuzzerTestUBSan
|
|
||||||
- UnitTestsAsan
|
|
||||||
- UnitTestsTsan
|
|
||||||
- UnitTestsMsan
|
|
||||||
- UnitTestsUBsan
|
|
||||||
- UnitTestsReleaseClang
|
|
||||||
- SQLancerTestRelease
|
|
||||||
- SQLancerTestDebug
|
|
||||||
- SQLLogicTestRelease
|
|
||||||
- SQLTest
|
|
||||||
runs-on: [self-hosted, style-checker]
|
|
||||||
steps:
|
steps:
|
||||||
- name: Check out repository code
|
- name: Check out repository code
|
||||||
uses: ClickHouse/checkout@v1
|
uses: ClickHouse/checkout@v1
|
||||||
with:
|
|
||||||
clear-repository: true
|
|
||||||
- name: Finish label
|
- name: Finish label
|
||||||
run: |
|
run: |
|
||||||
cd "$GITHUB_WORKSPACE/tests/ci"
|
cd "$GITHUB_WORKSPACE/tests/ci"
|
||||||
python3 finish_check.py
|
python3 finish_check.py --wf-status ${{ contains(needs.*.result, 'failure') && 'failure' || 'success' }}
|
||||||
.github/workflows/merge_queue.yml
@@ -20,7 +20,7 @@ jobs:
         uses: ClickHouse/checkout@v1
         with:
           clear-repository: true # to ensure correct digests
-          fetch-depth: 0 # to get version
+          fetch-depth: 0 # to get a version
           filter: tree:0
       - name: Cancel PR workflow
         run: |
@@ -60,7 +60,7 @@ jobs:
     uses: ./.github/workflows/reusable_test.yml
     with:
       test_name: Style check
-      runner_type: style-checker
+      runner_type: style-checker-aarch64
      run_command: |
        python3 style_check.py
      data: ${{ needs.RunConfig.outputs.data }}
@@ -80,20 +80,31 @@ jobs:
      run_command: |
        python3 fast_test_check.py

-  ################################# Stage Final #################################
-  #
-  FinishCheck:
-    if: ${{ !failure() && !cancelled() }}
-    needs: [RunConfig, BuildDockers, StyleCheck, FastTest]
-    runs-on: [self-hosted, style-checker]
+  Builds_1:
+    needs: [RunConfig, BuildDockers]
+    if: ${{ !failure() && !cancelled() && contains(fromJson(needs.RunConfig.outputs.data).stages_data.stages_to_do, 'Builds_1') }}
+    # using callable wf (reusable_stage.yml) allows grouping all nested jobs under a tab
+    uses: ./.github/workflows/reusable_build_stage.yml
+    with:
+      stage: Builds_1
+      data: ${{ needs.RunConfig.outputs.data }}
+  Tests_1:
+    needs: [RunConfig, Builds_1]
+    if: ${{ !failure() && !cancelled() && contains(fromJson(needs.RunConfig.outputs.data).stages_data.stages_to_do, 'Tests_1') }}
+    uses: ./.github/workflows/reusable_test_stage.yml
+    with:
+      stage: Tests_1
+      data: ${{ needs.RunConfig.outputs.data }}
+
+  CheckReadyForMerge:
+    if: ${{ !cancelled() && needs.StyleCheck.result == 'success' }}
+    # Test_2 or Test_3 must not have jobs required for Mergeable check
+    needs: [RunConfig, BuildDockers, StyleCheck, FastTest, Builds_1, Tests_1]
+    runs-on: [self-hosted, style-checker-aarch64]
     steps:
       - name: Check out repository code
         uses: ClickHouse/checkout@v1
-      - name: Check sync status
+      - name: Check and set merge status
         run: |
           cd "$GITHUB_WORKSPACE/tests/ci"
-          python3 sync_pr.py --status
-      - name: Finish label
-        run: |
-          cd "$GITHUB_WORKSPACE/tests/ci"
-          python3 finish_check.py ${{ (contains(needs.*.result, 'failure') && github.event_name == 'merge_group') && '--pipeline-failure' || '' }}
+          python3 merge_pr.py --set-ci-status --wf-status ${{ contains(needs.*.result, 'failure') && 'failure' || 'success' }}
.github/workflows/pull_request.yml
@@ -31,8 +31,14 @@ jobs:
         uses: ClickHouse/checkout@v1
         with:
           clear-repository: true # to ensure correct digests
-          fetch-depth: 0 # to get version
+          fetch-depth: 0 # to get a version
           filter: tree:0
+      - name: Cancel previous Sync PR workflow
+        run: |
+          python3 "$GITHUB_WORKSPACE/tests/ci/ci.py" --cancel-previous-run
+      - name: Set pending Sync status
+        run: |
+          python3 "$GITHUB_WORKSPACE/tests/ci/ci.py" --set-pending-status
       - name: Labels check
         run: |
           cd "$GITHUB_WORKSPACE/tests/ci"
@@ -75,7 +81,7 @@ jobs:
    uses: ./.github/workflows/reusable_test.yml
    with:
      test_name: Style check
-      runner_type: style-checker
+      runner_type: style-checker-aarch64
      run_command: |
        python3 style_check.py
      data: ${{ needs.RunConfig.outputs.data }}
@@ -95,13 +101,13 @@ jobs:
      run_command: |
        python3 fast_test_check.py

-  ################################# Main statges #################################
+  ################################# Main stages #################################
  # for main CI chain
  #
  Builds_1:
    needs: [RunConfig, StyleCheck, FastTest]
    if: ${{ !failure() && !cancelled() && contains(fromJson(needs.RunConfig.outputs.data).stages_data.stages_to_do, 'Builds_1') }}
-    # using callable wf (reusable_stage.yml) allows to group all nested jobs under a tab
+    # using callable wf (reusable_stage.yml) allows grouping all nested jobs under a tab
    uses: ./.github/workflows/reusable_build_stage.yml
    with:
      stage: Builds_1
@@ -109,7 +115,6 @@ jobs:
  Tests_1:
    needs: [RunConfig, Builds_1]
    if: ${{ !failure() && !cancelled() && contains(fromJson(needs.RunConfig.outputs.data).stages_data.stages_to_do, 'Tests_1') }}
-    # using callable wf (reusable_stage.yml) allows to group all nested jobs under a tab
    uses: ./.github/workflows/reusable_test_stage.yml
    with:
      stage: Tests_1
@@ -117,22 +122,20 @@ jobs:
  Builds_2:
    needs: [RunConfig, Builds_1]
    if: ${{ !failure() && !cancelled() && contains(fromJson(needs.RunConfig.outputs.data).stages_data.stages_to_do, 'Builds_2') }}
-    # using callable wf (reusable_stage.yml) allows to group all nested jobs under a tab
    uses: ./.github/workflows/reusable_build_stage.yml
    with:
      stage: Builds_2
      data: ${{ needs.RunConfig.outputs.data }}
+  # stage for running non-required checks without being blocked by required checks (Test_1) if corresponding settings is selected
  Tests_2:
-    needs: [RunConfig, Builds_2]
+    needs: [RunConfig, Builds_1]
    if: ${{ !failure() && !cancelled() && contains(fromJson(needs.RunConfig.outputs.data).stages_data.stages_to_do, 'Tests_2') }}
-    # using callable wf (reusable_stage.yml) allows to group all nested jobs under a tab
    uses: ./.github/workflows/reusable_test_stage.yml
    with:
      stage: Tests_2
      data: ${{ needs.RunConfig.outputs.data }}
-  # stage for jobs that do not prohibit merge
  Tests_3:
-    needs: [RunConfig, Tests_1, Tests_2]
+    needs: [RunConfig, Builds_1, Tests_1]
    if: ${{ !failure() && !cancelled() && contains(fromJson(needs.RunConfig.outputs.data).stages_data.stages_to_do, 'Tests_3') }}
    uses: ./.github/workflows/reusable_test_stage.yml
    with:
@@ -140,29 +143,21 @@ jobs:
      data: ${{ needs.RunConfig.outputs.data }}

  ################################# Reports #################################
-  # Reports should by run even if Builds_1/2 fail, so put them separately in wf (not in Tests_1/2)
-  Builds_1_Report:
+  # Reports should run even if Builds_1/2 fail - run them separately (not in Tests_1/2/3)
+  Builds_Report:
    # run report check for failed builds to indicate the CI error
-    if: ${{ !cancelled() && needs.StyleCheck.result == 'success' && contains(fromJson(needs.RunConfig.outputs.data).jobs_data.jobs_to_do, 'ClickHouse build check') }}
-    needs: [RunConfig, StyleCheck, Builds_1]
+    if: ${{ !cancelled() && needs.RunConfig.result == 'success' && contains(fromJson(needs.RunConfig.outputs.data).jobs_data.jobs_to_do, 'Builds') }}
+    needs: [RunConfig, StyleCheck, Builds_1, Builds_2]
    uses: ./.github/workflows/reusable_test.yml
    with:
-      test_name: ClickHouse build check
-      runner_type: style-checker-aarch64
-      data: ${{ needs.RunConfig.outputs.data }}
-  Builds_2_Report:
-    # run report check for failed builds to indicate the CI error
-    if: ${{ !cancelled() && needs.StyleCheck.result == 'success' && contains(fromJson(needs.RunConfig.outputs.data).jobs_data.jobs_to_do, 'ClickHouse special build check') }}
-    needs: [RunConfig, StyleCheck, Builds_2]
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: ClickHouse special build check
+      test_name: Builds
      runner_type: style-checker-aarch64
      data: ${{ needs.RunConfig.outputs.data }}

  CheckReadyForMerge:
    if: ${{ !cancelled() && needs.StyleCheck.result == 'success' }}
-    needs: [RunConfig, BuildDockers, StyleCheck, FastTest, Builds_1, Builds_2, Builds_1_Report, Builds_2_Report, Tests_1, Tests_2]
+    # Test_2 or Test_3 must not have jobs required for Mergeable check
+    needs: [RunConfig, BuildDockers, StyleCheck, FastTest, Builds_1, Builds_2, Builds_Report, Tests_1]
    runs-on: [self-hosted, style-checker-aarch64]
    steps:
      - name: Check out repository code
@@ -177,9 +172,9 @@ jobs:
  ################################# Stage Final #################################
  #
  FinishCheck:
-    if: ${{ !failure() && !cancelled() }}
-    needs: [RunConfig, BuildDockers, StyleCheck, FastTest, Builds_1, Builds_2, Builds_1_Report, Builds_2_Report, Tests_1, Tests_2, Tests_3]
-    runs-on: [self-hosted, style-checker]
+    if: ${{ !cancelled() }}
+    needs: [RunConfig, BuildDockers, StyleCheck, FastTest, Builds_1, Builds_2, Builds_Report, Tests_1, Tests_2, Tests_3]
+    runs-on: [self-hosted, style-checker-aarch64]
    steps:
      - name: Check out repository code
        uses: ClickHouse/checkout@v1
@@ -188,14 +183,7 @@ jobs:
      - name: Finish label
        run: |
          cd "$GITHUB_WORKSPACE/tests/ci"
-          python3 finish_check.py
+          python3 finish_check.py --wf-status ${{ contains(needs.*.result, 'failure') && 'failure' || 'success' }}
-      # FIXME: merge on approval does not work with MQ. Could be fixed by using defaul GH's automerge after some corrections in Mergeable Check status
-      # - name: Auto merge if approved
-      #   if: ${{ github.event_name != 'merge_group' }}
-      #   run: |
-      #     cd "$GITHUB_WORKSPACE/tests/ci"
-      #     python3 merge_pr.py --check-approved


 #############################################################################################
 ###################################### JEPSEN TESTS #########################################
@@ -208,10 +196,9 @@ jobs:
    concurrency:
      group: jepsen
    if: ${{ !failure() && !cancelled() && contains(fromJson(needs.RunConfig.outputs.data).jobs_data.jobs_to_do, 'ClickHouse Keeper Jepsen') }}
-    # jepsen needs binary_release build which is in Builds_2
-    needs: [RunConfig, Builds_2]
+    needs: [RunConfig, Builds_1]
    uses: ./.github/workflows/reusable_test.yml
    with:
      test_name: ClickHouse Keeper Jepsen
-      runner_type: style-checker
+      runner_type: style-checker-aarch64
      data: ${{ needs.RunConfig.outputs.data }}
.github/workflows/release_branches.yml
@@ -65,7 +65,7 @@ jobs:
    if: ${{ !failure() && !cancelled() }}
    uses: ./.github/workflows/reusable_test.yml
    with:
-      test_name: Compatibility check (amd64)
+      test_name: Compatibility check (release)
      runner_type: style-checker
      data: ${{ needs.RunConfig.outputs.data }}
  CompatibilityCheckAarch64:
@@ -176,35 +176,24 @@ jobs:
  ############################################################################################
  ##################################### BUILD REPORTER #######################################
  ############################################################################################
-  BuilderReport:
+  Builds_Report:
    # run report check for failed builds to indicate the CI error
-    if: ${{ !cancelled() }}
-    needs:
-      - RunConfig
-      - BuilderDebRelease
-      - BuilderDebAarch64
-      - BuilderDebAsan
-      - BuilderDebTsan
-      - BuilderDebUBsan
-      - BuilderDebMsan
-      - BuilderDebDebug
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: ClickHouse build check
-      runner_type: style-checker-aarch64
-      data: ${{ needs.RunConfig.outputs.data }}
-  BuilderSpecialReport:
-    # run report check for failed builds to indicate the CI error
-    if: ${{ !cancelled() }}
-    needs:
-      - RunConfig
-      - BuilderBinDarwin
-      - BuilderBinDarwinAarch64
-    uses: ./.github/workflows/reusable_test.yml
-    with:
-      test_name: ClickHouse special build check
-      runner_type: style-checker-aarch64
-      data: ${{ needs.RunConfig.outputs.data }}
+    if: ${{ !cancelled() && needs.RunConfig.result == 'success' && contains(fromJson(needs.RunConfig.outputs.data).jobs_data.jobs_to_do, 'Builds') }}
+    needs: [RunConfig, BuilderDebRelease, BuilderDebAarch64, BuilderDebAsan, BuilderDebUBsan, BuilderDebMsan, BuilderDebTsan, BuilderDebDebug, BuilderBinDarwin, BuilderBinDarwinAarch64]
+    runs-on: [self-hosted, style-checker-aarch64]
+    steps:
+      - name: Check out repository code
+        uses: ClickHouse/checkout@v1
+      - name: Download reports
+        run: |
+          python3 "$GITHUB_WORKSPACE/tests/ci/ci.py" --infile ${{ toJson(needs.RunConfig.outputs.data) }} --pre --job-name Builds
+      - name: Builds report
+        run: |
+          cd "$GITHUB_WORKSPACE/tests/ci"
+          python3 ./build_report_check.py --reports package_release package_aarch64 package_asan package_msan package_ubsan package_tsan package_debug binary_darwin binary_darwin_aarch64
+      - name: Set status
+        run: |
+          python3 "$GITHUB_WORKSPACE/tests/ci/ci.py" --infile ${{ toJson(needs.RunConfig.outputs.data) }} --post --job-name Builds
  MarkReleaseReady:
    if: ${{ !failure() && !cancelled() }}
    needs:
@@ -244,7 +233,7 @@ jobs:
    if: ${{ !failure() && !cancelled() }}
    uses: ./.github/workflows/reusable_test.yml
    with:
-      test_name: Install packages (amd64)
+      test_name: Install packages (release)
      runner_type: style-checker
      data: ${{ needs.RunConfig.outputs.data }}
      run_command: |
@@ -254,7 +243,7 @@ jobs:
    if: ${{ !failure() && !cancelled() }}
    uses: ./.github/workflows/reusable_test.yml
    with:
-      test_name: Install packages (arm64)
+      test_name: Install packages (aarch64)
      runner_type: style-checker-aarch64
      data: ${{ needs.RunConfig.outputs.data }}
      run_command: |
@@ -460,8 +449,7 @@ jobs:
    needs:
      - DockerServerImage
      - DockerKeeperImage
-      - BuilderReport
-      - BuilderSpecialReport
+      - Builds_Report
      - MarkReleaseReady
      - FunctionalStatelessTestDebug
      - FunctionalStatelessTestRelease
@@ -496,4 +484,7 @@ jobs:
      - name: Finish label
        run: |
          cd "$GITHUB_WORKSPACE/tests/ci"
-          python3 finish_check.py
+          # update mergeable check
+          python3 merge_pr.py --set-ci-status --wf-status ${{ contains(needs.*.result, 'failure') && 'failure' || 'success' }}
+          # update overall ci report
+          python3 finish_check.py --wf-status ${{ contains(needs.*.result, 'failure') && 'failure' || 'success' }}
.github/workflows/reusable_build.yml
@@ -33,6 +33,10 @@ name: Build ClickHouse
      additional_envs:
        description: additional ENV variables to setup the job
        type: string
+  secrets:
+    secret_envs:
+      description: if given, it's passed to the environments
+      required: false

jobs:
  Build:
@@ -54,6 +58,7 @@ jobs:
        run: |
          cat >> "$GITHUB_ENV" << 'EOF'
          ${{inputs.additional_envs}}
+          ${{secrets.secret_envs}}
          DOCKER_TAG<<DOCKER_JSON
          ${{ toJson(fromJson(inputs.data).docker_data.images) }}
          DOCKER_JSON
.github/workflows/reusable_build_stage.yml
@@ -13,6 +13,10 @@ name: BuildStageWF
      description: ci data
      type: string
      required: true
+  secrets:
+    secret_envs:
+      description: if given, it's passed to the environments
+      required: false

jobs:
  s:
@@ -30,3 +34,5 @@ jobs:
      # for now let's do I deep checkout for builds
      checkout_depth: 0
      data: ${{ inputs.data }}
+    secrets:
+      secret_envs: ${{ secrets.secret_envs }}
.github/workflows/reusable_test.yml
@@ -58,7 +58,7 @@ jobs:
    env:
      GITHUB_JOB_OVERRIDDEN: ${{inputs.test_name}}${{ fromJson(inputs.data).jobs_data.jobs_params[inputs.test_name].num_batches > 1 && format('-{0}',matrix.batch) || '' }}
    strategy:
-      fail-fast: false # we always wait for entire matrix
+      fail-fast: false # we always wait for the entire matrix
      matrix:
        batch: ${{ fromJson(inputs.data).jobs_data.jobs_params[inputs.test_name].batches }}
    steps:
.github/workflows/reusable_test_stage.yml
@@ -10,6 +10,10 @@ name: StageWF
      description: ci data
      type: string
      required: true
+  secrets:
+    secret_envs:
+      description: if given, it's passed to the environments
+      required: false

jobs:
  s:
@@ -23,3 +27,5 @@ jobs:
      test_name: ${{ matrix.job_name_and_runner_type.job_name }}
      runner_type: ${{ matrix.job_name_and_runner_type.runner_type }}
      data: ${{ inputs.data }}
+    secrets:
+      secret_envs: ${{ secrets.secret_envs }}
.github/workflows/tags_stable.yml
@@ -46,9 +46,10 @@ jobs:
          ./utils/list-versions/list-versions.sh > ./utils/list-versions/version_date.tsv
          ./utils/list-versions/update-docker-version.sh
          GID=$(id -g "${UID}")
-          docker run -u "${UID}:${GID}" -e PYTHONUNBUFFERED=1 \
+          # --network=host and CI=1 are required for the S3 access from a container
+          docker run -u "${UID}:${GID}" -e PYTHONUNBUFFERED=1 -e CI=1 --network=host \
            --volume="${GITHUB_WORKSPACE}:/ClickHouse" clickhouse/style-test \
-            /ClickHouse/utils/changelog/changelog.py -v --debug-helpers \
+            /ClickHouse/tests/ci/changelog.py -v --debug-helpers \
            --gh-user-or-token="$GITHUB_TOKEN" --jobs=5 \
            --output="/ClickHouse/docs/changelogs/${GITHUB_TAG}.md" "${GITHUB_TAG}"
          git add "./docs/changelogs/${GITHUB_TAG}.md"
.gitignore
@@ -21,6 +21,9 @@
 *.stderr
 *.stdout

+# llvm-xray logs
+xray-log.*
+
 /docs/build
 /docs/publish
 /docs/edit
.gitmessage (deleted)
@@ -1,29 +0,0 @@
-
-
-### CI modificators (add a leading space to apply) ###
-
-## To avoid a merge commit in CI:
-#no_merge_commit
-
-## To discard CI cache:
-#no_ci_cache
-
-## To not test (only style check):
-#do_not_test
-
-## To run specified set of tests in CI:
-#ci_set_<SET_NAME>
-#ci_set_reduced
-#ci_set_arm
-#ci_set_integration
-#ci_set_old_analyzer
-
-## To run specified job in CI:
-#job_<JOB NAME>
-#job_stateless_tests_release
-#job_package_debug
-#job_integration_tests_asan
-
-## To run only specified batches for multi-batch job(s)
-#batch_2
-#batch_1_2_3
.gitmodules
@@ -91,13 +91,13 @@
 [submodule "contrib/aws"]
     path = contrib/aws
     url = https://github.com/ClickHouse/aws-sdk-cpp
-[submodule "aws-c-event-stream"]
+[submodule "contrib/aws-c-event-stream"]
     path = contrib/aws-c-event-stream
     url = https://github.com/awslabs/aws-c-event-stream
-[submodule "aws-c-common"]
+[submodule "contrib/aws-c-common"]
     path = contrib/aws-c-common
     url = https://github.com/awslabs/aws-c-common.git
-[submodule "aws-checksums"]
+[submodule "contrib/aws-checksums"]
     path = contrib/aws-checksums
     url = https://github.com/awslabs/aws-checksums
 [submodule "contrib/curl"]
@@ -163,7 +163,7 @@
     url = https://github.com/xz-mirror/xz
 [submodule "contrib/abseil-cpp"]
     path = contrib/abseil-cpp
-    url = https://github.com/abseil/abseil-cpp
+    url = https://github.com/ClickHouse/abseil-cpp.git
 [submodule "contrib/dragonbox"]
     path = contrib/dragonbox
     url = https://github.com/ClickHouse/dragonbox
.yamllint
@@ -13,5 +13,4 @@ rules:
    level: warning
  comments:
    min-spaces-from-content: 1
-  document-start:
-    present: false
+  document-start: disable
CHANGELOG.md
@@ -1,4 +1,6 @@
 ### Table of Contents
+**[ClickHouse release v24.6, 2024-07-01](#246)**<br/>
+**[ClickHouse release v24.5, 2024-05-30](#245)**<br/>
 **[ClickHouse release v24.4, 2024-04-30](#244)**<br/>
 **[ClickHouse release v24.3 LTS, 2024-03-26](#243)**<br/>
 **[ClickHouse release v24.2, 2024-02-29](#242)**<br/>
@@ -7,6 +9,325 @@

 # 2024 Changelog

+### <a id="246"></a> ClickHouse release 24.6, 2024-07-01
+
+#### Backward Incompatible Change
+* Enable asynchronous load of databases and tables by default. See the `async_load_databases` in config.xml. While this change is fully compatible, it can introduce a difference in behavior. When `async_load_databases` is false, as in the previous versions, the server will not accept connections until all tables are loaded. When `async_load_databases` is true, as in the new version, the server can accept connections before all the tables are loaded. If a query is made to a table that is not yet loaded, it will wait for the table's loading, which can take considerable time. It can change the behavior of the server if it is part of a large distributed system under a load balancer. In the first case, the load balancer can get a connection refusal and quickly failover to another server. In the second case, the load balancer can connect to a server that is still loading the tables, and the query will have a higher latency. Moreover, if many queries accumulate in the waiting state, it can lead to a "thundering herd" problem when they start processing simultaneously. This can make a difference only for highly loaded distributed backends. You can set the value of `async_load_databases` to false to avoid this problem. [#57695](https://github.com/ClickHouse/ClickHouse/pull/57695) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
+* Setting `replace_long_file_name_to_hash` is enabled by default for `MergeTree` tables. [#64457](https://github.com/ClickHouse/ClickHouse/pull/64457) ([Anton Popov](https://github.com/CurtizJ)). This setting is fully compatible, and no actions needed during upgrade. The new data format is supported from all versions starting from 23.9. After enabling this setting, you can no longer downgrade to a version 23.8 or older.
+* Some invalid queries will fail earlier during parsing. Note: disabled the support for inline KQL expressions (the experimental Kusto language) when they are put into a `kql` table function without a string literal, e.g. `kql(garbage | trash)` instead of `kql('garbage | trash')` or `kql($$garbage | trash$$)`. This feature was introduced unintentionally and should not exist. [#61500](https://github.com/ClickHouse/ClickHouse/pull/61500) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
+* Rework parallel processing in `Ordered` mode of storage `S3Queue`. This PR is backward incompatible for Ordered mode if you used settings `s3queue_processing_threads_num` or `s3queue_total_shards_num`. Setting `s3queue_total_shards_num` is deleted, previously it was allowed to use only under `s3queue_allow_experimental_sharded_mode`, which is now deprecated. A new setting is added - `s3queue_buckets`. [#64349](https://github.com/ClickHouse/ClickHouse/pull/64349) ([Kseniia Sumarokova](https://github.com/kssenii)).
+* New functions `snowflakeIDToDateTime`, `snowflakeIDToDateTime64`, `dateTimeToSnowflakeID`, and `dateTime64ToSnowflakeID` were added. Unlike the existing functions `snowflakeToDateTime`, `snowflakeToDateTime64`, `dateTimeToSnowflake`, and `dateTime64ToSnowflake`, the new functions are compatible with function `generateSnowflakeID`, i.e. they accept the snowflake IDs generated by `generateSnowflakeID` and produce snowflake IDs of the same type as `generateSnowflakeID` (i.e. `UInt64`). Furthermore, the new functions default to the UNIX epoch (aka. 1970-01-01), just like `generateSnowflakeID`. If necessary, a different epoch, e.g. Twitter's/X's epoch 2010-11-04 aka. 1288834974657 msec since UNIX epoch, can be passed. The old conversion functions are deprecated and will be removed after a transition period: to use them regardless, enable setting `allow_deprecated_snowflake_conversion_functions`. [#64948](https://github.com/ClickHouse/ClickHouse/pull/64948) ([Robert Schulze](https://github.com/rschu1ze)).
+
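A rough sketch of how the new UInt64-based snowflake functions compose (illustrative only, not taken from the changelog; the concrete values depend on the machine and the current time):

```sql
-- Generate a Snowflake ID and convert it back to a DateTime.
SELECT
    generateSnowflakeID() AS id,
    snowflakeIDToDateTime(id) AS ts;

-- Convert an existing DateTime into a UInt64 snowflake ID.
SELECT dateTimeToSnowflakeID(now()) AS id;
```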
+#### New Feature
+* Allow to store named collections in ClickHouse Keeper. [#64574](https://github.com/ClickHouse/ClickHouse/pull/64574) ([Kseniia Sumarokova](https://github.com/kssenii)).
+* Support empty tuples. [#55061](https://github.com/ClickHouse/ClickHouse/pull/55061) ([Amos Bird](https://github.com/amosbird)).
+* Add Hilbert Curve encode and decode functions. [#60156](https://github.com/ClickHouse/ClickHouse/pull/60156) ([Artem Mustafin](https://github.com/Artemmm91)).
+* Add support for index analysis over `hilbertEncode`. [#64662](https://github.com/ClickHouse/ClickHouse/pull/64662) ([Artem Mustafin](https://github.com/Artemmm91)).
+* Added support for reading `LINESTRING` geometry in the WKT format using function `readWKTLineString`. [#62519](https://github.com/ClickHouse/ClickHouse/pull/62519) ([Nikita Mikhaylov](https://github.com/nikitamikhaylov)).
+* Allow to attach parts from a different disk. [#63087](https://github.com/ClickHouse/ClickHouse/pull/63087) ([Unalian](https://github.com/Unalian)).
+* Added new SQL functions `generateSnowflakeID` for generating Twitter-style Snowflake IDs. [#63577](https://github.com/ClickHouse/ClickHouse/pull/63577) ([Danila Puzov](https://github.com/kazalika)).
+* Added `merge_workload` and `mutation_workload` settings to regulate how resources are utilized and shared between merges, mutations and other workloads. [#64061](https://github.com/ClickHouse/ClickHouse/pull/64061) ([Sergei Trifonov](https://github.com/serxa)).
+* Add support for comparing `IPv4` and `IPv6` types using the `=` operator. [#64292](https://github.com/ClickHouse/ClickHouse/pull/64292) ([Francisco J. Jurado Moreno](https://github.com/Beetelbrox)).
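A minimal sketch of the new cross-type comparison (illustrative):

```sql
-- An IPv4 address now compares equal to its IPv4-mapped IPv6 form.
SELECT toIPv4('127.0.0.1') = toIPv6('::ffff:127.0.0.1') AS equal; -- 1
```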
+* Support decimal arguments in binary math functions (pow, atan2, max2, min2, hypot). [#64582](https://github.com/ClickHouse/ClickHouse/pull/64582) ([Mikhail Gorshkov](https://github.com/mgorshkov)).
+* Added SQL functions `parseReadableSize` (along with `OrNull` and `OrZero` variants). [#64742](https://github.com/ClickHouse/ClickHouse/pull/64742) ([Francisco J. Jurado Moreno](https://github.com/Beetelbrox)).
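A quick sketch of the parsing functions (a sketch under the assumption that binary units are parsed to byte counts; 1.5 MiB = 1572864 bytes):

```sql
SELECT
    parseReadableSize('1.5 MiB')       AS bytes,   -- 1572864
    parseReadableSizeOrZero('oops')    AS zero,    -- 0 on parse failure
    parseReadableSizeOrNull('oops')    AS nothing; -- NULL on parse failure
```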
+* Add server settings `max_table_num_to_throw` and `max_database_num_to_throw` to limit the number of databases or tables on `CREATE` queries. [#64781](https://github.com/ClickHouse/ClickHouse/pull/64781) ([Xu Jia](https://github.com/XuJia0210)).
+* Add `_time` virtual column to file alike storages (s3/file/hdfs/url/azureBlobStorage). [#64947](https://github.com/ClickHouse/ClickHouse/pull/64947) ([Ilya Golshtein](https://github.com/ilejn)).
+* Introduced new functions `base64URLEncode`, `base64URLDecode` and `tryBase64URLDecode`. [#64991](https://github.com/ClickHouse/ClickHouse/pull/64991) ([Mikhail Gorshkov](https://github.com/mgorshkov)).
+* Add new function `editDistanceUTF8`, which calculates the [edit distance](https://en.wikipedia.org/wiki/Edit_distance) between two UTF8 strings. [#65269](https://github.com/ClickHouse/ClickHouse/pull/65269) ([LiuNeng](https://github.com/liuneng1994)).
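A small sketch combining the two string functions above (illustrative; the edit-distance result assumes distance is counted in code points, not bytes):

```sql
-- URL-safe Base64 uses '-' and '_' instead of '+' and '/'.
SELECT base64URLEncode('https://clickhouse.com') AS enc,
       base64URLDecode(enc) AS dec;

-- One substitution at the level of UTF-8 code points.
SELECT editDistanceUTF8('ClickHouse', 'ClickHöuse') AS dist; -- 1
```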
+* Add `http_response_headers` configuration to support custom response headers in custom HTTP handlers. [#63562](https://github.com/ClickHouse/ClickHouse/pull/63562) ([Grigorii](https://github.com/GSokol)).
+* Added a new table function `loop` to support returning query results in an infinite loop. [#63452](https://github.com/ClickHouse/ClickHouse/pull/63452) ([Sariel](https://github.com/sarielwxm)). This is useful for testing.
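A sketch of the infinite-loop table function; without the `LIMIT` the query would never finish:

```sql
-- Cycles through 0, 1, 2, 0, 1, 2, 0 and then stops.
SELECT * FROM loop(numbers(3)) LIMIT 7;
```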
+* Introduced two additional columns in the `system.query_log`: `used_privileges` and `missing_privileges`. `used_privileges` is populated with the privileges that were checked during query execution, and `missing_privileges` contains required privileges that are missing. [#64597](https://github.com/ClickHouse/ClickHouse/pull/64597) ([Alexey Katsman](https://github.com/alexkats)).
+* Added a setting `output_format_pretty_display_footer_column_names` which when enabled displays column names at the end of the table for long tables (50 rows by default), with the threshold value for minimum number of rows controlled by `output_format_pretty_display_footer_column_names_min_rows`. [#65144](https://github.com/ClickHouse/ClickHouse/pull/65144) ([Shaun Struwig](https://github.com/Blargian)).
+
+#### Experimental Feature
+* Introduce statistics of type "number of distinct values". [#59357](https://github.com/ClickHouse/ClickHouse/pull/59357) ([Han Fei](https://github.com/hanfei1991)).
+* Support statistics with ReplicatedMergeTree. [#64934](https://github.com/ClickHouse/ClickHouse/pull/64934) ([Han Fei](https://github.com/hanfei1991)).
+* If "replica group" is configured for a `Replicated` database, automatically create a cluster that includes replicas from all groups. [#64312](https://github.com/ClickHouse/ClickHouse/pull/64312) ([Alexander Tokmakov](https://github.com/tavplubix)).
+* Add settings `parallel_replicas_custom_key_range_lower` and `parallel_replicas_custom_key_range_upper` to control how parallel replicas with dynamic shards parallelizes queries when using a range filter. [#64604](https://github.com/ClickHouse/ClickHouse/pull/64604) ([josh-hildred](https://github.com/josh-hildred)).
+
+#### Performance Improvement
+* Add the ability to reshuffle rows during insert to optimize for size without violating the order set by `PRIMARY KEY`. It's controlled by the setting `optimize_row_order` (off by default). [#63578](https://github.com/ClickHouse/ClickHouse/pull/63578) ([Igor Markelov](https://github.com/ElderlyPassionFruit)).
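A hypothetical table showing where the setting could be applied; the table and column names are made up, and the assumption that `optimize_row_order` is set at the MergeTree table level is mine, not the changelog's:

```sql
CREATE TABLE hits
(
    user_id UInt64,
    url String,
    event_time DateTime
)
ENGINE = MergeTree
ORDER BY user_id
SETTINGS optimize_row_order = 1; -- allow reshuffling rows within the primary-key order for better compression
```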
+* Add a native parquet reader, which can read parquet binary to ClickHouse Columns directly. It's controlled by the setting `input_format_parquet_use_native_reader` (disabled by default). [#60361](https://github.com/ClickHouse/ClickHouse/pull/60361) ([ZhiHong Zhang](https://github.com/copperybean)).
+* Support partial trivial count optimization when the query filter is able to select exact ranges from merge tree tables. [#60463](https://github.com/ClickHouse/ClickHouse/pull/60463) ([Amos Bird](https://github.com/amosbird)).
+* Reduce max memory usage of multithreaded `INSERT`s by collecting chunks of multiple threads in a single transform. [#61047](https://github.com/ClickHouse/ClickHouse/pull/61047) ([Yarik Briukhovetskyi](https://github.com/yariks5s)).
+* Reduce the memory usage when using Azure object storage by using fixed memory allocation, avoiding the allocation of an extra buffer. [#63160](https://github.com/ClickHouse/ClickHouse/pull/63160) ([SmitaRKulkarni](https://github.com/SmitaRKulkarni)).
+* Reduce the number of virtual function calls in `ColumnNullable::size`. [#60556](https://github.com/ClickHouse/ClickHouse/pull/60556) ([HappenLee](https://github.com/HappenLee)).
+* Speedup `splitByRegexp` when the regular expression argument is a single-character. [#62696](https://github.com/ClickHouse/ClickHouse/pull/62696) ([Robert Schulze](https://github.com/rschu1ze)).
+* Speed up aggregation by 8-bit and 16-bit keys by keeping track of the min and max keys used. This allows to reduce the number of cells that need to be verified. [#62746](https://github.com/ClickHouse/ClickHouse/pull/62746) ([Jiebin Sun](https://github.com/jiebinn)).
+* Optimize operator IN when the left hand side is `LowCardinality` and the right is a set of constants. [#64060](https://github.com/ClickHouse/ClickHouse/pull/64060) ([Zhiguo Zhou](https://github.com/ZhiguoZh)).
+* Use a thread pool to initialize and destroy hash tables inside `ConcurrentHashJoin`. [#64241](https://github.com/ClickHouse/ClickHouse/pull/64241) ([Nikita Taranov](https://github.com/nickitat)).
+* Optimized vertical merges in tables with sparse columns. [#64311](https://github.com/ClickHouse/ClickHouse/pull/64311) ([Anton Popov](https://github.com/CurtizJ)).
+* Enabled prefetches of data from remote filesystem during vertical merges. It improves latency of vertical merges in tables with data stored on remote filesystem. [#64314](https://github.com/ClickHouse/ClickHouse/pull/64314) ([Anton Popov](https://github.com/CurtizJ)).
+* Reduce redundant calls to `isDefault` of `ColumnSparse::filter` to improve performance. [#64426](https://github.com/ClickHouse/ClickHouse/pull/64426) ([Jiebin Sun](https://github.com/jiebinn)).
+* Speedup `find_super_nodes` and `find_big_family` keeper-client commands by making multiple asynchronous getChildren requests. [#64628](https://github.com/ClickHouse/ClickHouse/pull/64628) ([Alexander Gololobov](https://github.com/davenger)).
+* Improve function `least`/`greatest` for nullable numeric type arguments. [#64668](https://github.com/ClickHouse/ClickHouse/pull/64668) ([KevinyhZou](https://github.com/KevinyhZou)).
+* Allow merging two consequent filtering steps of a query plan. This improves filter-push-down optimization if the filter condition can be pushed down from the parent step. [#64760](https://github.com/ClickHouse/ClickHouse/pull/64760) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
+* Remove bad optimization in the vertical final implementation and re-enable vertical final algorithm by default. [#64783](https://github.com/ClickHouse/ClickHouse/pull/64783) ([Duc Canh Le](https://github.com/canhld94)).
+* Remove ALIAS nodes from the filter expression. This slightly improves performance for queries with `PREWHERE` (with the new analyzer). [#64793](https://github.com/ClickHouse/ClickHouse/pull/64793) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
+* Re-enable OpenSSL session caching. [#65111](https://github.com/ClickHouse/ClickHouse/pull/65111) ([Robert Schulze](https://github.com/rschu1ze)).
+* Added settings to disable materialization of skip indexes and statistics on inserts (`materialize_skip_indexes_on_insert` and `materialize_statistics_on_insert`). [#64391](https://github.com/ClickHouse/ClickHouse/pull/64391) ([Anton Popov](https://github.com/CurtizJ)).
+* Use the allocated memory size to calculate the row group size and reduce the peak memory of the parquet writer in the single-threaded mode. [#64424](https://github.com/ClickHouse/ClickHouse/pull/64424) ([LiuNeng](https://github.com/liuneng1994)).
+* Improve the iterator of sparse column to reduce call of `size`. [#64497](https://github.com/ClickHouse/ClickHouse/pull/64497) ([Jiebin Sun](https://github.com/jiebinn)).
+* Update condition to use server-side copy for backups to Azure blob storage. [#64518](https://github.com/ClickHouse/ClickHouse/pull/64518) ([SmitaRKulkarni](https://github.com/SmitaRKulkarni)).
+* Optimized memory usage of vertical merges for tables with high number of skip indexes. [#64580](https://github.com/ClickHouse/ClickHouse/pull/64580) ([Anton Popov](https://github.com/CurtizJ)).
+
+#### Improvement
+* `SHOW CREATE TABLE` executed on top of system tables will now show the super handy comment unique for each table which will explain why this table is needed. [#63788](https://github.com/ClickHouse/ClickHouse/pull/63788) ([Nikita Mikhaylov](https://github.com/nikitamikhaylov)).
+* The second argument (scale) of functions `round()`, `roundBankers()`, `floor()`, `ceil()` and `trunc()` can now be non-const. [#64798](https://github.com/ClickHouse/ClickHouse/pull/64798) ([Mikhail Gorshkov](https://github.com/mgorshkov)).
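A minimal sketch of the now-allowed non-constant scale (illustrative):

```sql
-- Each row rounds to a different number of decimal places.
SELECT number AS scale, round(123.456, number) AS rounded
FROM system.numbers
LIMIT 4;
```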
|
* Hot reload storage policy for `Distributed` tables when adding a new disk. [#58285](https://github.com/ClickHouse/ClickHouse/pull/58285) ([Duc Canh Le](https://github.com/canhld94)).
|
||||||
|
* Avoid possible deadlock during MergeTree index analysis when scheduling threads in a saturated service. [#59427](https://github.com/ClickHouse/ClickHouse/pull/59427) ([Sean Haynes](https://github.com/seandhaynes)).
|
||||||
|
* Several minor corner case fixes to S3 proxy support & tunneling. [#63427](https://github.com/ClickHouse/ClickHouse/pull/63427) ([Arthur Passos](https://github.com/arthurpassos)).
|
||||||
|
* Improve io_uring resubmit visibility. Rename profile event `IOUringSQEsResubmits` -> `IOUringSQEsResubmitsAsync` and add a new one `IOUringSQEsResubmitsSync`. [#63699](https://github.com/ClickHouse/ClickHouse/pull/63699) ([Tomer Shafir](https://github.com/tomershafir)).
|
||||||
|
* Added a new setting, `metadata_keep_free_space_bytes` to keep free space on the metadata storage disk. [#64128](https://github.com/ClickHouse/ClickHouse/pull/64128) ([MikhailBurdukov](https://github.com/MikhailBurdukov)).
|
||||||
|
* Add metrics to track the number of directories created and removed by the `plain_rewritable` metadata storage, and the number of entries in the local-to-remote in-memory map. [#64175](https://github.com/ClickHouse/ClickHouse/pull/64175) ([Julia Kartseva](https://github.com/jkartseva)).
|
||||||
|
* The query cache now considers identical queries with different settings as different. This increases robustness in cases where different settings (e.g. `limit` or `additional_table_filters`) would affect the query result. [#64205](https://github.com/ClickHouse/ClickHouse/pull/64205) ([Robert Schulze](https://github.com/rschu1ze)).
|
||||||
|
* Support the non standard error code `QpsLimitExceeded` in object storage as a retryable error. [#64225](https://github.com/ClickHouse/ClickHouse/pull/64225) ([Sema Checherinda](https://github.com/CheSema)).
|
||||||
|
* Forbid converting a MergeTree table to replicated if the zookeeper path for this table already exists. [#64244](https://github.com/ClickHouse/ClickHouse/pull/64244) ([Kirill](https://github.com/kirillgarbar)).
|
||||||
|
* Added a new setting `input_format_parquet_prefer_block_bytes` to control the average output block bytes, and modified the default value of `input_format_parquet_max_block_size` to 65409. [#64427](https://github.com/ClickHouse/ClickHouse/pull/64427) ([LiuNeng](https://github.com/liuneng1994)).
|
||||||
|
* Allow proxy to be bypassed for hosts specified in `no_proxy` env variable and ClickHouse proxy configuration. [#63314](https://github.com/ClickHouse/ClickHouse/pull/63314) ([Arthur Passos](https://github.com/arthurpassos)).
|
||||||
|
* Always start Keeper with sufficient amount of threads in global thread pool. [#64444](https://github.com/ClickHouse/ClickHouse/pull/64444) ([Duc Canh Le](https://github.com/canhld94)).
* Settings from the user's config don't affect merges and mutations for `MergeTree` on top of object storage. [#64456](https://github.com/ClickHouse/ClickHouse/pull/64456) ([alesapin](https://github.com/alesapin)).
* Support the non-standard error code `TotalQpsLimitExceeded` in object storage as a retryable error. [#64520](https://github.com/ClickHouse/ClickHouse/pull/64520) ([Sema Checherinda](https://github.com/CheSema)).
* Updated Advanced Dashboard for both open-source and ClickHouse Cloud versions to include a chart for 'Maximum concurrent network connections'. [#64610](https://github.com/ClickHouse/ClickHouse/pull/64610) ([Thom O'Connor](https://github.com/thomoco)).
* Improve progress report on `zeros_mt` and `generateRandom`. [#64804](https://github.com/ClickHouse/ClickHouse/pull/64804) ([Raúl Marín](https://github.com/Algunenano)).
* Add an asynchronous metric `jemalloc.profile.active` to show whether sampling is currently active. This is an activation mechanism in addition to prof.active; both must be active for the calling thread to sample. [#64842](https://github.com/ClickHouse/ClickHouse/pull/64842) ([Unalian](https://github.com/Unalian)).
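
  For example, the new metric can be queried like any other asynchronous metric:

  ```sql
  SELECT value
  FROM system.asynchronous_metrics
  WHERE metric = 'jemalloc.profile.active';
  ```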
* Remove mark of `allow_experimental_join_condition` as important. This mark may have prevented distributed queries in a mixed versions cluster from being executed successfully. [#65008](https://github.com/ClickHouse/ClickHouse/pull/65008) ([Nikita Mikhaylov](https://github.com/nikitamikhaylov)).
* Added server asynchronous metrics `DiskGetObjectThrottler*` and `DiskPutObjectThrottler*` reflecting the request-per-second rate limits defined with the `s3_max_get_rps` and `s3_max_put_rps` disk settings, as well as the number of requests that can currently be sent without hitting the throttling limit on the disk. Metrics are defined for every disk that has a configured limit. [#65050](https://github.com/ClickHouse/ClickHouse/pull/65050) ([Sergei Trifonov](https://github.com/serxa)).
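
  A sketch of how to inspect these metrics (the LIKE pattern is illustrative):

  ```sql
  SELECT metric, value
  FROM system.asynchronous_metrics
  WHERE metric LIKE 'Disk%ObjectThrottler%';
  ```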
* Initialize global trace collector for `Poco::ThreadPool` (needed for Keeper, etc). [#65239](https://github.com/ClickHouse/ClickHouse/pull/65239) ([Kseniia Sumarokova](https://github.com/kssenii)).
* Add a validation when creating a user with `bcrypt_hash`. [#65242](https://github.com/ClickHouse/ClickHouse/pull/65242) ([Raúl Marín](https://github.com/Algunenano)).
* Add profile events for number of rows read during/after `PREWHERE`. [#64198](https://github.com/ClickHouse/ClickHouse/pull/64198) ([Nikita Taranov](https://github.com/nickitat)).
* Print query in `EXPLAIN PLAN` with parallel replicas. [#64298](https://github.com/ClickHouse/ClickHouse/pull/64298) ([vdimir](https://github.com/vdimir)).
* Rename `allow_deprecated_functions` to `allow_deprecated_error_prone_window_functions`. [#64358](https://github.com/ClickHouse/ClickHouse/pull/64358) ([Raúl Marín](https://github.com/Algunenano)).
* Respect `max_read_buffer_size` setting for file descriptors as well in the `file` table function. [#64532](https://github.com/ClickHouse/ClickHouse/pull/64532) ([Azat Khuzhin](https://github.com/azat)).
* Disable transactions for unsupported storages even for materialized views. [#64918](https://github.com/ClickHouse/ClickHouse/pull/64918) ([alesapin](https://github.com/alesapin)).
* Forbid `QUALIFY` clause in the old analyzer. The old analyzer ignored `QUALIFY`, so it could lead to unexpected data removal in mutations. [#65356](https://github.com/ClickHouse/ClickHouse/pull/65356) ([Dmitry Novik](https://github.com/novikd)).

#### Bug Fix (user-visible misbehavior in an official stable release)
* A bug in Apache ORC library was fixed: Fixed ORC statistics calculation, when writing, for unsigned types on all platforms and Int8 on ARM. [#64563](https://github.com/ClickHouse/ClickHouse/pull/64563) ([Michael Kolupaev](https://github.com/al13n321)).
* Restored the previous behaviour of how ClickHouse interprets Tuples in CSV format. This change effectively reverts https://github.com/ClickHouse/ClickHouse/pull/60994 and makes the new behaviour available only under a few settings: `output_format_csv_serialize_tuple_into_separate_columns`, `input_format_csv_deserialize_separate_columns_into_tuple` and `input_format_csv_try_infer_strings_from_quoted_tuples`. [#65170](https://github.com/ClickHouse/ClickHouse/pull/65170) ([Nikita Mikhaylov](https://github.com/nikitamikhaylov)).
* Fix a permission error where a user in a specific situation can escalate their privileges on the default database without necessary grants. [#64769](https://github.com/ClickHouse/ClickHouse/pull/64769) ([pufit](https://github.com/pufit)).
* Fix crash with UniqInjectiveFunctionsEliminationPass and uniqCombined. [#65188](https://github.com/ClickHouse/ClickHouse/pull/65188) ([Raúl Marín](https://github.com/Algunenano)).
* Fix a bug in ClickHouse Keeper that caused a digest mismatch when closing a session. [#65198](https://github.com/ClickHouse/ClickHouse/pull/65198) ([Aleksei Filatov](https://github.com/aalexfvk)).
* Use the correct memory alignment for the Distinct combinator. Previously, a crash could happen because of invalid memory allocation when the combinator was used. [#65379](https://github.com/ClickHouse/ClickHouse/pull/65379) ([Antonio Andelic](https://github.com/antonio2368)).
* Fix crash with `DISTINCT` and window functions. [#64767](https://github.com/ClickHouse/ClickHouse/pull/64767) ([Igor Nikonov](https://github.com/devcrafter)).
* Fixed 'set' skip index not working with IN and indexHint(). [#62083](https://github.com/ClickHouse/ClickHouse/pull/62083) ([Michael Kolupaev](https://github.com/al13n321)).
* Support executing function during assignment of parameterized view value. [#63502](https://github.com/ClickHouse/ClickHouse/pull/63502) ([SmitaRKulkarni](https://github.com/SmitaRKulkarni)).
* Fixed parquet memory tracking. [#63584](https://github.com/ClickHouse/ClickHouse/pull/63584) ([Michael Kolupaev](https://github.com/al13n321)).
* Fixed reading of columns of type `Tuple(Map(LowCardinality(String), String), ...)`. [#63956](https://github.com/ClickHouse/ClickHouse/pull/63956) ([Anton Popov](https://github.com/CurtizJ)).
* Fix a `Cyclic aliases` error for cyclic aliases of different types (expression and function). [#63993](https://github.com/ClickHouse/ClickHouse/pull/63993) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
* Use a properly redefined context with the correct definer for each individual view in the query pipeline. [#64079](https://github.com/ClickHouse/ClickHouse/pull/64079) ([pufit](https://github.com/pufit)).
* Fix a "Not found column" error in the analyzer when using `INTERPOLATE`. [#64096](https://github.com/ClickHouse/ClickHouse/pull/64096) ([Yakov Olkhovskiy](https://github.com/yakov-olkhovskiy)).
* Fix creating backups to S3 buckets with different credentials from the disk containing the file. [#64153](https://github.com/ClickHouse/ClickHouse/pull/64153) ([Antonio Andelic](https://github.com/antonio2368)).
* The query cache now considers two identical queries against different databases as different. The previous behavior could be used to bypass missing privileges to read from a table. [#64199](https://github.com/ClickHouse/ClickHouse/pull/64199) ([Robert Schulze](https://github.com/rschu1ze)).
* Fix possible abort on uncaught exception in ~WriteBufferFromFileDescriptor in StatusFile. [#64206](https://github.com/ClickHouse/ClickHouse/pull/64206) ([Kruglov Pavel](https://github.com/Avogar)).
* Fix `duplicate alias` error for distributed queries with `ARRAY JOIN`. [#64226](https://github.com/ClickHouse/ClickHouse/pull/64226) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
* Fix unexpected accurateCast from string to integer. [#64255](https://github.com/ClickHouse/ClickHouse/pull/64255) ([wudidapaopao](https://github.com/wudidapaopao)).
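
  A sketch of the intended strict behaviour (results are illustrative):

  ```sql
  SELECT accurateCast('123', 'UInt8'); -- returns 123
  SELECT accurateCast('-1', 'UInt8');  -- throws instead of silently producing a wrong value
  ```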
* Fixed CNF simplification, in case any OR group contains mutually exclusive atoms. [#64256](https://github.com/ClickHouse/ClickHouse/pull/64256) ([Eduard Karacharov](https://github.com/korowa)).
* Fix Query Tree size validation. [#64377](https://github.com/ClickHouse/ClickHouse/pull/64377) ([Dmitry Novik](https://github.com/novikd)).
* Fix `Logical error: Bad cast` for `Buffer` table with `PREWHERE`. [#64388](https://github.com/ClickHouse/ClickHouse/pull/64388) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
* Prevent recursive logging in `blob_storage_log` when it's stored on object storage. [#64393](https://github.com/ClickHouse/ClickHouse/pull/64393) ([vdimir](https://github.com/vdimir)).
* Fixed `CREATE TABLE AS` queries for tables with default expressions. [#64455](https://github.com/ClickHouse/ClickHouse/pull/64455) ([Anton Popov](https://github.com/CurtizJ)).
* Fixed `optimize_read_in_order` behaviour for ORDER BY ... NULLS FIRST / LAST on tables with nullable keys. [#64483](https://github.com/ClickHouse/ClickHouse/pull/64483) ([Eduard Karacharov](https://github.com/korowa)).
* Fix the `Expression nodes list expected 1 projection names` and `Unknown expression or identifier` errors for queries with aliases to `GLOBAL IN`. [#64517](https://github.com/ClickHouse/ClickHouse/pull/64517) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
* Fix an error `Cannot find column` in distributed queries with constant CTE in the `GROUP BY` key. [#64519](https://github.com/ClickHouse/ClickHouse/pull/64519) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
* Fix the crash loop when restoring from backup is blocked by creating an MV with a definer that hasn't been restored yet. [#64595](https://github.com/ClickHouse/ClickHouse/pull/64595) ([pufit](https://github.com/pufit)).
* Fix the output of function `formatDateTimeInJodaSyntax` when a formatter generates an uneven number of characters and the last character is `0`. For example, `SELECT formatDateTimeInJodaSyntax(toDate('2012-05-29'), 'D')` now correctly returns `150` instead of previously `15`. [#64614](https://github.com/ClickHouse/ClickHouse/pull/64614) ([LiuNeng](https://github.com/liuneng1994)).
* Do not rewrite aggregation if `-If` combinator is already used. [#64638](https://github.com/ClickHouse/ClickHouse/pull/64638) ([Dmitry Novik](https://github.com/novikd)).
* Fix type inference for float (in case of small buffer, i.e. `--max_read_buffer_size 1`). [#64641](https://github.com/ClickHouse/ClickHouse/pull/64641) ([Azat Khuzhin](https://github.com/azat)).
* Fix bug which could lead to non-working TTLs with expressions. [#64694](https://github.com/ClickHouse/ClickHouse/pull/64694) ([alesapin](https://github.com/alesapin)).
* Fix removing the `WHERE` and `PREWHERE` expressions, which are always true (for the new analyzer). [#64695](https://github.com/ClickHouse/ClickHouse/pull/64695) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
* Fixed excessive part elimination by token-based text indexes (`ngrambf` , `full_text`) when filtering by result of `startsWith`, `endsWith`, `match`, `multiSearchAny`. [#64720](https://github.com/ClickHouse/ClickHouse/pull/64720) ([Eduard Karacharov](https://github.com/korowa)).
* Fix incorrect behaviour of ANSI CSI escaping in the `UTF8::computeWidth` function. [#64756](https://github.com/ClickHouse/ClickHouse/pull/64756) ([Shaun Struwig](https://github.com/Blargian)).
* Fix a case of incorrect removal of `ORDER BY` / `LIMIT BY` across subqueries. [#64766](https://github.com/ClickHouse/ClickHouse/pull/64766) ([Raúl Marín](https://github.com/Algunenano)).
* Fix (experimental) unequal join with subqueries for sets which are in the mixed join conditions. [#64775](https://github.com/ClickHouse/ClickHouse/pull/64775) ([lgbo](https://github.com/lgbo-ustc)).
* Fix crash in a local cache over `plain_rewritable` disk. [#64778](https://github.com/ClickHouse/ClickHouse/pull/64778) ([Julia Kartseva](https://github.com/jkartseva)).
* Keeper fix: return correct value for `zk_latest_snapshot_size` in `mntr` command. [#64784](https://github.com/ClickHouse/ClickHouse/pull/64784) ([Antonio Andelic](https://github.com/antonio2368)).
* Fix `Cannot find column` in distributed query with `ARRAY JOIN` by `Nested` column. Fixes [#64755](https://github.com/ClickHouse/ClickHouse/issues/64755). [#64801](https://github.com/ClickHouse/ClickHouse/pull/64801) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
* Fix memory leak in slru cache policy. [#64803](https://github.com/ClickHouse/ClickHouse/pull/64803) ([Kseniia Sumarokova](https://github.com/kssenii)).
* Fixed possible incorrect memory tracking in several kinds of queries: queries that read any data from S3, queries via http protocol, asynchronous inserts. [#64844](https://github.com/ClickHouse/ClickHouse/pull/64844) ([Anton Popov](https://github.com/CurtizJ)).
* Fix the `Block structure mismatch` error for queries reading with `PREWHERE` from the materialized view when the materialized view has columns of different types than the source table. Fixes [#64611](https://github.com/ClickHouse/ClickHouse/issues/64611). [#64855](https://github.com/ClickHouse/ClickHouse/pull/64855) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
* Fix rare crash when table has TTL with subquery + database replicated + parallel replicas + analyzer. It's really rare, but please don't use TTLs with subqueries. [#64858](https://github.com/ClickHouse/ClickHouse/pull/64858) ([alesapin](https://github.com/alesapin)).
* Fix duplicating `Delete` events in `blob_storage_log` in case of large batch to delete. [#64924](https://github.com/ClickHouse/ClickHouse/pull/64924) ([vdimir](https://github.com/vdimir)).
* Fixed `Session moved to another server` error from [Zoo]Keeper that might happen after server startup when the config has includes from [Zoo]Keeper. [#64986](https://github.com/ClickHouse/ClickHouse/pull/64986) ([Alexander Tokmakov](https://github.com/tavplubix)).
* Fix `ALTER MODIFY COMMENT` query that was broken for parameterized VIEWs in https://github.com/ClickHouse/ClickHouse/pull/54211. [#65031](https://github.com/ClickHouse/ClickHouse/pull/65031) ([Nikolay Degterinsky](https://github.com/evillique)).
* Fix `host_id` in DatabaseReplicated when `cluster_secure_connection` parameter is enabled. Previously all the connections within the cluster created by DatabaseReplicated were not secure, even if the parameter was enabled. [#65054](https://github.com/ClickHouse/ClickHouse/pull/65054) ([Nikolay Degterinsky](https://github.com/evillique)).
* Fix the `Not-ready Set` error after the `PREWHERE` optimization for StorageMerge. [#65057](https://github.com/ClickHouse/ClickHouse/pull/65057) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
* Avoid writing to finalized buffer in File-like storages. [#65063](https://github.com/ClickHouse/ClickHouse/pull/65063) ([Kruglov Pavel](https://github.com/Avogar)).
* Fix possible infinite query duration in case of cyclic aliases. Fixes [#64849](https://github.com/ClickHouse/ClickHouse/issues/64849). [#65081](https://github.com/ClickHouse/ClickHouse/pull/65081) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
* Fix the `Unknown expression identifier` error for remote queries with `INTERPOLATE (alias)` (new analyzer). Fixes [#64636](https://github.com/ClickHouse/ClickHouse/issues/64636). [#65090](https://github.com/ClickHouse/ClickHouse/pull/65090) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
* Fix pushing arithmetic operations out of aggregation. In the new analyzer, optimization was applied only once. [#65104](https://github.com/ClickHouse/ClickHouse/pull/65104) ([Dmitry Novik](https://github.com/novikd)).
* Fix aggregate function name rewriting in the new analyzer. [#65110](https://github.com/ClickHouse/ClickHouse/pull/65110) ([Dmitry Novik](https://github.com/novikd)).
* Respond with 5xx instead of 200 OK in case of receive timeout while reading (parts of) the request body from the client socket. [#65118](https://github.com/ClickHouse/ClickHouse/pull/65118) ([Julian Maicher](https://github.com/jmaicher)).
* Fix possible crash for hedged requests. [#65206](https://github.com/ClickHouse/ClickHouse/pull/65206) ([Azat Khuzhin](https://github.com/azat)).
* Fix a bug in short-circuit evaluation of `hashed` and `hashed_array` dictionaries that could read an uninitialized number, leading to various errors. [#65256](https://github.com/ClickHouse/ClickHouse/pull/65256) ([jsc0218](https://github.com/jsc0218)).
* Ensure that the type of the constant (the `IN` operator's second parameter) is always visible during the `IN` operator's type conversion. Otherwise, losing type information could cause some conversions to fail, such as the conversion from DateTime to Date. This fixes ([#64487](https://github.com/ClickHouse/ClickHouse/issues/64487)). [#65315](https://github.com/ClickHouse/ClickHouse/pull/65315) ([pn](https://github.com/chloro-pn)).

#### Build/Testing/Packaging Improvement
* Add support for LLVM XRay. [#64592](https://github.com/ClickHouse/ClickHouse/pull/64592) [#64837](https://github.com/ClickHouse/ClickHouse/pull/64837) ([Tomer Shafir](https://github.com/tomershafir)).
* Unite s3/hdfs/azure storage implementations into a single class working with IObjectStorage. Same for `*Cluster`, data lakes and Queue storages. [#59767](https://github.com/ClickHouse/ClickHouse/pull/59767) ([Kseniia Sumarokova](https://github.com/kssenii)).
* Refactor data part writer to remove dependencies on MergeTreeData and DataPart. [#63620](https://github.com/ClickHouse/ClickHouse/pull/63620) ([Alexander Gololobov](https://github.com/davenger)).
* Refactor `KeyCondition` and key analysis to improve PartitionPruner and trivial count optimization. This is separated from [#60463](https://github.com/ClickHouse/ClickHouse/issues/60463) . [#61459](https://github.com/ClickHouse/ClickHouse/pull/61459) ([Amos Bird](https://github.com/amosbird)).
* Introduce assertions to verify all functions are called with columns of the right size. [#63723](https://github.com/ClickHouse/ClickHouse/pull/63723) ([Raúl Marín](https://github.com/Algunenano)).
* Make `network` service be required when using the `rc` init script to start the ClickHouse server daemon. [#60650](https://github.com/ClickHouse/ClickHouse/pull/60650) ([Chun-Sheng, Li](https://github.com/peter279k)).
* Reduce the size of some slow tests. [#64387](https://github.com/ClickHouse/ClickHouse/pull/64387) [#64452](https://github.com/ClickHouse/ClickHouse/pull/64452) ([Raúl Marín](https://github.com/Algunenano)).
* Replay ZooKeeper logs using keeper-bench. [#62481](https://github.com/ClickHouse/ClickHouse/pull/62481) ([Antonio Andelic](https://github.com/antonio2368)).

### <a id="245"></a> ClickHouse release 24.5, 2024-05-30

#### Backward Incompatible Change
* Renamed "inverted indexes" to "full-text indexes" which is a less technical / more user-friendly name. This also changes internal table metadata and breaks tables with existing (experimental) inverted indexes. Please make sure to drop such indexes before upgrade and re-create them after upgrade. [#62884](https://github.com/ClickHouse/ClickHouse/pull/62884) ([Robert Schulze](https://github.com/rschu1ze)).
* Usage of the functions `neighbor`, `runningAccumulate`, `runningDifferenceStartingWithFirstValue`, and `runningDifference` is deprecated (because they are error-prone). Proper window functions should be used instead. To enable them back, set `allow_deprecated_error_prone_window_functions = 1` or set `compatibility = '24.4'` or lower. [#63132](https://github.com/ClickHouse/ClickHouse/pull/63132) ([Nikita Taranov](https://github.com/nickitat)).
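
  If you still rely on these functions, a minimal sketch of opting back in:

  ```sql
  SET allow_deprecated_error_prone_window_functions = 1;
  SELECT number, neighbor(number, 1) FROM numbers(3);
  ```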
* Queries from `system.columns` will work faster if there is a large number of columns, but many databases or tables are not granted for `SHOW TABLES`. Note that in previous versions, if you granted `SHOW COLUMNS` on individual columns without granting `SHOW TABLES` on the corresponding tables, the `system.columns` table would show these columns, but in the new version it will skip the table entirely. The trace log messages "Access granted" and "Access denied", which slowed down queries, have been removed. [#63439](https://github.com/ClickHouse/ClickHouse/pull/63439) ([Alexey Milovidov](https://github.com/alexey-milovidov)).

#### New Feature
* Adds the `Form` format to read/write a single record in the `application/x-www-form-urlencoded` format. [#60199](https://github.com/ClickHouse/ClickHouse/pull/60199) ([Shaun Struwig](https://github.com/Blargian)).
* Added possibility to compress in CROSS JOIN. [#60459](https://github.com/ClickHouse/ClickHouse/pull/60459) ([p1rattttt](https://github.com/p1rattttt)).
* Added possibility to do `CROSS JOIN` in temporary files if the size exceeds limits. [#63432](https://github.com/ClickHouse/ClickHouse/pull/63432) ([p1rattttt](https://github.com/p1rattttt)).
* Support JOIN with inequality conditions that involve columns from both the left and right tables, e.g. `t1.y < t2.y`. To enable, `SET allow_experimental_join_condition = 1`. [#60920](https://github.com/ClickHouse/ClickHouse/pull/60920) ([lgbo](https://github.com/lgbo-ustc)).
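
  A minimal sketch, assuming hypothetical tables `t1` and `t2` with columns `key` and `y`:

  ```sql
  SET allow_experimental_join_condition = 1;
  SELECT *
  FROM t1
  JOIN t2 ON t1.key = t2.key AND t1.y < t2.y;
  ```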
* Maps can now have `Float32`, `Float64`, `Array(T)`, `Map(K, V)` and `Tuple(T1, T2, ...)` as keys. Closes [#54537](https://github.com/ClickHouse/ClickHouse/issues/54537). [#59318](https://github.com/ClickHouse/ClickHouse/pull/59318) ([李扬](https://github.com/taiyang-li)).
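
  For illustration:

  ```sql
  -- Array and Tuple keys are now accepted.
  SELECT map([1, 2], 'a', [3], 'b') AS array_keys,
         map((1, 'x'), 10, (2, 'y'), 20) AS tuple_keys;
  ```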
* Introduce bulk loading to `EmbeddedRocksDB` by creating and ingesting SST files instead of relying on RocksDB's built-in memtable. This helps increase import speed, especially for long-running INSERT queries to StorageEmbeddedRocksDB tables. Also, introduce `EmbeddedRocksDB` table settings. [#59163](https://github.com/ClickHouse/ClickHouse/pull/59163) [#63324](https://github.com/ClickHouse/ClickHouse/pull/63324) ([Duc Canh Le](https://github.com/canhld94)).
* Users can now parse CRLF line endings in the TSV format using the setting `input_format_tsv_crlf_end_of_line`. Closes [#56257](https://github.com/ClickHouse/ClickHouse/issues/56257). [#59747](https://github.com/ClickHouse/ClickHouse/pull/59747) ([Shaun Struwig](https://github.com/Blargian)).
* A new setting `input_format_force_null_for_omitted_fields` that forces NULL values for omitted fields. [#60887](https://github.com/ClickHouse/ClickHouse/pull/60887) ([Constantine Peresypkin](https://github.com/pkit)).
* Previously, the S3 storage and the `s3` table function didn't support selecting from archive files such as tarballs, zip and 7z. Now they allow iterating over files inside archives in S3. [#62259](https://github.com/ClickHouse/ClickHouse/pull/62259) ([Daniil Ivanik](https://github.com/divanik)).
* Support for conditional function `clamp`. [#62377](https://github.com/ClickHouse/ClickHouse/pull/62377) ([skyoct](https://github.com/skyoct)).
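
  For example:

  ```sql
  SELECT clamp(5, 1, 3) AS above, clamp(0, 1, 3) AS below, clamp(2, 1, 3) AS inside;
  -- returns 3, 1, 2
  ```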
* Add `NPy` output format. [#62430](https://github.com/ClickHouse/ClickHouse/pull/62430) ([豪肥肥](https://github.com/HowePa)).
* `Raw` format as a synonym for `TSVRaw`. [#63394](https://github.com/ClickHouse/ClickHouse/pull/63394) ([Unalian](https://github.com/Unalian)).
* Added a new SQL function `generateUUIDv7` to generate version 7 UUIDs aka. timestamp-based UUIDs with random component. Also added a new function `UUIDToNum` to extract bytes from a UUID and a new function `UUIDv7ToDateTime` to extract timestamp component from a UUID version 7. [#62852](https://github.com/ClickHouse/ClickHouse/pull/62852) ([Alexey Petrunyaka](https://github.com/pet74alex)).
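
  A minimal sketch combining the new functions:

  ```sql
  SELECT generateUUIDv7() AS uuid, UUIDv7ToDateTime(uuid) AS ts;
  ```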
* On Linux and MacOS, if the program has stdout redirected to a file with a compression extension, use the corresponding compression method instead of nothing (making it behave similarly to `INTO OUTFILE`). [#63662](https://github.com/ClickHouse/ClickHouse/pull/63662) ([v01dXYZ](https://github.com/v01dXYZ)).
* Change warning on high number of attached tables to differentiate tables, views and dictionaries. [#64180](https://github.com/ClickHouse/ClickHouse/pull/64180) ([Francisco J. Jurado Moreno](https://github.com/Beetelbrox)).
* Provide support for `azureBlobStorage` function in ClickHouse server to use Azure Workload identity to authenticate against Azure blob storage. If `use_workload_identity` parameter is set in config, [workload identity](https://github.com/Azure/azure-sdk-for-cpp/tree/main/sdk/identity/azure-identity#authenticate-azure-hosted-applications) is used for authentication. [#57881](https://github.com/ClickHouse/ClickHouse/pull/57881) ([Vinay Suryadevara](https://github.com/vinay92-ch)).
* Add TTL information in the `system.parts_columns` table. [#63200](https://github.com/ClickHouse/ClickHouse/pull/63200) ([litlig](https://github.com/litlig)).

#### Experimental Features
* Implement `Dynamic` data type that allows to store values of any type inside it without knowing all of them in advance. `Dynamic` type is available under a setting `allow_experimental_dynamic_type`. Reference: [#54864](https://github.com/ClickHouse/ClickHouse/issues/54864). [#63058](https://github.com/ClickHouse/ClickHouse/pull/63058) ([Kruglov Pavel](https://github.com/Avogar)).
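
  A minimal sketch (the table name is hypothetical):

  ```sql
  SET allow_experimental_dynamic_type = 1;
  CREATE TABLE dyn_demo (d Dynamic) ENGINE = Memory;
  INSERT INTO dyn_demo VALUES (42), ('hello'), ([1, 2, 3]);
  SELECT d, dynamicType(d) FROM dyn_demo;
  ```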
* Allowed to create `MaterializedMySQL` database without connection to MySQL. [#63397](https://github.com/ClickHouse/ClickHouse/pull/63397) ([Kirill](https://github.com/kirillgarbar)).
* Automatically mark a replica of Replicated database as lost and start recovery if some DDL task fails more than `max_retries_before_automatic_recovery` (100 by default) times in a row with the same error. Also, fixed a bug that could cause skipping DDL entries when an exception is thrown during an early stage of entry execution. [#63549](https://github.com/ClickHouse/ClickHouse/pull/63549) ([Alexander Tokmakov](https://github.com/tavplubix)).
* Account failed files in `s3queue_tracked_file_ttl_sec` and `s3queue_tracked_files_limit` for `StorageS3Queue`. [#63638](https://github.com/ClickHouse/ClickHouse/pull/63638) ([Kseniia Sumarokova](https://github.com/kssenii)).

#### Performance Improvement
* Less contention in the filesystem cache (part 4). Allow keeping the filesystem cache below its size limit by doing additional eviction in the background (controlled by `keep_free_space_size(elements)_ratio`). This releases pressure from space reservation for queries (the `tryReserve` method). It is also done in a lock-free way as much as possible, so it should not block normal cache usage. [#61250](https://github.com/ClickHouse/ClickHouse/pull/61250) ([Kseniia Sumarokova](https://github.com/kssenii)).
* Skip merging of newly created projection blocks during `INSERT`-s. [#59405](https://github.com/ClickHouse/ClickHouse/pull/59405) ([Nikita Taranov](https://github.com/nickitat)).
* Process `...UTF8` string functions in plain ASCII mode if the input strings are all ASCII characters. Inspired by https://github.com/apache/doris/pull/29799. Overall speedup of 1.07x-1.62x. Note that peak memory usage has decreased in some cases. [#61632](https://github.com/ClickHouse/ClickHouse/pull/61632) ([李扬](https://github.com/taiyang-li)).
* Improved performance of selection (`{}`) globs in StorageS3. [#62120](https://github.com/ClickHouse/ClickHouse/pull/62120) ([Andrey Zvonov](https://github.com/zvonand)).
* Fixed `HostResolver` keeping each IP address several times. If a remote host has several IPs and, for some reason (for example, firewall rules), access is allowed on some IPs and forbidden on others, only the first record of the forbidden IPs was marked as failed, so on each try those IPs had a chance to be chosen (and to fail again). Moreover, the DNS cache was dropped every 120 seconds, after which the IPs could be chosen again. [#62652](https://github.com/ClickHouse/ClickHouse/pull/62652) ([Anton Ivashkin](https://github.com/ianton-ru)).
* Add a new configuration `prefer_merge_sort_block_bytes` to control the memory usage and speed up sorting up to 2 times when merging, when there are many columns. [#62904](https://github.com/ClickHouse/ClickHouse/pull/62904) ([LiuNeng](https://github.com/liuneng1994)).
* `clickhouse-local` will start faster. In previous versions, it was not deleting temporary directories by mistake. Now it will. This closes [#62941](https://github.com/ClickHouse/ClickHouse/issues/62941). [#63074](https://github.com/ClickHouse/ClickHouse/pull/63074) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Micro-optimizations for the new analyzer. [#63429](https://github.com/ClickHouse/ClickHouse/pull/63429) ([Raúl Marín](https://github.com/Algunenano)).
* Index analysis will work if `DateTime` is compared to `DateTime64`. This closes [#63441](https://github.com/ClickHouse/ClickHouse/issues/63441). [#63443](https://github.com/ClickHouse/ClickHouse/pull/63443) [#63532](https://github.com/ClickHouse/ClickHouse/pull/63532) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Speed up indices of type `set` a little (around 1.5 times) by removing garbage. [#64098](https://github.com/ClickHouse/ClickHouse/pull/64098) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Remove copying data when writing to the filesystem cache. [#63401](https://github.com/ClickHouse/ClickHouse/pull/63401) ([Kseniia Sumarokova](https://github.com/kssenii)).
* Now backups with azure blob storage will use multicopy. [#64116](https://github.com/ClickHouse/ClickHouse/pull/64116) ([alesapin](https://github.com/alesapin)).
* Allow to use native copy for azure even with different containers. [#64154](https://github.com/ClickHouse/ClickHouse/pull/64154) ([alesapin](https://github.com/alesapin)).
* Finally enable native copy for azure. [#64182](https://github.com/ClickHouse/ClickHouse/pull/64182) ([alesapin](https://github.com/alesapin)).

#### Improvement
* Allow using `clickhouse-local` and its shortcuts `clickhouse` and `ch` with a query or queries file as a positional argument. Examples: `ch "SELECT 1"`, `ch --param_test Hello "SELECT {test:String}"`, `ch query.sql`. This closes [#62361](https://github.com/ClickHouse/ClickHouse/issues/62361). [#63081](https://github.com/ClickHouse/ClickHouse/pull/63081) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Enable plain_rewritable metadata for local and Azure (azure_blob_storage) object storages. [#63365](https://github.com/ClickHouse/ClickHouse/pull/63365) ([Julia Kartseva](https://github.com/jkartseva)).
* Support English-style Unicode quotes, e.g. “Hello”, ‘world’. This is questionable in general but helpful when you type your query in a word processor, such as Google Docs. This closes [#58634](https://github.com/ClickHouse/ClickHouse/issues/58634). [#63381](https://github.com/ClickHouse/ClickHouse/pull/63381) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
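
  For example, a string literal in English-style quotes is now accepted:

  ```sql
  SELECT ‘Hello, world’ AS greeting;
  ```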
* Allow trailing commas in the columns list in the INSERT query. For example, `INSERT INTO test (a, b, c, ) VALUES ...`. [#63803](https://github.com/ClickHouse/ClickHouse/pull/63803) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Better exception messages for the `Regexp` format. [#63804](https://github.com/ClickHouse/ClickHouse/pull/63804) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Allow trailing commas in the `Values` format. For example, this query is allowed: `INSERT INTO test (a, b, c) VALUES (4, 5, 6,);`. [#63810](https://github.com/ClickHouse/ClickHouse/pull/63810) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Make RabbitMQ negatively acknowledge (nack) broken messages. Closes [#45350](https://github.com/ClickHouse/ClickHouse/issues/45350). [#60312](https://github.com/ClickHouse/ClickHouse/pull/60312) ([Kseniia Sumarokova](https://github.com/kssenii)).
* Fix a crash in asynchronous stack unwinding (such as when using the sampling query profiler) while interpreting debug info. This closes [#60460](https://github.com/ClickHouse/ClickHouse/issues/60460). [#60468](https://github.com/ClickHouse/ClickHouse/pull/60468) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Use distinct messages for the S3 'no key' error in the disk and storage cases. [#61108](https://github.com/ClickHouse/ClickHouse/pull/61108) ([Sema Checherinda](https://github.com/CheSema)).
* The progress bar will work for trivial queries with LIMIT from `system.zeros`, `system.zeros_mt` (it already works for `system.numbers` and `system.numbers_mt`), and the `generateRandom` table function. As a bonus, if the total number of records is greater than the `max_rows_to_read` limit, it will throw an exception earlier. This closes [#58183](https://github.com/ClickHouse/ClickHouse/issues/58183). [#61823](https://github.com/ClickHouse/ClickHouse/pull/61823) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Support for "Merge Key" in YAML configurations (this is a weird feature of YAML, please never mind). [#62685](https://github.com/ClickHouse/ClickHouse/pull/62685) ([Azat Khuzhin](https://github.com/azat)).
* Enhance error message when non-deterministic function is used with Replicated source. [#62896](https://github.com/ClickHouse/ClickHouse/pull/62896) ([Grégoire Pineau](https://github.com/lyrixx)).
* Fix interserver secret for Distributed over Distributed from `remote`. [#63013](https://github.com/ClickHouse/ClickHouse/pull/63013) ([Azat Khuzhin](https://github.com/azat)).
* Support `include_from` for YAML files. However, it is better to use `config.d`. [#63106](https://github.com/ClickHouse/ClickHouse/pull/63106) ([Eduard Karacharov](https://github.com/korowa)).
* Keep previous data in terminal after picking from skim suggestions. [#63261](https://github.com/ClickHouse/ClickHouse/pull/63261) ([FlameFactory](https://github.com/FlameFactory)).
* Width of fields (in Pretty formats or the `visibleWidth` function) now correctly ignores ANSI escape sequences. [#63270](https://github.com/ClickHouse/ClickHouse/pull/63270) ([Shaun Struwig](https://github.com/Blargian)).
* Replace the error code `NUMBER_OF_ARGUMENTS_DOESNT_MATCH` with more accurate error codes when appropriate. [#63406](https://github.com/ClickHouse/ClickHouse/pull/63406) ([Yohann Jardin](https://github.com/yohannj)).
* `os_user` and `client_hostname` are now correctly set up for queries for command line suggestions in clickhouse-client. This closes [#63430](https://github.com/ClickHouse/ClickHouse/issues/63430). [#63433](https://github.com/ClickHouse/ClickHouse/pull/63433) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Automatically correct `max_block_size` to the default value if it is zero. [#63587](https://github.com/ClickHouse/ClickHouse/pull/63587) ([Antonio Andelic](https://github.com/antonio2368)).
* Add a `build_id` ALIAS column to `trace_log` to facilitate auto renaming upon detecting binary changes. This addresses [#52086](https://github.com/ClickHouse/ClickHouse/issues/52086). [#63656](https://github.com/ClickHouse/ClickHouse/pull/63656) ([Zimu Li](https://github.com/woodlzm)).
* Enable truncate operation for object storage disks. [#63693](https://github.com/ClickHouse/ClickHouse/pull/63693) ([MikhailBurdukov](https://github.com/MikhailBurdukov)).
* The loading of the keywords list is now dependent on the server revision and is disabled for old versions of ClickHouse server. [#63786](https://github.com/ClickHouse/ClickHouse/pull/63786) ([Nikita Mikhaylov](https://github.com/nikitamikhaylov)).
* ClickHouse disks now read a server setting to obtain the actual metadata format version. [#63831](https://github.com/ClickHouse/ClickHouse/pull/63831) ([Sema Checherinda](https://github.com/CheSema)).
* Disable pretty format restrictions (`output_format_pretty_max_rows`/`output_format_pretty_max_value_width`) when stdout is not TTY. [#63942](https://github.com/ClickHouse/ClickHouse/pull/63942) ([Azat Khuzhin](https://github.com/azat)).
* Exception handling now works when ClickHouse is used inside AWS Lambda. Author: [Alexey Coolnev](https://github.com/acoolnev). [#64014](https://github.com/ClickHouse/ClickHouse/pull/64014) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Throw `CANNOT_DECOMPRESS` instead of `CORRUPTED_DATA` on invalid compressed data passed via HTTP. [#64036](https://github.com/ClickHouse/ClickHouse/pull/64036) ([vdimir](https://github.com/vdimir)).
* A tip for a single large number in Pretty formats now works for Nullable and LowCardinality. This closes [#61993](https://github.com/ClickHouse/ClickHouse/issues/61993). [#64084](https://github.com/ClickHouse/ClickHouse/pull/64084) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Add metrics, logs, and thread names around parts filtering with indices. [#64130](https://github.com/ClickHouse/ClickHouse/pull/64130) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Ignore `allow_suspicious_primary_key` on `ATTACH` and verify on `ALTER`. [#64202](https://github.com/ClickHouse/ClickHouse/pull/64202) ([Azat Khuzhin](https://github.com/azat)).

#### Build/Testing/Packaging Improvement
* ClickHouse is built with clang-18. A lot of new checks from clang-tidy-18 have been enabled. [#60469](https://github.com/ClickHouse/ClickHouse/pull/60469) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Experimentally support loongarch64 as a new platform for ClickHouse. [#63733](https://github.com/ClickHouse/ClickHouse/pull/63733) ([qiangxuhui](https://github.com/qiangxuhui)).
* The Dockerfile is reviewed by the docker official library in https://github.com/docker-library/official-images/pull/15846. [#63400](https://github.com/ClickHouse/ClickHouse/pull/63400) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
* Information about every symbol in every translation unit will be collected in the CI database for every build in the CI. This closes [#63494](https://github.com/ClickHouse/ClickHouse/issues/63494). [#63495](https://github.com/ClickHouse/ClickHouse/pull/63495) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Update Apache Datasketches library. It resolves [#63858](https://github.com/ClickHouse/ClickHouse/issues/63858). [#63923](https://github.com/ClickHouse/ClickHouse/pull/63923) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Enable GRPC support for aarch64 linux while cross-compiling binary. [#64072](https://github.com/ClickHouse/ClickHouse/pull/64072) ([alesapin](https://github.com/alesapin)).
* Fix unwind on SIGSEGV on aarch64 (due to small stack for signal) [#64058](https://github.com/ClickHouse/ClickHouse/pull/64058) ([Azat Khuzhin](https://github.com/azat)).

#### Bug Fix
* Disabled `enable_vertical_final` setting by default. This feature should not be used because it has a bug: [#64543](https://github.com/ClickHouse/ClickHouse/issues/64543). [#64544](https://github.com/ClickHouse/ClickHouse/pull/64544) ([Alexander Tokmakov](https://github.com/tavplubix)).
* Fix making backup when multiple shards are used [#57684](https://github.com/ClickHouse/ClickHouse/pull/57684) ([Vitaly Baranov](https://github.com/vitlibar)).
* Fix passing projections/indexes/primary key from columns list from CREATE query into inner table of MV [#59183](https://github.com/ClickHouse/ClickHouse/pull/59183) ([Azat Khuzhin](https://github.com/azat)).
* Fix boundRatio incorrect merge [#60532](https://github.com/ClickHouse/ClickHouse/pull/60532) ([Tao Wang](https://github.com/wangtZJU)).
* Fix crash when calling some functions on const low-cardinality columns [#61966](https://github.com/ClickHouse/ClickHouse/pull/61966) ([Michael Kolupaev](https://github.com/al13n321)).
* Fix queries with FINAL give wrong result when table does not use adaptive granularity [#62432](https://github.com/ClickHouse/ClickHouse/pull/62432) ([Duc Canh Le](https://github.com/canhld94)).
* Improve detection of cgroups v2 support for memory controllers [#62903](https://github.com/ClickHouse/ClickHouse/pull/62903) ([Robert Schulze](https://github.com/rschu1ze)).
* Fix subsequent use of external tables in client [#62964](https://github.com/ClickHouse/ClickHouse/pull/62964) ([Azat Khuzhin](https://github.com/azat)).
* Fix crash with untuple and unresolved lambda [#63131](https://github.com/ClickHouse/ClickHouse/pull/63131) ([Raúl Marín](https://github.com/Algunenano)).
* Fix premature server listen for connections [#63181](https://github.com/ClickHouse/ClickHouse/pull/63181) ([alesapin](https://github.com/alesapin)).
* Fix intersecting parts when restarting after a DROP PART command [#63202](https://github.com/ClickHouse/ClickHouse/pull/63202) ([Han Fei](https://github.com/hanfei1991)).
* Correctly load SQL security defaults during startup [#63209](https://github.com/ClickHouse/ClickHouse/pull/63209) ([pufit](https://github.com/pufit)).
* JOIN filter push down filter join fix [#63234](https://github.com/ClickHouse/ClickHouse/pull/63234) ([Maksim Kita](https://github.com/kitaisreal)).
* Fix infinite loop in AzureObjectStorage::listObjects [#63257](https://github.com/ClickHouse/ClickHouse/pull/63257) ([Julia Kartseva](https://github.com/jkartseva)).
* CROSS join ignore join_algorithm setting [#63273](https://github.com/ClickHouse/ClickHouse/pull/63273) ([vdimir](https://github.com/vdimir)).
* Fix finalize WriteBufferToFileSegment and StatusFile [#63346](https://github.com/ClickHouse/ClickHouse/pull/63346) ([vdimir](https://github.com/vdimir)).
* Fix logical error during SELECT query after ALTER in rare case [#63353](https://github.com/ClickHouse/ClickHouse/pull/63353) ([alesapin](https://github.com/alesapin)).
* Fix `X-ClickHouse-Timezone` header with `session_timezone` [#63377](https://github.com/ClickHouse/ClickHouse/pull/63377) ([Andrey Zvonov](https://github.com/zvonand)).
* Fix debug assert when using grouping WITH ROLLUP and LowCardinality types [#63398](https://github.com/ClickHouse/ClickHouse/pull/63398) ([Raúl Marín](https://github.com/Algunenano)).
* Small fixes for group_by_use_nulls [#63405](https://github.com/ClickHouse/ClickHouse/pull/63405) ([vdimir](https://github.com/vdimir)).
* Fix backup/restore of projection part in case projection was removed from table metadata, but part still has projection [#63426](https://github.com/ClickHouse/ClickHouse/pull/63426) ([Kseniia Sumarokova](https://github.com/kssenii)).
* Fix mysql dictionary source [#63481](https://github.com/ClickHouse/ClickHouse/pull/63481) ([vdimir](https://github.com/vdimir)).
* Insert QueryFinish on AsyncInsertFlush with no data [#63483](https://github.com/ClickHouse/ClickHouse/pull/63483) ([Raúl Marín](https://github.com/Algunenano)).
* Fix: empty used_dictionaries in system.query_log [#63487](https://github.com/ClickHouse/ClickHouse/pull/63487) ([Eduard Karacharov](https://github.com/korowa)).
* Make `MergeTreePrefetchedReadPool` safer [#63513](https://github.com/ClickHouse/ClickHouse/pull/63513) ([Antonio Andelic](https://github.com/antonio2368)).
* Fix crash on exit with sentry enabled (due to openssl destroyed before sentry) [#63548](https://github.com/ClickHouse/ClickHouse/pull/63548) ([Azat Khuzhin](https://github.com/azat)).
* Fix Array and Map support with Keyed hashing [#63628](https://github.com/ClickHouse/ClickHouse/pull/63628) ([Salvatore Mesoraca](https://github.com/aiven-sal)).
* Fix filter pushdown for Parquet and maybe StorageMerge [#63642](https://github.com/ClickHouse/ClickHouse/pull/63642) ([Michael Kolupaev](https://github.com/al13n321)).
* Prevent conversion to Replicated if zookeeper path already exists [#63670](https://github.com/ClickHouse/ClickHouse/pull/63670) ([Kirill](https://github.com/kirillgarbar)).
* Analyzer: views read only necessary columns [#63688](https://github.com/ClickHouse/ClickHouse/pull/63688) ([Maksim Kita](https://github.com/kitaisreal)).
* Analyzer: Forbid WINDOW redefinition [#63694](https://github.com/ClickHouse/ClickHouse/pull/63694) ([Dmitry Novik](https://github.com/novikd)).
* flatten_nested was broken with the experimental Replicated database. [#63695](https://github.com/ClickHouse/ClickHouse/pull/63695) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
* Fix [#63653](https://github.com/ClickHouse/ClickHouse/issues/63653) [#63722](https://github.com/ClickHouse/ClickHouse/pull/63722) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
* Allow cast from Array(Nothing) to Map(Nothing, Nothing) [#63753](https://github.com/ClickHouse/ClickHouse/pull/63753) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
* Fix ILLEGAL_COLUMN in partial_merge join [#63755](https://github.com/ClickHouse/ClickHouse/pull/63755) ([vdimir](https://github.com/vdimir)).
* Fix: remove redundant distinct with window functions [#63776](https://github.com/ClickHouse/ClickHouse/pull/63776) ([Igor Nikonov](https://github.com/devcrafter)).
* Fix possible crash with SYSTEM UNLOAD PRIMARY KEY [#63778](https://github.com/ClickHouse/ClickHouse/pull/63778) ([Raúl Marín](https://github.com/Algunenano)).
* Fix a query with duplicating cycling alias. [#63791](https://github.com/ClickHouse/ClickHouse/pull/63791) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
* Make `TokenIterator` lazy as it should be [#63801](https://github.com/ClickHouse/ClickHouse/pull/63801) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* Add `endpoint_subpath` S3 URI setting [#63806](https://github.com/ClickHouse/ClickHouse/pull/63806) ([Julia Kartseva](https://github.com/jkartseva)).
* Fix deadlock in `ParallelReadBuffer` [#63814](https://github.com/ClickHouse/ClickHouse/pull/63814) ([Antonio Andelic](https://github.com/antonio2368)).
* JOIN filter push down equivalent columns fix [#63819](https://github.com/ClickHouse/ClickHouse/pull/63819) ([Maksim Kita](https://github.com/kitaisreal)).
* Remove data from all disks after DROP with Lazy database. [#63848](https://github.com/ClickHouse/ClickHouse/pull/63848) ([MikhailBurdukov](https://github.com/MikhailBurdukov)).
* Fix incorrect result when reading from MV with parallel replicas and new analyzer [#63861](https://github.com/ClickHouse/ClickHouse/pull/63861) ([Nikita Taranov](https://github.com/nickitat)).
* Fixes in `find_super_nodes` and `find_big_family` command of keeper-client [#63862](https://github.com/ClickHouse/ClickHouse/pull/63862) ([Alexander Gololobov](https://github.com/davenger)).
* Update lambda execution name [#63864](https://github.com/ClickHouse/ClickHouse/pull/63864) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
* Fix SIGSEGV due to CPU/Real profiler [#63865](https://github.com/ClickHouse/ClickHouse/pull/63865) ([Azat Khuzhin](https://github.com/azat)).
* Fix `EXPLAIN CURRENT TRANSACTION` query [#63926](https://github.com/ClickHouse/ClickHouse/pull/63926) ([Anton Popov](https://github.com/CurtizJ)).
* Fix analyzer: there's turtles all the way down... [#63930](https://github.com/ClickHouse/ClickHouse/pull/63930) ([Yakov Olkhovskiy](https://github.com/yakov-olkhovskiy)).
* Allow certain ALTER TABLE commands for `plain_rewritable` disk [#63933](https://github.com/ClickHouse/ClickHouse/pull/63933) ([Julia Kartseva](https://github.com/jkartseva)).
* Recursive CTE distributed fix [#63939](https://github.com/ClickHouse/ClickHouse/pull/63939) ([Maksim Kita](https://github.com/kitaisreal)).
* Analyzer: Fix COLUMNS resolve [#63962](https://github.com/ClickHouse/ClickHouse/pull/63962) ([Dmitry Novik](https://github.com/novikd)).
* LIMIT BY and skip_unused_shards with analyzer [#63983](https://github.com/ClickHouse/ClickHouse/pull/63983) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
* A fix for some trash (experimental Kusto) [#63992](https://github.com/ClickHouse/ClickHouse/pull/63992) ([Yong Wang](https://github.com/kashwy)).
* Deserialize untrusted binary inputs in a safer way [#64024](https://github.com/ClickHouse/ClickHouse/pull/64024) ([Robert Schulze](https://github.com/rschu1ze)).
* Fix query analysis for queries with the setting `final` = 1 for Distributed tables over tables from other than the MergeTree family. [#64037](https://github.com/ClickHouse/ClickHouse/pull/64037) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).
* Add missing settings to recoverLostReplica [#64040](https://github.com/ClickHouse/ClickHouse/pull/64040) ([Raúl Marín](https://github.com/Algunenano)).
* Fix SQL security access checks with analyzer [#64079](https://github.com/ClickHouse/ClickHouse/pull/64079) ([pufit](https://github.com/pufit)).
* Fix analyzer: only interpolate expression should be used for DAG [#64096](https://github.com/ClickHouse/ClickHouse/pull/64096) ([Yakov Olkhovskiy](https://github.com/yakov-olkhovskiy)).
* Fix azure backup writing multipart blocks by 1 MiB (read buffer size) instead of `max_upload_part_size` (in non-native copy case) [#64117](https://github.com/ClickHouse/ClickHouse/pull/64117) ([Kseniia Sumarokova](https://github.com/kssenii)).
* Correctly fallback during backup copy [#64153](https://github.com/ClickHouse/ClickHouse/pull/64153) ([Antonio Andelic](https://github.com/antonio2368)).
* Prevent LOGICAL_ERROR on CREATE TABLE as Materialized View [#64174](https://github.com/ClickHouse/ClickHouse/pull/64174) ([Raúl Marín](https://github.com/Algunenano)).
* Query Cache: Consider identical queries against different databases as different [#64199](https://github.com/ClickHouse/ClickHouse/pull/64199) ([Robert Schulze](https://github.com/rschu1ze)).
* Ignore `text_log` for Keeper [#64218](https://github.com/ClickHouse/ClickHouse/pull/64218) ([Antonio Andelic](https://github.com/antonio2368)).
* Fix Logical error: Bad cast for Buffer table with prewhere. [#64388](https://github.com/ClickHouse/ClickHouse/pull/64388) ([Nikolai Kochetov](https://github.com/KochetovNicolai)).

### <a id="244"></a> ClickHouse release 24.4, 2024-04-30

#### Upgrade Notes
@@ -506,7 +827,7 @@
* Improve the operation of `sumMapFiltered` with NaN values. NaN values are now placed at the end (instead of randomly) and considered different from any values. `-0` is now also treated as equal to `0`; since 0 values are discarded, `-0` values are discarded too. [#58959](https://github.com/ClickHouse/ClickHouse/pull/58959) ([Raúl Marín](https://github.com/Algunenano)).
* The function `visibleWidth` will behave according to the docs. In previous versions, it simply counted code points after string serialization, like the `lengthUTF8` function, but didn't consider zero-width and combining characters, full-width characters, tabs, and deletes. Now the behavior is changed accordingly. If you want to keep the old behavior, set `function_visible_width_behavior` to `0`, or set `compatibility` to `23.12` or lower. [#59022](https://github.com/ClickHouse/ClickHouse/pull/59022) ([Alexey Milovidov](https://github.com/alexey-milovidov)).
* `Kusto` dialect is disabled until these two bugs are fixed: [#59037](https://github.com/ClickHouse/ClickHouse/issues/59037) and [#59036](https://github.com/ClickHouse/ClickHouse/issues/59036). [#59305](https://github.com/ClickHouse/ClickHouse/pull/59305) ([Alexey Milovidov](https://github.com/alexey-milovidov)). Any attempt to use `Kusto` will result in an exception.
* More efficient implementation of the `FINAL` modifier no longer guarantees preserving the order even if `max_threads = 1`. If you counted on the previous behavior, set `enable_vertical_final` to 0 or `compatibility` to `23.12`.

#### New Feature
* Implement Variant data type that represents a union of other data types. Type `Variant(T1, T2, ..., TN)` means that each row of this type has a value of either type `T1` or `T2` or ... or `TN` or none of them (`NULL` value). Variant type is available under a setting `allow_experimental_variant_type`. Reference: [#54864](https://github.com/ClickHouse/ClickHouse/issues/54864). [#58047](https://github.com/ClickHouse/ClickHouse/pull/58047) ([Kruglov Pavel](https://github.com/Avogar)).

CMakeLists.txt
@@ -122,6 +122,8 @@ add_library(global-libs INTERFACE)
 
 include (cmake/sanitize.cmake)
 
+include (cmake/xray_instrumentation.cmake)
+
 option(ENABLE_COLORED_BUILD "Enable colors in compiler output" ON)
 
 set (CMAKE_COLOR_MAKEFILE ${ENABLE_COLORED_BUILD}) # works only for the makefile generator
@@ -208,8 +210,6 @@ option(OMIT_HEAVY_DEBUG_SYMBOLS
     "Do not generate debugger info for heavy modules (ClickHouse functions and dictionaries, some contrib)"
     ${OMIT_HEAVY_DEBUG_SYMBOLS_DEFAULT})
 
-option(USE_DEBUG_HELPERS "Enable debug helpers" ${USE_DEBUG_HELPERS})
-
 option(BUILD_STANDALONE_KEEPER "Build keeper as small standalone binary" OFF)
 if (NOT BUILD_STANDALONE_KEEPER)
     option(CREATE_KEEPER_SYMLINK "Create symlink for clickhouse-keeper to main server binary" ON)
@@ -319,7 +319,6 @@ endif()
 # Disable floating-point expression contraction in order to get consistent floating point calculation results across platforms
 set (COMPILER_FLAGS "${COMPILER_FLAGS} -ffp-contract=off")
 
-# Our built-in unwinder only supports DWARF version up to 4.
 set (DEBUG_INFO_FLAGS "-g")
 
 # Disable omit frame pointer compiler optimization using -fno-omit-frame-pointer
@@ -333,15 +332,15 @@ endif()
 
 set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${COMPILER_FLAGS} ${CMAKE_CXX_FLAGS_ADD}")
 set (CMAKE_CXX_FLAGS_RELWITHDEBINFO "${CMAKE_CXX_FLAGS_RELWITHDEBINFO} -O3 ${DEBUG_INFO_FLAGS} ${CMAKE_CXX_FLAGS_ADD}")
-set (CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -O0 ${DEBUG_INFO_FLAGS} ${CMAKE_CXX_FLAGS_ADD}")
+set (CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -Og ${DEBUG_INFO_FLAGS} ${CMAKE_CXX_FLAGS_ADD}")
 
 set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${COMPILER_FLAGS} ${CMAKE_C_FLAGS_ADD}")
 set (CMAKE_C_FLAGS_RELWITHDEBINFO "${CMAKE_C_FLAGS_RELWITHDEBINFO} -O3 ${DEBUG_INFO_FLAGS} ${CMAKE_C_FLAGS_ADD}")
-set (CMAKE_C_FLAGS_DEBUG "${CMAKE_C_FLAGS_DEBUG} -O0 ${DEBUG_INFO_FLAGS} ${CMAKE_C_FLAGS_ADD}")
+set (CMAKE_C_FLAGS_DEBUG "${CMAKE_C_FLAGS_DEBUG} -Og ${DEBUG_INFO_FLAGS} ${CMAKE_C_FLAGS_ADD}")
 
 set (CMAKE_ASM_FLAGS "${CMAKE_ASM_FLAGS} ${COMPILER_FLAGS} ${CMAKE_ASM_FLAGS_ADD}")
 set (CMAKE_ASM_FLAGS_RELWITHDEBINFO "${CMAKE_ASM_FLAGS_RELWITHDEBINFO} -O3 ${DEBUG_INFO_FLAGS} ${CMAKE_ASM_FLAGS_ADD}")
-set (CMAKE_ASM_FLAGS_DEBUG "${CMAKE_ASM_FLAGS_DEBUG} -O0 ${DEBUG_INFO_FLAGS} ${CMAKE_ASM_FLAGS_ADD}")
+set (CMAKE_ASM_FLAGS_DEBUG "${CMAKE_ASM_FLAGS_DEBUG} -Og ${DEBUG_INFO_FLAGS} ${CMAKE_ASM_FLAGS_ADD}")
|
||||||
|
|
||||||
if (OS_DARWIN)
|
if (OS_DARWIN)
|
||||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -stdlib=libc++")
|
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -stdlib=libc++")
|
||||||
@ -399,7 +398,7 @@ option (ENABLE_GWP_ASAN "Enable Gwp-Asan" ON)
|
|||||||
# but GWP-ASan also wants to use mmap frequently,
|
# but GWP-ASan also wants to use mmap frequently,
|
||||||
# and due to a large number of memory mappings,
|
# and due to a large number of memory mappings,
|
||||||
# it does not work together well.
|
# it does not work together well.
|
||||||
if ((NOT OS_LINUX AND NOT OS_ANDROID) OR (CMAKE_BUILD_TYPE_UC STREQUAL "DEBUG"))
|
if ((NOT OS_LINUX AND NOT OS_ANDROID) OR (CMAKE_BUILD_TYPE_UC STREQUAL "DEBUG") OR SANITIZE)
|
||||||
set(ENABLE_GWP_ASAN OFF)
|
set(ENABLE_GWP_ASAN OFF)
|
||||||
endif ()
|
endif ()
|
||||||
|
|
||||||
README.md (10 changed lines)
@@ -34,20 +34,16 @@ curl https://clickhouse.com/ | sh

 Every month we get together with the community (users, contributors, customers, those interested in learning more about ClickHouse) to discuss what is coming in the latest release. If you are interested in sharing what you've built on ClickHouse, let us know.

-* [v24.5 Community Call](https://clickhouse.com/company/events/v24-5-community-release-call) - May 30
+* [v24.7 Community Call](https://clickhouse.com/company/events/v24-7-community-release-call) - Jul 30

 ## Upcoming Events

 Keep an eye out for upcoming meetups and events around the world. Somewhere else you want us to be? Please feel free to reach out to tyler `<at>` clickhouse `<dot>` com. You can also peruse [ClickHouse Events](https://clickhouse.com/company/news-events) for a list of all upcoming trainings, meetups, speaking engagements, etc.

-* [ClickHouse Happy Hour @ Tom's Watch Bar - Los Angeles](https://www.meetup.com/clickhouse-los-angeles-user-group/events/300740584/) - May 22
-* [ClickHouse & Confluent Meetup in Dubai](https://www.meetup.com/clickhouse-dubai-meetup-group/events/299629189/) - May 28
-* [ClickHouse Meetup in Stockholm](https://www.meetup.com/clickhouse-stockholm-user-group/events/299752651/) - Jun 3
-* [ClickHouse Meetup @ Cloudflare - San Francisco](https://www.meetup.com/clickhouse-silicon-valley-meetup-group/events/300523061/) - Jun 4
-* [ClickHouse (クリックハウス) Meetup Tokyo](https://www.meetup.com/clickhouse-tokyo-user-group/events/300798053/) - Jun 5
-* [ClickHouse Meetup in Amsterdam](https://www.meetup.com/clickhouse-netherlands-user-group/events/300781068/) - Jun 27
 * [ClickHouse Meetup in Paris](https://www.meetup.com/clickhouse-france-user-group/events/300783448/) - Jul 9
+* [ClickHouse Cloud - Live Update Call](https://clickhouse.com/company/events/202407-cloud-update-live) - Jul 9
 * [ClickHouse Meetup @ Ramp - New York City](https://www.meetup.com/clickhouse-new-york-user-group/events/300595845/) - Jul 9
+* [AWS Summit in New York](https://clickhouse.com/company/events/2024-07-awssummit-nyc) - Jul 10
 * [ClickHouse Meetup @ Klaviyo - Boston](https://www.meetup.com/clickhouse-boston-user-group/events/300907870) - Jul 11

 ## Recent Recordings
SECURITY.md (40 changed lines)
@@ -2,22 +2,28 @@
 the file is autogenerated by utils/security-generator/generate_security.py
 -->

-# Security Policy
+# ClickHouse Security Vulnerability Response Policy

-## Security Announcements
-Security fixes will be announced by posting them in the [security changelog](https://clickhouse.com/docs/en/whats-new/security-changelog/).
+## Security Change Log and Support

-## Scope and Supported Versions
+Details regarding security fixes are publicly reported in our [security changelog](https://clickhouse.com/docs/en/whats-new/security-changelog/). A summary of known security vulnerabilities is shown at the bottom of this page.

-The following versions of ClickHouse server are currently being supported with security updates:
+Vulnerability notifications pre-release or during embargo periods are available to open source users and support customers registered for vulnerability alerts. Refer to our [Embargo Policy](#embargo-policy) below.
+
+The following versions of ClickHouse server are currently supported with security updates:

 | Version | Supported |
 |:-|:-|
+| 24.6 | ✔️ |
+| 24.5 | ✔️ |
 | 24.4 | ✔️ |
 | 24.3 | ✔️ |
-| 24.2 | ✔️ |
+| 24.2 | ❌ |
 | 24.1 | ❌ |
-| 23.* | ❌ |
+| 23.12 | ❌ |
+| 23.11 | ❌ |
+| 23.10 | ❌ |
+| 23.9 | ❌ |
 | 23.8 | ✔️ |
 | 23.7 | ❌ |
 | 23.6 | ❌ |
@@ -37,7 +43,7 @@ The following versions of ClickHouse server are currently being supported with s

 We're extremely grateful for security researchers and users that report vulnerabilities to the ClickHouse Open Source Community. All reports are thoroughly investigated by developers.

-To report a potential vulnerability in ClickHouse please send the details about it to [security@clickhouse.com](mailto:security@clickhouse.com). We do not offer any financial rewards for reporting issues to us using this method. Alternatively, you can also submit your findings through our public bug bounty program hosted by [Bugcrowd](https://bugcrowd.com/clickhouse) and be rewarded for it as per the program scope and rules of engagement.
+To report a potential vulnerability in ClickHouse please send the details about it through our public bug bounty program hosted by [Bugcrowd](https://bugcrowd.com/clickhouse) and be rewarded for it as per the program scope and rules of engagement.

 ### When Should I Report a Vulnerability?

@@ -59,3 +65,21 @@ As the security issue moves from triage, to identified fix, to release planning

 A public disclosure date is negotiated by the ClickHouse maintainers and the bug submitter. We prefer to fully disclose the bug as soon as possible once a user mitigation is available. It is reasonable to delay disclosure when the bug or the fix is not yet fully understood, the solution is not well-tested, or for vendor coordination. The timeframe for disclosure is from immediate (especially if it's already publicly known) to 90 days. For a vulnerability with a straightforward mitigation, we expect the report date to disclosure date to be on the order of 7 days.
+
+## Embargo Policy
+
+Open source users and support customers may subscribe to receive alerts during the embargo period by visiting [https://trust.clickhouse.com/?product=clickhouseoss](https://trust.clickhouse.com/?product=clickhouseoss), requesting access and subscribing for alerts. Subscribers agree not to make these notifications public, issue communications, share this information with others, or issue public patches before the disclosure date. Accidental disclosures must be reported immediately to trust@clickhouse.com. Failure to follow this policy or repeated leaks may result in removal from the subscriber list.
+
+Participation criteria:
+1. Be a current open source user or support customer with a valid corporate email domain (no @gmail.com, @azure.com, etc.).
+1. Sign up to the ClickHouse OSS Trust Center at [https://trust.clickhouse.com](https://trust.clickhouse.com).
+1. Accept the ClickHouse Security Vulnerability Response Policy as outlined above.
+1. Subscribe to ClickHouse OSS Trust Center alerts.
+
+Removal criteria:
+1. Members may be removed for failure to follow this policy or repeated leaks.
+1. Members may be removed for bounced messages (mail delivery failure).
+1. Members may unsubscribe at any time.
+
+Notification process:
+ClickHouse will post notifications within our OSS Trust Center and notify subscribers. Subscribers must log in to the Trust Center to download the notification. The notification will include the timeframe for public disclosure.
@@ -86,7 +86,7 @@ public:
     }

     /// Return object into pool. Client must return same object that was borrowed.
-    inline void returnObject(T && object_to_return)
+    void returnObject(T && object_to_return)
     {
         {
             std::lock_guard lock(objects_mutex);
@@ -99,20 +99,20 @@ public:
     }

     /// Max pool size
-    inline size_t maxSize() const
+    size_t maxSize() const
     {
         return max_size;
     }

     /// Allocated objects size by the pool. If allocatedObjectsSize == maxSize then pool is full.
-    inline size_t allocatedObjectsSize() const
+    size_t allocatedObjectsSize() const
     {
         std::lock_guard lock(objects_mutex);
         return allocated_objects_size;
     }

     /// Returns allocatedObjectsSize == maxSize
-    inline bool isFull() const
+    bool isFull() const
     {
         std::lock_guard lock(objects_mutex);
         return allocated_objects_size == max_size;
@@ -120,7 +120,7 @@ public:

     /// Borrowed objects size. If borrowedObjectsSize == allocatedObjectsSize and pool is full.
     /// Then client will wait during borrowObject function call.
-    inline size_t borrowedObjectsSize() const
+    size_t borrowedObjectsSize() const
     {
         std::lock_guard lock(objects_mutex);
         return borrowed_objects_size;
@@ -129,7 +129,7 @@ public:
private:

     template <typename FactoryFunc>
-    inline T allocateObjectForBorrowing(const std::unique_lock<std::mutex> &, FactoryFunc && func)
+    T allocateObjectForBorrowing(const std::unique_lock<std::mutex> &, FactoryFunc && func)
     {
         ++allocated_objects_size;
         ++borrowed_objects_size;
@@ -137,7 +137,7 @@ private:
         return std::forward<FactoryFunc>(func)();
     }

-    inline T borrowFromObjects(const std::unique_lock<std::mutex> &)
+    T borrowFromObjects(const std::unique_lock<std::mutex> &)
     {
         T dst;
         detail::moveOrCopyIfThrow(std::move(objects.back()), dst);
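The only change in this header is dropping redundant `inline` specifiers from methods defined in-class (where `inline` is implicit anyway). For orientation, a minimal usage sketch of the pool follows; the `borrowObject(dest, factory)` counterpart and the include path shown here are assumptions based on the method comments above, not confirmed signatures:

// #include <Common/BorrowedObjectPool.h>  // hypothetical path; adjust to the real header
#include <iostream>
#include <string>

void example(BorrowedObjectPool<std::string> & pool)
{
    // Borrow a buffer; the factory is invoked only when the pool has spare
    // capacity and no free object (assumed signature).
    std::string buffer;
    pool.borrowObject(buffer, [] { return std::string(); });

    buffer = "scratch data";
    std::cout << "allocated: " << pool.allocatedObjectsSize()
              << ", full: " << pool.isFull() << '\n';

    // Per the comment in the diff, clients must hand back the same object they borrowed.
    pool.returnObject(std::move(buffer));
}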
@@ -34,15 +34,6 @@ set (SRCS
     throwError.cpp
 )

-if (USE_DEBUG_HELPERS)
-    get_target_property(MAGIC_ENUM_INCLUDE_DIR ch_contrib::magic_enum INTERFACE_INCLUDE_DIRECTORIES)
-    # CMake generator expression will do insane quoting when it encounters special character like quotes, spaces, etc.
-    # Prefixing "SHELL:" will force it to use the original text.
-    set (INCLUDE_DEBUG_HELPERS "SHELL:-I\"${MAGIC_ENUM_INCLUDE_DIR}\" -include \"${ClickHouse_SOURCE_DIR}/base/base/iostream_debug_helpers.h\"")
-    # Use generator expression as we don't want to pollute CMAKE_CXX_FLAGS, which will interfere with CMake check system.
-    add_compile_options($<$<COMPILE_LANGUAGE:CXX>:${INCLUDE_DEBUG_HELPERS}>)
-endif ()
-
 add_library (common ${SRCS})

 if (WITH_COVERAGE)
@@ -44,6 +44,10 @@ concept is_over_big_int =
     || std::is_same_v<T, UInt256>
     || std::is_same_v<T, Decimal128>
     || std::is_same_v<T, Decimal256>;
+
+template <class T>
+concept is_over_big_decimal = is_decimal<T> && is_over_big_int<typename T::NativeType>;
+
 }

 template <> struct is_signed<DB::Decimal32> { static constexpr bool value = true; };
@@ -32,7 +32,7 @@ constexpr void static_for(F && f)
 template <is_enum T>
 struct fmt::formatter<T> : fmt::formatter<std::string_view>
 {
-    constexpr auto format(T value, auto& format_context)
+    constexpr auto format(T value, auto& format_context) const
     {
         return formatter<string_view>::format(magic_enum::enum_name(value), format_context);
     }
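The only change here is marking `format()` as `const`, which newer fmt releases require of formatter specializations. A standalone sketch of what this catch-all formatter enables (using `magic_enum` directly; the `Color` enum is made up for illustration):

#include <fmt/format.h>
#include <magic_enum.hpp>
#include <string_view>
#include <type_traits>

enum class Color { Red, Green, Blue };

/// Standalone analogue of the formatter in the diff: print any enum by its name.
/// Note the const on format(), which is exactly what the change above adds.
template <typename T>
    requires std::is_enum_v<T>
struct fmt::formatter<T> : fmt::formatter<std::string_view>
{
    constexpr auto format(T value, auto & ctx) const
    {
        return fmt::formatter<std::string_view>::format(magic_enum::enum_name(value), ctx);
    }
};

int main()
{
    fmt::print("{}\n", Color::Green); // prints "Green"
}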
@@ -12,6 +12,8 @@
 #include <base/types.h>
 #include <base/unaligned.h>
 #include <base/simd.h>
+#include <fmt/core.h>
+#include <fmt/ostream.h>

 #include <city.h>

@@ -376,3 +378,5 @@ namespace PackedZeroTraits


 std::ostream & operator<<(std::ostream & os, const StringRef & str);
+
+template<> struct fmt::formatter<StringRef> : fmt::ostream_formatter {};
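Background for this hunk: since fmt 9, a type's `operator<<` is no longer picked up implicitly by `fmt::format`; the type must opt in via `fmt::ostream_formatter`, which is what the new specialization does for `StringRef`. A minimal sketch of the same pattern with a hypothetical `Point` type standing in for `StringRef`:

#include <fmt/format.h>
#include <fmt/ostream.h>
#include <ostream>

struct Point  // hypothetical stand-in for StringRef
{
    int x, y;
};

std::ostream & operator<<(std::ostream & os, const Point & p)
{
    return os << '(' << p.x << ", " << p.y << ')';
}

/// Opt the type in to fmt formatting through its ostream operator,
/// mirroring the StringRef specialization added above.
template <> struct fmt::formatter<Point> : fmt::ostream_formatter {};

int main()
{
    fmt::print("{}\n", Point{1, 2}); // prints "(1, 2)"
}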
@@ -6,6 +6,9 @@ namespace
 {
     std::string getFQDNOrHostNameImpl()
     {
+#if defined(OS_DARWIN)
+        return Poco::Net::DNS::hostName();
+#else
         try
         {
             return Poco::Net::DNS::thisHost().name();
@@ -14,6 +17,7 @@ namespace
         {
             return Poco::Net::DNS::hostName();
         }
+#endif
     }
 }
@@ -1,187 +0,0 @@
-#pragma once
-
-#include "demangle.h"
-#include "getThreadId.h"
-#include <type_traits>
-#include <tuple>
-#include <iomanip>
-#include <iostream>
-#include <magic_enum.hpp>
-
-/** Usage:
-  *
-  * DUMP(variable...)
-  */
-
-template <typename Out, typename T>
-Out & dumpValue(Out &, T &&);
-
-/// Catch-all case.
-template <int priority, typename Out, typename T>
-requires(priority == -1)
-Out & dumpImpl(Out & out, T &&) // NOLINT(cppcoreguidelines-missing-std-forward)
-{
-    return out << "{...}";
-}
-
-/// An object, that could be output with operator <<.
-template <int priority, typename Out, typename T>
-requires(priority == 0)
-Out & dumpImpl(Out & out, T && x, std::decay_t<decltype(std::declval<Out &>() << std::declval<T>())> * = nullptr) // NOLINT(cppcoreguidelines-missing-std-forward)
-{
-    return out << x;
-}
-
-/// A pointer-like object.
-template <int priority, typename Out, typename T>
-requires(priority == 1
-    /// Protect from the case when operator * do effectively nothing (function pointer).
-    && !std::is_same_v<std::decay_t<T>, std::decay_t<decltype(*std::declval<T>())>>)
-Out & dumpImpl(Out & out, T && x, std::decay_t<decltype(*std::declval<T>())> * = nullptr) // NOLINT(cppcoreguidelines-missing-std-forward)
-{
-    if (!x)
-        return out << "nullptr";
-    return dumpValue(out, *x);
-}
-
-/// Container.
-template <int priority, typename Out, typename T>
-requires(priority == 2)
-Out & dumpImpl(Out & out, T && x, std::decay_t<decltype(std::begin(std::declval<T>()))> * = nullptr) // NOLINT(cppcoreguidelines-missing-std-forward)
-{
-    bool first = true;
-    out << "{";
-    for (const auto & elem : x)
-    {
-        if (first)
-            first = false;
-        else
-            out << ", ";
-        dumpValue(out, elem);
-    }
-    return out << "}";
-}
-
-template <int priority, typename Out, typename T>
-requires(priority == 3 && std::is_enum_v<std::decay_t<T>>)
-Out & dumpImpl(Out & out, T && x) // NOLINT(cppcoreguidelines-missing-std-forward)
-{
-    return out << magic_enum::enum_name(x);
-}
-
-/// string and const char * - output not as container or pointer.
-template <int priority, typename Out, typename T>
-requires(priority == 3 && (std::is_same_v<std::decay_t<T>, std::string> || std::is_same_v<std::decay_t<T>, const char *>))
-Out & dumpImpl(Out & out, T && x) // NOLINT(cppcoreguidelines-missing-std-forward)
-{
-    return out << std::quoted(x);
-}
-
-/// UInt8 - output as number, not char.
-template <int priority, typename Out, typename T>
-requires(priority == 3 && std::is_same_v<std::decay_t<T>, unsigned char>)
-Out & dumpImpl(Out & out, T && x) // NOLINT(cppcoreguidelines-missing-std-forward)
-{
-    return out << int(x);
-}
-
-/// Tuple, pair
-template <size_t N, typename Out, typename T>
-Out & dumpTupleImpl(Out & out, T && x) // NOLINT(cppcoreguidelines-missing-std-forward)
-{
-    if constexpr (N == 0)
-        out << "{";
-    else
-        out << ", ";
-
-    dumpValue(out, std::get<N>(x));
-
-    if constexpr (N + 1 == std::tuple_size_v<std::decay_t<T>>)
-        out << "}";
-    else
-        dumpTupleImpl<N + 1>(out, x);
-
-    return out;
-}
-
-template <int priority, typename Out, typename T>
-requires(priority == 4)
-Out & dumpImpl(Out & out, T && x, std::decay_t<decltype(std::get<0>(std::declval<T>()))> * = nullptr) // NOLINT(cppcoreguidelines-missing-std-forward)
-{
-    return dumpTupleImpl<0>(out, x);
-}
-
-template <int priority, typename Out, typename T>
-Out & dumpDispatchPriorities(Out & out, T && x, std::decay_t<decltype(dumpImpl<priority>(std::declval<Out &>(), std::declval<T>()))> *) // NOLINT(cppcoreguidelines-missing-std-forward)
-{
-    return dumpImpl<priority>(out, x);
-}
-
-// NOLINTNEXTLINE(google-explicit-constructor)
-struct LowPriority { LowPriority(void *) {} };
-
-template <int priority, typename Out, typename T>
-Out & dumpDispatchPriorities(Out & out, T && x, LowPriority) // NOLINT(cppcoreguidelines-missing-std-forward)
-{
-    return dumpDispatchPriorities<priority - 1>(out, x, nullptr);
-}
-
-template <typename Out, typename T>
-Out & dumpValue(Out & out, T && x) // NOLINT(cppcoreguidelines-missing-std-forward)
-{
-    return dumpDispatchPriorities<5>(out, x, nullptr);
-}
-
-template <typename Out, typename T>
-Out & dump(Out & out, const char * name, T && x) // NOLINT(cppcoreguidelines-missing-std-forward)
-{
-    // Dumping string literal, printing name and demangled type is irrelevant.
-    if constexpr (std::is_same_v<const char *, std::decay_t<std::remove_reference_t<T>>>)
-    {
-        const auto name_len = strlen(name);
-        const auto value_len = strlen(x);
-        // `name` is the same as quoted `x`
-        if (name_len > 2 && value_len > 0 && name[0] == '"' && name[name_len - 1] == '"'
-            && strncmp(name + 1, x, std::min(value_len, name_len) - 1) == 0)
-            return out << x;
-    }
-
-    out << demangle(typeid(x).name()) << " " << name << " = ";
-    return dumpValue(out, x) << "; ";
-}
-
-#pragma clang diagnostic ignored "-Wgnu-zero-variadic-macro-arguments"
-
-#define DUMPVAR(VAR) ::dump(std::cerr, #VAR, (VAR));
-#define DUMPHEAD std::cerr << __FILE__ << ':' << __LINE__ << " [ " << getThreadId() << " ] ";
-#define DUMPTAIL std::cerr << '\n';
-
-#define DUMP1(V1) do { DUMPHEAD DUMPVAR(V1) DUMPTAIL } while(0)
-#define DUMP2(V1, V2) do { DUMPHEAD DUMPVAR(V1) DUMPVAR(V2) DUMPTAIL } while(0)
-#define DUMP3(V1, V2, V3) do { DUMPHEAD DUMPVAR(V1) DUMPVAR(V2) DUMPVAR(V3) DUMPTAIL } while(0)
-#define DUMP4(V1, V2, V3, V4) do { DUMPHEAD DUMPVAR(V1) DUMPVAR(V2) DUMPVAR(V3) DUMPVAR(V4) DUMPTAIL } while(0)
-#define DUMP5(V1, V2, V3, V4, V5) do { DUMPHEAD DUMPVAR(V1) DUMPVAR(V2) DUMPVAR(V3) DUMPVAR(V4) DUMPVAR(V5) DUMPTAIL } while(0)
-#define DUMP6(V1, V2, V3, V4, V5, V6) do { DUMPHEAD DUMPVAR(V1) DUMPVAR(V2) DUMPVAR(V3) DUMPVAR(V4) DUMPVAR(V5) DUMPVAR(V6) DUMPTAIL } while(0)
-#define DUMP7(V1, V2, V3, V4, V5, V6, V7) do { DUMPHEAD DUMPVAR(V1) DUMPVAR(V2) DUMPVAR(V3) DUMPVAR(V4) DUMPVAR(V5) DUMPVAR(V6) DUMPVAR(V7) DUMPTAIL } while(0)
-#define DUMP8(V1, V2, V3, V4, V5, V6, V7, V8) do { DUMPHEAD DUMPVAR(V1) DUMPVAR(V2) DUMPVAR(V3) DUMPVAR(V4) DUMPVAR(V5) DUMPVAR(V6) DUMPVAR(V7) DUMPVAR(V8) DUMPTAIL } while(0)
-#define DUMP9(V1, V2, V3, V4, V5, V6, V7, V8, V9) do { DUMPHEAD DUMPVAR(V1) DUMPVAR(V2) DUMPVAR(V3) DUMPVAR(V4) DUMPVAR(V5) DUMPVAR(V6) DUMPVAR(V7) DUMPVAR(V8) DUMPVAR(V9) DUMPTAIL } while(0)
-
-/// https://groups.google.com/forum/#!searchin/kona-dev/variadic$20macro%7Csort:date/kona-dev/XMA-lDOqtlI/GCzdfZsD41sJ
-
-#define VA_NUM_ARGS_IMPL(x1, x2, x3, x4, x5, x6, x7, x8, x9, N, ...) N
-#define VA_NUM_ARGS(...) VA_NUM_ARGS_IMPL(__VA_ARGS__, 9, 8, 7, 6, 5, 4, 3, 2, 1)
-
-#define MAKE_VAR_MACRO_IMPL_CONCAT(PREFIX, NUM_ARGS) PREFIX ## NUM_ARGS
-#define MAKE_VAR_MACRO_IMPL(PREFIX, NUM_ARGS) MAKE_VAR_MACRO_IMPL_CONCAT(PREFIX, NUM_ARGS)
-#define MAKE_VAR_MACRO(PREFIX, ...) MAKE_VAR_MACRO_IMPL(PREFIX, VA_NUM_ARGS(__VA_ARGS__))
-
-#define DUMP(...) MAKE_VAR_MACRO(DUMP, __VA_ARGS__)(__VA_ARGS__)
@@ -1,32 +1,3 @@
-// Based on https://github.com/amdn/itoa and combined with our optimizations
-//
-//=== itoa.cpp - Fast integer to ascii conversion                 --*- C++ -*-//
-//
-// The MIT License (MIT)
-// Copyright (c) 2016 Arturo Martin-de-Nicolas
-//
-// Permission is hereby granted, free of charge, to any person obtaining a copy
-// of this software and associated documentation files (the "Software"), to deal
-// in the Software without restriction, including without limitation the rights
-// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-// copies of the Software, and to permit persons to whom the Software is
-// furnished to do so, subject to the following conditions:
-//
-// The above copyright notice and this permission notice shall be included
-// in all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-// SOFTWARE.
-//===----------------------------------------------------------------------===//
-
-#include <cstddef>
-#include <cstdint>
-#include <cstring>
 #include <type_traits>
 #include <base/defines.h>
 #include <base/extended_types.h>
@@ -34,99 +5,15 @@

 namespace
 {
-template <typename T>
-ALWAYS_INLINE inline constexpr T pow10(size_t x)
-{
-    return x ? 10 * pow10<T>(x - 1) : 1;
-}
-
-// Division by a power of 10 is implemented using a multiplicative inverse.
-// This strength reduction is also done by optimizing compilers, but
-// presently the fastest results are produced by using the values
-// for the multiplication and the shift as given by the algorithm
-// described by Agner Fog in "Optimizing Subroutines in Assembly Language"
-//
-// http://www.agner.org/optimize/optimizing_assembly.pdf
-//
-// "Integer division by a constant (all processors)
-// A floating point number can be divided by a constant by multiplying
-// with the reciprocal. If we want to do the same with integers, we have
-// to scale the reciprocal by 2n and then shift the product to the right
-// by n. There are various algorithms for finding a suitable value of n
-// and compensating for rounding errors. The algorithm described below
-// was invented by Terje Mathisen, Norway, and not published elsewhere."
-
-/// Division by constant is performed by:
-/// 1. Adding 1 if needed;
-/// 2. Multiplying by another constant;
-/// 3. Shifting right by another constant.
-template <typename UInt, bool add_, UInt multiplier_, unsigned shift_>
-struct Division
-{
-    static constexpr bool add{add_};
-    static constexpr UInt multiplier{multiplier_};
-    static constexpr unsigned shift{shift_};
-};
-
-/// Select a type with appropriate number of bytes from the list of types.
-/// First parameter is the number of bytes requested. Then goes a list of types with 1, 2, 4, ... number of bytes.
-/// Example: SelectType<4, uint8_t, uint16_t, uint32_t, uint64_t> will select uint32_t.
-template <size_t N, typename T, typename... Ts>
-struct SelectType
-{
-    using Result = typename SelectType<N / 2, Ts...>::Result;
-};
-
-template <typename T, typename... Ts>
-struct SelectType<1, T, Ts...>
-{
-    using Result = T;
-};
-
-/// Division by 10^N where N is the size of the type.
-template <size_t N>
-using DivisionBy10PowN = typename SelectType<
-    N,
-    Division<uint8_t, false, 205U, 11>,                    /// divide by 10
-    Division<uint16_t, true, 41943U, 22>,                  /// divide by 100
-    Division<uint32_t, false, 3518437209U, 45>,            /// divide by 10000
-    Division<uint64_t, false, 12379400392853802749ULL, 90> /// divide by 100000000
-    >::Result;
-
-template <size_t N>
-using UnsignedOfSize = typename SelectType<N, uint8_t, uint16_t, uint32_t, uint64_t, __uint128_t>::Result;
-
-/// Holds the result of dividing an unsigned N-byte variable by 10^N resulting in
-template <size_t N>
-struct QuotientAndRemainder
-{
-    UnsignedOfSize<N> quotient;      // quotient with fewer than 2*N decimal digits
-    UnsignedOfSize<N / 2> remainder; // remainder with at most N decimal digits
-};
-
-template <size_t N>
-QuotientAndRemainder<N> inline split(UnsignedOfSize<N> value)
-{
-    constexpr DivisionBy10PowN<N> division;
-
-    UnsignedOfSize<N> quotient = (division.multiplier * (UnsignedOfSize<2 * N>(value) + division.add)) >> division.shift;
-    UnsignedOfSize<N / 2> remainder = static_cast<UnsignedOfSize<N / 2>>(value - quotient * pow10<UnsignedOfSize<N / 2>>(N));
-
-    return {quotient, remainder};
-}
-
-ALWAYS_INLINE inline char * outDigit(char * p, uint8_t value)
-{
-    *p = '0' + value;
-    ++p;
-    return p;
-}
+ALWAYS_INLINE inline char * outOneDigit(char * p, uint8_t value)
+{
+    *p = '0' + value;
+    return p + 1;
+}

 // Using a lookup table to convert binary numbers from 0 to 99
 // into ascii characters as described by Andrei Alexandrescu in
 // https://www.facebook.com/notes/facebook-engineering/three-optimization-tips-for-c/10151361643253920/

 const char digits[201] = "00010203040506070809"
     "10111213141516171819"
     "20212223242526272829"
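The comment block being deleted above is the only place the multiplicative-inverse division scheme was documented. As a sanity check of the first entry in the removed `DivisionBy10PowN` table, here is a standalone snippet (a sketch; the constants 205 and 11 are copied from the removed code, and the loop verifies the identity exhaustively over the type's range):

#include <cassert>

int main()
{
    // (v * 205) >> 11 == v / 10 for every uint8_t value:
    // 205 / 2^11 = 0.100097..., close enough to 1/10 that the truncated
    // product matches exact integer division on [0, 255].
    for (unsigned v = 0; v <= 255; ++v)
        assert(((v * 205u) >> 11) == v / 10);
}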
@@ -137,7 +24,6 @@ const char digits[201] = "00010203040506070809"
     "70717273747576777879"
     "80818283848586878889"
     "90919293949596979899";
-
 ALWAYS_INLINE inline char * outTwoDigits(char * p, uint8_t value)
 {
     memcpy(p, &digits[value * 2], 2);
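The surviving `digits` lookup table is the Alexandrescu trick referenced in the comment: precompute the two ASCII characters for every value 0..99 so each digit pair costs one table load and one two-byte copy, with no division in the output path. A standalone sketch of the same idea:

#include <cstdio>
#include <cstring>

// 100 two-character entries: "00", "01", ..., "99" (plus the terminating NUL).
static const char two_digit_table[201] =
    "00010203040506070809" "10111213141516171819" "20212223242526272829"
    "30313233343536373839" "40414243444546474849" "50515253545556575859"
    "60616263646566676869" "70717273747576777879" "80818283848586878889"
    "90919293949596979899";

static char * out_two_digits(char * p, unsigned value /* must be < 100 */)
{
    memcpy(p, &two_digit_table[value * 2], 2); // one 2-byte copy instead of a divide plus two stores
    return p + 2;
}

int main()
{
    char buf[3] = {};
    out_two_digits(buf, 42);
    puts(buf); // prints "42"
}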
@@ -145,153 +31,260 @@ ALWAYS_INLINE inline char * outTwoDigits(char * p, uint8_t value)
     return p;
 }

-namespace convert
+namespace jeaiii
 {
-template <typename UInt, size_t N = sizeof(UInt)>
-char * head(char * p, UInt u);
-template <typename UInt, size_t N = sizeof(UInt)>
-char * tail(char * p, UInt u);
-
-//===----------------------------------------------------------===//
-// head: find most significant digit, skip leading zeros
-//===----------------------------------------------------------===//
-
-// "x" contains quotient and remainder after division by 10^N
-// quotient is less than 10^N
-template <size_t N>
-ALWAYS_INLINE inline char * head(char * p, QuotientAndRemainder<N> x)
-{
-    p = head(p, UnsignedOfSize<N / 2>(x.quotient));
-    p = tail(p, x.remainder);
-    return p;
-}
-
-// "u" is less than 10^2*N
-template <typename UInt, size_t N>
-ALWAYS_INLINE inline char * head(char * p, UInt u)
-{
-    return u < pow10<UnsignedOfSize<N>>(N) ? head(p, UnsignedOfSize<N / 2>(u)) : head<N>(p, split<N>(u));
-}
-
-// recursion base case, selected when "u" is one byte
-template <>
-ALWAYS_INLINE inline char * head<UnsignedOfSize<1>, 1>(char * p, UnsignedOfSize<1> u)
-{
-    return u < 10 ? outDigit(p, u) : outTwoDigits(p, u);
-}
-
-//===----------------------------------------------------------===//
-// tail: produce all digits including leading zeros
-//===----------------------------------------------------------===//
-
-// recursive step, "u" is less than 10^2*N
-template <typename UInt, size_t N>
-ALWAYS_INLINE inline char * tail(char * p, UInt u)
-{
-    QuotientAndRemainder<N> x = split<N>(u);
-    p = tail(p, UnsignedOfSize<N / 2>(x.quotient));
-    p = tail(p, x.remainder);
-    return p;
-}
-
-// recursion base case, selected when "u" is one byte
-template <>
-ALWAYS_INLINE inline char * tail<UnsignedOfSize<1>, 1>(char * p, UnsignedOfSize<1> u)
-{
-    return outTwoDigits(p, u);
-}
-
-//===----------------------------------------------------------===//
-// large values are >= 10^2*N
-// where x contains quotient and remainder after division by 10^N
-//===----------------------------------------------------------===//
-template <size_t N>
-ALWAYS_INLINE inline char * large(char * p, QuotientAndRemainder<N> x)
-{
-    QuotientAndRemainder<N> y = split<N>(x.quotient);
-    p = head(p, UnsignedOfSize<N / 2>(y.quotient));
-    p = tail(p, y.remainder);
-    p = tail(p, x.remainder);
-    return p;
-}
-
-//===----------------------------------------------------------===//
-// handle values of "u" that might be >= 10^2*N
-// where N is the size of "u" in bytes
-//===----------------------------------------------------------===//
-template <typename UInt, size_t N = sizeof(UInt)>
-ALWAYS_INLINE inline char * uitoa(char * p, UInt u)
-{
-    if (u < pow10<UnsignedOfSize<N>>(N))
-        return head(p, UnsignedOfSize<N / 2>(u));
-    QuotientAndRemainder<N> x = split<N>(u);
-
-    return u < pow10<UnsignedOfSize<N>>(2 * N) ? head<N>(p, x) : large<N>(p, x);
-}
-
-// selected when "u" is one byte
-template <>
-ALWAYS_INLINE inline char * uitoa<UnsignedOfSize<1>, 1>(char * p, UnsignedOfSize<1> u)
-{
-    if (u < 10)
-        return outDigit(p, u);
-    else if (u < 100)
-        return outTwoDigits(p, u);
-    else
-    {
-        p = outDigit(p, u / 100);
-        p = outTwoDigits(p, u % 100);
-        return p;
-    }
-}
-
-//===----------------------------------------------------------===//
-// handle unsigned and signed integral operands
-//===----------------------------------------------------------===//
-
-// itoa: handle unsigned integral operands (selected by SFINAE)
-template <typename U>
-requires(!std::is_signed_v<U> && std::is_integral_v<U>)
-ALWAYS_INLINE inline char * itoa(U u, char * p)
-{
-    return convert::uitoa(p, u);
-}
-
-// itoa: handle signed integral operands (selected by SFINAE)
-template <typename I, size_t N = sizeof(I)>
-requires(std::is_signed_v<I> && std::is_integral_v<I>)
-ALWAYS_INLINE inline char * itoa(I i, char * p)
-{
-    // Need "mask" to be filled with a copy of the sign bit.
-    // If "i" is a negative value, then the result of "operator >>"
-    // is implementation-defined, though usually it is an arithmetic
-    // right shift that replicates the sign bit.
-    // Use a conditional expression to be portable,
-    // a good optimizing compiler generates an arithmetic right shift
-    // and avoids the conditional branch.
-    UnsignedOfSize<N> mask = i < 0 ? ~UnsignedOfSize<N>(0) : 0;
-    // Now get the absolute value of "i" and cast to unsigned type UnsignedOfSize<N>.
-    // Cannot use std::abs() because the result is undefined
-    // in 2's complement systems for the most-negative value.
-    // Want to avoid conditional branch for performance reasons since
-    // CPU branch prediction will be ineffective when negative values
-    // occur randomly.
-    // Let "u" be "i" cast to unsigned type UnsignedOfSize<N>.
-    // Subtract "u" from 2*u if "i" is positive or 0 if "i" is negative.
-    // This yields the absolute value with the desired type without
-    // using a conditional branch and without invoking undefined or
-    // implementation defined behavior:
-    UnsignedOfSize<N> u = ((2 * UnsignedOfSize<N>(i)) & ~mask) - UnsignedOfSize<N>(i);
-    // Unconditionally store a minus sign when producing digits
-    // in a forward direction and increment the pointer only if
-    // the value is in fact negative.
-    // This avoids a conditional branch and is safe because we will
-    // always produce at least one digit and it will overwrite the
-    // minus sign when the value is not negative.
-    *p = '-';
-    p += (mask & 1);
-    p = convert::uitoa(p, u);
-    return p;
-}
+/*
+    MIT License
+
+    Copyright (c) 2022 James Edward Anhalt III - https://github.com/jeaiii/itoa
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in all
+    copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+    SOFTWARE.
+*/
+struct pair
+{
+    char dd[2];
+    constexpr pair(char c) : dd{c, '\0'} { } /// NOLINT(google-explicit-constructor)
+    constexpr pair(int n) : dd{"0123456789"[n / 10], "0123456789"[n % 10]} { } /// NOLINT(google-explicit-constructor)
+};
+
+constexpr struct
+{
+    pair dd[100]{
+        0, 1, 2, 3, 4, 5, 6, 7, 8, 9, //
+        10, 11, 12, 13, 14, 15, 16, 17, 18, 19, //
+        20, 21, 22, 23, 24, 25, 26, 27, 28, 29, //
+        30, 31, 32, 33, 34, 35, 36, 37, 38, 39, //
+        40, 41, 42, 43, 44, 45, 46, 47, 48, 49, //
+        50, 51, 52, 53, 54, 55, 56, 57, 58, 59, //
+        60, 61, 62, 63, 64, 65, 66, 67, 68, 69, //
+        70, 71, 72, 73, 74, 75, 76, 77, 78, 79, //
+        80, 81, 82, 83, 84, 85, 86, 87, 88, 89, //
+        90, 91, 92, 93, 94, 95, 96, 97, 98, 99, //
+    };
+    pair fd[100]{
+        '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', //
+        10, 11, 12, 13, 14, 15, 16, 17, 18, 19, //
+        20, 21, 22, 23, 24, 25, 26, 27, 28, 29, //
+        30, 31, 32, 33, 34, 35, 36, 37, 38, 39, //
+        40, 41, 42, 43, 44, 45, 46, 47, 48, 49, //
+        50, 51, 52, 53, 54, 55, 56, 57, 58, 59, //
+        60, 61, 62, 63, 64, 65, 66, 67, 68, 69, //
+        70, 71, 72, 73, 74, 75, 76, 77, 78, 79, //
+        80, 81, 82, 83, 84, 85, 86, 87, 88, 89, //
+        90, 91, 92, 93, 94, 95, 96, 97, 98, 99, //
+    };
+} digits;
+
+constexpr UInt64 mask24 = (UInt64(1) << 24) - 1;
+constexpr UInt64 mask32 = (UInt64(1) << 32) - 1;
+constexpr UInt64 mask57 = (UInt64(1) << 57) - 1;
+
+template <bool, class, class F>
+struct _cond
+{
+    using type = F;
+};
+template <class T, class F>
+struct _cond<true, T, F>
+{
+    using type = T;
+};
+template <bool B, class T, class F>
+using cond = typename _cond<B, T, F>::type;
+
+template <class T>
+inline ALWAYS_INLINE char * to_text_from_integer(char * b, T i)
+{
+    constexpr auto q = sizeof(T);
+    using U = cond<q == 1, char8_t, cond<q <= sizeof(UInt16), UInt16, cond<q <= sizeof(UInt32), UInt32, UInt64>>>;
+
+    // convert bool to int before test with unary + to silence warning if T happens to be bool
+    U const n = +i < 0 ? *b++ = '-', U(0) - U(i) : U(i);
+
+    if (n < U(1e2))
+    {
+        /// This is changed from the original jeaiii implementation
+        /// For small numbers the extra branch to call outOneDigit() is worth it as it saves some instructions
+        /// and a memory access (no need to read digits.fd[n])
+        /// This is not true for pure random numbers, but that's not the common use case of a database
+        /// Original jeaii code
+        // *reinterpret_cast<pair *>(b) = digits.fd[n];
+        // return n < 10 ? b + 1 : b + 2;
+        return n < 10 ? outOneDigit(b, n) : outTwoDigits(b, n);
+    }
+    if (n < UInt32(1e6))
+    {
+        if (sizeof(U) == 1 || n < U(1e4))
+        {
+            auto f0 = UInt32(10 * (1 << 24) / 1e3 + 1) * n;
+            *reinterpret_cast<pair *>(b) = digits.fd[f0 >> 24];
+            if constexpr (sizeof(U) == 1)
+                b -= 1;
+            else
+                b -= n < U(1e3);
+            auto f2 = (f0 & mask24) * 100;
+            *reinterpret_cast<pair *>(b + 2) = digits.dd[f2 >> 24];
+            return b + 4;
+        }
+        auto f0 = UInt64(10 * (1ull << 32ull) / 1e5 + 1) * n;
+        *reinterpret_cast<pair *>(b) = digits.fd[f0 >> 32];
+        if constexpr (sizeof(U) == 2)
+            b -= 1;
+        else
+            b -= n < U(1e5);
+        auto f2 = (f0 & mask32) * 100;
+        *reinterpret_cast<pair *>(b + 2) = digits.dd[f2 >> 32];
+        auto f4 = (f2 & mask32) * 100;
+        *reinterpret_cast<pair *>(b + 4) = digits.dd[f4 >> 32];
+        return b + 6;
+    }
+    if (sizeof(U) == 4 || n < UInt64(1ull << 32ull))
+    {
+        if (n < U(1e8))
+        {
+            auto f0 = UInt64(10 * (1ull << 48ull) / 1e7 + 1) * n >> 16;
+            *reinterpret_cast<pair *>(b) = digits.fd[f0 >> 32];
+            b -= n < U(1e7);
+            auto f2 = (f0 & mask32) * 100;
+            *reinterpret_cast<pair *>(b + 2) = digits.dd[f2 >> 32];
+            auto f4 = (f2 & mask32) * 100;
+            *reinterpret_cast<pair *>(b + 4) = digits.dd[f4 >> 32];
+            auto f6 = (f4 & mask32) * 100;
+            *reinterpret_cast<pair *>(b + 6) = digits.dd[f6 >> 32];
+            return b + 8;
+        }
+        auto f0 = UInt64(10 * (1ull << 57ull) / 1e9 + 1) * n;
+        *reinterpret_cast<pair *>(b) = digits.fd[f0 >> 57];
+        b -= n < UInt32(1e9);
+        auto f2 = (f0 & mask57) * 100;
+        *reinterpret_cast<pair *>(b + 2) = digits.dd[f2 >> 57];
+        auto f4 = (f2 & mask57) * 100;
+        *reinterpret_cast<pair *>(b + 4) = digits.dd[f4 >> 57];
+        auto f6 = (f4 & mask57) * 100;
+        *reinterpret_cast<pair *>(b + 6) = digits.dd[f6 >> 57];
+        auto f8 = (f6 & mask57) * 100;
+        *reinterpret_cast<pair *>(b + 8) = digits.dd[f8 >> 57];
+        return b + 10;
+    }
+
+    // if we get here U must be UInt64 but some compilers don't know that, so reassign n to a UInt64 to avoid warnings
+    UInt32 z = n % UInt32(1e8);
+    UInt64 u = n / UInt32(1e8);
+
+    if (u < UInt32(1e2))
+    {
+        // u can't be 1 digit (if u < 10 it would have been handled above as a 9 digit 32bit number)
+        *reinterpret_cast<pair *>(b) = digits.dd[u];
+        b += 2;
+    }
+    else if (u < UInt32(1e6))
+    {
+        if (u < UInt32(1e4))
+        {
+            auto f0 = UInt32(10 * (1 << 24) / 1e3 + 1) * u;
+            *reinterpret_cast<pair *>(b) = digits.fd[f0 >> 24];
+            b -= u < UInt32(1e3);
+            auto f2 = (f0 & mask24) * 100;
+            *reinterpret_cast<pair *>(b + 2) = digits.dd[f2 >> 24];
+            b += 4;
+        }
+        else
+        {
+            auto f0 = UInt64(10 * (1ull << 32ull) / 1e5 + 1) * u;
+            *reinterpret_cast<pair *>(b) = digits.fd[f0 >> 32];
+            b -= u < UInt32(1e5);
+            auto f2 = (f0 & mask32) * 100;
+            *reinterpret_cast<pair *>(b + 2) = digits.dd[f2 >> 32];
+            auto f4 = (f2 & mask32) * 100;
+            *reinterpret_cast<pair *>(b + 4) = digits.dd[f4 >> 32];
+            b += 6;
+        }
+    }
+    else if (u < UInt32(1e8))
+    {
+        auto f0 = UInt64(10 * (1ull << 48ull) / 1e7 + 1) * u >> 16;
+        *reinterpret_cast<pair *>(b) = digits.fd[f0 >> 32];
+        b -= u < UInt32(1e7);
+        auto f2 = (f0 & mask32) * 100;
+        *reinterpret_cast<pair *>(b + 2) = digits.dd[f2 >> 32];
+        auto f4 = (f2 & mask32) * 100;
+        *reinterpret_cast<pair *>(b + 4) = digits.dd[f4 >> 32];
+        auto f6 = (f4 & mask32) * 100;
+        *reinterpret_cast<pair *>(b + 6) = digits.dd[f6 >> 32];
+        b += 8;
+    }
+    else if (u < UInt64(1ull << 32ull))
+    {
+        auto f0 = UInt64(10 * (1ull << 57ull) / 1e9 + 1) * u;
+        *reinterpret_cast<pair *>(b) = digits.fd[f0 >> 57];
+        b -= u < UInt32(1e9);
+        auto f2 = (f0 & mask57) * 100;
+        *reinterpret_cast<pair *>(b + 2) = digits.dd[f2 >> 57];
+        auto f4 = (f2 & mask57) * 100;
+        *reinterpret_cast<pair *>(b + 4) = digits.dd[f4 >> 57];
+        auto f6 = (f4 & mask57) * 100;
+        *reinterpret_cast<pair *>(b + 6) = digits.dd[f6 >> 57];
+        auto f8 = (f6 & mask57) * 100;
+        *reinterpret_cast<pair *>(b + 8) = digits.dd[f8 >> 57];
+        b += 10;
+    }
+    else
+    {
+        UInt32 y = u % UInt32(1e8);
+        u /= UInt32(1e8);
+
+        // u is 2, 3, or 4 digits (if u < 10 it would have been handled above)
+        if (u < UInt32(1e2))
+        {
+            *reinterpret_cast<pair *>(b) = digits.dd[u];
+            b += 2;
+        }
+        else
+        {
+            auto f0 = UInt32(10 * (1 << 24) / 1e3 + 1) * u;
+            *reinterpret_cast<pair *>(b) = digits.fd[f0 >> 24];
+            b -= u < UInt32(1e3);
+            auto f2 = (f0 & mask24) * 100;
+            *reinterpret_cast<pair *>(b + 2) = digits.dd[f2 >> 24];
+            b += 4;
+        }
+        // do 8 digits
+        auto f0 = (UInt64((1ull << 48ull) / 1e6 + 1) * y >> 16) + 1;
+        *reinterpret_cast<pair *>(b) = digits.dd[f0 >> 32];
+        auto f2 = (f0 & mask32) * 100;
+        *reinterpret_cast<pair *>(b + 2) = digits.dd[f2 >> 32];
+        auto f4 = (f2 & mask32) * 100;
+        *reinterpret_cast<pair *>(b + 4) = digits.dd[f4 >> 32];
+        auto f6 = (f4 & mask32) * 100;
+        *reinterpret_cast<pair *>(b + 6) = digits.dd[f6 >> 32];
+        b += 8;
+    }
+    // do 8 digits
+    auto f0 = (UInt64((1ull << 48ull) / 1e6 + 1) * z >> 16) + 1;
+    *reinterpret_cast<pair *>(b) = digits.dd[f0 >> 32];
+    auto f2 = (f0 & mask32) * 100;
+    *reinterpret_cast<pair *>(b + 2) = digits.dd[f2 >> 32];
+    auto f4 = (f2 & mask32) * 100;
+    *reinterpret_cast<pair *>(b + 4) = digits.dd[f4 >> 32];
+    auto f6 = (f4 & mask32) * 100;
+    *reinterpret_cast<pair *>(b + 6) = digits.dd[f6 >> 32];
+    return b + 8;
+}
 }
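Both the removed `convert::itoa` and the new jeaiii code avoid a data-dependent branch on the sign: the old version documents the mask trick at length above, while `to_text_from_integer` compresses the same idea into `U const n = +i < 0 ? *b++ = '-', U(0) - U(i) : U(i);`. A standalone sketch of the removed mask variant, including the `INT32_MIN` edge case it was designed around:

#include <cassert>
#include <cstdint>
#include <limits>

// Branch-free absolute value plus sign emission, as in the removed convert::itoa.
static char * emit_sign_and_abs(char * p, int32_t i, uint32_t & abs_out)
{
    uint32_t mask = i < 0 ? ~uint32_t(0) : 0;            // all ones iff negative
    abs_out = ((2 * uint32_t(i)) & ~mask) - uint32_t(i); // |i| in unsigned arithmetic, no UB even for INT32_MIN
    *p = '-';                                            // always store the sign...
    return p + (mask & 1);                               // ...but only advance past it when negative
}

int main()
{
    char buf[2];
    uint32_t a = 0;

    assert(emit_sign_and_abs(buf, 42, a) == buf && a == 42);
    assert(emit_sign_and_abs(buf, -42, a) == buf + 1 && a == 42);

    // std::abs(INT32_MIN) is undefined behavior; the unsigned arithmetic above is not.
    emit_sign_and_abs(buf, std::numeric_limits<int32_t>::min(), a);
    assert(a == 2147483648u);
}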
@@ -303,7 +296,7 @@ ALWAYS_INLINE inline char * writeUIntText(UInt128 _x, char * p)
 {
     /// If we the highest 64bit item is empty, we can print just the lowest item as u64
     if (_x.items[UInt128::_impl::little(1)] == 0)
-        return convert::itoa(_x.items[UInt128::_impl::little(0)], p);
+        return jeaiii::to_text_from_integer(p, _x.items[UInt128::_impl::little(0)]);

     /// Doing operations using __int128 is faster and we already rely on this feature
     using T = unsigned __int128;
@@ -334,7 +327,7 @@ ALWAYS_INLINE inline char * writeUIntText(UInt128 _x, char * p)
         current_block += max_multiple_of_hundred_blocks;
     }

-    char * highest_part_print = convert::itoa(uint64_t(x), p);
+    char * highest_part_print = jeaiii::to_text_from_integer(p, uint64_t(x));
     for (int i = 0; i < current_block; i++)
     {
         outTwoDigits(highest_part_print, two_values[current_block - 1 - i]);
@@ -450,12 +443,12 @@ ALWAYS_INLINE inline char * writeSIntText(T x, char * pos)

 char * itoa(UInt8 i, char * p)
 {
-    return convert::itoa(uint8_t(i), p);
+    return jeaiii::to_text_from_integer(p, uint8_t(i));
 }

 char * itoa(Int8 i, char * p)
 {
-    return convert::itoa(int8_t(i), p);
+    return jeaiii::to_text_from_integer(p, int8_t(i));
 }

 char * itoa(UInt128 i, char * p)
@@ -481,7 +474,7 @@ char * itoa(Int256 i, char * p)
 #define DEFAULT_ITOA(T) \
 char * itoa(T i, char * p) \
 { \
-    return convert::itoa(i, p); \
+    return jeaiii::to_text_from_integer(p, i); \
 }

 #define FOR_MISSING_INTEGER_TYPES(M) \
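All of these call sites now funnel into `jeaiii::to_text_from_integer`, but the public contract of `itoa()` is unchanged: digits are written forward into the caller's buffer and the pointer one past the last character is returned. A sketch of that contract using `std::to_chars` as a stand-in for the digit generation (the real implementation is the jeaiii code above):

#include <charconv>
#include <cstdio>

// Same contract as the itoa() overloads in this file: write digits forward,
// return the pointer one past the last character written.
static char * itoa_like(long long value, char * p)
{
    return std::to_chars(p, p + 20, value).ptr;
}

int main()
{
    char buf[21];
    char * end = itoa_like(-12345, buf);
    *end = '\0'; // itoa does not NUL-terminate; the caller decides
    puts(buf);   // prints "-12345"
}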
@@ -1,2 +0,0 @@
-clickhouse_add_executable (dump_variable dump_variable.cpp)
-target_link_libraries (dump_variable PRIVATE clickhouse_common_io)
@@ -1,70 +0,0 @@
-#include <base/iostream_debug_helpers.h>
-
-#include <iostream>
-#include <memory>
-#include <vector>
-#include <map>
-#include <set>
-#include <tuple>
-#include <array>
-#include <utility>
-
-struct S1;
-struct S2 {};
-
-struct S3
-{
-    std::set<const char *> m1;
-};
-
-std::ostream & operator<<(std::ostream & stream, const S3 & what)
-{
-    stream << "S3 {m1=";
-    dumpValue(stream, what.m1) << "}";
-    return stream;
-}
-
-int main(int, char **)
-{
-    int x = 1;
-
-    DUMP(x);
-    DUMP(x, 1, &x);
-
-    DUMP(std::make_unique<int>(1));
-    DUMP(std::make_shared<int>(1));
-
-    std::vector<int> vec{1, 2, 3};
-    DUMP(vec);
-
-    auto pair = std::make_pair(1, 2);
-    DUMP(pair);
-
-    auto tuple = std::make_tuple(1, 2, 3);
-    DUMP(tuple);
-
-    std::map<int, std::string> map{{1, "hello"}, {2, "world"}};
-    DUMP(map);
-
-    std::initializer_list<const char *> list{"hello", "world"};
-    DUMP(list);
-
-    std::array<const char *, 2> arr{{"hello", "world"}};
-    DUMP(arr);
-
-    //DUMP([]{});
-
-    S1 * s = nullptr;
-    DUMP(s);
-
-    DUMP(S2());
-
-    std::set<const char *> variants = {"hello", "world"};
-    DUMP(variants);
-
-    S3 s3 {{"hello", "world"}};
-    DUMP(s3);
-
-    return 0;
-}
@@ -62,7 +62,7 @@ struct fmt::formatter<wide::integer<Bits, Signed>>
     }

     template <typename FormatContext>
-    auto format(const wide::integer<Bits, Signed> & value, FormatContext & ctx)
+    auto format(const wide::integer<Bits, Signed> & value, FormatContext & ctx) const
     {
         return fmt::format_to(ctx.out(), "{}", to_string(value));
     }
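The added const qualifier matches newer fmt releases, which require formatter<T>::format() to be a const member. A minimal sketch of the same pattern for a hypothetical user type (Point is illustrative, not from this codebase):

    #include <fmt/format.h>

    struct Point { int x; int y; };

    template <>
    struct fmt::formatter<Point>
    {
        constexpr auto parse(fmt::format_parse_context & ctx) { return ctx.begin(); }

        // const-qualified, as newer fmt versions demand (mirroring the change above).
        template <typename FormatContext>
        auto format(const Point & p, FormatContext & ctx) const
        {
            return fmt::format_to(ctx.out(), "({}, {})", p.x, p.y);
        }
    };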
@@ -23,9 +23,6 @@
 #include <openssl/conf.h>
 #endif

-#if __has_feature(address_sanitizer)
-#include <sanitizer/lsan_interface.h>
-#endif
-
 using Poco::RandomInputStream;
 using Poco::Thread;
@@ -70,18 +67,12 @@ void OpenSSLInitializer::initialize()
     SSL_library_init();
     SSL_load_error_strings();
     OpenSSL_add_all_algorithms();

     char seed[SEEDSIZE];
     RandomInputStream rnd;
     rnd.read(seed, sizeof(seed));
-    {
-# if __has_feature(address_sanitizer)
-        /// Leak sanitizer (part of address sanitizer) thinks that a few bytes of memory in OpenSSL are allocated during but never released.
-        __lsan::ScopedDisabler lsan_disabler;
-#endif
-        RAND_seed(seed, SEEDSIZE);
-    }
+    RAND_seed(seed, SEEDSIZE);

     int nMutexes = CRYPTO_num_locks();
     _mutexes = new Poco::FastMutex[nMutexes];
     CRYPTO_set_locking_callback(&OpenSSLInitializer::lock);
@@ -89,8 +80,8 @@ void OpenSSLInitializer::initialize()
     // https://sourceforge.net/p/poco/bugs/110/
     //
     // From http://www.openssl.org/docs/crypto/threads.html :
     // "If the application does not register such a callback using CRYPTO_THREADID_set_callback(),
     // then a default implementation is used - on Windows and BeOS this uses the system's
     // default thread identifying APIs"
     CRYPTO_set_id_callback(&OpenSSLInitializer::id);
     CRYPTO_set_dynlock_create_callback(&OpenSSLInitializer::dynlockCreate);
@@ -109,7 +100,7 @@ void OpenSSLInitializer::uninitialize()
     CRYPTO_set_locking_callback(0);
     CRYPTO_set_id_callback(0);
     delete [] _mutexes;

     CONF_modules_free();
 }
 }
@@ -213,6 +213,7 @@ target_compile_definitions (_poco_foundation
 )

 target_include_directories (_poco_foundation SYSTEM PUBLIC "include")
+target_link_libraries (_poco_foundation PRIVATE clickhouse_common_io)

 target_link_libraries (_poco_foundation
     PRIVATE
@@ -21,6 +21,8 @@
 #include <atomic>
 #include <cstddef>
 #include <map>
+#include <memory>
+#include <unordered_map>
 #include <vector>

 #include "Poco/Channel.h"
@@ -19,6 +19,7 @@


 #include <map>
+#include <vector>
 #include "Poco/Foundation.h"
 #include "Poco/Timestamp.h"

@@ -48,7 +48,13 @@ class Foundation_API ThreadPool
     /// from the pool.
 {
 public:
-    ThreadPool(int minCapacity = 2, int maxCapacity = 16, int idleTime = 60, int stackSize = POCO_THREAD_STACK_SIZE);
+    explicit ThreadPool(
+        int minCapacity = 2,
+        int maxCapacity = 16,
+        int idleTime = 60,
+        int stackSize = POCO_THREAD_STACK_SIZE,
+        size_t global_profiler_real_time_period_ns_ = 0,
+        size_t global_profiler_cpu_time_period_ns_ = 0);
     /// Creates a thread pool with minCapacity threads.
     /// If required, up to maxCapacity threads are created
     /// a NoThreadAvailableException exception is thrown.
@@ -56,8 +62,14 @@ public:
     /// and more than minCapacity threads are running, the thread
     /// is killed. Threads are created with given stack size.

-    ThreadPool(
-        const std::string & name, int minCapacity = 2, int maxCapacity = 16, int idleTime = 60, int stackSize = POCO_THREAD_STACK_SIZE);
+    explicit ThreadPool(
+        const std::string & name,
+        int minCapacity = 2,
+        int maxCapacity = 16,
+        int idleTime = 60,
+        int stackSize = POCO_THREAD_STACK_SIZE,
+        size_t global_profiler_real_time_period_ns_ = 0,
+        size_t global_profiler_cpu_time_period_ns_ = 0);
     /// Creates a thread pool with the given name and minCapacity threads.
     /// If required, up to maxCapacity threads are created
     /// a NoThreadAvailableException exception is thrown.
@@ -171,6 +183,8 @@ private:
     int _serial;
     int _age;
     int _stackSize;
+    size_t _globalProfilerRealTimePeriodNs;
+    size_t _globalProfilerCPUTimePeriodNs;
     ThreadVec _threads;
     mutable FastMutex _mutex;
 };
@@ -20,6 +20,7 @@
 #include "Poco/ErrorHandler.h"
 #include <sstream>
 #include <ctime>
+#include <Common/ThreadPool.h>


 namespace Poco {
@@ -28,7 +29,11 @@ namespace Poco {
 class PooledThread: public Runnable
 {
 public:
-    PooledThread(const std::string& name, int stackSize = POCO_THREAD_STACK_SIZE);
+    explicit PooledThread(
+        const std::string& name,
+        int stackSize = POCO_THREAD_STACK_SIZE,
+        size_t globalProfilerRealTimePeriodNs_ = 0,
+        size_t globalProfilerCPUTimePeriodNs_ = 0);
     ~PooledThread();

     void start();
@@ -51,16 +56,24 @@ private:
     Event _targetCompleted;
     Event _started;
     FastMutex _mutex;
+    size_t _globalProfilerRealTimePeriodNs;
+    size_t _globalProfilerCPUTimePeriodNs;
 };


-PooledThread::PooledThread(const std::string& name, int stackSize):
-    _idle(true),
-    _idleTime(0),
-    _pTarget(0),
-    _name(name),
+PooledThread::PooledThread(
+    const std::string& name,
+    int stackSize,
+    size_t globalProfilerRealTimePeriodNs_,
+    size_t globalProfilerCPUTimePeriodNs_) :
+    _idle(true),
+    _idleTime(0),
+    _pTarget(0),
+    _name(name),
     _thread(name),
-    _targetCompleted(false)
+    _targetCompleted(false),
+    _globalProfilerRealTimePeriodNs(globalProfilerRealTimePeriodNs_),
+    _globalProfilerCPUTimePeriodNs(globalProfilerCPUTimePeriodNs_)
 {
     poco_assert_dbg (stackSize >= 0);
     _thread.setStackSize(stackSize);
@@ -83,7 +96,7 @@ void PooledThread::start()
 void PooledThread::start(Thread::Priority priority, Runnable& target)
 {
     FastMutex::ScopedLock lock(_mutex);

     poco_assert (_pTarget == 0);

     _pTarget = &target;
@@ -109,7 +122,7 @@ void PooledThread::start(Thread::Priority priority, Runnable& target, const std:
     }
     _thread.setName(fullName);
     _thread.setPriority(priority);

     poco_assert (_pTarget == 0);

     _pTarget = &target;
@@ -145,7 +158,7 @@ void PooledThread::join()
 void PooledThread::activate()
 {
     FastMutex::ScopedLock lock(_mutex);

     poco_assert (_idle);
     _idle = false;
     _targetCompleted.reset();
@@ -155,7 +168,7 @@ void PooledThread::activate()
 void PooledThread::release()
 {
     const long JOIN_TIMEOUT = 10000;

     _mutex.lock();
     _pTarget = 0;
     _mutex.unlock();
@@ -174,6 +187,10 @@ void PooledThread::release()

 void PooledThread::run()
 {
+    DB::ThreadStatus thread_status;
+    if (unlikely(_globalProfilerRealTimePeriodNs != 0 || _globalProfilerCPUTimePeriodNs != 0))
+        thread_status.initGlobalProfiler(_globalProfilerRealTimePeriodNs, _globalProfilerCPUTimePeriodNs);
+
     _started.set();
     for (;;)
     {
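The net effect of the plumbing above: both profiler periods travel from the ThreadPool constructor through PooledThread into the thread body, where each pooled thread registers a DB::ThreadStatus and arms the global profiler when either period is non-zero. A condensed sketch of just that flow (it only compiles inside the ClickHouse tree, and the surrounding pool logic is elided):

    #include <Common/ThreadPool.h>  // brings in DB::ThreadStatus, as included above

    // Condensed sketch of the wiring above, not the full Poco sources.
    void pooled_thread_body(size_t real_period_ns, size_t cpu_period_ns)
    {
        DB::ThreadStatus thread_status;  // attach this thread to ClickHouse's per-thread state
        if (real_period_ns != 0 || cpu_period_ns != 0)
            thread_status.initGlobalProfiler(real_period_ns, cpu_period_ns);  // start sampling timers
        // ... dispatch pooled work as before ...
    }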
@@ -220,13 +237,17 @@ void PooledThread::run()
 ThreadPool::ThreadPool(int minCapacity,
     int maxCapacity,
     int idleTime,
-    int stackSize):
-    _minCapacity(minCapacity),
-    _maxCapacity(maxCapacity),
+    int stackSize,
+    size_t globalProfilerRealTimePeriodNs_,
+    size_t globalProfilerCPUTimePeriodNs_) :
+    _minCapacity(minCapacity),
+    _maxCapacity(maxCapacity),
     _idleTime(idleTime),
     _serial(0),
     _age(0),
-    _stackSize(stackSize)
+    _stackSize(stackSize),
+    _globalProfilerRealTimePeriodNs(globalProfilerRealTimePeriodNs_),
+    _globalProfilerCPUTimePeriodNs(globalProfilerCPUTimePeriodNs_)
 {
     poco_assert (minCapacity >= 1 && maxCapacity >= minCapacity && idleTime > 0);

@@ -243,14 +264,18 @@ ThreadPool::ThreadPool(const std::string& name,
     int minCapacity,
     int maxCapacity,
     int idleTime,
-    int stackSize):
+    int stackSize,
+    size_t globalProfilerRealTimePeriodNs_,
+    size_t globalProfilerCPUTimePeriodNs_) :
     _name(name),
     _minCapacity(minCapacity),
     _maxCapacity(maxCapacity),
     _idleTime(idleTime),
     _serial(0),
     _age(0),
-    _stackSize(stackSize)
+    _stackSize(stackSize),
+    _globalProfilerRealTimePeriodNs(globalProfilerRealTimePeriodNs_),
+    _globalProfilerCPUTimePeriodNs(globalProfilerCPUTimePeriodNs_)
 {
     poco_assert (minCapacity >= 1 && maxCapacity >= minCapacity && idleTime > 0);

@@ -393,15 +418,15 @@ void ThreadPool::housekeep()
     ThreadVec activeThreads;
     idleThreads.reserve(_threads.size());
     activeThreads.reserve(_threads.size());

     for (ThreadVec::iterator it = _threads.begin(); it != _threads.end(); ++it)
     {
         if ((*it)->idle())
         {
             if ((*it)->idleTime() < _idleTime)
                 idleThreads.push_back(*it);
             else
                 expiredThreads.push_back(*it);
         }
         else activeThreads.push_back(*it);
     }
@@ -463,7 +488,7 @@ PooledThread* ThreadPool::createThread()
 {
     std::ostringstream name;
     name << _name << "[#" << ++_serial << "]";
-    return new PooledThread(name.str(), _stackSize);
+    return new PooledThread(name.str(), _stackSize, _globalProfilerRealTimePeriodNs, _globalProfilerCPUTimePeriodNs);
 }


@@ -481,7 +506,7 @@ public:
     ThreadPool* pool()
     {
         FastMutex::ScopedLock lock(_mutex);

         if (!_pPool)
         {
             _pPool = new ThreadPool("default");
@@ -490,7 +515,7 @@ public:
         }
         return _pPool;
     }

 private:
     ThreadPool* _pPool;
     FastMutex _mutex;
@@ -17,6 +17,7 @@
 #ifndef NetSSL_SSLManager_INCLUDED
 #define NetSSL_SSLManager_INCLUDED

+#include <unordered_map>

 #include <openssl/ssl.h>
 #include "Poco/BasicEvent.h"
@@ -219,6 +220,13 @@ namespace Net
         /// Unless initializeClient() has been called, the first call to this method initializes the default Context
         /// from the application configuration.

+        Context::Ptr getCustomServerContext(const std::string & name);
+        /// Return custom Context used by the server.
+
+        Context::Ptr setCustomServerContext(const std::string & name, Context::Ptr ctx);
+        /// Set custom Context used by the server.
+        /// Return pointer on inserted Context or on old Context if exists.
+
         PrivateKeyPassphraseHandlerPtr serverPassphraseHandler();
         /// Returns the configured passphrase handler of the server. If none is set, the method will create a default one
         /// from an application configuration.
@@ -258,6 +266,40 @@ namespace Net
         static const std::string CFG_SERVER_PREFIX;
         static const std::string CFG_CLIENT_PREFIX;

+        static const std::string CFG_PRIV_KEY_FILE;
+        static const std::string CFG_CERTIFICATE_FILE;
+        static const std::string CFG_CA_LOCATION;
+        static const std::string CFG_VER_MODE;
+        static const Context::VerificationMode VAL_VER_MODE;
+        static const std::string CFG_VER_DEPTH;
+        static const int VAL_VER_DEPTH;
+        static const std::string CFG_ENABLE_DEFAULT_CA;
+        static const bool VAL_ENABLE_DEFAULT_CA;
+        static const std::string CFG_CIPHER_LIST;
+        static const std::string CFG_CYPHER_LIST; // for backwards compatibility
+        static const std::string VAL_CIPHER_LIST;
+        static const std::string CFG_PREFER_SERVER_CIPHERS;
+        static const std::string CFG_DELEGATE_HANDLER;
+        static const std::string VAL_DELEGATE_HANDLER;
+        static const std::string CFG_CERTIFICATE_HANDLER;
+        static const std::string VAL_CERTIFICATE_HANDLER;
+        static const std::string CFG_CACHE_SESSIONS;
+        static const std::string CFG_SESSION_ID_CONTEXT;
+        static const std::string CFG_SESSION_CACHE_SIZE;
+        static const std::string CFG_SESSION_TIMEOUT;
+        static const std::string CFG_EXTENDED_VERIFICATION;
+        static const std::string CFG_REQUIRE_TLSV1;
+        static const std::string CFG_REQUIRE_TLSV1_1;
+        static const std::string CFG_REQUIRE_TLSV1_2;
+        static const std::string CFG_DISABLE_PROTOCOLS;
+        static const std::string CFG_DH_PARAMS_FILE;
+        static const std::string CFG_ECDH_CURVE;
+
+#ifdef OPENSSL_FIPS
+        static const std::string CFG_FIPS_MODE;
+        static const bool VAL_FIPS_MODE;
+#endif
+
     protected:
         static int verifyClientCallback(int ok, X509_STORE_CTX * pStore);
         /// The return value of this method defines how errors in
@@ -314,39 +356,7 @@ namespace Net
         InvalidCertificateHandlerPtr _ptrClientCertificateHandler;
         Poco::FastMutex _mutex;

-        static const std::string CFG_PRIV_KEY_FILE;
-        static const std::string CFG_CERTIFICATE_FILE;
-        static const std::string CFG_CA_LOCATION;
-        static const std::string CFG_VER_MODE;
-        static const Context::VerificationMode VAL_VER_MODE;
-        static const std::string CFG_VER_DEPTH;
-        static const int VAL_VER_DEPTH;
-        static const std::string CFG_ENABLE_DEFAULT_CA;
-        static const bool VAL_ENABLE_DEFAULT_CA;
-        static const std::string CFG_CIPHER_LIST;
-        static const std::string CFG_CYPHER_LIST; // for backwards compatibility
-        static const std::string VAL_CIPHER_LIST;
-        static const std::string CFG_PREFER_SERVER_CIPHERS;
-        static const std::string CFG_DELEGATE_HANDLER;
-        static const std::string VAL_DELEGATE_HANDLER;
-        static const std::string CFG_CERTIFICATE_HANDLER;
-        static const std::string VAL_CERTIFICATE_HANDLER;
-        static const std::string CFG_CACHE_SESSIONS;
-        static const std::string CFG_SESSION_ID_CONTEXT;
-        static const std::string CFG_SESSION_CACHE_SIZE;
-        static const std::string CFG_SESSION_TIMEOUT;
-        static const std::string CFG_EXTENDED_VERIFICATION;
-        static const std::string CFG_REQUIRE_TLSV1;
-        static const std::string CFG_REQUIRE_TLSV1_1;
-        static const std::string CFG_REQUIRE_TLSV1_2;
-        static const std::string CFG_DISABLE_PROTOCOLS;
-        static const std::string CFG_DH_PARAMS_FILE;
-        static const std::string CFG_ECDH_CURVE;
-
-#ifdef OPENSSL_FIPS
-        static const std::string CFG_FIPS_MODE;
-        static const bool VAL_FIPS_MODE;
-#endif
-
+        std::unordered_map<std::string, Context::Ptr> _mapPtrServerContexts;

         friend class Poco::SingletonHolder<SSLManager>;
         friend class Context;
@@ -235,8 +235,6 @@ namespace Net
         /// Note that simply closing a socket is not sufficient
         /// to be able to re-use it again.

-        Poco::Timespan getMaxTimeout();
-
     private:
         SecureSocketImpl(const SecureSocketImpl &);
         SecureSocketImpl & operator=(const SecureSocketImpl &);
@@ -250,6 +248,9 @@ namespace Net
         Session::Ptr _pSession;

         friend class SecureStreamSocketImpl;
+
+        Poco::Timespan getMaxTimeoutOrLimit();
+        //// Return max(send, receive) if non zero, otherwise maximum timeout
     };


@@ -330,27 +330,26 @@ void SSLManager::initDefaultContext(bool server)
     else
         _ptrDefaultClientContext->disableProtocols(disabledProtocols);

-    /// Temporarily disabled during the transition from boringssl to OpenSSL due to tsan issues.
-    /// bool cacheSessions = config.getBool(prefix + CFG_CACHE_SESSIONS, false);
-    /// if (server)
-    /// {
-    ///     std::string sessionIdContext = config.getString(prefix + CFG_SESSION_ID_CONTEXT, config.getString("application.name", ""));
-    ///     _ptrDefaultServerContext->enableSessionCache(cacheSessions, sessionIdContext);
-    ///     if (config.hasProperty(prefix + CFG_SESSION_CACHE_SIZE))
-    ///     {
-    ///         int cacheSize = config.getInt(prefix + CFG_SESSION_CACHE_SIZE);
-    ///         _ptrDefaultServerContext->setSessionCacheSize(cacheSize);
-    ///     }
-    ///     if (config.hasProperty(prefix + CFG_SESSION_TIMEOUT))
-    ///     {
-    ///         int timeout = config.getInt(prefix + CFG_SESSION_TIMEOUT);
-    ///         _ptrDefaultServerContext->setSessionTimeout(timeout);
-    ///     }
-    /// }
-    /// else
-    /// {
-    ///     _ptrDefaultClientContext->enableSessionCache(cacheSessions);
-    /// }
+    bool cacheSessions = config.getBool(prefix + CFG_CACHE_SESSIONS, false);
+    if (server)
+    {
+        std::string sessionIdContext = config.getString(prefix + CFG_SESSION_ID_CONTEXT, config.getString("application.name", ""));
+        _ptrDefaultServerContext->enableSessionCache(cacheSessions, sessionIdContext);
+        if (config.hasProperty(prefix + CFG_SESSION_CACHE_SIZE))
+        {
+            int cacheSize = config.getInt(prefix + CFG_SESSION_CACHE_SIZE);
+            _ptrDefaultServerContext->setSessionCacheSize(cacheSize);
+        }
+        if (config.hasProperty(prefix + CFG_SESSION_TIMEOUT))
+        {
+            int timeout = config.getInt(prefix + CFG_SESSION_TIMEOUT);
+            _ptrDefaultServerContext->setSessionTimeout(timeout);
+        }
+    }
+    else
+    {
+        _ptrDefaultClientContext->enableSessionCache(cacheSessions);
+    }

     bool extendedVerification = config.getBool(prefix + CFG_EXTENDED_VERIFICATION, false);
     if (server)
         _ptrDefaultServerContext->enableExtendedCertificateVerification(extendedVerification);
@@ -429,6 +428,23 @@ void SSLManager::initCertificateHandler(bool server)
 }


+Context::Ptr SSLManager::getCustomServerContext(const std::string & name)
+{
+    Poco::FastMutex::ScopedLock lock(_mutex);
+    auto it = _mapPtrServerContexts.find(name);
+    if (it != _mapPtrServerContexts.end())
+        return it->second;
+    return nullptr;
+}
+
+Context::Ptr SSLManager::setCustomServerContext(const std::string & name, Context::Ptr ctx)
+{
+    Poco::FastMutex::ScopedLock lock(_mutex);
+    ctx = _mapPtrServerContexts.insert({name, ctx}).first->second;
+    return ctx;
+}
+
+
 Poco::Util::AbstractConfiguration& SSLManager::appConfig()
 {
     try
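The two accessors above give SSLManager a small registry of named server contexts guarded by the existing _mutex; because setCustomServerContext uses map insert semantics, a concurrent or repeated registration under the same name returns the previously stored context rather than replacing it. A hypothetical caller (the context name and constructor arguments are illustrative placeholders):

    // Hypothetical usage of the registry added above; "interserver" and the
    // Context constructor arguments are illustrative placeholders.
    using Poco::Net::Context;
    using Poco::Net::SSLManager;

    Context::Ptr obtainServerContext()
    {
        if (Context::Ptr ctx = SSLManager::instance().getCustomServerContext("interserver"))
            return ctx;  // already registered

        Context::Ptr fresh = new Context(Context::SERVER_USE, "key.pem", "cert.pem", "");
        // insert() semantics: if another thread registered first, its context is returned.
        return SSLManager::instance().setCustomServerContext("interserver", fresh);
    }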
@@ -199,7 +199,7 @@ void SecureSocketImpl::connectSSL(bool performHandshake)
     if (performHandshake && _pSocket->getBlocking())
     {
         int ret;
-        Poco::Timespan remaining_time = getMaxTimeout();
+        Poco::Timespan remaining_time = getMaxTimeoutOrLimit();
         do
         {
             RemainingTimeCounter counter(remaining_time);
@@ -302,7 +302,7 @@ int SecureSocketImpl::sendBytes(const void* buffer, int length, int flags)
         return rc;
     }

-    Poco::Timespan remaining_time = getMaxTimeout();
+    Poco::Timespan remaining_time = getMaxTimeoutOrLimit();
     do
     {
         RemainingTimeCounter counter(remaining_time);
@@ -338,7 +338,7 @@ int SecureSocketImpl::receiveBytes(void* buffer, int length, int flags)
         return rc;
     }

-    Poco::Timespan remaining_time = getMaxTimeout();
+    Poco::Timespan remaining_time = getMaxTimeoutOrLimit();
     do
     {
         /// SSL record may consist of several TCP packets,
@@ -372,7 +372,7 @@ int SecureSocketImpl::completeHandshake()
     poco_check_ptr (_pSSL);

     int rc;
-    Poco::Timespan remaining_time = getMaxTimeout();
+    Poco::Timespan remaining_time = getMaxTimeoutOrLimit();
     do
     {
         RemainingTimeCounter counter(remaining_time);
@@ -453,18 +453,29 @@ X509* SecureSocketImpl::peerCertificate() const
     return 0;
 }

-Poco::Timespan SecureSocketImpl::getMaxTimeout()
+Poco::Timespan SecureSocketImpl::getMaxTimeoutOrLimit()
 {
     std::lock_guard<std::recursive_mutex> lock(_mutex);
     Poco::Timespan remaining_time = _pSocket->getReceiveTimeout();
     Poco::Timespan send_timeout = _pSocket->getSendTimeout();
     if (remaining_time < send_timeout)
         remaining_time = send_timeout;
+    /// zero SO_SNDTIMEO/SO_RCVTIMEO works as no timeout, let's replicate this
+    ///
+    /// NOTE: we cannot use INT64_MAX (std::numeric_limits<Poco::Timespan::TimeDiff>::max()),
+    /// since it will be later passed to poll() which accept int timeout, and
+    /// even though poll() accepts milliseconds and Timespan() accepts
+    /// microseconds, let's use smaller maximum value just to avoid some possible
+    /// issues, this should be enough anyway (it is ~24 days).
+    if (remaining_time == 0)
+        remaining_time = Poco::Timespan(std::numeric_limits<int>::max());
     return remaining_time;
 }

 bool SecureSocketImpl::mustRetry(int rc, Poco::Timespan& remaining_time)
 {
+    if (remaining_time == 0)
+        return false;
     std::lock_guard<std::recursive_mutex> lock(_mutex);
     if (rc <= 0)
     {
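The renamed helper keeps the old rule (take the larger of the send and receive timeouts) but now maps a zero timeout, which POSIX sockets treat as "no timeout", to a large finite cap so the poll()-based retry loop still terminates; mustRetry() then treats an exhausted budget as a hard stop. A standalone sketch of the computation, with units assumed to be Poco::Timespan microseconds:

    #include <algorithm>
    #include <cstdint>
    #include <limits>

    // Standalone sketch of the timeout rule above. INT_MAX stands in for
    // "effectively unlimited" while staying safe to hand down to poll().
    int64_t effective_timeout(int64_t recv_timeout, int64_t send_timeout)
    {
        int64_t remaining = std::max(recv_timeout, send_timeout);
        if (remaining == 0)  // zero SO_SNDTIMEO/SO_RCVTIMEO means "no timeout"
            remaining = std::numeric_limits<int>::max();
        return remaining;
    }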
@@ -475,9 +486,7 @@ bool SecureSocketImpl::mustRetry(int rc, Poco::Timespan& remaining_time)
             case SSL_ERROR_WANT_READ:
                 if (_pSocket->getBlocking())
                 {
-                    /// Level-triggered mode of epoll_wait is used, so if SSL_read don't read all available data from socket,
-                    /// epoll_wait returns true without waiting for new data even if remaining_time == 0
-                    if (_pSocket->pollImpl(remaining_time, Poco::Net::Socket::SELECT_READ) && remaining_time != 0)
+                    if (_pSocket->pollImpl(remaining_time, Poco::Net::Socket::SELECT_READ))
                         return true;
                     else
                         throw Poco::TimeoutException();
@@ -486,13 +495,15 @@ bool SecureSocketImpl::mustRetry(int rc, Poco::Timespan& remaining_time)
             case SSL_ERROR_WANT_WRITE:
                 if (_pSocket->getBlocking())
                 {
-                    /// The same as for SSL_ERROR_WANT_READ
-                    if (_pSocket->pollImpl(remaining_time, Poco::Net::Socket::SELECT_WRITE) && remaining_time != 0)
+                    if (_pSocket->pollImpl(remaining_time, Poco::Net::Socket::SELECT_WRITE))
                         return true;
                     else
                         throw Poco::TimeoutException();
                 }
                 break;
+            /// NOTE: POCO_EINTR is the same as SSL_ERROR_WANT_READ (at least in
+            /// OpenSSL), so this likely dead code, but let's leave it for
+            /// compatibility with other implementations
             case SSL_ERROR_SYSCALL:
                 return socketError == POCO_EAGAIN || socketError == POCO_EINTR;
             default:
@@ -1,12 +1,12 @@
 # This variables autochanged by tests/ci/version_helper.py:

-# NOTE: has nothing common with DBMS_TCP_PROTOCOL_VERSION,
+# NOTE: VERSION_REVISION has nothing common with DBMS_TCP_PROTOCOL_VERSION,
 # only DBMS_TCP_PROTOCOL_VERSION should be incremented on protocol changes.
-SET(VERSION_REVISION 54486)
+SET(VERSION_REVISION 54488)
 SET(VERSION_MAJOR 24)
-SET(VERSION_MINOR 5)
+SET(VERSION_MINOR 7)
 SET(VERSION_PATCH 1)
-SET(VERSION_GITHASH 6d4b31322d168356c8b10c43b4cef157c82337ff)
+SET(VERSION_GITHASH aa023477a9265e403982fca5ee29a714db5133d9)
-SET(VERSION_DESCRIBE v24.5.1.1-testing)
+SET(VERSION_DESCRIBE v24.7.1.1-testing)
-SET(VERSION_STRING 24.5.1.1)
+SET(VERSION_STRING 24.7.1.1)
 # end of autochange
@@ -84,5 +84,5 @@ if (CMAKE_CROSSCOMPILING)
         message (FATAL_ERROR "Trying to cross-compile to unsupported system: ${CMAKE_SYSTEM_NAME}!")
     endif ()

-    message (STATUS "Cross-compiling for target: ${CMAKE_CXX_COMPILE_TARGET}")
+    message (STATUS "Cross-compiling for target: ${CMAKE_CXX_COMPILER_TARGET}")
 endif ()
cmake/xray_instrumentation.cmake (new file, 20 lines)
@@ -0,0 +1,20 @@
+# https://llvm.org/docs/XRay.html
+
+option (ENABLE_XRAY "Enable LLVM XRay" OFF)
+
+if (NOT ENABLE_XRAY)
+    message (STATUS "Not using LLVM XRay")
+    return()
+endif()
+
+if (NOT (ARCH_AMD64 AND OS_LINUX))
+    message (STATUS "Not using LLVM XRay, only amd64 Linux or FreeBSD are supported")
+    return()
+endif()
+
+# The target clang must support xray, otherwise it should error on invalid option
+set (XRAY_FLAGS "-fxray-instrument -DUSE_XRAY")
+set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${XRAY_FLAGS}")
+set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${XRAY_FLAGS}")
+
+message (STATUS "Using LLVM XRay")
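With -fxray-instrument enabled, clang emits patchable entry/exit sleds around functions; the USE_XRAY define added above can guard source-level annotations. A minimal sketch of instrumented code (the attribute and the XRAY_OPTIONS variable are clang/LLVM features documented at the URL above, not something this commit defines):

    // Minimal sketch of a translation unit built with the flags above.
    #if defined(USE_XRAY)
    [[clang::xray_always_instrument]]
    #endif
    int work(int x)
    {
        return x * 2;  // XRay patches tracing sleds around this function at runtime
    }

    int main()
    {
        // Typically run with e.g. XRAY_OPTIONS="patch_premain=true xray_mode=xray-basic"
        // per the LLVM XRay documentation.
        return work(21);
    }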
contrib/CMakeLists.txt (vendored)
@@ -228,6 +228,8 @@ add_contrib (ulid-c-cmake ulid-c)

 add_contrib (libssh-cmake libssh)

+add_contrib (prometheus-protobufs-cmake prometheus-protobufs prometheus-protobufs-gogo)
+
 # Put all targets defined here and in subdirectories under "contrib/<immediate-subdir>" folders in GUI-based IDEs.
 # Some of third-party projects may override CMAKE_FOLDER or FOLDER property of their targets, so they would not appear
 # in "contrib/..." as originally planned, so we workaround this by fixing FOLDER properties of all targets manually,
contrib/abseil-cpp (vendored submodule)
@@ -1 +1 @@
-Subproject commit 3bd86026c93da5a40006fd53403dff9d5f5e30e3
+Subproject commit a3c4dd3e77f28b526efbb0eb394b72e29c633936
@@ -1,6 +1,8 @@
 set(ABSL_ROOT_DIR "${ClickHouse_SOURCE_DIR}/contrib/abseil-cpp")
 set(ABSL_COMMON_INCLUDE_DIRS "${ABSL_ROOT_DIR}")

+# This is a minimized version of the function definition in CMake/AbseilHelpers.cmake
+
 #
 # Copyright 2017 The Abseil Authors.
 #
@@ -16,7 +18,6 @@ set(ABSL_COMMON_INCLUDE_DIRS "${ABSL_ROOT_DIR}")
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-
 function(absl_cc_library)
   cmake_parse_arguments(ABSL_CC_LIB
     "DISABLE_INSTALL;PUBLIC;TESTONLY"
@@ -76,6 +77,12 @@ function(absl_cc_library)
   add_library(absl::${ABSL_CC_LIB_NAME} ALIAS ${_NAME})
 endfunction()

+# The following definitions are an amalgamation of the CMakeLists.txt files in absl/*/
+# To refresh them when upgrading to a new version:
+# - copy them over from upstream
+# - remove calls of 'absl_cc_test'
+# - remove calls of `absl_cc_library` that contain `TESTONLY`
+# - append '${DIR}' to the file definitions
+
 set(DIR ${ABSL_ROOT_DIR}/absl/algorithm)

@@ -102,12 +109,12 @@ absl_cc_library(
     absl::algorithm
     absl::core_headers
     absl::meta
+    absl::nullability
   PUBLIC
 )

 set(DIR ${ABSL_ROOT_DIR}/absl/base)

-# Internal-only target, do not depend on directly.
 absl_cc_library(
   NAME
     atomic_hook
@@ -146,6 +153,18 @@ absl_cc_library(
     ${ABSL_DEFAULT_COPTS}
 )

+absl_cc_library(
+  NAME
+    no_destructor
+  HDRS
+    "${DIR}/no_destructor.h"
+  DEPS
+    absl::config
+    absl::nullability
+  COPTS
+    ${ABSL_DEFAULT_COPTS}
+)
+
 absl_cc_library(
   NAME
     nullability
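The new no_destructor target packages absl/base/no_destructor.h; absl::NoDestructor<T> wraps a function-local static that is constructed on first use and intentionally never destroyed, which several targets below now depend on. A small usage sketch, assuming only the upstream abseil API:

    #include "absl/base/no_destructor.h"
    #include <string>

    // Sketch of absl::NoDestructor as packaged by the target above: the
    // wrapped string is built on first call and never runs its destructor,
    // sidestepping shutdown-order problems with static objects.
    const std::string & default_name()
    {
        static const absl::NoDestructor<std::string> name("default");
        return *name;
    }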
@@ -305,6 +324,8 @@ absl_cc_library(
     ${ABSL_DEFAULT_COPTS}
   LINKOPTS
     ${ABSL_DEFAULT_LINKOPTS}
+    $<$<BOOL:${LIBRT}>:-lrt>
+    $<$<BOOL:${MINGW}>:-ladvapi32>
   DEPS
     absl::atomic_hook
     absl::base_internal
@@ -312,6 +333,7 @@ absl_cc_library(
     absl::core_headers
     absl::dynamic_annotations
     absl::log_severity
+    absl::nullability
     absl::raw_logging_internal
     absl::spinlock_wait
     absl::type_traits
@@ -357,6 +379,7 @@ absl_cc_library(
     absl::base
     absl::config
     absl::core_headers
+    absl::nullability
   PUBLIC
 )

@@ -467,10 +490,11 @@ absl_cc_library(
   LINKOPTS
     ${ABSL_DEFAULT_LINKOPTS}
   DEPS
-    absl::container_common
     absl::common_policy_traits
     absl::compare
     absl::compressed_tuple
+    absl::config
+    absl::container_common
     absl::container_memory
     absl::cord
     absl::core_headers
@@ -480,7 +504,6 @@ absl_cc_library(
     absl::strings
     absl::throw_delegate
     absl::type_traits
-    absl::utility
 )

 # Internal-only target, do not depend on directly.
@@ -523,7 +546,9 @@ absl_cc_library(
   COPTS
     ${ABSL_DEFAULT_COPTS}
   DEPS
+    absl::base_internal
     absl::compressed_tuple
+    absl::config
     absl::core_headers
     absl::memory
     absl::span
@@ -548,18 +573,6 @@ absl_cc_library(
   PUBLIC
 )

-# Internal-only target, do not depend on directly.
-absl_cc_library(
-  NAME
-    counting_allocator
-  HDRS
-    "${DIR}/internal/counting_allocator.h"
-  COPTS
-    ${ABSL_DEFAULT_COPTS}
-  DEPS
-    absl::config
-)
-
 absl_cc_library(
   NAME
     flat_hash_map
@@ -570,7 +583,7 @@ absl_cc_library(
   DEPS
     absl::container_memory
     absl::core_headers
-    absl::hash_function_defaults
+    absl::hash_container_defaults
     absl::raw_hash_map
     absl::algorithm_container
     absl::memory
@@ -586,7 +599,7 @@ absl_cc_library(
     ${ABSL_DEFAULT_COPTS}
   DEPS
     absl::container_memory
-    absl::hash_function_defaults
+    absl::hash_container_defaults
     absl::raw_hash_set
     absl::algorithm_container
     absl::core_headers
@@ -604,7 +617,7 @@ absl_cc_library(
   DEPS
     absl::container_memory
     absl::core_headers
-    absl::hash_function_defaults
+    absl::hash_container_defaults
     absl::node_slot_policy
     absl::raw_hash_map
     absl::algorithm_container
@@ -620,8 +633,9 @@ absl_cc_library(
   COPTS
     ${ABSL_DEFAULT_COPTS}
   DEPS
+    absl::container_memory
     absl::core_headers
-    absl::hash_function_defaults
+    absl::hash_container_defaults
     absl::node_slot_policy
     absl::raw_hash_set
     absl::algorithm_container
@@ -629,6 +643,19 @@ absl_cc_library(
   PUBLIC
 )

+absl_cc_library(
+  NAME
+    hash_container_defaults
+  HDRS
+    "${DIR}/hash_container_defaults.h"
+  COPTS
+    ${ABSL_DEFAULT_COPTS}
+  DEPS
+    absl::config
+    absl::hash_function_defaults
+  PUBLIC
+)
+
 # Internal-only target, do not depend on directly.
 absl_cc_library(
   NAME
@@ -655,9 +682,11 @@ absl_cc_library(
     ${ABSL_DEFAULT_COPTS}
   DEPS
     absl::config
+    absl::container_common
     absl::cord
     absl::hash
     absl::strings
+    absl::type_traits
   PUBLIC
 )

@@ -703,6 +732,7 @@ absl_cc_library(
     absl::base
     absl::config
     absl::exponential_biased
+    absl::no_destructor
     absl::raw_logging_internal
     absl::sample_recorder
     absl::synchronization
@@ -756,7 +786,9 @@ absl_cc_library(
   COPTS
     ${ABSL_DEFAULT_COPTS}
   DEPS
+    absl::config
     absl::container_memory
+    absl::core_headers
     absl::raw_hash_set
     absl::throw_delegate
   PUBLIC
@@ -817,6 +849,7 @@ absl_cc_library(
   DEPS
     absl::config
     absl::core_headers
+    absl::debugging_internal
     absl::meta
     absl::strings
     absl::span
@@ -931,6 +964,7 @@ absl_cc_library(
     absl::crc32c
     absl::config
     absl::strings
+    absl::no_destructor
 )

 set(DIR ${ABSL_ROOT_DIR}/absl/debugging)
@@ -954,6 +988,8 @@ absl_cc_library(
     "${DIR}/stacktrace.cc"
   COPTS
     ${ABSL_DEFAULT_COPTS}
+  LINKOPTS
+    $<$<BOOL:${EXECINFO_LIBRARY}>:${EXECINFO_LIBRARY}>
   DEPS
     absl::debugging_internal
     absl::config
@@ -980,6 +1016,7 @@ absl_cc_library(
     ${ABSL_DEFAULT_COPTS}
   LINKOPTS
     ${ABSL_DEFAULT_LINKOPTS}
+    $<$<BOOL:${MINGW}>:-ldbghelp>
   DEPS
     absl::debugging_internal
     absl::demangle_internal
@@ -1058,8 +1095,10 @@ absl_cc_library(
     demangle_internal
   HDRS
     "${DIR}/internal/demangle.h"
+    "${DIR}/internal/demangle_rust.h"
   SRCS
     "${DIR}/internal/demangle.cc"
+    "${DIR}/internal/demangle_rust.cc"
   COPTS
     ${ABSL_DEFAULT_COPTS}
   DEPS
@@ -1252,6 +1291,7 @@ absl_cc_library(
     absl::strings
     absl::synchronization
     absl::flat_hash_map
+    absl::no_destructor
 )

 # Internal-only target, do not depend on directly.
@@ -1283,12 +1323,9 @@ absl_cc_library(
 absl_cc_library(
   NAME
     flags
-  SRCS
-    "${DIR}/flag.cc"
   HDRS
     "${DIR}/declare.h"
     "${DIR}/flag.h"
-    "${DIR}/internal/flag_msvc.inc"
   COPTS
     ${ABSL_DEFAULT_COPTS}
   LINKOPTS
@@ -1299,7 +1336,6 @@ absl_cc_library(
     absl::flags_config
     absl::flags_internal
     absl::flags_reflection
-    absl::base
     absl::core_headers
     absl::strings
 )
@@ -1379,6 +1415,9 @@ absl_cc_library(
     absl::synchronization
 )

+############################################################################
+# Unit tests in alphabetical order.
+
 set(DIR ${ABSL_ROOT_DIR}/absl/functional)

 absl_cc_library(
@@ -1431,6 +1470,18 @@ absl_cc_library(
   PUBLIC
 )

+absl_cc_library(
+  NAME
+    overload
+  HDRS
+    "${DIR}/overload.h"
+  COPTS
+    ${ABSL_DEFAULT_COPTS}
+  DEPS
+    absl::meta
+  PUBLIC
+)
+
 set(DIR ${ABSL_ROOT_DIR}/absl/hash)

 absl_cc_library(
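The overload target exposes absl/functional/overload.h, whose absl::Overload combines several lambdas into one visitor, pairing naturally with std::visit. A small sketch assuming the upstream abseil API:

    #include "absl/functional/overload.h"
    #include <string>
    #include <variant>

    // Sketch of absl::Overload (from the header packaged above) used as a
    // std::visit visitor over a variant.
    int describe(const std::variant<int, std::string> & v)
    {
        return std::visit(
            absl::Overload(
                [](int i) { return i; },
                [](const std::string & s) { return static_cast<int>(s.size()); }),
            v);
    }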
@@ -1640,6 +1691,7 @@ absl_cc_library(
     absl::log_internal_conditions
     absl::log_internal_message
     absl::log_internal_strip
+    absl::absl_vlog_is_on
 )

 absl_cc_library(
@@ -1721,6 +1773,7 @@ absl_cc_library(
     absl::log_entry
     absl::log_severity
     absl::log_sink
+    absl::no_destructor
     absl::raw_logging_internal
     absl::synchronization
     absl::span
@@ -1771,6 +1824,7 @@ absl_cc_library(
   LINKOPTS
     ${ABSL_DEFAULT_LINKOPTS}
   DEPS
+    absl::core_headers
     absl::log_internal_message
     absl::log_internal_nullstream
     absl::log_severity
@@ -1876,6 +1930,11 @@ absl_cc_library(
   PUBLIC
 )

+# Warning: Many linkers will strip the contents of this library because its
+# symbols are only used in a global constructor. A workaround is for clients
+# to link this using $<LINK_LIBRARY:WHOLE_ARCHIVE,absl::log_flags> instead of
+# the plain absl::log_flags.
+# TODO(b/320467376): Implement the equivalent of Bazel's alwayslink=True.
 absl_cc_library(
   NAME
     log_flags
@@ -1897,6 +1956,7 @@ absl_cc_library(
     absl::flags
     absl::flags_marshalling
     absl::strings
+    absl::vlog_config_internal
   PUBLIC
 )

@@ -1919,6 +1979,7 @@ absl_cc_library(
     absl::log_severity
     absl::raw_logging_internal
     absl::strings
+    absl::vlog_config_internal
 )

 absl_cc_library(
@@ -1952,6 +2013,7 @@ absl_cc_library(
     ${ABSL_DEFAULT_LINKOPTS}
   DEPS
     absl::log_internal_log_impl
+    absl::vlog_is_on
   PUBLIC
 )

@@ -2064,21 +2126,75 @@ absl_cc_library(
 )

 absl_cc_library(
   NAME
-    log_internal_fnmatch
+    vlog_config_internal
   SRCS
-    "${DIR}/internal/fnmatch.cc"
+    "${DIR}/internal/vlog_config.cc"
   HDRS
-    "${DIR}/internal/fnmatch.h"
+    "${DIR}/internal/vlog_config.h"
   COPTS
     ${ABSL_DEFAULT_COPTS}
   LINKOPTS
     ${ABSL_DEFAULT_LINKOPTS}
   DEPS
-    absl::config
-    absl::strings
+    absl::base
+    absl::config
+    absl::core_headers
+    absl::log_internal_fnmatch
+    absl::memory
+    absl::no_destructor
+    absl::strings
+    absl::synchronization
+    absl::optional
 )

+absl_cc_library(
+  NAME
+    absl_vlog_is_on
+  COPTS
+    ${ABSL_DEFAULT_COPTS}
+  LINKOPTS
+    ${ABSL_DEFAULT_LINKOPTS}
+  HDRS
+    "${DIR}/absl_vlog_is_on.h"
+  DEPS
+    absl::vlog_config_internal
+    absl::config
+    absl::core_headers
+    absl::strings
+)
+
+absl_cc_library(
+  NAME
+    vlog_is_on
+  COPTS
+    ${ABSL_DEFAULT_COPTS}
+  LINKOPTS
+    ${ABSL_DEFAULT_LINKOPTS}
+  HDRS
+    "${DIR}/vlog_is_on.h"
+  DEPS
+    absl::absl_vlog_is_on
+)
+
+absl_cc_library(
+  NAME
+    log_internal_fnmatch
+  SRCS
+    "${DIR}/internal/fnmatch.cc"
+  HDRS
+    "${DIR}/internal/fnmatch.h"
+  COPTS
+    ${ABSL_DEFAULT_COPTS}
+  LINKOPTS
+    ${ABSL_DEFAULT_LINKOPTS}
+  DEPS
+    absl::config
+    absl::strings
+)
+
+# Test targets
+
 set(DIR ${ABSL_ROOT_DIR}/absl/memory)

 absl_cc_library(
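The reshuffled targets above assemble abseil's verbose-logging stack: vlog_config_internal carries the runtime level table, absl_vlog_is_on/vlog_is_on expose the checks, and log_internal_fnmatch (now a standalone target) does the module-pattern matching. A small sketch of the user-facing side, assuming upstream abseil's logging macros and headers:

    #include "absl/log/log.h"         // VLOG / LOG, assuming upstream abseil's API
    #include "absl/log/vlog_is_on.h"  // VLOG_IS_ON, packaged by the target above

    void handle_request(int id)
    {
        // Emitted only when the effective verbosity for this file is >= 2.
        VLOG(2) << "handling request " << id;

        if (VLOG_IS_ON(3))
        {
            // Guard an expensive diagnostic behind the same verbosity check.
            LOG(INFO) << "expensive debug dump for request " << id;
        }
    }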
@@ -2147,6 +2263,7 @@ absl_cc_library(
   COPTS
     ${ABSL_DEFAULT_COPTS}
   DEPS
+    absl::compare
     absl::config
     absl::core_headers
     absl::bits
@@ -2176,6 +2293,8 @@ absl_cc_library(
   PUBLIC
 )

+set(DIR ${ABSL_ROOT_DIR}/absl/profiling)
+
 absl_cc_library(
   NAME
     sample_recorder
@@ -2188,8 +2307,6 @@ absl_cc_library(
     absl::synchronization
 )

-set(DIR ${ABSL_ROOT_DIR}/absl/profiling)
-
 absl_cc_library(
   NAME
     exponential_biased
@@ -2265,6 +2382,7 @@ absl_cc_library(
   LINKOPTS
     ${ABSL_DEFAULT_LINKOPTS}
   DEPS
+    absl::config
     absl::fast_type_id
     absl::optional
 )
@@ -2336,11 +2454,13 @@ absl_cc_library(
   DEPS
     absl::config
     absl::inlined_vector
+    absl::nullability
     absl::random_internal_pool_urbg
     absl::random_internal_salted_seed_seq
     absl::random_internal_seed_material
     absl::random_seed_gen_exception
     absl::span
+    absl::string_view
 )

 # Internal-only target, do not depend on directly.
@@ -2399,6 +2519,7 @@ absl_cc_library(
     ${ABSL_DEFAULT_COPTS}
   LINKOPTS
     ${ABSL_DEFAULT_LINKOPTS}
+    $<$<BOOL:${MINGW}>:-lbcrypt>
   DEPS
     absl::core_headers
     absl::optional
@@ -2658,6 +2779,29 @@ absl_cc_library(
     absl::config
 )

+# Internal-only target, do not depend on directly.
+absl_cc_library(
+  NAME
+    random_internal_distribution_test_util
+  SRCS
+    "${DIR}/internal/chi_square.cc"
+    "${DIR}/internal/distribution_test_util.cc"
+  HDRS
+    "${DIR}/internal/chi_square.h"
+    "${DIR}/internal/distribution_test_util.h"
+  COPTS
+    ${ABSL_DEFAULT_COPTS}
+  LINKOPTS
+    ${ABSL_DEFAULT_LINKOPTS}
+  DEPS
+    absl::config
+    absl::core_headers
+    absl::raw_logging_internal
+    absl::strings
+    absl::str_format
+    absl::span
+)
+
 # Internal-only target, do not depend on directly.
 absl_cc_library(
   NAME
@@ -2699,6 +2843,8 @@ absl_cc_library(
     absl::function_ref
     absl::inlined_vector
     absl::memory
+    absl::no_destructor
+    absl::nullability
     absl::optional
     absl::raw_logging_internal
     absl::span
@@ -2724,8 +2870,11 @@ absl_cc_library(
     absl::base
     absl::config
     absl::core_headers
+    absl::has_ostream_operator
+    absl::nullability
     absl::raw_logging_internal
     absl::status
+    absl::str_format
     absl::strings
     absl::type_traits
     absl::utility
@@ -2748,6 +2897,7 @@ absl_cc_library(
     absl::base
     absl::config
     absl::core_headers
+    absl::nullability
     absl::throw_delegate
   PUBLIC
 )
@@ -2762,6 +2912,7 @@ absl_cc_library(
     "${DIR}/has_absl_stringify.h"
     "${DIR}/internal/damerau_levenshtein_distance.h"
     "${DIR}/internal/string_constant.h"
+    "${DIR}/internal/has_absl_stringify.h"
     "${DIR}/match.h"
     "${DIR}/numbers.h"
     "${DIR}/str_cat.h"
@@ -2805,6 +2956,7 @@ absl_cc_library(
     absl::endian
     absl::int128
     absl::memory
+    absl::nullability
     absl::raw_logging_internal
     absl::throw_delegate
     absl::type_traits
@@ -2824,6 +2976,18 @@ absl_cc_library(
   PUBLIC
 )

+absl_cc_library(
+  NAME
+    has_ostream_operator
+  HDRS
+    "${DIR}/has_ostream_operator.h"
+  COPTS
+    ${ABSL_DEFAULT_COPTS}
+  DEPS
+    absl::config
+  PUBLIC
+)
+
 # Internal-only target, do not depend on directly.
 absl_cc_library(
   NAME
@@ -2855,7 +3019,12 @@ absl_cc_library(
   COPTS
     ${ABSL_DEFAULT_COPTS}
   DEPS
+    absl::config
+    absl::core_headers
+    absl::nullability
+    absl::span
     absl::str_format_internal
+    absl::string_view
   PUBLIC
 )

@@ -2886,6 +3055,7 @@ absl_cc_library(
     absl::strings
     absl::config
     absl::core_headers
+    absl::fixed_array
     absl::inlined_vector
     absl::numeric_representation
     absl::type_traits
@@ -2989,6 +3159,7 @@ absl_cc_library(
   DEPS
     absl::base
|
||||||
absl::config
|
absl::config
|
||||||
|
absl::no_destructor
|
||||||
absl::raw_logging_internal
|
absl::raw_logging_internal
|
||||||
absl::synchronization
|
absl::synchronization
|
||||||
)
|
)
|
||||||
@ -3079,6 +3250,7 @@ absl_cc_library(
|
|||||||
absl::endian
|
absl::endian
|
||||||
absl::function_ref
|
absl::function_ref
|
||||||
absl::inlined_vector
|
absl::inlined_vector
|
||||||
|
absl::nullability
|
||||||
absl::optional
|
absl::optional
|
||||||
absl::raw_logging_internal
|
absl::raw_logging_internal
|
||||||
absl::span
|
absl::span
|
||||||
@ -3246,6 +3418,8 @@ absl_cc_library(
|
|||||||
${ABSL_DEFAULT_COPTS}
|
${ABSL_DEFAULT_COPTS}
|
||||||
DEPS
|
DEPS
|
||||||
Threads::Threads
|
Threads::Threads
|
||||||
|
# TODO(#1495): Use $<LINK_LIBRARY:FRAMEWORK,CoreFoundation> once our
|
||||||
|
# minimum CMake version >= 3.24
|
||||||
$<$<PLATFORM_ID:Darwin>:-Wl,-framework,CoreFoundation>
|
$<$<PLATFORM_ID:Darwin>:-Wl,-framework,CoreFoundation>
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -3286,8 +3460,8 @@ absl_cc_library(
|
|||||||
NAME
|
NAME
|
||||||
bad_any_cast_impl
|
bad_any_cast_impl
|
||||||
SRCS
|
SRCS
|
||||||
"${DIR}/bad_any_cast.h"
|
"${DIR}/bad_any_cast.h"
|
||||||
"${DIR}/bad_any_cast.cc"
|
"${DIR}/bad_any_cast.cc"
|
||||||
COPTS
|
COPTS
|
||||||
${ABSL_DEFAULT_COPTS}
|
${ABSL_DEFAULT_COPTS}
|
||||||
DEPS
|
DEPS
|
||||||
@ -3307,6 +3481,7 @@ absl_cc_library(
|
|||||||
DEPS
|
DEPS
|
||||||
absl::algorithm
|
absl::algorithm
|
||||||
absl::core_headers
|
absl::core_headers
|
||||||
|
absl::nullability
|
||||||
absl::throw_delegate
|
absl::throw_delegate
|
||||||
absl::type_traits
|
absl::type_traits
|
||||||
PUBLIC
|
PUBLIC
|
||||||
@ -3327,6 +3502,7 @@ absl_cc_library(
|
|||||||
absl::config
|
absl::config
|
||||||
absl::core_headers
|
absl::core_headers
|
||||||
absl::memory
|
absl::memory
|
||||||
|
absl::nullability
|
||||||
absl::type_traits
|
absl::type_traits
|
||||||
absl::utility
|
absl::utility
|
||||||
PUBLIC
|
PUBLIC
|
||||||
@ -3389,6 +3565,7 @@ absl_cc_library(
|
|||||||
COPTS
|
COPTS
|
||||||
${ABSL_DEFAULT_COPTS}
|
${ABSL_DEFAULT_COPTS}
|
||||||
DEPS
|
DEPS
|
||||||
|
absl::config
|
||||||
absl::core_headers
|
absl::core_headers
|
||||||
absl::type_traits
|
absl::type_traits
|
||||||
PUBLIC
|
PUBLIC
|
||||||
|
contrib/arrow vendored
@@ -1 +1 @@
-Subproject commit 8f36d71d18587f1f315ec832f424183cb6519cbb
+Subproject commit 5cfccd8ea65f33d4517e7409815d761c7650b45d

contrib/aws vendored
@@ -1 +1 @@
-Subproject commit 2e12d7c6dafa81311ee3d73ac6a178550ffa75be
+Subproject commit 1c2946bfcb7f1e3ae0a858de0b59d4f1a7b4ccaf

@@ -125,7 +125,7 @@ configure_file("${AWS_SDK_CORE_DIR}/include/aws/core/SDKConfig.h.in"
     "${CMAKE_CURRENT_BINARY_DIR}/include/aws/core/SDKConfig.h" @ONLY)
 
 aws_get_version(AWS_CRT_CPP_VERSION_MAJOR AWS_CRT_CPP_VERSION_MINOR AWS_CRT_CPP_VERSION_PATCH FULL_VERSION GIT_HASH)
-configure_file("${AWS_CRT_DIR}/include/aws/crt/Config.h.in" "${AWS_CRT_DIR}/include/aws/crt/Config.h" @ONLY)
+configure_file("${AWS_CRT_DIR}/include/aws/crt/Config.h.in" "${CMAKE_CURRENT_BINARY_DIR}/include/aws/crt/Config.h" @ONLY)
 
 list(APPEND AWS_SOURCES ${AWS_SDK_CORE_SRC} ${AWS_SDK_CORE_NET_SRC} ${AWS_SDK_CORE_PLATFORM_SRC})
 

contrib/azure vendored
@@ -1 +1 @@
-Subproject commit 6262a76ef4c4c330c84e58dd4f6f13f4e6230fcd
+Subproject commit 92c94d7f37a43cc8fc4d466884a95f610c0593bf

contrib/cld2 vendored
@@ -1 +1 @@
-Subproject commit bc6d493a2f64ed1fc1c4c4b4294a542a04e04217
+Subproject commit 217ba8b8805b41557faadaa47bb6e99f2242eea3

contrib/fmtlib vendored
@@ -1 +1 @@
-Subproject commit b6f4ceaed0a0a24ccf575fab6c56dd50ccf6f1a9
+Subproject commit a33701196adfad74917046096bf5a2aa0ab0bb50

@@ -13,7 +13,6 @@ set (SRCS
     ${FMT_SOURCE_DIR}/include/fmt/core.h
     ${FMT_SOURCE_DIR}/include/fmt/format.h
     ${FMT_SOURCE_DIR}/include/fmt/format-inl.h
-    ${FMT_SOURCE_DIR}/include/fmt/locale.h
     ${FMT_SOURCE_DIR}/include/fmt/os.h
     ${FMT_SOURCE_DIR}/include/fmt/ostream.h
     ${FMT_SOURCE_DIR}/include/fmt/printf.h
@@ -157,15 +157,13 @@ function(protobuf_generate)
 
   set(_generated_srcs_all)
   foreach(_proto ${protobuf_generate_PROTOS})
-    get_filename_component(_abs_file ${_proto} ABSOLUTE)
-    get_filename_component(_abs_dir ${_abs_file} DIRECTORY)
-    get_filename_component(_basename ${_proto} NAME_WE)
-    file(RELATIVE_PATH _rel_dir ${CMAKE_CURRENT_SOURCE_DIR} ${_abs_dir})
-
-    set(_possible_rel_dir)
-    if (NOT protobuf_generate_APPEND_PATH)
-      set(_possible_rel_dir ${_rel_dir}/)
-    endif()
+    # The protobuf compiler doesn't return paths to the files it generates so we have to calculate those paths here:
+    # _abs_file - absolute path to a .proto file,
+    # _possible_rel_dir - relative path to the .proto file from some import directory specified in Protobuf_IMPORT_DIRS,
+    # _basename - filename of the .proto file (without path and without extenstion).
+    get_proto_absolute_path(_abs_file "${_proto}" ${_protobuf_include_path})
+    get_proto_relative_path(_possible_rel_dir "${_abs_file}" ${_protobuf_include_path})
+    get_filename_component(_basename "${_abs_file}" NAME_WE)
 
     set(_generated_srcs)
     foreach(_ext ${protobuf_generate_GENERATE_EXTENSIONS})
@@ -173,7 +171,7 @@ function(protobuf_generate)
     endforeach()
 
     if(protobuf_generate_DESCRIPTORS AND protobuf_generate_LANGUAGE STREQUAL cpp)
-      set(_descriptor_file "${CMAKE_CURRENT_BINARY_DIR}/${_basename}.desc")
+      set(_descriptor_file "${protobuf_generate_PROTOC_OUT_DIR}/${_possible_rel_dir}${_basename}.desc")
       set(_dll_desc_out "--descriptor_set_out=${_descriptor_file}")
       list(APPEND _generated_srcs ${_descriptor_file})
     endif()
@@ -196,3 +194,36 @@ function(protobuf_generate)
     target_sources(${protobuf_generate_TARGET} PRIVATE ${_generated_srcs_all})
   endif()
 endfunction()
+
+# Calculates the absolute path to a .proto file.
+function(get_proto_absolute_path result proto)
+  cmake_path(IS_ABSOLUTE proto _is_abs_path)
+  if(_is_abs_path)
+    set(${result} "${proto}" PARENT_SCOPE)
+    return()
+  endif()
+  foreach(_include_dir ${ARGN})
+    if(EXISTS "${_include_dir}/${proto}")
+      set(${result} "${_include_dir}/${proto}" PARENT_SCOPE)
+      return()
+    endif()
+  endforeach()
+  message(SEND_ERROR "Not found protobuf ${proto} in Protobuf_IMPORT_DIRS: ${ARGN}")
+endfunction()
+
+# Calculates a relative path to a .proto file. The returned path is relative to one of include directories.
+function(get_proto_relative_path result abs_path)
+  set(${result} "" PARENT_SCOPE)
+  get_filename_component(_abs_dir "${abs_path}" DIRECTORY)
+  foreach(_include_dir ${ARGN})
+    cmake_path(IS_PREFIX _include_dir "${_abs_dir}" _is_prefix)
+    if(_is_prefix)
+      file(RELATIVE_PATH _rel_dir "${_include_dir}" "${_abs_dir}")
+      if(NOT _rel_dir STREQUAL "")
+        set(${result} "${_rel_dir}/" PARENT_SCOPE)
+      endif()
+      return()
+    endif()
+  endforeach()
+  message(WARNING "Not found protobuf ${abs_path} in Protobuf_IMPORT_DIRS: ${ARGN}")
+endfunction()
contrib/googletest vendored
@@ -1 +1 @@
-Subproject commit e47544ad31cb3ceecd04cc13e8fe556f8df9fe0b
+Subproject commit a7f443b80b105f940225332ed3c31f2790092f47

@@ -5,7 +5,7 @@ else ()
 endif ()
 
 if (NOT ENABLE_ICU)
-    message(STATUS "Not using icu")
+    message(STATUS "Not using ICU")
     return()
 endif()
 
@@ -34,7 +34,11 @@ if (OS_LINUX)
   # avoid spurious latencies and additional work associated with
   # MADV_DONTNEED. See
   # https://github.com/ClickHouse/ClickHouse/issues/11121 for motivation.
-  set (JEMALLOC_CONFIG_MALLOC_CONF "percpu_arena:percpu,oversize_threshold:0,muzzy_decay_ms:0,dirty_decay_ms:5000")
+  if (CMAKE_BUILD_TYPE_UC STREQUAL "DEBUG")
+    set (JEMALLOC_CONFIG_MALLOC_CONF "percpu_arena:percpu,oversize_threshold:0,muzzy_decay_ms:0,dirty_decay_ms:5000")
+  else()
+    set (JEMALLOC_CONFIG_MALLOC_CONF "percpu_arena:percpu,oversize_threshold:0,muzzy_decay_ms:0,dirty_decay_ms:5000,prof:true,prof_active:false,background_thread:true")
+  endif()
 else()
   set (JEMALLOC_CONFIG_MALLOC_CONF "oversize_threshold:0,muzzy_decay_ms:0,dirty_decay_ms:5000")
 endif()
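The hunk above compiles jemalloc with profiling support in non-debug builds (`prof:true`) but leaves the profiler dormant (`prof_active:false`), so it can be switched on at runtime without a rebuild. A minimal sketch of such a runtime toggle via jemalloc's `mallctl`, assuming jemalloc is linked without a symbol prefix and built with profiling enabled (both assumptions, not shown in this diff):

```cpp
#include <cstddef>
#include <iostream>
#include <jemalloc/jemalloc.h>

int main()
{
    // Check that profiling support was compiled in (opt.prof mirrors "prof:true").
    bool prof = false;
    size_t sz = sizeof(prof);
    if (mallctl("opt.prof", &prof, &sz, nullptr, 0) != 0 || !prof)
    {
        std::cerr << "jemalloc was built without profiling support\n";
        return 1;
    }

    // Flip the dormant profiler on; this is what "prof_active:false" defers.
    bool active = true;
    if (mallctl("prof.active", nullptr, nullptr, &active, sizeof(active)) != 0)
    {
        std::cerr << "failed to activate jemalloc heap profiling\n";
        return 1;
    }
    std::cout << "jemalloc heap profiling activated\n";
}
```

Keeping the profiler compiled in but inactive costs little at runtime, which is presumably why the config only differs from the debug build by the trailing `prof:*` and `background_thread` options.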
contrib/llvm-project vendored
@@ -1 +1 @@
-Subproject commit d2142eed98046a47ff7112e3cc1e197c8a5cd80f
+Subproject commit 2a8967b60cbe5bc2df253712bac343cc5263c5fc

contrib/mariadb-connector-c vendored
@@ -1 +1 @@
-Subproject commit e39608998f5f6944ece9ec61f48e9172ec1de660
+Subproject commit d0a788c5b9fcaca2368d9233770d3ca91ea79f88

contrib/openssl vendored
@@ -1 +1 @@
-Subproject commit f7b8721dfc66abb147f24ca07b9c9d1d64f40f71
+Subproject commit ee2bb8513b28bf86b35404dd17a0e29305ca9e08

contrib/orc vendored
@@ -1 +1 @@
-Subproject commit e24f2c2a3ca0769c96704ab20ad6f512a83ea2ad
+Subproject commit bcc025c09828c556f54cfbdf83a66b9acae7d17f
contrib/prometheus-protobufs-cmake/CMakeLists.txt (new file)
@@ -0,0 +1,34 @@
+option(ENABLE_PROMETHEUS_PROTOBUFS "Enable Prometheus Protobufs" ${ENABLE_PROTOBUF})
+
+if(NOT ENABLE_PROMETHEUS_PROTOBUFS)
+  message(STATUS "Not using prometheus-protobufs")
+  return()
+endif()
+
+set(Protobuf_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/google-protobuf/src")
+set(Prometheus_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/prometheus-protobufs")
+set(GogoProto_INCLUDE_DIR "${ClickHouse_SOURCE_DIR}/contrib/prometheus-protobufs-gogo")
+
+# Protobuf_IMPORT_DIRS specify where the protobuf compiler will look for .proto files.
+set(Old_Protobuf_IMPORT_DIRS ${Protobuf_IMPORT_DIRS})
+list(APPEND Protobuf_IMPORT_DIRS "${Protobuf_INCLUDE_DIR}" "${Prometheus_INCLUDE_DIR}" "${GogoProto_INCLUDE_DIR}")
+
+PROTOBUF_GENERATE_CPP(prometheus_protobufs_sources prometheus_protobufs_headers
+  "prompb/remote.proto"
+  "prompb/types.proto"
+  "gogoproto/gogo.proto"
+)
+
+set(Protobuf_IMPORT_DIRS ${Old_Protobuf_IMPORT_DIRS})
+
+# Ignore warnings while compiling protobuf-generated *.pb.h and *.pb.cpp files.
+set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -w")
+
+# Disable clang-tidy for protobuf-generated *.pb.h and *.pb.cpp files.
+set (CMAKE_CXX_CLANG_TIDY "")
+
+add_library(_prometheus_protobufs ${prometheus_protobufs_sources} ${prometheus_protobufs_headers})
+target_include_directories(_prometheus_protobufs SYSTEM PUBLIC "${CMAKE_CURRENT_BINARY_DIR}")
+target_link_libraries (_prometheus_protobufs PUBLIC ch_contrib::protobuf)
+
+add_library (ch_contrib::prometheus_protobufs ALIAS _prometheus_protobufs)
contrib/prometheus-protobufs-gogo/LICENSE (new file)
@@ -0,0 +1,35 @@
+Copyright (c) 2022, The Cosmos SDK Authors. All rights reserved.
+Copyright (c) 2013, The GoGo Authors. All rights reserved.
+
+Protocol Buffers for Go with Gadgets
+
+Go support for Protocol Buffers - Google's data interchange format
+
+Copyright 2010 The Go Authors. All rights reserved.
+https://github.com/golang/protobuf
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+    * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+    * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

contrib/prometheus-protobufs-gogo/README (new file)
@@ -0,0 +1,4 @@
+File "gogoproto/gogo.proto" was downloaded from the "Protocol Buffers for Go with Gadgets" project:
+https://github.com/cosmos/gogoproto/blob/main/gogoproto/gogo.proto
+
+File "gogoproto/gogo.proto" is used in ClickHouse to compile prometheus protobufs.
contrib/prometheus-protobufs-gogo/gogoproto/gogo.proto (new file)
@@ -0,0 +1,145 @@
+// Protocol Buffers for Go with Gadgets
+//
+// Copyright (c) 2013, The GoGo Authors. All rights reserved.
+// http://github.com/cosmos/gogoproto
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+syntax = "proto2";
+package gogoproto;
+
+import "google/protobuf/descriptor.proto";
+
+option java_package = "com.google.protobuf";
+option java_outer_classname = "GoGoProtos";
+option go_package = "github.com/cosmos/gogoproto/gogoproto";
+
+extend google.protobuf.EnumOptions {
+  optional bool goproto_enum_prefix = 62001;
+  optional bool goproto_enum_stringer = 62021;
+  optional bool enum_stringer = 62022;
+  optional string enum_customname = 62023;
+  optional bool enumdecl = 62024;
+}
+
+extend google.protobuf.EnumValueOptions {
+  optional string enumvalue_customname = 66001;
+}
+
+extend google.protobuf.FileOptions {
+  optional bool goproto_getters_all = 63001;
+  optional bool goproto_enum_prefix_all = 63002;
+  optional bool goproto_stringer_all = 63003;
+  optional bool verbose_equal_all = 63004;
+  optional bool face_all = 63005;
+  optional bool gostring_all = 63006;
+  optional bool populate_all = 63007;
+  optional bool stringer_all = 63008;
+  optional bool onlyone_all = 63009;
+
+  optional bool equal_all = 63013;
+  optional bool description_all = 63014;
+  optional bool testgen_all = 63015;
+  optional bool benchgen_all = 63016;
+  optional bool marshaler_all = 63017;
+  optional bool unmarshaler_all = 63018;
+  optional bool stable_marshaler_all = 63019;
+
+  optional bool sizer_all = 63020;
+
+  optional bool goproto_enum_stringer_all = 63021;
+  optional bool enum_stringer_all = 63022;
+
+  optional bool unsafe_marshaler_all = 63023;
+  optional bool unsafe_unmarshaler_all = 63024;
+
+  optional bool goproto_extensions_map_all = 63025;
+  optional bool goproto_unrecognized_all = 63026;
+  optional bool gogoproto_import = 63027;
+  optional bool protosizer_all = 63028;
+  optional bool compare_all = 63029;
+  optional bool typedecl_all = 63030;
+  optional bool enumdecl_all = 63031;
+
+  optional bool goproto_registration = 63032;
+  optional bool messagename_all = 63033;
+
+  optional bool goproto_sizecache_all = 63034;
+  optional bool goproto_unkeyed_all = 63035;
+}
+
+extend google.protobuf.MessageOptions {
+  optional bool goproto_getters = 64001;
+  optional bool goproto_stringer = 64003;
+  optional bool verbose_equal = 64004;
+  optional bool face = 64005;
+  optional bool gostring = 64006;
+  optional bool populate = 64007;
+  optional bool stringer = 67008;
+  optional bool onlyone = 64009;
+
+  optional bool equal = 64013;
+  optional bool description = 64014;
+  optional bool testgen = 64015;
+  optional bool benchgen = 64016;
+  optional bool marshaler = 64017;
+  optional bool unmarshaler = 64018;
+  optional bool stable_marshaler = 64019;
+
+  optional bool sizer = 64020;
+
+  optional bool unsafe_marshaler = 64023;
+  optional bool unsafe_unmarshaler = 64024;
+
+  optional bool goproto_extensions_map = 64025;
+  optional bool goproto_unrecognized = 64026;
+
+  optional bool protosizer = 64028;
+  optional bool compare = 64029;
+
+  optional bool typedecl = 64030;
+
+  optional bool messagename = 64033;
+
+  optional bool goproto_sizecache = 64034;
+  optional bool goproto_unkeyed = 64035;
+}
+
+extend google.protobuf.FieldOptions {
+  optional bool nullable = 65001;
+  optional bool embed = 65002;
+  optional string customtype = 65003;
+  optional string customname = 65004;
+  optional string jsontag = 65005;
+  optional string moretags = 65006;
+  optional string casttype = 65007;
+  optional string castkey = 65008;
+  optional string castvalue = 65009;
+
+  optional bool stdtime = 65010;
+  optional bool stdduration = 65011;
+  optional bool wktpointer = 65012;
+
+  optional string castrepeated = 65013;
+}
contrib/prometheus-protobufs/LICENSE (new file)
@@ -0,0 +1,201 @@
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!) The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.

contrib/prometheus-protobufs/README (new file)
@@ -0,0 +1,2 @@
+Files "prompb/remote.proto" and "prompb/types.proto" were downloaded from the Prometheus repository:
+https://github.com/prometheus/prometheus/tree/main/prompb
contrib/prometheus-protobufs/prompb/remote.proto (new file)
@@ -0,0 +1,88 @@
+// Copyright 2016 Prometheus Team
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+package prometheus;
+
+option go_package = "prompb";
+
+import "prompb/types.proto";
+import "gogoproto/gogo.proto";
+
+message WriteRequest {
+  repeated prometheus.TimeSeries timeseries = 1 [(gogoproto.nullable) = false];
+  // Cortex uses this field to determine the source of the write request.
+  // We reserve it to avoid any compatibility issues.
+  reserved 2;
+  repeated prometheus.MetricMetadata metadata = 3 [(gogoproto.nullable) = false];
+}
+
+// ReadRequest represents a remote read request.
+message ReadRequest {
+  repeated Query queries = 1;
+
+  enum ResponseType {
+    // Server will return a single ReadResponse message with matched series that includes list of raw samples.
+    // It's recommended to use streamed response types instead.
+    //
+    // Response headers:
+    // Content-Type: "application/x-protobuf"
+    // Content-Encoding: "snappy"
+    SAMPLES = 0;
+    // Server will stream a delimited ChunkedReadResponse message that
+    // contains XOR or HISTOGRAM(!) encoded chunks for a single series.
+    // Each message is following varint size and fixed size bigendian
+    // uint32 for CRC32 Castagnoli checksum.
+    //
+    // Response headers:
+    // Content-Type: "application/x-streamed-protobuf; proto=prometheus.ChunkedReadResponse"
+    // Content-Encoding: ""
+    STREAMED_XOR_CHUNKS = 1;
+  }
+
+  // accepted_response_types allows negotiating the content type of the response.
+  //
+  // Response types are taken from the list in the FIFO order. If no response type in `accepted_response_types` is
+  // implemented by server, error is returned.
+  // For request that do not contain `accepted_response_types` field the SAMPLES response type will be used.
+  repeated ResponseType accepted_response_types = 2;
+}
+
+// ReadResponse is a response when response_type equals SAMPLES.
+message ReadResponse {
+  // In same order as the request's queries.
+  repeated QueryResult results = 1;
+}
+
+message Query {
+  int64 start_timestamp_ms = 1;
+  int64 end_timestamp_ms = 2;
+  repeated prometheus.LabelMatcher matchers = 3;
+  prometheus.ReadHints hints = 4;
+}
+
+message QueryResult {
+  // Samples within a time series must be ordered by time.
+  repeated prometheus.TimeSeries timeseries = 1;
+}
+
+// ChunkedReadResponse is a response when response_type equals STREAMED_XOR_CHUNKS.
+// We strictly stream full series after series, optionally split by time. This means that a single frame can contain
+// partition of the single series, but once a new series is started to be streamed it means that no more chunks will
+// be sent for previous one. Series are returned sorted in the same way TSDB block are internally.
+message ChunkedReadResponse {
+  repeated prometheus.ChunkedSeries chunked_series = 1;
+
+  // query_index represents an index of the query from ReadRequest.queries these chunks relates to.
+  int64 query_index = 2;
+}
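As a usage sketch for the WriteRequest message above, populated through the standard protoc-generated C++ API: the header path assumes protoc's default output naming for prompb/remote.proto, and the metric name, labels, and sample values are invented for illustration. Real remote-write traffic would additionally snappy-compress the payload and POST it over HTTP, which is omitted here.

```cpp
#include <iostream>
#include <string>

#include "prompb/remote.pb.h" // assumed generated header name

int main()
{
    prometheus::WriteRequest request;

    // One series: {__name__="http_requests_total", job="api"} with a single sample.
    auto * series = request.add_timeseries();
    auto * name_label = series->add_labels();
    name_label->set_name("__name__");
    name_label->set_value("http_requests_total");
    auto * job_label = series->add_labels();
    job_label->set_name("job");
    job_label->set_value("api");

    auto * sample = series->add_samples();
    sample->set_value(42.0);
    sample->set_timestamp(1700000000000); // milliseconds since the epoch

    // Serialize to the protobuf wire format (pre-compression payload).
    std::string payload;
    request.SerializeToString(&payload);
    std::cout << "serialized " << payload.size() << " bytes\n";
}
```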
contrib/prometheus-protobufs/prompb/types.proto (new file)
@@ -0,0 +1,187 @@
+// Copyright 2017 Prometheus Team
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+package prometheus;
+
+option go_package = "prompb";
+
+import "gogoproto/gogo.proto";
+
+message MetricMetadata {
+  enum MetricType {
+    UNKNOWN = 0;
+    COUNTER = 1;
+    GAUGE = 2;
+    HISTOGRAM = 3;
+    GAUGEHISTOGRAM = 4;
+    SUMMARY = 5;
+    INFO = 6;
+    STATESET = 7;
+  }
+
+  // Represents the metric type, these match the set from Prometheus.
+  // Refer to github.com/prometheus/common/model/metadata.go for details.
+  MetricType type = 1;
+  string metric_family_name = 2;
+  string help = 4;
+  string unit = 5;
+}
+
+message Sample {
+  double value = 1;
+  // timestamp is in ms format, see model/timestamp/timestamp.go for
+  // conversion from time.Time to Prometheus timestamp.
+  int64 timestamp = 2;
+}
+
+message Exemplar {
+  // Optional, can be empty.
+  repeated Label labels = 1 [(gogoproto.nullable) = false];
+  double value = 2;
+  // timestamp is in ms format, see model/timestamp/timestamp.go for
+  // conversion from time.Time to Prometheus timestamp.
+  int64 timestamp = 3;
+}
+
+// A native histogram, also known as a sparse histogram.
+// Original design doc:
+// https://docs.google.com/document/d/1cLNv3aufPZb3fNfaJgdaRBZsInZKKIHo9E6HinJVbpM/edit
+// The appendix of this design doc also explains the concept of float
+// histograms. This Histogram message can represent both, the usual
+// integer histogram as well as a float histogram.
+message Histogram {
+  enum ResetHint {
+    UNKNOWN = 0; // Need to test for a counter reset explicitly.
+    YES = 1; // This is the 1st histogram after a counter reset.
+    NO = 2; // There was no counter reset between this and the previous Histogram.
+    GAUGE = 3; // This is a gauge histogram where counter resets don't happen.
+  }
+
+  oneof count { // Count of observations in the histogram.
+    uint64 count_int = 1;
+    double count_float = 2;
+  }
+  double sum = 3; // Sum of observations in the histogram.
+  // The schema defines the bucket schema. Currently, valid numbers
+  // are -4 <= n <= 8. They are all for base-2 bucket schemas, where 1
+  // is a bucket boundary in each case, and then each power of two is
+  // divided into 2^n logarithmic buckets. Or in other words, each
+  // bucket boundary is the previous boundary times 2^(2^-n). In the
+  // future, more bucket schemas may be added using numbers < -4 or >
+  // 8.
+  sint32 schema = 4;
+  double zero_threshold = 5; // Breadth of the zero bucket.
+  oneof zero_count { // Count in zero bucket.
+    uint64 zero_count_int = 6;
+    double zero_count_float = 7;
+  }
+
+  // Negative Buckets.
+  repeated BucketSpan negative_spans = 8 [(gogoproto.nullable) = false];
+  // Use either "negative_deltas" or "negative_counts", the former for
+  // regular histograms with integer counts, the latter for float
+  // histograms.
+  repeated sint64 negative_deltas = 9; // Count delta of each bucket compared to previous one (or to zero for 1st bucket).
+  repeated double negative_counts = 10; // Absolute count of each bucket.
+
+  // Positive Buckets.
+  repeated BucketSpan positive_spans = 11 [(gogoproto.nullable) = false];
+  // Use either "positive_deltas" or "positive_counts", the former for
+  // regular histograms with integer counts, the latter for float
+  // histograms.
+  repeated sint64 positive_deltas = 12; // Count delta of each bucket compared to previous one (or to zero for 1st bucket).
+  repeated double positive_counts = 13; // Absolute count of each bucket.
+
+  ResetHint reset_hint = 14;
+  // timestamp is in ms format, see model/timestamp/timestamp.go for
+  // conversion from time.Time to Prometheus timestamp.
+  int64 timestamp = 15;
+}
+
+// A BucketSpan defines a number of consecutive buckets with their
+// offset. Logically, it would be more straightforward to include the
+// bucket counts in the Span. However, the protobuf representation is
+// more compact in the way the data is structured here (with all the
+// buckets in a single array separate from the Spans).
+message BucketSpan {
+  sint32 offset = 1; // Gap to previous span, or starting point for 1st span (which can be negative).
+  uint32 length = 2; // Length of consecutive buckets.
+}
+
+// TimeSeries represents samples and labels for a single time series.
+message TimeSeries {
+  // For a timeseries to be valid, and for the samples and exemplars
+  // to be ingested by the remote system properly, the labels field is required.
+  repeated Label labels = 1 [(gogoproto.nullable) = false];
+  repeated Sample samples = 2 [(gogoproto.nullable) = false];
+  repeated Exemplar exemplars = 3 [(gogoproto.nullable) = false];
+  repeated Histogram histograms = 4 [(gogoproto.nullable) = false];
+}
+
+message Label {
+  string name = 1;
+  string value = 2;
+}
+
+message Labels {
+  repeated Label labels = 1 [(gogoproto.nullable) = false];
+}
+
+// Matcher specifies a rule, which can match or set of labels or not.
+message LabelMatcher {
+  enum Type {
+    EQ = 0;
+    NEQ = 1;
+    RE = 2;
+    NRE = 3;
+  }
+  Type type = 1;
+  string name = 2;
+  string value = 3;
+}
+
+message ReadHints {
+  int64 step_ms = 1; // Query step size in milliseconds.
+  string func = 2; // String representation of surrounding function or aggregation.
+  int64 start_ms = 3; // Start time in milliseconds.
+  int64 end_ms = 4; // End time in milliseconds.
+  repeated string grouping = 5; // List of label names used in aggregation.
+  bool by = 6; // Indicate whether it is without or by.
+  int64 range_ms = 7; // Range vector selector range in milliseconds.
+}
+
+// Chunk represents a TSDB chunk.
+// Time range [min, max] is inclusive.
+message Chunk {
+  int64 min_time_ms = 1;
+  int64 max_time_ms = 2;
+
+  // We require this to match chunkenc.Encoding.
+  enum Encoding {
+    UNKNOWN = 0;
+    XOR = 1;
+    HISTOGRAM = 2;
+    FLOAT_HISTOGRAM = 3;
+  }
+  Encoding type = 3;
+  bytes data = 4;
+}
+
+// ChunkedSeries represents single, encoded time series.
+message ChunkedSeries {
+  // Labels should be sorted.
+  repeated Label labels = 1 [(gogoproto.nullable) = false];
+  // Chunks will be in start time order and may overlap.
+  repeated Chunk chunks = 2 [(gogoproto.nullable) = false];
+}
contrib/re2
vendored
2
contrib/re2
vendored
@ -1 +1 @@
|
|||||||
Subproject commit a807e8a3aac2cc33c77b7071efea54fcabe38e0c
|
Subproject commit 85dd7ad833a73095ecf3e3baea608ba051bbe2c7
|
@ -28,16 +28,20 @@ set(RE2_SOURCES
|
|||||||
add_library(_re2 ${RE2_SOURCES})
|
add_library(_re2 ${RE2_SOURCES})
|
||||||
target_include_directories(_re2 PUBLIC "${SRC_DIR}")
|
target_include_directories(_re2 PUBLIC "${SRC_DIR}")
|
||||||
target_link_libraries(_re2 PRIVATE
|
target_link_libraries(_re2 PRIVATE
|
||||||
|
absl::absl_check
|
||||||
|
absl::absl_log
|
||||||
absl::base
|
absl::base
|
||||||
absl::core_headers
|
absl::core_headers
|
||||||
absl::fixed_array
|
absl::fixed_array
|
||||||
|
absl::flags
|
||||||
absl::flat_hash_map
|
absl::flat_hash_map
|
||||||
absl::flat_hash_set
|
absl::flat_hash_set
|
||||||
|
absl::hash
|
||||||
absl::inlined_vector
|
absl::inlined_vector
|
||||||
absl::strings
|
|
||||||
absl::str_format
|
|
||||||
absl::synchronization
|
|
||||||
absl::optional
|
absl::optional
|
||||||
absl::span)
|
absl::span
|
||||||
|
absl::str_format
|
||||||
|
absl::strings
|
||||||
|
absl::synchronization)
|
||||||
|
|
||||||
add_library(ch_contrib::re2 ALIAS _re2)
|
add_library(ch_contrib::re2 ALIAS _re2)
|
||||||
|
contrib/rocksdb vendored
@@ -1 +1 @@
-Subproject commit 3a0b80ca9d6eebb38fad7ea3f41dfc9db4f6a984
+Subproject commit 078fa5638690004e1f744076d1bdcc4e93767304

@@ -1,7 +1,7 @@
-option (ENABLE_ROCKSDB "Enable rocksdb library" ${ENABLE_LIBRARIES})
+option (ENABLE_ROCKSDB "Enable RocksDB" ${ENABLE_LIBRARIES})
 
 if (NOT ENABLE_ROCKSDB)
-    message (STATUS "Not using rocksdb")
+    message (STATUS "Not using RocksDB")
     return()
 endif()
 
contrib/s2geometry vendored
@@ -1 +1 @@
-Subproject commit 0547c38371777a1c1c8be263a6f05c3bf71bb05b
+Subproject commit 6522a40338d58752c2a4227a3fc2bc4107c73e43

@@ -1,7 +1,7 @@
-option(ENABLE_S2_GEOMETRY "Enable S2 geometry library" ${ENABLE_LIBRARIES})
+option(ENABLE_S2_GEOMETRY "Enable S2 Geometry" ${ENABLE_LIBRARIES})
 
 if (NOT ENABLE_S2_GEOMETRY)
-    message(STATUS "Not using S2 geometry")
+    message(STATUS "Not using S2 Geometry")
     return()
 endif()
 
@@ -38,6 +38,7 @@ set(S2_SRCS
     "${S2_SOURCE_DIR}/s2/s2cell_index.cc"
     "${S2_SOURCE_DIR}/s2/s2cell_union.cc"
     "${S2_SOURCE_DIR}/s2/s2centroids.cc"
+    "${S2_SOURCE_DIR}/s2/s2chain_interpolation_query.cc"
     "${S2_SOURCE_DIR}/s2/s2closest_cell_query.cc"
     "${S2_SOURCE_DIR}/s2/s2closest_edge_query.cc"
     "${S2_SOURCE_DIR}/s2/s2closest_point_query.cc"
@@ -46,6 +47,7 @@ set(S2_SRCS
     "${S2_SOURCE_DIR}/s2/s2coords.cc"
     "${S2_SOURCE_DIR}/s2/s2crossing_edge_query.cc"
     "${S2_SOURCE_DIR}/s2/s2debug.cc"
+    "${S2_SOURCE_DIR}/s2/s2density_tree.cc"
     "${S2_SOURCE_DIR}/s2/s2earth.cc"
     "${S2_SOURCE_DIR}/s2/s2edge_clipping.cc"
     "${S2_SOURCE_DIR}/s2/s2edge_crosser.cc"
@@ -53,8 +55,10 @@ set(S2_SRCS
     "${S2_SOURCE_DIR}/s2/s2edge_distances.cc"
     "${S2_SOURCE_DIR}/s2/s2edge_tessellator.cc"
     "${S2_SOURCE_DIR}/s2/s2error.cc"
+    "${S2_SOURCE_DIR}/s2/s2fractal.cc"
     "${S2_SOURCE_DIR}/s2/s2furthest_edge_query.cc"
     "${S2_SOURCE_DIR}/s2/s2hausdorff_distance_query.cc"
+    "${S2_SOURCE_DIR}/s2/s2index_cell_data.cc"
     "${S2_SOURCE_DIR}/s2/s2latlng.cc"
     "${S2_SOURCE_DIR}/s2/s2latlng_rect.cc"
     "${S2_SOURCE_DIR}/s2/s2latlng_rect_bounder.cc"
@@ -63,10 +67,10 @@ set(S2_SRCS
     "${S2_SOURCE_DIR}/s2/s2lax_polyline_shape.cc"
     "${S2_SOURCE_DIR}/s2/s2loop.cc"
     "${S2_SOURCE_DIR}/s2/s2loop_measures.cc"
+    "${S2_SOURCE_DIR}/s2/s2max_distance_targets.cc"
     "${S2_SOURCE_DIR}/s2/s2measures.cc"
     "${S2_SOURCE_DIR}/s2/s2memory_tracker.cc"
     "${S2_SOURCE_DIR}/s2/s2metrics.cc"
-    "${S2_SOURCE_DIR}/s2/s2max_distance_targets.cc"
     "${S2_SOURCE_DIR}/s2/s2min_distance_targets.cc"
     "${S2_SOURCE_DIR}/s2/s2padded_cell.cc"
     "${S2_SOURCE_DIR}/s2/s2point_compression.cc"
@@ -80,10 +84,11 @@ set(S2_SRCS
     "${S2_SOURCE_DIR}/s2/s2predicates.cc"
     "${S2_SOURCE_DIR}/s2/s2projections.cc"
     "${S2_SOURCE_DIR}/s2/s2r2rect.cc"
-    "${S2_SOURCE_DIR}/s2/s2region.cc"
-    "${S2_SOURCE_DIR}/s2/s2region_term_indexer.cc"
+    "${S2_SOURCE_DIR}/s2/s2random.cc"
     "${S2_SOURCE_DIR}/s2/s2region_coverer.cc"
     "${S2_SOURCE_DIR}/s2/s2region_intersection.cc"
+    "${S2_SOURCE_DIR}/s2/s2region_sharder.cc"
+    "${S2_SOURCE_DIR}/s2/s2region_term_indexer.cc"
     "${S2_SOURCE_DIR}/s2/s2region_union.cc"
     "${S2_SOURCE_DIR}/s2/s2shape_index.cc"
     "${S2_SOURCE_DIR}/s2/s2shape_index_buffered_region.cc"
@@ -94,9 +99,12 @@ set(S2_SRCS
     "${S2_SOURCE_DIR}/s2/s2shapeutil_coding.cc"
     "${S2_SOURCE_DIR}/s2/s2shapeutil_contains_brute_force.cc"
     "${S2_SOURCE_DIR}/s2/s2shapeutil_conversion.cc"
+    "${S2_SOURCE_DIR}/s2/s2shapeutil_count_vertices.cc"
     "${S2_SOURCE_DIR}/s2/s2shapeutil_edge_iterator.cc"
+    "${S2_SOURCE_DIR}/s2/s2shapeutil_edge_wrap.cc"
     "${S2_SOURCE_DIR}/s2/s2shapeutil_get_reference_point.cc"
     "${S2_SOURCE_DIR}/s2/s2shapeutil_visit_crossing_edge_pairs.cc"
+    "${S2_SOURCE_DIR}/s2/s2testing.cc"
     "${S2_SOURCE_DIR}/s2/s2text_format.cc"
     "${S2_SOURCE_DIR}/s2/s2wedge_relations.cc"
     "${S2_SOURCE_DIR}/s2/s2winding_operation.cc"
@@ -140,6 +148,7 @@ target_link_libraries(_s2 PRIVATE
     absl::strings
     absl::type_traits
     absl::utility
+    absl::vlog_is_on
 )
 
 target_include_directories(_s2 SYSTEM BEFORE PUBLIC "${S2_SOURCE_DIR}/")
2 contrib/sysroot (vendored)
@@ -1 +1 @@
-Subproject commit 39c4713334f9f156dbf508f548d510d9129a657c
+Subproject commit cc385041b226d1fc28ead14dbab5d40a5f821dd8
2 contrib/vectorscan (vendored)
@@ -1 +1 @@
-Subproject commit 38431d111781843741a781a57a6381a527d900a4
+Subproject commit d29730e1cb9daaa66bda63426cdce83505d2c809
@@ -1,11 +1,8 @@
-# We use vectorscan, a portable and API/ABI-compatible drop-in replacement for hyperscan.
-
+# Vectorscan is drop-in replacement for Hyperscan.
 if ((ARCH_AMD64 AND NOT NO_SSE3_OR_HIGHER) OR ARCH_AARCH64)
-    option (ENABLE_VECTORSCAN "Enable vectorscan library" ${ENABLE_LIBRARIES})
+    option (ENABLE_VECTORSCAN "Enable vectorscan" ${ENABLE_LIBRARIES})
 endif()
 
-# TODO PPC should generally work but needs manual generation of ppc/config.h file on a PPC machine
-
 if (NOT ENABLE_VECTORSCAN)
     message (STATUS "Not using vectorscan")
     return()
@@ -272,34 +269,24 @@ if (ARCH_AARCH64)
     )
 endif()
 
-# TODO
-# if (ARCH_PPC64LE)
-#     list(APPEND SRCS
-#         "${LIBRARY_DIR}/src/util/supervector/arch/ppc64el/impl.cpp"
-#     )
-# endif()
-
 add_library (_vectorscan ${SRCS})
 
-target_compile_options (_vectorscan PRIVATE
-    -fno-sanitize=undefined # assume the library takes care of itself
-    -O2 -fno-strict-aliasing -fno-omit-frame-pointer -fvisibility=hidden # options from original build system
-)
 # library has too much debug information
 if (OMIT_HEAVY_DEBUG_SYMBOLS)
     target_compile_options (_vectorscan PRIVATE -g0)
 endif()
 
-# Include version header manually generated by running the original build system
-target_include_directories (_vectorscan SYSTEM PRIVATE common)
+target_include_directories (_vectorscan SYSTEM PUBLIC "${LIBRARY_DIR}/src")
+
+# Makes the version header visible. It was generated by running the native build system manually.
+# Please update whenever you update vectorscan.
+target_include_directories (_vectorscan SYSTEM PUBLIC common)
 
 # vectorscan inherited some patched in-source versions of boost headers to fix a bug in
 # boost 1.69. This bug has been solved long ago but vectorscan's source code still
 # points to the patched versions, so include it here.
 target_include_directories (_vectorscan SYSTEM PRIVATE "${LIBRARY_DIR}/include")
 
-target_include_directories (_vectorscan SYSTEM PUBLIC "${LIBRARY_DIR}/src")
-
 # Include platform-specific config header generated by manually running the original build system
 # Please regenerate these files if you update vectorscan.
@@ -32,8 +32,12 @@
 /**
  * A version string to identify this release of Hyperscan.
  */
-#define HS_VERSION_STRING "5.4.7 2022-06-20"
+#define HS_VERSION_STRING "5.4.11 2024-07-04"
 
 #define HS_VERSION_32BIT ((5 << 24) | (1 << 16) | (7 << 8) | 0)
 
+#define HS_MAJOR 5
+#define HS_MINOR 4
+#define HS_PATCH 11
+
 #endif /* HS_VERSION_H_C6428FAF8E3713 */
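
The new HS_MAJOR/HS_MINOR/HS_PATCH defines spell out the components that HS_VERSION_32BIT packs into single bytes. A minimal sketch of that packing, with the field layout inferred from the constant above (the function name is an assumption, not part of the header); note the constant shown still encodes 5.1.7 even though the version string now reports 5.4.11:

    # Sketch of the HS_VERSION_32BIT byte layout inferred from the constant above.
    def hs_version_32bit(major: int, minor: int, patch: int) -> int:
        return (major << 24) | (minor << 16) | (patch << 8)

    # The constant still present in the header corresponds to 5.1.7:
    assert hs_version_32bit(5, 1, 7) == (5 << 24) | (1 << 16) | (7 << 8) | 0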
@@ -41,8 +41,7 @@
     "docker/test/stateless": {
         "name": "clickhouse/stateless-test",
         "dependent": [
-            "docker/test/stateful",
-            "docker/test/unit"
+            "docker/test/stateful"
         ]
     },
     "docker/test/stateful": {
@@ -122,15 +121,16 @@
     "docker/test/base": {
         "name": "clickhouse/test-base",
         "dependent": [
+            "docker/test/clickbench",
             "docker/test/fuzzer",
-            "docker/test/libfuzzer",
             "docker/test/integration/base",
             "docker/test/keeper-jepsen",
+            "docker/test/libfuzzer",
             "docker/test/server-jepsen",
             "docker/test/sqllogic",
             "docker/test/sqltest",
-            "docker/test/clickbench",
-            "docker/test/stateless"
+            "docker/test/stateless",
+            "docker/test/unit"
         ]
     },
     "docker/test/integration/kerberized_hadoop": {
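
For context, the "dependent" lists in docker/images.json let CI rebuild every image downstream of a changed base image; the hunks above move docker/test/unit under clickhouse/test-base and keep the lists sorted. An illustrative sketch of how such a map could be expanded transitively (an assumption about how the file is consumed, not ClickHouse CI's actual rebuild code):

    import json

    def affected_images(images_json_path, changed_path):
        # Transitively collect every image that depends on changed_path.
        with open(images_json_path) as f:
            images = json.load(f)
        seen, queue = set(), [changed_path]
        while queue:
            path = queue.pop()
            if path in seen:
                continue
            seen.add(path)
            queue.extend(images.get(path, {}).get("dependent", []))
        return seen

    # e.g. affected_images("docker/images.json", "docker/test/base")
    # now includes docker/test/unit via the updated dependent list.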
@@ -34,7 +34,7 @@ RUN arch=${TARGETARCH:-amd64} \
 # lts / testing / prestable / etc
 ARG REPO_CHANNEL="stable"
 ARG REPOSITORY="https://packages.clickhouse.com/tgz/${REPO_CHANNEL}"
-ARG VERSION="24.4.1.2088"
+ARG VERSION="24.6.2.17"
 ARG PACKAGES="clickhouse-keeper"
 ARG DIRECT_DOWNLOAD_URLS=""
 
@@ -111,6 +111,7 @@ fi
 mv ./programs/clickhouse* /output || mv ./programs/*_fuzzer /output
 [ -x ./programs/self-extracting/clickhouse ] && mv ./programs/self-extracting/clickhouse /output
 [ -x ./programs/self-extracting/clickhouse-stripped ] && mv ./programs/self-extracting/clickhouse-stripped /output
+[ -x ./programs/self-extracting/clickhouse-keeper ] && mv ./programs/self-extracting/clickhouse-keeper /output
 mv ./src/unit_tests_dbms /output ||: # may not exist for some binary builds
 mv ./programs/*.dict ./programs/*.options ./programs/*_seed_corpus.zip /output ||: # libFuzzer oss-fuzz compatible infrastructure
 
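
The added line follows the pattern of its neighbours: move the self-extracting clickhouse-keeper binary into /output only if it was actually built and is executable. A rough Python equivalent of that shell guard, for illustration only:

    import os, shutil

    src = "./programs/self-extracting/clickhouse-keeper"
    if os.access(src, os.X_OK):        # mirrors the shell test [ -x ... ]
        shutil.move(src, "/output")    # mirrors && mv ... /output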
@@ -276,10 +276,7 @@ def parse_env_variables(
     if is_release_build(debug_build, package_type, sanitizer, coverage):
         cmake_flags.append("-DSPLIT_DEBUG_SYMBOLS=ON")
         result.append("WITH_PERFORMANCE=1")
-        if is_cross_arm:
-            cmake_flags.append("-DBUILD_STANDALONE_KEEPER=1")
-        else:
-            result.append("BUILD_MUSL_KEEPER=1")
+        cmake_flags.append("-DBUILD_STANDALONE_KEEPER=1")
     elif package_type == "fuzzers":
         cmake_flags.append("-DENABLE_FUZZING=1")
         cmake_flags.append("-DENABLE_PROTOBUF=1")
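
The effect of the hunk above: every release build now gets the standalone Keeper, where previously only cross-ARM builds did and native builds produced a musl-linked Keeper instead. A sketch of the old and new branching (the wrapper functions and boolean arguments are illustrative; only the flag strings come from the diff):

    def keeper_flags_old(is_release_build, is_cross_arm):
        if not is_release_build:
            return [], []
        if is_cross_arm:
            return ["-DBUILD_STANDALONE_KEEPER=1"], []   # cmake flags, env results
        return [], ["BUILD_MUSL_KEEPER=1"]

    def keeper_flags_new(is_release_build, is_cross_arm):
        # Cross-compilation no longer matters: the musl Keeper variant is gone.
        return (["-DBUILD_STANDALONE_KEEPER=1"], []) if is_release_build else ([], [])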
47 docker/reqgenerator.py (new file)
@@ -0,0 +1,47 @@
+#!/usr/bin/env python3
+# To run this script you must install docker and pipdeptree python package
+#
+
+import subprocess
+import os
+import sys
+
+
+def build_docker_deps(image_name, imagedir):
+    cmd = f"""docker run --entrypoint "/bin/bash" {image_name} -c "pip install pipdeptree 2>/dev/null 1>/dev/null && pipdeptree --freeze --warn silence | sed 's/ \+//g' | sort | uniq" > {imagedir}/requirements.txt"""
+    subprocess.check_call(cmd, shell=True)
+
+
+def check_docker_file_install_with_pip(filepath):
+    image_name = None
+    with open(filepath, "r") as f:
+        for line in f:
+            if "docker build" in line:
+                arr = line.split(" ")
+                if len(arr) > 4:
+                    image_name = arr[4]
+            if "pip3 install" in line or "pip install" in line:
+                return image_name, True
+    return image_name, False
+
+
+def process_affected_images(images_dir):
+    for root, _dirs, files in os.walk(images_dir):
+        for f in files:
+            if f == "Dockerfile":
+                docker_file_path = os.path.join(root, f)
+                print("Checking image on path", docker_file_path)
+                image_name, has_pip = check_docker_file_install_with_pip(
+                    docker_file_path
+                )
+                if has_pip:
+                    print("Find pip in", image_name)
+                    try:
+                        build_docker_deps(image_name, root)
+                    except Exception as ex:
+                        print(ex)
+                else:
+                    print("Pip not found in", docker_file_path)
+
+
+process_affected_images(sys.argv[1])
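
Judging from the final line, the new helper takes one directory argument and walks it recursively for Dockerfiles; a plausible invocation (the target directory below is an assumption, not stated in the diff) would be:

    python3 docker/reqgenerator.py docker

For each image whose Dockerfile installs packages with pip, it starts the image, installs pipdeptree inside it, and freezes the resulting dependency tree into a requirements.txt next to that Dockerfile.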
Some files were not shown because too many files have changed in this diff.