Mirror of https://github.com/ClickHouse/ClickHouse.git (synced 2024-11-29 19:12:03 +00:00)

Merge branch 'master' into fix_race_async_inserts_queue

Commit 5d7e5e5a72

.clang-tidy (200 lines changed)
@@ -5,128 +5,128 @@
# a) the new check is not controversial (this includes many checks in readability-* and google-*) or
# b) too noisy (checks with > 100 new warnings are considered noisy, this includes e.g. cppcoreguidelines-*).

# TODO: Once clang(-tidy) 17 is the minimum, we can convert this list to YAML
# See https://releases.llvm.org/17.0.1/tools/clang/tools/extra/docs/ReleaseNotes.html#improvements-to-clang-tidy
HeaderFilterRegex: '^.*/(base|src|programs|utils)/.*(h|hpp)$'

# TODO Let clang-tidy check headers in further directories
# --> HeaderFilterRegex: '^.*/(src|base|programs|utils)/.*(h|hpp)$'
HeaderFilterRegex: '^.*/(base)/.*(h|hpp)$'
Checks: [
'*',

Checks: '*,
-abseil-*,
'-abseil-*',

-altera-*,
'-altera-*',

-android-*,
'-android-*',

-bugprone-assignment-in-if-condition,
-bugprone-branch-clone,
-bugprone-easily-swappable-parameters,
-bugprone-exception-escape,
-bugprone-implicit-widening-of-multiplication-result,
-bugprone-narrowing-conversions,
-bugprone-not-null-terminated-result,
-bugprone-reserved-identifier, # useful but too slow, TODO retry when https://reviews.llvm.org/rG1c282052624f9d0bd273bde0b47b30c96699c6c7 is merged
-bugprone-unchecked-optional-access,
'-bugprone-assignment-in-if-condition',
'-bugprone-branch-clone',
'-bugprone-easily-swappable-parameters',
'-bugprone-exception-escape',
'-bugprone-forward-declaration-namespace',
'-bugprone-implicit-widening-of-multiplication-result',
'-bugprone-narrowing-conversions',
'-bugprone-not-null-terminated-result',
'-bugprone-reserved-identifier', # useful but too slow, TODO retry when https://reviews.llvm.org/rG1c282052624f9d0bd273bde0b47b30c96699c6c7 is merged
'-bugprone-unchecked-optional-access',

-cert-dcl16-c,
-cert-dcl37-c,
-cert-dcl51-cpp,
-cert-err58-cpp,
-cert-msc32-c,
-cert-msc51-cpp,
-cert-oop54-cpp,
-cert-oop57-cpp,
'-cert-dcl16-c',
'-cert-dcl37-c',
'-cert-dcl51-cpp',
'-cert-err58-cpp',
'-cert-msc32-c',
'-cert-msc51-cpp',
'-cert-oop54-cpp',
'-cert-oop57-cpp',

-clang-analyzer-unix.Malloc,
'-clang-analyzer-optin.performance.Padding',

-cppcoreguidelines-*, # impractical in a codebase as large as ClickHouse, also slow
'-clang-analyzer-unix.Malloc',

-darwin-*,
'-cppcoreguidelines-*', # impractical in a codebase as large as ClickHouse, also slow

-fuchsia-*,
'-darwin-*',

-google-build-using-namespace,
-google-readability-braces-around-statements,
-google-readability-casting,
-google-readability-function-size,
-google-readability-namespace-comments,
-google-readability-todo,
'-fuchsia-*',

-hicpp-avoid-c-arrays,
-hicpp-avoid-goto,
-hicpp-braces-around-statements,
-hicpp-explicit-conversions,
-hicpp-function-size,
-hicpp-member-init,
-hicpp-move-const-arg,
-hicpp-multiway-paths-covered,
-hicpp-named-parameter,
-hicpp-no-array-decay,
-hicpp-no-assembler,
-hicpp-no-malloc,
-hicpp-signed-bitwise,
-hicpp-special-member-functions,
-hicpp-uppercase-literal-suffix,
-hicpp-use-auto,
-hicpp-use-emplace,
-hicpp-vararg,
'-google-build-using-namespace',
'-google-readability-braces-around-statements',
'-google-readability-casting',
'-google-readability-function-size',
'-google-readability-namespace-comments',
'-google-readability-todo',

-linuxkernel-*,
'-hicpp-avoid-c-arrays',
'-hicpp-avoid-goto',
'-hicpp-braces-around-statements',
'-hicpp-explicit-conversions',
'-hicpp-function-size',
'-hicpp-member-init',
'-hicpp-move-const-arg',
'-hicpp-multiway-paths-covered',
'-hicpp-named-parameter',
'-hicpp-no-array-decay',
'-hicpp-no-assembler',
'-hicpp-no-malloc',
'-hicpp-signed-bitwise',
'-hicpp-special-member-functions',
'-hicpp-uppercase-literal-suffix',
'-hicpp-use-auto',
'-hicpp-use-emplace',
'-hicpp-vararg',

-llvm-*,
'-linuxkernel-*',

-llvmlibc-*,
'-llvm-*',

-openmp-*,
'-llvmlibc-*',

-misc-const-correctness,
-misc-include-cleaner, # useful but far too many occurrences
-misc-no-recursion,
-misc-non-private-member-variables-in-classes,
-misc-confusable-identifiers, # useful but slooow
-misc-use-anonymous-namespace,
'-openmp-*',

-modernize-avoid-c-arrays,
-modernize-concat-nested-namespaces,
-modernize-macro-to-enum,
-modernize-pass-by-value,
-modernize-return-braced-init-list,
-modernize-use-auto,
-modernize-use-default-member-init,
-modernize-use-emplace,
-modernize-use-nodiscard,
-modernize-use-override,
-modernize-use-trailing-return-type,
'-misc-const-correctness',
'-misc-include-cleaner', # useful but far too many occurrences
'-misc-no-recursion',
'-misc-non-private-member-variables-in-classes',
'-misc-confusable-identifiers', # useful but slooow
'-misc-use-anonymous-namespace',

-performance-inefficient-string-concatenation,
-performance-no-int-to-ptr,
-performance-avoid-endl,
-performance-unnecessary-value-param,
'-modernize-avoid-c-arrays',
'-modernize-concat-nested-namespaces',
'-modernize-macro-to-enum',
'-modernize-pass-by-value',
'-modernize-return-braced-init-list',
'-modernize-use-auto',
'-modernize-use-default-member-init',
'-modernize-use-emplace',
'-modernize-use-nodiscard',
'-modernize-use-override',
'-modernize-use-trailing-return-type',

-portability-simd-intrinsics,
'-performance-inefficient-string-concatenation',
'-performance-no-int-to-ptr',
'-performance-avoid-endl',
'-performance-unnecessary-value-param',

-readability-avoid-unconditional-preprocessor-if,
-readability-braces-around-statements,
-readability-convert-member-functions-to-static,
-readability-else-after-return,
-readability-function-cognitive-complexity,
-readability-function-size,
-readability-identifier-length,
-readability-identifier-naming, # useful but too slow
-readability-implicit-bool-conversion,
-readability-isolate-declaration,
-readability-magic-numbers,
-readability-named-parameter,
-readability-redundant-declaration,
-readability-simplify-boolean-expr,
-readability-static-accessed-through-instance,
-readability-suspicious-call-argument,
-readability-uppercase-literal-suffix,
-readability-use-anyofallof,
'-portability-simd-intrinsics',

-zircon-*,
'
'-readability-avoid-unconditional-preprocessor-if',
'-readability-braces-around-statements',
'-readability-convert-member-functions-to-static',
'-readability-else-after-return',
'-readability-function-cognitive-complexity',
'-readability-function-size',
'-readability-identifier-length',
'-readability-identifier-naming', # useful but too slow
'-readability-implicit-bool-conversion',
'-readability-isolate-declaration',
'-readability-magic-numbers',
'-readability-named-parameter',
'-readability-redundant-declaration',
'-readability-simplify-boolean-expr',
'-readability-static-accessed-through-instance',
'-readability-suspicious-call-argument',
'-readability-uppercase-literal-suffix',
'-readability-use-anyofallof',

'-zircon-*'
]

WarningsAsErrors: '*'
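For readers scanning the interleaved old/new lines above: the unquoted, comma-terminated entries belong to the old single-string form of Checks, and the quoted entries inside Checks: [ ... ] are the new list form that the file's own TODO and release-notes comment refer to. A minimal sketch of the two spellings, with the check list truncated purely for illustration (the real list is the one shown above):

    # Old spelling: Checks is one long single-quoted, comma-separated string.
    Checks: '*,
        -abseil-*,
        -altera-*'

    # New spelling: Checks is a YAML list of individually quoted patterns.
    Checks: [
        '*',
        '-abseil-*',
        '-altera-*'
    ]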
.github/ISSUE_TEMPLATE/85_bug-report.md (6 lines changed)

@@ -17,7 +17,7 @@ assignees: ''

> A link to reproducer in [https://fiddle.clickhouse.com/](https://fiddle.clickhouse.com/).

**Does it reproduce on recent release?**
**Does it reproduce on the most recent release?**

[The list of releases](https://github.com/ClickHouse/ClickHouse/blob/master/utils/list-versions/version_date.tsv)

@@ -34,11 +34,11 @@ assignees: ''
**How to reproduce**

* Which ClickHouse server version to use
* Which interface to use, if matters
* Which interface to use, if it matters
* Non-default settings, if any
* `CREATE TABLE` statements for all tables involved
* Sample data for all these tables, use [clickhouse-obfuscator](https://github.com/ClickHouse/ClickHouse/blob/master/programs/obfuscator/Obfuscator.cpp#L42-L80) if necessary
* Queries to run that lead to unexpected result
* Queries to run that lead to an unexpected result

**Expected behavior**
.github/PULL_REQUEST_TEMPLATE.md (1 line changed)

@@ -12,6 +12,7 @@ tests/ci/cancel_and_rerun_workflow_lambda/app.py
- Build/Testing/Packaging Improvement
- Documentation (changelog entry is not required)
- Bug Fix (user-visible misbehavior in an official stable release)
- CI Fix or Improvement (changelog entry is not required)
- Not for changelog (changelog entry is not required)
.github/workflows/backport_branches.yml (41 lines changed)

@@ -11,7 +11,7 @@ on: # yamllint disable-line rule:truthy
- 'backport/**'
jobs:
RunConfig:
runs-on: [self-hosted, style-checker]
runs-on: [self-hosted, style-checker-aarch64]
outputs:
data: ${{ steps.runconfig.outputs.CI_DATA }}
steps:
@@ -67,8 +67,6 @@ jobs:
test_name: Compatibility check (amd64)
runner_type: style-checker
data: ${{ needs.RunConfig.outputs.data }}
run_command: |
python3 compatibility_check.py --check-name "Compatibility check (amd64)" --check-glibc --check-distributions
CompatibilityCheckAarch64:
needs: [RunConfig, BuilderDebAarch64]
if: ${{ !failure() && !cancelled() }}
@@ -77,8 +75,6 @@ jobs:
test_name: Compatibility check (aarch64)
runner_type: style-checker
data: ${{ needs.RunConfig.outputs.data }}
run_command: |
python3 compatibility_check.py --check-name "Compatibility check (aarch64)" --check-glibc
#########################################################################################
#################################### ORDINARY BUILDS ####################################
#########################################################################################
@@ -138,21 +134,22 @@ jobs:
############################################################################################
##################################### Docker images #######################################
############################################################################################
DockerServerImages:
DockerServerImage:
needs: [RunConfig, BuilderDebRelease, BuilderDebAarch64]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Docker server and keeper images
test_name: Docker server image
runner_type: style-checker
data: ${{ needs.RunConfig.outputs.data }}
DockerKeeperImage:
needs: [RunConfig, BuilderDebRelease, BuilderDebAarch64]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Docker keeper image
runner_type: style-checker
data: ${{ needs.RunConfig.outputs.data }}
checkout_depth: 0 # It MUST BE THE SAME for all dependencies and the job itself
run_command: |
cd "$GITHUB_WORKSPACE/tests/ci"
python3 docker_server.py --release-type head --no-push \
--image-repo clickhouse/clickhouse-server --image-path docker/server --allow-build-reuse
python3 docker_server.py --release-type head --no-push \
--image-repo clickhouse/clickhouse-keeper --image-path docker/keeper --allow-build-reuse
############################################################################################
##################################### BUILD REPORTER #######################################
############################################################################################
@@ -169,14 +166,8 @@ jobs:
uses: ./.github/workflows/reusable_test.yml
with:
test_name: ClickHouse build check
runner_type: style-checker
runner_type: style-checker-aarch64
data: ${{ needs.RunConfig.outputs.data }}
additional_envs: |
NEEDS_DATA<<NDENV
${{ toJSON(needs) }}
NDENV
run_command: |
python3 build_report_check.py "$CHECK_NAME"
BuilderSpecialReport:
# run report check for failed builds to indicate the CI error
if: ${{ !cancelled() }}
@@ -187,14 +178,8 @@ jobs:
uses: ./.github/workflows/reusable_test.yml
with:
test_name: ClickHouse special build check
runner_type: style-checker
runner_type: style-checker-aarch64
data: ${{ needs.RunConfig.outputs.data }}
additional_envs: |
NEEDS_DATA<<NDENV
${{ toJSON(needs) }}
NDENV
run_command: |
python3 build_report_check.py "$CHECK_NAME"
############################################################################################
#################################### INSTALL PACKAGES ######################################
############################################################################################
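Read as a whole, the Docker hunk above replaces the combined DockerServerImages job (with its explicit docker_server.py run_command) with two thinner jobs that delegate to the shared reusable_test.yml workflow. Regrouping the new-side lines of the hunk into proper YAML nesting gives roughly this shape (the indentation is mine; the line content is taken from the hunk):

    DockerServerImage:
      needs: [RunConfig, BuilderDebRelease, BuilderDebAarch64]
      if: ${{ !failure() && !cancelled() }}
      uses: ./.github/workflows/reusable_test.yml
      with:
        test_name: Docker server image
        runner_type: style-checker
        data: ${{ needs.RunConfig.outputs.data }}
    DockerKeeperImage:
      needs: [RunConfig, BuilderDebRelease, BuilderDebAarch64]
      if: ${{ !failure() && !cancelled() }}
      uses: ./.github/workflows/reusable_test.yml
      with:
        test_name: Docker keeper image
        runner_type: style-checker
        data: ${{ needs.RunConfig.outputs.data }}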
.github/workflows/master.yml (184 lines changed)

@@ -11,7 +11,7 @@ on: # yamllint disable-line rule:truthy
- 'master'
jobs:
RunConfig:
runs-on: [self-hosted, style-checker]
runs-on: [self-hosted, style-checker-aarch64]
outputs:
data: ${{ steps.runconfig.outputs.CI_DATA }}
steps:
@@ -35,7 +35,7 @@ jobs:
- name: PrepareRunConfig
id: runconfig
run: |
python3 "$GITHUB_WORKSPACE/tests/ci/ci.py" --configure --rebuild-all-binaries --outfile ${{ runner.temp }}/ci_run_data.json
python3 "$GITHUB_WORKSPACE/tests/ci/ci.py" --configure --outfile ${{ runner.temp }}/ci_run_data.json

echo "::group::CI configuration"
python3 -m json.tool ${{ runner.temp }}/ci_run_data.json
@@ -55,7 +55,6 @@ jobs:
uses: ./.github/workflows/reusable_docker.yml
with:
data: ${{ needs.RunConfig.outputs.data }}
set_latest: true
StyleCheck:
needs: [RunConfig, BuildDockers]
if: ${{ !failure() && !cancelled() }}
@@ -74,8 +73,6 @@ jobs:
test_name: Compatibility check (amd64)
runner_type: style-checker
data: ${{ needs.RunConfig.outputs.data }}
run_command: |
python3 compatibility_check.py --check-name "Compatibility check (amd64)" --check-glibc --check-distributions
CompatibilityCheckAarch64:
needs: [RunConfig, BuilderDebAarch64]
if: ${{ !failure() && !cancelled() }}
@@ -84,8 +81,6 @@ jobs:
test_name: Compatibility check (aarch64)
runner_type: style-checker
data: ${{ needs.RunConfig.outputs.data }}
run_command: |
python3 compatibility_check.py --check-name "Compatibility check (aarch64)" --check-glibc
#########################################################################################
#################################### ORDINARY BUILDS ####################################
#########################################################################################
@@ -98,6 +93,14 @@ jobs:
build_name: package_release
checkout_depth: 0
data: ${{ needs.RunConfig.outputs.data }}
BuilderDebReleaseCoverage:
needs: [RunConfig, BuildDockers]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_build.yml
with:
build_name: package_release_coverage
checkout_depth: 0
data: ${{ needs.RunConfig.outputs.data }}
BuilderDebAarch64:
needs: [RunConfig, BuildDockers]
if: ${{ !failure() && !cancelled() }}
@@ -242,22 +245,22 @@ jobs:
############################################################################################
##################################### Docker images #######################################
############################################################################################
DockerServerImages:
DockerServerImage:
needs: [RunConfig, BuilderDebRelease, BuilderDebAarch64]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Docker server and keeper images
test_name: Docker server image
runner_type: style-checker
data: ${{ needs.RunConfig.outputs.data }}
DockerKeeperImage:
needs: [RunConfig, BuilderDebRelease, BuilderDebAarch64]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Docker keeper image
runner_type: style-checker
data: ${{ needs.RunConfig.outputs.data }}
# FIXME: avoid using 0 checkout
checkout_depth: 0 # It MUST BE THE SAME for all dependencies and the job itself
run_command: |
cd "$GITHUB_WORKSPACE/tests/ci"
python3 docker_server.py --release-type head \
--image-repo clickhouse/clickhouse-server --image-path docker/server --allow-build-reuse
python3 docker_server.py --release-type head \
--image-repo clickhouse/clickhouse-keeper --image-path docker/keeper --allow-build-reuse
############################################################################################
##################################### BUILD REPORTER #######################################
############################################################################################
@@ -266,7 +269,6 @@ jobs:
if: ${{ !cancelled() }}
needs:
- RunConfig
- BuilderBinRelease
- BuilderDebAarch64
- BuilderDebAsan
- BuilderDebDebug
@@ -277,14 +279,8 @@ jobs:
uses: ./.github/workflows/reusable_test.yml
with:
test_name: ClickHouse build check
runner_type: style-checker
runner_type: style-checker-aarch64
data: ${{ needs.RunConfig.outputs.data }}
additional_envs: |
NEEDS_DATA<<NDENV
${{ toJSON(needs) }}
NDENV
run_command: |
python3 build_report_check.py "$CHECK_NAME"
BuilderSpecialReport:
# run report check for failed builds to indicate the CI error
if: ${{ !cancelled() }}
@@ -301,17 +297,13 @@ jobs:
- BuilderBinAarch64V80Compat
- BuilderBinClangTidy
- BuilderBinAmd64Musl
- BuilderDebReleaseCoverage
- BuilderBinRelease
uses: ./.github/workflows/reusable_test.yml
with:
test_name: ClickHouse special build check
runner_type: style-checker
runner_type: style-checker-aarch64
data: ${{ needs.RunConfig.outputs.data }}
additional_envs: |
NEEDS_DATA<<NDENV
${{ toJSON(needs) }}
NDENV
run_command: |
python3 build_report_check.py "$CHECK_NAME"
MarkReleaseReady:
if: ${{ !failure() && !cancelled() }}
needs:
@@ -321,11 +313,25 @@ jobs:
- BuilderDebAarch64
runs-on: [self-hosted, style-checker]
steps:
- name: Debug
run: |
echo need with different filters
cat << 'EOF'
${{ toJSON(needs) }}
${{ toJSON(needs.*.result) }}
no failures ${{ !contains(needs.*.result, 'failure') }}
no skips ${{ !contains(needs.*.result, 'skipped') }}
no both ${{ !(contains(needs.*.result, 'skipped') || contains(needs.*.result, 'failure')) }}
EOF
- name: Not ready
# fail the job to be able restart it
if: ${{ contains(needs.*.result, 'skipped') || contains(needs.*.result, 'failure') }}
run: exit 1
- name: Check out repository code
if: ${{ ! (contains(needs.*.result, 'skipped') || contains(needs.*.result, 'failure')) }}
uses: ClickHouse/checkout@v1
with:
clear-repository: true
- name: Mark Commit Release Ready
if: ${{ ! (contains(needs.*.result, 'skipped') || contains(needs.*.result, 'failure')) }}
run: |
cd "$GITHUB_WORKSPACE/tests/ci"
python3 mark_release_ready.py
@@ -363,36 +369,28 @@ jobs:
test_name: Stateless tests (release)
runner_type: func-tester
data: ${{ needs.RunConfig.outputs.data }}
FunctionalStatelessTestReleaseDatabaseOrdinary:
FunctionalStatelessTestReleaseAnalyzerS3Replicated:
needs: [RunConfig, BuilderDebRelease]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Stateless tests (release, DatabaseOrdinary)
test_name: Stateless tests (release, analyzer, s3, DatabaseReplicated)
runner_type: func-tester
data: ${{ needs.RunConfig.outputs.data }}
FunctionalStatelessTestReleaseDatabaseReplicated:
needs: [RunConfig, BuilderDebRelease]
FunctionalStatelessTestS3Debug:
needs: [RunConfig, BuilderDebDebug]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Stateless tests (release, DatabaseReplicated)
test_name: Stateless tests (debug, s3 storage)
runner_type: func-tester
data: ${{ needs.RunConfig.outputs.data }}
FunctionalStatelessTestReleaseAnalyzer:
needs: [RunConfig, BuilderDebRelease]
FunctionalStatelessTestS3Tsan:
needs: [RunConfig, BuilderDebTsan]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Stateless tests (release, analyzer)
runner_type: func-tester
data: ${{ needs.RunConfig.outputs.data }}
FunctionalStatelessTestReleaseS3:
needs: [RunConfig, BuilderDebRelease]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Stateless tests (release, s3 storage)
test_name: Stateless tests (tsan, s3 storage)
runner_type: func-tester
data: ${{ needs.RunConfig.outputs.data }}
FunctionalStatelessTestAarch64:
@@ -503,6 +501,55 @@ jobs:
test_name: Stateful tests (debug)
runner_type: func-tester
data: ${{ needs.RunConfig.outputs.data }}
# Parallel replicas
FunctionalStatefulTestDebugParallelReplicas:
needs: [RunConfig, BuilderDebDebug]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Stateful tests (debug, ParallelReplicas)
runner_type: func-tester
data: ${{ needs.RunConfig.outputs.data }}
FunctionalStatefulTestUBsanParallelReplicas:
needs: [RunConfig, BuilderDebUBsan]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Stateful tests (ubsan, ParallelReplicas)
runner_type: func-tester
data: ${{ needs.RunConfig.outputs.data }}
FunctionalStatefulTestMsanParallelReplicas:
needs: [RunConfig, BuilderDebMsan]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Stateful tests (msan, ParallelReplicas)
runner_type: func-tester
data: ${{ needs.RunConfig.outputs.data }}
FunctionalStatefulTestTsanParallelReplicas:
needs: [RunConfig, BuilderDebTsan]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Stateful tests (tsan, ParallelReplicas)
runner_type: func-tester
data: ${{ needs.RunConfig.outputs.data }}
FunctionalStatefulTestAsanParallelReplicas:
needs: [RunConfig, BuilderDebAsan]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Stateful tests (asan, ParallelReplicas)
runner_type: func-tester
data: ${{ needs.RunConfig.outputs.data }}
FunctionalStatefulTestReleaseParallelReplicas:
needs: [RunConfig, BuilderDebRelease]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Stateful tests (release, ParallelReplicas)
runner_type: func-tester
data: ${{ needs.RunConfig.outputs.data }}
##############################################################################################
########################### ClickBench #######################################################
##############################################################################################
@@ -710,6 +757,28 @@ jobs:
runner_type: func-tester-aarch64
data: ${{ needs.RunConfig.outputs.data }}
##############################################################################################
############################ SQLLOGIC TEST ###################################################
##############################################################################################
SQLLogicTestRelease:
needs: [RunConfig, BuilderDebRelease]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Sqllogic test (release)
runner_type: func-tester
data: ${{ needs.RunConfig.outputs.data }}
##############################################################################################
##################################### SQL TEST ###############################################
##############################################################################################
SQLTest:
needs: [RunConfig, BuilderDebRelease]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: SQLTest
runner_type: fuzzer-unit-tester
data: ${{ needs.RunConfig.outputs.data }}
##############################################################################################
###################################### SQLANCER FUZZERS ######################################
##############################################################################################
SQLancerTestRelease:
@@ -734,15 +803,14 @@ jobs:
- MarkReleaseReady
- FunctionalStatelessTestDebug
- FunctionalStatelessTestRelease
- FunctionalStatelessTestReleaseDatabaseOrdinary
- FunctionalStatelessTestReleaseDatabaseReplicated
- FunctionalStatelessTestReleaseAnalyzer
- FunctionalStatelessTestReleaseS3
- FunctionalStatelessTestReleaseAnalyzerS3Replicated
- FunctionalStatelessTestAarch64
- FunctionalStatelessTestAsan
- FunctionalStatelessTestTsan
- FunctionalStatelessTestMsan
- FunctionalStatelessTestUBsan
- FunctionalStatelessTestS3Debug
- FunctionalStatelessTestS3Tsan
- FunctionalStatefulTestDebug
- FunctionalStatefulTestRelease
- FunctionalStatefulTestAarch64
@@ -750,6 +818,12 @@ jobs:
- FunctionalStatefulTestTsan
- FunctionalStatefulTestMsan
- FunctionalStatefulTestUBsan
- FunctionalStatefulTestDebugParallelReplicas
- FunctionalStatefulTestUBsanParallelReplicas
- FunctionalStatefulTestMsanParallelReplicas
- FunctionalStatefulTestTsanParallelReplicas
- FunctionalStatefulTestAsanParallelReplicas
- FunctionalStatefulTestReleaseParallelReplicas
- StressTestDebug
- StressTestAsan
- StressTestTsan
@@ -775,6 +849,8 @@ jobs:
- UnitTestsReleaseClang
- SQLancerTestRelease
- SQLancerTestDebug
- SQLLogicTestRelease
- SQLTest
runs-on: [self-hosted, style-checker]
steps:
- name: Check out repository code
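The MarkReleaseReady hunk in master.yml above is easier to read once the flattened lines are regrouped: every step is gated on the aggregate status of the build jobs it depends on, and a deliberately failing "Not ready" step keeps the job restartable until those builds succeed. A sketch of that gating pattern, re-indented from the hunk (the Debug step, the checkout step, and the needs list are omitted here):

    MarkReleaseReady:
      if: ${{ !failure() && !cancelled() }}
      runs-on: [self-hosted, style-checker]
      steps:
        - name: Not ready
          # fail the job to be able restart it
          if: ${{ contains(needs.*.result, 'skipped') || contains(needs.*.result, 'failure') }}
          run: exit 1
        - name: Mark Commit Release Ready
          if: ${{ ! (contains(needs.*.result, 'skipped') || contains(needs.*.result, 'failure')) }}
          run: |
            cd "$GITHUB_WORKSPACE/tests/ci"
            python3 mark_release_ready.py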
.github/workflows/nightly.yml (63 lines changed)

@@ -14,7 +14,7 @@ jobs:
# The task for having a preserved ENV and event.json for later investigation
uses: ./.github/workflows/debug.yml
RunConfig:
runs-on: [self-hosted, style-checker]
runs-on: [self-hosted, style-checker-aarch64]
outputs:
data: ${{ steps.runconfig.outputs.CI_DATA }}
steps:
@@ -28,7 +28,7 @@ jobs:
id: runconfig
run: |
echo "::group::configure CI run"
python3 "$GITHUB_WORKSPACE/tests/ci/ci.py" --configure --skip-jobs --rebuild-all-docker --outfile ${{ runner.temp }}/ci_run_data.json
python3 "$GITHUB_WORKSPACE/tests/ci/ci.py" --configure --skip-jobs --outfile ${{ runner.temp }}/ci_run_data.json
echo "::endgroup::"

echo "::group::CI run configure results"
@@ -45,62 +45,3 @@ jobs:
with:
data: "${{ needs.RunConfig.outputs.data }}"
set_latest: true
SonarCloud:
runs-on: [self-hosted, builder]
env:
SONAR_SCANNER_VERSION: 4.8.0.2856
SONAR_SERVER_URL: "https://sonarcloud.io"
BUILD_WRAPPER_OUT_DIR: build_wrapper_output_directory # Directory where build-wrapper output will be placed
CC: clang-17
CXX: clang++-17
steps:
- name: Check out repository code
uses: ClickHouse/checkout@v1
with:
clear-repository: true
fetch-depth: 0 # Shallow clones should be disabled for a better relevancy of analysis
filter: tree:0
submodules: true
- name: Set up JDK 11
uses: actions/setup-java@v1
with:
java-version: 11
- name: Download and set up sonar-scanner
env:
SONAR_SCANNER_DOWNLOAD_URL: https://binaries.sonarsource.com/Distribution/sonar-scanner-cli/sonar-scanner-cli-${{ env.SONAR_SCANNER_VERSION }}-linux.zip
run: |
mkdir -p "$HOME/.sonar"
curl -sSLo "$HOME/.sonar/sonar-scanner.zip" "${{ env.SONAR_SCANNER_DOWNLOAD_URL }}"
unzip -o "$HOME/.sonar/sonar-scanner.zip" -d "$HOME/.sonar/"
echo "$HOME/.sonar/sonar-scanner-${{ env.SONAR_SCANNER_VERSION }}-linux/bin" >> "$GITHUB_PATH"
- name: Download and set up build-wrapper
env:
BUILD_WRAPPER_DOWNLOAD_URL: ${{ env.SONAR_SERVER_URL }}/static/cpp/build-wrapper-linux-x86.zip
run: |
curl -sSLo "$HOME/.sonar/build-wrapper-linux-x86.zip" "${{ env.BUILD_WRAPPER_DOWNLOAD_URL }}"
unzip -o "$HOME/.sonar/build-wrapper-linux-x86.zip" -d "$HOME/.sonar/"
echo "$HOME/.sonar/build-wrapper-linux-x86" >> "$GITHUB_PATH"
- name: Set Up Build Tools
run: |
sudo apt-get update
sudo apt-get install -yq git cmake ccache ninja-build python3 yasm nasm
sudo bash -c "$(wget -O - https://apt.llvm.org/llvm.sh)"
- name: Run build-wrapper
run: |
mkdir build
cd build
cmake ..
cd ..
build-wrapper-linux-x86-64 --out-dir ${{ env.BUILD_WRAPPER_OUT_DIR }} cmake --build build/
- name: Run sonar-scanner
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
run: |
sonar-scanner \
--define sonar.host.url="${{ env.SONAR_SERVER_URL }}" \
--define sonar.cfamily.build-wrapper-output="${{ env.BUILD_WRAPPER_OUT_DIR }}" \
--define sonar.projectKey="ClickHouse_ClickHouse" \
--define sonar.organization="clickhouse-java" \
--define sonar.cfamily.cpp23.enabled=true \
--define sonar.exclusions="**/*.java,**/*.ts,**/*.js,**/*.css,**/*.sql"
.github/workflows/pull_request.yml (899 lines changed)

@@ -13,12 +13,10 @@ on: # yamllint disable-line rule:truthy
- opened
branches:
- master
##########################################################################################
##################################### SMALL CHECKS #######################################
##########################################################################################

jobs:
RunConfig:
runs-on: [self-hosted, style-checker]
runs-on: [self-hosted, style-checker-aarch64]
outputs:
data: ${{ steps.runconfig.outputs.CI_DATA }}
steps:
@@ -70,13 +68,13 @@ jobs:
python3 "$GITHUB_WORKSPACE/tests/ci/ci.py" --infile ${{ runner.temp }}/ci_run_data.json --post --job-name 'Style check'
BuildDockers:
needs: [RunConfig]
if: ${{ !failure() && !cancelled() }}
if: ${{ !failure() && !cancelled() && toJson(fromJson(needs.RunConfig.outputs.data).docker_data.missing_multi) != '[]' }}
uses: ./.github/workflows/reusable_docker.yml
with:
data: ${{ needs.RunConfig.outputs.data }}
StyleCheck:
needs: [RunConfig, BuildDockers]
if: ${{ !failure() && !cancelled() }}
if: ${{ !failure() && !cancelled() && contains(fromJson(needs.RunConfig.outputs.data).jobs_data.jobs_to_do, 'Style check')}}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Style check
@@ -89,881 +87,94 @@ jobs:
ROBOT_CLICKHOUSE_SSH_KEY<<RCSK
${{secrets.ROBOT_CLICKHOUSE_SSH_KEY}}
RCSK
DocsCheck:
needs: [RunConfig, StyleCheck]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Docs check
runner_type: func-tester-aarch64
data: ${{ needs.RunConfig.outputs.data }}
run_command: |
python3 docs_check.py
FastTest:
needs: [RunConfig, StyleCheck]
if: ${{ !failure() && !cancelled() }}
if: ${{ !failure() && !cancelled() && contains(fromJson(needs.RunConfig.outputs.data).jobs_data.jobs_to_do, 'Fast test') }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Fast tests
test_name: Fast test
runner_type: builder
data: ${{ needs.RunConfig.outputs.data }}
run_command: |
python3 fast_test_check.py
CompatibilityCheckX86:
needs: [RunConfig, BuilderDebRelease]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Compatibility check (amd64)
runner_type: style-checker
data: ${{ needs.RunConfig.outputs.data }}
run_command: |
python3 compatibility_check.py --check-name "Compatibility check (amd64)" --check-glibc --check-distributions
CompatibilityCheckAarch64:
needs: [RunConfig, BuilderDebAarch64]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Compatibility check (aarch64)
runner_type: style-checker
data: ${{ needs.RunConfig.outputs.data }}
run_command: |
python3 compatibility_check.py --check-name "Compatibility check (aarch64)" --check-glibc
#########################################################################################
#################################### ORDINARY BUILDS ####################################
#########################################################################################
BuilderDebDebug:

################################# Main statges #################################
# for main CI chain
#
Builds_1:
needs: [RunConfig, FastTest]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_build.yml
if: ${{ !failure() && !cancelled() && contains(fromJson(needs.RunConfig.outputs.data).stages_data.stages_to_do, 'Builds_1') }}
# using callable wf (reusable_stage.yml) allows to group all nested jobs under a tab
uses: ./.github/workflows/reusable_build_stage.yml
with:
build_name: package_debug
stage: Builds_1
data: ${{ needs.RunConfig.outputs.data }}
BuilderDebRelease:
needs: [RunConfig, FastTest]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_build.yml
Tests_1:
needs: [RunConfig, Builds_1]
if: ${{ !failure() && !cancelled() && contains(fromJson(needs.RunConfig.outputs.data).stages_data.stages_to_do, 'Tests_1') }}
# using callable wf (reusable_stage.yml) allows to group all nested jobs under a tab
uses: ./.github/workflows/reusable_test_stage.yml
with:
build_name: package_release
checkout_depth: 0
stage: Tests_1
data: ${{ needs.RunConfig.outputs.data }}
BuilderDebAarch64:
needs: [RunConfig, FastTest]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_build.yml
Builds_2:
needs: [RunConfig, Builds_1]
if: ${{ !failure() && !cancelled() && contains(fromJson(needs.RunConfig.outputs.data).stages_data.stages_to_do, 'Builds_2') }}
# using callable wf (reusable_stage.yml) allows to group all nested jobs under a tab
uses: ./.github/workflows/reusable_build_stage.yml
with:
build_name: package_aarch64
checkout_depth: 0
stage: Builds_2
data: ${{ needs.RunConfig.outputs.data }}
BuilderBinRelease:
needs: [RunConfig, FastTest]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_build.yml
Tests_2:
needs: [RunConfig, Builds_2]
if: ${{ !failure() && !cancelled() && contains(fromJson(needs.RunConfig.outputs.data).stages_data.stages_to_do, 'Tests_2') }}
# using callable wf (reusable_stage.yml) allows to group all nested jobs under a tab
uses: ./.github/workflows/reusable_test_stage.yml
with:
build_name: binary_release
stage: Tests_2
data: ${{ needs.RunConfig.outputs.data }}
BuilderDebAsan:
needs: [RunConfig, FastTest]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_build.yml
with:
build_name: package_asan
data: ${{ needs.RunConfig.outputs.data }}
BuilderDebUBsan:
needs: [RunConfig, FastTest]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_build.yml
with:
build_name: package_ubsan
data: ${{ needs.RunConfig.outputs.data }}
BuilderDebTsan:
needs: [RunConfig, FastTest]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_build.yml
with:
build_name: package_tsan
data: ${{ needs.RunConfig.outputs.data }}
BuilderDebMsan:
needs: [RunConfig, FastTest]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_build.yml
with:
build_name: package_msan
data: ${{ needs.RunConfig.outputs.data }}
##########################################################################################
##################################### SPECIAL BUILDS #####################################
##########################################################################################
BuilderBinClangTidy:
needs: [RunConfig, FastTest]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_build.yml
with:
build_name: binary_tidy
data: ${{ needs.RunConfig.outputs.data }}
BuilderBinDarwin:
needs: [RunConfig, FastTest]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_build.yml
with:
build_name: binary_darwin
data: ${{ needs.RunConfig.outputs.data }}
BuilderBinAarch64:
needs: [RunConfig, FastTest]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_build.yml
with:
build_name: binary_aarch64
data: ${{ needs.RunConfig.outputs.data }}
BuilderBinFreeBSD:
needs: [RunConfig, FastTest]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_build.yml
with:
build_name: binary_freebsd
data: ${{ needs.RunConfig.outputs.data }}
BuilderBinDarwinAarch64:
needs: [RunConfig, FastTest]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_build.yml
with:
build_name: binary_darwin_aarch64
data: ${{ needs.RunConfig.outputs.data }}
BuilderBinPPC64:
needs: [RunConfig, FastTest]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_build.yml
with:
build_name: binary_ppc64le
data: ${{ needs.RunConfig.outputs.data }}
BuilderBinAmd64Compat:
needs: [RunConfig, FastTest]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_build.yml
with:
build_name: binary_amd64_compat
data: ${{ needs.RunConfig.outputs.data }}
BuilderBinAmd64Musl:
needs: [RunConfig, FastTest]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_build.yml
with:
build_name: binary_amd64_musl
data: ${{ needs.RunConfig.outputs.data }}
BuilderBinAarch64V80Compat:
needs: [RunConfig, FastTest]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_build.yml
with:
build_name: binary_aarch64_v80compat
data: ${{ needs.RunConfig.outputs.data }}
BuilderBinRISCV64:
needs: [RunConfig, FastTest]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_build.yml
with:
build_name: binary_riscv64
data: ${{ needs.RunConfig.outputs.data }}
BuilderBinS390X:
needs: [RunConfig, FastTest]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_build.yml
with:
build_name: binary_s390x
data: ${{ needs.RunConfig.outputs.data }}
############################################################################################
##################################### Docker images #######################################
############################################################################################
DockerServerImages:
needs: [RunConfig, BuilderDebRelease, BuilderDebAarch64]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Docker server and keeper images
runner_type: style-checker
data: ${{ needs.RunConfig.outputs.data }}
checkout_depth: 0 # It MUST BE THE SAME for all dependencies and the job itself
run_command: |
cd "$GITHUB_WORKSPACE/tests/ci"
python3 docker_server.py --release-type head --no-push \
--image-repo clickhouse/clickhouse-server --image-path docker/server --allow-build-reuse
python3 docker_server.py --release-type head --no-push \
--image-repo clickhouse/clickhouse-keeper --image-path docker/keeper --allow-build-reuse
############################################################################################
##################################### BUILD REPORTER #######################################
############################################################################################
BuilderReport:

################################# Reports #################################
# Reports should by run even if Builds_1/2 fail, so put them separatly in wf (not in Tests_1/2)
Builds_1_Report:
# run report check for failed builds to indicate the CI error
if: ${{ !cancelled() }}
if: ${{ !cancelled() && contains(fromJson(needs.RunConfig.outputs.data).jobs_data.jobs_to_do, 'ClickHouse build check') }}
needs:
- RunConfig
- BuilderBinRelease
- BuilderDebAarch64
- BuilderDebAsan
- BuilderDebDebug
- BuilderDebMsan
- BuilderDebRelease
- BuilderDebTsan
- BuilderDebUBsan
- Builds_1
uses: ./.github/workflows/reusable_test.yml
with:
test_name: ClickHouse build check
runner_type: style-checker
runner_type: style-checker-aarch64
data: ${{ needs.RunConfig.outputs.data }}
additional_envs: |
NEEDS_DATA<<NDENV
${{ toJSON(needs) }}
NDENV
run_command: |
python3 build_report_check.py "$CHECK_NAME"
BuilderSpecialReport:
Builds_2_Report:
# run report check for failed builds to indicate the CI error
if: ${{ !cancelled() }}
if: ${{ !cancelled() && contains(fromJson(needs.RunConfig.outputs.data).jobs_data.jobs_to_do, 'ClickHouse special build check') }}
needs:
- RunConfig
- BuilderBinAarch64
- BuilderBinDarwin
- BuilderBinDarwinAarch64
- BuilderBinFreeBSD
- BuilderBinPPC64
- BuilderBinRISCV64
- BuilderBinS390X
- BuilderBinAmd64Compat
- BuilderBinAarch64V80Compat
- BuilderBinClangTidy
- Builds_2
uses: ./.github/workflows/reusable_test.yml
with:
test_name: ClickHouse special build check
runner_type: style-checker
data: ${{ needs.RunConfig.outputs.data }}
additional_envs: |
NEEDS_DATA<<NDENV
${{ toJSON(needs) }}
NDENV
run_command: |
python3 build_report_check.py "$CHECK_NAME"
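The largest change in pull_request.yml above is the move from one job per build or test to staged umbrella jobs: each stage is gated on the stage list that RunConfig emits in its CI data and delegates to a reusable stage workflow. Re-indented from the new-side lines of the hunk (Builds_2 and Tests_2 follow the same shape), the chain looks roughly like this:

    Builds_1:
      needs: [RunConfig, FastTest]
      if: ${{ !failure() && !cancelled() && contains(fromJson(needs.RunConfig.outputs.data).stages_data.stages_to_do, 'Builds_1') }}
      # using callable wf (reusable_stage.yml) allows to group all nested jobs under a tab
      uses: ./.github/workflows/reusable_build_stage.yml
      with:
        stage: Builds_1
        data: ${{ needs.RunConfig.outputs.data }}
    Tests_1:
      needs: [RunConfig, Builds_1]
      if: ${{ !failure() && !cancelled() && contains(fromJson(needs.RunConfig.outputs.data).stages_data.stages_to_do, 'Tests_1') }}
      uses: ./.github/workflows/reusable_test_stage.yml
      with:
        stage: Tests_1
        data: ${{ needs.RunConfig.outputs.data }}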
############################################################################################
#################################### INSTALL PACKAGES ######################################
############################################################################################
InstallPackagesTestRelease:
needs: [RunConfig, BuilderDebRelease]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Install packages (amd64)
runner_type: style-checker
data: ${{ needs.RunConfig.outputs.data }}
run_command: |
python3 install_check.py "$CHECK_NAME"
InstallPackagesTestAarch64:
needs: [RunConfig, BuilderDebAarch64]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Install packages (arm64)
runner_type: style-checker-aarch64
data: ${{ needs.RunConfig.outputs.data }}
run_command: |
python3 install_check.py "$CHECK_NAME"
##############################################################################################
########################### FUNCTIONAl STATELESS TESTS #######################################
##############################################################################################
FunctionalStatelessTestRelease:
needs: [RunConfig, BuilderDebRelease]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Stateless tests (release)
runner_type: func-tester
data: ${{ needs.RunConfig.outputs.data }}
FunctionalStatelessTestReleaseDatabaseReplicated:
needs: [RunConfig, BuilderDebRelease]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Stateless tests (release, DatabaseReplicated)
runner_type: func-tester
data: ${{ needs.RunConfig.outputs.data }}
FunctionalStatelessTestReleaseAnalyzer:
needs: [RunConfig, BuilderDebRelease]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Stateless tests (release, analyzer)
runner_type: func-tester
data: ${{ needs.RunConfig.outputs.data }}
FunctionalStatelessTestReleaseS3:
needs: [RunConfig, BuilderDebRelease]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Stateless tests (release, s3 storage)
runner_type: func-tester
data: ${{ needs.RunConfig.outputs.data }}
FunctionalStatelessTestS3Debug:
needs: [RunConfig, BuilderDebDebug]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Stateless tests (debug, s3 storage)
runner_type: func-tester
data: ${{ needs.RunConfig.outputs.data }}
FunctionalStatelessTestS3Tsan:
needs: [RunConfig, BuilderDebTsan]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Stateless tests (tsan, s3 storage)
runner_type: func-tester
data: ${{ needs.RunConfig.outputs.data }}
FunctionalStatelessTestAarch64:
needs: [RunConfig, BuilderDebAarch64]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Stateless tests (aarch64)
runner_type: func-tester-aarch64
data: ${{ needs.RunConfig.outputs.data }}
FunctionalStatelessTestAsan:
needs: [RunConfig, BuilderDebAsan]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Stateless tests (asan)
runner_type: func-tester
data: ${{ needs.RunConfig.outputs.data }}
FunctionalStatelessTestTsan:
needs: [RunConfig, BuilderDebTsan]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Stateless tests (tsan)
runner_type: func-tester
data: ${{ needs.RunConfig.outputs.data }}
FunctionalStatelessTestMsan:
needs: [RunConfig, BuilderDebMsan]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Stateless tests (msan)
runner_type: func-tester
data: ${{ needs.RunConfig.outputs.data }}
FunctionalStatelessTestUBsan:
needs: [RunConfig, BuilderDebUBsan]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Stateless tests (ubsan)
runner_type: func-tester
data: ${{ needs.RunConfig.outputs.data }}
FunctionalStatelessTestDebug:
needs: [RunConfig, BuilderDebDebug]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Stateless tests (debug)
runner_type: func-tester
data: ${{ needs.RunConfig.outputs.data }}
FunctionalStatelessTestFlakyCheck:
needs: [RunConfig, BuilderDebAsan]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Stateless tests flaky check (asan)
data: ${{ needs.RunConfig.outputs.data }}
runner_type: func-tester
TestsBugfixCheck:
needs: [RunConfig, StyleCheck]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: tests bugfix validate check
runner_type: func-tester
data: ${{ needs.RunConfig.outputs.data }}
additional_envs: |
KILL_TIMEOUT=3600
run_command: |
TEMP_PATH="${TEMP_PATH}/integration" \
python3 integration_test_check.py "Integration $CHECK_NAME" \
--validate-bugfix --post-commit-status=file || echo 'ignore exit code'

TEMP_PATH="${TEMP_PATH}/stateless" \
python3 functional_test_check.py "Stateless $CHECK_NAME" "$KILL_TIMEOUT" \
--validate-bugfix --post-commit-status=file || echo 'ignore exit code'

python3 bugfix_validate_check.py "${TEMP_PATH}/stateless/functional_commit_status.tsv" "${TEMP_PATH}/integration/integration_commit_status.tsv"
##############################################################################################
############################ FUNCTIONAl STATEFUL TESTS #######################################
##############################################################################################
FunctionalStatefulTestRelease:
needs: [RunConfig, BuilderDebRelease]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Stateful tests (release)
runner_type: func-tester
data: ${{ needs.RunConfig.outputs.data }}
FunctionalStatefulTestAarch64:
needs: [RunConfig, BuilderDebAarch64]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Stateful tests (aarch64)
runner_type: func-tester-aarch64
data: ${{ needs.RunConfig.outputs.data }}
FunctionalStatefulTestAsan:
needs: [RunConfig, BuilderDebAsan]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Stateful tests (asan)
runner_type: func-tester
data: ${{ needs.RunConfig.outputs.data }}
FunctionalStatefulTestTsan:
needs: [RunConfig, BuilderDebTsan]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Stateful tests (tsan)
runner_type: func-tester
data: ${{ needs.RunConfig.outputs.data }}
FunctionalStatefulTestMsan:
needs: [RunConfig, BuilderDebMsan]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Stateful tests (msan)
runner_type: func-tester
data: ${{ needs.RunConfig.outputs.data }}
FunctionalStatefulTestUBsan:
needs: [RunConfig, BuilderDebUBsan]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Stateful tests (ubsan)
runner_type: func-tester
data: ${{ needs.RunConfig.outputs.data }}
FunctionalStatefulTestDebug:
needs: [RunConfig, BuilderDebDebug]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Stateful tests (debug)
runner_type: func-tester
data: ${{ needs.RunConfig.outputs.data }}
# Parallel replicas
FunctionalStatefulTestDebugParallelReplicas:
needs: [RunConfig, BuilderDebDebug]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Stateful tests (debug, ParallelReplicas)
runner_type: func-tester
data: ${{ needs.RunConfig.outputs.data }}
FunctionalStatefulTestUBsanParallelReplicas:
needs: [RunConfig, BuilderDebUBsan]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Stateful tests (ubsan, ParallelReplicas)
runner_type: func-tester
data: ${{ needs.RunConfig.outputs.data }}
FunctionalStatefulTestMsanParallelReplicas:
needs: [RunConfig, BuilderDebMsan]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Stateful tests (msan, ParallelReplicas)
runner_type: func-tester
data: ${{ needs.RunConfig.outputs.data }}
FunctionalStatefulTestTsanParallelReplicas:
needs: [RunConfig, BuilderDebTsan]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Stateful tests (tsan, ParallelReplicas)
runner_type: func-tester
data: ${{ needs.RunConfig.outputs.data }}
FunctionalStatefulTestAsanParallelReplicas:
needs: [RunConfig, BuilderDebAsan]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Stateful tests (asan, ParallelReplicas)
runner_type: func-tester
data: ${{ needs.RunConfig.outputs.data }}
FunctionalStatefulTestReleaseParallelReplicas:
needs: [RunConfig, BuilderDebRelease]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Stateful tests (release, ParallelReplicas)
runner_type: func-tester
data: ${{ needs.RunConfig.outputs.data }}
##############################################################################################
########################### ClickBench #######################################################
##############################################################################################
ClickBenchAMD64:
needs: [RunConfig, BuilderDebRelease]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: ClickBench (amd64)
runner_type: func-tester
data: ${{ needs.RunConfig.outputs.data }}
run_command: |
python3 clickbench.py "$CHECK_NAME"
ClickBenchAarch64:
needs: [RunConfig, BuilderDebAarch64]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: ClickBench (aarch64)
runner_type: func-tester-aarch64
data: ${{ needs.RunConfig.outputs.data }}
run_command: |
python3 clickbench.py "$CHECK_NAME"
##############################################################################################
######################################### STRESS TESTS #######################################
##############################################################################################
StressTestAsan:
needs: [RunConfig, BuilderDebAsan]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Stress test (asan)
runner_type: stress-tester
data: ${{ needs.RunConfig.outputs.data }}
StressTestTsan:
needs: [RunConfig, BuilderDebTsan]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Stress test (tsan)
runner_type: stress-tester
data: ${{ needs.RunConfig.outputs.data }}
StressTestMsan:
needs: [RunConfig, BuilderDebMsan]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Stress test (msan)
runner_type: stress-tester
data: ${{ needs.RunConfig.outputs.data }}
StressTestUBsan:
needs: [RunConfig, BuilderDebUBsan]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
|
||||
with:
|
||||
test_name: Stress test (ubsan)
|
||||
runner_type: stress-tester
|
||||
data: ${{ needs.RunConfig.outputs.data }}
|
||||
StressTestDebug:
|
||||
needs: [RunConfig, BuilderDebDebug]
|
||||
if: ${{ !failure() && !cancelled() }}
|
||||
uses: ./.github/workflows/reusable_test.yml
|
||||
with:
|
||||
test_name: Stress test (debug)
|
||||
runner_type: stress-tester
|
||||
data: ${{ needs.RunConfig.outputs.data }}
|
||||
##############################################################################################
|
||||
######################################### UPGRADE CHECK ######################################
|
||||
##############################################################################################
|
||||
UpgradeCheckAsan:
|
||||
needs: [RunConfig, BuilderDebAsan]
|
||||
if: ${{ !failure() && !cancelled() }}
|
||||
uses: ./.github/workflows/reusable_test.yml
|
||||
with:
|
||||
test_name: Upgrade check (asan)
|
||||
runner_type: stress-tester
|
||||
data: ${{ needs.RunConfig.outputs.data }}
|
||||
UpgradeCheckTsan:
|
||||
needs: [RunConfig, BuilderDebTsan]
|
||||
if: ${{ !failure() && !cancelled() }}
|
||||
uses: ./.github/workflows/reusable_test.yml
|
||||
with:
|
||||
test_name: Upgrade check (tsan)
|
||||
runner_type: stress-tester
|
||||
data: ${{ needs.RunConfig.outputs.data }}
|
||||
UpgradeCheckMsan:
|
||||
needs: [RunConfig, BuilderDebMsan]
|
||||
if: ${{ !failure() && !cancelled() }}
|
||||
uses: ./.github/workflows/reusable_test.yml
|
||||
with:
|
||||
test_name: Upgrade check (msan)
|
||||
runner_type: stress-tester
|
||||
data: ${{ needs.RunConfig.outputs.data }}
|
||||
UpgradeCheckDebug:
|
||||
needs: [RunConfig, BuilderDebDebug]
|
||||
if: ${{ !failure() && !cancelled() }}
|
||||
uses: ./.github/workflows/reusable_test.yml
|
||||
with:
|
||||
test_name: Upgrade check (debug)
|
||||
runner_type: stress-tester
|
||||
data: ${{ needs.RunConfig.outputs.data }}
|
||||
##############################################################################################
|
||||
##################################### AST FUZZERS ############################################
|
||||
##############################################################################################
|
||||
ASTFuzzerTestAsan:
|
||||
needs: [RunConfig, BuilderDebAsan]
|
||||
if: ${{ !failure() && !cancelled() }}
|
||||
uses: ./.github/workflows/reusable_test.yml
|
||||
with:
|
||||
test_name: AST fuzzer (asan)
|
||||
runner_type: fuzzer-unit-tester
|
||||
data: ${{ needs.RunConfig.outputs.data }}
|
||||
ASTFuzzerTestTsan:
|
||||
needs: [RunConfig, BuilderDebTsan]
|
||||
if: ${{ !failure() && !cancelled() }}
|
||||
uses: ./.github/workflows/reusable_test.yml
|
||||
with:
|
||||
test_name: AST fuzzer (tsan)
|
||||
runner_type: fuzzer-unit-tester
|
||||
data: ${{ needs.RunConfig.outputs.data }}
|
||||
ASTFuzzerTestUBSan:
|
||||
needs: [RunConfig, BuilderDebUBsan]
|
||||
if: ${{ !failure() && !cancelled() }}
|
||||
uses: ./.github/workflows/reusable_test.yml
|
||||
with:
|
||||
test_name: AST fuzzer (ubsan)
|
||||
runner_type: fuzzer-unit-tester
|
||||
data: ${{ needs.RunConfig.outputs.data }}
|
||||
ASTFuzzerTestMSan:
|
||||
needs: [RunConfig, BuilderDebMsan]
|
||||
if: ${{ !failure() && !cancelled() }}
|
||||
uses: ./.github/workflows/reusable_test.yml
|
||||
with:
|
||||
test_name: AST fuzzer (msan)
|
||||
runner_type: fuzzer-unit-tester
|
||||
data: ${{ needs.RunConfig.outputs.data }}
|
||||
ASTFuzzerTestDebug:
|
||||
needs: [RunConfig, BuilderDebDebug]
|
||||
if: ${{ !failure() && !cancelled() }}
|
||||
uses: ./.github/workflows/reusable_test.yml
|
||||
with:
|
||||
test_name: AST fuzzer (debug)
|
||||
runner_type: fuzzer-unit-tester
|
||||
data: ${{ needs.RunConfig.outputs.data }}
|
||||
#############################################################################################
|
||||
############################# INTEGRATION TESTS #############################################
|
||||
#############################################################################################
|
||||
IntegrationTestsAsan:
|
||||
needs: [RunConfig, BuilderDebAsan]
|
||||
if: ${{ !failure() && !cancelled() }}
|
||||
uses: ./.github/workflows/reusable_test.yml
|
||||
with:
|
||||
test_name: Integration tests (asan)
|
||||
runner_type: stress-tester
|
||||
data: ${{ needs.RunConfig.outputs.data }}
|
||||
IntegrationTestsAnalyzerAsan:
|
||||
needs: [RunConfig, BuilderDebAsan]
|
||||
if: ${{ !failure() && !cancelled() }}
|
||||
uses: ./.github/workflows/reusable_test.yml
|
||||
with:
|
||||
test_name: Integration tests (asan, analyzer)
|
||||
runner_type: stress-tester
|
||||
data: ${{ needs.RunConfig.outputs.data }}
|
||||
IntegrationTestsTsan:
|
||||
needs: [RunConfig, BuilderDebTsan]
|
||||
if: ${{ !failure() && !cancelled() }}
|
||||
uses: ./.github/workflows/reusable_test.yml
|
||||
with:
|
||||
test_name: Integration tests (tsan)
|
||||
runner_type: stress-tester
|
||||
data: ${{ needs.RunConfig.outputs.data }}
|
||||
IntegrationTestsRelease:
|
||||
needs: [RunConfig, BuilderDebRelease]
|
||||
if: ${{ !failure() && !cancelled() }}
|
||||
uses: ./.github/workflows/reusable_test.yml
|
||||
with:
|
||||
test_name: Integration tests (release)
|
||||
runner_type: stress-tester
|
||||
data: ${{ needs.RunConfig.outputs.data }}
|
||||
IntegrationTestsFlakyCheck:
|
||||
needs: [RunConfig, BuilderDebAsan]
|
||||
if: ${{ !failure() && !cancelled() }}
|
||||
uses: ./.github/workflows/reusable_test.yml
|
||||
with:
|
||||
test_name: Integration tests flaky check (asan)
|
||||
runner_type: stress-tester
|
||||
data: ${{ needs.RunConfig.outputs.data }}
|
||||
#############################################################################################
|
||||
#################################### UNIT TESTS #############################################
|
||||
#############################################################################################
|
||||
UnitTestsAsan:
|
||||
needs: [RunConfig, BuilderDebAsan]
|
||||
if: ${{ !failure() && !cancelled() }}
|
||||
uses: ./.github/workflows/reusable_test.yml
|
||||
with:
|
||||
test_name: Unit tests (asan)
|
||||
runner_type: fuzzer-unit-tester
|
||||
data: ${{ needs.RunConfig.outputs.data }}
|
||||
UnitTestsRelease:
|
||||
needs: [RunConfig, BuilderBinRelease]
|
||||
if: ${{ !failure() && !cancelled() }}
|
||||
uses: ./.github/workflows/reusable_test.yml
|
||||
with:
|
||||
test_name: Unit tests (release)
|
||||
runner_type: fuzzer-unit-tester
|
||||
data: ${{ needs.RunConfig.outputs.data }}
|
||||
UnitTestsTsan:
|
||||
needs: [RunConfig, BuilderDebTsan]
|
||||
if: ${{ !failure() && !cancelled() }}
|
||||
uses: ./.github/workflows/reusable_test.yml
|
||||
with:
|
||||
test_name: Unit tests (tsan)
|
||||
runner_type: fuzzer-unit-tester
|
||||
data: ${{ needs.RunConfig.outputs.data }}
|
||||
UnitTestsMsan:
|
||||
needs: [RunConfig, BuilderDebMsan]
|
||||
if: ${{ !failure() && !cancelled() }}
|
||||
uses: ./.github/workflows/reusable_test.yml
|
||||
with:
|
||||
test_name: Unit tests (msan)
|
||||
runner_type: fuzzer-unit-tester
|
||||
data: ${{ needs.RunConfig.outputs.data }}
|
||||
UnitTestsUBsan:
|
||||
needs: [RunConfig, BuilderDebUBsan]
|
||||
if: ${{ !failure() && !cancelled() }}
|
||||
uses: ./.github/workflows/reusable_test.yml
|
||||
with:
|
||||
test_name: Unit tests (ubsan)
|
||||
runner_type: fuzzer-unit-tester
|
||||
data: ${{ needs.RunConfig.outputs.data }}
|
||||
#############################################################################################
|
||||
#################################### PERFORMANCE TESTS ######################################
|
||||
#############################################################################################
|
||||
PerformanceComparisonX86:
|
||||
needs: [RunConfig, BuilderDebRelease]
|
||||
if: ${{ !failure() && !cancelled() }}
|
||||
uses: ./.github/workflows/reusable_test.yml
|
||||
with:
|
||||
test_name: Performance Comparison
|
||||
runner_type: stress-tester
|
||||
data: ${{ needs.RunConfig.outputs.data }}
|
||||
PerformanceComparisonAarch:
|
||||
needs: [RunConfig, BuilderDebAarch64]
|
||||
if: ${{ !failure() && !cancelled() }}
|
||||
uses: ./.github/workflows/reusable_test.yml
|
||||
with:
|
||||
test_name: Performance Comparison Aarch64
|
||||
runner_type: func-tester-aarch64
|
||||
data: ${{ needs.RunConfig.outputs.data }}
|
||||
##############################################################################################
|
||||
###################################### SQLANCER FUZZERS ######################################
|
||||
##############################################################################################
|
||||
SQLancerTestRelease:
|
||||
needs: [RunConfig, BuilderDebRelease]
|
||||
if: ${{ !failure() && !cancelled() }}
|
||||
uses: ./.github/workflows/reusable_test.yml
|
||||
with:
|
||||
test_name: SQLancer (release)
|
||||
runner_type: fuzzer-unit-tester
|
||||
data: ${{ needs.RunConfig.outputs.data }}
|
||||
SQLancerTestDebug:
|
||||
needs: [RunConfig, BuilderDebDebug]
|
||||
if: ${{ !failure() && !cancelled() }}
|
||||
uses: ./.github/workflows/reusable_test.yml
|
||||
with:
|
||||
test_name: SQLancer (debug)
|
||||
runner_type: fuzzer-unit-tester
|
||||
data: ${{ needs.RunConfig.outputs.data }}
|
||||
################################# Stage Final #################################
|
||||
#
|
||||
FinishCheck:
|
||||
if: ${{ !failure() && !cancelled() }}
|
||||
needs:
|
||||
- BuilderReport
|
||||
- BuilderSpecialReport
|
||||
- DocsCheck
|
||||
- FastTest
|
||||
- FunctionalStatelessTestDebug
|
||||
- FunctionalStatelessTestRelease
|
||||
- FunctionalStatelessTestReleaseDatabaseReplicated
|
||||
- FunctionalStatelessTestReleaseAnalyzer
|
||||
- FunctionalStatelessTestAarch64
|
||||
- FunctionalStatelessTestAsan
|
||||
- FunctionalStatelessTestTsan
|
||||
- FunctionalStatelessTestMsan
|
||||
- FunctionalStatelessTestUBsan
|
||||
- FunctionalStatefulTestDebug
|
||||
- FunctionalStatefulTestRelease
|
||||
- FunctionalStatefulTestAarch64
|
||||
- FunctionalStatefulTestAsan
|
||||
- FunctionalStatefulTestTsan
|
||||
- FunctionalStatefulTestMsan
|
||||
- FunctionalStatefulTestUBsan
|
||||
- FunctionalStatelessTestReleaseS3
|
||||
- FunctionalStatelessTestS3Debug
|
||||
- FunctionalStatelessTestS3Tsan
|
||||
- FunctionalStatefulTestReleaseParallelReplicas
|
||||
- FunctionalStatefulTestAsanParallelReplicas
|
||||
- FunctionalStatefulTestTsanParallelReplicas
|
||||
- FunctionalStatefulTestMsanParallelReplicas
|
||||
- FunctionalStatefulTestUBsanParallelReplicas
|
||||
- FunctionalStatefulTestDebugParallelReplicas
|
||||
- StressTestDebug
|
||||
- StressTestAsan
|
||||
- StressTestTsan
|
||||
- StressTestMsan
|
||||
- StressTestUBsan
|
||||
- UpgradeCheckAsan
|
||||
- UpgradeCheckTsan
|
||||
- UpgradeCheckMsan
|
||||
- UpgradeCheckDebug
|
||||
- ASTFuzzerTestDebug
|
||||
- ASTFuzzerTestAsan
|
||||
- ASTFuzzerTestTsan
|
||||
- ASTFuzzerTestMSan
|
||||
- ASTFuzzerTestUBSan
|
||||
- IntegrationTestsAsan
|
||||
- IntegrationTestsAnalyzerAsan
|
||||
- IntegrationTestsTsan
|
||||
- IntegrationTestsRelease
|
||||
- IntegrationTestsFlakyCheck
|
||||
- PerformanceComparisonX86
|
||||
- PerformanceComparisonAarch
|
||||
- UnitTestsAsan
|
||||
- UnitTestsTsan
|
||||
- UnitTestsMsan
|
||||
- UnitTestsUBsan
|
||||
- UnitTestsRelease
|
||||
- CompatibilityCheckX86
|
||||
- CompatibilityCheckAarch64
|
||||
- SQLancerTestRelease
|
||||
- SQLancerTestDebug
|
||||
needs: [Tests_1, Tests_2]
|
||||
runs-on: [self-hosted, style-checker]
|
||||
steps:
|
||||
- name: Check out repository code
|
||||
uses: ClickHouse/checkout@v1
|
||||
with:
|
||||
clear-repository: true
|
||||
- name: Finish label
|
||||
run: |
|
||||
cd "$GITHUB_WORKSPACE/tests/ci"
|
||||
python3 finish_check.py
|
||||
python3 merge_pr.py --check-approved
|
||||
##############################################################################################
|
||||
############################ SQLLOGIC TEST ###################################################
|
||||
##############################################################################################
|
||||
SQLLogicTestRelease:
|
||||
needs: [RunConfig, BuilderDebRelease]
|
||||
if: ${{ !failure() && !cancelled() }}
|
||||
uses: ./.github/workflows/reusable_test.yml
|
||||
with:
|
||||
test_name: Sqllogic test (release)
|
||||
runner_type: func-tester
|
||||
data: ${{ needs.RunConfig.outputs.data }}
|
||||
##############################################################################################
|
||||
##################################### SQL TEST ###############################################
|
||||
##############################################################################################
|
||||
SQLTest:
|
||||
needs: [RunConfig, BuilderDebRelease]
|
||||
if: ${{ !failure() && !cancelled() }}
|
||||
uses: ./.github/workflows/reusable_test.yml
|
||||
with:
|
||||
test_name: SQLTest
|
||||
runner_type: fuzzer-unit-tester
|
||||
data: ${{ needs.RunConfig.outputs.data }}
|
||||
#############################################################################################
|
||||
###################################### NOT IN FINISH ########################################
|
||||
|
||||
|
||||
#############################################################################################
|
||||
###################################### JEPSEN TESTS #########################################
|
||||
#############################################################################################
|
||||
@ -974,19 +185,11 @@ jobs:
|
||||
# we need concurrency as the job uses dedicated instances in the cloud
|
||||
concurrency:
|
||||
group: jepsen
|
||||
if: ${{ !failure() && !cancelled() }}
|
||||
needs: [RunConfig, BuilderBinRelease]
|
||||
if: ${{ !failure() && !cancelled() && contains(fromJson(needs.RunConfig.outputs.data).jobs_data.jobs_to_do, 'ClickHouse Keeper Jepsen') }}
|
||||
# jepsen needs binary_release build which is in Builds_2
|
||||
needs: [RunConfig, Builds_2]
|
||||
uses: ./.github/workflows/reusable_test.yml
|
||||
with:
|
||||
test_name: ClickHouse Keeper Jepsen
|
||||
runner_type: style-checker
|
||||
data: ${{ needs.RunConfig.outputs.data }}
|
||||
#############################################################################################
|
||||
####################################### libFuzzer ###########################################
|
||||
#############################################################################################
|
||||
libFuzzer:
|
||||
if: ${{ !failure() && !cancelled() && contains(github.event.pull_request.labels.*.name, 'libFuzzer') }}
|
||||
needs: [RunConfig, StyleCheck]
|
||||
uses: ./.github/workflows/libfuzzer.yml
|
||||
with:
|
||||
data: ${{ needs.RunConfig.outputs.data }}
|
||||
|
.github/workflows/release.yml (11 changes)
@@ -54,10 +54,13 @@ jobs:
- name: Check docker clickhouse/clickhouse-server building
run: |
cd "$GITHUB_WORKSPACE/tests/ci"
python3 docker_server.py --release-type auto --version "$GITHUB_TAG" \
--image-repo clickhouse/clickhouse-server --image-path docker/server
python3 docker_server.py --release-type auto --version "$GITHUB_TAG" \
--image-repo clickhouse/clickhouse-keeper --image-path docker/keeper
export CHECK_NAME="Docker server image"
python3 docker_server.py --release-type auto --version "$GITHUB_TAG" --check-name "$CHECK_NAME" --push
- name: Check docker clickhouse/clickhouse-keeper building
run: |
cd "$GITHUB_WORKSPACE/tests/ci"
export CHECK_NAME="Docker keeper image"
python3 docker_server.py --release-type auto --version "$GITHUB_TAG" --check-name "$CHECK_NAME" --push
- name: Cleanup
if: always()
run: |
.github/workflows/release_branches.yml (83 changes)
@@ -14,7 +14,7 @@ on: # yamllint disable-line rule:truthy
jobs:
RunConfig:
runs-on: [self-hosted, style-checker]
runs-on: [self-hosted, style-checker-aarch64]
outputs:
data: ${{ steps.runconfig.outputs.CI_DATA }}
steps:
@@ -41,7 +41,7 @@ jobs:
id: runconfig
run: |
echo "::group::configure CI run"
python3 "$GITHUB_WORKSPACE/tests/ci/ci.py" --configure --rebuild-all-binaries --outfile ${{ runner.temp }}/ci_run_data.json
python3 "$GITHUB_WORKSPACE/tests/ci/ci.py" --configure --outfile ${{ runner.temp }}/ci_run_data.json
echo "::endgroup::"
echo "::group::CI run configure results"
python3 -m json.tool ${{ runner.temp }}/ci_run_data.json
@@ -68,8 +68,6 @@ jobs:
test_name: Compatibility check (amd64)
runner_type: style-checker
data: ${{ needs.RunConfig.outputs.data }}
run_command: |
python3 compatibility_check.py --check-name "Compatibility check (amd64)" --check-glibc --check-distributions
CompatibilityCheckAarch64:
needs: [RunConfig, BuilderDebAarch64]
if: ${{ !failure() && !cancelled() }}
@@ -78,8 +76,6 @@ jobs:
test_name: Compatibility check (aarch64)
runner_type: style-checker
data: ${{ needs.RunConfig.outputs.data }}
run_command: |
python3 compatibility_check.py --check-name "Compatibility check (aarch64)" --check-glibc
#########################################################################################
#################################### ORDINARY BUILDS ####################################
#########################################################################################
@@ -91,6 +87,8 @@ jobs:
build_name: package_release
checkout_depth: 0
data: ${{ needs.RunConfig.outputs.data }}
# always rebuild on release branches to be able to publish from any commit
force: true
BuilderDebAarch64:
needs: [RunConfig, BuildDockers]
if: ${{ !failure() && !cancelled() }}
@@ -99,6 +97,8 @@ jobs:
build_name: package_aarch64
checkout_depth: 0
data: ${{ needs.RunConfig.outputs.data }}
# always rebuild on release branches to be able to publish from any commit
force: true
BuilderDebAsan:
needs: [RunConfig, BuildDockers]
if: ${{ !failure() && !cancelled() }}
@@ -142,6 +142,8 @@ jobs:
build_name: binary_darwin
checkout_depth: 0
data: ${{ needs.RunConfig.outputs.data }}
# always rebuild on release branches to be able to publish from any commit
force: true
BuilderBinDarwinAarch64:
needs: [RunConfig, BuildDockers]
if: ${{ !failure() && !cancelled() }}
@@ -150,24 +152,27 @@ jobs:
build_name: binary_darwin_aarch64
checkout_depth: 0
data: ${{ needs.RunConfig.outputs.data }}
# always rebuild on release branches to be able to publish from any commit
force: true
############################################################################################
##################################### Docker images #######################################
############################################################################################
DockerServerImages:
DockerServerImage:
needs: [RunConfig, BuilderDebRelease, BuilderDebAarch64]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Docker server and keeper images
test_name: Docker server image
runner_type: style-checker
data: ${{ needs.RunConfig.outputs.data }}
DockerKeeperImage:
needs: [RunConfig, BuilderDebRelease, BuilderDebAarch64]
if: ${{ !failure() && !cancelled() }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: Docker keeper image
runner_type: style-checker
data: ${{ needs.RunConfig.outputs.data }}
checkout_depth: 0
run_command: |
cd "$GITHUB_WORKSPACE/tests/ci"
python3 docker_server.py --release-type head --no-push \
--image-repo clickhouse/clickhouse-server --image-path docker/server --allow-build-reuse
python3 docker_server.py --release-type head --no-push \
--image-repo clickhouse/clickhouse-keeper --image-path docker/keeper --allow-build-reuse
############################################################################################
##################################### BUILD REPORTER #######################################
############################################################################################
@@ -186,37 +191,20 @@ jobs:
uses: ./.github/workflows/reusable_test.yml
with:
test_name: ClickHouse build check
runner_type: style-checker
runner_type: style-checker-aarch64
data: ${{ needs.RunConfig.outputs.data }}
additional_envs: |
NEEDS_DATA<<NDENV
${{ toJSON(needs) }}
NDENV
run_command: |
python3 build_report_check.py "$CHECK_NAME"
BuilderSpecialReport:
# run report check for failed builds to indicate the CI error
if: ${{ !cancelled() }}
needs:
- RunConfig
- BuilderDebRelease
- BuilderDebAarch64
- BuilderDebAsan
- BuilderDebTsan
- BuilderDebUBsan
- BuilderDebMsan
- BuilderDebDebug
- BuilderBinDarwin
- BuilderBinDarwinAarch64
uses: ./.github/workflows/reusable_test.yml
with:
test_name: ClickHouse special build check
runner_type: style-checker
runner_type: style-checker-aarch64
data: ${{ needs.RunConfig.outputs.data }}
additional_envs: |
NEEDS_DATA<<NDENV
${{ toJSON(needs) }}
NDENV
run_command: |
python3 build_report_check.py "$CHECK_NAME"
MarkReleaseReady:
if: ${{ !failure() && !cancelled() }}
needs:
@@ -224,13 +212,27 @@ jobs:
- BuilderBinDarwinAarch64
- BuilderDebRelease
- BuilderDebAarch64
runs-on: [self-hosted, style-checker]
runs-on: [self-hosted, style-checker-aarch64]
steps:
- name: Debug
run: |
echo need with different filters
cat << 'EOF'
${{ toJSON(needs) }}
${{ toJSON(needs.*.result) }}
no failures ${{ !contains(needs.*.result, 'failure') }}
no skips ${{ !contains(needs.*.result, 'skipped') }}
no both ${{ !(contains(needs.*.result, 'skipped') || contains(needs.*.result, 'failure')) }}
EOF
- name: Not ready
# fail the job to be able restart it
if: ${{ contains(needs.*.result, 'skipped') || contains(needs.*.result, 'failure') }}
run: exit 1
- name: Check out repository code
if: ${{ ! (contains(needs.*.result, 'skipped') || contains(needs.*.result, 'failure')) }}
uses: ClickHouse/checkout@v1
with:
clear-repository: true
- name: Mark Commit Release Ready
if: ${{ ! (contains(needs.*.result, 'skipped') || contains(needs.*.result, 'failure')) }}
run: |
cd "$GITHUB_WORKSPACE/tests/ci"
python3 mark_release_ready.py
@@ -456,7 +458,8 @@ jobs:
FinishCheck:
if: ${{ !failure() && !cancelled() }}
needs:
- DockerServerImages
- DockerServerImage
- DockerKeeperImage
- BuilderReport
- BuilderSpecialReport
- MarkReleaseReady
.github/workflows/reusable_build.yml (10 changes)
@@ -26,6 +26,10 @@ name: Build ClickHouse
description: json ci data
type: string
required: true
force:
description: disallow job skipping
type: boolean
default: false
additional_envs:
description: additional ENV variables to setup the job
type: string
@@ -33,7 +37,7 @@ name: Build ClickHouse
jobs:
Build:
name: Build-${{inputs.build_name}}
if: contains(fromJson(inputs.data).jobs_data.jobs_to_do, inputs.build_name)
if: ${{ contains(fromJson(inputs.data).jobs_data.jobs_to_do, inputs.build_name) || inputs.force }}
env:
GITHUB_JOB_OVERRIDDEN: Build-${{inputs.build_name}}
runs-on: [self-hosted, '${{inputs.runner_type}}']
@@ -78,13 +82,15 @@ jobs:
python3 "$GITHUB_WORKSPACE/tests/ci/ci.py" \
--infile ${{ toJson(inputs.data) }} \
--job-name "$BUILD_NAME" \
--run
--run \
${{ inputs.force && '--force' || '' }}
- name: Post
# it still be build report to upload for failed build job
if: ${{ !cancelled() }}
run: |
python3 "$GITHUB_WORKSPACE/tests/ci/ci.py" --infile ${{ toJson(inputs.data) }} --post --job-name '${{inputs.build_name}}'
- name: Mark as done
if: ${{ !cancelled() }}
run: |
python3 "$GITHUB_WORKSPACE/tests/ci/ci.py" --infile ${{ toJson(inputs.data) }} --mark-success --job-name '${{inputs.build_name}}'
- name: Clean
.github/workflows/reusable_build_stage.yml (new file, 32 lines)
@@ -0,0 +1,32 @@
### FIXME: merge reusable_test.yml and reusable_build.yml as they are almost identical
# and then merge reusable_build_stage.yml and reusable_test_stage.yml

name: BuildStageWF
'on':
workflow_call:
inputs:
stage:
description: stage name
type: string
required: true
data:
description: ci data
type: string
required: true

jobs:
s:
if: ${{ !failure() && !cancelled() }}
strategy:
fail-fast: false
matrix:
job_name_and_runner_type: ${{ fromJson(inputs.data).stages_data[inputs.stage] }}
uses: ./.github/workflows/reusable_build.yml
with:
build_name: ${{ matrix.job_name_and_runner_type.job_name }}
runner_type: ${{ matrix.job_name_and_runner_type.runner_type }}
# don't forget to pass force flag (no ci cache/no reuse) - once it's needed
force: false
# for now let's do I deep checkout for builds
checkout_depth: 0
data: ${{ inputs.data }}
.github/workflows/reusable_docker.yml (16 changes)
@@ -46,7 +46,7 @@ jobs:
needs: [DockerBuildAmd64, DockerBuildAarch64]
runs-on: [self-hosted, style-checker]
if: |
!failure() && !cancelled() && toJson(fromJson(inputs.data).docker_data.missing_multi) != '[]'
!failure() && !cancelled() && (toJson(fromJson(inputs.data).docker_data.missing_multi) != '[]' || inputs.set_latest)
steps:
- name: Check out repository code
uses: ClickHouse/checkout@v1
@@ -55,14 +55,12 @@ jobs:
- name: Build images
run: |
cd "$GITHUB_WORKSPACE/tests/ci"
FLAG_LATEST=''
if [ "${{ inputs.set_latest }}" == "true" ]; then
FLAG_LATEST='--set-latest'
echo "latest tag will be set for resulting manifests"
python3 docker_manifests_merge.py --suffix amd64 --suffix aarch64 \
--image-tags '${{ toJson(fromJson(inputs.data).docker_data.images) }}' \
--missing-images '${{ toJson(fromJson(inputs.data).docker_data.missing_multi) }}' \
--set-latest
else
python3 docker_manifests_merge.py --suffix amd64 --suffix aarch64 \
--image-tags '${{ toJson(fromJson(inputs.data).docker_data.images) }}' \
--missing-images '${{ toJson(fromJson(inputs.data).docker_data.missing_multi) }}'
fi
python3 docker_manifests_merge.py --suffix amd64 --suffix aarch64 \
--image-tags '${{ toJson(fromJson(inputs.data).docker_data.images) }}' \
--missing-images '${{ toJson(fromJson(inputs.data).docker_data.missing_multi) }}' \
$FLAG_LATEST
.github/workflows/reusable_test.yml (1 change)
@@ -107,6 +107,7 @@ jobs:
run: |
python3 "$GITHUB_WORKSPACE/tests/ci/ci.py" --infile ${{ toJson(inputs.data) }} --post --job-name '${{inputs.test_name}}'
- name: Mark as done
if: ${{ !cancelled() }}
run: |
python3 "$GITHUB_WORKSPACE/tests/ci/ci.py" --infile ${{ toJson(inputs.data) }} --mark-success --job-name '${{inputs.test_name}}' --batch ${{matrix.batch}}
- name: Clean
.github/workflows/reusable_test_stage.yml (new file, 25 lines)
@@ -0,0 +1,25 @@
name: StageWF
'on':
workflow_call:
inputs:
stage:
description: stage name
type: string
required: true
data:
description: ci data
type: string
required: true

jobs:
s:
if: ${{ !failure() && !cancelled() }}
strategy:
fail-fast: false
matrix:
job_name_and_runner_type: ${{ fromJson(inputs.data).stages_data[inputs.stage] }}
uses: ./.github/workflows/reusable_test.yml
with:
test_name: ${{ matrix.job_name_and_runner_type.job_name }}
runner_type: ${{ matrix.job_name_and_runner_type.runner_type }}
data: ${{ inputs.data }}
.github/workflows/tags_stable.yml (2 changes)
@@ -55,7 +55,7 @@ jobs:
python3 ./utils/security-generator/generate_security.py > SECURITY.md
git diff HEAD
- name: Create Pull Request
uses: peter-evans/create-pull-request@v3
uses: peter-evans/create-pull-request@v6
with:
author: "robot-clickhouse <robot-clickhouse@users.noreply.github.com>"
token: ${{ secrets.ROBOT_CLICKHOUSE_COMMIT_TOKEN }}
.gitignore (2 changes)
@@ -165,7 +165,7 @@ tests/queries/0_stateless/*.expect.history
tests/integration/**/_gen

# rust
/rust/**/target
/rust/**/target*
# It is autogenerated from *.in
/rust/**/.cargo/config.toml
/rust/**/vendor
.gitmessage (12 changes)
@@ -1,6 +1,6 @@

### CI modificators (add a leading space to apply):
### CI modificators (add a leading space to apply) ###

## To avoid a merge commit in CI:
#no_merge_commit
@@ -8,12 +8,22 @@
## To discard CI cache:
#no_ci_cache

## To not test (only style check):
#do_not_test

## To run specified set of tests in CI:
#ci_set_<SET_NAME>
#ci_set_reduced
#ci_set_arm
#ci_set_integration
#ci_set_analyzer

## To run specified job in CI:
#job_<JOB NAME>
#job_stateless_tests_release
#job_package_debug
#job_integration_tests_asan

## To run only specified batches for multi-batch job(s)
#batch_2
#btach_1_2_3
.gitmodules (2 changes)
@@ -99,7 +99,7 @@
url = https://github.com/awslabs/aws-c-event-stream
[submodule "aws-c-common"]
path = contrib/aws-c-common
url = https://github.com/ClickHouse/aws-c-common
url = https://github.com/awslabs/aws-c-common.git
[submodule "aws-checksums"]
path = contrib/aws-checksums
url = https://github.com/awslabs/aws-checksums
CHANGELOG.md (2433 changes) - diff suppressed because it is too large
@@ -56,13 +56,13 @@ option(ENABLE_CHECK_HEAVY_BUILDS "Don't allow C++ translation units to compile t
if (ENABLE_CHECK_HEAVY_BUILDS)
# set DATA (since RSS does not work since 2.6.x+) to 5G
set (RLIMIT_DATA 5000000000)
# set VIRT (RLIMIT_AS) to 10G (DATA*10)
# set VIRT (RLIMIT_AS) to 10G (DATA*2)
set (RLIMIT_AS 10000000000)
# set CPU time limit to 1000 seconds
set (RLIMIT_CPU 1000)

# -fsanitize=memory is too heavy
if (SANITIZE STREQUAL "memory")
# -fsanitize=memory and address are too heavy
if (SANITIZE OR SANITIZE_COVERAGE OR WITH_COVERAGE)
set (RLIMIT_DATA 10000000000) # 10G
endif()
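Side note on the hunk above: with ENABLE_CHECK_HEAVY_BUILDS on, the compiler is run under a resource-limiting wrapper so that a single translation unit cannot exceed these caps. As a rough illustration only, here is a minimal C++ sketch of applying the same three caps with POSIX setrlimit(); the numeric values mirror the CMake variables, while the helper name cap() and the program structure are invented for this example.

// Hedged sketch: how RLIMIT_DATA / RLIMIT_AS / RLIMIT_CPU caps like the ones
// configured above can be applied to a process with POSIX setrlimit().
#include <sys/resource.h>
#include <cstdio>

// Templated on the resource type so it compiles whether RLIMIT_* is an int or an enum.
template <typename Resource>
static bool cap(Resource resource, rlim_t limit)
{
    rlimit rl{limit, limit};               // soft and hard limit set to the same value
    return setrlimit(resource, &rl) == 0;
}

int main()
{
    bool ok = cap(RLIMIT_DATA, 5'000'000'000)    // 5 GB data segment
           && cap(RLIMIT_AS,   10'000'000'000)   // 10 GB address space
           && cap(RLIMIT_CPU,  1000);            // 1000 seconds of CPU time
    std::puts(ok ? "limits applied" : "setrlimit failed");
    // A real wrapper would exec() the compiler here so the limits apply to it.
    return ok ? 0 : 1;
}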
@@ -110,11 +110,6 @@ endif()
# - sanitize.cmake
add_library(global-libs INTERFACE)

# We don't want to instrument everything with fuzzer, but only specific targets (see below),
# also, since we build our own llvm, we specifically don't want to instrument
# libFuzzer library itself - it would result in infinite recursion
#include (cmake/fuzzer.cmake)

include (cmake/sanitize.cmake)

option(ENABLE_COLORED_BUILD "Enable colors in compiler output" ON)
@@ -254,10 +249,17 @@ endif()

include(cmake/cpu_features.cmake)

# Asynchronous unwind tables are needed for Query Profiler.
# They are already by default on some platforms but possibly not on all platforms.
# Enable it explicitly.
set (COMPILER_FLAGS "${COMPILER_FLAGS} -fasynchronous-unwind-tables")

# Query Profiler doesn't work on MacOS for several reasons
# - PHDR cache is not available
# - We use native functionality to get stacktraces which is not async signal safe
# and thus we don't need to generate asynchronous unwind tables
if (NOT OS_DARWIN)
# Asynchronous unwind tables are needed for Query Profiler.
# They are already by default on some platforms but possibly not on all platforms.
# Enable it explicitly.
set (COMPILER_FLAGS "${COMPILER_FLAGS} -fasynchronous-unwind-tables")
endif()

# Reproducible builds.
if (CMAKE_BUILD_TYPE_UC STREQUAL "DEBUG")
@@ -312,7 +314,8 @@ if (COMPILER_CLANG)
endif()
endif ()

set (COMPILER_FLAGS "${COMPILER_FLAGS}")
# Disable floating-point expression contraction in order to get consistent floating point calculation results across platforms
set (COMPILER_FLAGS "${COMPILER_FLAGS} -ffp-contract=off")

# Our built-in unwinder only supports DWARF version up to 4.
set (DEBUG_INFO_FLAGS "-g")
@@ -348,7 +351,7 @@ if (COMPILER_CLANG)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fdiagnostics-absolute-paths")
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fdiagnostics-absolute-paths")

if (NOT ENABLE_TESTS AND NOT SANITIZE AND OS_LINUX)
if (NOT ENABLE_TESTS AND NOT SANITIZE AND NOT SANITIZE_COVERAGE AND OS_LINUX)
# https://clang.llvm.org/docs/ThinLTO.html
# Applies to clang and linux only.
# Disabled when building with tests or sanitizers.
@@ -546,7 +549,9 @@ if (ENABLE_RUST)
endif()
endif()

if (CMAKE_BUILD_TYPE_UC STREQUAL "RELWITHDEBINFO" AND NOT SANITIZE AND OS_LINUX AND (ARCH_AMD64 OR ARCH_AARCH64))
if (CMAKE_BUILD_TYPE_UC STREQUAL "RELWITHDEBINFO"
AND NOT SANITIZE AND NOT SANITIZE_COVERAGE AND NOT ENABLE_FUZZING
AND OS_LINUX AND (ARCH_AMD64 OR ARCH_AARCH64))
set(CHECK_LARGE_OBJECT_SIZES_DEFAULT ON)
else ()
set(CHECK_LARGE_OBJECT_SIZES_DEFAULT OFF)
@@ -569,10 +574,7 @@ if (FUZZER)
if (NOT(target_type STREQUAL "INTERFACE_LIBRARY" OR target_type STREQUAL "UTILITY"))
target_compile_options(${target} PRIVATE "-fsanitize=fuzzer-no-link")
endif()
# clickhouse fuzzer isn't working correctly
# initial PR https://github.com/ClickHouse/ClickHouse/pull/27526
#if (target MATCHES ".+_fuzzer" OR target STREQUAL "clickhouse")
if (target_type STREQUAL "EXECUTABLE" AND target MATCHES ".+_fuzzer")
if (target_type STREQUAL "EXECUTABLE" AND (target MATCHES ".+_fuzzer" OR target STREQUAL "clickhouse"))
message(STATUS "${target} instrumented with fuzzer")
target_link_libraries(${target} PUBLIC ch_contrib::fuzzer)
# Add to fuzzers bundle
README.md (23 changes)
@@ -31,15 +31,30 @@ curl https://clickhouse.com/ | sh
* [Static Analysis (SonarCloud)](https://sonarcloud.io/project/issues?resolved=false&id=ClickHouse_ClickHouse) proposes C++ quality improvements.
* [Contacts](https://clickhouse.com/company/contact) can help to get your questions answered if there are any.

## Monthly Release & Community Call

Every month we get together with the community (users, contributors, customers, those interested in learning more about ClickHouse) to discuss what is coming in the latest release. If you are interested in sharing what you've built on ClickHouse, let us know.

* [v24.3 Community Call](https://clickhouse.com/company/events/v24-3-community-release-call) - Mar 26
* [v24.4 Community Call](https://clickhouse.com/company/events/v24-4-community-release-call) - Apr 30

## Upcoming Events

Keep an eye out for upcoming meetups around the world. Somewhere else you want us to be? Please feel free to reach out to tyler `<at>` clickhouse `<dot>` com.
Keep an eye out for upcoming meetups and events around the world. Somewhere else you want us to be? Please feel free to reach out to tyler `<at>` clickhouse `<dot>` com. You can also peruse [ClickHouse Events](https://clickhouse.com/company/news-events) for a list of all upcoming trainings, meetups, speaking engagements, etc.

* [ClickHouse Meetup in Bellevue](https://www.meetup.com/clickhouse-seattle-user-group/events/298650371/) - Mar 11
* [ClickHouse Meetup at Ramp's Offices in NYC](https://www.meetup.com/clickhouse-new-york-user-group/events/298640542/) - Mar 19
* [ClickHouse Melbourne Meetup](https://www.meetup.com/clickhouse-australia-user-group/events/299479750/) - Mar 20
* [ClickHouse Meetup in Paris](https://www.meetup.com/clickhouse-france-user-group/events/298997115/) - Mar 21
* [ClickHouse Meetup in Bengaluru](https://www.meetup.com/clickhouse-bangalore-user-group/events/299479850/) - Mar 23
* [ClickHouse Meetup in Zurich](https://www.meetup.com/clickhouse-switzerland-meetup-group/events/299628922/) - Apr 16
* [ClickHouse Meetup in Copenhagen](https://www.meetup.com/clickhouse-denmark-meetup-group/events/299629133/) - Apr 23
* [ClickHouse Meetup in Dubai](https://www.meetup.com/clickhouse-dubai-meetup-group/events/299629189/) - May 28

## Recent Recordings
* **Recent Meetup Videos**: [Meetup Playlist](https://www.youtube.com/playlist?list=PL0Z2YDlm0b3iNDUzpY1S3L_iV4nARda_U) Whenever possible, recordings of the ClickHouse Community Meetups are edited and presented as individual talks. Currently featuring "Modern SQL in 2023", "Fast, Concurrent, and Consistent Asynchronous INSERTS in ClickHouse", and "Full-Text Indices: Design and Experiments"
* **Recording available**: [**v23.10 Release Webinar**](https://www.youtube.com/watch?v=PGQS6uPb970) All the features of 23.10, one convenient video! Watch it now!
* **All release webinar recordings**: [YouTube playlist](https://www.youtube.com/playlist?list=PL0Z2YDlm0b3jAlSy1JxyP8zluvXaN3nxU)

* **Recording available**: [**v24.2 Release Call**](https://www.youtube.com/watch?v=iN2y-TK8f3A) All the features of 24.2, one convenient video! Watch it now!

## Interested in joining ClickHouse and making it your full-time job?
@@ -13,9 +13,11 @@ The following versions of ClickHouse server are currently being supported with s

| Version | Supported |
|:-|:-|
| 24.2 | ✔️ |
| 24.1 | ✔️ |
| 23.12 | ✔️ |
| 23.11 | ✔️ |
| 23.10 | ✔️ |
| 23.11 | ❌ |
| 23.10 | ❌ |
| 23.9 | ❌ |
| 23.8 | ✔️ |
| 23.7 | ❌ |
@@ -10,13 +10,16 @@ set (CMAKE_CXX_STANDARD 20)

set (SRCS
argsToConfig.cpp
cgroupsv2.cpp
coverage.cpp
demangle.cpp
Decimal.cpp
getAvailableMemoryAmount.cpp
getFQDNOrHostName.cpp
getMemoryAmount.cpp
getPageSize.cpp
getThreadId.cpp
int8_to_string.cpp
JSON.cpp
mremap.cpp
phdr_cache.cpp
base/base/Decimal.cpp (new file, 87 lines)
@@ -0,0 +1,87 @@
#include <base/Decimal.h>
#include <base/extended_types.h>

namespace DB
{

/// Explicit template instantiations.

#define FOR_EACH_UNDERLYING_DECIMAL_TYPE(M) \
M(Int32) \
M(Int64) \
M(Int128) \
M(Int256)

#define FOR_EACH_UNDERLYING_DECIMAL_TYPE_PASS(M, X) \
M(Int32, X) \
M(Int64, X) \
M(Int128, X) \
M(Int256, X)

template <typename T> const Decimal<T> & Decimal<T>::operator += (const T & x) { value += x; return *this; }
template <typename T> const Decimal<T> & Decimal<T>::operator -= (const T & x) { value -= x; return *this; }
template <typename T> const Decimal<T> & Decimal<T>::operator *= (const T & x) { value *= x; return *this; }
template <typename T> const Decimal<T> & Decimal<T>::operator /= (const T & x) { value /= x; return *this; }
template <typename T> const Decimal<T> & Decimal<T>::operator %= (const T & x) { value %= x; return *this; }

template <typename T> void NO_SANITIZE_UNDEFINED Decimal<T>::addOverflow(const T & x) { value += x; }

/// Maybe this explicit instantiation affects performance since operators cannot be inlined.

template <typename T> template <typename U> const Decimal<T> & Decimal<T>::operator += (const Decimal<U> & x) { value += static_cast<T>(x.value); return *this; }
template <typename T> template <typename U> const Decimal<T> & Decimal<T>::operator -= (const Decimal<U> & x) { value -= static_cast<T>(x.value); return *this; }
template <typename T> template <typename U> const Decimal<T> & Decimal<T>::operator *= (const Decimal<U> & x) { value *= static_cast<T>(x.value); return *this; }
template <typename T> template <typename U> const Decimal<T> & Decimal<T>::operator /= (const Decimal<U> & x) { value /= static_cast<T>(x.value); return *this; }
template <typename T> template <typename U> const Decimal<T> & Decimal<T>::operator %= (const Decimal<U> & x) { value %= static_cast<T>(x.value); return *this; }

#define DISPATCH(TYPE_T, TYPE_U) \
template const Decimal<TYPE_T> & Decimal<TYPE_T>::operator += (const Decimal<TYPE_U> & x); \
template const Decimal<TYPE_T> & Decimal<TYPE_T>::operator -= (const Decimal<TYPE_U> & x); \
template const Decimal<TYPE_T> & Decimal<TYPE_T>::operator *= (const Decimal<TYPE_U> & x); \
template const Decimal<TYPE_T> & Decimal<TYPE_T>::operator /= (const Decimal<TYPE_U> & x); \
template const Decimal<TYPE_T> & Decimal<TYPE_T>::operator %= (const Decimal<TYPE_U> & x);
#define INVOKE(X) FOR_EACH_UNDERLYING_DECIMAL_TYPE_PASS(DISPATCH, X)
FOR_EACH_UNDERLYING_DECIMAL_TYPE(INVOKE);
#undef INVOKE
#undef DISPATCH

#define DISPATCH(TYPE) template struct Decimal<TYPE>;
FOR_EACH_UNDERLYING_DECIMAL_TYPE(DISPATCH)
#undef DISPATCH

template <typename T> bool operator< (const Decimal<T> & x, const Decimal<T> & y) { return x.value < y.value; }
template <typename T> bool operator> (const Decimal<T> & x, const Decimal<T> & y) { return x.value > y.value; }
template <typename T> bool operator<= (const Decimal<T> & x, const Decimal<T> & y) { return x.value <= y.value; }
template <typename T> bool operator>= (const Decimal<T> & x, const Decimal<T> & y) { return x.value >= y.value; }
template <typename T> bool operator== (const Decimal<T> & x, const Decimal<T> & y) { return x.value == y.value; }
template <typename T> bool operator!= (const Decimal<T> & x, const Decimal<T> & y) { return x.value != y.value; }

#define DISPATCH(TYPE) \
template bool operator< (const Decimal<TYPE> & x, const Decimal<TYPE> & y); \
template bool operator> (const Decimal<TYPE> & x, const Decimal<TYPE> & y); \
template bool operator<= (const Decimal<TYPE> & x, const Decimal<TYPE> & y); \
template bool operator>= (const Decimal<TYPE> & x, const Decimal<TYPE> & y); \
template bool operator== (const Decimal<TYPE> & x, const Decimal<TYPE> & y); \
template bool operator!= (const Decimal<TYPE> & x, const Decimal<TYPE> & y);
FOR_EACH_UNDERLYING_DECIMAL_TYPE(DISPATCH)
#undef DISPATCH


template <typename T> Decimal<T> operator+ (const Decimal<T> & x, const Decimal<T> & y) { return x.value + y.value; }
template <typename T> Decimal<T> operator- (const Decimal<T> & x, const Decimal<T> & y) { return x.value - y.value; }
template <typename T> Decimal<T> operator* (const Decimal<T> & x, const Decimal<T> & y) { return x.value * y.value; }
template <typename T> Decimal<T> operator/ (const Decimal<T> & x, const Decimal<T> & y) { return x.value / y.value; }
template <typename T> Decimal<T> operator- (const Decimal<T> & x) { return -x.value; }

#define DISPATCH(TYPE) \
template Decimal<TYPE> operator+ (const Decimal<TYPE> & x, const Decimal<TYPE> & y); \
template Decimal<TYPE> operator- (const Decimal<TYPE> & x, const Decimal<TYPE> & y); \
template Decimal<TYPE> operator* (const Decimal<TYPE> & x, const Decimal<TYPE> & y); \
template Decimal<TYPE> operator/ (const Decimal<TYPE> & x, const Decimal<TYPE> & y); \
template Decimal<TYPE> operator- (const Decimal<TYPE> & x);
FOR_EACH_UNDERLYING_DECIMAL_TYPE(DISPATCH)
#undef DISPATCH

#undef FOR_EACH_UNDERLYING_DECIMAL_TYPE_PASS
#undef FOR_EACH_UNDERLYING_DECIMAL_TYPE
}
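For context on what this new file is doing: the operator bodies that used to be inline in the header are now defined once here and explicitly instantiated for each underlying integer type, so other translation units can compile against the declarations alone. Below is a minimal, self-contained sketch of that explicit-instantiation pattern; the name Dec and its single operator are simplified stand-ins, not the real DB::Decimal.

// Hedged sketch of the explicit-instantiation pattern: definition in one place,
// one emitted copy per supported type, declarations elsewhere.
#include <cstdint>
#include <iostream>

template <typename T>
struct Dec
{
    T value;
    const Dec & operator+=(const T & x);   // declared only (this part would live in the header)
};

// Definition that would normally sit in the .cpp file.
template <typename T>
const Dec<T> & Dec<T>::operator+=(const T & x) { value += x; return *this; }

// Explicit instantiations: exactly one object-file copy per supported type.
template struct Dec<int32_t>;
template struct Dec<int64_t>;

int main()
{
    Dec<int64_t> d{40};
    d += 2;
    std::cout << d.value << '\n';   // prints 42
}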
@ -1,20 +1,28 @@
|
||||
#pragma once
|
||||
|
||||
#include <base/extended_types.h>
|
||||
#include <base/Decimal_fwd.h>
|
||||
#include <base/types.h>
|
||||
#include <base/defines.h>
|
||||
|
||||
#if !defined(NO_SANITIZE_UNDEFINED)
|
||||
#if defined(__clang__)
|
||||
#define NO_SANITIZE_UNDEFINED __attribute__((__no_sanitize__("undefined")))
|
||||
#else
|
||||
#define NO_SANITIZE_UNDEFINED
|
||||
#endif
|
||||
#endif
|
||||
|
||||
namespace DB
|
||||
{
|
||||
template <class> struct Decimal;
|
||||
class DateTime64;
|
||||
|
||||
#define FOR_EACH_UNDERLYING_DECIMAL_TYPE(M) \
|
||||
M(Int32) \
|
||||
M(Int64) \
|
||||
M(Int128) \
|
||||
M(Int256)
|
||||
|
||||
#define FOR_EACH_UNDERLYING_DECIMAL_TYPE_PASS(M, X) \
|
||||
M(Int32, X) \
|
||||
M(Int64, X) \
|
||||
M(Int128, X) \
|
||||
M(Int256, X)
|
||||
|
||||
using Decimal32 = Decimal<Int32>;
|
||||
using Decimal64 = Decimal<Int64>;
|
||||
using Decimal128 = Decimal<Int128>;
|
||||
@ -55,36 +63,73 @@ struct Decimal
|
||||
return static_cast<U>(value);
|
||||
}
|
||||
|
||||
const Decimal<T> & operator += (const T & x) { value += x; return *this; }
|
||||
const Decimal<T> & operator -= (const T & x) { value -= x; return *this; }
|
||||
const Decimal<T> & operator *= (const T & x) { value *= x; return *this; }
|
||||
const Decimal<T> & operator /= (const T & x) { value /= x; return *this; }
|
||||
const Decimal<T> & operator %= (const T & x) { value %= x; return *this; }
|
||||
const Decimal<T> & operator += (const T & x);
|
||||
const Decimal<T> & operator -= (const T & x);
|
||||
const Decimal<T> & operator *= (const T & x);
|
||||
const Decimal<T> & operator /= (const T & x);
|
||||
const Decimal<T> & operator %= (const T & x);
|
||||
|
||||
template <typename U> const Decimal<T> & operator += (const Decimal<U> & x) { value += x.value; return *this; }
|
||||
template <typename U> const Decimal<T> & operator -= (const Decimal<U> & x) { value -= x.value; return *this; }
|
||||
template <typename U> const Decimal<T> & operator *= (const Decimal<U> & x) { value *= x.value; return *this; }
|
||||
template <typename U> const Decimal<T> & operator /= (const Decimal<U> & x) { value /= x.value; return *this; }
|
||||
template <typename U> const Decimal<T> & operator %= (const Decimal<U> & x) { value %= x.value; return *this; }
|
||||
template <typename U> const Decimal<T> & operator += (const Decimal<U> & x);
|
||||
template <typename U> const Decimal<T> & operator -= (const Decimal<U> & x);
|
||||
template <typename U> const Decimal<T> & operator *= (const Decimal<U> & x);
|
||||
template <typename U> const Decimal<T> & operator /= (const Decimal<U> & x);
|
||||
template <typename U> const Decimal<T> & operator %= (const Decimal<U> & x);
|
||||
|
||||
/// This is to avoid UB for sumWithOverflow()
|
||||
void NO_SANITIZE_UNDEFINED addOverflow(const T & x) { value += x; }
|
||||
void NO_SANITIZE_UNDEFINED addOverflow(const T & x);
|
||||
|
||||
T value;
|
||||
};
|
||||
|
||||
template <typename T> inline bool operator< (const Decimal<T> & x, const Decimal<T> & y) { return x.value < y.value; }
|
||||
template <typename T> inline bool operator> (const Decimal<T> & x, const Decimal<T> & y) { return x.value > y.value; }
|
||||
template <typename T> inline bool operator<= (const Decimal<T> & x, const Decimal<T> & y) { return x.value <= y.value; }
|
||||
template <typename T> inline bool operator>= (const Decimal<T> & x, const Decimal<T> & y) { return x.value >= y.value; }
|
||||
template <typename T> inline bool operator== (const Decimal<T> & x, const Decimal<T> & y) { return x.value == y.value; }
|
||||
template <typename T> inline bool operator!= (const Decimal<T> & x, const Decimal<T> & y) { return x.value != y.value; }
|
||||
#define DISPATCH(TYPE) extern template struct Decimal<TYPE>;
|
||||
FOR_EACH_UNDERLYING_DECIMAL_TYPE(DISPATCH)
|
||||
#undef DISPATCH
|
||||
|
||||
template <typename T> inline Decimal<T> operator+ (const Decimal<T> & x, const Decimal<T> & y) { return x.value + y.value; }
|
||||
template <typename T> inline Decimal<T> operator- (const Decimal<T> & x, const Decimal<T> & y) { return x.value - y.value; }
|
||||
template <typename T> inline Decimal<T> operator* (const Decimal<T> & x, const Decimal<T> & y) { return x.value * y.value; }
|
||||
template <typename T> inline Decimal<T> operator/ (const Decimal<T> & x, const Decimal<T> & y) { return x.value / y.value; }
|
||||
template <typename T> inline Decimal<T> operator- (const Decimal<T> & x) { return -x.value; }
|
||||
#define DISPATCH(TYPE_T, TYPE_U) \
|
||||
extern template const Decimal<TYPE_T> & Decimal<TYPE_T>::operator += (const Decimal<TYPE_U> & x); \
|
||||
extern template const Decimal<TYPE_T> & Decimal<TYPE_T>::operator -= (const Decimal<TYPE_U> & x); \
|
||||
extern template const Decimal<TYPE_T> & Decimal<TYPE_T>::operator *= (const Decimal<TYPE_U> & x); \
|
||||
extern template const Decimal<TYPE_T> & Decimal<TYPE_T>::operator /= (const Decimal<TYPE_U> & x); \
|
||||
extern template const Decimal<TYPE_T> & Decimal<TYPE_T>::operator %= (const Decimal<TYPE_U> & x);
|
||||
#define INVOKE(X) FOR_EACH_UNDERLYING_DECIMAL_TYPE_PASS(DISPATCH, X)
|
||||
FOR_EACH_UNDERLYING_DECIMAL_TYPE(INVOKE);
|
||||
#undef INVOKE
|
||||
#undef DISPATCH
|
||||
|
||||
template <typename T> bool operator< (const Decimal<T> & x, const Decimal<T> & y);
|
||||
template <typename T> bool operator> (const Decimal<T> & x, const Decimal<T> & y);
|
||||
template <typename T> bool operator<= (const Decimal<T> & x, const Decimal<T> & y);
|
||||
template <typename T> bool operator>= (const Decimal<T> & x, const Decimal<T> & y);
|
||||
template <typename T> bool operator== (const Decimal<T> & x, const Decimal<T> & y);
|
||||
template <typename T> bool operator!= (const Decimal<T> & x, const Decimal<T> & y);
|
||||
|
||||
#define DISPATCH(TYPE) \
|
||||
extern template bool operator< (const Decimal<TYPE> & x, const Decimal<TYPE> & y); \
|
||||
extern template bool operator> (const Decimal<TYPE> & x, const Decimal<TYPE> & y); \
|
||||
extern template bool operator<= (const Decimal<TYPE> & x, const Decimal<TYPE> & y); \
|
||||
extern template bool operator>= (const Decimal<TYPE> & x, const Decimal<TYPE> & y); \
|
||||
extern template bool operator== (const Decimal<TYPE> & x, const Decimal<TYPE> & y); \
|
||||
extern template bool operator!= (const Decimal<TYPE> & x, const Decimal<TYPE> & y);
|
||||
FOR_EACH_UNDERLYING_DECIMAL_TYPE(DISPATCH)
|
||||
#undef DISPATCH
|
||||
|
||||
template <typename T> Decimal<T> operator+ (const Decimal<T> & x, const Decimal<T> & y);
|
||||
template <typename T> Decimal<T> operator- (const Decimal<T> & x, const Decimal<T> & y);
|
||||
template <typename T> Decimal<T> operator* (const Decimal<T> & x, const Decimal<T> & y);
|
||||
template <typename T> Decimal<T> operator/ (const Decimal<T> & x, const Decimal<T> & y);
|
||||
template <typename T> Decimal<T> operator- (const Decimal<T> & x);
|
||||
|
||||
#define DISPATCH(TYPE) \
|
||||
extern template Decimal<TYPE> operator+ (const Decimal<TYPE> & x, const Decimal<TYPE> & y); \
|
||||
extern template Decimal<TYPE> operator- (const Decimal<TYPE> & x, const Decimal<TYPE> & y); \
|
||||
extern template Decimal<TYPE> operator* (const Decimal<TYPE> & x, const Decimal<TYPE> & y); \
|
||||
extern template Decimal<TYPE> operator/ (const Decimal<TYPE> & x, const Decimal<TYPE> & y); \
|
||||
extern template Decimal<TYPE> operator- (const Decimal<TYPE> & x);
|
||||
FOR_EACH_UNDERLYING_DECIMAL_TYPE(DISPATCH)
|
||||
#undef DISPATCH
|
||||
|
||||
#undef FOR_EACH_UNDERLYING_DECIMAL_TYPE_PASS
|
||||
#undef FOR_EACH_UNDERLYING_DECIMAL_TYPE
|
||||
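Aside: the DISPATCH/FOR_EACH pattern above is the usual X-macro way of pairing `extern template` declarations in a header with explicit instantiations in one translation unit. A minimal self-contained sketch of the same idea, with a hypothetical type and list (not the ClickHouse names):

#include <cstdint>

template <typename T> struct Num { T value; };
template <typename T> bool operator< (const Num<T> & x, const Num<T> & y) { return x.value < y.value; }

#define FOR_EACH_NUM_TYPE(M) M(int32_t) M(int64_t)

/// Header side: suppress implicit instantiation, promise it exists elsewhere.
#define DISPATCH(TYPE) extern template bool operator< (const Num<TYPE> & x, const Num<TYPE> & y);
FOR_EACH_NUM_TYPE(DISPATCH)
#undef DISPATCH

/// Source side: provide the instantiations exactly once.
#define DISPATCH(TYPE) template bool operator< (const Num<TYPE> & x, const Num<TYPE> & y);
FOR_EACH_NUM_TYPE(DISPATCH)
#undef DISPATCH

int main() { return Num<int32_t>{1} < Num<int32_t>{2} ? 0 : 1; }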
|
||||
/// Distinguishable type to allow function resolution/deduction based on value type,
|
||||
/// but also relatively easy to convert to/from Decimal64.
|
||||
|
@ -1,6 +1,7 @@
|
||||
#pragma once
|
||||
|
||||
#include <base/types.h>
|
||||
#include <base/extended_types.h>
|
||||
|
||||
namespace wide
|
||||
{
|
||||
@ -44,3 +45,8 @@ concept is_over_big_int =
|
||||
|| std::is_same_v<T, Decimal128>
|
||||
|| std::is_same_v<T, Decimal256>;
|
||||
}
|
||||
|
||||
template <> struct is_signed<DB::Decimal32> { static constexpr bool value = true; };
|
||||
template <> struct is_signed<DB::Decimal64> { static constexpr bool value = true; };
|
||||
template <> struct is_signed<DB::Decimal128> { static constexpr bool value = true; };
|
||||
template <> struct is_signed<DB::Decimal256> { static constexpr bool value = true; };
|
||||
|
@ -10,14 +10,10 @@
|
||||
#define JSON_MAX_DEPTH 100
|
||||
|
||||
|
||||
#ifdef __clang__
|
||||
# pragma clang diagnostic push
|
||||
# pragma clang diagnostic ignored "-Wdeprecated-dynamic-exception-spec"
|
||||
#endif
|
||||
#pragma clang diagnostic push
|
||||
#pragma clang diagnostic ignored "-Wdeprecated-dynamic-exception-spec"
|
||||
POCO_IMPLEMENT_EXCEPTION(JSONException, Poco::Exception, "JSONException") // NOLINT(cert-err60-cpp, modernize-use-noexcept, hicpp-use-noexcept)
|
||||
#ifdef __clang__
|
||||
# pragma clang diagnostic pop
|
||||
#endif
|
||||
#pragma clang diagnostic pop
|
||||
|
||||
|
||||
/// Read unsigned integer in a simple form from a non-0-terminated string.
|
||||
|
@ -39,14 +39,10 @@
|
||||
|
||||
|
||||
// NOLINTBEGIN(google-explicit-constructor)
|
||||
#ifdef __clang__
|
||||
# pragma clang diagnostic push
|
||||
# pragma clang diagnostic ignored "-Wdeprecated-dynamic-exception-spec"
|
||||
#endif
|
||||
#pragma clang diagnostic push
|
||||
#pragma clang diagnostic ignored "-Wdeprecated-dynamic-exception-spec"
|
||||
POCO_DECLARE_EXCEPTION(Foundation_API, JSONException, Poco::Exception)
|
||||
#ifdef __clang__
|
||||
# pragma clang diagnostic pop
|
||||
#endif
|
||||
#pragma clang diagnostic pop
|
||||
// NOLINTEND(google-explicit-constructor)
|
||||
|
||||
class JSON
|
||||
|
@ -185,7 +185,8 @@ inline bool memequalWide(const char * p1, const char * p2, size_t size)
|
||||
{
|
||||
case 3: if (!compare8(p1 + 32, p2 + 32)) return false; [[fallthrough]];
|
||||
case 2: if (!compare8(p1 + 16, p2 + 16)) return false; [[fallthrough]];
|
||||
case 1: if (!compare8(p1, p2)) return false;
|
||||
case 1: if (!compare8(p1, p2)) return false; [[fallthrough]];
|
||||
default: ;
|
||||
}
|
||||
|
||||
return compare8(p1 + size - 16, p2 + size - 16);
|
||||
|
@ -1,5 +1,6 @@
|
||||
#pragma once
|
||||
|
||||
#include <bit>
|
||||
#include <cstring>
|
||||
#include <algorithm>
|
||||
#include <type_traits>
|
||||
|
64
base/base/cgroupsv2.cpp
Normal file
@ -0,0 +1,64 @@
|
||||
#include <base/cgroupsv2.h>
|
||||
|
||||
#include <base/defines.h>
|
||||
|
||||
#include <fstream>
|
||||
#include <sstream>
|
||||
|
||||
|
||||
bool cgroupsV2Enabled()
|
||||
{
|
||||
#if defined(OS_LINUX)
|
||||
/// This file exists iff the host has cgroups v2 enabled.
|
||||
auto controllers_file = default_cgroups_mount / "cgroup.controllers";
|
||||
if (!std::filesystem::exists(controllers_file))
|
||||
return false;
|
||||
return true;
|
||||
#else
|
||||
return false;
|
||||
#endif
|
||||
}
|
||||
|
||||
bool cgroupsV2MemoryControllerEnabled()
|
||||
{
|
||||
#if defined(OS_LINUX)
|
||||
chassert(cgroupsV2Enabled());
|
||||
/// According to https://docs.kernel.org/admin-guide/cgroup-v2.html:
|
||||
/// - file 'cgroup.controllers' defines which controllers *can* be enabled
|
||||
/// - file 'cgroup.subtree_control' defines which controllers *are* enabled
|
||||
/// Caveat: nested groups may disable controllers. For simplicity, check only the top-level group.
|
||||
std::ifstream subtree_control_file(default_cgroups_mount / "cgroup.subtree_control");
|
||||
if (!subtree_control_file.is_open())
|
||||
return false;
|
||||
std::string controllers;
|
||||
std::getline(subtree_control_file, controllers);
|
||||
if (controllers.find("memory") == std::string::npos)
|
||||
return false;
|
||||
return true;
|
||||
#else
|
||||
return false;
|
||||
#endif
|
||||
}
|
||||
|
||||
std::string cgroupV2OfProcess()
|
||||
{
|
||||
#if defined(OS_LINUX)
|
||||
chassert(cgroupsV2Enabled());
|
||||
/// All PIDs assigned to a cgroup are in /sys/fs/cgroup/{cgroup_name}/cgroup.procs
|
||||
/// A simpler way to get the membership is:
|
||||
std::ifstream cgroup_name_file("/proc/self/cgroup");
|
||||
if (!cgroup_name_file.is_open())
|
||||
return "";
|
||||
/// With cgroups v2, there will be a *single* line with prefix "0::/"
|
||||
/// (see https://docs.kernel.org/admin-guide/cgroup-v2.html)
|
||||
std::string cgroup;
|
||||
std::getline(cgroup_name_file, cgroup);
|
||||
static const std::string v2_prefix = "0::/";
|
||||
if (!cgroup.starts_with(v2_prefix))
|
||||
return "";
|
||||
cgroup = cgroup.substr(v2_prefix.length());
|
||||
return cgroup;
|
||||
#else
|
||||
return "";
|
||||
#endif
|
||||
}
|
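A hedged usage sketch of the three helpers above; the setting name "memory.max", the helper name and the include path are assumptions for illustration only:

#include <base/cgroupsv2.h>

#include <cstdint>
#include <fstream>
#include <iostream>
#include <optional>
#include <string>

/// Hypothetical helper: read one integer setting from the cgroup of the current process.
std::optional<uint64_t> readCgroupV2Setting(const std::string & setting)
{
#if defined(OS_LINUX)
    if (!cgroupsV2Enabled())
        return {};
    std::ifstream file(default_cgroups_mount / cgroupV2OfProcess() / setting);
    uint64_t value;
    if (file >> value)
        return value;
#endif
    return {}; /// missing file or a non-numeric value such as "max"
}

int main()
{
    if (auto limit = readCgroupV2Setting("memory.max"))
        std::cout << "memory.max = " << *limit << '\n';
}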
22
base/base/cgroupsv2.h
Normal file
@ -0,0 +1,22 @@
|
||||
#pragma once
|
||||
|
||||
#include <filesystem>
|
||||
#include <string>
|
||||
|
||||
#if defined(OS_LINUX)
|
||||
/// I think it is possible to mount the cgroups hierarchy somewhere else (e.g. when in containers).
|
||||
/// /sys/fs/cgroup was still symlinked to the actual mount in the cases that I have seen.
|
||||
static inline const std::filesystem::path default_cgroups_mount = "/sys/fs/cgroup";
|
||||
#endif
|
||||
|
||||
/// Is cgroups v2 enabled on the system?
|
||||
bool cgroupsV2Enabled();
|
||||
|
||||
/// Is the memory controller of cgroups v2 enabled on the system?
|
||||
/// Assumes that cgroupsV2Enabled() returns true.
|
||||
bool cgroupsV2MemoryControllerEnabled();
|
||||
|
||||
/// Which cgroup does the process belong to?
|
||||
/// Returns an empty string if the cgroup cannot be determined.
|
||||
/// Assumes that cgroupsV2Enabled() returns true.
|
||||
std::string cgroupV2OfProcess();
|
@ -1,6 +1,7 @@
|
||||
#include "coverage.h"
|
||||
#include <sys/mman.h>
|
||||
|
||||
#pragma GCC diagnostic ignored "-Wreserved-identifier"
|
||||
#pragma clang diagnostic ignored "-Wreserved-identifier"
|
||||
|
||||
|
||||
/// WITH_COVERAGE enables the default implementation of code coverage,
|
||||
@ -12,11 +13,7 @@
|
||||
#include <unistd.h>
|
||||
|
||||
|
||||
# if defined(__clang__)
|
||||
extern "C" void __llvm_profile_dump(); // NOLINT
|
||||
# elif defined(__GNUC__) || defined(__GNUG__)
|
||||
extern "C" void __gcov_exit();
|
||||
# endif
|
||||
|
||||
#endif
|
||||
|
||||
@ -27,12 +24,7 @@ void dumpCoverageReportIfPossible()
|
||||
static std::mutex mutex;
|
||||
std::lock_guard lock(mutex);
|
||||
|
||||
# if defined(__clang__)
|
||||
__llvm_profile_dump(); // NOLINT
|
||||
# elif defined(__GNUC__) || defined(__GNUG__)
|
||||
__gcov_exit();
|
||||
# endif
|
||||
|
||||
#endif
|
||||
}
|
||||
|
||||
@ -52,11 +44,21 @@ namespace
|
||||
uint32_t * guards_start = nullptr;
|
||||
uint32_t * guards_end = nullptr;
|
||||
|
||||
uintptr_t * coverage_array = nullptr;
|
||||
uintptr_t * current_coverage_array = nullptr;
|
||||
uintptr_t * cumulative_coverage_array = nullptr;
|
||||
size_t coverage_array_size = 0;
|
||||
|
||||
uintptr_t * all_addresses_array = nullptr;
|
||||
size_t all_addresses_array_size = 0;
|
||||
|
||||
uintptr_t * allocate(size_t size)
|
||||
{
|
||||
/// Note: mmap returns zero-initialized memory, and we count on that.
|
||||
void * map = mmap(nullptr, size, PROT_READ | PROT_WRITE, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
|
||||
if (MAP_FAILED == map)
|
||||
return nullptr;
|
||||
return static_cast<uintptr_t*>(map);
|
||||
}
|
||||
}
|
||||
|
||||
extern "C"
|
||||
@ -79,7 +81,8 @@ void __sanitizer_cov_trace_pc_guard_init(uint32_t * start, uint32_t * stop)
|
||||
coverage_array_size = stop - start;
|
||||
|
||||
/// Note: we will leak this.
|
||||
coverage_array = static_cast<uintptr_t*>(malloc(sizeof(uintptr_t) * coverage_array_size));
|
||||
current_coverage_array = allocate(sizeof(uintptr_t) * coverage_array_size);
|
||||
cumulative_coverage_array = allocate(sizeof(uintptr_t) * coverage_array_size);
|
||||
|
||||
resetCoverage();
|
||||
}
|
||||
@ -92,8 +95,8 @@ void __sanitizer_cov_pcs_init(const uintptr_t * pcs_begin, const uintptr_t * pcs
|
||||
return;
|
||||
pc_table_initialized = true;
|
||||
|
||||
all_addresses_array = static_cast<uintptr_t*>(malloc(sizeof(uintptr_t) * coverage_array_size));
|
||||
all_addresses_array_size = pcs_end - pcs_begin;
|
||||
all_addresses_array = allocate(sizeof(uintptr_t) * all_addresses_array_size);
|
||||
|
||||
/// They are not real pointers, but also contain a flag in the most significant bit,
|
||||
/// in which we are not interested for now. Reset it.
|
||||
@ -115,17 +118,24 @@ void __sanitizer_cov_trace_pc_guard(uint32_t * guard)
|
||||
/// The values of `*guard` are as you set them in
|
||||
/// __sanitizer_cov_trace_pc_guard_init and so you can make them consecutive
|
||||
/// and use them to dereference an array or a bit vector.
|
||||
void * pc = __builtin_return_address(0);
|
||||
intptr_t pc = reinterpret_cast<uintptr_t>(__builtin_return_address(0));
|
||||
|
||||
coverage_array[guard - guards_start] = reinterpret_cast<uintptr_t>(pc);
|
||||
current_coverage_array[guard - guards_start] = pc;
|
||||
cumulative_coverage_array[guard - guards_start] = pc;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
__attribute__((no_sanitize("coverage")))
|
||||
std::span<const uintptr_t> getCoverage()
|
||||
std::span<const uintptr_t> getCurrentCoverage()
|
||||
{
|
||||
return {coverage_array, coverage_array_size};
|
||||
return {current_coverage_array, coverage_array_size};
|
||||
}
|
||||
|
||||
__attribute__((no_sanitize("coverage")))
|
||||
std::span<const uintptr_t> getCumulativeCoverage()
|
||||
{
|
||||
return {cumulative_coverage_array, coverage_array_size};
|
||||
}
|
||||
|
||||
__attribute__((no_sanitize("coverage")))
|
||||
@ -137,7 +147,7 @@ std::span<const uintptr_t> getAllInstrumentedAddresses()
|
||||
__attribute__((no_sanitize("coverage")))
|
||||
void resetCoverage()
|
||||
{
|
||||
memset(coverage_array, 0, coverage_array_size * sizeof(*coverage_array));
|
||||
memset(current_coverage_array, 0, coverage_array_size * sizeof(*current_coverage_array));
|
||||
|
||||
/// The guard defines whether the __sanitizer_cov_trace_pc_guard should be called.
|
||||
/// For example, you can unset it after first invocation to prevent excessive work.
|
||||
|
@ -15,7 +15,10 @@ void dumpCoverageReportIfPossible();
|
||||
/// Get accumulated unique program addresses of the instrumented parts of the code,
|
||||
/// seen so far after program startup or after previous reset.
|
||||
/// The returned span is sparse: it contains mostly zeros, which you should filter away.
|
||||
std::span<const uintptr_t> getCoverage();
|
||||
std::span<const uintptr_t> getCurrentCoverage();
|
||||
|
||||
/// Similar, but never reset.
|
||||
std::span<const uintptr_t> getCumulativeCoverage();
|
||||
|
||||
/// Get all instrumented addresses that could be in the coverage.
|
||||
std::span<const uintptr_t> getAllInstrumentedAddresses();
|
||||
|
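A sketch of how these spans might be consumed by a test harness. It assumes the binary is built with the sanitizer-coverage callbacks shown earlier (otherwise the spans are empty), that resetCoverage() is also declared in this header as before, and that the header is reachable as <base/coverage.h>:

#include <base/coverage.h>

#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <span>

static std::size_t countNonZero(std::span<const uintptr_t> addresses)
{
    std::size_t hits = 0;
    for (uintptr_t address : addresses) /// the span is sparse: mostly zeros
        hits += (address != 0);
    return hits;
}

int main()
{
    std::printf("hit %zu of %zu instrumented addresses since the last reset\n",
                countNonZero(getCurrentCoverage()),
                getAllInstrumentedAddresses().size());
    resetCoverage(); /// clears the current array; the cumulative one keeps accumulating
}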
@ -11,7 +11,7 @@
|
||||
/// including <base/defines.h>
|
||||
/// - it should not have fallback to 0,
|
||||
/// since this may create false-positive detection (common problem)
|
||||
#if defined(__clang__) && defined(__has_feature)
|
||||
#if defined(__has_feature)
|
||||
# define ch_has_feature __has_feature
|
||||
#endif
|
||||
|
||||
@ -76,24 +76,11 @@
|
||||
/// Explicitly allow undefined behaviour for certain functions. Use it as a function attribute.
|
||||
/// It is useful in case when compiler cannot see (and exploit) it, but UBSan can.
|
||||
/// Example: multiplication of signed integers with possibility of overflow when both sides are from user input.
|
||||
#if defined(__clang__)
|
||||
# define NO_SANITIZE_UNDEFINED __attribute__((__no_sanitize__("undefined")))
|
||||
# define NO_SANITIZE_ADDRESS __attribute__((__no_sanitize__("address")))
|
||||
# define NO_SANITIZE_THREAD __attribute__((__no_sanitize__("thread")))
|
||||
# define ALWAYS_INLINE_NO_SANITIZE_UNDEFINED __attribute__((__always_inline__, __no_sanitize__("undefined")))
|
||||
#else /// It does not work in GCC. GCC 7 cannot recognize this attribute and GCC 8 simply ignores it.
|
||||
# define NO_SANITIZE_UNDEFINED
|
||||
# define NO_SANITIZE_ADDRESS
|
||||
# define NO_SANITIZE_THREAD
|
||||
# define ALWAYS_INLINE_NO_SANITIZE_UNDEFINED ALWAYS_INLINE
|
||||
#endif
|
||||
|
||||
#if defined(__clang__) && defined(__clang_major__) && __clang_major__ >= 14
|
||||
# define DISABLE_SANITIZER_INSTRUMENTATION __attribute__((disable_sanitizer_instrumentation))
|
||||
#else
|
||||
# define DISABLE_SANITIZER_INSTRUMENTATION
|
||||
#endif
|
||||
|
||||
#define NO_SANITIZE_UNDEFINED __attribute__((__no_sanitize__("undefined")))
|
||||
#define NO_SANITIZE_ADDRESS __attribute__((__no_sanitize__("address")))
|
||||
#define NO_SANITIZE_THREAD __attribute__((__no_sanitize__("thread")))
|
||||
#define ALWAYS_INLINE_NO_SANITIZE_UNDEFINED __attribute__((__always_inline__, __no_sanitize__("undefined")))
|
||||
#define DISABLE_SANITIZER_INSTRUMENTATION __attribute__((disable_sanitizer_instrumentation))
|
||||
|
||||
#if !__has_include(<sanitizer/asan_interface.h>) || !defined(ADDRESS_SANITIZER)
|
||||
# define ASAN_UNPOISON_MEMORY_REGION(a, b)
|
||||
@ -121,68 +108,53 @@
|
||||
{
|
||||
[[noreturn]] void abortOnFailedAssertion(const String & description);
|
||||
}
|
||||
#define chassert(x) do { static_cast<bool>(x) ? void(0) : ::DB::abortOnFailedAssertion(#x); } while (0)
|
||||
#define chassert_1(x, ...) do { static_cast<bool>(x) ? void(0) : ::DB::abortOnFailedAssertion(#x); } while (0)
|
||||
#define chassert_2(x, comment, ...) do { static_cast<bool>(x) ? void(0) : ::DB::abortOnFailedAssertion(comment); } while (0)
|
||||
#define UNREACHABLE() abort()
|
||||
// clang-format off
|
||||
#else
|
||||
/// Here sizeof() trick is used to suppress unused warning for result,
|
||||
/// since simple "(void)x" will evaluate the expression, while
|
||||
/// "sizeof(!(x))" will not.
|
||||
#define chassert(x) (void)sizeof(!(x))
|
||||
#define chassert_1(x, ...) (void)sizeof(!(x))
|
||||
#define chassert_2(x, comment, ...) (void)sizeof(!(x))
|
||||
#define UNREACHABLE() __builtin_unreachable()
|
||||
#endif
|
||||
#define CHASSERT_DISPATCH(_1,_2, N,...) N(_1, _2)
|
||||
#define CHASSERT_INVOKE(tuple) CHASSERT_DISPATCH tuple
|
||||
#define chassert(...) CHASSERT_INVOKE((__VA_ARGS__, chassert_2, chassert_1))
|
||||
|
||||
#endif
|
||||
|
||||
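The CHASSERT_DISPATCH/CHASSERT_INVOKE pair is an argument-counting trick: appending the two handler names after __VA_ARGS__ makes the third macro parameter land on chassert_1 when one argument is passed and on chassert_2 when two are. A standalone miniature of the same trick, using hypothetical names:

#include <cstdio>

#define MY_ASSERT_1(x, ...)          do { if (!(x)) std::puts("failed: " #x); } while (0)
#define MY_ASSERT_2(x, comment, ...) do { if (!(x)) std::puts(comment); } while (0)
#define MY_DISPATCH(_1, _2, N, ...)  N(_1, _2)
#define MY_INVOKE(tuple)             MY_DISPATCH tuple
#define MY_ASSERT(...)               MY_INVOKE((__VA_ARGS__, MY_ASSERT_2, MY_ASSERT_1))

int main()
{
    MY_ASSERT(1 + 1 == 2);                       /// one argument selects MY_ASSERT_1
    MY_ASSERT(2 + 2 == 5, "arithmetic is off");  /// two arguments select MY_ASSERT_2
}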
/// Macros for Clang Thread Safety Analysis (TSA). They can be safely ignored by other compilers.
|
||||
/// Feel free to extend, but please stay close to https://clang.llvm.org/docs/ThreadSafetyAnalysis.html#mutexheader
|
||||
#if defined(__clang__)
|
||||
# define TSA_GUARDED_BY(...) __attribute__((guarded_by(__VA_ARGS__))) /// data is protected by given capability
|
||||
# define TSA_PT_GUARDED_BY(...) __attribute__((pt_guarded_by(__VA_ARGS__))) /// pointed-to data is protected by the given capability
|
||||
# define TSA_REQUIRES(...) __attribute__((requires_capability(__VA_ARGS__))) /// thread needs exclusive possession of given capability
|
||||
# define TSA_REQUIRES_SHARED(...) __attribute__((requires_shared_capability(__VA_ARGS__))) /// thread needs shared possession of given capability
|
||||
# define TSA_ACQUIRED_AFTER(...) __attribute__((acquired_after(__VA_ARGS__))) /// annotated lock must be locked after given lock
|
||||
# define TSA_NO_THREAD_SAFETY_ANALYSIS __attribute__((no_thread_safety_analysis)) /// disable TSA for a function
|
||||
# define TSA_CAPABILITY(...) __attribute__((capability(__VA_ARGS__))) /// object of a class can be used as capability
|
||||
# define TSA_ACQUIRE(...) __attribute__((acquire_capability(__VA_ARGS__))) /// function acquires a capability, but does not release it
|
||||
# define TSA_TRY_ACQUIRE(...) __attribute__((try_acquire_capability(__VA_ARGS__))) /// function tries to acquire a capability and returns a boolean value indicating success or failure
|
||||
# define TSA_RELEASE(...) __attribute__((release_capability(__VA_ARGS__))) /// function releases the given capability
|
||||
# define TSA_ACQUIRE_SHARED(...) __attribute__((acquire_shared_capability(__VA_ARGS__))) /// function acquires a shared capability, but does not release it
|
||||
# define TSA_TRY_ACQUIRE_SHARED(...) __attribute__((try_acquire_shared_capability(__VA_ARGS__))) /// function tries to acquire a shared capability and returns a boolean value indicating success or failure
|
||||
# define TSA_RELEASE_SHARED(...) __attribute__((release_shared_capability(__VA_ARGS__))) /// function releases the given shared capability
|
||||
# define TSA_SCOPED_LOCKABLE __attribute__((scoped_lockable)) /// object of a class has scoped lockable capability
|
||||
#define TSA_GUARDED_BY(...) __attribute__((guarded_by(__VA_ARGS__))) /// data is protected by given capability
|
||||
#define TSA_PT_GUARDED_BY(...) __attribute__((pt_guarded_by(__VA_ARGS__))) /// pointed-to data is protected by the given capability
|
||||
#define TSA_REQUIRES(...) __attribute__((requires_capability(__VA_ARGS__))) /// thread needs exclusive possession of given capability
|
||||
#define TSA_REQUIRES_SHARED(...) __attribute__((requires_shared_capability(__VA_ARGS__))) /// thread needs shared possession of given capability
|
||||
#define TSA_ACQUIRED_AFTER(...) __attribute__((acquired_after(__VA_ARGS__))) /// annotated lock must be locked after given lock
|
||||
#define TSA_NO_THREAD_SAFETY_ANALYSIS __attribute__((no_thread_safety_analysis)) /// disable TSA for a function
|
||||
#define TSA_CAPABILITY(...) __attribute__((capability(__VA_ARGS__))) /// object of a class can be used as capability
|
||||
#define TSA_ACQUIRE(...) __attribute__((acquire_capability(__VA_ARGS__))) /// function acquires a capability, but does not release it
|
||||
#define TSA_TRY_ACQUIRE(...) __attribute__((try_acquire_capability(__VA_ARGS__))) /// function tries to acquire a capability and returns a boolean value indicating success or failure
|
||||
#define TSA_RELEASE(...) __attribute__((release_capability(__VA_ARGS__))) /// function releases the given capability
|
||||
#define TSA_ACQUIRE_SHARED(...) __attribute__((acquire_shared_capability(__VA_ARGS__))) /// function acquires a shared capability, but does not release it
|
||||
#define TSA_TRY_ACQUIRE_SHARED(...) __attribute__((try_acquire_shared_capability(__VA_ARGS__))) /// function tries to acquire a shared capability and returns a boolean value indicating success or failure
|
||||
#define TSA_RELEASE_SHARED(...) __attribute__((release_shared_capability(__VA_ARGS__))) /// function releases the given shared capability
|
||||
#define TSA_SCOPED_LOCKABLE __attribute__((scoped_lockable)) /// object of a class has scoped lockable capability
|
||||
|
||||
/// Macros for suppressing TSA warnings for specific reads/writes (instead of suppressing it for the whole function)
|
||||
/// They use a lambda function to apply a function attribute to a single statement. This enables us to suppress warnings locally instead of
|
||||
/// suppressing them in the whole function
|
||||
/// Consider adding a comment when using these macros.
|
||||
# define TSA_SUPPRESS_WARNING_FOR_READ(x) ([&]() TSA_NO_THREAD_SAFETY_ANALYSIS -> const auto & { return (x); }())
|
||||
# define TSA_SUPPRESS_WARNING_FOR_WRITE(x) ([&]() TSA_NO_THREAD_SAFETY_ANALYSIS -> auto & { return (x); }())
|
||||
#define TSA_SUPPRESS_WARNING_FOR_READ(x) ([&]() TSA_NO_THREAD_SAFETY_ANALYSIS -> const auto & { return (x); }())
|
||||
#define TSA_SUPPRESS_WARNING_FOR_WRITE(x) ([&]() TSA_NO_THREAD_SAFETY_ANALYSIS -> auto & { return (x); }())
|
||||
|
||||
/// This macro is useful when only one thread writes to a member
|
||||
/// and you want to read this member from the same thread without locking a mutex.
|
||||
/// It's safe (because no concurrent writes are possible), but TSA generates a warning.
|
||||
/// (Seems like there's no way to verify it, but it makes sense to distinguish it from TSA_SUPPRESS_WARNING_FOR_READ for readability)
|
||||
# define TSA_READ_ONE_THREAD(x) TSA_SUPPRESS_WARNING_FOR_READ(x)
|
||||
|
||||
#else
|
||||
# define TSA_GUARDED_BY(...)
|
||||
# define TSA_PT_GUARDED_BY(...)
|
||||
# define TSA_REQUIRES(...)
|
||||
# define TSA_REQUIRES_SHARED(...)
|
||||
# define TSA_NO_THREAD_SAFETY_ANALYSIS
|
||||
# define TSA_CAPABILITY(...)
|
||||
# define TSA_ACQUIRE(...)
|
||||
# define TSA_TRY_ACQUIRE(...)
|
||||
# define TSA_RELEASE(...)
|
||||
# define TSA_ACQUIRE_SHARED(...)
|
||||
# define TSA_TRY_ACQUIRE_SHARED(...)
|
||||
# define TSA_RELEASE_SHARED(...)
|
||||
# define TSA_SCOPED_LOCKABLE
|
||||
|
||||
# define TSA_SUPPRESS_WARNING_FOR_READ(x) (x)
|
||||
# define TSA_SUPPRESS_WARNING_FOR_WRITE(x) (x)
|
||||
# define TSA_READ_ONE_THREAD(x) TSA_SUPPRESS_WARNING_FOR_READ(x)
|
||||
#endif
|
||||
#define TSA_READ_ONE_THREAD(x) TSA_SUPPRESS_WARNING_FOR_READ(x)
|
||||
|
||||
/// A template function for suppressing warnings about unused variables or function results.
|
||||
template <typename... Args>
|
||||
|
@ -64,6 +64,44 @@ template <> struct is_arithmetic<UInt256> { static constexpr bool value = true;
|
||||
template <typename T>
|
||||
inline constexpr bool is_arithmetic_v = is_arithmetic<T>::value;
|
||||
|
||||
#define FOR_EACH_ARITHMETIC_TYPE(M) \
|
||||
M(DataTypeDate) \
|
||||
M(DataTypeDate32) \
|
||||
M(DataTypeDateTime) \
|
||||
M(DataTypeInt8) \
|
||||
M(DataTypeUInt8) \
|
||||
M(DataTypeInt16) \
|
||||
M(DataTypeUInt16) \
|
||||
M(DataTypeInt32) \
|
||||
M(DataTypeUInt32) \
|
||||
M(DataTypeInt64) \
|
||||
M(DataTypeUInt64) \
|
||||
M(DataTypeInt128) \
|
||||
M(DataTypeUInt128) \
|
||||
M(DataTypeInt256) \
|
||||
M(DataTypeUInt256) \
|
||||
M(DataTypeFloat32) \
|
||||
M(DataTypeFloat64)
|
||||
|
||||
#define FOR_EACH_ARITHMETIC_TYPE_PASS(M, X) \
|
||||
M(DataTypeDate, X) \
|
||||
M(DataTypeDate32, X) \
|
||||
M(DataTypeDateTime, X) \
|
||||
M(DataTypeInt8, X) \
|
||||
M(DataTypeUInt8, X) \
|
||||
M(DataTypeInt16, X) \
|
||||
M(DataTypeUInt16, X) \
|
||||
M(DataTypeInt32, X) \
|
||||
M(DataTypeUInt32, X) \
|
||||
M(DataTypeInt64, X) \
|
||||
M(DataTypeUInt64, X) \
|
||||
M(DataTypeInt128, X) \
|
||||
M(DataTypeUInt128, X) \
|
||||
M(DataTypeInt256, X) \
|
||||
M(DataTypeUInt256, X) \
|
||||
M(DataTypeFloat32, X) \
|
||||
M(DataTypeFloat64, X)
|
||||
|
||||
template <typename T>
|
||||
struct make_unsigned // NOLINT(readability-identifier-naming)
|
||||
{
|
||||
|
@ -1,19 +1,55 @@
|
||||
#include <stdexcept>
|
||||
#include <fstream>
|
||||
#include <base/getMemoryAmount.h>
|
||||
|
||||
#include <base/cgroupsv2.h>
|
||||
#include <base/getPageSize.h>
|
||||
|
||||
#include <fstream>
|
||||
#include <stdexcept>
|
||||
|
||||
#include <unistd.h>
|
||||
#include <sys/types.h>
|
||||
#include <sys/param.h>
|
||||
#if defined(BSD)
|
||||
#include <sys/sysctl.h>
|
||||
|
||||
|
||||
namespace
|
||||
{
|
||||
|
||||
std::optional<uint64_t> getCgroupsV2MemoryLimit()
|
||||
{
|
||||
#if defined(OS_LINUX)
|
||||
if (!cgroupsV2Enabled())
|
||||
return {};
|
||||
|
||||
if (!cgroupsV2MemoryControllerEnabled())
|
||||
return {};
|
||||
|
||||
std::string cgroup = cgroupV2OfProcess();
|
||||
auto current_cgroup = cgroup.empty() ? default_cgroups_mount : (default_cgroups_mount / cgroup);
|
||||
|
||||
/// Open the bottom-most nested memory limit setting file. If there is no such file at the current
|
||||
/// level, try again at the parent level as memory settings are inherited.
|
||||
while (current_cgroup != default_cgroups_mount.parent_path())
|
||||
{
|
||||
std::ifstream setting_file(current_cgroup / "memory.max");
|
||||
if (setting_file.is_open())
|
||||
{
|
||||
uint64_t value;
|
||||
if (setting_file >> value)
|
||||
return {value};
|
||||
else
|
||||
return {}; /// e.g. the cgroups default "max"
|
||||
}
|
||||
current_cgroup = current_cgroup.parent_path();
|
||||
}
|
||||
|
||||
return {};
|
||||
#else
|
||||
return {};
|
||||
#endif
|
||||
}
|
||||
|
||||
}
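The loop in getCgroupsV2MemoryLimit walks from the process's cgroup up towards the mount point until a memory.max file is found, because the limit is inherited from ancestor groups. The same traversal, extracted into a self-contained sketch (paths and the function name are placeholders):

#include <cstdint>
#include <filesystem>
#include <fstream>
#include <optional>

std::optional<uint64_t> findInheritedLimit(std::filesystem::path cgroup, const std::filesystem::path & mount)
{
    while (cgroup != mount.parent_path())
    {
        std::ifstream file(cgroup / "memory.max");
        if (file.is_open())
        {
            uint64_t value;
            if (file >> value)
                return value; /// numeric limit at this level
            return {};        /// the default "max" means unlimited
        }
        cgroup = cgroup.parent_path(); /// no file here, look at the parent group
    }
    return {};
}

int main()
{
    auto limit = findInheritedLimit("/sys/fs/cgroup/a/b", "/sys/fs/cgroup");
    return limit.has_value() ? 0 : 1;
}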
|
||||
|
||||
/** Returns the size of physical memory (RAM) in bytes.
|
||||
* Returns 0 on unsupported platform
|
||||
*/
|
||||
uint64_t getMemoryAmountOrZero()
|
||||
{
|
||||
int64_t num_pages = sysconf(_SC_PHYS_PAGES);
|
||||
@ -26,34 +62,27 @@ uint64_t getMemoryAmountOrZero()
|
||||
|
||||
uint64_t memory_amount = num_pages * page_size;
|
||||
|
||||
#if defined(OS_LINUX)
|
||||
// Try to lookup at the Cgroup limit
|
||||
|
||||
// CGroups v2
|
||||
std::ifstream cgroupv2_limit("/sys/fs/cgroup/memory.max");
|
||||
if (cgroupv2_limit.is_open())
|
||||
{
|
||||
uint64_t memory_limit = 0;
|
||||
cgroupv2_limit >> memory_limit;
|
||||
if (memory_limit > 0 && memory_limit < memory_amount)
|
||||
memory_amount = memory_limit;
|
||||
}
|
||||
/// Respect the memory limit set by cgroups v2.
|
||||
auto limit_v2 = getCgroupsV2MemoryLimit();
|
||||
if (limit_v2.has_value() && *limit_v2 < memory_amount)
|
||||
memory_amount = *limit_v2;
|
||||
else
|
||||
{
|
||||
// CGroups v1
|
||||
std::ifstream cgroup_limit("/sys/fs/cgroup/memory/memory.limit_in_bytes");
|
||||
if (cgroup_limit.is_open())
|
||||
/// Cgroups v1 were replaced by v2 in 2015. The only reason we keep supporting v1 is that the transition to v2
|
||||
/// has been slow. Caveat: Hierarchical groups as in v2 are not supported for v1; the location of the memory
|
||||
/// limit (virtual) file is hard-coded.
|
||||
/// TODO: check at the end of 2024 if we can get rid of v1.
|
||||
std::ifstream limit_file_v1("/sys/fs/cgroup/memory/memory.limit_in_bytes");
|
||||
if (limit_file_v1.is_open())
|
||||
{
|
||||
uint64_t memory_limit = 0; // in case of read error
|
||||
cgroup_limit >> memory_limit;
|
||||
if (memory_limit > 0 && memory_limit < memory_amount)
|
||||
memory_amount = memory_limit;
|
||||
uint64_t limit_v1;
|
||||
if (limit_file_v1 >> limit_v1)
|
||||
if (limit_v1 < memory_amount)
|
||||
memory_amount = limit_v1;
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
return memory_amount;
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
@ -2,11 +2,10 @@
|
||||
|
||||
#include <cstdint>
|
||||
|
||||
/** Returns the size of physical memory (RAM) in bytes.
|
||||
* Returns 0 on unsupported platform or if it cannot determine the size of physical memory.
|
||||
*/
|
||||
/// Returns the size in bytes of physical memory (RAM) available to the process. The value can
|
||||
/// be smaller than the total available RAM available to the system due to cgroups settings.
|
||||
/// Returns 0 on unsupported platform or if it cannot determine the size of physical memory.
|
||||
uint64_t getMemoryAmountOrZero();
|
||||
|
||||
/** Throws exception if it cannot determine the size of physical memory.
|
||||
*/
|
||||
/// Throws exception if it cannot determine the size of physical memory.
|
||||
uint64_t getMemoryAmount();
|
||||
|
9
base/base/int8_to_string.cpp
Normal file
@ -0,0 +1,9 @@
|
||||
#include <base/int8_to_string.h>
|
||||
|
||||
namespace std
|
||||
{
|
||||
std::string to_string(Int8 v) /// NOLINT (cert-dcl58-cpp)
|
||||
{
|
||||
return to_string(int8_t{v});
|
||||
}
|
||||
}
|
17
base/base/int8_to_string.h
Normal file
@ -0,0 +1,17 @@
|
||||
#pragma once
|
||||
|
||||
#include <base/defines.h>
|
||||
#include <base/types.h>
|
||||
|
||||
#include <fmt/format.h>
|
||||
|
||||
template <>
|
||||
struct fmt::formatter<Int8> : fmt::formatter<int8_t>
|
||||
{
|
||||
};
|
||||
|
||||
|
||||
namespace std
|
||||
{
|
||||
std::string to_string(Int8 v); /// NOLINT (cert-dcl58-cpp)
|
||||
}
|
@ -155,9 +155,7 @@ Out & dump(Out & out, const char * name, T && x) // NOLINT(cppcoreguidelines-mis
|
||||
return dumpValue(out, x) << "; ";
|
||||
}
|
||||
|
||||
#ifdef __clang__
|
||||
#pragma clang diagnostic ignored "-Wgnu-zero-variadic-macro-arguments"
|
||||
#endif
|
||||
|
||||
#define DUMPVAR(VAR) ::dump(std::cerr, #VAR, (VAR));
|
||||
#define DUMPHEAD std::cerr << __FILE__ << ':' << __LINE__ << " [ " << getThreadId() << " ] ";
|
||||
|
@ -11,10 +11,8 @@
|
||||
/// Thread Sanitizer uses dl_iterate_phdr function on initialization and fails if we provide our own.
|
||||
#ifdef USE_PHDR_CACHE
|
||||
|
||||
#if defined(__clang__)
|
||||
# pragma clang diagnostic ignored "-Wreserved-id-macro"
|
||||
# pragma clang diagnostic ignored "-Wunused-macros"
|
||||
#endif
|
||||
#pragma clang diagnostic ignored "-Wreserved-id-macro"
|
||||
#pragma clang diagnostic ignored "-Wunused-macros"
|
||||
|
||||
#define __msan_unpoison(X, Y) // NOLINT
|
||||
#if defined(ch_has_feature)
|
||||
@ -57,10 +55,6 @@ std::atomic<PHDRCache *> phdr_cache {};
|
||||
|
||||
|
||||
extern "C"
|
||||
#ifndef __clang__
|
||||
[[gnu::visibility("default")]]
|
||||
[[gnu::externally_visible]]
|
||||
#endif
|
||||
int dl_iterate_phdr(int (*callback) (dl_phdr_info * info, size_t size, void * data), void * data)
|
||||
{
|
||||
auto * current_phdr_cache = phdr_cache.load();
|
||||
|
@ -59,24 +59,19 @@ using ComparatorWrapper = Comparator;
|
||||
|
||||
#endif
|
||||
|
||||
#pragma GCC diagnostic push
|
||||
#pragma GCC diagnostic ignored "-Wold-style-cast"
|
||||
#pragma clang diagnostic push
|
||||
#pragma clang diagnostic ignored "-Wold-style-cast"
|
||||
|
||||
#include <miniselect/floyd_rivest_select.h>
|
||||
|
||||
template <typename RandomIt>
|
||||
void nth_element(RandomIt first, RandomIt nth, RandomIt last)
|
||||
template <typename RandomIt, typename Compare>
|
||||
void nth_element(RandomIt first, RandomIt nth, RandomIt last, Compare compare)
|
||||
{
|
||||
using value_type = typename std::iterator_traits<RandomIt>::value_type;
|
||||
using comparator = std::less<value_type>;
|
||||
|
||||
comparator compare;
|
||||
ComparatorWrapper<comparator> compare_wrapper = compare;
|
||||
|
||||
#ifndef NDEBUG
|
||||
::shuffle(first, last);
|
||||
#endif
|
||||
|
||||
ComparatorWrapper<Compare> compare_wrapper = compare;
|
||||
::miniselect::floyd_rivest_select(first, nth, last, compare_wrapper);
|
||||
|
||||
#ifndef NDEBUG
|
||||
@ -87,6 +82,15 @@ void nth_element(RandomIt first, RandomIt nth, RandomIt last)
|
||||
#endif
|
||||
}
|
||||
|
||||
template <typename RandomIt>
|
||||
void nth_element(RandomIt first, RandomIt nth, RandomIt last)
|
||||
{
|
||||
using value_type = typename std::iterator_traits<RandomIt>::value_type;
|
||||
using comparator = std::less<value_type>;
|
||||
|
||||
::nth_element(first, nth, last, comparator());
|
||||
}
|
||||
|
||||
template <typename RandomIt, typename Compare>
|
||||
void partial_sort(RandomIt first, RandomIt middle, RandomIt last, Compare compare)
|
||||
{
|
||||
@ -111,7 +115,7 @@ void partial_sort(RandomIt first, RandomIt middle, RandomIt last)
|
||||
::partial_sort(first, middle, last, comparator());
|
||||
}
|
||||
|
||||
#pragma GCC diagnostic pop
|
||||
#pragma clang diagnostic pop
|
||||
|
||||
template <typename RandomIt, typename Compare>
|
||||
void sort(RandomIt first, RandomIt last, Compare compare)
|
||||
|
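A hedged usage sketch of the comparator-taking ::nth_element wrapper introduced above (it forwards to miniselect's floyd_rivest_select and, in debug builds, shuffles the range first to flush out comparator bugs); the include path is an assumption:

#include <base/sort.h>

#include <functional>
#include <vector>

int main()
{
    std::vector<int> values{5, 1, 4, 2, 3};

    /// Place the element that would sit at index 2 in descending order; here that is 3.
    ::nth_element(values.begin(), values.begin() + 2, values.end(), std::greater<int>());

    return values[2] == 3 ? 0 : 1;
}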
@ -3,14 +3,29 @@
|
||||
#include <cstdint>
|
||||
#include <string>
|
||||
|
||||
/// This is needed for more strict aliasing. https://godbolt.org/z/xpJBSb https://stackoverflow.com/a/57453713
|
||||
/// Using char8_t for more strict aliasing (https://stackoverflow.com/a/57453713)
|
||||
using UInt8 = char8_t;
|
||||
|
||||
/// Same for using signed _BitInt(8) (there isn't a signed char8_t, which would be more convenient)
|
||||
/// See https://godbolt.org/z/fafnWEnnf
|
||||
#pragma clang diagnostic push
|
||||
#pragma clang diagnostic ignored "-Wbit-int-extension"
|
||||
using Int8 = signed _BitInt(8);
|
||||
#pragma clang diagnostic pop
|
||||
|
||||
namespace std
|
||||
{
|
||||
template <>
|
||||
struct hash<Int8> /// NOLINT (cert-dcl58-cpp)
|
||||
{
|
||||
size_t operator()(const Int8 x) const { return std::hash<int8_t>()(int8_t{x}); }
|
||||
};
|
||||
}
|
||||
|
||||
using UInt16 = uint16_t;
|
||||
using UInt32 = uint32_t;
|
||||
using UInt64 = uint64_t;
|
||||
|
||||
using Int8 = int8_t;
|
||||
using Int16 = int16_t;
|
||||
using Int32 = int32_t;
|
||||
using Int64 = int64_t;
|
||||
|
@ -6,6 +6,7 @@
|
||||
|
||||
#include "throwError.h"
|
||||
|
||||
#include <bit>
|
||||
#include <cmath>
|
||||
#include <cfloat>
|
||||
#include <cassert>
|
||||
|
@ -2,6 +2,7 @@
|
||||
.hidden __syscall
|
||||
.type __syscall,%function
|
||||
__syscall:
|
||||
.cfi_startproc
|
||||
uxtw x8,w0
|
||||
mov x0,x1
|
||||
mov x1,x2
|
||||
@ -12,3 +13,4 @@ __syscall:
|
||||
mov x6,x7
|
||||
svc 0
|
||||
ret
|
||||
.cfi_endproc
|
||||
|
@ -20,11 +20,7 @@
|
||||
|
||||
/// Suppress TSan since it is possible for this code to be called from multiple threads,
|
||||
/// and initialization is safe to be done multiple times from multiple threads.
|
||||
#if defined(__clang__)
|
||||
# define NO_SANITIZE_THREAD __attribute__((__no_sanitize__("thread")))
|
||||
#else
|
||||
# define NO_SANITIZE_THREAD
|
||||
#endif
|
||||
#define NO_SANITIZE_THREAD __attribute__((__no_sanitize__("thread")))
|
||||
|
||||
// We don't have libc struct available here.
|
||||
// Compute aux vector manually (from /proc/self/auxv).
|
||||
|
@ -6,11 +6,7 @@
|
||||
/// It is only enabled in debug build (its intended use is for CI checks).
|
||||
#if !defined(NDEBUG)
|
||||
|
||||
#if defined(__clang__)
|
||||
#pragma clang diagnostic ignored "-Wincompatible-library-redeclaration"
|
||||
#else
|
||||
#pragma GCC diagnostic ignored "-Wbuiltin-declaration-mismatch"
|
||||
#endif
|
||||
#pragma clang diagnostic ignored "-Wincompatible-library-redeclaration"
|
||||
|
||||
/// We cannot use libc headers here.
|
||||
long write(int, const void *, unsigned long);
|
||||
|
@ -68,7 +68,7 @@ public:
|
||||
typedef typename Bucket::iterator BucketIterator;
|
||||
typedef typename BucketVec::iterator BucketVecIterator;
|
||||
|
||||
class ConstIterator : public std::iterator<std::forward_iterator_tag, Value>
|
||||
class ConstIterator
|
||||
{
|
||||
public:
|
||||
ConstIterator() : _initialized(false) { }
|
||||
|
@ -22,6 +22,7 @@
|
||||
#include <cstddef>
|
||||
#include <map>
|
||||
#include <vector>
|
||||
|
||||
#include "Poco/Channel.h"
|
||||
#include "Poco/Format.h"
|
||||
#include "Poco/Foundation.h"
|
||||
@ -33,7 +34,8 @@ namespace Poco
|
||||
|
||||
|
||||
class Exception;
|
||||
|
||||
class Logger;
|
||||
using LoggerPtr = std::shared_ptr<Logger>;
|
||||
|
||||
class Foundation_API Logger : public Channel
|
||||
/// Logger is a special Channel that acts as the main
|
||||
@ -870,21 +872,21 @@ public:
|
||||
/// If the Logger does not yet exist, it is created, based
|
||||
/// on its parent logger.
|
||||
|
||||
static Logger & unsafeGet(const std::string & name);
|
||||
/// Returns a reference to the Logger with the given name.
|
||||
static LoggerPtr getShared(const std::string & name, bool should_be_owned_by_shared_ptr_if_created = true);
|
||||
/// Returns a shared pointer to the Logger with the given name.
|
||||
/// If the Logger does not yet exist, it is created, based
|
||||
/// on its parent logger.
|
||||
///
|
||||
/// WARNING: This method is not thread safe. You should
|
||||
/// probably use get() instead.
|
||||
/// The only time this method should be used is during
|
||||
/// program initialization, when only one thread is running.
|
||||
|
||||
static Logger & create(const std::string & name, Channel * pChannel, int level = Message::PRIO_INFORMATION);
|
||||
/// Creates and returns a reference to a Logger with the
|
||||
/// given name. The Logger's Channel and log level are set as
|
||||
/// specified.
|
||||
|
||||
static LoggerPtr createShared(const std::string & name, Channel * pChannel, int level = Message::PRIO_INFORMATION);
|
||||
/// Creates and returns a shared pointer to a Logger with the
|
||||
/// given name. The Logger's Channel and log level are set as
|
||||
/// specified.
|
||||
|
||||
static Logger & root();
|
||||
/// Returns a reference to the root logger, which is the ultimate
|
||||
/// ancestor of all Loggers.
|
||||
@ -893,13 +895,6 @@ public:
|
||||
/// Returns a pointer to the Logger with the given name if it
|
||||
/// exists, or a null pointer otherwise.
|
||||
|
||||
static void destroy(const std::string & name);
|
||||
/// Destroys the logger with the specified name. Does nothing
|
||||
/// if the logger is not found.
|
||||
///
|
||||
/// After a logger has been destroyed, all references to it
|
||||
/// become invalid.
|
||||
|
||||
static void shutdown();
|
||||
/// Shuts down the logging framework and releases all
|
||||
/// Loggers.
|
||||
@ -928,9 +923,17 @@ public:
|
||||
|
||||
static const std::string ROOT; /// The name of the root logger ("").
|
||||
|
||||
protected:
|
||||
typedef std::map<std::string, Logger *> LoggerMap;
|
||||
public:
|
||||
struct LoggerEntry
|
||||
{
|
||||
Poco::Logger * logger;
|
||||
bool owned_by_shared_ptr = false;
|
||||
};
|
||||
|
||||
using LoggerMap = std::unordered_map<std::string, LoggerEntry>;
|
||||
using LoggerMapIterator = LoggerMap::iterator;
|
||||
|
||||
protected:
|
||||
Logger(const std::string & name, Channel * pChannel, int level);
|
||||
~Logger();
|
||||
|
||||
@ -938,11 +941,16 @@ protected:
|
||||
void log(const std::string & text, Message::Priority prio, const char * file, int line);
|
||||
|
||||
static std::string format(const std::string & fmt, int argc, std::string argv[]);
|
||||
static Logger & parent(const std::string & name);
|
||||
static void add(Logger * pLogger);
|
||||
static Logger * find(const std::string & name);
|
||||
|
||||
private:
|
||||
static std::pair<Logger::LoggerMapIterator, bool> unsafeGet(const std::string & name, bool get_shared);
|
||||
static Logger * unsafeGetRawPtr(const std::string & name);
|
||||
static std::pair<LoggerMapIterator, bool> unsafeCreate(const std::string & name, Channel * pChannel, int level = Message::PRIO_INFORMATION);
|
||||
static Logger & parent(const std::string & name);
|
||||
static std::pair<LoggerMapIterator, bool> add(Logger * pLogger);
|
||||
static std::optional<LoggerMapIterator> find(const std::string & name);
|
||||
static Logger * findRawPtr(const std::string & name);
|
||||
|
||||
Logger();
|
||||
Logger(const Logger &);
|
||||
Logger & operator=(const Logger &);
|
||||
@ -950,9 +958,6 @@ private:
|
||||
std::string _name;
|
||||
Channel * _pChannel;
|
||||
std::atomic_int _level;
|
||||
|
||||
static LoggerMap * _pLoggerMap;
|
||||
static Mutex _mapMtx;
|
||||
};
|
||||
|
||||
|
||||
|
@ -38,15 +38,15 @@ public:
|
||||
/// Creates the RefCountedObject.
|
||||
/// The initial reference count is one.
|
||||
|
||||
void duplicate() const;
|
||||
/// Increments the object's reference count.
|
||||
size_t duplicate() const;
|
||||
/// Increments the object's reference count and returns the count before the call.
|
||||
|
||||
void release() const throw();
|
||||
size_t release() const throw();
|
||||
/// Decrements the object's reference count
|
||||
/// and deletes the object if the count
|
||||
/// reaches zero.
|
||||
/// reaches zero; returns the reference count before the call.
|
||||
|
||||
int referenceCount() const;
|
||||
size_t referenceCount() const;
|
||||
/// Returns the reference count.
|
||||
|
||||
protected:
|
||||
@ -57,36 +57,40 @@ private:
|
||||
RefCountedObject(const RefCountedObject &);
|
||||
RefCountedObject & operator=(const RefCountedObject &);
|
||||
|
||||
mutable AtomicCounter _counter;
|
||||
mutable std::atomic<size_t> _counter;
|
||||
};
|
||||
|
||||
|
||||
//
|
||||
// inlines
|
||||
//
|
||||
inline int RefCountedObject::referenceCount() const
|
||||
inline size_t RefCountedObject::referenceCount() const
|
||||
{
|
||||
return _counter.value();
|
||||
return _counter.load(std::memory_order_acquire);
|
||||
}
|
||||
|
||||
|
||||
inline void RefCountedObject::duplicate() const
|
||||
inline size_t RefCountedObject::duplicate() const
|
||||
{
|
||||
++_counter;
|
||||
return _counter.fetch_add(1, std::memory_order_acq_rel);
|
||||
}
|
||||
|
||||
|
||||
inline void RefCountedObject::release() const throw()
|
||||
inline size_t RefCountedObject::release() const throw()
|
||||
{
|
||||
size_t reference_count_before = _counter.fetch_sub(1, std::memory_order_acq_rel);
|
||||
|
||||
try
|
||||
{
|
||||
if (--_counter == 0)
|
||||
if (reference_count_before == 1)
|
||||
delete this;
|
||||
}
|
||||
catch (...)
|
||||
{
|
||||
poco_unexpected();
|
||||
}
|
||||
|
||||
return reference_count_before;
|
||||
}
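The switch from `--_counter == 0` to `fetch_sub(...) == 1` matters because fetch_sub returns the value before the decrement, so a caller can both detect the last release and report the previous count. In miniature:

#include <atomic>
#include <cstddef>
#include <cstdio>

struct Counted
{
    mutable std::atomic<size_t> counter{1};

    size_t release() const
    {
        const size_t before = counter.fetch_sub(1, std::memory_order_acq_rel);
        if (before == 1)
            std::puts("last reference released, safe to delete");
        return before; /// reference count before this call, as in RefCountedObject::release()
    }
};

int main()
{
    Counted c;
    return c.release() == 1 ? 0 : 1;
}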
|
||||
|
||||
|
||||
|
@ -20,12 +20,31 @@
|
||||
#include "Poco/NumberParser.h"
|
||||
#include "Poco/String.h"
|
||||
|
||||
#include <cassert>
|
||||
#include <mutex>
|
||||
|
||||
namespace
|
||||
{
|
||||
|
||||
std::mutex & getLoggerMutex()
|
||||
{
|
||||
auto get_logger_mutex_placeholder_memory = []()
|
||||
{
|
||||
static char buffer[sizeof(std::mutex)]{};
|
||||
return buffer;
|
||||
};
|
||||
|
||||
static std::mutex * logger_mutex = new (get_logger_mutex_placeholder_memory()) std::mutex();
|
||||
return *logger_mutex;
|
||||
}
|
||||
|
||||
Poco::Logger::LoggerMap * _pLoggerMap = nullptr;
|
||||
|
||||
}
|
||||
|
||||
namespace Poco {
|
||||
|
||||
|
||||
Logger::LoggerMap* Logger::_pLoggerMap = 0;
|
||||
Mutex Logger::_mapMtx;
|
||||
const std::string Logger::ROOT;
|
||||
|
||||
|
||||
@ -112,17 +131,17 @@ void Logger::dump(const std::string& msg, const void* buffer, std::size_t length
|
||||
|
||||
void Logger::setLevel(const std::string& name, int level)
|
||||
{
|
||||
Mutex::ScopedLock lock(_mapMtx);
|
||||
std::lock_guard<std::mutex> lock(getLoggerMutex());
|
||||
|
||||
if (_pLoggerMap)
|
||||
{
|
||||
std::string::size_type len = name.length();
|
||||
for (LoggerMap::iterator it = _pLoggerMap->begin(); it != _pLoggerMap->end(); ++it)
|
||||
for (auto & it : *_pLoggerMap)
|
||||
{
|
||||
if (len == 0 ||
|
||||
(it->first.compare(0, len, name) == 0 && (it->first.length() == len || it->first[len] == '.')))
|
||||
(it.first.compare(0, len, name) == 0 && (it.first.length() == len || it.first[len] == '.')))
|
||||
{
|
||||
it->second->setLevel(level);
|
||||
it.second.logger->setLevel(level);
|
||||
}
|
||||
}
|
||||
}
|
||||
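The name test inside these loops treats `name` as a dot-separated prefix: an empty name matches every logger, otherwise a logger is affected if its name equals `name` or starts with `name` followed by '.'. The same predicate, extracted into a small self-contained check:

#include <cassert>
#include <string>

static bool isAffected(const std::string & name, const std::string & logger_name)
{
    const std::string::size_type len = name.length();
    return len == 0
        || (logger_name.compare(0, len, name) == 0
            && (logger_name.length() == len || logger_name[len] == '.'));
}

int main()
{
    assert(isAffected("", "anything"));
    assert(isAffected("db", "db"));
    assert(isAffected("db", "db.query"));
    assert(!isAffected("db", "database"));
}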
@ -131,17 +150,17 @@ void Logger::setLevel(const std::string& name, int level)
|
||||
|
||||
void Logger::setChannel(const std::string& name, Channel* pChannel)
|
||||
{
|
||||
Mutex::ScopedLock lock(_mapMtx);
|
||||
std::lock_guard<std::mutex> lock(getLoggerMutex());
|
||||
|
||||
if (_pLoggerMap)
|
||||
{
|
||||
std::string::size_type len = name.length();
|
||||
for (LoggerMap::iterator it = _pLoggerMap->begin(); it != _pLoggerMap->end(); ++it)
|
||||
for (auto & it : *_pLoggerMap)
|
||||
{
|
||||
if (len == 0 ||
|
||||
(it->first.compare(0, len, name) == 0 && (it->first.length() == len || it->first[len] == '.')))
|
||||
(it.first.compare(0, len, name) == 0 && (it.first.length() == len || it.first[len] == '.')))
|
||||
{
|
||||
it->second->setChannel(pChannel);
|
||||
it.second.logger->setChannel(pChannel);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -150,17 +169,17 @@ void Logger::setChannel(const std::string& name, Channel* pChannel)
|
||||
|
||||
void Logger::setProperty(const std::string& loggerName, const std::string& propertyName, const std::string& value)
|
||||
{
|
||||
Mutex::ScopedLock lock(_mapMtx);
|
||||
std::lock_guard<std::mutex> lock(getLoggerMutex());
|
||||
|
||||
if (_pLoggerMap)
|
||||
{
|
||||
std::string::size_type len = loggerName.length();
|
||||
for (LoggerMap::iterator it = _pLoggerMap->begin(); it != _pLoggerMap->end(); ++it)
|
||||
for (auto & it : *_pLoggerMap)
|
||||
{
|
||||
if (len == 0 ||
|
||||
(it->first.compare(0, len, loggerName) == 0 && (it->first.length() == len || it->first[len] == '.')))
|
||||
(it.first.compare(0, len, loggerName) == 0 && (it.first.length() == len || it.first[len] == '.')))
|
||||
{
|
||||
it->second->setProperty(propertyName, value);
|
||||
it.second.logger->setProperty(propertyName, value);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -280,108 +299,200 @@ void Logger::formatDump(std::string& message, const void* buffer, std::size_t le
|
||||
}
|
||||
|
||||
|
||||
Logger& Logger::get(const std::string& name)
|
||||
namespace
|
||||
{
|
||||
Mutex::ScopedLock lock(_mapMtx);
|
||||
|
||||
return unsafeGet(name);
|
||||
struct LoggerDeleter
|
||||
{
|
||||
void operator()(Poco::Logger * logger)
|
||||
{
|
||||
std::lock_guard<std::mutex> lock(getLoggerMutex());
|
||||
|
||||
/// If the logger infrastructure has already been destroyed, just decrement the logger's reference count
|
||||
if (!_pLoggerMap)
|
||||
{
|
||||
logger->release();
|
||||
return;
|
||||
}
|
||||
|
||||
auto it = _pLoggerMap->find(logger->name());
|
||||
assert(it != _pLoggerMap->end());
|
||||
|
||||
/** If the reference count is 1, this shared pointer owns the logger
* and needs to destroy it.
|
||||
*/
|
||||
size_t reference_count_before_release = logger->release();
|
||||
if (reference_count_before_release == 1)
|
||||
{
|
||||
assert(it->second.owned_by_shared_ptr);
|
||||
_pLoggerMap->erase(it);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
inline LoggerPtr makeLoggerPtr(Logger & logger, bool owned_by_shared_ptr)
|
||||
{
|
||||
if (owned_by_shared_ptr)
|
||||
return LoggerPtr(&logger, LoggerDeleter());
|
||||
|
||||
return LoggerPtr(std::shared_ptr<void>{}, &logger);
|
||||
}
|
||||
|
||||
}
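makeLoggerPtr produces either an owning shared_ptr with a custom deleter or, via the aliasing constructor on an empty shared_ptr<void>, a non-owning pointer with no control block. A standalone illustration of the two modes:

#include <iostream>
#include <memory>

struct Widget {};

int main()
{
    Widget externally_owned; /// lifetime managed elsewhere, e.g. by a registry

    /// Aliasing constructor from an empty shared_ptr: points at the object but owns nothing.
    std::shared_ptr<Widget> non_owning(std::shared_ptr<void>{}, &externally_owned);
    std::cout << non_owning.use_count() << '\n'; /// prints 0, the destructor is never called by it

    /// Owning pointer with a custom deleter, as used for loggers owned by shared_ptr.
    std::shared_ptr<Widget> owning(new Widget, [](Widget * widget) { delete widget; });
    std::cout << owning.use_count() << '\n';     /// prints 1, the deleter runs at scope exit
}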
|
||||
|
||||
|
||||
Logger& Logger::unsafeGet(const std::string& name)
|
||||
Logger& Logger::get(const std::string& name)
|
||||
{
|
||||
Logger* pLogger = find(name);
|
||||
if (!pLogger)
|
||||
std::lock_guard<std::mutex> lock(getLoggerMutex());
|
||||
|
||||
auto [it, inserted] = unsafeGet(name, false /*get_shared*/);
|
||||
return *it->second.logger;
|
||||
}
|
||||
|
||||
|
||||
LoggerPtr Logger::getShared(const std::string & name, bool should_be_owned_by_shared_ptr_if_created)
|
||||
{
|
||||
std::lock_guard<std::mutex> lock(getLoggerMutex());
|
||||
auto [it, inserted] = unsafeGet(name, true /*get_shared*/);
|
||||
|
||||
/** If the logger was created during `unsafeGet`, then this shared pointer owns it.
* If the logger already existed, then this shared pointer does not own it.
|
||||
*/
|
||||
if (inserted && should_be_owned_by_shared_ptr_if_created)
|
||||
it->second.owned_by_shared_ptr = true;
|
||||
|
||||
return makeLoggerPtr(*it->second.logger, it->second.owned_by_shared_ptr);
|
||||
}
|
||||
|
||||
|
||||
std::pair<Logger::LoggerMapIterator, bool> Logger::unsafeGet(const std::string& name, bool get_shared)
|
||||
{
|
||||
std::optional<Logger::LoggerMapIterator> optional_logger_it = find(name);
|
||||
|
||||
if (optional_logger_it)
|
||||
{
|
||||
auto & logger_it = *optional_logger_it;
|
||||
|
||||
if (logger_it->second.owned_by_shared_ptr)
|
||||
{
|
||||
logger_it->second.logger->duplicate();
|
||||
|
||||
if (!get_shared)
|
||||
logger_it->second.owned_by_shared_ptr = false;
|
||||
}
|
||||
}
|
||||
|
||||
if (!optional_logger_it)
|
||||
{
|
||||
Logger * logger = nullptr;
|
||||
|
||||
if (name == ROOT)
|
||||
{
|
||||
pLogger = new Logger(name, 0, Message::PRIO_INFORMATION);
|
||||
logger = new Logger(name, nullptr, Message::PRIO_INFORMATION);
|
||||
}
|
||||
else
|
||||
{
|
||||
Logger& par = parent(name);
|
||||
pLogger = new Logger(name, par.getChannel(), par.getLevel());
|
||||
logger = new Logger(name, par.getChannel(), par.getLevel());
|
||||
}
|
||||
add(pLogger);
|
||||
|
||||
return add(logger);
|
||||
}
|
||||
return *pLogger;
|
||||
|
||||
return std::make_pair(*optional_logger_it, false);
|
||||
}
|
||||
|
||||
|
||||
Logger * Logger::unsafeGetRawPtr(const std::string & name)
|
||||
{
|
||||
return unsafeGet(name, false /*get_shared*/).first->second.logger;
|
||||
}
|
||||
|
||||
|
||||
Logger& Logger::create(const std::string& name, Channel* pChannel, int level)
|
||||
{
|
||||
Mutex::ScopedLock lock(_mapMtx);
|
||||
std::lock_guard<std::mutex> lock(getLoggerMutex());
|
||||
|
||||
if (find(name)) throw ExistsException();
|
||||
Logger* pLogger = new Logger(name, pChannel, level);
|
||||
add(pLogger);
|
||||
return *pLogger;
|
||||
return *unsafeCreate(name, pChannel, level).first->second.logger;
|
||||
}
|
||||
|
||||
LoggerPtr Logger::createShared(const std::string & name, Channel * pChannel, int level)
|
||||
{
|
||||
std::lock_guard<std::mutex> lock(getLoggerMutex());
|
||||
|
||||
auto [it, inserted] = unsafeCreate(name, pChannel, level);
|
||||
it->second.owned_by_shared_ptr = true;
|
||||
|
||||
return makeLoggerPtr(*it->second.logger, it->second.owned_by_shared_ptr);
|
||||
}
|
||||
|
||||
Logger& Logger::root()
|
||||
{
|
||||
Mutex::ScopedLock lock(_mapMtx);
|
||||
std::lock_guard<std::mutex> lock(getLoggerMutex());
|
||||
|
||||
return unsafeGet(ROOT);
|
||||
return *unsafeGetRawPtr(ROOT);
|
||||
}
|
||||
|
||||
|
||||
Logger* Logger::has(const std::string& name)
|
||||
{
|
||||
Mutex::ScopedLock lock(_mapMtx);
|
||||
std::lock_guard<std::mutex> lock(getLoggerMutex());
|
||||
|
||||
return find(name);
|
||||
auto optional_it = find(name);
|
||||
if (!optional_it)
|
||||
return nullptr;
|
||||
|
||||
return (*optional_it)->second.logger;
|
||||
}
|
||||
|
||||
|
||||
void Logger::shutdown()
|
||||
{
|
||||
Mutex::ScopedLock lock(_mapMtx);
|
||||
std::lock_guard<std::mutex> lock(getLoggerMutex());
|
||||
|
||||
if (_pLoggerMap)
|
||||
{
|
||||
for (LoggerMap::iterator it = _pLoggerMap->begin(); it != _pLoggerMap->end(); ++it)
|
||||
for (auto & it : *_pLoggerMap)
|
||||
{
|
||||
it->second->release();
|
||||
if (it.second.owned_by_shared_ptr)
|
||||
continue;
|
||||
|
||||
it.second.logger->release();
|
||||
}
|
||||
|
||||
delete _pLoggerMap;
|
||||
_pLoggerMap = 0;
|
||||
_pLoggerMap = nullptr;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
Logger* Logger::find(const std::string& name)
|
||||
std::optional<Logger::LoggerMapIterator> Logger::find(const std::string& name)
|
||||
{
|
||||
if (_pLoggerMap)
|
||||
{
|
||||
LoggerMap::iterator it = _pLoggerMap->find(name);
|
||||
if (it != _pLoggerMap->end())
|
||||
return it->second;
|
||||
return it;
|
||||
|
||||
return {};
|
||||
}
|
||||
return 0;
|
||||
|
||||
return {};
|
||||
}
|
||||
|
||||
|
||||
void Logger::destroy(const std::string& name)
|
||||
Logger * Logger::findRawPtr(const std::string & name)
|
||||
{
|
||||
Mutex::ScopedLock lock(_mapMtx);
|
||||
auto optional_it = find(name);
|
||||
if (!optional_it)
|
||||
return nullptr;
|
||||
|
||||
if (_pLoggerMap)
|
||||
{
|
||||
LoggerMap::iterator it = _pLoggerMap->find(name);
|
||||
if (it != _pLoggerMap->end())
|
||||
{
|
||||
it->second->release();
|
||||
_pLoggerMap->erase(it);
|
||||
}
|
||||
}
|
||||
return (*optional_it)->second.logger;
|
||||
}
|
||||
|
||||
|
||||
void Logger::names(std::vector<std::string>& names)
|
||||
{
|
||||
Mutex::ScopedLock lock(_mapMtx);
|
||||
std::lock_guard<std::mutex> lock(getLoggerMutex());
|
||||
|
||||
names.clear();
|
||||
if (_pLoggerMap)
|
||||
@ -394,19 +505,27 @@ void Logger::names(std::vector<std::string>& names)
|
||||
}
|
||||
|
||||
|
||||
std::pair<Logger::LoggerMapIterator, bool> Logger::unsafeCreate(const std::string & name, Channel * pChannel, int level)
|
||||
{
|
||||
if (find(name)) throw ExistsException();
|
||||
Logger* pLogger = new Logger(name, pChannel, level);
|
||||
return add(pLogger);
|
||||
}
|
||||
|
||||
|
||||
Logger& Logger::parent(const std::string& name)
|
||||
{
|
||||
std::string::size_type pos = name.rfind('.');
|
||||
if (pos != std::string::npos)
|
||||
{
|
||||
std::string pname = name.substr(0, pos);
|
||||
Logger* pParent = find(pname);
|
||||
Logger* pParent = findRawPtr(pname);
|
||||
if (pParent)
|
||||
return *pParent;
|
||||
else
|
||||
return parent(pname);
|
||||
}
|
||||
else return unsafeGet(ROOT);
|
||||
else return *unsafeGetRawPtr(ROOT);
|
||||
}
|
||||
|
||||
|
||||
@ -474,11 +593,14 @@ namespace
|
||||
}
|
||||
|
||||
|
||||
void Logger::add(Logger* pLogger)
|
||||
std::pair<Logger::LoggerMapIterator, bool> Logger::add(Logger* pLogger)
|
||||
{
|
||||
if (!_pLoggerMap)
|
||||
_pLoggerMap = new LoggerMap;
|
||||
_pLoggerMap->insert(LoggerMap::value_type(pLogger->name(), pLogger));
|
||||
_pLoggerMap = new Logger::LoggerMap;
|
||||
|
||||
auto result = _pLoggerMap->emplace(pLogger->name(), LoggerEntry{pLogger, false /*owned_by_shared_ptr*/});
|
||||
assert(result.second);
|
||||
return result;
|
||||
}
|
||||
|
||||
|
||||
|
@ -45,6 +45,8 @@ namespace Net
|
||||
~HTTPChunkedStreamBuf();
|
||||
void close();
|
||||
|
||||
bool isComplete() const { return _chunk == std::char_traits<char>::eof(); }
|
||||
|
||||
protected:
|
||||
int readFromDevice(char * buffer, std::streamsize length);
|
||||
int writeToDevice(const char * buffer, std::streamsize length);
|
||||
@ -68,6 +70,8 @@ namespace Net
|
||||
~HTTPChunkedIOS();
|
||||
HTTPChunkedStreamBuf * rdbuf();
|
||||
|
||||
bool isComplete() const { return _buf.isComplete(); }
|
||||
|
||||
protected:
|
||||
HTTPChunkedStreamBuf _buf;
|
||||
};
|
||||
|
@ -210,7 +210,7 @@ namespace Net
|
||||
void setKeepAliveTimeout(const Poco::Timespan & timeout);
|
||||
/// Sets the connection timeout for HTTP connections.
|
||||
|
||||
const Poco::Timespan & getKeepAliveTimeout() const;
|
||||
Poco::Timespan getKeepAliveTimeout() const;
|
||||
/// Returns the connection timeout for HTTP connections.
|
||||
|
||||
virtual std::ostream & sendRequest(HTTPRequest & request);
|
||||
@ -275,7 +275,7 @@ namespace Net
|
||||
/// This method should only be called if the request contains
|
||||
/// a "Expect: 100-continue" header.
|
||||
|
||||
void flushRequest();
|
||||
virtual void flushRequest();
|
||||
/// Flushes the request stream.
|
||||
///
|
||||
/// Normally this method does not need to be called.
|
||||
@ -283,7 +283,7 @@ namespace Net
|
||||
/// fully sent if receiveResponse() is not called, e.g.,
|
||||
/// because the underlying socket will be detached.
|
||||
|
||||
void reset();
|
||||
virtual void reset();
|
||||
/// Resets the session and closes the socket.
|
||||
///
|
||||
/// The next request will initiate a new connection,
|
||||
@ -303,6 +303,9 @@ namespace Net
|
||||
/// Returns true if the proxy should be bypassed
|
||||
/// for the current host.
|
||||
|
||||
const Poco::Timestamp & getLastRequest() const;
|
||||
/// Returns the time when the connection was last used
|
||||
|
||||
protected:
|
||||
enum
|
||||
{
|
||||
@ -338,6 +341,10 @@ namespace Net
|
||||
/// Calls proxyConnect() and attaches the resulting StreamSocket
|
||||
/// to the HTTPClientSession.
|
||||
|
||||
void setLastRequest(Poco::Timestamp time);
|
||||
|
||||
void assign(HTTPClientSession & session);
|
||||
|
||||
HTTPSessionFactory _proxySessionFactory;
|
||||
/// Factory to create HTTPClientSession to proxy.
|
||||
private:
|
||||
@ -433,11 +440,20 @@ namespace Net
|
||||
}
|
||||
|
||||
|
||||
inline const Poco::Timespan & HTTPClientSession::getKeepAliveTimeout() const
|
||||
inline Poco::Timespan HTTPClientSession::getKeepAliveTimeout() const
|
||||
{
|
||||
return _keepAliveTimeout;
|
||||
}
|
||||
|
||||
inline const Poco::Timestamp & HTTPClientSession::getLastRequest() const
|
||||
{
|
||||
return _lastRequest;
|
||||
}
|
||||
|
||||
inline void HTTPClientSession::setLastRequest(Poco::Timestamp time)
|
||||
{
|
||||
_lastRequest = time;
|
||||
}
|
||||
|
||||
}
|
||||
} // namespace Poco::Net
|
||||
|
@ -48,6 +48,8 @@ namespace Net
|
||||
HTTPFixedLengthStreamBuf(HTTPSession & session, ContentLength length, openmode mode);
|
||||
~HTTPFixedLengthStreamBuf();
|
||||
|
||||
bool isComplete() const;
|
||||
|
||||
protected:
|
||||
int readFromDevice(char * buffer, std::streamsize length);
|
||||
int writeToDevice(const char * buffer, std::streamsize length);
|
||||
@ -67,6 +69,8 @@ namespace Net
|
||||
~HTTPFixedLengthIOS();
|
||||
HTTPFixedLengthStreamBuf * rdbuf();
|
||||
|
||||
bool isComplete() const { return _buf.isComplete(); }
|
||||
|
||||
protected:
|
||||
HTTPFixedLengthStreamBuf _buf;
|
||||
};
|
||||
|
@ -30,7 +30,6 @@ namespace Net
|
||||
|
||||
|
||||
class HTTPServerRequest;
|
||||
class HTTPServerResponse;
|
||||
class HTTPRequestHandler;
|
||||
|
||||
|
||||
|
@ -64,6 +64,15 @@ namespace Net
Poco::Timespan getTimeout() const;
/// Returns the timeout for the HTTP session.

Poco::Timespan getConnectionTimeout() const;
/// Returns connection timeout for the HTTP session.

Poco::Timespan getSendTimeout() const;
/// Returns send timeout for the HTTP session.

Poco::Timespan getReceiveTimeout() const;
/// Returns receive timeout for the HTTP session.

bool connected() const;
/// Returns true if the underlying socket is connected.

@ -217,12 +226,25 @@ namespace Net
return _keepAlive;
}


inline Poco::Timespan HTTPSession::getTimeout() const
{
return _receiveTimeout;
}

inline Poco::Timespan HTTPSession::getConnectionTimeout() const
{
return _connectionTimeout;
}

inline Poco::Timespan HTTPSession::getSendTimeout() const
{
return _sendTimeout;
}

inline Poco::Timespan HTTPSession::getReceiveTimeout() const
{
return _receiveTimeout;
}

inline StreamSocket & HTTPSession::socket()
{

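Note: the point of splitting the getters is that each timeout can now be read back individually, which is what HTTPClientSession::assign() (later in this diff) relies on to carry settings from one session to another. A minimal sketch of the same pattern; both session objects are placeholders:

    #include <Poco/Net/HTTPClientSession.h>

    /// Copy the individual timeouts from one session to another, mirroring the
    /// calls made by HTTPClientSession::assign() further down in this diff.
    void copyTimeouts(const Poco::Net::HTTPClientSession & from, Poco::Net::HTTPClientSession & to)
    {
        to.setTimeout(from.getConnectionTimeout(), from.getSendTimeout(), from.getReceiveTimeout());
        to.setKeepAliveTimeout(from.getKeepAliveTimeout());
    }
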
@ -63,6 +63,8 @@ namespace Net
~HTTPIOS();
HTTPStreamBuf * rdbuf();

bool isComplete() const { return false; }

protected:
HTTPStreamBuf _buf;
};

@ -49,10 +49,12 @@ HTTPChunkedStreamBuf::~HTTPChunkedStreamBuf()

void HTTPChunkedStreamBuf::close()
{
if (_mode & std::ios::out)
if (_mode & std::ios::out && _chunk != std::char_traits<char>::eof())
{
sync();
_session.write("0\r\n\r\n", 5);

_chunk = std::char_traits<char>::eof();
}
}

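Note: the added `_chunk != eof` condition makes close() idempotent, so the terminating "0\r\n\r\n" chunk is written only once even if close() runs again later (for example, from a destructor). A standalone sketch of the same guard pattern with illustrative names, not Poco's actual classes:

    #include <iostream>

    class ChunkedWriter
    {
    public:
        void close()
        {
            if (!closed)                      // first call only
            {
                std::cout << "0\r\n\r\n";     // terminating chunk
                closed = true;                // later calls become no-ops
            }
        }
        ~ChunkedWriter() { close(); }         // safe: the guard prevents a second terminator
    private:
        bool closed = false;
    };
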
@ -227,7 +227,7 @@ void HTTPClientSession::setKeepAliveTimeout(const Poco::Timespan& timeout)
std::ostream& HTTPClientSession::sendRequest(HTTPRequest& request)
{
_pRequestStream = 0;
_pResponseStream = 0;
_pResponseStream = 0;
clearException();
_responseReceived = false;

@ -501,5 +501,26 @@ bool HTTPClientSession::bypassProxy() const
else return false;
}

void HTTPClientSession::assign(Poco::Net::HTTPClientSession & session)
{
poco_assert (this != &session);

if (session.buffered())
throw Poco::LogicException("assign a session with not empty buffered data");

if (buffered())
throw Poco::LogicException("assign to a session with not empty buffered data");

attachSocket(session.detachSocket());
setLastRequest(session.getLastRequest());
setResolvedHost(session.getResolvedHost());
setKeepAlive(session.getKeepAlive());

setTimeout(session.getConnectionTimeout(), session.getSendTimeout(), session.getReceiveTimeout());
setKeepAliveTimeout(session.getKeepAliveTimeout());
setProxyConfig(session.getProxyConfig());

session.reset();
}

} } // namespace Poco::Net

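Note: assign() hands the live socket plus its resolved host, keep-alive state, timeouts and proxy configuration over to another session and then resets the donor. Since it is declared in the protected part of the header above, a sketch of how a pooled-session type might use it looks like this; the class and method names are illustrative only, not part of this diff:

    #include <Poco/Net/HTTPClientSession.h>
    #include <Poco/Timestamp.h>

    /// Hypothetical pooled session: deriving from HTTPClientSession gives it
    /// access to the protected assign()/setLastRequest() helpers added here.
    class PooledHTTPClientSession : public Poco::Net::HTTPClientSession
    {
    public:
        /// Take over the live connection of `donor`; `donor` is reset afterwards.
        void adoptConnection(PooledHTTPClientSession & donor)
        {
            assign(donor);
            setLastRequest(Poco::Timestamp());  // mark the hand-over time
        }
    };
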
@ -43,6 +43,12 @@ HTTPFixedLengthStreamBuf::~HTTPFixedLengthStreamBuf()
}


bool HTTPFixedLengthStreamBuf::isComplete() const
{
return _count == _length;
}


int HTTPFixedLengthStreamBuf::readFromDevice(char* buffer, std::streamsize length)
{
int n = 0;

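Note: isComplete() reports whether exactly _length bytes have passed through the buffer, so a caller can tell whether a fixed-length response body was fully consumed. A sketch under the assumption that the usual Poco HTTPFixedLengthInputStream wrapper is used (it inherits isComplete() from HTTPFixedLengthIOS, shown earlier in this diff):

    #include <Poco/Net/HTTPFixedLengthStream.h>
    #include <istream>
    #include <limits>

    /// Drain whatever is left of a fixed-length body, then report whether the
    /// full Content-Length was actually read.
    bool bodyFullyRead(Poco::Net::HTTPFixedLengthInputStream & body)
    {
        body.ignore(std::numeric_limits<std::streamsize>::max());
        return body.isComplete();
    }
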
@ -93,7 +93,7 @@ void TCPServerDispatcher::release()

void TCPServerDispatcher::run()
{
AutoPtr<TCPServerDispatcher> guard(this, true); // ensure object stays alive
AutoPtr<TCPServerDispatcher> guard(this); // ensure object stays alive

int idleTime = (int) _pParams->getThreadIdleTime().totalMilliseconds();

@ -149,11 +149,13 @@ void TCPServerDispatcher::enqueue(const StreamSocket& socket)
{
try
{
this->duplicate();
_threadPool.startWithPriority(_pParams->getThreadPriority(), *this, threadName);
++_currentThreads;
}
catch (Poco::Exception& exc)
{
this->release();
++_refusedConnections;
std::cerr << "Got exception while starting thread for connection. Error code: "
<< exc.code() << ", message: '" << exc.displayText() << "'" << std::endl;

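Note: the guard change pairs with the new duplicate()/release() calls in enqueue(): enqueue() takes one extra reference before handing the dispatcher to the thread pool, and run() now constructs its AutoPtr without `shared = true`, so it adopts that reference instead of adding a second one (Poco's AutoPtr(p) takes ownership as-is, while AutoPtr(p, true) increments the count). A minimal standalone sketch of this adopt-versus-share distinction with a toy refcounted type, not Poco's actual classes:

    #include <cassert>

    struct RefCounted
    {
        int refs = 1;
        void duplicate() { ++refs; }
        void release() { --refs; }   // real code would delete the object at zero
    };

    /// Toy AutoPtr: by default adopts an existing reference; with shared=true it adds one.
    struct ToyAutoPtr
    {
        RefCounted * p;
        ToyAutoPtr(RefCounted * ptr, bool shared = false) : p(ptr) { if (shared) p->duplicate(); }
        ~ToyAutoPtr() { p->release(); }
    };

    int main()
    {
        RefCounted d;                 // one reference held by the "server"
        d.duplicate();                // enqueue(): extra reference for the worker thread
        {
            ToyAutoPtr guard(&d);     // run(): adopt the reference taken by enqueue()
        }                             // guard releases exactly that one reference
        assert(d.refs == 1);          // the server's own reference is still intact
        return 0;
    }
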
@ -2,11 +2,11 @@

# NOTE: has nothing common with DBMS_TCP_PROTOCOL_VERSION,
# only DBMS_TCP_PROTOCOL_VERSION should be incremented on protocol changes.
SET(VERSION_REVISION 54482)
SET(VERSION_REVISION 54484)
SET(VERSION_MAJOR 24)
SET(VERSION_MINOR 1)
SET(VERSION_MINOR 3)
SET(VERSION_PATCH 1)
SET(VERSION_GITHASH a2faa65b080a587026c86844f3a20c74d23a86f8)
SET(VERSION_DESCRIBE v24.1.1.1-testing)
SET(VERSION_STRING 24.1.1.1)
SET(VERSION_GITHASH 891689a41506d00aa169548f5b4a8774351242c4)
SET(VERSION_DESCRIBE v24.3.1.1-testing)
SET(VERSION_STRING 24.3.1.1)
# end of autochange

@ -1,17 +0,0 @@
# see ./CMakeLists.txt for variable declaration
if (FUZZER)
if (FUZZER STREQUAL "libfuzzer")
# NOTE: Eldar Zaitov decided to name it "libfuzzer" instead of "fuzzer" to keep in mind another possible fuzzer backends.
# NOTE: no-link means that all the targets are built with instrumentation for fuzzer, but only some of them
# (tests) have entry point for fuzzer and it's not checked.
set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${SAN_FLAGS} -fsanitize=fuzzer-no-link -DFUZZER=1")
set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${SAN_FLAGS} -fsanitize=fuzzer-no-link -DFUZZER=1")

# NOTE: oss-fuzz can change LIB_FUZZING_ENGINE variable
if (NOT LIB_FUZZING_ENGINE)
set (LIB_FUZZING_ENGINE "-fsanitize=fuzzer")
endif ()
else ()
message (FATAL_ERROR "Unknown fuzzer type: ${FUZZER}")
endif ()
endif()

@ -63,14 +63,14 @@ endif()
option(WITH_COVERAGE "Instrumentation for code coverage with default implementation" OFF)

if (WITH_COVERAGE)
message (INFORMATION "Enabled instrumentation for code coverage")
message (STATUS "Enabled instrumentation for code coverage")
set(COVERAGE_FLAGS "-fprofile-instr-generate -fcoverage-mapping")
endif()

option (SANITIZE_COVERAGE "Instrumentation for code coverage with custom callbacks" OFF)

if (SANITIZE_COVERAGE)
message (INFORMATION "Enabled instrumentation for code coverage")
message (STATUS "Enabled instrumentation for code coverage")

# We set this define for whole build to indicate that at least some parts are compiled with coverage.
# And to expose it in system.build_options.
@ -79,7 +79,10 @@ if (SANITIZE_COVERAGE)

# But the actual coverage will be enabled on per-library basis: for ClickHouse code, but not for 3rd-party.
set (COVERAGE_FLAGS "-fsanitize-coverage=trace-pc-guard,pc-table")
endif()

set (WITHOUT_COVERAGE_FLAGS "-fno-profile-instr-generate -fno-coverage-mapping -fno-sanitize-coverage=trace-pc-guard,pc-table")
set (WITHOUT_COVERAGE_FLAGS_LIST -fno-profile-instr-generate -fno-coverage-mapping -fno-sanitize-coverage=trace-pc-guard,pc-table)
set (WITHOUT_COVERAGE_FLAGS "-fno-profile-instr-generate -fno-coverage-mapping -fno-sanitize-coverage=trace-pc-guard,pc-table")
set (WITHOUT_COVERAGE_FLAGS_LIST -fno-profile-instr-generate -fno-coverage-mapping -fno-sanitize-coverage=trace-pc-guard,pc-table)
else()
set (WITHOUT_COVERAGE_FLAGS "")
set (WITHOUT_COVERAGE_FLAGS_LIST "")
endif()

@ -46,5 +46,6 @@ if (COMPILER_CLANG)
no_warning(thread-safety-negative) # experimental flag, too many false positives
no_warning(enum-constexpr-conversion) # breaks magic-enum library in clang-16
no_warning(unsafe-buffer-usage) # too aggressive
no_warning(switch-default) # conflicts with "defaults in a switch covering all enum values"
# TODO Enable conversion, sign-conversion, double-promotion warnings.
endif ()

contrib/NuRaft (vendored)
@ -1 +1 @@
Subproject commit 1278e32bb0d5dc489f947e002bdf8c71b0ddaa63
Subproject commit 4a12f99dfc9d47c687ff7700b927cc76856225d1

contrib/aws (vendored)
@ -1 +1 @@
Subproject commit ca02358dcc7ce3ab733dd4cbcc32734eecfa4ee3
Subproject commit 32870e234cac03e0ac46370c26858b0ffdf14200

contrib/aws-c-auth (vendored)
@ -1 +1 @@
Subproject commit 97133a2b5dbca1ccdf88cd6f44f39d0531d27d12
Subproject commit baeffa791d9d1cf61460662a6d9ac2186aaf05df

contrib/aws-c-cal (vendored)
@ -1 +1 @@
Subproject commit 85dd7664b786a389c6fb1a6f031ab4bb2282133d
Subproject commit 9453687ff5493ba94eaccf8851200565c4364c77

contrib/aws-c-common (vendored)
@ -1 +1 @@
Subproject commit 45dcb2849c891dba2100b270b4676765c92949ff
Subproject commit 80f21b3cac5ac51c6b8a62c7d2a5ef58a75195ee

contrib/aws-c-compression (vendored)
@ -1 +1 @@
Subproject commit b517b7decd0dac30be2162f5186c250221c53aff
Subproject commit 99ec79ee2970f1a045d4ced1501b97ee521f2f85

contrib/aws-c-event-stream (vendored)
@ -1 +1 @@
Subproject commit 2f9b60c42f90840ec11822acda3d8cdfa97a773d
Subproject commit 08f24e384e5be20bcffa42b49213d24dad7881ae

contrib/aws-c-http (vendored)
@ -1 +1 @@
Subproject commit dd34461987947672444d0bc872c5a733dfdb9711
Subproject commit a082f8a2067e4a31db73f1d4ffd702a8dc0f7089

contrib/aws-c-io (vendored)
@ -1 +1 @@
Subproject commit d58ed4f272b1cb4f89ac9196526ceebe5f2b0d89
Subproject commit 11ce3c750a1dac7b04069fc5bff89e97e91bad4d

contrib/aws-c-mqtt (vendored)
@ -1 +1 @@
Subproject commit 33c3455cec82b16feb940e12006cefd7b3ef4194
Subproject commit 6d36cd3726233cb757468d0ea26f6cd8dad151ec

contrib/aws-c-s3 (vendored)
@ -1 +1 @@
Subproject commit d7bfe602d6925948f1fff95784e3613cca6a3900
Subproject commit de36fee8fe7ab02f10987877ae94a805bf440c1f

contrib/aws-c-sdkutils (vendored)
@ -1 +1 @@
Subproject commit 208a701fa01e99c7c8cc3dcebc8317da71362972
Subproject commit fd8c0ba2e233997eaaefe82fb818b8b444b956d3

contrib/aws-checksums (vendored)
@ -1 +1 @@
Subproject commit ad53be196a25bbefa3700a01187fdce573a7d2d0
Subproject commit 321b805559c8e911be5bddba13fcbd222a3e2d3a

@ -25,6 +25,7 @@ include("${ClickHouse_SOURCE_DIR}/contrib/aws-cmake/AwsFeatureTests.cmake")
include("${ClickHouse_SOURCE_DIR}/contrib/aws-cmake/AwsThreadAffinity.cmake")
include("${ClickHouse_SOURCE_DIR}/contrib/aws-cmake/AwsThreadName.cmake")
include("${ClickHouse_SOURCE_DIR}/contrib/aws-cmake/AwsSIMD.cmake")
include("${ClickHouse_SOURCE_DIR}/contrib/aws-crt-cpp/cmake/AwsGetVersion.cmake")


# Gather sources and options.
@ -35,6 +36,8 @@ set(AWS_PUBLIC_COMPILE_DEFS)
set(AWS_PRIVATE_COMPILE_DEFS)
set(AWS_PRIVATE_LIBS)

list(APPEND AWS_PRIVATE_COMPILE_DEFS "-DINTEL_NO_ITTNOTIFY_API")

if (CMAKE_BUILD_TYPE_UC STREQUAL "DEBUG")
list(APPEND AWS_PRIVATE_COMPILE_DEFS "-DDEBUG_BUILD")
endif()
@ -85,14 +88,20 @@ file(GLOB AWS_SDK_CORE_SRC
"${AWS_SDK_CORE_DIR}/source/external/cjson/*.cpp"
"${AWS_SDK_CORE_DIR}/source/external/tinyxml2/*.cpp"
"${AWS_SDK_CORE_DIR}/source/http/*.cpp"
"${AWS_SDK_CORE_DIR}/source/http/crt/*.cpp"
"${AWS_SDK_CORE_DIR}/source/http/standard/*.cpp"
"${AWS_SDK_CORE_DIR}/source/internal/*.cpp"
"${AWS_SDK_CORE_DIR}/source/monitoring/*.cpp"
"${AWS_SDK_CORE_DIR}/source/net/*.cpp"
"${AWS_SDK_CORE_DIR}/source/net/linux-shared/*.cpp"
"${AWS_SDK_CORE_DIR}/source/platform/linux-shared/*.cpp"
"${AWS_SDK_CORE_DIR}/source/smithy/tracing/*.cpp"
"${AWS_SDK_CORE_DIR}/source/utils/*.cpp"
"${AWS_SDK_CORE_DIR}/source/utils/base64/*.cpp"
"${AWS_SDK_CORE_DIR}/source/utils/component-registry/*.cpp"
"${AWS_SDK_CORE_DIR}/source/utils/crypto/*.cpp"
"${AWS_SDK_CORE_DIR}/source/utils/crypto/openssl/*.cpp"
"${AWS_SDK_CORE_DIR}/source/utils/crypto/factory/*.cpp"
"${AWS_SDK_CORE_DIR}/source/utils/crypto/openssl/*.cpp"
"${AWS_SDK_CORE_DIR}/source/utils/event/*.cpp"
"${AWS_SDK_CORE_DIR}/source/utils/json/*.cpp"
"${AWS_SDK_CORE_DIR}/source/utils/logging/*.cpp"
@ -115,9 +124,8 @@ OPTION(USE_AWS_MEMORY_MANAGEMENT "Aws memory management" OFF)
configure_file("${AWS_SDK_CORE_DIR}/include/aws/core/SDKConfig.h.in"
"${CMAKE_CURRENT_BINARY_DIR}/include/aws/core/SDKConfig.h" @ONLY)

list(APPEND AWS_PUBLIC_COMPILE_DEFS "-DAWS_SDK_VERSION_MAJOR=1")
list(APPEND AWS_PUBLIC_COMPILE_DEFS "-DAWS_SDK_VERSION_MINOR=10")
list(APPEND AWS_PUBLIC_COMPILE_DEFS "-DAWS_SDK_VERSION_PATCH=36")
aws_get_version(AWS_CRT_CPP_VERSION_MAJOR AWS_CRT_CPP_VERSION_MINOR AWS_CRT_CPP_VERSION_PATCH FULL_VERSION GIT_HASH)
configure_file("${AWS_CRT_DIR}/include/aws/crt/Config.h.in" "${AWS_CRT_DIR}/include/aws/crt/Config.h" @ONLY)

list(APPEND AWS_SOURCES ${AWS_SDK_CORE_SRC} ${AWS_SDK_CORE_NET_SRC} ${AWS_SDK_CORE_PLATFORM_SRC})

@ -176,6 +184,7 @@ file(GLOB AWS_COMMON_SRC
"${AWS_COMMON_DIR}/source/*.c"
"${AWS_COMMON_DIR}/source/external/*.c"
"${AWS_COMMON_DIR}/source/posix/*.c"
"${AWS_COMMON_DIR}/source/linux/*.c"
)

file(GLOB AWS_COMMON_ARCH_SRC

contrib/aws-crt-cpp (vendored)
@ -1 +1 @@
Subproject commit 8a301b7e842f1daed478090c869207300972379f
Subproject commit f532d6abc0d2b0d8b5d6fe9e7c51eaedbe4afbd0

contrib/aws-s2n-tls (vendored)
@ -1 +1 @@
Subproject commit 71f4794b7580cf780eb4aca77d69eded5d3c7bb4
Subproject commit 9a1e75454023e952b366ce1eab9c54007250119f

contrib/cctz (vendored)
@ -1 +1 @@
Subproject commit 8529bcef5cd996b7c0f4d7475286b76b5d126c4c
Subproject commit 7918cb7afe82e53428e39a045a437fdfd4f3df47

@ -1,8 +1,5 @@
if (NOT ENABLE_LIBRARIES)
set(DEFAULT_ENABLE_RUST FALSE)
elseif((CMAKE_TOOLCHAIN_FILE MATCHES "darwin") AND (CMAKE_TOOLCHAIN_FILE MATCHES "aarch64"))
message(STATUS "Rust is not available on aarch64-apple-darwin")
set(DEFAULT_ENABLE_RUST FALSE)
else()
list (APPEND CMAKE_MODULE_PATH "${ClickHouse_SOURCE_DIR}/contrib/corrosion/cmake")
find_package(Rust)
@ -19,27 +16,30 @@ message(STATUS "Checking Rust toolchain for current target")

# See https://doc.rust-lang.org/nightly/rustc/platform-support.html

if((CMAKE_TOOLCHAIN_FILE MATCHES "linux/toolchain-x86_64") AND (CMAKE_TOOLCHAIN_FILE MATCHES "musl"))
set(Rust_CARGO_TARGET "x86_64-unknown-linux-musl")
elseif(CMAKE_TOOLCHAIN_FILE MATCHES "linux/toolchain-x86_64")
set(Rust_CARGO_TARGET "x86_64-unknown-linux-gnu")
elseif((CMAKE_TOOLCHAIN_FILE MATCHES "linux/toolchain-aarch64") AND (CMAKE_TOOLCHAIN_FILE MATCHES "musl"))
set(Rust_CARGO_TARGET "aarch64-unknown-linux-musl")
elseif(CMAKE_TOOLCHAIN_FILE MATCHES "linux/toolchain-aarch64")
set(Rust_CARGO_TARGET "aarch64-unknown-linux-gnu")
elseif((CMAKE_TOOLCHAIN_FILE MATCHES "darwin") AND (CMAKE_TOOLCHAIN_FILE MATCHES "x86_64"))
set(Rust_CARGO_TARGET "x86_64-apple-darwin")
elseif((CMAKE_TOOLCHAIN_FILE MATCHES "freebsd") AND (CMAKE_TOOLCHAIN_FILE MATCHES "x86_64"))
set(Rust_CARGO_TARGET "x86_64-unknown-freebsd")
elseif(CMAKE_TOOLCHAIN_FILE MATCHES "linux/toolchain-riscv64")
set(Rust_CARGO_TARGET "riscv64gc-unknown-linux-gnu")
endif()

if(CMAKE_TOOLCHAIN_FILE MATCHES "ppc64le")
set(Rust_CARGO_TARGET "powerpc64le-unknown-linux-gnu")
endif()

message(STATUS "Switched Rust target to ${Rust_CARGO_TARGET}")
if(DEFINED CMAKE_TOOLCHAIN_FILE)
if(CMAKE_TOOLCHAIN_FILE MATCHES "ppc64le")
set(Rust_CARGO_TARGET "powerpc64le-unknown-linux-gnu")
elseif((CMAKE_TOOLCHAIN_FILE MATCHES "linux/toolchain-x86_64") AND (CMAKE_TOOLCHAIN_FILE MATCHES "musl"))
set(Rust_CARGO_TARGET "x86_64-unknown-linux-musl")
elseif(CMAKE_TOOLCHAIN_FILE MATCHES "linux/toolchain-x86_64")
set(Rust_CARGO_TARGET "x86_64-unknown-linux-gnu")
elseif((CMAKE_TOOLCHAIN_FILE MATCHES "linux/toolchain-aarch64") AND (CMAKE_TOOLCHAIN_FILE MATCHES "musl"))
set(Rust_CARGO_TARGET "aarch64-unknown-linux-musl")
elseif(CMAKE_TOOLCHAIN_FILE MATCHES "linux/toolchain-aarch64")
set(Rust_CARGO_TARGET "aarch64-unknown-linux-gnu")
elseif((CMAKE_TOOLCHAIN_FILE MATCHES "darwin") AND (CMAKE_TOOLCHAIN_FILE MATCHES "x86_64"))
set(Rust_CARGO_TARGET "x86_64-apple-darwin")
elseif((CMAKE_TOOLCHAIN_FILE MATCHES "darwin") AND (CMAKE_TOOLCHAIN_FILE MATCHES "aarch64"))
set(Rust_CARGO_TARGET "aarch64-apple-darwin")
elseif((CMAKE_TOOLCHAIN_FILE MATCHES "freebsd") AND (CMAKE_TOOLCHAIN_FILE MATCHES "x86_64"))
set(Rust_CARGO_TARGET "x86_64-unknown-freebsd")
elseif(CMAKE_TOOLCHAIN_FILE MATCHES "linux/toolchain-riscv64")
set(Rust_CARGO_TARGET "riscv64gc-unknown-linux-gnu")
else()
message(FATAL_ERROR "Unsupported rust target")
endif()
message(STATUS "Switched Rust target to ${Rust_CARGO_TARGET}")
endif ()

# FindRust.cmake
list(APPEND CMAKE_MODULE_PATH "${ClickHouse_SOURCE_DIR}/contrib/corrosion/cmake")

contrib/curl (vendored)
@ -1 +1 @@
Subproject commit d755a5f7c009dd63a61b2c745180d8ba937cbfeb
Subproject commit 1a05e833f8f7140628b27882b10525fd9ec4b873

@ -157,7 +157,7 @@ if (TARGET ch_contrib::zlib)
endif()

if (TARGET ch_contrib::zstd)
target_compile_definitions(_libarchive PUBLIC HAVE_ZSTD_H=1 HAVE_LIBZSTD=1)
target_compile_definitions(_libarchive PUBLIC HAVE_ZSTD_H=1 HAVE_LIBZSTD=1 HAVE_LIBZSTD_COMPRESSOR=1)
target_link_libraries(_libarchive PRIVATE ch_contrib::zstd)
endif()

contrib/libhdfs3 (vendored)
@ -1 +1 @@
Subproject commit b9598e6016720a7c088bfe85ce1fa0410f9d2103
Subproject commit 0d04201c45359f0d0701fb1e8297d25eff7cfecf

@ -17,6 +17,8 @@
#ifndef METROHASH_METROHASH_128_H
#define METROHASH_METROHASH_128_H

// NOLINTBEGIN(readability-avoid-const-params-in-decls)

#include <stdint.h>

class MetroHash128
@ -25,7 +27,7 @@ public:
static const uint32_t bits = 128;

// Constructor initializes the same as Initialize()
MetroHash128(const uint64_t seed=0);
explicit MetroHash128(const uint64_t seed=0);

// Initializes internal state for new hash with optional seed
void Initialize(const uint64_t seed=0);
@ -68,5 +70,6 @@ private:
void metrohash128_1(const uint8_t * key, uint64_t len, uint32_t seed, uint8_t * out);
void metrohash128_2(const uint8_t * key, uint64_t len, uint32_t seed, uint8_t * out);

// NOLINTEND(readability-avoid-const-params-in-decls)

#endif // #ifndef METROHASH_METROHASH_128_H

@ -26,13 +26,13 @@ const uint8_t MetroHash64::test_seed_1[8] = { 0x3B, 0x0D, 0x48, 0x1C, 0xF4, 0x



MetroHash64::MetroHash64(const uint64_t seed)
MetroHash64::MetroHash64(uint64_t seed)
{
Initialize(seed);
}


void MetroHash64::Initialize(const uint64_t seed)
void MetroHash64::Initialize(uint64_t seed)
{
vseed = (static_cast<uint64_t>(seed) + k2) * k0;

@ -47,7 +47,7 @@ void MetroHash64::Initialize(const uint64_t seed)
}


void MetroHash64::Update(const uint8_t * const buffer, const uint64_t length)
void MetroHash64::Update(const uint8_t * const buffer, uint64_t length)
{
const uint8_t * ptr = reinterpret_cast<const uint8_t*>(buffer);
const uint8_t * const end = ptr + length;
@ -90,7 +90,7 @@ void MetroHash64::Update(const uint8_t * const buffer, const uint64_t length)
}


void MetroHash64::Finalize(uint8_t * const hash)
void MetroHash64::Finalize(uint8_t * hash)
{
// finalize bulk loop, if used
if (bytes >= 32)
@ -152,7 +152,7 @@ void MetroHash64::Finalize(uint8_t * const hash)
}


void MetroHash64::Hash(const uint8_t * buffer, const uint64_t length, uint8_t * const hash, const uint64_t seed)
void MetroHash64::Hash(const uint8_t * buffer, uint64_t length, uint8_t * const hash, uint64_t seed)
{
const uint8_t * ptr = reinterpret_cast<const uint8_t*>(buffer);
const uint8_t * const end = ptr + length;

@ -25,24 +25,24 @@ public:
static const uint32_t bits = 64;

// Constructor initializes the same as Initialize()
MetroHash64(const uint64_t seed=0);
explicit MetroHash64(uint64_t seed=0);

// Initializes internal state for new hash with optional seed
void Initialize(const uint64_t seed=0);
void Initialize(uint64_t seed=0);

// Update the hash state with a string of bytes. If the length
// is sufficiently long, the implementation switches to a bulk
// hashing algorithm directly on the argument buffer for speed.
void Update(const uint8_t * buffer, const uint64_t length);
void Update(const uint8_t * buffer, uint64_t length);

// Constructs the final hash and writes it to the argument buffer.
// After a hash is finalized, this instance must be Initialized()-ed
// again or the behavior of Update() and Finalize() is undefined.
void Finalize(uint8_t * const hash);
void Finalize(uint8_t * hash);

// A non-incremental function implementation. This can be significantly
// faster than the incremental implementation for some usage patterns.
static void Hash(const uint8_t * buffer, const uint64_t length, uint8_t * const hash, const uint64_t seed=0);
static void Hash(const uint8_t * buffer, uint64_t length, uint8_t * hash, uint64_t seed=0);

// Does implementation correctly execute test vectors?
static bool ImplementationVerified();

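Note: the signature cleanup (explicit constructor, top-level const dropped from value parameters) does not change how the class is used. A small usage sketch of the incremental and one-shot APIs declared above; the input string is just an example, and the include assumes the metrohash headers are on the include path:

    #include <cstdint>
    #include <cstring>
    #include <metrohash64.h>

    int main()
    {
        const char * data = "example payload";
        const uint64_t len = std::strlen(data);

        // Incremental API: Initialize (via the constructor) / Update / Finalize.
        uint8_t incremental[8];
        MetroHash64 state(/*seed=*/42);
        state.Update(reinterpret_cast<const uint8_t *>(data), len);
        state.Finalize(incremental);

        // One-shot API: static Hash() computes the same 64-bit digest in one call.
        uint8_t oneShot[8];
        MetroHash64::Hash(reinterpret_cast<const uint8_t *>(data), len, oneShot, /*seed=*/42);

        return std::memcmp(incremental, oneShot, sizeof(incremental)) == 0 ? 0 : 1;
    }
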
contrib/libssh (vendored)
@ -1 +1 @@
Subproject commit 2c76332ef56d90f55965ab24da6b6dbcbef29c4c
Subproject commit ed4011b91873836713576475a98cd132cd834539

@ -1,4 +1,4 @@
option (ENABLE_SSH "Enable support for SSH keys and protocol" ON)
option (ENABLE_SSH "Enable support for SSH keys and protocol" ${ENABLE_LIBRARIES})

if (NOT ENABLE_SSH)
message(STATUS "Not using SSH")
@ -8,24 +8,12 @@ endif()
set(LIB_SOURCE_DIR "${ClickHouse_SOURCE_DIR}/contrib/libssh")
set(LIB_BINARY_DIR "${ClickHouse_BINARY_DIR}/contrib/libssh")

project(libssh VERSION 0.9.7 LANGUAGES C)
# Set CMake variables which are used in libssh_version.h.cmake
project(libssh VERSION 0.9.8 LANGUAGES C)

# global needed variable
set(APPLICATION_NAME ${PROJECT_NAME})

# SOVERSION scheme: CURRENT.AGE.REVISION
# If there was an incompatible interface change:
# Increment CURRENT. Set AGE and REVISION to 0
# If there was a compatible interface change:
# Increment AGE. Set REVISION to 0
# If the source code was changed, but there were no interface changes:
# Increment REVISION.
set(LIBRARY_VERSION "4.8.7")
set(LIBRARY_VERSION "4.8.8")
set(LIBRARY_SOVERSION "4")

# Copy library files to a lib sub-directory
set(CMAKE_LIBRARY_OUTPUT_DIRECTORY "${LIB_BINARY_DIR}/lib")

set(CMAKE_THREAD_PREFER_PTHREADS ON)
set(THREADS_PREFER_PTHREAD_FLAG ON)

@ -33,7 +21,87 @@ set(WITH_ZLIB OFF)
set(WITH_SYMBOL_VERSIONING OFF)
set(WITH_SERVER ON)

include(IncludeSources.cmake)
set(libssh_SRCS
${LIB_SOURCE_DIR}/src/agent.c
${LIB_SOURCE_DIR}/src/auth.c
${LIB_SOURCE_DIR}/src/base64.c
${LIB_SOURCE_DIR}/src/bignum.c
${LIB_SOURCE_DIR}/src/buffer.c
${LIB_SOURCE_DIR}/src/callbacks.c
${LIB_SOURCE_DIR}/src/channels.c
${LIB_SOURCE_DIR}/src/client.c
${LIB_SOURCE_DIR}/src/config.c
${LIB_SOURCE_DIR}/src/connect.c
${LIB_SOURCE_DIR}/src/connector.c
${LIB_SOURCE_DIR}/src/curve25519.c
${LIB_SOURCE_DIR}/src/dh.c
${LIB_SOURCE_DIR}/src/ecdh.c
${LIB_SOURCE_DIR}/src/error.c
${LIB_SOURCE_DIR}/src/getpass.c
${LIB_SOURCE_DIR}/src/init.c
${LIB_SOURCE_DIR}/src/kdf.c
${LIB_SOURCE_DIR}/src/kex.c
${LIB_SOURCE_DIR}/src/known_hosts.c
${LIB_SOURCE_DIR}/src/knownhosts.c
${LIB_SOURCE_DIR}/src/legacy.c
${LIB_SOURCE_DIR}/src/log.c
${LIB_SOURCE_DIR}/src/match.c
${LIB_SOURCE_DIR}/src/messages.c
${LIB_SOURCE_DIR}/src/misc.c
${LIB_SOURCE_DIR}/src/options.c
${LIB_SOURCE_DIR}/src/packet.c
${LIB_SOURCE_DIR}/src/packet_cb.c
${LIB_SOURCE_DIR}/src/packet_crypt.c
${LIB_SOURCE_DIR}/src/pcap.c
${LIB_SOURCE_DIR}/src/pki.c
${LIB_SOURCE_DIR}/src/pki_container_openssh.c
${LIB_SOURCE_DIR}/src/poll.c
${LIB_SOURCE_DIR}/src/session.c
${LIB_SOURCE_DIR}/src/scp.c
${LIB_SOURCE_DIR}/src/socket.c
${LIB_SOURCE_DIR}/src/string.c
${LIB_SOURCE_DIR}/src/threads.c
${LIB_SOURCE_DIR}/src/wrapper.c
${LIB_SOURCE_DIR}/src/external/bcrypt_pbkdf.c
${LIB_SOURCE_DIR}/src/external/blowfish.c
${LIB_SOURCE_DIR}/src/external/chacha.c
${LIB_SOURCE_DIR}/src/external/poly1305.c
${LIB_SOURCE_DIR}/src/chachapoly.c
${LIB_SOURCE_DIR}/src/config_parser.c
${LIB_SOURCE_DIR}/src/token.c
${LIB_SOURCE_DIR}/src/pki_ed25519_common.c

${LIB_SOURCE_DIR}/src/threads/noop.c
${LIB_SOURCE_DIR}/src/threads/pthread.c

# LIBCRYPT specific
${libssh_SRCS}
${LIB_SOURCE_DIR}/src/threads/libcrypto.c
${LIB_SOURCE_DIR}/src/pki_crypto.c
${LIB_SOURCE_DIR}/src/ecdh_crypto.c
${LIB_SOURCE_DIR}/src/libcrypto.c
${LIB_SOURCE_DIR}/src/dh_crypto.c

${LIB_SOURCE_DIR}/src/options.c
${LIB_SOURCE_DIR}/src/server.c
${LIB_SOURCE_DIR}/src/bind.c
${LIB_SOURCE_DIR}/src/bind_config.c
)

if (NOT (ENABLE_OPENSSL OR ENABLE_OPENSSL_DYNAMIC))
add_compile_definitions(USE_BORINGSSL=1)
endif()

configure_file(${LIB_SOURCE_DIR}/include/libssh/libssh_version.h.cmake ${LIB_BINARY_DIR}/include/libssh/libssh_version.h @ONLY)

add_library(_ssh STATIC ${libssh_SRCS})
add_library(ch_contrib::ssh ALIAS _ssh)

target_link_libraries(_ssh PRIVATE OpenSSL::Crypto)

target_include_directories(_ssh PUBLIC "${LIB_SOURCE_DIR}/include" "${LIB_BINARY_DIR}/include")

# These headers need to be generated using the native build system on each platform.
if (OS_LINUX)
if (ARCH_AMD64)
if (USE_MUSL)
@ -63,7 +131,3 @@ elseif (OS_FREEBSD)
else ()
message(FATAL_ERROR "Platform is not supported")
endif()

configure_file(${LIB_SOURCE_DIR}/include/libssh/libssh_version.h.cmake
${LIB_BINARY_DIR}/include/libssh/libssh_version.h
@ONLY)

@ -1,98 +0,0 @@
set(LIBSSH_LINK_LIBRARIES
${LIBSSH_LINK_LIBRARIES}
OpenSSL::Crypto
)

set(libssh_SRCS
${LIB_SOURCE_DIR}/src/agent.c
${LIB_SOURCE_DIR}/src/auth.c
${LIB_SOURCE_DIR}/src/base64.c
${LIB_SOURCE_DIR}/src/bignum.c
${LIB_SOURCE_DIR}/src/buffer.c
${LIB_SOURCE_DIR}/src/callbacks.c
${LIB_SOURCE_DIR}/src/channels.c
${LIB_SOURCE_DIR}/src/client.c
${LIB_SOURCE_DIR}/src/config.c
${LIB_SOURCE_DIR}/src/connect.c
${LIB_SOURCE_DIR}/src/connector.c
${LIB_SOURCE_DIR}/src/curve25519.c
${LIB_SOURCE_DIR}/src/dh.c
${LIB_SOURCE_DIR}/src/ecdh.c
${LIB_SOURCE_DIR}/src/error.c
${LIB_SOURCE_DIR}/src/getpass.c
${LIB_SOURCE_DIR}/src/init.c
${LIB_SOURCE_DIR}/src/kdf.c
${LIB_SOURCE_DIR}/src/kex.c
${LIB_SOURCE_DIR}/src/known_hosts.c
${LIB_SOURCE_DIR}/src/knownhosts.c
${LIB_SOURCE_DIR}/src/legacy.c
${LIB_SOURCE_DIR}/src/log.c
${LIB_SOURCE_DIR}/src/match.c
${LIB_SOURCE_DIR}/src/messages.c
${LIB_SOURCE_DIR}/src/misc.c
${LIB_SOURCE_DIR}/src/options.c
${LIB_SOURCE_DIR}/src/packet.c
${LIB_SOURCE_DIR}/src/packet_cb.c
${LIB_SOURCE_DIR}/src/packet_crypt.c
${LIB_SOURCE_DIR}/src/pcap.c
${LIB_SOURCE_DIR}/src/pki.c
${LIB_SOURCE_DIR}/src/pki_container_openssh.c
${LIB_SOURCE_DIR}/src/poll.c
${LIB_SOURCE_DIR}/src/session.c
${LIB_SOURCE_DIR}/src/scp.c
${LIB_SOURCE_DIR}/src/socket.c
${LIB_SOURCE_DIR}/src/string.c
${LIB_SOURCE_DIR}/src/threads.c
${LIB_SOURCE_DIR}/src/wrapper.c
${LIB_SOURCE_DIR}/src/external/bcrypt_pbkdf.c
${LIB_SOURCE_DIR}/src/external/blowfish.c
${LIB_SOURCE_DIR}/src/external/chacha.c
${LIB_SOURCE_DIR}/src/external/poly1305.c
${LIB_SOURCE_DIR}/src/chachapoly.c
${LIB_SOURCE_DIR}/src/config_parser.c
${LIB_SOURCE_DIR}/src/token.c
${LIB_SOURCE_DIR}/src/pki_ed25519_common.c
)

set(libssh_SRCS
${libssh_SRCS}
${LIB_SOURCE_DIR}/src/threads/noop.c
${LIB_SOURCE_DIR}/src/threads/pthread.c
)

# LIBCRYPT specific
set(libssh_SRCS
${libssh_SRCS}
${LIB_SOURCE_DIR}/src/threads/libcrypto.c
${LIB_SOURCE_DIR}/src/pki_crypto.c
${LIB_SOURCE_DIR}/src/ecdh_crypto.c
${LIB_SOURCE_DIR}/src/libcrypto.c
${LIB_SOURCE_DIR}/src/dh_crypto.c
)

if (NOT (ENABLE_OPENSSL OR ENABLE_OPENSSL_DYNAMIC))
add_compile_definitions(USE_BORINGSSL=1)
endif()

set(libssh_SRCS
${libssh_SRCS}
${LIB_SOURCE_DIR}/src/options.c
${LIB_SOURCE_DIR}/src/server.c
${LIB_SOURCE_DIR}/src/bind.c
${LIB_SOURCE_DIR}/src/bind_config.c
)


add_library(_ssh STATIC ${libssh_SRCS})

target_include_directories(_ssh PRIVATE ${LIB_BINARY_DIR})
target_include_directories(_ssh PUBLIC "${LIB_SOURCE_DIR}/include" "${LIB_BINARY_DIR}/include")
target_link_libraries(_ssh
PRIVATE ${LIBSSH_LINK_LIBRARIES})

add_library(ch_contrib::ssh ALIAS _ssh)

target_compile_options(_ssh
PRIVATE
${DEFAULT_C_COMPILE_FLAGS}
-D_GNU_SOURCE)

@ -1,6 +1,10 @@
#include <libunwind.h>

/// On MacOS this function will be replaced with a dynamic symbol
/// from the system library.
#if !defined(OS_DARWIN)
int backtrace(void ** buffer, int size)
{
return unw_backtrace(buffer, size);
}
#endif

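Note: for context, backtrace() here follows the usual glibc-style contract (the same signature glibc declares in <execinfo.h>): it fills the caller-provided array with at most `size` return addresses and returns how many it captured. A minimal caller sketch; the frame capacity of 64 is arbitrary:

    #include <cstdio>

    // Declaration matching the wrapper above.
    extern "C" int backtrace(void ** buffer, int size);

    void dumpStack()
    {
        void * frames[64];                    // arbitrary capacity
        const int n = backtrace(frames, 64);  // number of frames actually captured
        for (int i = 0; i < n; ++i)
            std::printf("#%d %p\n", i, frames[i]);
    }
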
contrib/liburing (vendored)
@ -1 +1 @@
Subproject commit f5a48392c4ea33f222cbebeb2e2fc31620162949
Subproject commit f4e42a515cd78c8c9cac2be14222834be5f8df2b

contrib/libuv (vendored)
@ -1 +1 @@
Subproject commit 3a85b2eb3d83f369b8a8cafd329d7e9dc28f60cf
Subproject commit 4482964660c77eec1166cd7d14fb915e3dbd774a

contrib/llvm-project (vendored)
@ -1 +1 @@
Subproject commit 2568a7cd1297c7c3044b0f3cc0c23a6f6444d856
Subproject commit d2142eed98046a47ff7112e3cc1e197c8a5cd80f

@ -1,5 +1,6 @@
if (APPLE OR SANITIZE STREQUAL "undefined" OR SANITIZE STREQUAL "memory")
# llvm-tblgen, that is used during LLVM build, doesn't work with UBSan.
if (APPLE OR SANITIZE STREQUAL "memory")
# llvm-tblgen, that is used during LLVM build, will throw MSAN errors when running (breaking the build)
# TODO: Retest when upgrading LLVM or build only llvm-tblgen without sanitizers
set (ENABLE_EMBEDDED_COMPILER_DEFAULT OFF)
set (ENABLE_DWARF_PARSER_DEFAULT OFF)
else()

contrib/lz4 (vendored)
@ -1 +1 @@
Subproject commit 92ebf1870b9acbefc0e7970409a181954a10ff40
Subproject commit ce45a9dbdb059511a3e9576b19db3e7f1a4f172e

contrib/qpl (vendored)
@ -1 +1 @@
Subproject commit a61bdd845fd7ca363b2bcc55454aa520dfcd8298
Subproject commit d4715e0e79896b85612158e135ee1a85f3b3e04d

Some files were not shown because too many files have changed in this diff.