Mirror of https://github.com/ClickHouse/ClickHouse.git (synced 2024-11-24 08:32:02 +00:00)

Merge branch 'master' into from-select

commit dfea1a7be7
.clang-tidy

@@ -22,6 +22,8 @@ Checks: '*,
     -bugprone-implicit-widening-of-multiplication-result,
     -bugprone-narrowing-conversions,
     -bugprone-not-null-terminated-result,
+    -bugprone-unchecked-optional-access,
+    -bugprone-assignment-in-if-condition,
     -cert-dcl16-c,
     -cert-err58-cpp,
@@ -103,6 +105,7 @@ Checks: '*,
     -misc-no-recursion,
     -misc-non-private-member-variables-in-classes,
+    -misc-const-correctness,
     -modernize-avoid-c-arrays,
     -modernize-concat-nested-namespaces,
@@ -114,6 +117,7 @@ Checks: '*,
     -modernize-use-nodiscard,
     -modernize-use-override,
     -modernize-use-trailing-return-type,
+    -modernize-macro-to-enum,
     -performance-inefficient-string-concatenation,
     -performance-no-int-to-ptr,
@@ -135,6 +139,7 @@ Checks: '*,
     -readability-suspicious-call-argument,
     -readability-uppercase-literal-suffix,
     -readability-use-anyofallof,
+    -readability-simplify-boolean-expr,
     -zirkon-*,
 '
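Background, not part of the diff: the Checks string above is read by clang-tidy from the nearest .clang-tidy file, so the newly added entries simply disable those checks project-wide. A minimal sketch of how the updated list takes effect, assuming a hypothetical source file and a compile_commands.json in build/:

    clang-tidy -p build/ src/Common/Example.cpp   # picks up .clang-tidy from the repository root
    clang-tidy --list-checks                      # prints the effective check list for the current directory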
.git-blame-ignore-revs (new file, 15 lines)

@@ -0,0 +1,15 @@
+# This is a file that can be used by git-blame to ignore some revisions.
+# (git 2.23+, released in August 2019)
+#
+# Can be configured as follow:
+#
+# $ git config blame.ignoreRevsFile .git-blame-ignore-revs
+#
+# For more information you can look at git-blame(1) man page.
+
+# Changed tabs to spaces in code [#CLICKHOUSE-3]
+137ad95929ee016cc6d3c03bccb5586941c163ff
+
+# dbms/ → src/
+# (though it is unlikely that you will see it in blame)
+06446b4f08a142d6f1bc30664c47ded88ab51782
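Background, not part of the diff: once blame.ignoreRevsFile is set as the file's header describes, git blame skips the listed revisions automatically; the same effect is also available per invocation. A brief sketch, with an example path:

    git config blame.ignoreRevsFile .git-blame-ignore-revs                    # persistent, repo-local setting
    git blame src/Core/Settings.h                                             # listed revisions are skipped
    git blame --ignore-revs-file=.git-blame-ignore-revs src/Core/Settings.h   # one-off alternative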
.github/workflows/backport_branches.yml (vendored, 90 lines changed)

All 15 hunks in this file make the same change to a job's Cleanup step, replacing the shellcheck-suppressed command substitutions with xargs pipelines:

       - name: Cleanup
         if: always()
         run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
+          docker ps --quiet | xargs --no-run-if-empty docker kill ||:
+          docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
           sudo rm -fr "$TEMP_PATH"

In the builder jobs the trailing context line is sudo rm -fr "$TEMP_PATH" "$CACHES_PATH" instead.

Hunks: @@ -112,10 +112,8 @@, @@ -162,10 +160,8 @@, @@ -209,10 +205,8 @@, @@ -254,10 +248,8 @@, @@ -299,10 +291,8 @@, @@ -344,10 +334,8 @@, @@ -391,10 +379,8 @@, @@ -438,10 +424,8 @@, @@ -468,10 +452,8 @@, @@ -514,10 +496,8 @@, @@ -554,10 +534,8 @@, @@ -594,10 +572,8 @@, @@ -634,10 +610,8 @@, @@ -677,10 +651,8 @@, @@ -716,10 +688,8 @@. These cover one Cleanup step per job across the sections visible in the capture: the ordinary builds (BuilderDeb*), the Darwin special builds, the Docker images and build-report jobs, and the functional stateless/stateful, stress and integration test jobs, ending just before FinishCheck.
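Background, not part of the diff: ShellCheck warning SC2046 flags the unquoted command substitution in docker kill $(docker ps -q); when no containers are running, that form also degenerates to a bare docker kill, which fails (hence the ||: guards). Piping the IDs through xargs --no-run-if-empty needs no suppression comment and runs nothing when the list is empty. A minimal sketch of the two patterns:

    # old pattern: needs a shellcheck suppression and misbehaves when no containers exist
    # shellcheck disable=SC2046
    docker kill $(docker ps -q) ||:

    # new pattern: lint-clean; xargs skips docker kill/rm entirely when the list is empty
    docker ps --quiet | xargs --no-run-if-empty docker kill ||:
    docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||: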
.github/workflows/cherry_pick.yml (vendored, 6 lines changed)

A single hunk, @@ -40,8 +40,6 @@ jobs:, applies the same Cleanup-step replacement shown above; the step ends with sudo rm -fr "$TEMP_PATH".
.github/workflows/docs_check.yml (vendored, 12 lines changed)

Two hunks, @@ -125,10 +125,8 @@ jobs: and @@ -158,10 +156,8 @@ jobs:, apply the same Cleanup-step replacement shown above, in the jobs preceding DocsCheck and FinishCheck respectively.
.github/workflows/docs_release.yml (vendored, 6 lines changed)

A single hunk, @@ -116,8 +116,6 @@ jobs:, applies the same Cleanup-step replacement shown above.
.github/workflows/jepsen.yml (vendored, 6 lines changed)

A single hunk, @@ -36,8 +36,6 @@ jobs:, applies the same Cleanup-step replacement shown above.
.github/workflows/master.yml (vendored, 501 lines changed)

Dozens of hunks, one per job Cleanup step, apply the same replacement shown above across the whole workflow; the surrounding context shows the jobs and sections involved: CompatibilityCheck, SharedBuildSmokeTest, the ordinary and special builds (BuilderDeb* and BuilderBin*, including BuilderBinClangTidy, BuilderBinDarwin, BuilderBinAarch64, BuilderBinFreeBSD, BuilderBinDarwinAarch64, BuilderBinPPC64 and BuilderBinAmd64SSE2), the Docker images and build-report jobs, the functional stateless and stateful tests, the stress tests, the integration tests, and the AST fuzzer jobs. The capture cuts off inside the hunk at @@ -2656,10 +2487,8 @@ jobs:.

One hunk also deletes a commented-out job (indentation of the removed comment block is reconstructed, since the capture stripped leading whitespace):

@@ -320,56 +310,9 @@ jobs:
       - name: Cleanup
         if: always()
         run: |
-          # shellcheck disable=SC2046
-          docker kill $(docker ps -q) ||:
-          # shellcheck disable=SC2046
-          docker rm -f $(docker ps -a -q) ||:
+          docker ps --quiet | xargs --no-run-if-empty docker kill ||:
+          docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
           sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
-# BuilderBinGCC:
-#   needs: [DockerHubPush]
-#   runs-on: [self-hosted, builder]
-#   steps:
-#     - name: Set envs
-#       run: |
-#         cat >> "$GITHUB_ENV" << 'EOF'
-#         TEMP_PATH=${{runner.temp}}/build_check
-#         IMAGES_PATH=${{runner.temp}}/images_path
-#         REPO_COPY=${{runner.temp}}/build_check/ClickHouse
-#         CACHES_PATH=${{runner.temp}}/../ccaches
-#         BUILD_NAME=binary_gcc
-#         EOF
-#     - name: Download changed images
-#       uses: actions/download-artifact@v2
-#       with:
-#         name: changed_images
-#         path: ${{ env.IMAGES_PATH }}
-#     - name: Clear repository
-#       run: |
-#         sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
-#     - name: Check out repository code
-#       uses: actions/checkout@v2
-#     - name: Build
-#       run: |
-#         git -C "$GITHUB_WORKSPACE" submodule sync --recursive
-#         git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --recursive --init --jobs=10
-#         sudo rm -fr "$TEMP_PATH"
-#         mkdir -p "$TEMP_PATH"
-#         cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
-#         cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME"
-#     - name: Upload build URLs to artifacts
-#       if: ${{ success() || failure() }}
-#       uses: actions/upload-artifact@v2
-#       with:
-#         name: ${{ env.BUILD_URLS }}
-#         path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json
-#     - name: Cleanup
-#       if: always()
-#       run: |
-#         # shellcheck disable=SC2046
-#         docker kill $(docker ps -q) ||:
-#         # shellcheck disable=SC2046
-#         docker rm -f $(docker ps -a -q) ||:
-#         sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
   BuilderDebAsan:
     needs: [DockerHubPush]
     runs-on: [self-hosted, builder]
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
ASTFuzzerTestUBSan:
|
ASTFuzzerTestUBSan:
|
||||||
needs: [BuilderDebUBsan]
|
needs: [BuilderDebUBsan]
|
||||||
@ -2692,10 +2521,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
ASTFuzzerTestMSan:
|
ASTFuzzerTestMSan:
|
||||||
needs: [BuilderDebMsan]
|
needs: [BuilderDebMsan]
|
||||||
@ -2728,10 +2555,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
ASTFuzzerTestDebug:
|
ASTFuzzerTestDebug:
|
||||||
needs: [BuilderDebDebug]
|
needs: [BuilderDebDebug]
|
||||||
@ -2764,10 +2589,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
#############################################################################################
|
#############################################################################################
|
||||||
#################################### UNIT TESTS #############################################
|
#################################### UNIT TESTS #############################################
|
||||||
@ -2803,10 +2626,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
UnitTestsReleaseClang:
|
UnitTestsReleaseClang:
|
||||||
needs: [BuilderBinRelease]
|
needs: [BuilderBinRelease]
|
||||||
@ -2839,10 +2660,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
# UnitTestsReleaseGCC:
|
# UnitTestsReleaseGCC:
|
||||||
# needs: [BuilderBinGCC]
|
# needs: [BuilderBinGCC]
|
||||||
@ -2875,10 +2694,8 @@ jobs:
|
|||||||
# - name: Cleanup
|
# - name: Cleanup
|
||||||
# if: always()
|
# if: always()
|
||||||
# run: |
|
# run: |
|
||||||
# # shellcheck disable=SC2046
|
# docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
# docker kill $(docker ps -q) ||:
|
# docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# # shellcheck disable=SC2046
|
|
||||||
# docker rm -f $(docker ps -a -q) ||:
|
|
||||||
# sudo rm -fr "$TEMP_PATH"
|
# sudo rm -fr "$TEMP_PATH"
|
||||||
UnitTestsTsan:
|
UnitTestsTsan:
|
||||||
needs: [BuilderDebTsan]
|
needs: [BuilderDebTsan]
|
||||||
@ -2911,10 +2728,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
UnitTestsMsan:
|
UnitTestsMsan:
|
||||||
needs: [BuilderDebMsan]
|
needs: [BuilderDebMsan]
|
||||||
@ -2947,10 +2762,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
UnitTestsUBsan:
|
UnitTestsUBsan:
|
||||||
needs: [BuilderDebUBsan]
|
needs: [BuilderDebUBsan]
|
||||||
@ -2983,10 +2796,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
#############################################################################################
|
#############################################################################################
|
||||||
#################################### PERFORMANCE TESTS ######################################
|
#################################### PERFORMANCE TESTS ######################################
|
||||||
@ -3024,10 +2835,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
PerformanceComparisonX86-1:
|
PerformanceComparisonX86-1:
|
||||||
needs: [BuilderDebRelease]
|
needs: [BuilderDebRelease]
|
||||||
@ -3062,10 +2871,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
PerformanceComparisonX86-2:
|
PerformanceComparisonX86-2:
|
||||||
needs: [BuilderDebRelease]
|
needs: [BuilderDebRelease]
|
||||||
@ -3100,10 +2907,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
PerformanceComparisonX86-3:
|
PerformanceComparisonX86-3:
|
||||||
needs: [BuilderDebRelease]
|
needs: [BuilderDebRelease]
|
||||||
@ -3138,10 +2943,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
FinishCheck:
|
FinishCheck:
|
||||||
needs:
|
needs:
|
||||||
|
6
.github/workflows/nightly.yml
vendored
6
.github/workflows/nightly.yml
vendored
@ -119,8 +119,6 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
|
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
|
||||||
|
522
.github/workflows/pull_request.yml
vendored
522
.github/workflows/pull_request.yml
vendored
File diff suppressed because it is too large
Load Diff
13
.github/workflows/release.yml
vendored
13
.github/workflows/release.yml
vendored
@ -30,10 +30,11 @@ jobs:
|
|||||||
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
|
cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
|
||||||
cd "$REPO_COPY"
|
cd "$REPO_COPY"
|
||||||
# Download and push packages to artifactory
|
# Download and push packages to artifactory
|
||||||
python3 ./tests/ci/push_to_artifactory.py --release "${{ github.ref }}" \
|
python3 ./tests/ci/push_to_artifactory.py --release '${{ github.ref }}' \
|
||||||
--commit '${{ github.sha }}' --artifactory-url "${{ secrets.JFROG_ARTIFACTORY_URL }}" --all
|
--commit '${{ github.sha }}' --artifactory-url '${{ secrets.JFROG_ARTIFACTORY_URL }}' --all
|
||||||
# Download macos binaries to ${{runner.temp}}/download_binary
|
# Download macos binaries to ${{runner.temp}}/download_binary
|
||||||
python3 ./tests/ci/download_binary.py binary_darwin binary_darwin_aarch64
|
python3 ./tests/ci/download_binary.py --version '${{ github.ref }}' \
|
||||||
|
--commit '${{ github.sha }}' binary_darwin binary_darwin_aarch64
|
||||||
mv '${{runner.temp}}/download_binary/'clickhouse-* '${{runner.temp}}/push_to_artifactory'
|
mv '${{runner.temp}}/download_binary/'clickhouse-* '${{runner.temp}}/push_to_artifactory'
|
||||||
- name: Upload packages to release assets
|
- name: Upload packages to release assets
|
||||||
uses: svenstaro/upload-release-action@v2
|
uses: svenstaro/upload-release-action@v2
|
||||||
@ -65,8 +66,6 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
|
288
.github/workflows/release_branches.yml
vendored
288
.github/workflows/release_branches.yml
vendored
@ -103,10 +103,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
#########################################################################################
|
#########################################################################################
|
||||||
#################################### ORDINARY BUILDS ####################################
|
#################################### ORDINARY BUILDS ####################################
|
||||||
@ -153,10 +151,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
|
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
|
||||||
BuilderDebAarch64:
|
BuilderDebAarch64:
|
||||||
needs: [DockerHubPush]
|
needs: [DockerHubPush]
|
||||||
@ -196,10 +192,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
|
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
|
||||||
BuilderDebAsan:
|
BuilderDebAsan:
|
||||||
needs: [DockerHubPush]
|
needs: [DockerHubPush]
|
||||||
@ -241,10 +235,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
|
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
|
||||||
BuilderDebUBsan:
|
BuilderDebUBsan:
|
||||||
needs: [DockerHubPush]
|
needs: [DockerHubPush]
|
||||||
@ -286,10 +278,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
|
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
|
||||||
BuilderDebTsan:
|
BuilderDebTsan:
|
||||||
needs: [DockerHubPush]
|
needs: [DockerHubPush]
|
||||||
@ -331,10 +321,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
|
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
|
||||||
BuilderDebMsan:
|
BuilderDebMsan:
|
||||||
needs: [DockerHubPush]
|
needs: [DockerHubPush]
|
||||||
@ -376,10 +364,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
|
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
|
||||||
BuilderDebDebug:
|
BuilderDebDebug:
|
||||||
needs: [DockerHubPush]
|
needs: [DockerHubPush]
|
||||||
@ -421,10 +407,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
|
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
|
||||||
BuilderBinDarwin:
|
BuilderBinDarwin:
|
||||||
needs: [DockerHubPush]
|
needs: [DockerHubPush]
|
||||||
@ -468,10 +452,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
|
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
|
||||||
BuilderBinDarwinAarch64:
|
BuilderBinDarwinAarch64:
|
||||||
needs: [DockerHubPush]
|
needs: [DockerHubPush]
|
||||||
@ -515,10 +497,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
|
sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
|
||||||
############################################################################################
|
############################################################################################
|
||||||
##################################### Docker images #######################################
|
##################################### Docker images #######################################
|
||||||
@ -545,10 +525,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
############################################################################################
|
############################################################################################
|
||||||
##################################### BUILD REPORTER #######################################
|
##################################### BUILD REPORTER #######################################
|
||||||
@ -594,10 +572,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
BuilderSpecialReport:
|
BuilderSpecialReport:
|
||||||
needs:
|
needs:
|
||||||
@ -634,10 +610,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
##############################################################################################
|
##############################################################################################
|
||||||
########################### FUNCTIONAl STATELESS TESTS #######################################
|
########################### FUNCTIONAl STATELESS TESTS #######################################
|
||||||
@ -674,10 +648,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
FunctionalStatelessTestAarch64:
|
FunctionalStatelessTestAarch64:
|
||||||
needs: [BuilderDebAarch64]
|
needs: [BuilderDebAarch64]
|
||||||
@ -711,10 +683,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
FunctionalStatelessTestAsan0:
|
FunctionalStatelessTestAsan0:
|
||||||
needs: [BuilderDebAsan]
|
needs: [BuilderDebAsan]
|
||||||
@ -750,10 +720,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
FunctionalStatelessTestAsan1:
|
FunctionalStatelessTestAsan1:
|
||||||
needs: [BuilderDebAsan]
|
needs: [BuilderDebAsan]
|
||||||
@ -789,10 +757,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
FunctionalStatelessTestTsan0:
|
FunctionalStatelessTestTsan0:
|
||||||
needs: [BuilderDebTsan]
|
needs: [BuilderDebTsan]
|
||||||
@ -828,10 +794,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
FunctionalStatelessTestTsan1:
|
FunctionalStatelessTestTsan1:
|
||||||
needs: [BuilderDebTsan]
|
needs: [BuilderDebTsan]
|
||||||
@ -867,10 +831,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
FunctionalStatelessTestTsan2:
|
FunctionalStatelessTestTsan2:
|
||||||
needs: [BuilderDebTsan]
|
needs: [BuilderDebTsan]
|
||||||
@ -906,10 +868,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
FunctionalStatelessTestUBsan:
|
FunctionalStatelessTestUBsan:
|
||||||
needs: [BuilderDebUBsan]
|
needs: [BuilderDebUBsan]
|
||||||
@ -943,10 +903,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
FunctionalStatelessTestMsan0:
|
FunctionalStatelessTestMsan0:
|
||||||
needs: [BuilderDebMsan]
|
needs: [BuilderDebMsan]
|
||||||
@ -982,10 +940,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
FunctionalStatelessTestMsan1:
|
FunctionalStatelessTestMsan1:
|
||||||
needs: [BuilderDebMsan]
|
needs: [BuilderDebMsan]
|
||||||
@ -1021,10 +977,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
FunctionalStatelessTestMsan2:
|
FunctionalStatelessTestMsan2:
|
||||||
needs: [BuilderDebMsan]
|
needs: [BuilderDebMsan]
|
||||||
@ -1060,10 +1014,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
FunctionalStatelessTestDebug0:
|
FunctionalStatelessTestDebug0:
|
||||||
needs: [BuilderDebDebug]
|
needs: [BuilderDebDebug]
|
||||||
@ -1099,10 +1051,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
FunctionalStatelessTestDebug1:
|
FunctionalStatelessTestDebug1:
|
||||||
needs: [BuilderDebDebug]
|
needs: [BuilderDebDebug]
|
||||||
@ -1138,10 +1088,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
FunctionalStatelessTestDebug2:
|
FunctionalStatelessTestDebug2:
|
||||||
needs: [BuilderDebDebug]
|
needs: [BuilderDebDebug]
|
||||||
@ -1177,10 +1125,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
##############################################################################################
|
##############################################################################################
|
||||||
############################ FUNCTIONAl STATEFUL TESTS #######################################
|
############################ FUNCTIONAl STATEFUL TESTS #######################################
|
||||||
@ -1217,10 +1163,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
FunctionalStatefulTestAarch64:
|
FunctionalStatefulTestAarch64:
|
||||||
needs: [BuilderDebAarch64]
|
needs: [BuilderDebAarch64]
|
||||||
@ -1254,10 +1198,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
FunctionalStatefulTestAsan:
|
FunctionalStatefulTestAsan:
|
||||||
needs: [BuilderDebAsan]
|
needs: [BuilderDebAsan]
|
||||||
@ -1291,10 +1233,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
FunctionalStatefulTestTsan:
|
FunctionalStatefulTestTsan:
|
||||||
needs: [BuilderDebTsan]
|
needs: [BuilderDebTsan]
|
||||||
@ -1328,10 +1268,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
FunctionalStatefulTestMsan:
|
FunctionalStatefulTestMsan:
|
||||||
needs: [BuilderDebMsan]
|
needs: [BuilderDebMsan]
|
||||||
@ -1365,10 +1303,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
FunctionalStatefulTestUBsan:
|
FunctionalStatefulTestUBsan:
|
||||||
needs: [BuilderDebUBsan]
|
needs: [BuilderDebUBsan]
|
||||||
@ -1402,10 +1338,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
FunctionalStatefulTestDebug:
|
FunctionalStatefulTestDebug:
|
||||||
needs: [BuilderDebDebug]
|
needs: [BuilderDebDebug]
|
||||||
@ -1439,10 +1373,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
##############################################################################################
|
##############################################################################################
|
||||||
######################################### STRESS TESTS #######################################
|
######################################### STRESS TESTS #######################################
|
||||||
@ -1478,10 +1410,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
StressTestTsan:
|
StressTestTsan:
|
||||||
needs: [BuilderDebTsan]
|
needs: [BuilderDebTsan]
|
||||||
@ -1518,10 +1448,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
StressTestMsan:
|
StressTestMsan:
|
||||||
needs: [BuilderDebMsan]
|
needs: [BuilderDebMsan]
|
||||||
@ -1554,10 +1482,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
StressTestUBsan:
|
StressTestUBsan:
|
||||||
needs: [BuilderDebUBsan]
|
needs: [BuilderDebUBsan]
|
||||||
@ -1590,10 +1516,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
StressTestDebug:
|
StressTestDebug:
|
||||||
needs: [BuilderDebDebug]
|
needs: [BuilderDebDebug]
|
||||||
@ -1626,10 +1550,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
#############################################################################################
|
#############################################################################################
|
||||||
############################# INTEGRATION TESTS #############################################
|
############################# INTEGRATION TESTS #############################################
|
||||||
@ -1667,10 +1589,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
IntegrationTestsAsan1:
|
IntegrationTestsAsan1:
|
||||||
needs: [BuilderDebAsan]
|
needs: [BuilderDebAsan]
|
||||||
@ -1705,10 +1625,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
IntegrationTestsAsan2:
|
IntegrationTestsAsan2:
|
||||||
needs: [BuilderDebAsan]
|
needs: [BuilderDebAsan]
|
||||||
@ -1743,10 +1661,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
IntegrationTestsTsan0:
|
IntegrationTestsTsan0:
|
||||||
needs: [BuilderDebTsan]
|
needs: [BuilderDebTsan]
|
||||||
@ -1781,10 +1697,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
IntegrationTestsTsan1:
|
IntegrationTestsTsan1:
|
||||||
needs: [BuilderDebTsan]
|
needs: [BuilderDebTsan]
|
||||||
@ -1819,10 +1733,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
IntegrationTestsTsan2:
|
IntegrationTestsTsan2:
|
||||||
needs: [BuilderDebTsan]
|
needs: [BuilderDebTsan]
|
||||||
@ -1857,10 +1769,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
IntegrationTestsTsan3:
|
IntegrationTestsTsan3:
|
||||||
needs: [BuilderDebTsan]
|
needs: [BuilderDebTsan]
|
||||||
@ -1895,10 +1805,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
IntegrationTestsRelease0:
|
IntegrationTestsRelease0:
|
||||||
needs: [BuilderDebRelease]
|
needs: [BuilderDebRelease]
|
||||||
@ -1933,10 +1841,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
IntegrationTestsRelease1:
|
IntegrationTestsRelease1:
|
||||||
needs: [BuilderDebRelease]
|
needs: [BuilderDebRelease]
|
||||||
@ -1971,10 +1877,8 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
FinishCheck:
|
FinishCheck:
|
||||||
needs:
|
needs:
|
||||||
|
1
.github/workflows/tags_stable.yml
vendored
1
.github/workflows/tags_stable.yml
vendored
@ -43,6 +43,7 @@ jobs:
|
|||||||
GITHUB_TOKEN: ${{ secrets.ROBOT_CLICKHOUSE_COMMIT_TOKEN }}
|
GITHUB_TOKEN: ${{ secrets.ROBOT_CLICKHOUSE_COMMIT_TOKEN }}
|
||||||
run: |
|
run: |
|
||||||
./utils/list-versions/list-versions.sh > ./utils/list-versions/version_date.tsv
|
./utils/list-versions/list-versions.sh > ./utils/list-versions/version_date.tsv
|
||||||
|
./utils/list-versions/update-docker-version.sh
|
||||||
GID=$(id -g "${UID}")
|
GID=$(id -g "${UID}")
|
||||||
docker run -u "${UID}:${GID}" -e PYTHONUNBUFFERED=1 \
|
docker run -u "${UID}:${GID}" -e PYTHONUNBUFFERED=1 \
|
||||||
--volume="${GITHUB_WORKSPACE}:/ClickHouse" clickhouse/style-test \
|
--volume="${GITHUB_WORKSPACE}:/ClickHouse" clickhouse/style-test \
|
||||||
|
6
.github/workflows/woboq.yml
vendored
6
.github/workflows/woboq.yml
vendored
@ -37,8 +37,6 @@ jobs:
|
|||||||
- name: Cleanup
|
- name: Cleanup
|
||||||
if: always()
|
if: always()
|
||||||
run: |
|
run: |
|
||||||
# shellcheck disable=SC2046
|
docker ps --quiet | xargs --no-run-if-empty docker kill ||:
|
||||||
docker kill $(docker ps -q) ||:
|
docker ps --all --quiet | xargs --no-run-if-empty docker rm -f ||:
|
||||||
# shellcheck disable=SC2046
|
|
||||||
docker rm -f $(docker ps -a -q) ||:
|
|
||||||
sudo rm -fr "$TEMP_PATH"
|
sudo rm -fr "$TEMP_PATH"
|
||||||
|
4
.gitignore
vendored
4
.gitignore
vendored
@ -58,6 +58,10 @@ cmake_install.cmake
|
|||||||
CTestTestfile.cmake
|
CTestTestfile.cmake
|
||||||
*.a
|
*.a
|
||||||
*.o
|
*.o
|
||||||
|
*.so
|
||||||
|
*.dll
|
||||||
|
*.lib
|
||||||
|
*.dylib
|
||||||
cmake-build-*
|
cmake-build-*
|
||||||
|
|
||||||
# Python cache
|
# Python cache
|
||||||
|
@ -220,6 +220,35 @@ ReplxxLineReader::ReplxxLineReader(
|
|||||||
rx.bind_key(Replxx::KEY::control('W'), [this](char32_t code) { return rx.invoke(Replxx::ACTION::KILL_TO_WHITESPACE_ON_LEFT, code); });
|
rx.bind_key(Replxx::KEY::control('W'), [this](char32_t code) { return rx.invoke(Replxx::ACTION::KILL_TO_WHITESPACE_ON_LEFT, code); });
|
||||||
|
|
||||||
rx.bind_key(Replxx::KEY::meta('E'), [this](char32_t) { openEditor(); return Replxx::ACTION_RESULT::CONTINUE; });
|
rx.bind_key(Replxx::KEY::meta('E'), [this](char32_t) { openEditor(); return Replxx::ACTION_RESULT::CONTINUE; });
|
||||||
|
|
||||||
|
/// readline insert-comment
|
||||||
|
auto insert_comment_action = [this](char32_t code)
|
||||||
|
{
|
||||||
|
replxx::Replxx::State state(rx.get_state());
|
||||||
|
const char * line = state.text();
|
||||||
|
const char * line_end = line + strlen(line);
|
||||||
|
|
||||||
|
std::string commented_line;
|
||||||
|
if (std::find(line, line_end, '\n') != line_end)
|
||||||
|
{
|
||||||
|
/// If query has multiple lines, multiline comment is used over
|
||||||
|
/// commenting each line separately for easier uncomment (though
|
||||||
|
/// with invoking editor it is simpler to uncomment multiple lines)
|
||||||
|
///
|
||||||
|
/// Note, that using multiline comment is OK even with nested
|
||||||
|
/// comments, since nested comments are supported.
|
||||||
|
commented_line = fmt::format("/* {} */", state.text());
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
// In a simplest case use simple comment.
|
||||||
|
commented_line = fmt::format("-- {}", state.text());
|
||||||
|
}
|
||||||
|
rx.set_state(replxx::Replxx::State(commented_line.c_str(), commented_line.size()));
|
||||||
|
|
||||||
|
return rx.invoke(Replxx::ACTION::COMMIT_LINE, code);
|
||||||
|
};
|
||||||
|
rx.bind_key(Replxx::KEY::meta('#'), insert_comment_action);
|
||||||
}
|
}
|
||||||
|
|
||||||
ReplxxLineReader::~ReplxxLineReader()
|
ReplxxLineReader::~ReplxxLineReader()
|
||||||
|
@ -5,8 +5,9 @@
|
|||||||
#include <type_traits>
|
#include <type_traits>
|
||||||
|
|
||||||
|
|
||||||
/** \brief Returns value `from` converted to type `To` while retaining bit representation.
|
/** Returns value `from` converted to type `To` while retaining bit representation.
|
||||||
* `To` and `From` must satisfy `CopyConstructible`.
|
* `To` and `From` must satisfy `CopyConstructible`.
|
||||||
|
* In contrast to std::bit_cast can cast types of different width.
|
||||||
*/
|
*/
|
||||||
template <typename To, typename From>
|
template <typename To, typename From>
|
||||||
std::decay_t<To> bit_cast(const From & from)
|
std::decay_t<To> bit_cast(const From & from)
|
||||||
@ -15,13 +16,3 @@ std::decay_t<To> bit_cast(const From & from)
|
|||||||
memcpy(static_cast<void*>(&res), &from, std::min(sizeof(res), sizeof(from)));
|
memcpy(static_cast<void*>(&res), &from, std::min(sizeof(res), sizeof(from)));
|
||||||
return res;
|
return res;
|
||||||
}
|
}
|
||||||
|
|
||||||
/** \brief Returns value `from` converted to type `To` while retaining bit representation.
|
|
||||||
* `To` and `From` must satisfy `CopyConstructible`.
|
|
||||||
*/
|
|
||||||
template <typename To, typename From>
|
|
||||||
std::decay_t<To> safe_bit_cast(const From & from)
|
|
||||||
{
|
|
||||||
static_assert(sizeof(To) == sizeof(From), "bit cast on types of different width");
|
|
||||||
return bit_cast<To, From>(from);
|
|
||||||
}
|
|
||||||
|
@ -8,6 +8,7 @@
|
|||||||
* As a drawback, this only works if no dynamic object unloading happens after this point.
|
* As a drawback, this only works if no dynamic object unloading happens after this point.
|
||||||
* This function is thread-safe. You should call it to update cache after loading new shared libraries.
|
* This function is thread-safe. You should call it to update cache after loading new shared libraries.
|
||||||
* Otherwise exception handling from dlopened libraries won't work (will call std::terminate immediately).
|
* Otherwise exception handling from dlopened libraries won't work (will call std::terminate immediately).
|
||||||
|
* NOTE: dlopen is forbidden in our code.
|
||||||
*
|
*
|
||||||
* NOTE: It is disabled with Thread Sanitizer because TSan can only use original "dl_iterate_phdr" function.
|
* NOTE: It is disabled with Thread Sanitizer because TSan can only use original "dl_iterate_phdr" function.
|
||||||
*/
|
*/
|
||||||
|
@ -3,7 +3,7 @@ option (ENABLE_CLANG_TIDY "Use clang-tidy static analyzer" OFF)
|
|||||||
|
|
||||||
if (ENABLE_CLANG_TIDY)
|
if (ENABLE_CLANG_TIDY)
|
||||||
|
|
||||||
find_program (CLANG_TIDY_PATH NAMES "clang-tidy" "clang-tidy-14" "clang-tidy-13" "clang-tidy-12")
|
find_program (CLANG_TIDY_PATH NAMES "clang-tidy" "clang-tidy-15" "clang-tidy-14" "clang-tidy-13" "clang-tidy-12")
|
||||||
|
|
||||||
if (CLANG_TIDY_PATH)
|
if (CLANG_TIDY_PATH)
|
||||||
message(STATUS
|
message(STATUS
|
||||||
|
@ -45,6 +45,7 @@ if (CMAKE_CROSSCOMPILING)
|
|||||||
endif ()
|
endif ()
|
||||||
|
|
||||||
if (USE_MUSL)
|
if (USE_MUSL)
|
||||||
|
# use of undeclared identifier 'PTHREAD_RECURSIVE_MUTEX_INITIALIZER_NP'
|
||||||
set (ENABLE_SENTRY OFF CACHE INTERNAL "")
|
set (ENABLE_SENTRY OFF CACHE INTERNAL "")
|
||||||
set (ENABLE_ODBC OFF CACHE INTERNAL "")
|
set (ENABLE_ODBC OFF CACHE INTERNAL "")
|
||||||
set (ENABLE_GRPC OFF CACHE INTERNAL "")
|
set (ENABLE_GRPC OFF CACHE INTERNAL "")
|
||||||
|
@ -1,35 +1,95 @@
|
|||||||
# Choose to build static or shared library for c-ares.
|
set(LIBRARY_DIR "${ClickHouse_SOURCE_DIR}/contrib/c-ares")
|
||||||
|
|
||||||
|
# Generated from contrib/c-ares/src/lib/Makefile.inc
|
||||||
|
SET(SRCS
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares__addrinfo2hostent.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares__addrinfo_localhost.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares__close_sockets.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares__get_hostent.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares__parse_into_addrinfo.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares__readaddrinfo.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares__sortaddrinfo.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares__read_line.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares__timeval.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares_android.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares_cancel.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares_data.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares_destroy.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares_expand_name.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares_expand_string.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares_fds.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares_free_hostent.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares_free_string.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares_freeaddrinfo.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares_getaddrinfo.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares_getenv.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares_gethostbyaddr.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares_gethostbyname.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares_getnameinfo.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares_getsock.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares_init.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares_library_init.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares_llist.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares_mkquery.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares_create_query.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares_nowarn.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares_options.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares_parse_a_reply.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares_parse_aaaa_reply.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares_parse_caa_reply.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares_parse_mx_reply.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares_parse_naptr_reply.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares_parse_ns_reply.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares_parse_ptr_reply.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares_parse_soa_reply.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares_parse_srv_reply.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares_parse_txt_reply.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares_parse_uri_reply.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares_platform.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares_process.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares_query.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares_search.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares_send.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares_strcasecmp.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares_strdup.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares_strerror.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares_strsplit.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares_timeout.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares_version.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/ares_writev.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/bitncmp.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/inet_net_pton.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/inet_ntop.c"
|
||||||
|
"${LIBRARY_DIR}/src/lib/windows_port.c"
|
||||||
|
)
|
||||||
|
|
||||||
if (USE_STATIC_LIBRARIES)
|
if (USE_STATIC_LIBRARIES)
|
||||||
set(CARES_STATIC ON CACHE BOOL "" FORCE)
|
add_library(_c-ares STATIC ${SRCS})
|
||||||
set(CARES_SHARED OFF CACHE BOOL "" FORCE)
|
target_compile_definitions(_c-ares PUBLIC CARES_STATICLIB)
|
||||||
else()
|
else()
|
||||||
set(CARES_STATIC OFF CACHE BOOL "" FORCE)
|
add_library(_c-ares SHARED ${SRCS})
|
||||||
set(CARES_SHARED ON CACHE BOOL "" FORCE)
|
target_compile_definitions(_c-ares PUBLIC CARES_BUILDING_LIBRARY)
|
||||||
endif()
|
endif()
|
||||||
|
|
||||||
# Disable looking for libnsl on a platforms that has gethostbyname in glibc
|
target_compile_definitions(_c-ares PRIVATE HAVE_CONFIG_H=1)
|
||||||
#
|
|
||||||
# c-ares searching for gethostbyname in the libnsl library, however in the
|
target_include_directories(_c-ares SYSTEM PUBLIC
|
||||||
# version that shipped with gRPC it doing it wrong [1], since it uses
|
"${LIBRARY_DIR}/src/lib"
|
||||||
# CHECK_LIBRARY_EXISTS(), which will return TRUE even if the function exists in
|
"${LIBRARY_DIR}/include"
|
||||||
# another dependent library. The upstream already contains correct macro [2],
|
)
|
||||||
# but it is not included in gRPC (even upstream gRPC, not the one that is
|
|
||||||
# shipped with clickhousee).
|
# Platform-specific include directories. The original build system does a lot of checks to eventually generate two header files with defines:
|
||||||
#
|
# ares_build.h and ares_config.h. To update, run the original CMake build in c-ares for each platform and copy the headers into the
|
||||||
# [1]: https://github.com/c-ares/c-ares/blob/e982924acee7f7313b4baa4ee5ec000c5e373c30/CMakeLists.txt#L125
|
# platform-specific folder.
|
||||||
# [2]: https://github.com/c-ares/c-ares/blob/44fbc813685a1fa8aa3f27fcd7544faf612d376a/CMakeLists.txt#L146
|
# For the platform-specific compile definitions, see c-ares top-level CMakeLists.txt.
|
||||||
#
|
if (OS_LINUX)
|
||||||
# And because if you by some reason have libnsl [3] installed, clickhouse will
|
target_include_directories(_c-ares SYSTEM PUBLIC "${ClickHouse_SOURCE_DIR}/contrib/c-ares-cmake/linux")
|
||||||
# reject to start w/o it. While this is completelly different library.
|
target_compile_definitions(_c-ares PRIVATE -D_GNU_SOURCE -D_POSIX_C_SOURCE=199309L -D_XOPEN_SOURCE=600)
|
||||||
#
|
elseif (OS_DARWIN)
|
||||||
# [3]: https://packages.debian.org/bullseye/libnsl2
|
target_include_directories(_c-ares SYSTEM PUBLIC "${ClickHouse_SOURCE_DIR}/contrib/c-ares-cmake/darwin")
|
||||||
if (NOT CMAKE_SYSTEM_NAME STREQUAL "SunOS")
|
target_compile_definitions(_c-ares PRIVATE -D_DARWIN_C_SOURCE)
|
||||||
set(HAVE_LIBNSL OFF CACHE BOOL "" FORCE)
|
elseif (OS_FREEBSD)
|
||||||
|
target_include_directories(_c-ares SYSTEM PUBLIC "${ClickHouse_SOURCE_DIR}/contrib/c-ares-cmake/freebsd")
|
||||||
endif()
|
endif()
|
||||||
|
|
||||||
# Force use of c-ares inet_net_pton instead of libresolv one
|
add_library(ch_contrib::c-ares ALIAS _c-ares)
|
||||||
set(HAVE_INET_NET_PTON OFF CACHE BOOL "" FORCE)
|
|
||||||
|
|
||||||
add_subdirectory("../c-ares/" "../c-ares/")
|
|
||||||
|
|
||||||
add_library(ch_contrib::c-ares ALIAS c-ares)
|
|
||||||
|
43
contrib/c-ares-cmake/darwin/ares_build.h
Normal file
43
contrib/c-ares-cmake/darwin/ares_build.h
Normal file
@ -0,0 +1,43 @@
|
|||||||
|
#ifndef __CARES_BUILD_H
|
||||||
|
#define __CARES_BUILD_H
|
||||||
|
|
||||||
|
#define CARES_TYPEOF_ARES_SOCKLEN_T socklen_t
|
||||||
|
#define CARES_TYPEOF_ARES_SSIZE_T ssize_t
|
||||||
|
|
||||||
|
/* Prefix names with CARES_ to make sure they don't conflict with other config.h
|
||||||
|
* files. We need to include some dependent headers that may be system specific
|
||||||
|
* for C-Ares */
|
||||||
|
#define CARES_HAVE_SYS_TYPES_H
|
||||||
|
#define CARES_HAVE_SYS_SOCKET_H
|
||||||
|
/* #undef CARES_HAVE_WINDOWS_H */
|
||||||
|
/* #undef CARES_HAVE_WS2TCPIP_H */
|
||||||
|
/* #undef CARES_HAVE_WINSOCK2_H */
|
||||||
|
/* #undef CARES_HAVE_WINDOWS_H */
|
||||||
|
#define CARES_HAVE_ARPA_NAMESER_H
|
||||||
|
#define CARES_HAVE_ARPA_NAMESER_COMPAT_H
|
||||||
|
|
||||||
|
#ifdef CARES_HAVE_SYS_TYPES_H
|
||||||
|
# include <sys/types.h>
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#ifdef CARES_HAVE_SYS_SOCKET_H
|
||||||
|
# include <sys/socket.h>
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#ifdef CARES_HAVE_WINSOCK2_H
|
||||||
|
# include <winsock2.h>
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#ifdef CARES_HAVE_WS2TCPIP_H
|
||||||
|
# include <ws2tcpip.h>
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#ifdef CARES_HAVE_WINDOWS_H
|
||||||
|
# include <windows.h>
|
||||||
|
#endif
|
||||||
|
|
||||||
|
|
||||||
|
typedef CARES_TYPEOF_ARES_SOCKLEN_T ares_socklen_t;
|
||||||
|
typedef CARES_TYPEOF_ARES_SSIZE_T ares_ssize_t;
|
||||||
|
|
||||||
|
#endif /* __CARES_BUILD_H */
|
432
contrib/c-ares-cmake/darwin/ares_config.h
Normal file
432
contrib/c-ares-cmake/darwin/ares_config.h
Normal file
@ -0,0 +1,432 @@
|
|||||||
|
/* Generated from ares_config.h.cmake */
|
||||||
|
|
||||||
|
/* Define if building universal (internal helper macro) */
|
||||||
|
#undef AC_APPLE_UNIVERSAL_BUILD
|
||||||
|
|
||||||
|
/* define this if ares is built for a big endian system */
|
||||||
|
#undef ARES_BIG_ENDIAN
|
||||||
|
|
||||||
|
/* when building as static part of libcurl */
|
||||||
|
#undef BUILDING_LIBCURL
|
||||||
|
|
||||||
|
/* Defined for build that exposes internal static functions for testing. */
|
||||||
|
#undef CARES_EXPOSE_STATICS
|
||||||
|
|
||||||
|
/* Defined for build with symbol hiding. */
|
||||||
|
#undef CARES_SYMBOL_HIDING
|
||||||
|
|
||||||
|
/* Definition to make a library symbol externally visible. */
|
||||||
|
#undef CARES_SYMBOL_SCOPE_EXTERN
|
||||||
|
|
||||||
|
/* Use resolver library to configure cares */
|
||||||
|
/* #undef CARES_USE_LIBRESOLV */
|
||||||
|
|
||||||
|
/* if a /etc/inet dir is being used */
|
||||||
|
#undef ETC_INET
|
||||||
|
|
||||||
|
/* Define to the type of arg 2 for gethostname. */
|
||||||
|
#define GETHOSTNAME_TYPE_ARG2 size_t
|
||||||
|
|
||||||
|
/* Define to the type qualifier of arg 1 for getnameinfo. */
|
||||||
|
#define GETNAMEINFO_QUAL_ARG1
|
||||||
|
|
||||||
|
/* Define to the type of arg 1 for getnameinfo. */
|
||||||
|
#define GETNAMEINFO_TYPE_ARG1 struct sockaddr *
|
||||||
|
|
||||||
|
/* Define to the type of arg 2 for getnameinfo. */
|
||||||
|
#define GETNAMEINFO_TYPE_ARG2 socklen_t
|
||||||
|
|
||||||
|
/* Define to the type of args 4 and 6 for getnameinfo. */
|
||||||
|
#define GETNAMEINFO_TYPE_ARG46 socklen_t
|
||||||
|
|
||||||
|
/* Define to the type of arg 7 for getnameinfo. */
|
||||||
|
#define GETNAMEINFO_TYPE_ARG7 int
|
||||||
|
|
||||||
|
/* Specifies the number of arguments to getservbyport_r */
|
||||||
|
#define GETSERVBYPORT_R_ARGS
|
||||||
|
|
||||||
|
/* Specifies the number of arguments to getservbyname_r */
|
||||||
|
#define GETSERVBYNAME_R_ARGS
|
||||||
|
|
||||||
|
/* Define to 1 if you have AF_INET6. */
|
||||||
|
#define HAVE_AF_INET6
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <arpa/inet.h> header file. */
|
||||||
|
#define HAVE_ARPA_INET_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <arpa/nameser_compat.h> header file. */
|
||||||
|
#define HAVE_ARPA_NAMESER_COMPAT_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <arpa/nameser.h> header file. */
|
||||||
|
#define HAVE_ARPA_NAMESER_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <assert.h> header file. */
|
||||||
|
#define HAVE_ASSERT_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the `bitncmp' function. */
|
||||||
|
/* #undef HAVE_BITNCMP */
|
||||||
|
|
||||||
|
/* Define to 1 if bool is an available type. */
|
||||||
|
#define HAVE_BOOL_T
|
||||||
|
|
||||||
|
/* Define to 1 if you have the clock_gettime function and monotonic timer. */
|
||||||
|
#define HAVE_CLOCK_GETTIME_MONOTONIC
|
||||||
|
|
||||||
|
/* Define to 1 if you have the closesocket function. */
|
||||||
|
/* #undef HAVE_CLOSESOCKET */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the CloseSocket camel case function. */
|
||||||
|
/* #undef HAVE_CLOSESOCKET_CAMEL */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the connect function. */
|
||||||
|
#define HAVE_CONNECT
|
||||||
|
|
||||||
|
/* define if the compiler supports basic C++11 syntax */
|
||||||
|
/* #undef HAVE_CXX11 */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <dlfcn.h> header file. */
|
||||||
|
#define HAVE_DLFCN_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <errno.h> header file. */
|
||||||
|
#define HAVE_ERRNO_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the fcntl function. */
|
||||||
|
#define HAVE_FCNTL
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <fcntl.h> header file. */
|
||||||
|
#define HAVE_FCNTL_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have a working fcntl O_NONBLOCK function. */
|
||||||
|
#define HAVE_FCNTL_O_NONBLOCK
|
||||||
|
|
||||||
|
/* Define to 1 if you have the freeaddrinfo function. */
|
||||||
|
#define HAVE_FREEADDRINFO
|
||||||
|
|
||||||
|
/* Define to 1 if you have a working getaddrinfo function. */
|
||||||
|
#define HAVE_GETADDRINFO
|
||||||
|
|
||||||
|
/* Define to 1 if the getaddrinfo function is threadsafe. */
|
||||||
|
#define HAVE_GETADDRINFO_THREADSAFE
|
||||||
|
|
||||||
|
/* Define to 1 if you have the getenv function. */
|
||||||
|
#define HAVE_GETENV
|
||||||
|
|
||||||
|
/* Define to 1 if you have the gethostbyaddr function. */
|
||||||
|
#define HAVE_GETHOSTBYADDR
|
||||||
|
|
||||||
|
/* Define to 1 if you have the gethostbyname function. */
|
||||||
|
#define HAVE_GETHOSTBYNAME
|
||||||
|
|
||||||
|
/* Define to 1 if you have the gethostname function. */
|
||||||
|
#define HAVE_GETHOSTNAME
|
||||||
|
|
||||||
|
/* Define to 1 if you have the getnameinfo function. */
|
||||||
|
#define HAVE_GETNAMEINFO
|
||||||
|
|
||||||
|
/* Define to 1 if you have the getservbyport_r function. */
|
||||||
|
/* #undef HAVE_GETSERVBYPORT_R */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the getservbyname_r function. */
|
||||||
|
/* #undef HAVE_GETSERVBYNAME_R */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the `gettimeofday' function. */
|
||||||
|
#define HAVE_GETTIMEOFDAY
|
||||||
|
|
||||||
|
/* Define to 1 if you have the `if_indextoname' function. */
|
||||||
|
#define HAVE_IF_INDEXTONAME
|
||||||
|
|
||||||
|
/* Define to 1 if you have a IPv6 capable working inet_net_pton function. */
|
||||||
|
/* #undef HAVE_INET_NET_PTON */
|
||||||
|
|
||||||
|
/* Define to 1 if you have a IPv6 capable working inet_ntop function. */
|
||||||
|
#define HAVE_INET_NTOP
|
||||||
|
|
||||||
|
/* Define to 1 if you have a IPv6 capable working inet_pton function. */
|
||||||
|
#define HAVE_INET_PTON
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <inttypes.h> header file. */
|
||||||
|
#define HAVE_INTTYPES_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the ioctl function. */
|
||||||
|
#define HAVE_IOCTL
|
||||||
|
|
||||||
|
/* Define to 1 if you have the ioctlsocket function. */
|
||||||
|
/* #undef HAVE_IOCTLSOCKET */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the IoctlSocket camel case function. */
|
||||||
|
/* #undef HAVE_IOCTLSOCKET_CAMEL */
|
||||||
|
|
||||||
|
/* Define to 1 if you have a working IoctlSocket camel case FIONBIO function.
|
||||||
|
*/
|
||||||
|
/* #undef HAVE_IOCTLSOCKET_CAMEL_FIONBIO */
|
||||||
|
|
||||||
|
/* Define to 1 if you have a working ioctlsocket FIONBIO function. */
|
||||||
|
/* #undef HAVE_IOCTLSOCKET_FIONBIO */
|
||||||
|
|
||||||
|
/* Define to 1 if you have a working ioctl FIONBIO function. */
|
||||||
|
#define HAVE_IOCTL_FIONBIO
|
||||||
|
|
||||||
|
/* Define to 1 if you have a working ioctl SIOCGIFADDR function. */
|
||||||
|
#define HAVE_IOCTL_SIOCGIFADDR
|
||||||
|
|
||||||
|
/* Define to 1 if you have the `resolve' library (-lresolve). */
|
||||||
|
/* #undef HAVE_LIBRESOLV */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <limits.h> header file. */
|
||||||
|
#define HAVE_LIMITS_H
|
||||||
|
|
||||||
|
/* if your compiler supports LL */
|
||||||
|
#define HAVE_LL
|
||||||
|
|
||||||
|
/* Define to 1 if the compiler supports the 'long long' data type. */
|
||||||
|
#define HAVE_LONGLONG
|
||||||
|
|
||||||
|
/* Define to 1 if you have the malloc.h header file. */
|
||||||
|
/* #undef HAVE_MALLOC_H */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the memory.h header file. */
|
||||||
|
#define HAVE_MEMORY_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the MSG_NOSIGNAL flag. */
|
||||||
|
/* #undef HAVE_MSG_NOSIGNAL */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <netdb.h> header file. */
|
||||||
|
#define HAVE_NETDB_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <netinet/in.h> header file. */
|
||||||
|
#define HAVE_NETINET_IN_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <netinet/tcp.h> header file. */
|
||||||
|
#define HAVE_NETINET_TCP_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <net/if.h> header file. */
|
||||||
|
#define HAVE_NET_IF_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have PF_INET6. */
|
||||||
|
#define HAVE_PF_INET6
|
||||||
|
|
||||||
|
/* Define to 1 if you have the recv function. */
|
||||||
|
#define HAVE_RECV
|
||||||
|
|
||||||
|
/* Define to 1 if you have the recvfrom function. */
|
||||||
|
#define HAVE_RECVFROM
|
||||||
|
|
||||||
|
/* Define to 1 if you have the send function. */
|
||||||
|
#define HAVE_SEND
|
||||||
|
|
||||||
|
/* Define to 1 if you have the setsockopt function. */
|
||||||
|
#define HAVE_SETSOCKOPT
|
||||||
|
|
||||||
|
/* Define to 1 if you have a working setsockopt SO_NONBLOCK function. */
|
||||||
|
/* #undef HAVE_SETSOCKOPT_SO_NONBLOCK */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <signal.h> header file. */
|
||||||
|
#define HAVE_SIGNAL_H
|
||||||
|
|
||||||
|
/* Define to 1 if sig_atomic_t is an available typedef. */
|
||||||
|
#define HAVE_SIG_ATOMIC_T
|
||||||
|
|
||||||
|
/* Define to 1 if sig_atomic_t is already defined as volatile. */
|
||||||
|
/* #undef HAVE_SIG_ATOMIC_T_VOLATILE */
|
||||||
|
|
||||||
|
/* Define to 1 if your struct sockaddr_in6 has sin6_scope_id. */
|
||||||
|
#define HAVE_SOCKADDR_IN6_SIN6_SCOPE_ID
|
||||||
|
|
||||||
|
/* Define to 1 if you have the socket function. */
|
||||||
|
#define HAVE_SOCKET
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <socket.h> header file. */
|
||||||
|
/* #undef HAVE_SOCKET_H */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <stdbool.h> header file. */
|
||||||
|
#define HAVE_STDBOOL_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <stdint.h> header file. */
|
||||||
|
#define HAVE_STDINT_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <stdlib.h> header file. */
|
||||||
|
#define HAVE_STDLIB_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the strcasecmp function. */
|
||||||
|
#define HAVE_STRCASECMP
|
||||||
|
|
||||||
|
/* Define to 1 if you have the strcmpi function. */
|
||||||
|
/* #undef HAVE_STRCMPI */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the strdup function. */
|
||||||
|
#define HAVE_STRDUP
|
||||||
|
|
||||||
|
/* Define to 1 if you have the stricmp function. */
|
||||||
|
/* #undef HAVE_STRICMP */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <strings.h> header file. */
|
||||||
|
#define HAVE_STRINGS_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <string.h> header file. */
|
||||||
|
#define HAVE_STRING_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the strncasecmp function. */
|
||||||
|
#define HAVE_STRNCASECMP
|
||||||
|
|
||||||
|
/* Define to 1 if you have the strncmpi function. */
|
||||||
|
/* #undef HAVE_STRNCMPI */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the strnicmp function. */
|
||||||
|
/* #undef HAVE_STRNICMP */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <stropts.h> header file. */
|
||||||
|
/* #undef HAVE_STROPTS_H */
|
||||||
|
|
||||||
|
/* Define to 1 if you have struct addrinfo. */
|
||||||
|
#define HAVE_STRUCT_ADDRINFO
|
||||||
|
|
||||||
|
/* Define to 1 if you have struct in6_addr. */
|
||||||
|
#define HAVE_STRUCT_IN6_ADDR
|
||||||
|
|
||||||
|
/* Define to 1 if you have struct sockaddr_in6. */
|
||||||
|
#define HAVE_STRUCT_SOCKADDR_IN6
|
||||||
|
|
||||||
|
/* if struct sockaddr_storage is defined */
|
||||||
|
#define HAVE_STRUCT_SOCKADDR_STORAGE
|
||||||
|
|
||||||
|
/* Define to 1 if you have the timeval struct. */
|
||||||
|
#define HAVE_STRUCT_TIMEVAL
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <sys/ioctl.h> header file. */
|
||||||
|
#define HAVE_SYS_IOCTL_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <sys/param.h> header file. */
|
||||||
|
#define HAVE_SYS_PARAM_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <sys/select.h> header file. */
|
||||||
|
#define HAVE_SYS_SELECT_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <sys/socket.h> header file. */
|
||||||
|
#define HAVE_SYS_SOCKET_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <sys/stat.h> header file. */
|
||||||
|
#define HAVE_SYS_STAT_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <sys/time.h> header file. */
|
||||||
|
#define HAVE_SYS_TIME_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <sys/types.h> header file. */
|
||||||
|
#define HAVE_SYS_TYPES_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <sys/uio.h> header file. */
|
||||||
|
#define HAVE_SYS_UIO_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <time.h> header file. */
|
||||||
|
#define HAVE_TIME_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <unistd.h> header file. */
|
||||||
|
#define HAVE_UNISTD_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the windows.h header file. */
|
||||||
|
/* #undef HAVE_WINDOWS_H */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the winsock2.h header file. */
|
||||||
|
/* #undef HAVE_WINSOCK2_H */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the winsock.h header file. */
|
||||||
|
/* #undef HAVE_WINSOCK_H */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the writev function. */
|
||||||
|
#define HAVE_WRITEV
|
||||||
|
|
||||||
|
/* Define to 1 if you have the ws2tcpip.h header file. */
|
||||||
|
/* #undef HAVE_WS2TCPIP_H */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the __system_property_get function */
|
||||||
|
#define HAVE___SYSTEM_PROPERTY_GET
|
||||||
|
|
||||||
|
/* Define to 1 if you need the malloc.h header file even with stdlib.h */
|
||||||
|
/* #undef NEED_MALLOC_H */
|
||||||
|
|
||||||
|
/* Define to 1 if you need the memory.h header file even with stdlib.h */
|
||||||
|
/* #undef NEED_MEMORY_H */
|
||||||
|
|
||||||
|
/* a suitable file/device to read random data from */
|
||||||
|
#define CARES_RANDOM_FILE "/dev/urandom"
|
||||||
|
|
||||||
|
/* Define to the type qualifier pointed by arg 5 for recvfrom. */
|
||||||
|
#define RECVFROM_QUAL_ARG5
|
||||||
|
|
||||||
|
/* Define to the type of arg 1 for recvfrom. */
|
||||||
|
#define RECVFROM_TYPE_ARG1 int
|
||||||
|
|
||||||
|
/* Define to the type pointed by arg 2 for recvfrom. */
|
||||||
|
#define RECVFROM_TYPE_ARG2 void *
|
||||||
|
|
||||||
|
/* Define to 1 if the type pointed by arg 2 for recvfrom is void. */
|
||||||
|
#define RECVFROM_TYPE_ARG2_IS_VOID 0
|
||||||
|
|
||||||
|
/* Define to the type of arg 3 for recvfrom. */
|
||||||
|
#define RECVFROM_TYPE_ARG3 size_t
|
||||||
|
|
||||||
|
/* Define to the type of arg 4 for recvfrom. */
|
||||||
|
#define RECVFROM_TYPE_ARG4 int
|
||||||
|
|
||||||
|
/* Define to the type pointed by arg 5 for recvfrom. */
|
||||||
|
#define RECVFROM_TYPE_ARG5 struct sockaddr *
|
||||||
|
|
||||||
|
/* Define to 1 if the type pointed by arg 5 for recvfrom is void. */
|
||||||
|
#define RECVFROM_TYPE_ARG5_IS_VOID 0
|
||||||
|
|
||||||
|
/* Define to the type pointed by arg 6 for recvfrom. */
|
||||||
|
#define RECVFROM_TYPE_ARG6 socklen_t *
|
||||||
|
|
||||||
|
/* Define to 1 if the type pointed by arg 6 for recvfrom is void. */
|
||||||
|
#define RECVFROM_TYPE_ARG6_IS_VOID 0
|
||||||
|
|
||||||
|
/* Define to the function return type for recvfrom. */
|
||||||
|
#define RECVFROM_TYPE_RETV ssize_t
|
||||||
|
|
||||||
|
/* Define to the type of arg 1 for recv. */
|
||||||
|
#define RECV_TYPE_ARG1 int
|
||||||
|
|
||||||
|
/* Define to the type of arg 2 for recv. */
|
||||||
|
#define RECV_TYPE_ARG2 void *
|
||||||
|
|
||||||
|
/* Define to the type of arg 3 for recv. */
|
||||||
|
#define RECV_TYPE_ARG3 size_t
|
||||||
|
|
||||||
|
/* Define to the type of arg 4 for recv. */
|
||||||
|
#define RECV_TYPE_ARG4 int
|
||||||
|
|
||||||
|
/* Define to the function return type for recv. */
|
||||||
|
#define RECV_TYPE_RETV ssize_t
|
||||||
|
|
||||||
|
/* Define as the return type of signal handlers (`int' or `void'). */
|
||||||
|
#define RETSIGTYPE
|
||||||
|
|
||||||
|
/* Define to the type qualifier of arg 2 for send. */
|
||||||
|
#define SEND_QUAL_ARG2
|
||||||
|
|
||||||
|
/* Define to the type of arg 1 for send. */
|
||||||
|
#define SEND_TYPE_ARG1 int
|
||||||
|
|
||||||
|
/* Define to the type of arg 2 for send. */
|
||||||
|
#define SEND_TYPE_ARG2 void *
|
||||||
|
|
||||||
|
/* Define to the type of arg 3 for send. */
|
||||||
|
#define SEND_TYPE_ARG3 size_t
|
||||||
|
|
||||||
|
/* Define to the type of arg 4 for send. */
|
||||||
|
#define SEND_TYPE_ARG4 int
|
||||||
|
|
||||||
|
/* Define to the function return type for send. */
|
||||||
|
#define SEND_TYPE_RETV ssize_t
|
||||||
|
|
||||||
|
/* Define to 1 if you can safely include both <sys/time.h> and <time.h>. */
|
||||||
|
#define TIME_WITH_SYS_TIME
|
||||||
|
|
||||||
|
/* Define to disable non-blocking sockets. */
|
||||||
|
#undef USE_BLOCKING_SOCKETS
|
||||||
|
|
||||||
|
/* Define to avoid automatic inclusion of winsock.h */
|
||||||
|
#undef WIN32_LEAN_AND_MEAN
|
||||||
|
|
||||||
|
/* Type to use in place of in_addr_t when system does not provide it. */
|
||||||
|
#undef in_addr_t
|
||||||
|
|
43
contrib/c-ares-cmake/freebsd/ares_build.h
Normal file
43
contrib/c-ares-cmake/freebsd/ares_build.h
Normal file
@ -0,0 +1,43 @@
|
|||||||
|
#ifndef __CARES_BUILD_H
|
||||||
|
#define __CARES_BUILD_H
|
||||||
|
|
||||||
|
#define CARES_TYPEOF_ARES_SOCKLEN_T socklen_t
|
||||||
|
#define CARES_TYPEOF_ARES_SSIZE_T ssize_t
|
||||||
|
|
||||||
|
/* Prefix names with CARES_ to make sure they don't conflict with other config.h
|
||||||
|
* files. We need to include some dependent headers that may be system specific
|
||||||
|
* for C-Ares */
|
||||||
|
#define CARES_HAVE_SYS_TYPES_H
|
||||||
|
#define CARES_HAVE_SYS_SOCKET_H
|
||||||
|
/* #undef CARES_HAVE_WINDOWS_H */
|
||||||
|
/* #undef CARES_HAVE_WS2TCPIP_H */
|
||||||
|
/* #undef CARES_HAVE_WINSOCK2_H */
|
||||||
|
/* #undef CARES_HAVE_WINDOWS_H */
|
||||||
|
#define CARES_HAVE_ARPA_NAMESER_H
|
||||||
|
#define CARES_HAVE_ARPA_NAMESER_COMPAT_H
|
||||||
|
|
||||||
|
#ifdef CARES_HAVE_SYS_TYPES_H
|
||||||
|
# include <sys/types.h>
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#ifdef CARES_HAVE_SYS_SOCKET_H
|
||||||
|
# include <sys/socket.h>
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#ifdef CARES_HAVE_WINSOCK2_H
|
||||||
|
# include <winsock2.h>
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#ifdef CARES_HAVE_WS2TCPIP_H
|
||||||
|
# include <ws2tcpip.h>
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#ifdef CARES_HAVE_WINDOWS_H
|
||||||
|
# include <windows.h>
|
||||||
|
#endif
|
||||||
|
|
||||||
|
|
||||||
|
typedef CARES_TYPEOF_ARES_SOCKLEN_T ares_socklen_t;
|
||||||
|
typedef CARES_TYPEOF_ARES_SSIZE_T ares_ssize_t;
|
||||||
|
|
||||||
|
#endif /* __CARES_BUILD_H */
|
432
contrib/c-ares-cmake/freebsd/ares_config.h
Normal file
432
contrib/c-ares-cmake/freebsd/ares_config.h
Normal file
@ -0,0 +1,432 @@
|
|||||||
|
/* Generated from ares_config.h.cmake */
|
||||||
|
|
||||||
|
/* Define if building universal (internal helper macro) */
|
||||||
|
#undef AC_APPLE_UNIVERSAL_BUILD
|
||||||
|
|
||||||
|
/* define this if ares is built for a big endian system */
|
||||||
|
#undef ARES_BIG_ENDIAN
|
||||||
|
|
||||||
|
/* when building as static part of libcurl */
|
||||||
|
#undef BUILDING_LIBCURL
|
||||||
|
|
||||||
|
/* Defined for build that exposes internal static functions for testing. */
|
||||||
|
#undef CARES_EXPOSE_STATICS
|
||||||
|
|
||||||
|
/* Defined for build with symbol hiding. */
|
||||||
|
#undef CARES_SYMBOL_HIDING
|
||||||
|
|
||||||
|
/* Definition to make a library symbol externally visible. */
|
||||||
|
#undef CARES_SYMBOL_SCOPE_EXTERN
|
||||||
|
|
||||||
|
/* Use resolver library to configure cares */
|
||||||
|
/* #undef CARES_USE_LIBRESOLV */
|
||||||
|
|
||||||
|
/* if a /etc/inet dir is being used */
|
||||||
|
#undef ETC_INET
|
||||||
|
|
||||||
|
/* Define to the type of arg 2 for gethostname. */
|
||||||
|
#define GETHOSTNAME_TYPE_ARG2 size_t
|
||||||
|
|
||||||
|
/* Define to the type qualifier of arg 1 for getnameinfo. */
|
||||||
|
#define GETNAMEINFO_QUAL_ARG1
|
||||||
|
|
||||||
|
/* Define to the type of arg 1 for getnameinfo. */
|
||||||
|
#define GETNAMEINFO_TYPE_ARG1 struct sockaddr *
|
||||||
|
|
||||||
|
/* Define to the type of arg 2 for getnameinfo. */
|
||||||
|
#define GETNAMEINFO_TYPE_ARG2 socklen_t
|
||||||
|
|
||||||
|
/* Define to the type of args 4 and 6 for getnameinfo. */
|
||||||
|
#define GETNAMEINFO_TYPE_ARG46 socklen_t
|
||||||
|
|
||||||
|
/* Define to the type of arg 7 for getnameinfo. */
|
||||||
|
#define GETNAMEINFO_TYPE_ARG7 int
|
||||||
|
|
||||||
|
/* Specifies the number of arguments to getservbyport_r */
|
||||||
|
#define GETSERVBYPORT_R_ARGS 6
|
||||||
|
|
||||||
|
/* Specifies the number of arguments to getservbyname_r */
|
||||||
|
#define GETSERVBYNAME_R_ARGS 6
|
||||||
|
|
||||||
|
/* Define to 1 if you have AF_INET6. */
|
||||||
|
#define HAVE_AF_INET6
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <arpa/inet.h> header file. */
|
||||||
|
#define HAVE_ARPA_INET_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <arpa/nameser_compat.h> header file. */
|
||||||
|
#define HAVE_ARPA_NAMESER_COMPAT_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <arpa/nameser.h> header file. */
|
||||||
|
#define HAVE_ARPA_NAMESER_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <assert.h> header file. */
|
||||||
|
#define HAVE_ASSERT_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the `bitncmp' function. */
|
||||||
|
/* #undef HAVE_BITNCMP */
|
||||||
|
|
||||||
|
/* Define to 1 if bool is an available type. */
|
||||||
|
#define HAVE_BOOL_T
|
||||||
|
|
||||||
|
/* Define to 1 if you have the clock_gettime function and monotonic timer. */
|
||||||
|
#define HAVE_CLOCK_GETTIME_MONOTONIC
|
||||||
|
|
||||||
|
/* Define to 1 if you have the closesocket function. */
|
||||||
|
/* #undef HAVE_CLOSESOCKET */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the CloseSocket camel case function. */
|
||||||
|
/* #undef HAVE_CLOSESOCKET_CAMEL */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the connect function. */
|
||||||
|
#define HAVE_CONNECT
|
||||||
|
|
||||||
|
/* define if the compiler supports basic C++11 syntax */
|
||||||
|
/* #undef HAVE_CXX11 */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <dlfcn.h> header file. */
|
||||||
|
#define HAVE_DLFCN_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <errno.h> header file. */
|
||||||
|
#define HAVE_ERRNO_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the fcntl function. */
|
||||||
|
#define HAVE_FCNTL
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <fcntl.h> header file. */
|
||||||
|
#define HAVE_FCNTL_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have a working fcntl O_NONBLOCK function. */
|
||||||
|
#define HAVE_FCNTL_O_NONBLOCK
|
||||||
|
|
||||||
|
/* Define to 1 if you have the freeaddrinfo function. */
|
||||||
|
#define HAVE_FREEADDRINFO
|
||||||
|
|
||||||
|
/* Define to 1 if you have a working getaddrinfo function. */
|
||||||
|
#define HAVE_GETADDRINFO
|
||||||
|
|
||||||
|
/* Define to 1 if the getaddrinfo function is threadsafe. */
|
||||||
|
#define HAVE_GETADDRINFO_THREADSAFE
|
||||||
|
|
||||||
|
/* Define to 1 if you have the getenv function. */
|
||||||
|
#define HAVE_GETENV
|
||||||
|
|
||||||
|
/* Define to 1 if you have the gethostbyaddr function. */
|
||||||
|
#define HAVE_GETHOSTBYADDR
|
||||||
|
|
||||||
|
/* Define to 1 if you have the gethostbyname function. */
|
||||||
|
#define HAVE_GETHOSTBYNAME
|
||||||
|
|
||||||
|
/* Define to 1 if you have the gethostname function. */
|
||||||
|
#define HAVE_GETHOSTNAME
|
||||||
|
|
||||||
|
/* Define to 1 if you have the getnameinfo function. */
|
||||||
|
#define HAVE_GETNAMEINFO
|
||||||
|
|
||||||
|
/* Define to 1 if you have the getservbyport_r function. */
|
||||||
|
#define HAVE_GETSERVBYPORT_R
|
||||||
|
|
||||||
|
/* Define to 1 if you have the getservbyname_r function. */
|
||||||
|
#define HAVE_GETSERVBYNAME_R
|
||||||
|
|
||||||
|
/* Define to 1 if you have the `gettimeofday' function. */
|
||||||
|
#define HAVE_GETTIMEOFDAY
|
||||||
|
|
||||||
|
/* Define to 1 if you have the `if_indextoname' function. */
|
||||||
|
#define HAVE_IF_INDEXTONAME
|
||||||
|
|
||||||
|
/* Define to 1 if you have a IPv6 capable working inet_net_pton function. */
|
||||||
|
/* #undef HAVE_INET_NET_PTON */
|
||||||
|
|
||||||
|
/* Define to 1 if you have a IPv6 capable working inet_ntop function. */
|
||||||
|
#define HAVE_INET_NTOP
|
||||||
|
|
||||||
|
/* Define to 1 if you have a IPv6 capable working inet_pton function. */
|
||||||
|
#define HAVE_INET_PTON
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <inttypes.h> header file. */
|
||||||
|
#define HAVE_INTTYPES_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the ioctl function. */
|
||||||
|
#define HAVE_IOCTL
|
||||||
|
|
||||||
|
/* Define to 1 if you have the ioctlsocket function. */
|
||||||
|
/* #undef HAVE_IOCTLSOCKET */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the IoctlSocket camel case function. */
|
||||||
|
/* #undef HAVE_IOCTLSOCKET_CAMEL */
|
||||||
|
|
||||||
|
/* Define to 1 if you have a working IoctlSocket camel case FIONBIO function.
|
||||||
|
*/
|
||||||
|
/* #undef HAVE_IOCTLSOCKET_CAMEL_FIONBIO */
|
||||||
|
|
||||||
|
/* Define to 1 if you have a working ioctlsocket FIONBIO function. */
|
||||||
|
/* #undef HAVE_IOCTLSOCKET_FIONBIO */
|
||||||
|
|
||||||
|
/* Define to 1 if you have a working ioctl FIONBIO function. */
|
||||||
|
#define HAVE_IOCTL_FIONBIO
|
||||||
|
|
||||||
|
/* Define to 1 if you have a working ioctl SIOCGIFADDR function. */
|
||||||
|
#define HAVE_IOCTL_SIOCGIFADDR
|
||||||
|
|
||||||
|
/* Define to 1 if you have the `resolve' library (-lresolve). */
|
||||||
|
/* #undef HAVE_LIBRESOLV */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <limits.h> header file. */
|
||||||
|
#define HAVE_LIMITS_H
|
||||||
|
|
||||||
|
/* if your compiler supports LL */
|
||||||
|
#define HAVE_LL
|
||||||
|
|
||||||
|
/* Define to 1 if the compiler supports the 'long long' data type. */
|
||||||
|
#define HAVE_LONGLONG
|
||||||
|
|
||||||
|
/* Define to 1 if you have the malloc.h header file. */
|
||||||
|
/* #undef HAVE_MALLOC_H */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the memory.h header file. */
|
||||||
|
#define HAVE_MEMORY_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the MSG_NOSIGNAL flag. */
|
||||||
|
#define HAVE_MSG_NOSIGNAL
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <netdb.h> header file. */
|
||||||
|
#define HAVE_NETDB_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <netinet/in.h> header file. */
|
||||||
|
#define HAVE_NETINET_IN_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <netinet/tcp.h> header file. */
|
||||||
|
#define HAVE_NETINET_TCP_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <net/if.h> header file. */
|
||||||
|
#define HAVE_NET_IF_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have PF_INET6. */
|
||||||
|
#define HAVE_PF_INET6
|
||||||
|
|
||||||
|
/* Define to 1 if you have the recv function. */
|
||||||
|
#define HAVE_RECV
|
||||||
|
|
||||||
|
/* Define to 1 if you have the recvfrom function. */
|
||||||
|
#define HAVE_RECVFROM
|
||||||
|
|
||||||
|
/* Define to 1 if you have the send function. */
|
||||||
|
#define HAVE_SEND
|
||||||
|
|
||||||
|
/* Define to 1 if you have the setsockopt function. */
|
||||||
|
#define HAVE_SETSOCKOPT
|
||||||
|
|
||||||
|
/* Define to 1 if you have a working setsockopt SO_NONBLOCK function. */
|
||||||
|
/* #undef HAVE_SETSOCKOPT_SO_NONBLOCK */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <signal.h> header file. */
|
||||||
|
#define HAVE_SIGNAL_H
|
||||||
|
|
||||||
|
/* Define to 1 if sig_atomic_t is an available typedef. */
|
||||||
|
#define HAVE_SIG_ATOMIC_T
|
||||||
|
|
||||||
|
/* Define to 1 if sig_atomic_t is already defined as volatile. */
|
||||||
|
/* #undef HAVE_SIG_ATOMIC_T_VOLATILE */
|
||||||
|
|
||||||
|
/* Define to 1 if your struct sockaddr_in6 has sin6_scope_id. */
|
||||||
|
#define HAVE_SOCKADDR_IN6_SIN6_SCOPE_ID
|
||||||
|
|
||||||
|
/* Define to 1 if you have the socket function. */
|
||||||
|
#define HAVE_SOCKET
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <socket.h> header file. */
|
||||||
|
/* #undef HAVE_SOCKET_H */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <stdbool.h> header file. */
|
||||||
|
#define HAVE_STDBOOL_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <stdint.h> header file. */
|
||||||
|
#define HAVE_STDINT_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <stdlib.h> header file. */
|
||||||
|
#define HAVE_STDLIB_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the strcasecmp function. */
|
||||||
|
#define HAVE_STRCASECMP
|
||||||
|
|
||||||
|
/* Define to 1 if you have the strcmpi function. */
|
||||||
|
/* #undef HAVE_STRCMPI */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the strdup function. */
|
||||||
|
#define HAVE_STRDUP
|
||||||
|
|
||||||
|
/* Define to 1 if you have the stricmp function. */
|
||||||
|
/* #undef HAVE_STRICMP */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <strings.h> header file. */
|
||||||
|
#define HAVE_STRINGS_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <string.h> header file. */
|
||||||
|
#define HAVE_STRING_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the strncasecmp function. */
|
||||||
|
#define HAVE_STRNCASECMP
|
||||||
|
|
||||||
|
/* Define to 1 if you have the strncmpi function. */
|
||||||
|
/* #undef HAVE_STRNCMPI */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the strnicmp function. */
|
||||||
|
/* #undef HAVE_STRNICMP */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <stropts.h> header file. */
|
||||||
|
/* #undef HAVE_STROPTS_H */
|
||||||
|
|
||||||
|
/* Define to 1 if you have struct addrinfo. */
|
||||||
|
#define HAVE_STRUCT_ADDRINFO
|
||||||
|
|
||||||
|
/* Define to 1 if you have struct in6_addr. */
|
||||||
|
#define HAVE_STRUCT_IN6_ADDR
|
||||||
|
|
||||||
|
/* Define to 1 if you have struct sockaddr_in6. */
|
||||||
|
#define HAVE_STRUCT_SOCKADDR_IN6
|
||||||
|
|
||||||
|
/* if struct sockaddr_storage is defined */
|
||||||
|
#define HAVE_STRUCT_SOCKADDR_STORAGE
|
||||||
|
|
||||||
|
/* Define to 1 if you have the timeval struct. */
|
||||||
|
#define HAVE_STRUCT_TIMEVAL
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <sys/ioctl.h> header file. */
|
||||||
|
#define HAVE_SYS_IOCTL_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <sys/param.h> header file. */
|
||||||
|
#define HAVE_SYS_PARAM_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <sys/select.h> header file. */
|
||||||
|
#define HAVE_SYS_SELECT_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <sys/socket.h> header file. */
|
||||||
|
#define HAVE_SYS_SOCKET_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <sys/stat.h> header file. */
|
||||||
|
#define HAVE_SYS_STAT_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <sys/time.h> header file. */
|
||||||
|
#define HAVE_SYS_TIME_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <sys/types.h> header file. */
|
||||||
|
#define HAVE_SYS_TYPES_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <sys/uio.h> header file. */
|
||||||
|
#define HAVE_SYS_UIO_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <time.h> header file. */
|
||||||
|
#define HAVE_TIME_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <unistd.h> header file. */
|
||||||
|
#define HAVE_UNISTD_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the windows.h header file. */
|
||||||
|
/* #undef HAVE_WINDOWS_H */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the winsock2.h header file. */
|
||||||
|
/* #undef HAVE_WINSOCK2_H */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the winsock.h header file. */
|
||||||
|
/* #undef HAVE_WINSOCK_H */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the writev function. */
|
||||||
|
#define HAVE_WRITEV
|
||||||
|
|
||||||
|
/* Define to 1 if you have the ws2tcpip.h header file. */
|
||||||
|
/* #undef HAVE_WS2TCPIP_H */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the __system_property_get function */
|
||||||
|
#define HAVE___SYSTEM_PROPERTY_GET
|
||||||
|
|
||||||
|
/* Define to 1 if you need the malloc.h header file even with stdlib.h */
|
||||||
|
/* #undef NEED_MALLOC_H */
|
||||||
|
|
||||||
|
/* Define to 1 if you need the memory.h header file even with stdlib.h */
|
||||||
|
/* #undef NEED_MEMORY_H */
|
||||||
|
|
||||||
|
/* a suitable file/device to read random data from */
|
||||||
|
#define CARES_RANDOM_FILE "/dev/urandom"
|
||||||
|
|
||||||
|
/* Define to the type qualifier pointed by arg 5 for recvfrom. */
|
||||||
|
#define RECVFROM_QUAL_ARG5
|
||||||
|
|
||||||
|
/* Define to the type of arg 1 for recvfrom. */
|
||||||
|
#define RECVFROM_TYPE_ARG1 int
|
||||||
|
|
||||||
|
/* Define to the type pointed by arg 2 for recvfrom. */
|
||||||
|
#define RECVFROM_TYPE_ARG2 void *
|
||||||
|
|
||||||
|
/* Define to 1 if the type pointed by arg 2 for recvfrom is void. */
|
||||||
|
#define RECVFROM_TYPE_ARG2_IS_VOID 0
|
||||||
|
|
||||||
|
/* Define to the type of arg 3 for recvfrom. */
|
||||||
|
#define RECVFROM_TYPE_ARG3 size_t
|
||||||
|
|
||||||
|
/* Define to the type of arg 4 for recvfrom. */
|
||||||
|
#define RECVFROM_TYPE_ARG4 int
|
||||||
|
|
||||||
|
/* Define to the type pointed by arg 5 for recvfrom. */
|
||||||
|
#define RECVFROM_TYPE_ARG5 struct sockaddr *
|
||||||
|
|
||||||
|
/* Define to 1 if the type pointed by arg 5 for recvfrom is void. */
|
||||||
|
#define RECVFROM_TYPE_ARG5_IS_VOID 0
|
||||||
|
|
||||||
|
/* Define to the type pointed by arg 6 for recvfrom. */
|
||||||
|
#define RECVFROM_TYPE_ARG6 socklen_t *
|
||||||
|
|
||||||
|
/* Define to 1 if the type pointed by arg 6 for recvfrom is void. */
|
||||||
|
#define RECVFROM_TYPE_ARG6_IS_VOID 0
|
||||||
|
|
||||||
|
/* Define to the function return type for recvfrom. */
|
||||||
|
#define RECVFROM_TYPE_RETV ssize_t
|
||||||
|
|
||||||
|
/* Define to the type of arg 1 for recv. */
|
||||||
|
#define RECV_TYPE_ARG1 int
|
||||||
|
|
||||||
|
/* Define to the type of arg 2 for recv. */
|
||||||
|
#define RECV_TYPE_ARG2 void *
|
||||||
|
|
||||||
|
/* Define to the type of arg 3 for recv. */
|
||||||
|
#define RECV_TYPE_ARG3 size_t
|
||||||
|
|
||||||
|
/* Define to the type of arg 4 for recv. */
|
||||||
|
#define RECV_TYPE_ARG4 int
|
||||||
|
|
||||||
|
/* Define to the function return type for recv. */
|
||||||
|
#define RECV_TYPE_RETV ssize_t
|
||||||
|
|
||||||
|
/* Define as the return type of signal handlers (`int' or `void'). */
|
||||||
|
#define RETSIGTYPE
|
||||||
|
|
||||||
|
/* Define to the type qualifier of arg 2 for send. */
|
||||||
|
#define SEND_QUAL_ARG2
|
||||||
|
|
||||||
|
/* Define to the type of arg 1 for send. */
|
||||||
|
#define SEND_TYPE_ARG1 int
|
||||||
|
|
||||||
|
/* Define to the type of arg 2 for send. */
|
||||||
|
#define SEND_TYPE_ARG2 void *
|
||||||
|
|
||||||
|
/* Define to the type of arg 3 for send. */
|
||||||
|
#define SEND_TYPE_ARG3 size_t
|
||||||
|
|
||||||
|
/* Define to the type of arg 4 for send. */
|
||||||
|
#define SEND_TYPE_ARG4 int
|
||||||
|
|
||||||
|
/* Define to the function return type for send. */
|
||||||
|
#define SEND_TYPE_RETV ssize_t
|
||||||
|
|
||||||
|
/* Define to 1 if you can safely include both <sys/time.h> and <time.h>. */
|
||||||
|
#define TIME_WITH_SYS_TIME
|
||||||
|
|
||||||
|
/* Define to disable non-blocking sockets. */
|
||||||
|
#undef USE_BLOCKING_SOCKETS
|
||||||
|
|
||||||
|
/* Define to avoid automatic inclusion of winsock.h */
|
||||||
|
#undef WIN32_LEAN_AND_MEAN
|
||||||
|
|
||||||
|
/* Type to use in place of in_addr_t when system does not provide it. */
|
||||||
|
#undef in_addr_t
|
||||||
|
|
43
contrib/c-ares-cmake/linux/ares_build.h
Normal file
43
contrib/c-ares-cmake/linux/ares_build.h
Normal file
@ -0,0 +1,43 @@
|
|||||||
|
#ifndef __CARES_BUILD_H
|
||||||
|
#define __CARES_BUILD_H
|
||||||
|
|
||||||
|
#define CARES_TYPEOF_ARES_SOCKLEN_T socklen_t
|
||||||
|
#define CARES_TYPEOF_ARES_SSIZE_T ssize_t
|
||||||
|
|
||||||
|
/* Prefix names with CARES_ to make sure they don't conflict with other config.h
|
||||||
|
* files. We need to include some dependent headers that may be system specific
|
||||||
|
* for C-Ares */
|
||||||
|
#define CARES_HAVE_SYS_TYPES_H
|
||||||
|
#define CARES_HAVE_SYS_SOCKET_H
|
||||||
|
/* #undef CARES_HAVE_WINDOWS_H */
|
||||||
|
/* #undef CARES_HAVE_WS2TCPIP_H */
|
||||||
|
/* #undef CARES_HAVE_WINSOCK2_H */
|
||||||
|
/* #undef CARES_HAVE_WINDOWS_H */
|
||||||
|
#define CARES_HAVE_ARPA_NAMESER_H
|
||||||
|
#define CARES_HAVE_ARPA_NAMESER_COMPAT_H
|
||||||
|
|
||||||
|
#ifdef CARES_HAVE_SYS_TYPES_H
|
||||||
|
# include <sys/types.h>
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#ifdef CARES_HAVE_SYS_SOCKET_H
|
||||||
|
# include <sys/socket.h>
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#ifdef CARES_HAVE_WINSOCK2_H
|
||||||
|
# include <winsock2.h>
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#ifdef CARES_HAVE_WS2TCPIP_H
|
||||||
|
# include <ws2tcpip.h>
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#ifdef CARES_HAVE_WINDOWS_H
|
||||||
|
# include <windows.h>
|
||||||
|
#endif
|
||||||
|
|
||||||
|
|
||||||
|
typedef CARES_TYPEOF_ARES_SOCKLEN_T ares_socklen_t;
|
||||||
|
typedef CARES_TYPEOF_ARES_SSIZE_T ares_ssize_t;
|
||||||
|
|
||||||
|
#endif /* __CARES_BUILD_H */
|
432
contrib/c-ares-cmake/linux/ares_config.h
Normal file
432
contrib/c-ares-cmake/linux/ares_config.h
Normal file
@ -0,0 +1,432 @@
|
|||||||
|
/* Generated from ares_config.h.cmake */
|
||||||
|
|
||||||
|
/* Define if building universal (internal helper macro) */
|
||||||
|
#undef AC_APPLE_UNIVERSAL_BUILD
|
||||||
|
|
||||||
|
/* define this if ares is built for a big endian system */
|
||||||
|
#undef ARES_BIG_ENDIAN
|
||||||
|
|
||||||
|
/* when building as static part of libcurl */
|
||||||
|
#undef BUILDING_LIBCURL
|
||||||
|
|
||||||
|
/* Defined for build that exposes internal static functions for testing. */
|
||||||
|
#undef CARES_EXPOSE_STATICS
|
||||||
|
|
||||||
|
/* Defined for build with symbol hiding. */
|
||||||
|
#undef CARES_SYMBOL_HIDING
|
||||||
|
|
||||||
|
/* Definition to make a library symbol externally visible. */
|
||||||
|
#undef CARES_SYMBOL_SCOPE_EXTERN
|
||||||
|
|
||||||
|
/* Use resolver library to configure cares */
|
||||||
|
/* #undef CARES_USE_LIBRESOLV */
|
||||||
|
|
||||||
|
/* if a /etc/inet dir is being used */
|
||||||
|
#undef ETC_INET
|
||||||
|
|
||||||
|
/* Define to the type of arg 2 for gethostname. */
|
||||||
|
#define GETHOSTNAME_TYPE_ARG2 size_t
|
||||||
|
|
||||||
|
/* Define to the type qualifier of arg 1 for getnameinfo. */
|
||||||
|
#define GETNAMEINFO_QUAL_ARG1
|
||||||
|
|
||||||
|
/* Define to the type of arg 1 for getnameinfo. */
|
||||||
|
#define GETNAMEINFO_TYPE_ARG1 struct sockaddr *
|
||||||
|
|
||||||
|
/* Define to the type of arg 2 for getnameinfo. */
|
||||||
|
#define GETNAMEINFO_TYPE_ARG2 socklen_t
|
||||||
|
|
||||||
|
/* Define to the type of args 4 and 6 for getnameinfo. */
|
||||||
|
#define GETNAMEINFO_TYPE_ARG46 socklen_t
|
||||||
|
|
||||||
|
/* Define to the type of arg 7 for getnameinfo. */
|
||||||
|
#define GETNAMEINFO_TYPE_ARG7 int
|
||||||
|
|
||||||
|
/* Specifies the number of arguments to getservbyport_r */
|
||||||
|
#define GETSERVBYPORT_R_ARGS 6
|
||||||
|
|
||||||
|
/* Specifies the number of arguments to getservbyname_r */
|
||||||
|
#define GETSERVBYNAME_R_ARGS 6
|
||||||
|
|
||||||
|
/* Define to 1 if you have AF_INET6. */
|
||||||
|
#define HAVE_AF_INET6
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <arpa/inet.h> header file. */
|
||||||
|
#define HAVE_ARPA_INET_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <arpa/nameser_compat.h> header file. */
|
||||||
|
#define HAVE_ARPA_NAMESER_COMPAT_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <arpa/nameser.h> header file. */
|
||||||
|
#define HAVE_ARPA_NAMESER_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <assert.h> header file. */
|
||||||
|
#define HAVE_ASSERT_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the `bitncmp' function. */
|
||||||
|
/* #undef HAVE_BITNCMP */
|
||||||
|
|
||||||
|
/* Define to 1 if bool is an available type. */
|
||||||
|
#define HAVE_BOOL_T
|
||||||
|
|
||||||
|
/* Define to 1 if you have the clock_gettime function and monotonic timer. */
|
||||||
|
#define HAVE_CLOCK_GETTIME_MONOTONIC
|
||||||
|
|
||||||
|
/* Define to 1 if you have the closesocket function. */
|
||||||
|
/* #undef HAVE_CLOSESOCKET */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the CloseSocket camel case function. */
|
||||||
|
/* #undef HAVE_CLOSESOCKET_CAMEL */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the connect function. */
|
||||||
|
#define HAVE_CONNECT
|
||||||
|
|
||||||
|
/* define if the compiler supports basic C++11 syntax */
|
||||||
|
/* #undef HAVE_CXX11 */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <dlfcn.h> header file. */
|
||||||
|
#define HAVE_DLFCN_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <errno.h> header file. */
|
||||||
|
#define HAVE_ERRNO_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the fcntl function. */
|
||||||
|
#define HAVE_FCNTL
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <fcntl.h> header file. */
|
||||||
|
#define HAVE_FCNTL_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have a working fcntl O_NONBLOCK function. */
|
||||||
|
#define HAVE_FCNTL_O_NONBLOCK
|
||||||
|
|
||||||
|
/* Define to 1 if you have the freeaddrinfo function. */
|
||||||
|
#define HAVE_FREEADDRINFO
|
||||||
|
|
||||||
|
/* Define to 1 if you have a working getaddrinfo function. */
|
||||||
|
#define HAVE_GETADDRINFO
|
||||||
|
|
||||||
|
/* Define to 1 if the getaddrinfo function is threadsafe. */
|
||||||
|
/* #undef HAVE_GETADDRINFO_THREADSAFE */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the getenv function. */
|
||||||
|
#define HAVE_GETENV
|
||||||
|
|
||||||
|
/* Define to 1 if you have the gethostbyaddr function. */
|
||||||
|
#define HAVE_GETHOSTBYADDR
|
||||||
|
|
||||||
|
/* Define to 1 if you have the gethostbyname function. */
|
||||||
|
#define HAVE_GETHOSTBYNAME
|
||||||
|
|
||||||
|
/* Define to 1 if you have the gethostname function. */
|
||||||
|
#define HAVE_GETHOSTNAME
|
||||||
|
|
||||||
|
/* Define to 1 if you have the getnameinfo function. */
|
||||||
|
#define HAVE_GETNAMEINFO
|
||||||
|
|
||||||
|
/* Define to 1 if you have the getservbyport_r function. */
|
||||||
|
#define HAVE_GETSERVBYPORT_R
|
||||||
|
|
||||||
|
/* Define to 1 if you have the getservbyname_r function. */
|
||||||
|
#define HAVE_GETSERVBYNAME_R
|
||||||
|
|
||||||
|
/* Define to 1 if you have the `gettimeofday' function. */
|
||||||
|
#define HAVE_GETTIMEOFDAY
|
||||||
|
|
||||||
|
/* Define to 1 if you have the `if_indextoname' function. */
|
||||||
|
#define HAVE_IF_INDEXTONAME
|
||||||
|
|
||||||
|
/* Define to 1 if you have a IPv6 capable working inet_net_pton function. */
|
||||||
|
/* #undef HAVE_INET_NET_PTON */
|
||||||
|
|
||||||
|
/* Define to 1 if you have a IPv6 capable working inet_ntop function. */
|
||||||
|
#define HAVE_INET_NTOP
|
||||||
|
|
||||||
|
/* Define to 1 if you have a IPv6 capable working inet_pton function. */
|
||||||
|
#define HAVE_INET_PTON
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <inttypes.h> header file. */
|
||||||
|
#define HAVE_INTTYPES_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the ioctl function. */
|
||||||
|
#define HAVE_IOCTL
|
||||||
|
|
||||||
|
/* Define to 1 if you have the ioctlsocket function. */
|
||||||
|
/* #undef HAVE_IOCTLSOCKET */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the IoctlSocket camel case function. */
|
||||||
|
/* #undef HAVE_IOCTLSOCKET_CAMEL */
|
||||||
|
|
||||||
|
/* Define to 1 if you have a working IoctlSocket camel case FIONBIO function.
|
||||||
|
*/
|
||||||
|
/* #undef HAVE_IOCTLSOCKET_CAMEL_FIONBIO */
|
||||||
|
|
||||||
|
/* Define to 1 if you have a working ioctlsocket FIONBIO function. */
|
||||||
|
/* #undef HAVE_IOCTLSOCKET_FIONBIO */
|
||||||
|
|
||||||
|
/* Define to 1 if you have a working ioctl FIONBIO function. */
|
||||||
|
#define HAVE_IOCTL_FIONBIO
|
||||||
|
|
||||||
|
/* Define to 1 if you have a working ioctl SIOCGIFADDR function. */
|
||||||
|
#define HAVE_IOCTL_SIOCGIFADDR
|
||||||
|
|
||||||
|
/* Define to 1 if you have the `resolve' library (-lresolve). */
|
||||||
|
/* #undef HAVE_LIBRESOLV */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <limits.h> header file. */
|
||||||
|
#define HAVE_LIMITS_H
|
||||||
|
|
||||||
|
/* if your compiler supports LL */
|
||||||
|
#define HAVE_LL
|
||||||
|
|
||||||
|
/* Define to 1 if the compiler supports the 'long long' data type. */
|
||||||
|
#define HAVE_LONGLONG
|
||||||
|
|
||||||
|
/* Define to 1 if you have the malloc.h header file. */
|
||||||
|
#define HAVE_MALLOC_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the memory.h header file. */
|
||||||
|
#define HAVE_MEMORY_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the MSG_NOSIGNAL flag. */
|
||||||
|
#define HAVE_MSG_NOSIGNAL
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <netdb.h> header file. */
|
||||||
|
#define HAVE_NETDB_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <netinet/in.h> header file. */
|
||||||
|
#define HAVE_NETINET_IN_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <netinet/tcp.h> header file. */
|
||||||
|
#define HAVE_NETINET_TCP_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <net/if.h> header file. */
|
||||||
|
#define HAVE_NET_IF_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have PF_INET6. */
|
||||||
|
#define HAVE_PF_INET6
|
||||||
|
|
||||||
|
/* Define to 1 if you have the recv function. */
|
||||||
|
#define HAVE_RECV
|
||||||
|
|
||||||
|
/* Define to 1 if you have the recvfrom function. */
|
||||||
|
#define HAVE_RECVFROM
|
||||||
|
|
||||||
|
/* Define to 1 if you have the send function. */
|
||||||
|
#define HAVE_SEND
|
||||||
|
|
||||||
|
/* Define to 1 if you have the setsockopt function. */
|
||||||
|
#define HAVE_SETSOCKOPT
|
||||||
|
|
||||||
|
/* Define to 1 if you have a working setsockopt SO_NONBLOCK function. */
|
||||||
|
/* #undef HAVE_SETSOCKOPT_SO_NONBLOCK */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <signal.h> header file. */
|
||||||
|
#define HAVE_SIGNAL_H
|
||||||
|
|
||||||
|
/* Define to 1 if sig_atomic_t is an available typedef. */
|
||||||
|
#define HAVE_SIG_ATOMIC_T
|
||||||
|
|
||||||
|
/* Define to 1 if sig_atomic_t is already defined as volatile. */
|
||||||
|
/* #undef HAVE_SIG_ATOMIC_T_VOLATILE */
|
||||||
|
|
||||||
|
/* Define to 1 if your struct sockaddr_in6 has sin6_scope_id. */
|
||||||
|
#define HAVE_SOCKADDR_IN6_SIN6_SCOPE_ID
|
||||||
|
|
||||||
|
/* Define to 1 if you have the socket function. */
|
||||||
|
#define HAVE_SOCKET
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <socket.h> header file. */
|
||||||
|
/* #undef HAVE_SOCKET_H */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <stdbool.h> header file. */
|
||||||
|
#define HAVE_STDBOOL_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <stdint.h> header file. */
|
||||||
|
#define HAVE_STDINT_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the <stdlib.h> header file. */
|
||||||
|
#define HAVE_STDLIB_H
|
||||||
|
|
||||||
|
/* Define to 1 if you have the strcasecmp function. */
|
||||||
|
#define HAVE_STRCASECMP
|
||||||
|
|
||||||
|
/* Define to 1 if you have the strcmpi function. */
|
||||||
|
/* #undef HAVE_STRCMPI */
|
||||||
|
|
||||||
|
/* Define to 1 if you have the strdup function. */
#define HAVE_STRDUP

/* Define to 1 if you have the stricmp function. */
/* #undef HAVE_STRICMP */

/* Define to 1 if you have the <strings.h> header file. */
#define HAVE_STRINGS_H

/* Define to 1 if you have the <string.h> header file. */
#define HAVE_STRING_H

/* Define to 1 if you have the strncasecmp function. */
#define HAVE_STRNCASECMP

/* Define to 1 if you have the strncmpi function. */
/* #undef HAVE_STRNCMPI */

/* Define to 1 if you have the strnicmp function. */
/* #undef HAVE_STRNICMP */

/* Define to 1 if you have the <stropts.h> header file. */
#define HAVE_STROPTS_H

/* Define to 1 if you have struct addrinfo. */
#define HAVE_STRUCT_ADDRINFO

/* Define to 1 if you have struct in6_addr. */
#define HAVE_STRUCT_IN6_ADDR

/* Define to 1 if you have struct sockaddr_in6. */
#define HAVE_STRUCT_SOCKADDR_IN6

/* if struct sockaddr_storage is defined */
#define HAVE_STRUCT_SOCKADDR_STORAGE

/* Define to 1 if you have the timeval struct. */
#define HAVE_STRUCT_TIMEVAL

/* Define to 1 if you have the <sys/ioctl.h> header file. */
#define HAVE_SYS_IOCTL_H

/* Define to 1 if you have the <sys/param.h> header file. */
#define HAVE_SYS_PARAM_H

/* Define to 1 if you have the <sys/select.h> header file. */
#define HAVE_SYS_SELECT_H

/* Define to 1 if you have the <sys/socket.h> header file. */
#define HAVE_SYS_SOCKET_H

/* Define to 1 if you have the <sys/stat.h> header file. */
#define HAVE_SYS_STAT_H

/* Define to 1 if you have the <sys/time.h> header file. */
#define HAVE_SYS_TIME_H

/* Define to 1 if you have the <sys/types.h> header file. */
#define HAVE_SYS_TYPES_H

/* Define to 1 if you have the <sys/uio.h> header file. */
#define HAVE_SYS_UIO_H

/* Define to 1 if you have the <time.h> header file. */
#define HAVE_TIME_H

/* Define to 1 if you have the <unistd.h> header file. */
#define HAVE_UNISTD_H

/* Define to 1 if you have the windows.h header file. */
/* #undef HAVE_WINDOWS_H */

/* Define to 1 if you have the winsock2.h header file. */
/* #undef HAVE_WINSOCK2_H */

/* Define to 1 if you have the winsock.h header file. */
/* #undef HAVE_WINSOCK_H */

/* Define to 1 if you have the writev function. */
#define HAVE_WRITEV

/* Define to 1 if you have the ws2tcpip.h header file. */
/* #undef HAVE_WS2TCPIP_H */

/* Define to 1 if you have the __system_property_get function */
#define HAVE___SYSTEM_PROPERTY_GET

/* Define to 1 if you need the malloc.h header file even with stdlib.h */
/* #undef NEED_MALLOC_H */

/* Define to 1 if you need the memory.h header file even with stdlib.h */
/* #undef NEED_MEMORY_H */

/* a suitable file/device to read random data from */
#define CARES_RANDOM_FILE "/dev/urandom"

/* Define to the type qualifier pointed by arg 5 for recvfrom. */
#define RECVFROM_QUAL_ARG5

/* Define to the type of arg 1 for recvfrom. */
#define RECVFROM_TYPE_ARG1 int

/* Define to the type pointed by arg 2 for recvfrom. */
#define RECVFROM_TYPE_ARG2 void *

/* Define to 1 if the type pointed by arg 2 for recvfrom is void. */
#define RECVFROM_TYPE_ARG2_IS_VOID 0

/* Define to the type of arg 3 for recvfrom. */
#define RECVFROM_TYPE_ARG3 size_t

/* Define to the type of arg 4 for recvfrom. */
#define RECVFROM_TYPE_ARG4 int

/* Define to the type pointed by arg 5 for recvfrom. */
#define RECVFROM_TYPE_ARG5 struct sockaddr *

/* Define to 1 if the type pointed by arg 5 for recvfrom is void. */
#define RECVFROM_TYPE_ARG5_IS_VOID 0

/* Define to the type pointed by arg 6 for recvfrom. */
#define RECVFROM_TYPE_ARG6 socklen_t *

/* Define to 1 if the type pointed by arg 6 for recvfrom is void. */
#define RECVFROM_TYPE_ARG6_IS_VOID 0

/* Define to the function return type for recvfrom. */
#define RECVFROM_TYPE_RETV ssize_t

/* Define to the type of arg 1 for recv. */
#define RECV_TYPE_ARG1 int

/* Define to the type of arg 2 for recv. */
#define RECV_TYPE_ARG2 void *

/* Define to the type of arg 3 for recv. */
#define RECV_TYPE_ARG3 size_t

/* Define to the type of arg 4 for recv. */
#define RECV_TYPE_ARG4 int

/* Define to the function return type for recv. */
#define RECV_TYPE_RETV ssize_t

/* Define as the return type of signal handlers (`int' or `void'). */
#define RETSIGTYPE

/* Define to the type qualifier of arg 2 for send. */
#define SEND_QUAL_ARG2

/* Define to the type of arg 1 for send. */
#define SEND_TYPE_ARG1 int

/* Define to the type of arg 2 for send. */
#define SEND_TYPE_ARG2 void *

/* Define to the type of arg 3 for send. */
#define SEND_TYPE_ARG3 size_t

/* Define to the type of arg 4 for send. */
#define SEND_TYPE_ARG4 int

/* Define to the function return type for send. */
#define SEND_TYPE_RETV ssize_t

/* Define to 1 if you can safely include both <sys/time.h> and <time.h>. */
#define TIME_WITH_SYS_TIME

/* Define to disable non-blocking sockets. */
#undef USE_BLOCKING_SOCKETS

/* Define to avoid automatic inclusion of winsock.h */
#undef WIN32_LEAN_AND_MEAN

/* Type to use in place of in_addr_t when system does not provide it. */
#undef in_addr_t
@@ -415,7 +415,7 @@
 /*
  * Defined if strerror_r returns char * if _GNU_SOURCE is defined.
  */
-#define JEMALLOC_STRERROR_R_RETURNS_CHAR_WITH_GNU_SOURCE
+/* #undef JEMALLOC_STRERROR_R_RETURNS_CHAR_WITH_GNU_SOURCE */

 /* Performs additional safety checks when defined. */
 /* #undef JEMALLOC_OPT_SAFETY_CHECKS */
contrib/krb5 (vendored, 2 changed lines)
@@ -1 +1 @@
-Subproject commit d879821c7a4c70b0c3ad739d9951d1a2b1903df7
+Subproject commit b89e20367b074bd02dd118a6534099b21e88b3c3
@@ -440,7 +440,9 @@
 #define HAVE_STRERROR 1

 /* Define to 1 if you have the `strerror_r' function. */
+#ifndef USE_MUSL
 #define HAVE_STRERROR_R 1
+#endif

 /* Define to 1 if you have the <strings.h> header file. */
 #define HAVE_STRINGS_H 1
contrib/libcpuid (vendored, 2 changed lines)
@@ -1 +1 @@
-Subproject commit 8db3b8d2d32d22437f063ce692a1b9bb15e42d18
+Subproject commit 503083acb77edf9fbce22a05826307dff2ce96e6
@@ -63,6 +63,13 @@ target_include_directories (_libpq SYSTEM PUBLIC ${LIBPQ_SOURCE_DIR})
 target_include_directories (_libpq SYSTEM PUBLIC "${LIBPQ_SOURCE_DIR}/include")
 target_include_directories (_libpq SYSTEM PRIVATE "${LIBPQ_SOURCE_DIR}/configs")

+# NOTE: this is a dirty hack to avoid and instead pg_config.h should be shipped
+# for different OS'es like for jemalloc, not one generic for all OS'es like
+# now.
+if (OS_DARWIN OR OS_FREEBSD OR USE_MUSL)
+    target_compile_definitions(_libpq PRIVATE -DSTRERROR_R_INT=1)
+endif()
+
 target_link_libraries (_libpq PRIVATE OpenSSL::SSL)

 add_library(ch_contrib::libpq ALIAS _libpq)
contrib/librdkafka (vendored, 2 changed lines)
@@ -1 +1 @@
-Subproject commit ff32b4e9eeafd0b276f010ee969179e4e9e6d0b2
+Subproject commit 6f3b483426a8c8ec950e27e446bec175cf8b553f

contrib/llvm (vendored, 2 changed lines)
@@ -1 +1 @@
-Subproject commit 20607e61728e97c969e536644c3c0c1bb1a50672
+Subproject commit 0db5bf5bd2452cd8f1283a1fcdc04845af705bfc

contrib/replxx (vendored, 2 changed lines)
@@ -1 +1 @@
-Subproject commit 3fd0e3c9364a589447453d9906d854ebd8d385c5
+Subproject commit 5d04501f93a4fb7f0bb8b73b8f614bc986f9e25b

contrib/sentry-native (vendored, 2 changed lines)
@@ -1 +1 @@
-Subproject commit f431047ac8da13179c488018dddf1c0d0771a997
+Subproject commit ae10fb8c224c3f41571446e1ed7fd57b9e5e366b

contrib/vectorscan (vendored, 2 changed lines)
@@ -1 +1 @@
-Subproject commit 73695e419c27af7fe2a099c7aa57931cc02aea5d
+Subproject commit f6250ae3e5a3085000239313ad0689cc1e00cdc2
@@ -304,7 +304,7 @@ target_include_directories (_vectorscan SYSTEM PUBLIC "${LIBRARY_DIR}/src")
 # Please regenerate these files if you update vectorscan.

 if (ARCH_AMD64)
-    target_include_directories (_vectorscan PRIVATE x86_64)
+    target_include_directories (_vectorscan PRIVATE amd64)
 endif ()

 if (ARCH_AARCH64)
@@ -2,8 +2,10 @@ set (SOURCE_DIR ${CMAKE_SOURCE_DIR}/contrib/zlib-ng)

 add_definitions(-DZLIB_COMPAT)
 add_definitions(-DWITH_GZFILEOP)
+if(NOT ARCH_S390X)
 add_definitions(-DUNALIGNED_OK)
 add_definitions(-DUNALIGNED64_OK)
+endif()

 set (HAVE_UNISTD_H 1)
 add_definitions(-D_LARGEFILE64_SOURCE=1 -D__USE_LARGEFILE64)
@@ -46,7 +46,7 @@ RUN apt-get install binutils-riscv64-linux-gnu

 # Architecture of the image when BuildKit/buildx is used
 ARG TARGETARCH
-ARG NFPM_VERSION=2.16.0
+ARG NFPM_VERSION=2.18.1

 RUN arch=${TARGETARCH:-amd64} \
     && curl -Lo /tmp/nfpm.deb "https://github.com/goreleaser/nfpm/releases/download/v${NFPM_VERSION}/nfpm_${arch}.deb" \
@@ -67,24 +67,5 @@ ENV GOCACHE=/workdir/
 RUN mkdir /workdir && chmod 777 /workdir
 WORKDIR /workdir

-# NOTE: thread sanitizer is broken in clang-14, we have to build it with clang-15
-# https://github.com/ClickHouse/ClickHouse/pull/39450
-# https://github.com/google/sanitizers/issues/1540
-# https://github.com/google/sanitizers/issues/1552
-
-RUN export CODENAME="$(lsb_release --codename --short | tr 'A-Z' 'a-z')" \
-    && echo "deb [trusted=yes] https://apt.llvm.org/${CODENAME}/ llvm-toolchain-${CODENAME}-15 main" >> \
-        /etc/apt/sources.list.d/clang.list \
-    && apt-get update \
-    && apt-get install \
-        clang-15 \
-        llvm-15 \
-        clang-tidy-15 \
-        --yes --no-install-recommends \
-    && apt-get clean
-
-# for external_symbolizer_path
-RUN ln -s /usr/bin/llvm-symbolizer-15 /usr/bin/llvm-symbolizer
-
 COPY build.sh /
 CMD ["bash", "-c", "/build.sh 2>&1"]
@@ -339,17 +339,16 @@ if __name__ == "__main__":
     parser.add_argument(
         "--compiler",
         choices=(
-            "clang-15", # For TSAN builds, see #39450
-            "clang-14",
-            "clang-14-darwin",
-            "clang-14-darwin-aarch64",
-            "clang-14-aarch64",
-            "clang-14-ppc64le",
-            "clang-14-amd64sse2",
-            "clang-14-freebsd",
+            "clang-15",
+            "clang-15-darwin",
+            "clang-15-darwin-aarch64",
+            "clang-15-aarch64",
+            "clang-15-ppc64le",
+            "clang-15-amd64sse2",
+            "clang-15-freebsd",
             "gcc-11",
         ),
-        default="clang-14",
+        default="clang-15",
         help="a compiler to use",
     )
     parser.add_argument(
@@ -33,7 +33,7 @@ RUN arch=${TARGETARCH:-amd64} \
 # lts / testing / prestable / etc
 ARG REPO_CHANNEL="stable"
 ARG REPOSITORY="https://packages.clickhouse.com/tgz/${REPO_CHANNEL}"
-ARG VERSION="20.9.3.45"
+ARG VERSION="22.8.5.29"
 ARG PACKAGES="clickhouse-client clickhouse-server clickhouse-common-static"

 # user/group precreated explicitly with fixed uid/gid on purpose.
@@ -21,7 +21,7 @@ RUN sed -i "s|http://archive.ubuntu.com|${apt_archive}|g" /etc/apt/sources.list

 ARG REPO_CHANNEL="stable"
 ARG REPOSITORY="deb https://packages.clickhouse.com/deb ${REPO_CHANNEL} main"
-ARG VERSION=22.6.1.*
+ARG VERSION="22.8.5.29"
 ARG PACKAGES="clickhouse-client clickhouse-server clickhouse-common-static"

 # set non-empty deb_location_url url to create a docker image
@@ -38,6 +38,7 @@ FORMAT_SCHEMA_PATH="$(clickhouse extract-from-config --config-file "$CLICKHOUSE_

 # There could be many disks declared in config
 readarray -t DISKS_PATHS < <(clickhouse extract-from-config --config-file "$CLICKHOUSE_CONFIG" --key='storage_configuration.disks.*.path' || true)
+readarray -t DISKS_METADATA_PATHS < <(clickhouse extract-from-config --config-file "$CLICKHOUSE_CONFIG" --key='storage_configuration.disks.*.metadata_path' || true)

 CLICKHOUSE_USER="${CLICKHOUSE_USER:-default}"
 CLICKHOUSE_PASSWORD="${CLICKHOUSE_PASSWORD:-}"
@@ -50,7 +51,8 @@ for dir in "$DATA_DIR" \
   "$TMP_DIR" \
   "$USER_PATH" \
   "$FORMAT_SCHEMA_PATH" \
-  "${DISKS_PATHS[@]}"
+  "${DISKS_PATHS[@]}" \
+  "${DISKS_METADATA_PATHS[@]}"
 do
     # check if variable not empty
     [ -z "$dir" ] && continue
@@ -16,11 +16,10 @@ RUN apt-get update \
 # and MEMORY_LIMIT_EXCEEDED exceptions in Functional tests (total memory limit in Functional tests is ~55.24 GiB).
 # TSAN will flush shadow memory when reaching this limit.
 # It may cause false-negatives, but it's better than OOM.
-RUN echo "TSAN_OPTIONS='verbosity=1000 halt_on_error=1 history_size=7 memory_limit_mb=46080'" >> /etc/environment; \
-    echo "UBSAN_OPTIONS='print_stacktrace=1'" >> /etc/environment; \
-    echo "MSAN_OPTIONS='abort_on_error=1 poison_in_dtor=1'" >> /etc/environment; \
-    echo "LSAN_OPTIONS='suppressions=/usr/share/clickhouse-test/config/lsan_suppressions.txt'" >> /etc/environment; \
-    ln -s /usr/lib/llvm-${LLVM_VERSION}/bin/llvm-symbolizer /usr/bin/llvm-symbolizer;
+RUN echo "TSAN_OPTIONS='verbosity=1000 halt_on_error=1 history_size=7 memory_limit_mb=46080'" >> /etc/environment
+RUN echo "UBSAN_OPTIONS='print_stacktrace=1'" >> /etc/environment
+RUN echo "MSAN_OPTIONS='abort_on_error=1 poison_in_dtor=1'" >> /etc/environment
+RUN echo "LSAN_OPTIONS='suppressions=/usr/share/clickhouse-test/config/lsan_suppressions.txt'" >> /etc/environment
 # Sanitizer options for current shell (not current, but the one that will be spawned on "docker run")
 # (but w/o verbosity for TSAN, otherwise test.reference will not match)
 ENV TSAN_OPTIONS='halt_on_error=1 history_size=7 memory_limit_mb=46080'
@@ -8,16 +8,41 @@ FROM clickhouse/binary-builder:$FROM_TAG
 ARG apt_archive="http://archive.ubuntu.com"
 RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list

-RUN apt-get update && apt-get --yes --allow-unauthenticated install clang-14 libllvm14 libclang-14-dev libmlir-14-dev
+RUN apt-get update && apt-get --yes --allow-unauthenticated install libclang-${LLVM_VERSION}-dev libmlir-${LLVM_VERSION}-dev

+# libclang-15-dev does not contain proper symlink:
+#
+# This is what cmake will search for:
+#
+# # readlink -f /usr/lib/llvm-15/lib/libclang-15.so.1
+# /usr/lib/x86_64-linux-gnu/libclang-15.so.1
+#
+# This is what exists:
+#
+# # ls -l /usr/lib/x86_64-linux-gnu/libclang-15*
+# lrwxrwxrwx 1 root root 16 Sep 5 13:31 /usr/lib/x86_64-linux-gnu/libclang-15.so -> libclang-15.so.1
+# lrwxrwxrwx 1 root root 21 Sep 5 13:31 /usr/lib/x86_64-linux-gnu/libclang-15.so.15 -> libclang-15.so.15.0.0
+# -rw-r--r-- 1 root root 31835760 Sep 5 13:31 /usr/lib/x86_64-linux-gnu/libclang-15.so.15.0.0
+#
+ARG TARGETARCH
+RUN arch=${TARGETARCH:-amd64} \
+    && case $arch in \
+        amd64) rarch=x86_64 ;; \
+        arm64) rarch=aarch64 ;; \
+        *) exit 1 ;; \
+    esac \
+    && ln -rsf /usr/lib/$rarch-linux-gnu/libclang-15.so.15 /usr/lib/$rarch-linux-gnu/libclang-15.so.1
+
 # repo versions doesn't work correctly with C++17
 # also we push reports to s3, so we add index.html to subfolder urls
 # https://github.com/ClickHouse-Extras/woboq_codebrowser/commit/37e15eaf377b920acb0b48dbe82471be9203f76b
 # TODO: remove branch in a few weeks after merge, e.g. in May or June 2022
-RUN git clone https://github.com/ClickHouse-Extras/woboq_codebrowser --branch llvm-14 \
+#
+# FIXME: update location of a repo
+RUN git clone https://github.com/azat/woboq_codebrowser --branch llvm-15 \
     && cd woboq_codebrowser \
-    && cmake . -DCMAKE_BUILD_TYPE=Release -DCMAKE_CXX_COMPILER=clang\+\+-14 -DCMAKE_C_COMPILER=clang-14 \
-    && make -j \
+    && cmake . -G Ninja -DCMAKE_BUILD_TYPE=Release -DCMAKE_CXX_COMPILER=clang\+\+-${LLVM_VERSION} -DCMAKE_C_COMPILER=clang-${LLVM_VERSION} \
+    && ninja \
     && cd .. \
     && rm -rf woboq_codebrowser
@@ -32,7 +57,7 @@ ENV SHA=nosha
 ENV DATA="https://s3.amazonaws.com/clickhouse-test-reports/codebrowser/data"

 CMD mkdir -p $BUILD_DIRECTORY && cd $BUILD_DIRECTORY && \
-    cmake $SOURCE_DIRECTORY -DCMAKE_CXX_COMPILER=/usr/bin/clang\+\+-14 -DCMAKE_C_COMPILER=/usr/bin/clang-14 -DCMAKE_EXPORT_COMPILE_COMMANDS=ON -DENABLE_EMBEDDED_COMPILER=0 -DENABLE_S3=0 && \
+    cmake $SOURCE_DIRECTORY -DCMAKE_CXX_COMPILER=/usr/bin/clang\+\+-${LLVM_VERSION} -DCMAKE_C_COMPILER=/usr/bin/clang-${LLVM_VERSION} -DCMAKE_EXPORT_COMPILE_COMMANDS=ON -DENABLE_EMBEDDED_COMPILER=0 -DENABLE_S3=0 && \
     mkdir -p $HTML_RESULT_DIRECTORY && \
     $CODEGEN -b $BUILD_DIRECTORY -a -o $HTML_RESULT_DIRECTORY -p ClickHouse:$SOURCE_DIRECTORY:$SHA -d $DATA | ts '%Y-%m-%d %H:%M:%S' && \
     cp -r $STATIC_DATA $HTML_RESULT_DIRECTORY/ &&\
@@ -19,7 +19,7 @@ stage=${stage:-}
 script_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
 echo "$script_dir"
 repo_dir=ch
-BINARY_TO_DOWNLOAD=${BINARY_TO_DOWNLOAD:="clang-14_debug_none_unsplitted_disable_False_binary"}
+BINARY_TO_DOWNLOAD=${BINARY_TO_DOWNLOAD:="clang-15_debug_none_unsplitted_disable_False_binary"}
 BINARY_URL_TO_DOWNLOAD=${BINARY_URL_TO_DOWNLOAD:="https://clickhouse-builds.s3.amazonaws.com/$PR_TO_TEST/$SHA_TO_TEST/clickhouse_build_check/$BINARY_TO_DOWNLOAD/clickhouse"}

 function clone
@@ -28,10 +28,9 @@ done
 set -e

 # cleanup for retry run if volume is not recreated
-# shellcheck disable=SC2046
 {
-    docker ps -aq | xargs -r docker kill || true
-    docker ps -aq | xargs -r docker rm || true
+    docker ps --all --quiet | xargs --no-run-if-empty docker kill || true
+    docker ps --all --quiet | xargs --no-run-if-empty docker rm || true
 }

 echo "Start tests"
@@ -2,7 +2,7 @@
 set -euo pipefail


-CLICKHOUSE_PACKAGE=${CLICKHOUSE_PACKAGE:="https://clickhouse-builds.s3.amazonaws.com/$PR_TO_TEST/$SHA_TO_TEST/clickhouse_build_check/clang-14_relwithdebuginfo_none_unsplitted_disable_False_binary/clickhouse"}
+CLICKHOUSE_PACKAGE=${CLICKHOUSE_PACKAGE:="https://clickhouse-builds.s3.amazonaws.com/$PR_TO_TEST/$SHA_TO_TEST/clickhouse_build_check/clang-15_relwithdebuginfo_none_unsplitted_disable_False_binary/clickhouse"}
 CLICKHOUSE_REPO_PATH=${CLICKHOUSE_REPO_PATH:=""}

@@ -61,7 +61,7 @@ function configure
 cp -rv right/config left ||:

 # Start a temporary server to rename the tables
-while pkill clickhouse-serv; do echo . ; sleep 1 ; done
+while pkill -f clickhouse-serv ; do echo . ; sleep 1 ; done
 echo all killed

 set -m # Spawn temporary in its own process groups
@@ -88,7 +88,7 @@ function configure
 clickhouse-client --port $LEFT_SERVER_PORT --query "create database test" ||:
 clickhouse-client --port $LEFT_SERVER_PORT --query "rename table datasets.hits_v1 to test.hits" ||:

-while pkill clickhouse-serv; do echo . ; sleep 1 ; done
+while pkill -f clickhouse-serv ; do echo . ; sleep 1 ; done
 echo all killed

 # Make copies of the original db for both servers. Use hardlinks instead
@@ -106,7 +106,7 @@ function configure

 function restart
 {
-while pkill clickhouse-serv; do echo . ; sleep 1 ; done
+while pkill -f clickhouse-serv ; do echo . ; sleep 1 ; done
 echo all killed

 # Change the jemalloc settings here.
@@ -1400,7 +1400,7 @@ case "$stage" in
 while env kill -- -$watchdog_pid ; do sleep 1; done

 # Stop the servers to free memory for the subsequent query analysis.
-while pkill clickhouse-serv; do echo . ; sleep 1 ; done
+while pkill -f clickhouse-serv ; do echo . ; sleep 1 ; done
 echo Servers stopped.
 ;&
 "analyze_queries")
@@ -179,17 +179,17 @@ pigz < /var/log/clickhouse-server/clickhouse-server.log > /test_output/clickhous
 # for files >64MB, we want this files to be compressed explicitly
 for table in query_log zookeeper_log trace_log transactions_info_log
 do
-    clickhouse-local --path /var/lib/clickhouse/ -q "select * from system.$table format TSVWithNamesAndTypes" | pigz > /test_output/$table.tsv.gz ||:
+    clickhouse-local --path /var/lib/clickhouse/ --only-system-tables -q "select * from system.$table format TSVWithNamesAndTypes" | pigz > /test_output/$table.tsv.gz ||:
     if [[ -n "$USE_DATABASE_REPLICATED" ]] && [[ "$USE_DATABASE_REPLICATED" -eq 1 ]]; then
-        clickhouse-local --path /var/lib/clickhouse1/ -q "select * from system.$table format TSVWithNamesAndTypes" | pigz > /test_output/$table.1.tsv.gz ||:
-        clickhouse-local --path /var/lib/clickhouse2/ -q "select * from system.$table format TSVWithNamesAndTypes" | pigz > /test_output/$table.2.tsv.gz ||:
+        clickhouse-local --path /var/lib/clickhouse1/ --only-system-tables -q "select * from system.$table format TSVWithNamesAndTypes" | pigz > /test_output/$table.1.tsv.gz ||:
+        clickhouse-local --path /var/lib/clickhouse2/ --only-system-tables -q "select * from system.$table format TSVWithNamesAndTypes" | pigz > /test_output/$table.2.tsv.gz ||:
     fi
 done

 # Also export trace log in flamegraph-friendly format.
 for trace_type in CPU Memory Real
 do
-    clickhouse-local --path /var/lib/clickhouse/ -q "
+    clickhouse-local --path /var/lib/clickhouse/ --only-system-tables -q "
         select
             arrayStringConcat((arrayMap(x -> concat(splitByChar('/', addressToLine(x))[-1], '#', demangle(addressToSymbol(x)) ), trace)), ';') AS stack,
             count(*) AS samples
@@ -370,6 +370,7 @@ else

     # Avoid "Setting s3_check_objects_after_upload is neither a builtin setting..."
     rm -f /etc/clickhouse-server/users.d/enable_blobs_check.xml ||:
+    rm -f /etc/clickhouse-server/users.d/marks.xml ||:

     # Remove s3 related configs to avoid "there is no disk type `cache`"
     rm -f /etc/clickhouse-server/config.d/storage_conf.xml ||:
@@ -5,7 +5,7 @@ FROM ubuntu:20.04
 ARG apt_archive="http://archive.ubuntu.com"
 RUN sed -i "s|http://archive.ubuntu.com|$apt_archive|g" /etc/apt/sources.list

-ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=14
+ENV DEBIAN_FRONTEND=noninteractive LLVM_VERSION=15

 RUN apt-get update \
     && apt-get install \
@@ -56,6 +56,8 @@ RUN apt-get update \

 # This symlink required by gcc to find lld compiler
 RUN ln -s /usr/bin/lld-${LLVM_VERSION} /usr/bin/ld.lld
+# for external_symbolizer_path
+RUN ln -s /usr/bin/llvm-symbolizer-${LLVM_VERSION} /usr/bin/llvm-symbolizer

 ARG CCACHE_VERSION=4.6.1
 RUN mkdir /tmp/ccache \
docs/changelogs/v22.6.8.35-stable.md (new file, 34 lines)
@@ -0,0 +1,34 @@
---
sidebar_position: 1
sidebar_label: 2022
---

# 2022 Changelog

### ClickHouse release v22.6.8.35-stable (b91dc59a565) FIXME as compared to v22.6.7.7-stable (8eae2af3b9a)

#### New Feature
* Backported in [#40868](https://github.com/ClickHouse/ClickHouse/issues/40868): Add setting to disable limit on kafka_num_consumers. Closes [#40331](https://github.com/ClickHouse/ClickHouse/issues/40331). [#40670](https://github.com/ClickHouse/ClickHouse/pull/40670) ([Kruglov Pavel](https://github.com/Avogar)).

#### Bug Fix
* Backported in [#41274](https://github.com/ClickHouse/ClickHouse/issues/41274): Fix memory safety issues with functions `encrypt` and `contingency` if Array of Nullable is used as an argument. This fixes [#41004](https://github.com/ClickHouse/ClickHouse/issues/41004). [#40195](https://github.com/ClickHouse/ClickHouse/pull/40195) ([Alexey Milovidov](https://github.com/alexey-milovidov)).

#### Bug Fix (user-visible misbehavior in official stable or prestable release)

* Backported in [#41282](https://github.com/ClickHouse/ClickHouse/issues/41282): Fix unused unknown columns introduced by WITH statement. This fixes [#37812](https://github.com/ClickHouse/ClickHouse/issues/37812) . [#39131](https://github.com/ClickHouse/ClickHouse/pull/39131) ([Amos Bird](https://github.com/amosbird)).
* Backported in [#40905](https://github.com/ClickHouse/ClickHouse/issues/40905): Fix potential deadlock in WriteBufferFromS3 during task scheduling failure. [#40070](https://github.com/ClickHouse/ClickHouse/pull/40070) ([Maksim Kita](https://github.com/kitaisreal)).
* Backported in [#40864](https://github.com/ClickHouse/ClickHouse/issues/40864): - Fix crash while parsing values of type `Object` that contains arrays of variadic dimension. [#40483](https://github.com/ClickHouse/ClickHouse/pull/40483) ([Duc Canh Le](https://github.com/canhld94)).
* Backported in [#40803](https://github.com/ClickHouse/ClickHouse/issues/40803): During insertion of a new query to the `ProcessList` allocations happen. If we reach the memory limit during these allocations we can not use `OvercommitTracker`, because `ProcessList::mutex` is already acquired. Fixes [#40611](https://github.com/ClickHouse/ClickHouse/issues/40611). [#40677](https://github.com/ClickHouse/ClickHouse/pull/40677) ([Dmitry Novik](https://github.com/novikd)).
* Backported in [#40891](https://github.com/ClickHouse/ClickHouse/issues/40891): Fix memory leak while pushing to MVs w/o query context (from Kafka/...). [#40732](https://github.com/ClickHouse/ClickHouse/pull/40732) ([Azat Khuzhin](https://github.com/azat)).
* Backported in [#41133](https://github.com/ClickHouse/ClickHouse/issues/41133): Fix access rights for `DESCRIBE TABLE url()` and some other `DESCRIBE TABLE <table_function>()`. [#40975](https://github.com/ClickHouse/ClickHouse/pull/40975) ([Vitaly Baranov](https://github.com/vitlibar)).
* Backported in [#41360](https://github.com/ClickHouse/ClickHouse/issues/41360): Fix incorrect logical error `Expected relative path` in disk object storage. Related to [#41246](https://github.com/ClickHouse/ClickHouse/issues/41246). [#41297](https://github.com/ClickHouse/ClickHouse/pull/41297) ([Kseniia Sumarokova](https://github.com/kssenii)).
* Backported in [#41357](https://github.com/ClickHouse/ClickHouse/issues/41357): Add column type check before UUID insertion in MsgPack format. [#41309](https://github.com/ClickHouse/ClickHouse/pull/41309) ([Kruglov Pavel](https://github.com/Avogar)).

#### NOT FOR CHANGELOG / INSIGNIFICANT

* use ROBOT_CLICKHOUSE_COMMIT_TOKEN for create-pull-request [#40067](https://github.com/ClickHouse/ClickHouse/pull/40067) ([Yakov Olkhovskiy](https://github.com/yakov-olkhovskiy)).
* use input token instead of env var [#40421](https://github.com/ClickHouse/ClickHouse/pull/40421) ([Yakov Olkhovskiy](https://github.com/yakov-olkhovskiy)).
* Migrate artifactory [#40831](https://github.com/ClickHouse/ClickHouse/pull/40831) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
* Docker server version [#41256](https://github.com/ClickHouse/ClickHouse/pull/41256) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).
* Increase open files limit [#41345](https://github.com/ClickHouse/ClickHouse/pull/41345) ([Eugene Konkov](https://github.com/ekonkov)).
docs/changelogs/v22.8.5.29-lts.md (new file, 34 lines)
@@ -0,0 +1,34 @@
---
sidebar_position: 1
sidebar_label: 2022
---

# 2022 Changelog

### ClickHouse release v22.8.5.29-lts (74ffb843807) FIXME as compared to v22.8.4.7-lts (baad27bcd2f)

#### New Feature
* Backported in [#40870](https://github.com/ClickHouse/ClickHouse/issues/40870): Add setting to disable limit on kafka_num_consumers. Closes [#40331](https://github.com/ClickHouse/ClickHouse/issues/40331). [#40670](https://github.com/ClickHouse/ClickHouse/pull/40670) ([Kruglov Pavel](https://github.com/Avogar)).

#### Improvement
* Backported in [#40817](https://github.com/ClickHouse/ClickHouse/issues/40817): The setting `show_addresses_in_stack_traces` was accidentally disabled in default `config.xml`. It's removed from the config now, so the setting is enabled by default. [#40749](https://github.com/ClickHouse/ClickHouse/pull/40749) ([Alexander Tokmakov](https://github.com/tavplubix)).
* Backported in [#40944](https://github.com/ClickHouse/ClickHouse/issues/40944): Fix issue with passing MySQL timeouts for MySQL database engine and MySQL table function. Closes [#34168](https://github.com/ClickHouse/ClickHouse/issues/34168)?notification_referrer_id=NT_kwDOAzsV57MzMDMxNjAzNTY5OjU0MjAzODc5. [#40751](https://github.com/ClickHouse/ClickHouse/pull/40751) ([Kseniia Sumarokova](https://github.com/kssenii)).

#### Build/Testing/Packaging Improvement
* Backported in [#41157](https://github.com/ClickHouse/ClickHouse/issues/41157): Add macOS binaries to GH release assets, it fixes [#37718](https://github.com/ClickHouse/ClickHouse/issues/37718). [#41088](https://github.com/ClickHouse/ClickHouse/pull/41088) ([Mikhail f. Shiryaev](https://github.com/Felixoid)).

#### Bug Fix (user-visible misbehavior in official stable or prestable release)

* Backported in [#40866](https://github.com/ClickHouse/ClickHouse/issues/40866): - Fix crash while parsing values of type `Object` that contains arrays of variadic dimension. [#40483](https://github.com/ClickHouse/ClickHouse/pull/40483) ([Duc Canh Le](https://github.com/canhld94)).
* Backported in [#40805](https://github.com/ClickHouse/ClickHouse/issues/40805): During insertion of a new query to the `ProcessList` allocations happen. If we reach the memory limit during these allocations we can not use `OvercommitTracker`, because `ProcessList::mutex` is already acquired. Fixes [#40611](https://github.com/ClickHouse/ClickHouse/issues/40611). [#40677](https://github.com/ClickHouse/ClickHouse/pull/40677) ([Dmitry Novik](https://github.com/novikd)).
* Backported in [#40777](https://github.com/ClickHouse/ClickHouse/issues/40777): Fix memory leak while pushing to MVs w/o query context (from Kafka/...). [#40732](https://github.com/ClickHouse/ClickHouse/pull/40732) ([Azat Khuzhin](https://github.com/azat)).
* Backported in [#41135](https://github.com/ClickHouse/ClickHouse/issues/41135): Fix access rights for `DESCRIBE TABLE url()` and some other `DESCRIBE TABLE <table_function>()`. [#40975](https://github.com/ClickHouse/ClickHouse/pull/40975) ([Vitaly Baranov](https://github.com/vitlibar)).
* Backported in [#41242](https://github.com/ClickHouse/ClickHouse/issues/41242): Fixed "possible deadlock avoided" error on automatic conversion of database engine from Ordinary to Atomic. [#41146](https://github.com/ClickHouse/ClickHouse/pull/41146) ([Alexander Tokmakov](https://github.com/tavplubix)).
* Backported in [#41234](https://github.com/ClickHouse/ClickHouse/issues/41234): Fix background clean up of broken detached parts. [#41190](https://github.com/ClickHouse/ClickHouse/pull/41190) ([Kseniia Sumarokova](https://github.com/kssenii)).

#### NOT FOR CHANGELOG / INSIGNIFICANT

* use ROBOT_CLICKHOUSE_COMMIT_TOKEN for create-pull-request [#40067](https://github.com/ClickHouse/ClickHouse/pull/40067) ([Yakov Olkhovskiy](https://github.com/yakov-olkhovskiy)).
* use input token instead of env var [#40421](https://github.com/ClickHouse/ClickHouse/pull/40421) ([Yakov Olkhovskiy](https://github.com/yakov-olkhovskiy)).
* CaresPTRResolver small safety improvement [#40890](https://github.com/ClickHouse/ClickHouse/pull/40890) ([Arthur Passos](https://github.com/arthurpassos)).
@@ -8,4 +8,4 @@ sidebar_label: MaterializedView

 Used for implementing materialized views (for more information, see [CREATE VIEW](../../../sql-reference/statements/create/view.md#materialized)). For storing data, it uses a different engine that was specified when creating the view. When reading from a table, it just uses that engine.

-[Original article](https://clickhouse.com/docs/en/engines/table-engines/special/materializedview/) <!--hide-->
+[Original article](https://clickhouse.com/docs/en/sql-reference/statements/create/view#materialized-view) <!--hide-->
@@ -2,10 +2,9 @@
 slug: /en/interfaces/formats
 sidebar_position: 21
 sidebar_label: Input and Output Formats
+title: Formats for Input and Output Data
 ---

-# Formats for Input and Output Data
-
 ClickHouse can accept and return data in various formats. A format supported for input can be used to parse the data provided to `INSERT`s, to perform `SELECT`s from a file-backed table such as File, URL or HDFS, or to read an external dictionary. A format supported for output can be used to arrange the
 results of a `SELECT`, and to perform `INSERT`s into a file-backed table.

@@ -103,6 +103,7 @@ ClickHouse, Inc. does **not** maintain the tools and libraries listed below and
     - [ClickHouse.Client](https://github.com/DarkWanderer/ClickHouse.Client)
     - [ClickHouse.Net](https://github.com/ilyabreev/ClickHouse.Net)
     - [ClickHouse.Net.Migrations](https://github.com/ilyabreev/ClickHouse.Net.Migrations)
+    - [Linq To DB](https://github.com/linq2db/linq2db)
 - Elixir
     - [Ecto](https://github.com/elixir-ecto/ecto)
     - [clickhouse_ecto](https://github.com/appodeal/clickhouse_ecto)
@@ -1,8 +1,8 @@
 ---
 slug: /en/operations/opentelemetry
 sidebar_position: 62
-sidebar_label: OpenTelemetry Support
-title: "[experimental] OpenTelemetry Support"
+sidebar_label: Tracing ClickHouse with OpenTelemetry
+title: "[experimental] Tracing ClickHouse with OpenTelemetry"
 ---

 [OpenTelemetry](https://opentelemetry.io/) is an open standard for collecting traces and metrics from the distributed application. ClickHouse has some support for OpenTelemetry.
@@ -26,13 +26,33 @@ The constraints are defined as the following:
         <setting_name_4>
           <readonly/>
         </setting_name_4>
+        <setting_name_5>
+          <min>lower_boundary</min>
+          <max>upper_boundary</max>
+          <changeable_in_readonly/>
+        </setting_name_5>
       </constraints>
     </user_name>
   </profiles>
 ```

 If the user tries to violate the constraints an exception is thrown and the setting isn’t changed.
-There are supported three types of constraints: `min`, `max`, `readonly`. The `min` and `max` constraints specify upper and lower boundaries for a numeric setting and can be used in combination. The `readonly` constraint specifies that the user cannot change the corresponding setting at all.
+There are a few supported types of constraints: `min`, `max`, `readonly` (with alias `const`) and `changeable_in_readonly`. The `min` and `max` constraints specify upper and lower boundaries for a numeric setting and can be used in combination. The `readonly` or `const` constraint specifies that the user cannot change the corresponding setting at all. The `changeable_in_readonly` constraint type allows the user to change the setting within the `min`/`max` range even if the `readonly` setting is set to 1; otherwise settings are not allowed to be changed in `readonly=1` mode. Note that `changeable_in_readonly` is supported only if `settings_constraints_replace_previous` is enabled:
+
+``` xml
+<access_control_improvements>
+  <settings_constraints_replace_previous>true</settings_constraints_replace_previous>
+</access_control_improvements>
+```
+
+If there are multiple profiles active for a user, then constraints are merged. The merge process depends on `settings_constraints_replace_previous`:
+- **true** (recommended): constraints for the same setting are replaced during merge, such that the last constraint is used and all previous ones are ignored, including fields that are not set in the new constraint.
+- **false** (default): constraints for the same setting are merged in a way that every unset type of constraint is taken from the previous profile and every set type of constraint is replaced by the value from the new profile.
+
+Read-only mode is enabled by the `readonly` setting (not to be confused with the `readonly` constraint type):
+- `readonly=0`: No read-only restrictions.
+- `readonly=1`: Only read queries are allowed and settings cannot be changed unless `changeable_in_readonly` is set.
+- `readonly=2`: Only read queries are allowed, but settings can be changed, except for the `readonly` setting itself.
+
 **Example:** Let `users.xml` includes lines:

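The new `changeable_in_readonly` constraint can be exercised from SQL once such a profile is in place. A minimal sketch, assuming `max_memory_usage` is constrained with `<min>`, `<max>` and `<changeable_in_readonly/>` as in the `<setting_name_5>` example above (the concrete setting and boundary values are illustrative, not part of this change):

```sql
SET readonly = 1;

-- Allowed: the setting carries <changeable_in_readonly/> and the new value
-- stays inside the declared <min>/<max> range.
SET max_memory_usage = 10000000000;

-- Rejected: an ordinary setting without that constraint cannot be changed
-- while readonly = 1.
SET max_threads = 8;
```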
@@ -37,8 +37,7 @@ After setting `readonly = 1`, the user can’t change `readonly` and `allow_ddl`

 When using the `GET` method in the [HTTP interface](../../interfaces/http.md), `readonly = 1` is set automatically. To modify data, use the `POST` method.

-Setting `readonly = 1` prohibit the user from changing all the settings. There is a way to prohibit the user
-from changing only specific settings, for details see [constraints on settings](../../operations/settings/constraints-on-settings.md).
+Setting `readonly = 1` prohibits the user from changing all the settings. There is a way to prohibit the user from changing only specific settings. There is also a way to allow changing only specific settings under `readonly = 1` restrictions. For details see [constraints on settings](../../operations/settings/constraints-on-settings.md).

 Default value: 0

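A short sketch of the behaviour described in this hunk, for a session that starts with `readonly = 0` (the setting names are illustrative):

```sql
SET readonly = 1;
SELECT count() FROM system.tables;  -- read queries keep working
SET max_threads = 4;                -- rejected: settings cannot be changed under readonly = 1,
                                    -- unless the setting is constrained with changeable_in_readonly
```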
@@ -3145,6 +3145,17 @@ Result:
 └─────┴─────┴───────┘
 ```

+## enable_extended_results_for_datetime_functions {#enable-extended-results-for-datetime-functions}
+
+Enables or disables returning results of type `Date32` with extended range (compared to type `Date`) for functions [toStartOfYear](../../sql-reference/functions/date-time-functions.md#tostartofyear), [toStartOfISOYear](../../sql-reference/functions/date-time-functions.md#tostartofisoyear), [toStartOfQuarter](../../sql-reference/functions/date-time-functions.md#tostartofquarter), [toStartOfMonth](../../sql-reference/functions/date-time-functions.md#tostartofmonth), [toStartOfWeek](../../sql-reference/functions/date-time-functions.md#tostartofweek), [toMonday](../../sql-reference/functions/date-time-functions.md#tomonday) and [toLastDayOfMonth](../../sql-reference/functions/date-time-functions.md#tolastdayofmonth).
+
+Possible values:
+
+- 0 — Functions return `Date` for all types of arguments.
+- 1 — Functions return `Date32` for `Date32` or `DateTime64` arguments and `Date` otherwise.
+
+Default value: `0`.
+
 ## optimize_move_to_prewhere {#optimize_move_to_prewhere}

 Enables or disables automatic [PREWHERE](../../sql-reference/statements/select/prewhere.md) optimization in [SELECT](../../sql-reference/statements/select/index.md) queries.
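A hedged illustration of the new setting, using `toDate32` to build an argument outside the normal `Date` range (the exact dates are only an example):

```sql
-- Default (0): the result type is Date, so a pre-1970 argument is clamped.
SELECT toStartOfMonth(toDate32('1955-06-15'))
SETTINGS enable_extended_results_for_datetime_functions = 0;  -- 1970-01-01

-- 1: the result type is Date32 and the extended range is preserved.
SELECT toStartOfMonth(toDate32('1955-06-15'))
SETTINGS enable_extended_results_for_datetime_functions = 1;  -- 1955-06-01
```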
@@ -3422,7 +3433,7 @@ Possible values:
 - 0 — Disabled.
 - 1 — Enabled.

-Default value: 0.
+Default value: 1.

 ## input_format_with_names_use_header {#input_format_with_names_use_header}

@@ -134,6 +134,13 @@ Example of configuration for versions later or equal to 22.8:
             <max_size>10000000</max_size>
         </cache>
     </disks>
+    <policies>
+        <volumes>
+            <main>
+                <disk>cache</disk>
+            </main>
+        </volumes>
+    </policies>
 </storage_configuration>
 ```

@@ -148,9 +155,16 @@ Example of configuration for versions earlier than 22.8:
             <endpoint>...</endpoint>
             ... s3 configuration ...
             <data_cache_enabled>1</data_cache_enabled>
-            <data_cache_size>10000000</data_cache_size>
+            <data_cache_max_size>10000000</data_cache_max_size>
         </s3>
     </disks>
+    <policies>
+        <volumes>
+            <main>
+                <disk>s3</disk>
+            </main>
+        </volumes>
+    </policies>
 </storage_configuration>
 ```

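A usage sketch for the `<policies>` blocks added above: a MergeTree table selects the cached disk through a named storage policy. The policy name `s3_cache` here is an assumption for illustration; it is not defined in the snippets above.

```sql
CREATE TABLE cached_table
(
    id UInt64,
    value String
)
ENGINE = MergeTree
ORDER BY id
SETTINGS storage_policy = 's3_cache';
```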
@@ -166,7 +180,7 @@ Cache **configuration settings**:

 - `enable_cache_hits_threshold` - a number, which defines how many times some data needs to be read before it will be cached. Default: `0`, e.g. the data is cached at the first attempt to read it.

-- `do_not_evict_index_and_mark_files` - do not evict small frequently used files according to cache policy. Default: `true`.
+- `do_not_evict_index_and_mark_files` - do not evict small frequently used files according to cache policy. Default: `false`. This setting was added in version 22.8. If you used filesystem cache before this version, then it will not work on versions starting from 22.8 if this setting is set to `true`. If you want to use this setting, clear old cache created before version 22.8 before upgrading.

 - `max_file_segment_size` - a maximum size of a single cache file. Default: `104857600` (100 Mb).

@@ -2,10 +2,9 @@
 slug: /en/operations/troubleshooting
 sidebar_position: 46
 sidebar_label: Troubleshooting
+title: Troubleshooting
 ---

-# Troubleshooting
-
 - [Installation](#troubleshooting-installation-errors)
 - [Connecting to the server](#troubleshooting-accepts-no-connections)
 - [Query processing](#troubleshooting-does-not-process-queries)
@@ -7,13 +7,8 @@ title: "Geo Data Types"

 ClickHouse supports data types for representing geographical objects — locations, lands, etc.

-:::warning
-Currently geo data types are an experimental feature. To work with them you must set `allow_experimental_geo_types = 1`.
-:::
-
 **See Also**
 - [Representing simple geographical features](https://en.wikipedia.org/wiki/GeoJSON).
-- [allow_experimental_geo_types](../../operations/settings/settings.md#allow-experimental-geo-types) setting.

 ## Point

@@ -24,7 +19,6 @@ Currently geo data types are an experimental feature. To work with them you must
 Query:

 ```sql
-SET allow_experimental_geo_types = 1;
 CREATE TABLE geo_point (p Point) ENGINE = Memory();
 INSERT INTO geo_point VALUES((10, 10));
 SELECT p, toTypeName(p) FROM geo_point;
@@ -46,7 +40,6 @@ Result:
 Query:

 ```sql
-SET allow_experimental_geo_types = 1;
 CREATE TABLE geo_ring (r Ring) ENGINE = Memory();
 INSERT INTO geo_ring VALUES([(0, 0), (10, 0), (10, 10), (0, 10)]);
 SELECT r, toTypeName(r) FROM geo_ring;
@@ -68,7 +61,6 @@ Result:
 This is a polygon with one hole:

 ```sql
-SET allow_experimental_geo_types = 1;
 CREATE TABLE geo_polygon (pg Polygon) ENGINE = Memory();
 INSERT INTO geo_polygon VALUES([[(20, 20), (50, 20), (50, 50), (20, 50)], [(30, 30), (50, 50), (50, 30)]]);
 SELECT pg, toTypeName(pg) FROM geo_polygon;
@@ -91,7 +83,6 @@ Result:
 This multipolygon consists of two separate polygons — the first one without holes, and the second with one hole:

 ```sql
-SET allow_experimental_geo_types = 1;
 CREATE TABLE geo_multipolygon (mpg MultiPolygon) ENGINE = Memory();
 INSERT INTO geo_multipolygon VALUES([[[(0, 0), (10, 0), (10, 10), (0, 10)]], [[(20, 20), (50, 20), (50, 50), (20, 50)],[(30, 30), (50, 50), (50, 30)]]]);
 SELECT mpg, toTypeName(mpg) FROM geo_multipolygon;
docs/en/sql-reference/formats.mdx (new file, 10 lines)
@@ -0,0 +1,10 @@
---
slug: /en/sql-reference/formats
sidebar_position: 50
sidebar_label: Input and Output Formats
title: Formats for Input and Output Data
---

import Content from '@site/docs/en/interfaces/formats.md';

<Content />
@ -268,13 +268,15 @@ Result:
```

:::note
The return type of `toStartOf*`, `toLastDayOfMonth`, `toMonday` functions described below is `Date` or `DateTime`.
The return type of `toStartOf*`, `toLastDayOfMonth`, `toMonday` functions described below is determined by the configuration parameter [enable_extended_results_for_datetime_functions](../../operations/settings/settings#enable-extended-results-for-datetime-functions) which is `0` by default.
Though these functions can take values of the extended types `Date32` and `DateTime64` as an argument, passing them a time outside the normal range (year 1970 to 2149 for `Date` / 2106 for `DateTime`) will produce wrong results.
In case argument is out of normal range:
Behavior for
* `enable_extended_results_for_datetime_functions = 0`: Functions `toStartOf*`, `toLastDayOfMonth`, `toMonday` return `Date` or `DateTime`. Though these functions can take values of the extended types `Date32` and `DateTime64` as an argument, passing them a time outside the normal range (year 1970 to 2149 for `Date` / 2106 for `DateTime`) will produce wrong results. In case argument is out of normal range:
* If the argument is smaller than 1970, the result will be calculated from the argument `1970-01-01 (00:00:00)` instead.
* If the return type is `DateTime` and the argument is larger than `2106-02-07 08:28:15`, the result will be calculated from the argument `2106-02-07 08:28:15` instead.
* If the return type is `Date` and the argument is larger than `2149-06-06`, the result will be calculated from the argument `2149-06-06` instead.
* If `toLastDayOfMonth` is called with an argument greater than `2149-05-31`, the result will be calculated from the argument `2149-05-31` instead.
* `enable_extended_results_for_datetime_functions = 1`: Functions `toStartOfYear`, `toStartOfISOYear`, `toStartOfQuarter`, `toStartOfMonth`, `toStartOfWeek`, `toLastDayOfMonth`, `toMonday` return `Date` or `DateTime` if their argument is a `Date` or `DateTime`, and they return `Date32` or `DateTime64` if their argument is a `Date32` or `DateTime64`.
:::
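A minimal sketch of the effect described above (assuming a server version that has this setting; the date literal is arbitrary):

```sql
-- Default behaviour: the result is truncated to the Date range and returned as Date.
SET enable_extended_results_for_datetime_functions = 0;
SELECT toStartOfMonth(toDate32('2200-05-15')) AS d, toTypeName(d);

-- Extended behaviour: a Date32 argument yields a Date32 result, so the year 2200 is preserved.
SET enable_extended_results_for_datetime_functions = 1;
SELECT toStartOfMonth(toDate32('2200-05-15')) AS d, toTypeName(d);
```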

## toStartOfYear
@ -303,6 +305,8 @@ Returns the date.
Rounds up a date or date with time to the last day of the month.
Returns the date.

If `toLastDayOfMonth` is called with an argument of type `Date` greater than 2149-05-31, the result will be calculated from the argument 2149-05-31 instead.
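For illustration, a small query (the dates are arbitrary):

```sql
SELECT
    toLastDayOfMonth(toDate('2023-02-11')) AS regular_year,  -- 2023-02-28
    toLastDayOfMonth(toDate('2024-02-11')) AS leap_year      -- 2024-02-29
```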

## toMonday

Rounds down a date or date with time to the nearest Monday.
@ -1227,6 +1231,8 @@ Result:

Function converts Unix timestamp to a calendar date and a time of day. When there is only a single argument of [Integer](../../sql-reference/data-types/int-uint.md) type, it acts in the same way as [toDateTime](../../sql-reference/functions/type-conversion-functions.md#todatetime) and returns [DateTime](../../sql-reference/data-types/datetime.md) type.

Alias: `fromUnixTimestamp`.

**Example:**

Query:
@ -1823,6 +1823,36 @@ Result:
Evaluate external model.
Accepts a model name and model arguments. Returns Float64.

## catboostEvaluate(path_to_model, feature_1, feature_2, …, feature_n)

Evaluate external catboost model. [CatBoost](https://catboost.ai) is an open-source gradient boosting library developed by Yandex for machine learning.
Accepts a path to a catboost model and model arguments (features). Returns Float64.

``` sql
SELECT feat_1, ..., feat_n, catboostEvaluate('/path/to/model.bin', feat_1, ..., feat_n) AS prediction
FROM data_table
```

**Prerequisites**

1. Build the catboost evaluation library

Before evaluating catboost models, the `libcatboostmodel.<so|dylib>` library must be made available. See the [CatBoost documentation](https://catboost.ai/docs/concepts/c-plus-plus-api_dynamic-c-pluplus-wrapper.html) for how to compile it.

Next, specify the path to `libcatboostmodel.<so|dylib>` in the clickhouse configuration:

``` xml
<clickhouse>
...
    <catboost_lib_path>/path/to/libcatboostmodel.so</catboost_lib_path>
...
</clickhouse>
```

2. Train a catboost model using libcatboost

See [Training and applying models](https://catboost.ai/docs/features/training.html#training) for how to train catboost models from a training data set.
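As a concrete sketch of the function in use, the following query reuses the `amazon_train` table and feature columns from the CatBoost tutorial that is removed elsewhere in this diff; the model path is a placeholder, not a fixed location:

```sql
SELECT
    catboostEvaluate('/home/catboost/models/amazon_model.bin',
                     RESOURCE, MGR_ID, ROLE_ROLLUP_1, ROLE_ROLLUP_2, ROLE_DEPTNAME,
                     ROLE_TITLE, ROLE_FAMILY_DESC, ROLE_FAMILY, ROLE_CODE) > 0 AS prediction,
    ACTION AS target
FROM amazon_train
LIMIT 10
```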

## throwIf(x\[, message\[, error_code\]\])

Throw an exception if the argument is non-zero.
@ -13,5 +13,5 @@ Syntax:
``` sql
ALTER ROLE [IF EXISTS] name1 [ON CLUSTER cluster_name1] [RENAME TO new_name1]
    [, name2 [ON CLUSTER cluster_name2] [RENAME TO new_name2] ...]
    [SETTINGS variable [= value] [MIN [=] min_value] [MAX [=] max_value] [READONLY|WRITABLE] | PROFILE 'profile_name'] [,...]
    [SETTINGS variable [= value] [MIN [=] min_value] [MAX [=] max_value] [CONST|READONLY|WRITABLE|CHANGEABLE_IN_READONLY] | PROFILE 'profile_name'] [,...]
```
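A hedged example of the extended constraint keywords shown in the new syntax (role and setting names are arbitrary):

```sql
ALTER ROLE analyst
    SETTINGS max_memory_usage = 10000000000 MIN 1000000 MAX 20000000000 WRITABLE,
             max_execution_time = 60 CONST
```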

@ -13,5 +13,5 @@ Syntax:
``` sql
ALTER SETTINGS PROFILE [IF EXISTS] TO name1 [ON CLUSTER cluster_name1] [RENAME TO new_name1]
    [, name2 [ON CLUSTER cluster_name2] [RENAME TO new_name2] ...]
    [SETTINGS variable [= value] [MIN [=] min_value] [MAX [=] max_value] [READONLY|WRITABLE] | INHERIT 'profile_name'] [,...]
    [SETTINGS variable [= value] [MIN [=] min_value] [MAX [=] max_value] [CONST|READONLY|WRITABLE|CHANGEABLE_IN_READONLY] | INHERIT 'profile_name'] [,...]
```

@ -11,7 +11,7 @@ Syntax:

``` sql
CREATE ROLE [IF NOT EXISTS | OR REPLACE] name1 [, name2 ...]
    [SETTINGS variable [= value] [MIN [=] min_value] [MAX [=] max_value] [READONLY|WRITABLE] | PROFILE 'profile_name'] [,...]
    [SETTINGS variable [= value] [MIN [=] min_value] [MAX [=] max_value] [CONST|READONLY|WRITABLE|CHANGEABLE_IN_READONLY] | PROFILE 'profile_name'] [,...]
```

## Managing Roles
@ -12,7 +12,7 @@ Syntax:
``` sql
CREATE SETTINGS PROFILE [IF NOT EXISTS | OR REPLACE] TO name1 [ON CLUSTER cluster_name1]
    [, name2 [ON CLUSTER cluster_name2] ...]
    [SETTINGS variable [= value] [MIN [=] min_value] [MAX [=] max_value] [READONLY|WRITABLE] | INHERIT 'profile_name'] [,...]
    [SETTINGS variable [= value] [MIN [=] min_value] [MAX [=] max_value] [CONST|READONLY|WRITABLE|CHANGEABLE_IN_READONLY] | INHERIT 'profile_name'] [,...]
```

`ON CLUSTER` clause allows creating settings profiles on a cluster, see [Distributed DDL](../../../sql-reference/distributed-ddl.md).
@ -32,6 +32,12 @@ SET allow_experimental_lightweight_delete = true;
An [alternative way to delete rows](./alter/delete.md) in ClickHouse is `ALTER TABLE ... DELETE`, which might be more efficient if you do bulk deletes only occasionally and don't need the operation to be applied instantly. In most use cases the new lightweight `DELETE FROM` behavior will be considerably faster.

:::warning
Even though deletes are becoming more lightweight in ClickHouse, they should still not be used as aggressively as on an OLTP system. Lightweight deletes are currently efficient for wide parts, but for compact parts they can be a heavyweight operation, and it may be better to use `ALTER TABLE` for some scenarios.
:::

:::note
`DELETE FROM` requires the `ALTER DELETE` privilege:
```sql
GRANT ALTER DELETE ON db.table TO username;
```
:::
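For reference, a minimal sketch of the statement itself (database, table and column are placeholders):

```sql
DELETE FROM db.table WHERE visitor_id = 42;
```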
@ -213,9 +213,10 @@ If the `WITH TOTALS` modifier is specified, another row will be calculated. This

This extra row is only produced in `JSON*`, `TabSeparated*`, and `Pretty*` formats, separately from the other rows:

- In `JSON*` formats, this row is output as a separate ‘totals’ field.
- In `XML` and `JSON*` formats, this row is output as a separate ‘totals’ field.
- In `TabSeparated*` formats, the row comes after the main result, preceded by an empty row (after the other data).
- In `TabSeparated*`, `CSV*` and `Vertical` formats, the row comes after the main result, preceded by an empty row (after the other data).
- In `Pretty*` formats, the row is output as a separate table after the main result.
- In `Template` format, the row is output according to the specified template.
- In the other formats it is not available.
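As a hedged illustration of a query whose totals row appears in the formats listed above (table and column names are made up):

```sql
SELECT domain, count() AS hits
FROM site_visits
GROUP BY domain WITH TOTALS
FORMAT JSON
```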

:::note

@ -135,9 +135,9 @@ In all other cases, we do not recommend using the asterisk, since it only gives

In addition to results, you can also get minimum and maximum values for the results columns. To do this, set the **extremes** setting to 1. Minimums and maximums are calculated for numeric types, dates, and dates with times. For other columns, the default values are output.

An extra two rows are calculated – the minimums and maximums, respectively. These extra two rows are output in `JSON*`, `TabSeparated*`, and `Pretty*` [formats](../../../interfaces/formats.md), separate from the other rows. They are not output for other formats.
An extra two rows are calculated – the minimums and maximums, respectively. These extra two rows are output in `XML`, `JSON*`, `TabSeparated*`, `CSV*`, `Vertical`, `Template` and `Pretty*` [formats](../../../interfaces/formats.md), separate from the other rows. They are not output for other formats.

In `JSON*` formats, the extreme values are output in a separate ‘extremes’ field. In `TabSeparated*` formats, the row comes after the main result, and after ‘totals’ if present. It is preceded by an empty row (after the other data). In `Pretty*` formats, the row is output as a separate table after the main result, and after `totals` if present.
In `JSON*` and `XML` formats, the extreme values are output in a separate ‘extremes’ field. In `TabSeparated*`, `CSV*` and `Vertical` formats, the row comes after the main result, and after ‘totals’ if present. It is preceded by an empty row (after the other data). In `Pretty*` formats, the row is output as a separate table after the main result, and after `totals` if present. In `Template` format the extreme values are output according to the specified template.

Extreme values are calculated for rows before `LIMIT`, but after `LIMIT BY`. However, when using `LIMIT offset, size`, the rows before `offset` are included in `extremes`. In stream requests, the result may also include a small number of rows that passed through `LIMIT`.
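A short sketch of enabling the setting (table and column names are placeholders):

```sql
SET extremes = 1;

SELECT EventDate, length(URL) AS url_length
FROM site_visits
LIMIT 5
FORMAT Pretty
```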
@ -6,45 +6,6 @@ sidebar_label: SYSTEM

# SYSTEM Statements

The list of available `SYSTEM` statements:

- [RELOAD EMBEDDED DICTIONARIES](#query_language-system-reload-emdedded-dictionaries)
- [RELOAD DICTIONARIES](#query_language-system-reload-dictionaries)
- [RELOAD DICTIONARY](#query_language-system-reload-dictionary)
- [RELOAD MODELS](#query_language-system-reload-models)
- [RELOAD MODEL](#query_language-system-reload-model)
- [RELOAD FUNCTIONS](#query_language-system-reload-functions)
- [RELOAD FUNCTION](#query_language-system-reload-functions)
- [DROP DNS CACHE](#query_language-system-drop-dns-cache)
- [DROP MARK CACHE](#query_language-system-drop-mark-cache)
- [DROP UNCOMPRESSED CACHE](#query_language-system-drop-uncompressed-cache)
- [DROP COMPILED EXPRESSION CACHE](#query_language-system-drop-compiled-expression-cache)
- [DROP REPLICA](#query_language-system-drop-replica)
- [FLUSH LOGS](#query_language-system-flush_logs)
- [RELOAD CONFIG](#query_language-system-reload-config)
- [SHUTDOWN](#query_language-system-shutdown)
- [KILL](#query_language-system-kill)
- [STOP DISTRIBUTED SENDS](#query_language-system-stop-distributed-sends)
- [FLUSH DISTRIBUTED](#query_language-system-flush-distributed)
- [START DISTRIBUTED SENDS](#query_language-system-start-distributed-sends)
- [STOP MERGES](#query_language-system-stop-merges)
- [START MERGES](#query_language-system-start-merges)
- [STOP TTL MERGES](#query_language-stop-ttl-merges)
- [START TTL MERGES](#query_language-start-ttl-merges)
- [STOP MOVES](#query_language-stop-moves)
- [START MOVES](#query_language-start-moves)
- [SYSTEM UNFREEZE](#query_language-system-unfreeze)
- [STOP FETCHES](#query_language-system-stop-fetches)
- [START FETCHES](#query_language-system-start-fetches)
- [STOP REPLICATED SENDS](#query_language-system-start-replicated-sends)
- [START REPLICATED SENDS](#query_language-system-start-replicated-sends)
- [STOP REPLICATION QUEUES](#query_language-system-stop-replication-queues)
- [START REPLICATION QUEUES](#query_language-system-start-replication-queues)
- [SYNC REPLICA](#query_language-system-sync-replica)
- [RESTART REPLICA](#query_language-system-restart-replica)
- [RESTORE REPLICA](#query_language-system-restore-replica)
- [RESTART REPLICAS](#query_language-system-restart-replicas)

## RELOAD EMBEDDED DICTIONARIES

Reload all [Internal dictionaries](../../sql-reference/dictionaries/internal-dicts.md).
@ -69,7 +30,12 @@ SELECT name, status FROM system.dictionaries;

## RELOAD MODELS

Reloads all [CatBoost](../../guides/developer/apply-catboost-model.md) models if the configuration was updated without restarting the server.
:::note
This statement and `SYSTEM RELOAD MODEL` merely unload catboost models from the clickhouse-library-bridge. The function `catboostEvaluate()` loads a model upon first access if it is not loaded yet.
:::

Unloads all CatBoost models.

**Syntax**

@ -79,12 +45,12 @@ SYSTEM RELOAD MODELS [ON CLUSTER cluster_name]

## RELOAD MODEL

Completely reloads a CatBoost model `model_name` if the configuration was updated without restarting the server.
Unloads a CatBoost model at `model_path`.

**Syntax**

```sql
SYSTEM RELOAD MODEL [ON CLUSTER cluster_name] <model_name>
SYSTEM RELOAD MODEL [ON CLUSTER cluster_name] <model_path>
```
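A hedged usage sketch; the path is hypothetical, and whether the path must be quoted may depend on the server version:

```sql
SYSTEM RELOAD MODEL '/home/catboost/models/amazon_model.bin'
```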

## RELOAD FUNCTIONS
@ -13,7 +13,7 @@ Creates a table from a file. This table function is similar to [url](../../sql-r

**Syntax**

``` sql
file(path, format, structure)
file(path [,format] [,structure])
```
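A hedged illustration of the now-optional arguments (file name and schema are placeholders; omitting them relies on schema inference from the file):

```sql
-- Explicit format and structure:
SELECT * FROM file('data.csv', 'CSV', 'id UInt32, name String');

-- Format and structure omitted, per the relaxed syntax above:
SELECT * FROM file('data.csv');
```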

**Parameters**
@ -11,7 +11,7 @@ Provides table-like interface to select/insert files in [Amazon S3](https://aws.

**Syntax**

``` sql
s3(path, [aws_access_key_id, aws_secret_access_key,] format, structure, [compression])
s3(path [,aws_access_key_id, aws_secret_access_key] [,format] [,structure] [,compression])
```
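A hedged sketch of the relaxed signature (the bucket URL and file layout are made up; with no format or structure given, both are inferred):

```sql
-- Fully specified call:
SELECT count() FROM s3('https://my-bucket.s3.amazonaws.com/data/hits.csv.gz', 'CSV', 'id UInt32, url String', 'gzip');

-- Relying on inference of format, structure and compression:
SELECT count() FROM s3('https://my-bucket.s3.amazonaws.com/data/hits.csv.gz');
```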

**Arguments**
@ -10,7 +10,7 @@ Allows processing files from [Amazon S3](https://aws.amazon.com/s3/) in parallel

**Syntax**

``` sql
s3Cluster(cluster_name, source, [access_key_id, secret_access_key,] format, structure)
s3Cluster(cluster_name, source, [,access_key_id, secret_access_key] [,format] [,structure])
```
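A brief hedged example; the cluster name and bucket path are assumptions, and omitting the format and structure assumes they can be inferred:

```sql
SELECT count()
FROM s3Cluster('my_cluster', 'https://my-bucket.s3.amazonaws.com/data/*.parquet')
```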

**Arguments**
@ -13,7 +13,7 @@ sidebar_label: url

**Syntax**

``` sql
url(URL, format, structure)
url(URL [,format] [,structure])
```
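For illustration (the URL is a placeholder; omitting `format` and `structure` assumes they can be inferred):

```sql
SELECT * FROM url('https://example.com/data.json', 'JSONEachRow', 'id UInt32, value String');

-- Relying on inference:
SELECT * FROM url('https://example.com/data.json');
```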

**Parameters**
@ -155,7 +155,6 @@ getting_started/index.md getting-started/index.md
getting_started/install.md getting-started/install.md
getting_started/playground.md getting-started/playground.md
getting_started/tutorial.md getting-started/tutorial.md
guides/apply_catboost_model.md guides/apply-catboost-model.md
images/column_oriented.gif images/column-oriented.gif
images/row_oriented.gif images/row-oriented.gif
interfaces/http_interface.md interfaces/http.md
@ -1,241 +0,0 @@
---
slug: /ru/guides/apply-catboost-model
sidebar_position: 41
sidebar_label: "Applying a CatBoost Model in ClickHouse"
---

# Applying a CatBoost Model in ClickHouse {#applying-catboost-model-in-clickhouse}

[CatBoost](https://catboost.ai) is a free and open-source machine learning library developed by [Yandex](https://yandex.ru/company/) that uses a gradient boosting scheme.

With this guide you will learn to apply pre-trained models in ClickHouse: as a result, you will run model inference from SQL.

To apply a CatBoost model in ClickHouse:

1. [Create a table](#create-table).
2. [Insert the data into the table](#insert-data-to-table).
3. [Integrate CatBoost into ClickHouse](#integrate-catboost-into-clickhouse) (optional step).
4. [Run model inference from SQL](#run-model-inference).

For more information about training CatBoost models, see [Training and applying models](https://catboost.ai/docs/features/training.html#training).

You can reload CatBoost models after their configuration has been updated without restarting the server. To do this, use the system queries [RELOAD MODEL](../sql-reference/statements/system.md#query_language-system-reload-model) and [RELOAD MODELS](../sql-reference/statements/system.md#query_language-system-reload-models).

## Prerequisites {#prerequisites}

If you do not have [Docker](https://docs.docker.com/install/) yet, install it.

:::note "Note"
[Docker](https://www.docker.com) is a software platform for creating containers that isolate the CatBoost and ClickHouse installation from the rest of the system.
:::
Before applying a CatBoost model:

**1.** Pull the [Docker image](https://hub.docker.com/r/yandex/tutorial-catboost-clickhouse) from the registry:

``` bash
$ docker pull yandex/tutorial-catboost-clickhouse
```

This Docker image contains everything you need to run CatBoost and ClickHouse: code, runtime, libraries, environment variables, and configuration files.

**2.** Make sure the Docker image has been pulled successfully:

``` bash
$ docker image ls
REPOSITORY TAG IMAGE ID CREATED SIZE
yandex/tutorial-catboost-clickhouse latest 622e4d17945b 22 hours ago 1.37GB
```

**3.** Start a Docker container based on this image:

``` bash
$ docker run -it -p 8888:8888 yandex/tutorial-catboost-clickhouse
```

## 1. Create a Table {#create-table}

To create a table for the training sample:

**1.** Start the ClickHouse client:

``` bash
$ clickhouse client
```

:::note "Note"
The ClickHouse server is already running inside the Docker container.
:::
**2.** Create the table in ClickHouse with the following command:

``` sql
:) CREATE TABLE amazon_train
(
    date Date MATERIALIZED today(),
    ACTION UInt8,
    RESOURCE UInt32,
    MGR_ID UInt32,
    ROLE_ROLLUP_1 UInt32,
    ROLE_ROLLUP_2 UInt32,
    ROLE_DEPTNAME UInt32,
    ROLE_TITLE UInt32,
    ROLE_FAMILY_DESC UInt32,
    ROLE_FAMILY UInt32,
    ROLE_CODE UInt32
)
ENGINE = MergeTree ORDER BY date
```

**3.** Exit the ClickHouse client:

``` sql
:) exit
```

## 2. Insert the Data into the Table {#insert-data-to-table}

To insert the data:

**1.** Run the following command:

``` bash
$ clickhouse client --host 127.0.0.1 --query 'INSERT INTO amazon_train FORMAT CSVWithNames' < ~/amazon/train.csv
```

**2.** Start the ClickHouse client:

``` bash
$ clickhouse client
```

**3.** Make sure the data has been uploaded:

``` sql
:) SELECT count() FROM amazon_train

SELECT count()
FROM amazon_train

+-count()-+
| 65538 |
+---------+
```

## 3. Integrate CatBoost into ClickHouse {#integrate-catboost-into-clickhouse}

:::note "Note"
**Optional step.** The Docker image contains everything you need to run CatBoost and ClickHouse.
:::
To integrate CatBoost into ClickHouse:

**1.** Build the model evaluation library.

The fastest way to evaluate a CatBoost model is to compile the `libcatboostmodel.<so|dll|dylib>` library. For details on how to build the library, see the [CatBoost documentation](https://catboost.ai/docs/concepts/c-plus-plus-api_dynamic-c-pluplus-wrapper.html).

**2.** Create a new directory anywhere, with any name, for example `data`, and put the built library into it. The Docker image already contains the library `data/libcatboostmodel.so`.

**3.** Create a new directory for the model configuration anywhere, with any name, for example `models`.

**4.** Create a model configuration file with any name, for example `models/amazon_model.xml`.

**5.** Describe the model configuration:

``` xml
<models>
    <model>
        <!-- Model type. At the moment ClickHouse provides only the catboost model. -->
        <type>catboost</type>
        <!-- Model name. -->
        <name>amazon</name>
        <!-- Path to the trained model. -->
        <path>/home/catboost/tutorial/catboost_model.bin</path>
        <!-- Update interval. -->
        <lifetime>0</lifetime>
    </model>
</models>
```

**6.** Add the paths to CatBoost and the model configuration to the ClickHouse configuration:

``` xml
<!-- File etc/clickhouse-server/config.d/models_config.xml. -->
<catboost_dynamic_library_path>/home/catboost/data/libcatboostmodel.so</catboost_dynamic_library_path>
<models_config>/home/catboost/models/*_model.xml</models_config>
```
:::note "Note"
You can change the path to the CatBoost model configuration later without restarting the server.
:::
## 4. Run Model Inference from SQL {#run-model-inference}

To test the model, start the ClickHouse client `$ clickhouse client`.

Make sure the model works:

``` sql
:) SELECT
    modelEvaluate('amazon',
                RESOURCE,
                MGR_ID,
                ROLE_ROLLUP_1,
                ROLE_ROLLUP_2,
                ROLE_DEPTNAME,
                ROLE_TITLE,
                ROLE_FAMILY_DESC,
                ROLE_FAMILY,
                ROLE_CODE) > 0 AS prediction,
    ACTION AS target
FROM amazon_train
LIMIT 10
```

:::note "Note"
The [modelEvaluate](../sql-reference/functions/other-functions.md#function-modelevaluate) function returns tuples with the raw per-class predictions for multiclass models.
:::
Predict the probability:

``` sql
:) SELECT
    modelEvaluate('amazon',
                RESOURCE,
                MGR_ID,
                ROLE_ROLLUP_1,
                ROLE_ROLLUP_2,
                ROLE_DEPTNAME,
                ROLE_TITLE,
                ROLE_FAMILY_DESC,
                ROLE_FAMILY,
                ROLE_CODE) AS prediction,
    1. / (1 + exp(-prediction)) AS probability,
    ACTION AS target
FROM amazon_train
LIMIT 10
```

:::note "Note"
More details about the [exp()](../sql-reference/functions/math-functions.md) function.
:::
Calculate the logistic loss (LogLoss) on the whole sample:

``` sql
:) SELECT -avg(tg * log(prob) + (1 - tg) * log(1 - prob)) AS logloss
FROM
(
    SELECT
        modelEvaluate('amazon',
                RESOURCE,
                MGR_ID,
                ROLE_ROLLUP_1,
                ROLE_ROLLUP_2,
                ROLE_DEPTNAME,
                ROLE_TITLE,
                ROLE_FAMILY_DESC,
                ROLE_FAMILY,
                ROLE_CODE) AS prediction,
        1. / (1. + exp(-prediction)) AS prob,
        ACTION AS tg
    FROM amazon_train
)
```

:::note "Note"
More details about the [avg()](../sql-reference/aggregate-functions/reference/avg.md#agg_function-avg) and [log()](../sql-reference/functions/math-functions.md) functions.
:::
@ -7,5 +7,3 @@ sidebar_label: "Руководства"
# Guides {#rukovodstva}

Detailed step-by-step instructions that help you solve various tasks with ClickHouse.

- [Applying a CatBoost model in ClickHouse](apply-catboost-model.md)
@ -3799,6 +3799,17 @@ Exception: Total regexp lengths too large.

Default value: `1`.

## enable_extended_results_for_datetime_functions {#enable-extended-results-for-datetime-functions}

Enables or disables returning results of type `Date32` with the extended range (compared to the `Date` type) for the functions [toStartOfYear](../../sql-reference/functions/date-time-functions.md#tostartofyear), [toStartOfISOYear](../../sql-reference/functions/date-time-functions.md#tostartofisoyear), [toStartOfQuarter](../../sql-reference/functions/date-time-functions.md#tostartofquarter), [toStartOfMonth](../../sql-reference/functions/date-time-functions.md#tostartofmonth), [toStartOfWeek](../../sql-reference/functions/date-time-functions.md#tostartofweek), [toMonday](../../sql-reference/functions/date-time-functions.md#tomonday) and [toLastDayOfMonth](../../sql-reference/functions/date-time-functions.md#tolastdayofmonth).

Possible values:

- 0 — Functions return results of type `Date` for all argument types.
- 1 — Functions return results of type `Date32` for arguments of type `Date32` or `DateTime64`, and `Date` otherwise.

Default value: `0`.

**Example**

Query:
@ -268,24 +268,18 @@ SELECT toUnixTimestamp('2017-11-05 08:07:47', 'Asia/Tokyo') AS unix_timestamp;
```

:::note
The return type of the `toStartOf*`, `toMonday` functions described below is `Date` or `DateTime`.
The return type of the `toStartOf*`, `toLastDayOfMonth`, `toMonday` functions described below is determined by the configuration parameter [enable_extended_results_for_datetime_functions](../../operations/settings/settings#enable-extended-results-for-datetime-functions), which is `0` by default.
Although these functions can take values of type `Date32` or `DateTime64` as an argument, processing an argument outside the normal range (`1970` – `2148` for `Date` and `1970-01-01 00:00:00` – `2106-02-07 08:28:15` for `DateTime`) will produce an incorrect result.
Return values for arguments outside the normal range:
Behavior for
* `enable_extended_results_for_datetime_functions = 0`: The functions `toStartOf*`, `toLastDayOfMonth`, `toMonday` return `Date` or `DateTime`. Although these functions can take values of type `Date32` or `DateTime64` as an argument, processing an argument outside the normal range (`1970` – `2148` for `Date` and `1970-01-01 00:00:00` – `2106-02-07 08:28:15` for `DateTime`) will produce an incorrect result.
If the argument value is outside the normal range:
* `1970-01-01 (00:00:00)` is returned for moments in time before 1970,
* `2106-02-07 08:28:15` is taken as the argument if the supplied argument exceeds this value and the return type is `DateTime`,
* `2149-06-06` is taken as the argument if the supplied argument exceeds this value and the return type is `Date`,
* `2149-05-31` is the result of `toLastDayOfMonth` when processing an argument greater than `2149-05-31`.
* `enable_extended_results_for_datetime_functions = 1`: The functions `toStartOfYear`, `toStartOfISOYear`, `toStartOfQuarter`, `toStartOfMonth`, `toStartOfWeek`, `toLastDayOfMonth`, `toMonday` return `Date` or `DateTime` if their argument is a `Date` or `DateTime`, and they return `Date32` or `DateTime64` if their argument is a `Date32` or `DateTime64`.
:::

:::note
The return type of the `toStartOf*`, `toLastDayOfMonth`, `toMonday` functions described below is `Date` or `DateTime`.
Although these functions can take values of type `Date32` or `DateTime64` as an argument, processing an argument outside the normal range (`1970` – `2148` for `Date` and `1970-01-01 00:00:00` – `2106-02-07 08:28:15` for `DateTime`) will produce an incorrect result.
Return values for arguments outside the normal range:
* `1970-01-01 (00:00:00)` is returned for moments in time before 1970,
* `2106-02-07 08:28:15` is taken as the argument if the supplied argument exceeds this value and the return type is `DateTime`,
* `2149-06-06` is taken as the argument if the supplied argument exceeds this value and the return type is `Date`.
:::
*

## toStartOfYear {#tostartofyear}

Rounds a date or date with time down to the first day of the year.
@ -324,6 +318,8 @@ SELECT toStartOfISOYear(toDate('2017-01-01')) AS ISOYear20170101;
Rounds a date or date with time up to the last day of the month.
Returns the date.

If `toLastDayOfMonth` is called with an argument of type `Date` greater than 2149-05-31, the result is calculated from the argument 2149-05-31 instead.

## toMonday {#tomonday}

Rounds a date or date with time down to the nearest Monday.
@ -13,6 +13,6 @@ sidebar_label: ROLE
``` sql
ALTER ROLE [IF EXISTS] name1 [ON CLUSTER cluster_name1] [RENAME TO new_name1]
    [, name2 [ON CLUSTER cluster_name2] [RENAME TO new_name2] ...]
    [SETTINGS variable [= value] [MIN [=] min_value] [MAX [=] max_value] [READONLY|WRITABLE] | PROFILE 'profile_name'] [,...]
    [SETTINGS variable [= value] [MIN [=] min_value] [MAX [=] max_value] [CONST|READONLY|WRITABLE|CHANGEABLE_IN_READONLY] | PROFILE 'profile_name'] [,...]
```
@ -13,6 +13,6 @@ sidebar_label: SETTINGS PROFILE
``` sql
ALTER SETTINGS PROFILE [IF EXISTS] TO name1 [ON CLUSTER cluster_name1] [RENAME TO new_name1]
    [, name2 [ON CLUSTER cluster_name2] [RENAME TO new_name2] ...]
    [SETTINGS variable [= value] [MIN [=] min_value] [MAX [=] max_value] [READONLY|WRITABLE] | INHERIT 'profile_name'] [,...]
    [SETTINGS variable [= value] [MIN [=] min_value] [MAX [=] max_value] [CONST|READONLY|WRITABLE|CHANGEABLE_IN_READONLY] | INHERIT 'profile_name'] [,...]
```
@ -12,7 +12,7 @@ sidebar_label: "Роль"

```sql
CREATE ROLE [IF NOT EXISTS | OR REPLACE] name1 [, name2 ...]
    [SETTINGS variable [= value] [MIN [=] min_value] [MAX [=] max_value] [READONLY|WRITABLE] | PROFILE 'profile_name'] [,...]
    [SETTINGS variable [= value] [MIN [=] min_value] [MAX [=] max_value] [CONST|READONLY|WRITABLE|CHANGEABLE_IN_READONLY] | PROFILE 'profile_name'] [,...]
```

## Managing Roles {#managing-roles}
@ -13,7 +13,7 @@ sidebar_label: "Профиль настроек"
``` sql
CREATE SETTINGS PROFILE [IF NOT EXISTS | OR REPLACE] TO name1 [ON CLUSTER cluster_name1]
    [, name2 [ON CLUSTER cluster_name2] ...]
    [SETTINGS variable [= value] [MIN [=] min_value] [MAX [=] max_value] [READONLY|WRITABLE] | INHERIT 'profile_name'] [,...]
    [SETTINGS variable [= value] [MIN [=] min_value] [MAX [=] max_value] [CONST|READONLY|WRITABLE|CHANGEABLE_IN_READONLY] | INHERIT 'profile_name'] [,...]
```

The `ON CLUSTER` clause allows creating settings profiles on a cluster, see [Distributed DDL queries](../../../sql-reference/distributed-ddl.md).
@ -6,43 +6,6 @@ sidebar_label: SYSTEM

# SYSTEM Queries {#query-language-system}

- [RELOAD EMBEDDED DICTIONARIES](#query_language-system-reload-emdedded-dictionaries)
- [RELOAD DICTIONARIES](#query_language-system-reload-dictionaries)
- [RELOAD DICTIONARY](#query_language-system-reload-dictionary)
- [RELOAD MODELS](#query_language-system-reload-models)
- [RELOAD MODEL](#query_language-system-reload-model)
- [RELOAD FUNCTIONS](#query_language-system-reload-functions)
- [RELOAD FUNCTION](#query_language-system-reload-functions)
- [DROP DNS CACHE](#query_language-system-drop-dns-cache)
- [DROP MARK CACHE](#query_language-system-drop-mark-cache)
- [DROP UNCOMPRESSED CACHE](#query_language-system-drop-uncompressed-cache)
- [DROP COMPILED EXPRESSION CACHE](#query_language-system-drop-compiled-expression-cache)
- [DROP REPLICA](#query_language-system-drop-replica)
- [FLUSH LOGS](#query_language-system-flush_logs)
- [RELOAD CONFIG](#query_language-system-reload-config)
- [SHUTDOWN](#query_language-system-shutdown)
- [KILL](#query_language-system-kill)
- [STOP DISTRIBUTED SENDS](#query_language-system-stop-distributed-sends)
- [FLUSH DISTRIBUTED](#query_language-system-flush-distributed)
- [START DISTRIBUTED SENDS](#query_language-system-start-distributed-sends)
- [STOP MERGES](#query_language-system-stop-merges)
- [START MERGES](#query_language-system-start-merges)
- [STOP TTL MERGES](#query_language-stop-ttl-merges)
- [START TTL MERGES](#query_language-start-ttl-merges)
- [STOP MOVES](#query_language-stop-moves)
- [START MOVES](#query_language-start-moves)
- [SYSTEM UNFREEZE](#query_language-system-unfreeze)
- [STOP FETCHES](#query_language-system-stop-fetches)
- [START FETCHES](#query_language-system-start-fetches)
- [STOP REPLICATED SENDS](#query_language-system-start-replicated-sends)
- [START REPLICATED SENDS](#query_language-system-start-replicated-sends)
- [STOP REPLICATION QUEUES](#query_language-system-stop-replication-queues)
- [START REPLICATION QUEUES](#query_language-system-start-replication-queues)
- [SYNC REPLICA](#query_language-system-sync-replica)
- [RESTART REPLICA](#query_language-system-restart-replica)
- [RESTORE REPLICA](#query_language-system-restore-replica)
- [RESTART REPLICAS](#query_language-system-restart-replicas)

## RELOAD EMBEDDED DICTIONARIES {#query_language-system-reload-emdedded-dictionaries}
Reloads all [internal dictionaries](../dictionaries/internal-dicts.md).
Embedded dictionaries are disabled by default.
@ -66,7 +29,12 @@ SELECT name, status FROM system.dictionaries;

## RELOAD MODELS {#query_language-system-reload-models}

Reloads all [CatBoost](../../guides/apply-catboost-model.md#applying-catboost-model-in-clickhouse) models if the configuration was updated without restarting the server.
:::note
This statement and `SYSTEM RELOAD MODEL` merely unload catboost models from the clickhouse-library-bridge. The function `catboostEvaluate()` loads a model upon first access if it is not loaded yet.
:::

Unloads all CatBoost models.

**Syntax**

@ -76,12 +44,12 @@ SYSTEM RELOAD MODELS

## RELOAD MODEL {#query_language-system-reload-model}

Completely reloads the [CatBoost](../../guides/apply-catboost-model.md#applying-catboost-model-in-clickhouse) model `model_name` if its configuration was updated without restarting the server.
Unloads the CatBoost model at `model_path`.

**Syntax**

```sql
SYSTEM RELOAD MODEL <model_name>
SYSTEM RELOAD MODEL <model_path>
```

## RELOAD FUNCTIONS {#query_language-system-reload-functions}
@ -13,7 +13,7 @@ sidebar_label: file
**Syntax**

``` sql
file(path, format, structure)
file(path [,format] [,structure])
```

**Parameters**
@ -11,7 +11,7 @@ sidebar_label: s3
**Syntax**

``` sql
s3(path, [aws_access_key_id, aws_secret_access_key,] format, structure, [compression])
s3(path [,aws_access_key_id, aws_secret_access_key] [,format] [,structure] [,compression])
```

**Arguments**
@ -11,7 +11,7 @@ sidebar_label: s3Cluster
**Syntax**

``` sql
s3Cluster(cluster_name, source, [access_key_id, secret_access_key,] format, structure)
s3Cluster(cluster_name, source, [,access_key_id, secret_access_key] [,format] [,structure])
```

**Arguments**
@ -13,7 +13,7 @@ sidebar_label: url
**Syntax**

``` sql
url(URL, format, structure)
url(URL [,format] [,structure])
```

**Parameters**
@ -1,244 +0,0 @@
---
slug: /zh/guides/apply-catboost-model
sidebar_position: 41
sidebar_label: "Applying a CatBoost Model"
---

# Applying a CatBoost Model in ClickHouse {#applying-catboost-model-in-clickhouse}

[CatBoost](https://catboost.ai) is a free and open-source machine learning library developed by [Yandex](https://yandex.com/company/).

In this document you will learn how to call a pre-trained model already stored in ClickHouse from SQL statements to make predictions on your data.

To apply a CatBoost model in ClickHouse, the following steps are required:

1. [Create a table](#create-table).
2. [Insert the data into the table](#insert-data-to-table).
3. [Integrate CatBoost into ClickHouse](#integrate-catboost-into-clickhouse) (optional).
4. [Run model inference from SQL](#run-model-inference).

For details on training CatBoost models, see [Training and applying models](https://catboost.ai/docs/features/training.html#training).

You can reload CatBoost models with the [RELOAD MODEL](https://clickhouse.com/docs/en/sql-reference/statements/system/#query_language-system-reload-model) and [RELOAD MODELS](https://clickhouse.com/docs/en/sql-reference/statements/system/#query_language-system-reload-models) statements.

## Prerequisites {#prerequisites}

Install [Docker](https://docs.docker.com/install/) first.

!!! note "Note"
    [Docker](https://www.docker.com) is a software platform that lets you create containers isolating the CatBoost and ClickHouse installation from the rest of the system.

Before applying a CatBoost model:

**1.** Pull the example Docker image (https://hub.docker.com/r/yandex/tutorial-catboost-clickhouse) from the registry:

``` bash
$ docker pull yandex/tutorial-catboost-clickhouse
```

This example Docker image contains everything needed to run CatBoost and ClickHouse: code, runtime, libraries, environment variables, and configuration files.

**2.** Make sure the Docker image has been pulled successfully:

``` bash
$ docker image ls
REPOSITORY TAG IMAGE ID CREATED SIZE
yandex/tutorial-catboost-clickhouse latest 622e4d17945b 22 hours ago 1.37GB
```

**3.** Start a Docker container based on this image:

``` bash
$ docker run -it -p 8888:8888 yandex/tutorial-catboost-clickhouse
```

## 1. Create a Table {#create-table}

Create a ClickHouse table for the training sample:

**1.** Start the ClickHouse console client in interactive mode:

``` bash
$ clickhouse client
```

!!! note "Note"
    The ClickHouse server is already running inside the Docker container.

**2.** Create the table using the following command:

``` sql
:) CREATE TABLE amazon_train
(
    date Date MATERIALIZED today(),
    ACTION UInt8,
    RESOURCE UInt32,
    MGR_ID UInt32,
    ROLE_ROLLUP_1 UInt32,
    ROLE_ROLLUP_2 UInt32,
    ROLE_DEPTNAME UInt32,
    ROLE_TITLE UInt32,
    ROLE_FAMILY_DESC UInt32,
    ROLE_FAMILY UInt32,
    ROLE_CODE UInt32
)
ENGINE = MergeTree ORDER BY date
```

**3.** Exit the ClickHouse console client:

``` sql
:) exit
```

## 2. Insert the Data into the Table {#insert-data-to-table}

To insert the data:

**1.** Run the following command:

``` bash
$ clickhouse client --host 127.0.0.1 --query 'INSERT INTO amazon_train FORMAT CSVWithNames' < ~/amazon/train.csv
```

**2.** Start the ClickHouse console client in interactive mode:

``` bash
$ clickhouse client
```

**3.** Make sure the data has been uploaded:

``` sql
:) SELECT count() FROM amazon_train

SELECT count()
FROM amazon_train

+-count()-+
| 65538 |
+-------+
```

## 3. Integrate CatBoost into ClickHouse {#integrate-catboost-into-clickhouse}

!!! note "Note"
    **Optional step.** The example Docker image already contains everything needed to run CatBoost and ClickHouse.

To integrate CatBoost into ClickHouse, the following steps are required:

**1.** Build the evaluation library.

The fastest way to evaluate a CatBoost model is to compile the `libcatboostmodel.<so|dll|dylib>` library.

For details on how to build the library, see the [CatBoost documentation](https://catboost.ai/docs/concepts/c-plus-plus-api_dynamic-c-pluplus-wrapper.html).

**2.** Create a new directory anywhere, with any name, for example `data`, and put the built library into it. The example Docker image already contains the library `data/libcatboostmodel.so`.

**3.** Create a new directory for the model configuration, for example `models`.

**4.** Create a model configuration file, for example `models/amazon_model.xml`.

**5.** Describe the model configuration:

``` xml
<models>
    <model>
        <!-- Model type. Now catboost only. -->
        <type>catboost</type>
        <!-- Model name. -->
        <name>amazon</name>
        <!-- Path to trained model. -->
        <path>/home/catboost/tutorial/catboost_model.bin</path>
        <!-- Update interval. -->
        <lifetime>0</lifetime>
    </model>
</models>
```

**6.** Add the paths to the CatBoost library and the model configuration to the ClickHouse configuration:

``` xml
<!-- File etc/clickhouse-server/config.d/models_config.xml. -->
<catboost_dynamic_library_path>/home/catboost/data/libcatboostmodel.so</catboost_dynamic_library_path>
<models_config>/home/catboost/models/*_model.xml</models_config>
```

## 4. Run Model Inference from SQL {#run-model-inference}

To test whether the model works, you can use the ClickHouse client `$ clickhouse client`.

Let's make sure the model works:

``` sql
:) SELECT
    modelEvaluate('amazon',
                RESOURCE,
                MGR_ID,
                ROLE_ROLLUP_1,
                ROLE_ROLLUP_2,
                ROLE_DEPTNAME,
                ROLE_TITLE,
                ROLE_FAMILY_DESC,
                ROLE_FAMILY,
                ROLE_CODE) > 0 AS prediction,
    ACTION AS target
FROM amazon_train
LIMIT 10
```

!!! note "Note"
    The [modelEvaluate](../sql-reference/functions/other-functions.md#function-modelevaluate) function returns a tuple with the raw per-class predictions for multiclass models.

Run the prediction:

``` sql
:) SELECT
    modelEvaluate('amazon',
                RESOURCE,
                MGR_ID,
                ROLE_ROLLUP_1,
                ROLE_ROLLUP_2,
                ROLE_DEPTNAME,
                ROLE_TITLE,
                ROLE_FAMILY_DESC,
                ROLE_FAMILY,
                ROLE_CODE) AS prediction,
    1. / (1 + exp(-prediction)) AS probability,
    ACTION AS target
FROM amazon_train
LIMIT 10
```

!!! note "Note"
    See the description of the [exp()](../sql-reference/functions/math-functions.md) function.

Let's calculate LogLoss on the sample:

``` sql
:) SELECT -avg(tg * log(prob) + (1 - tg) * log(1 - prob)) AS logloss
FROM
(
    SELECT
        modelEvaluate('amazon',
                RESOURCE,
                MGR_ID,
                ROLE_ROLLUP_1,
                ROLE_ROLLUP_2,
                ROLE_DEPTNAME,
                ROLE_TITLE,
                ROLE_FAMILY_DESC,
                ROLE_FAMILY,
                ROLE_CODE) AS prediction,
        1. / (1. + exp(-prediction)) AS prob,
        ACTION AS tg
    FROM amazon_train
)
```

!!! note "Note"
    See the descriptions of the [avg()](../sql-reference/aggregate-functions/reference/avg.md#agg_function-avg) and [log()](../sql-reference/functions/math-functions.md) functions.

[Original article](https://clickhouse.com/docs/en/guides/apply_catboost_model/) <!--hide-->
@ -9,6 +9,5 @@ sidebar_label: ClickHouse指南
Detailed instructions on how to solve various tasks with ClickHouse:

- [Tutorial on a simple cluster setup](../getting-started/tutorial.md)
- [Applying a CatBoost model in ClickHouse](apply-catboost-model.md)

[Original article](https://clickhouse.com/docs/en/guides/) <!--hide-->
@ -500,7 +500,7 @@ ALTER USER [IF EXISTS] name [ON CLUSTER cluster_name]
    [IDENTIFIED [WITH {PLAINTEXT_PASSWORD|SHA256_PASSWORD|DOUBLE_SHA1_PASSWORD}] BY {'password'|'hash'}]
    [[ADD|DROP] HOST {LOCAL | NAME 'name' | REGEXP 'name_regexp' | IP 'address' | LIKE 'pattern'} [,...] | ANY | NONE]
    [DEFAULT ROLE role [,...] | ALL | ALL EXCEPT role [,...] ]
    [SETTINGS variable [= value] [MIN [=] min_value] [MAX [=] max_value] [READONLY|WRITABLE] | PROFILE 'profile_name'] [,...]
    [SETTINGS variable [= value] [MIN [=] min_value] [MAX [=] max_value] [CONST|READONLY|WRITABLE|CHANGEABLE_IN_READONLY] | PROFILE 'profile_name'] [,...]
```

### Description {#alter-user-dscr}
@ -540,7 +540,7 @@ ALTER USER user DEFAULT ROLE ALL EXCEPT role1, role2
``` sql
ALTER ROLE [IF EXISTS] name [ON CLUSTER cluster_name]
    [RENAME TO new_name]
    [SETTINGS variable [= value] [MIN [=] min_value] [MAX [=] max_value] [READONLY|WRITABLE] | PROFILE 'profile_name'] [,...]
    [SETTINGS variable [= value] [MIN [=] min_value] [MAX [=] max_value] [CONST|READONLY|WRITABLE|CHANGEABLE_IN_READONLY] | PROFILE 'profile_name'] [,...]
```

## Altering a Row Policy {#alter-row-policy-statement}
@ -584,7 +584,7 @@ ALTER QUOTA [IF EXISTS] name [ON CLUSTER cluster_name]
``` sql
ALTER SETTINGS PROFILE [IF EXISTS] name [ON CLUSTER cluster_name]
    [RENAME TO new_name]
    [SETTINGS variable [= value] [MIN [=] min_value] [MAX [=] max_value] [READONLY|WRITABLE] | INHERIT 'profile_name'] [,...]
    [SETTINGS variable [= value] [MIN [=] min_value] [MAX [=] max_value] [CONST|READONLY|WRITABLE|CHANGEABLE_IN_READONLY] | INHERIT 'profile_name'] [,...]
```

[Original article](https://clickhouse.com/docs/en/query_language/alter/) <!--hide-->
@ -13,5 +13,5 @@ sidebar_label: 角色
``` sql
ALTER ROLE [IF EXISTS] name1 [ON CLUSTER cluster_name1] [RENAME TO new_name1]
    [, name2 [ON CLUSTER cluster_name2] [RENAME TO new_name2] ...]
    [SETTINGS variable [= value] [MIN [=] min_value] [MAX [=] max_value] [READONLY|WRITABLE] | PROFILE 'profile_name'] [,...]
    [SETTINGS variable [= value] [MIN [=] min_value] [MAX [=] max_value] [CONST|READONLY|WRITABLE|CHANGEABLE_IN_READONLY] | PROFILE 'profile_name'] [,...]
```
@ -13,5 +13,5 @@ sidebar_label: 配置文件设置
``` sql
ALTER SETTINGS PROFILE [IF EXISTS] TO name1 [ON CLUSTER cluster_name1] [RENAME TO new_name1]
    [, name2 [ON CLUSTER cluster_name2] [RENAME TO new_name2] ...]
    [SETTINGS variable [= value] [MIN [=] min_value] [MAX [=] max_value] [READONLY|WRITABLE] | INHERIT 'profile_name'] [,...]
    [SETTINGS variable [= value] [MIN [=] min_value] [MAX [=] max_value] [CONST|READONLY|WRITABLE|CHANGEABLE_IN_READONLY] | INHERIT 'profile_name'] [,...]
```
Some files were not shown because too many files have changed in this diff.